max_stars_count (int64): 301 – 224k
text (stringlengths): 6 – 1.05M
token_count (int64): 3 – 727k
921
package sqlancer.dbms;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assumptions.assumeTrue;

import org.junit.jupiter.api.Test;

import sqlancer.Main;

public class TestMySQLTLP {

    String mysqlAvailable = System.getenv("MYSQL_AVAILABLE");
    boolean mysqlIsAvailable = mysqlAvailable != null && mysqlAvailable.equalsIgnoreCase("true");

    @Test
    public void testMySQL() {
        assumeTrue(mysqlIsAvailable);
        assertEquals(0,
                Main.executeMain(new String[] { "--random-seed", "0", "--timeout-seconds", TestConfig.SECONDS,
                        "--max-expression-depth", "1", "--num-threads", "4", "--num-queries", TestConfig.NUM_QUERIES,
                        "mysql", "--oracle", "TLP_WHERE" }));
    }

}
335
2,690
#ifndef SM_LOGGER_HPP
#define SM_LOGGER_HPP

#include <string>
#include <chrono>

namespace sm {
namespace logging {

struct LoggingEvent;

class Logger {
public:
    typedef std::chrono::system_clock Clock;
    typedef Clock::time_point Time;
    typedef Clock::duration Duration;

    Logger();
    virtual ~Logger();

    double currentTimeSecondsUtc() const;
    std::string currentTimeString() const;
    Time currentTime() const;

    void log(const LoggingEvent & event);

protected:
    virtual Time currentTimeImplementation() const;
    virtual void logImplementation(const LoggingEvent & event) = 0;
};

} // namespace logging
} // namespace sm

#endif /* SM_LOGGER_HPP */
397
379
package cc.bitky.clusterdeviceplatform.client.server.repo;

import cc.bitky.clusterdeviceplatform.messageutils.msg.statusreply.MsgReplyDeviceStatus;

public class MsgPackage {
    MsgReplyDeviceStatus chargeStatus;
    MsgReplyDeviceStatus workStatus;

    public MsgPackage(MsgReplyDeviceStatus chargeStatus, MsgReplyDeviceStatus workStatus) {
        this.chargeStatus = chargeStatus;
        this.workStatus = workStatus;
    }

    public MsgReplyDeviceStatus getChargeStatus() {
        return chargeStatus;
    }

    public MsgReplyDeviceStatus getWorkStatus() {
        return workStatus;
    }
}
205
452
<reponame>suztomo/firebase-admin-java
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.firebase.remoteconfig;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;

import org.junit.Test;

public class ParameterValueTest {

  @Test
  public void testCreateExplicitValue() {
    final ParameterValue.Explicit parameterValue = ParameterValue.of("title text");

    assertEquals("title text", parameterValue.getValue());
  }

  @Test
  public void testCreateInAppDefault() {
    final ParameterValue.InAppDefault parameterValue = ParameterValue.inAppDefault();

    assertEquals(ParameterValue.InAppDefault.class, parameterValue.getClass());
  }

  @Test
  public void testEquality() {
    ParameterValue.Explicit parameterValueOne = ParameterValue.of("value");
    ParameterValue.Explicit parameterValueTwo = ParameterValue.of("value");
    ParameterValue.Explicit parameterValueThree = ParameterValue.of("title");

    assertEquals(parameterValueOne, parameterValueTwo);
    assertNotEquals(parameterValueOne, parameterValueThree);

    ParameterValue.InAppDefault parameterValueFour = ParameterValue.inAppDefault();
    ParameterValue.InAppDefault parameterValueFive = ParameterValue.inAppDefault();

    assertEquals(parameterValueFour, parameterValueFive);
  }
}
540
561
########################################################################## # # Copyright (c) 2011-2012, <NAME>. All rights reserved. # Copyright (c) 2011-2012, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of <NAME> nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import weakref import IECore import Gaffer import GafferUI class ScriptWindow( GafferUI.Window ) : def __init__( self, script, **kw ) : self.__titleChangedSignal = GafferUI.WidgetEventSignal() GafferUI.Window.__init__( self, **kw ) self.__script = script self.__titleBehaviour = _WindowTitleBehaviour( self, script ) self.__listContainer = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Vertical, spacing = 0 ) menuDefinition = self.menuDefinition( script.applicationRoot() ) if script.applicationRoot() else IECore.MenuDefinition() self.__listContainer.append( GafferUI.MenuBar( menuDefinition ) ) # Must parent `__listContainer` to the window before setting the layout, # because `CompoundEditor.__parentChanged` needs to find the ancestor # ScriptWindow. self.setChild( self.__listContainer ) applicationRoot = self.__script.ancestor( Gaffer.ApplicationRoot ) layouts = GafferUI.Layouts.acquire( applicationRoot ) if applicationRoot is not None else None if layouts is not None : self.setLayout( layouts.createDefault( script ) ) else : self.setLayout( GafferUI.CompoundEditor( script ) ) self.closedSignal().connect( Gaffer.WeakMethod( self.__closed ), scoped = False ) ScriptWindow.__instances.append( weakref.ref( self ) ) def menuBar( self ) : return self.__listContainer[0] def scriptNode( self ) : return self.__script def setLayout( self, compoundEditor ) : if len( self.__listContainer ) > 1 : del self.__listContainer[1] assert( compoundEditor.scriptNode().isSame( self.scriptNode() ) ) self.__listContainer.append( compoundEditor, expand=True ) def getLayout( self ) : return self.__listContainer[1] # Calling this will disable automatic title updates when the script state # changes name/dirty state. 
def setTitle( self, title ) : self.__titleBehaviour = None self._setTitle( title ) def _setTitle( self, title ) : GafferUI.Window.setTitle( self, title ) self.__titleChangedSignal( self, title ) def titleChangedSignal( self ) : return self.__titleChangedSignal def _acceptsClose( self ) : if not self.__script["unsavedChanges"].getValue() : return True f = self.__script["fileName"].getValue() f = f.rpartition( "/" )[2] if f else "untitled" dialogue = GafferUI.ConfirmationDialogue( "Discard Unsaved Changes?", "The file %s has unsaved changes. Do you want to discard them?" % f, confirmLabel = "Discard" ) return dialogue.waitForConfirmation( parentWindow=self ) def __closed( self, widget ) : scriptParent = self.__script.parent() if scriptParent is not None : scriptParent.removeChild( self.__script ) __instances = [] # weak references to all instances - used by acquire() ## Returns the ScriptWindow for the specified script, creating one # if necessary. @staticmethod def acquire( script, createIfNecessary=True ) : for w in ScriptWindow.__instances : scriptWindow = w() if scriptWindow is not None and scriptWindow.scriptNode().isSame( script ) : return scriptWindow return ScriptWindow( script ) if createIfNecessary else None ## Returns an IECore.MenuDefinition which is used to define the menu bars for all ScriptWindows # created as part of the specified application. This can be edited at any time to modify subsequently # created ScriptWindows - typically editing would be done as part of gaffer startup. @staticmethod def menuDefinition( applicationOrApplicationRoot ) : if isinstance( applicationOrApplicationRoot, Gaffer.Application ) : applicationRoot = applicationOrApplicationRoot.root() else : assert( isinstance( applicationOrApplicationRoot, Gaffer.ApplicationRoot ) ) applicationRoot = applicationOrApplicationRoot menuDefinition = getattr( applicationRoot, "_scriptWindowMenuDefinition", None ) if menuDefinition : return menuDefinition menuDefinition = IECore.MenuDefinition() applicationRoot._scriptWindowMenuDefinition = menuDefinition return menuDefinition ## This function provides the top level functionality for instantiating # the UI. Once called, new ScriptWindows will be instantiated for each # script added to the application, and EventLoop.mainEventLoop().stop() will # be called when the last script is removed. 
@classmethod def connect( cls, applicationRoot ) : applicationRoot["scripts"].childAddedSignal().connect( 0, ScriptWindow.__scriptAdded, scoped = False ) applicationRoot["scripts"].childRemovedSignal().connect( ScriptWindow.__staticScriptRemoved, scoped = False ) __automaticallyCreatedInstances = [] # strong references to instances made by __scriptAdded() @staticmethod def __scriptAdded( scriptContainer, script ) : w = ScriptWindow( script ) w.setVisible( True ) w.getLayout().restoreWindowState() ScriptWindow.__automaticallyCreatedInstances.append( w ) @staticmethod def __staticScriptRemoved( scriptContainer, script ) : for w in ScriptWindow.__automaticallyCreatedInstances : if w.scriptNode().isSame( script ) : ScriptWindow.__automaticallyCreatedInstances.remove( w ) if not len( scriptContainer.children() ) and GafferUI.EventLoop.mainEventLoop().running() : GafferUI.EventLoop.mainEventLoop().stop() class _WindowTitleBehaviour : def __init__( self, window, script ) : self.__window = weakref.ref( window ) self.__script = weakref.ref( script ) self.__scriptPlugSetConnection = script.plugSetSignal().connect( Gaffer.WeakMethod( self.__scriptPlugChanged ) ) self.__metadataChangedConnection = Gaffer.Metadata.nodeValueChangedSignal().connect( Gaffer.WeakMethod( self.__metadataChanged ) ) self.__updateTitle() def __updateTitle( self ) : w = self.__window() if not w : return f = self.__script()["fileName"].getValue() if not f : f = "untitled" d = "" else : d, n, f = f.rpartition( "/" ) d = " - " + d u = " *" if self.__script()["unsavedChanges"].getValue() else "" ro = " (read only) " if Gaffer.MetadataAlgo.readOnly( self.__script() ) else "" w._setTitle( "Gaffer %s : %s%s%s%s" % ( Gaffer.About.versionString(), f, ro, u, d ) ) def __scriptPlugChanged( self, plug ) : if plug.isSame( self.__script()["fileName"] ) or plug.isSame( self.__script()["unsavedChanges"] ) : self.__updateTitle() def __metadataChanged( self, nodeTypeId, key, node ) : if Gaffer.MetadataAlgo.readOnlyAffectedByChange( self.__script(), nodeTypeId, key, node ) : self.__updateTitle()
2,578
852
#ifndef RecoSelectors_GenParticleCustomSelector_h #define RecoSelectors_GenParticleCustomSelector_h /* \class GenParticleCustomSelector * * \author <NAME>, UCSD * */ #include "DataFormats/HepMCCandidate/interface/GenParticle.h" class GenParticleCustomSelector { public: GenParticleCustomSelector() {} GenParticleCustomSelector(double ptMin, double minRapidity, double maxRapidity, double tip, double lip, bool chargedOnly, int status, const std::vector<int>& pdgId = std::vector<int>(), bool invertRapidityCut = false, double minPhi = -3.2, double maxPhi = 3.2) : ptMin_(ptMin), minRapidity_(minRapidity), maxRapidity_(maxRapidity), meanPhi_((minPhi + maxPhi) / 2.), rangePhi_((maxPhi - minPhi) / 2.), tip_(tip), lip_(lip), chargedOnly_(chargedOnly), status_(status), pdgId_(pdgId), invertRapidityCut_(invertRapidityCut) { if (minPhi >= maxPhi) { throw cms::Exception("Configuration") << "GenParticleCustomSelector: minPhi (" << minPhi << ") must be smaller than maxPhi (" << maxPhi << "). The range is constructed from minPhi to maxPhi around their " "average."; } if (minPhi >= M_PI) { throw cms::Exception("Configuration") << "GenParticleCustomSelector: minPhi (" << minPhi << ") must be smaller than PI. The range is constructed from minPhi " "to maxPhi around their average."; } if (maxPhi <= -M_PI) { throw cms::Exception("Configuration") << "GenParticleCustomSelector: maxPhi (" << maxPhi << ") must be larger than -PI. The range is constructed from minPhi " "to maxPhi around their average."; } } /// Operator() performs the selection: e.g. if (tPSelector(tp)) {...} bool operator()(const reco::GenParticle& tp) const { if (chargedOnly_ && tp.charge() == 0) return false; //select only if charge!=0 bool testId = false; unsigned int idSize = pdgId_.size(); if (idSize == 0) testId = true; else for (unsigned int it = 0; it != idSize; ++it) { if (tp.pdgId() == pdgId_[it]) testId = true; } auto etaOk = [&](const reco::GenParticle& p) -> bool { float eta = p.eta(); if (!invertRapidityCut_) return (eta >= minRapidity_) && (eta <= maxRapidity_); else return (eta < minRapidity_ || eta > maxRapidity_); }; auto phiOk = [&](const reco::GenParticle& p) { float dphi = deltaPhi(atan2f(p.py(), p.px()), meanPhi_); return dphi >= -rangePhi_ && dphi <= rangePhi_; }; auto ptOk = [&](const reco::GenParticle& p) { double pt = p.pt(); return pt >= ptMin_; }; return (ptOk(tp) && etaOk(tp) && phiOk(tp) && sqrt(tp.vertex().perp2()) <= tip_ && fabs(tp.vertex().z()) <= lip_ && tp.status() == status_ && testId); } private: double ptMin_; double minRapidity_; double maxRapidity_; float meanPhi_; float rangePhi_; double tip_; double lip_; bool chargedOnly_; int status_; std::vector<int> pdgId_; bool invertRapidityCut_; }; #include "FWCore/Framework/interface/ConsumesCollector.h" #include "CommonTools/UtilAlgos/interface/ParameterAdapter.h" namespace reco { namespace modules { template <> struct ParameterAdapter<GenParticleCustomSelector> { static GenParticleCustomSelector make(const edm::ParameterSet& cfg, edm::ConsumesCollector& iC) { return make(cfg); } static GenParticleCustomSelector make(const edm::ParameterSet& cfg) { return GenParticleCustomSelector(cfg.getParameter<double>("ptMin"), cfg.getParameter<double>("minRapidity"), cfg.getParameter<double>("maxRapidity"), cfg.getParameter<double>("tip"), cfg.getParameter<double>("lip"), cfg.getParameter<bool>("chargedOnly"), cfg.getParameter<int>("status"), cfg.getParameter<std::vector<int> >("pdgId"), cfg.getParameter<bool>("invertRapidityCut"), cfg.getParameter<double>("minPhi"), 
cfg.getParameter<double>("maxPhi")); } }; } // namespace modules } // namespace reco #endif
2,643
971
/*
 * Copyright (c) 2014 <NAME>.
 *
 * This program is made available under the terms of the MIT License.
 *
 * Created on Mar 10, 2014
 */

#include "fakeit/EventHandler.hpp"
#include "fakeit/FakeitContext.hpp"
#include "fakeit/DefaultEventLogger.hpp"
#include "fakeit/DefaultEventFormatter.hpp"

namespace fakeit {

    class AbstractFakeit : public FakeitContext {
    public:
        virtual ~AbstractFakeit() = default;

    protected:
        virtual fakeit::EventHandler &accessTestingFrameworkAdapter() = 0;

        virtual EventFormatter &accessEventFormatter() = 0;
    };

    class DefaultFakeit : public AbstractFakeit {
        DefaultEventFormatter _formatter;
        fakeit::EventFormatter *_customFormatter;
        fakeit::EventHandler *_testingFrameworkAdapter;

    public:
        DefaultFakeit()
                : _formatter(),
                  _customFormatter(nullptr),
                  _testingFrameworkAdapter(nullptr) {
        }

        virtual ~DefaultFakeit() = default;

        void setCustomEventFormatter(fakeit::EventFormatter &customEventFormatter) {
            _customFormatter = &customEventFormatter;
        }

        void resetCustomEventFormatter() {
            _customFormatter = nullptr;
        }

        void setTestingFrameworkAdapter(fakeit::EventHandler &testingFrameworkAdapter) {
            _testingFrameworkAdapter = &testingFrameworkAdapter;
        }

        void resetTestingFrameworkAdapter() {
            _testingFrameworkAdapter = nullptr;
        }

    protected:
        fakeit::EventHandler &getTestingFrameworkAdapter() override {
            if (_testingFrameworkAdapter)
                return *_testingFrameworkAdapter;
            return accessTestingFrameworkAdapter();
        }

        EventFormatter &getEventFormatter() override {
            if (_customFormatter)
                return *_customFormatter;
            return accessEventFormatter();
        }

        EventFormatter &accessEventFormatter() override {
            return _formatter;
        }
    };
}
836
865
# # SPDX-License-Identifier: Apache-2.0 # from string import Template import os import yaml from api.config import CELLO_HOME class NodeConfig: """Class represents crypto-config yaml.""" def __init__(self, org, peer_file="core.yaml", orderer_file="orderer.yaml", ca_file=""): """ init node config :param org: organization name :param peer: peer profile template :param ca: ca profile template :param orderer: orderer profile template :return: none :rtype: xxx """ self.org = org self.peer_file = peer_file self.orderer_file = orderer_file self.ca_file = ca_file @staticmethod def _render(src, dst, **kw): """ Generate configuration file based on parameters :param kw: Node configuration parameters,Use the underline interval key。 e.g., peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" :param src: Node profile template :param dst: Node profile :return: none :rtype: none """ try: with open(src, 'r+') as f: cfg = yaml.load(f, Loader=yaml.FullLoader) for key, value in kw.items(): keys = key.split("_") # switch = {2: cfg[keys[0]][keys[1]], # 3: cfg[keys[0]][keys[1]][keys[2]], # 4: cfg[keys[0]][keys[1]][keys[2]][keys[3]], # 5: cfg[keys[0]][keys[1]][keys[2]][keys[3]][keys[4]]} if len(keys) == 2: cfg[keys[0]][keys[1]] = value elif len(keys) == 3: cfg[keys[0]][keys[1]][keys[2]] = value elif len(keys) == 4: cfg[keys[0]][keys[1]][keys[2]][keys[3]] = value elif len(keys) == 5: cfg[keys[0]][keys[1]][keys[2]][keys[3]][keys[4]] = value with open(dst, 'w+') as f: yaml.dump(cfg, f) except Exception as e: raise e def __from_dst(self, node, node_type): """ Location of the new profile :param node: node name :param node_type: node type (peer, orderer, ca) :return: dst :rtype: string """ if node_type == "peer": dst = "{}/{}/crypto-config/peerOrganizations/{}/peers/{}.{}/{}"\ .format(CELLO_HOME, self.org, self.org, node, self.org, self.peer_file) elif node_type == "orderer": dst = "{}/{}/crypto-config/ordererOrganizations/{}/orderers/{}.{}/{}"\ .format(CELLO_HOME, self.org, self.org.split(".", 1)[1], node, self.org.split(".", 1)[1], self.orderer_file) else: dst = "" return dst def peer(self, node, **kwargs): """ Location of the node profile :param node: peer name :param kwargs: Node configuration parameters,Use the underline interval key。 e.g., peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" :return: none :rtype: none """ src = "/opt/node/core.yaml.bak" dst = self.__from_dst(node, "peer") self._render(src, dst, **kwargs) def orderer(self, node, **kwargs): """ Location of the orderer profile :param node: orderer name :param kwargs: Node configuration parameters,Use the underline interval key。 e.g., peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" :return: none :rtype: none """ src = "/opt/node/orderer.yaml.bak" dst = self.__from_dst(node, "orderer") self._render(src, dst, **kwargs) def ca(self, node, **kwargs): """ Location of the orderer profile :param node: ca name :param kwargs: Node configuration parameters,Use the underline interval key。 e.g., peer listenAddress, kwargs["peer_listenAddress"]="0.0.0.0:7051" chaincode builder, kwargs["chaincode_builder"]="hyperledger/fabric-ccenv:1.4.2" :return: none :rtype: none """ src = self.ca_file dst = self.__from_dst(node, "ca") self._render(src, dst, **kwargs)
2,445
3,227
#include "MainWindow.h" #include "typedefs.h" #include <QApplication> #include <CGAL/Qt/resources.h> #include <CGAL/Qt/init_ogl_context.h> int main(int argc, char** argv) { CGAL::Qt::init_ogl_context(4,3); QApplication application(argc,argv); application.setOrganizationDomain("geometryfactory.com"); application.setOrganizationName("GeometryFactory"); application.setApplicationName("Alpha Shape Reconstruction"); // Import resources from libCGALQt (Qt5). // See https://doc.qt.io/qt-5/qdir.html#Q_INIT_RESOURCE CGAL_QT_INIT_RESOURCES; Q_INIT_RESOURCE(Alpha_shape_3); MainWindow mw; mw.show(); return application.exec(); }
247
1,093
<gh_stars>1000+ /* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.integration.syslog.config; import org.springframework.beans.factory.BeanNameAware; import org.springframework.beans.factory.config.AbstractFactoryBean; import org.springframework.context.ApplicationEventPublisher; import org.springframework.context.ApplicationEventPublisherAware; import org.springframework.context.SmartLifecycle; import org.springframework.integration.JavaUtils; import org.springframework.integration.ip.tcp.connection.AbstractServerConnectionFactory; import org.springframework.integration.ip.udp.UnicastReceivingChannelAdapter; import org.springframework.integration.syslog.MessageConverter; import org.springframework.integration.syslog.inbound.SyslogReceivingChannelAdapterSupport; import org.springframework.integration.syslog.inbound.TcpSyslogReceivingChannelAdapter; import org.springframework.integration.syslog.inbound.UdpSyslogReceivingChannelAdapter; import org.springframework.messaging.MessageChannel; import org.springframework.util.Assert; /** * Factory bean to create syslog inbound adapters (UDP or TCP). * * @author <NAME> * @author <NAME> * * @since 3.0 * */ public class SyslogReceivingChannelAdapterFactoryBean extends AbstractFactoryBean<SyslogReceivingChannelAdapterSupport> implements SmartLifecycle, BeanNameAware, ApplicationEventPublisherAware { public enum Protocol { udp, tcp } private volatile SyslogReceivingChannelAdapterSupport syslogAdapter; private final Protocol protocol; private volatile MessageChannel outputChannel; private volatile boolean autoStartup = true; private volatile MessageChannel errorChannel; private volatile int phase; private volatile Long sendTimeout; private volatile AbstractServerConnectionFactory connectionFactory; private volatile UnicastReceivingChannelAdapter udpAdapter; private volatile Integer port; private volatile MessageConverter converter; private volatile String beanName; private volatile ApplicationEventPublisher applicationEventPublisher; /** * Instantiates a factory bean that creates a {@link UdpSyslogReceivingChannelAdapter} * if the protocol is {@link Protocol#udp} or a {@link TcpSyslogReceivingChannelAdapter} if * the protocol is {@link Protocol#tcp}. * @param protocol The protocol. 
*/ public SyslogReceivingChannelAdapterFactoryBean(Protocol protocol) { Assert.notNull(protocol, "'protocol' cannot be null"); this.protocol = protocol; } public void setOutputChannel(MessageChannel outputChannel) { this.outputChannel = outputChannel; } public void setAutoStartup(boolean autoStartup) { this.autoStartup = autoStartup; } public void setErrorChannel(MessageChannel errorChannel) { this.errorChannel = errorChannel; } public void setPhase(int phase) { this.phase = phase; } public void setSendTimeout(long sendTimeout) { this.sendTimeout = sendTimeout; } public void setConnectionFactory(AbstractServerConnectionFactory connectionFactory) { this.connectionFactory = connectionFactory; } public void setUdpAdapter(UnicastReceivingChannelAdapter udpAdapter) { this.udpAdapter = udpAdapter; } public void setPort(int port) { this.port = port; } public void setConverter(MessageConverter converter) { this.converter = converter; } @Override public void setApplicationEventPublisher(ApplicationEventPublisher applicationEventPublisher) { this.applicationEventPublisher = applicationEventPublisher; } @Override public void start() { if (this.syslogAdapter != null) { this.syslogAdapter.start(); } } @Override public void stop() { if (this.syslogAdapter != null) { this.syslogAdapter.stop(); } } @Override public boolean isRunning() { if (this.syslogAdapter != null) { return this.syslogAdapter.isRunning(); } return false; } @Override public int getPhase() { return this.phase; } @Override public void setBeanName(String name) { this.beanName = name; } @Override public boolean isAutoStartup() { return this.autoStartup; } @Override public void stop(Runnable callback) { if (this.syslogAdapter != null) { this.syslogAdapter.stop(callback); } else { callback.run(); } } @Override public Class<?> getObjectType() { return this.syslogAdapter == null ? 
SyslogReceivingChannelAdapterSupport.class : this.syslogAdapter.getClass(); } @Override protected SyslogReceivingChannelAdapterSupport createInstance() { SyslogReceivingChannelAdapterSupport adapter; if (this.protocol == Protocol.tcp) { adapter = new TcpSyslogReceivingChannelAdapter(); if (this.connectionFactory != null) { Assert.isNull(this.port, "Cannot specify both 'port' and 'connectionFactory'"); ((TcpSyslogReceivingChannelAdapter) adapter).setConnectionFactory(this.connectionFactory); } else if (this.applicationEventPublisher != null) { ((TcpSyslogReceivingChannelAdapter) adapter) .setApplicationEventPublisher(this.applicationEventPublisher); } Assert.isNull(this.udpAdapter, "Cannot specify 'udp-attributes' when the protocol is 'tcp'"); } else if (this.protocol == Protocol.udp) { adapter = new UdpSyslogReceivingChannelAdapter(); if (this.udpAdapter != null) { Assert.isNull(this.port, "Cannot specify both 'port' and 'udpAdapter'"); ((UdpSyslogReceivingChannelAdapter) adapter).setUdpAdapter(this.udpAdapter); } Assert.isNull(this.connectionFactory, "Cannot specify 'connection-factory' unless the protocol is 'tcp'"); } else { throw new IllegalStateException("Unsupported protocol: " + this.protocol.toString()); } adapter.setAutoStartup(this.autoStartup); adapter.setPhase(this.phase); JavaUtils.INSTANCE .acceptIfNotNull(this.port, adapter::setPort) .acceptIfNotNull(this.outputChannel, adapter::setOutputChannel) .acceptIfNotNull(this.errorChannel, adapter::setErrorChannel) .acceptIfNotNull(this.sendTimeout, adapter::setSendTimeout) .acceptIfNotNull(this.converter, adapter::setConverter) .acceptIfNotNull(this.beanName, adapter::setBeanName) .acceptIfNotNull(getBeanFactory(), adapter::setBeanFactory); adapter.afterPropertiesSet(); this.syslogAdapter = adapter; return this.syslogAdapter; } }
2,132
1,853
<reponame>galorojo/cppinsights
#include <array>

int main()
{
  // check that clamp works and we only initialize the first 100 elements.
  std::array<int, 100000> arr{};

  char buffer[100000]{};
}
74
1,473
/*
 * Copyright 2020 NAVER Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.navercorp.pinpoint.plugin.process;

import com.navercorp.pinpoint.common.trace.AnnotationKey;
import com.navercorp.pinpoint.common.trace.AnnotationKeyFactory;
import com.navercorp.pinpoint.common.trace.ServiceType;
import com.navercorp.pinpoint.common.trace.ServiceTypeFactory;

import static com.navercorp.pinpoint.common.trace.AnnotationKeyProperty.VIEW_IN_RECORD_SET;

public class ProcessPluginConstants {

    public static final ServiceType SERVICE_TYPE = ServiceTypeFactory.of(6005, "PROCESS");

    public static final AnnotationKey PROCESS_COMMAND = AnnotationKeyFactory.of(180, "process.command", VIEW_IN_RECORD_SET);
    public static final AnnotationKey PROCESS_ID = AnnotationKeyFactory.of(181, "process.pid", VIEW_IN_RECORD_SET);
}
401
3,144
//------------------------------------------------------------------------------ // Copyright (c) 2018 by contributors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //------------------------------------------------------------------------------ /* This file is the implementation of the basic Loss class. */ #include "src/loss/loss.h" #include "src/loss/squared_loss.h" #include "src/loss/cross_entropy_loss.h" namespace xLearn { //------------------------------------------------------------------------------ // Class register //------------------------------------------------------------------------------ CLASS_REGISTER_IMPLEMENT_REGISTRY(xLearn_loss_registry, Loss); REGISTER_LOSS("squared", SquaredLoss); REGISTER_LOSS("cross-entropy", CrossEntropyLoss); // Predict in one thread void pred_thread(const DMatrix* matrix, Model* model, std::vector<real_t>* pred, Score* score_func_, bool is_norm, size_t start_idx, size_t end_idx) { CHECK_GE(end_idx, start_idx); for (size_t i = start_idx; i < end_idx; ++i) { SparseRow* row = matrix->row[i]; real_t norm = is_norm ? matrix->norm[i] : 1.0; (*pred)[i] = score_func_->CalcScore(row, *model, norm); } } // Predict in multi-thread void Loss::Predict(const DMatrix* matrix, Model& model, std::vector<real_t>& pred) { CHECK_NOTNULL(matrix); CHECK_NE(pred.empty(), true); CHECK_EQ(pred.size(), matrix->row_length); index_t row_len = matrix->row_length; // Predict in multi-thread for (int i = 0; i < threadNumber_; ++i) { size_t start_idx = getStart(row_len, threadNumber_, i); size_t end_idx = getEnd(row_len, threadNumber_, i); pool_->enqueue(std::bind(pred_thread, matrix, &model, &pred, score_func_, norm_, start_idx, end_idx)); } // Wait all of the threads finish their job pool_->Sync(threadNumber_); } // Given data sample and current model, calculate gradient. // Note that this method doesn't update local model, and the // gradient will be pushed to the parameter server, which is // used for distributed computation. void Loss::CalcGradDist(DMatrix* matrix, Model& model, std::vector<real_t>& grad) { for(;;) { // Get a mini-batch from current data matrix DMatrix mini_batch; mini_batch.ReAlloc(batch_size_); index_t len = matrix->GetMiniBatch(batch_size_, mini_batch); if (len == 0) { break; } mini_batch.row_length = len; // Compress the sparse data matrix and sparse model // parameter to dense format std::vector<index_t> feature_list; mini_batch.Compress(feature_list); /* // Pull the model parameter from parameter server store->pull(feature_list, model); // Calculate gradient this->CalcGrad(matrix, model, grad); // Push gradient to the parameter server ps->push(grad, feature_list); */ } } } // namespace xLearn
1,457
367
<gh_stars>100-1000 import torch from torch import nn from torchvision.models import vgg11, vgg16, resnet34 """ Code heavily adapted from ternaus robot-surgery-segmentation https://github.com/ternaus/robot-surgery-segmentation """ class MultiClass_Resnet34(nn.Module): def __init__(self, num_classes=1, num_filters=32, pretrained=True, is_deconv=False): super().__init__() self.num_classes = num_classes self.pool = nn.MaxPool2d(2, 2) self.encoder = resnet34(pretrained=pretrained) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder.conv1, self.encoder.bn1, self.encoder.relu, self.pool) self.conv2 = self.encoder.layer1 self.conv3 = self.encoder.layer2 self.conv4 = self.encoder.layer3 self.conv5 = self.encoder.layer4 self.center = MultiClass_DecoderBlock(512, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec5 = MultiClass_DecoderBlock(512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec4 = MultiClass_DecoderBlock(256 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec3 = MultiClass_DecoderBlock(128 + num_filters * 8, num_filters * 4 * 2, num_filters * 2, is_deconv) self.dec2 = MultiClass_DecoderBlock(64 + num_filters * 2, num_filters * 2 * 2, num_filters * 2 * 2, is_deconv) self.dec1 = MultiClass_DecoderBlock(num_filters * 2 * 2, num_filters * 2 * 2, num_filters, is_deconv) self.dec0 = MultiClass_ConvRelu(num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(conv1) conv3 = self.conv3(conv2) conv4 = self.conv4(conv3) conv5 = self.conv5(conv4) center = self.center(self.pool(conv5)) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(dec2) dec0 = self.dec0(dec1) x_out = self.final(dec0) return x_out class MultiClass_UNet_VGG16(nn.Module): def __init__(self, num_classes=1, num_filters=32, pretrained=True): super().__init__() self.num_classes = num_classes self.encoder = vgg16(pretrained=pretrained).features self.pool = nn.MaxPool2d(2, 2) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder[0], self.relu, self.encoder[2], self.relu) self.conv2 = nn.Sequential(self.encoder[5], self.relu, self.encoder[7], self.relu) self.conv3 = nn.Sequential(self.encoder[10], self.relu, self.encoder[12], self.relu, self.encoder[14], self.relu) self.conv4 = nn.Sequential(self.encoder[17], self.relu, self.encoder[19], self.relu, self.encoder[21], self.relu) self.conv5 = nn.Sequential(self.encoder[24], self.relu, self.encoder[26], self.relu, self.encoder[28], self.relu) self.center = MultiClass_DecoderBlock(512, num_filters * 8 * 2, num_filters * 8) self.dec5 = MultiClass_DecoderBlock( 512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8) self.dec4 = MultiClass_DecoderBlock( 512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8) self.dec3 = MultiClass_DecoderBlock( 256 + num_filters * 8, num_filters * 4 * 2, num_filters * 2) self.dec2 = MultiClass_DecoderBlock( 128 + num_filters * 2, num_filters * 2 * 2, num_filters) self.dec1 = MultiClass_ConvRelu(64 + num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(self.pool(conv1)) conv3 = self.conv3(self.pool(conv2)) conv4 = self.conv4(self.pool(conv3)) conv5 = self.conv5(self.pool(conv4)) center = 
self.center(self.pool(conv5)) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(torch.cat([dec2, conv1], 1)) x_out = self.final(dec1) return x_out class MultiClass_UNet_VGG11(nn.Module): def __init__(self, num_classes=1, num_filters=32, pretrained=True): super().__init__() self.num_classes = num_classes self.pool = nn.MaxPool2d(2, 2) self.encoder = vgg11(pretrained=pretrained).features self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder[0], self.relu) self.conv2 = nn.Sequential(self.encoder[3], self.relu) self.conv3 = nn.Sequential( self.encoder[6], self.relu, self.encoder[8], self.relu, ) self.conv4 = nn.Sequential( self.encoder[11], self.relu, self.encoder[13], self.relu, ) self.conv5 = nn.Sequential( self.encoder[16], self.relu, self.encoder[18], self.relu, ) self.center = MultiClass_DecoderBlock(256 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv=True) self.dec5 = MultiClass_DecoderBlock(512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv=True) self.dec4 = MultiClass_DecoderBlock(512 + num_filters * 8, num_filters * 8 * 2, num_filters * 4, is_deconv=True) self.dec3 = MultiClass_DecoderBlock(256 + num_filters * 4, num_filters * 4 * 2, num_filters * 2, is_deconv=True) self.dec2 = MultiClass_DecoderBlock(128 + num_filters * 2, num_filters * 2 * 2, num_filters, is_deconv=True) self.dec1 = MultiClass_ConvRelu(64 + num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(self.pool(conv1)) conv3 = self.conv3(self.pool(conv2)) conv4 = self.conv4(self.pool(conv3)) conv5 = self.conv5(self.pool(conv4)) center = self.center(self.pool(conv5)) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(torch.cat([dec2, conv1], 1)) x_out = self.final(dec1) return x_out class MultiClass_LinkNet34(nn.Module): def __init__(self, num_classes=1, num_channels=3, pretrained=True): super().__init__() assert num_channels == 3 self.num_classes = num_classes filters = [64, 128, 256, 512] resnet = resnet34(pretrained=pretrained) self.firstconv = resnet.conv1 self.firstbn = resnet.bn1 self.firstrelu = resnet.relu self.firstmaxpool = resnet.maxpool self.encoder1 = resnet.layer1 self.encoder2 = resnet.layer2 self.encoder3 = resnet.layer3 self.encoder4 = resnet.layer4 # Decoder self.decoder4 = DecoderBlockLinkNet(filters[3], filters[2]) self.decoder3 = DecoderBlockLinkNet(filters[2], filters[1]) self.decoder2 = DecoderBlockLinkNet(filters[1], filters[0]) self.decoder1 = DecoderBlockLinkNet(filters[0], filters[0]) # Final Classifier self.finaldeconv1 = nn.ConvTranspose2d(filters[0], 32, 3, stride=2) self.finalrelu1 = nn.ReLU(inplace=True) self.finalconv2 = nn.Conv2d(32, 32, 3) self.finalrelu2 = nn.ReLU(inplace=True) self.finalconv3 = nn.Conv2d(32, num_classes, 2, padding=1) # noinspection PyCallingNonCallable def forward(self, x): # Encoder x = self.firstconv(x) x = self.firstbn(x) x = self.firstrelu(x) x = self.firstmaxpool(x) e1 = self.encoder1(x) e2 = self.encoder2(e1) e3 = self.encoder3(e2) e4 = self.encoder4(e3) # Decoder with Skip Connections d4 = self.decoder4(e4) + e3 d3 = self.decoder3(d4) + e2 d2 = self.decoder2(d3) + e1 d1 = self.decoder1(d2) # Final 
Classification f1 = self.finaldeconv1(d1) f2 = self.finalrelu1(f1) f3 = self.finalconv2(f2) f4 = self.finalrelu2(f3) f5 = self.finalconv3(f4) x_out = f5 return x_out class MultiClass_ConvRelu(nn.Module): def __init__(self, in_, out): super().__init__() self.conv = nn.Conv2d(in_, out, 3, padding=1) self.activation = nn.ReLU(inplace=True) def forward(self, x): x = self.conv(x) x = self.activation(x) return x class MultiClass_DecoderBlock(nn.Module): def __init__(self, in_channels, middle_channels, out_channels, is_deconv=True): super().__init__() self.in_channels = in_channels if is_deconv: self.block = nn.Sequential( MultiClass_ConvRelu(in_channels, middle_channels), nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=4, stride=2, padding=1), nn.ReLU(inplace=True) ) else: self.block = nn.Sequential( nn.Upsample(scale_factor=2, mode='bilinear'), MultiClass_ConvRelu(in_channels, middle_channels), MultiClass_ConvRelu(middle_channels, out_channels), ) def forward(self, x): return self.block(x) class DecoderBlockLinkNet(nn.Module): def __init__(self, in_channels, n_filters): super().__init__() self.relu = nn.ReLU(inplace=True) # B, C, H, W -> B, C/4, H, W self.conv1 = nn.Conv2d(in_channels, in_channels // 4, 1) self.norm1 = nn.BatchNorm2d(in_channels // 4) # B, C/4, H, W -> B, C/4, 2 * H, 2 * W self.deconv2 = nn.ConvTranspose2d(in_channels // 4, in_channels // 4, kernel_size=4, stride=2, padding=1, output_padding=0) self.norm2 = nn.BatchNorm2d(in_channels // 4) # B, C/4, H, W -> B, C, H, W self.conv3 = nn.Conv2d(in_channels // 4, n_filters, 1) self.norm3 = nn.BatchNorm2d(n_filters) def forward(self, x): x = self.conv1(x) x = self.norm1(x) x = self.relu(x) x = self.deconv2(x) x = self.norm2(x) x = self.relu(x) x = self.conv3(x) x = self.norm3(x) x = self.relu(x) return x
5,675
1,006
<gh_stars>1000+ /**************************************************************************** * arch/arm/src/imxrt/imxrt_iomuxc.h * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. The * ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the * License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. * ****************************************************************************/ #ifndef __ARCH_ARM_SRC_IMXRT_IMX_IOMUXC_H #define __ARCH_ARM_SRC_IMXRT_IMX_IOMUXC_H /**************************************************************************** * Included Files ****************************************************************************/ #include <nuttx/config.h> #include <stdint.h> #include "hardware/imxrt_iomuxc.h" /**************************************************************************** * Pre-processor Definitions ****************************************************************************/ /* 16-bit Encoding: * * .... RRRR ODDD LSST */ /* Output Pull Up/Down: * * .... RRRR .... .... */ #define _IOMUX_PULLTYPE_SHIFT (8) /* Bits 8-9: Pull up/down type */ #define _IOMUX_PULLTYPE_MASK (3 << _IOMUX_PULLTYPE_SHIFT) # define _IOMUX_PULL_NONE (0 << _IOMUX_PULLTYPE_SHIFT) /* Pull/keeper disabled */ # define _IOMUX_PULL_KEEP (1 << _IOMUX_PULLTYPE_SHIFT) /* Output determined by keeper */ # define _IOMUX_PULL_ENABLE (2 << _IOMUX_PULLTYPE_SHIFT) /* Output pulled up or down */ #define _IOMUX_PULLDESC_SHIFT (10) /* Bits 10-11: Pull up/down description */ #define _IOMUX_PULLDESC_MASK (3 << _IOMUX_PULLDESC_SHIFT) # define _IOMUX_PULL_UP_22K (PULL_UP_22K << _IOMUX_PULLDESC_SHIFT) /* Pull up with 22 KOhm resister */ # define _IOMUX_PULL_UP_47K (PULL_UP_47K << _IOMUX_PULLDESC_SHIFT) /* Pull up with 47 KOhm resister */ # define _IOMUX_PULL_UP_100K (PULL_UP_100K << _IOMUX_PULLDESC_SHIFT) /* Pull up with 100 KOhm resister */ # define _IOMUX_PULL_DOWN_100K (PULL_DOWN_100K << _IOMUX_PULLDESC_SHIFT) /* Pull down with 100 KOhm resister */ #define IOMUX_PULL_SHIFT (8) /* Bits 8-11: Pull up/down selection */ #define IOMUX_PULL_MASK (15 << IOMUX_PULL_SHIFT) # define IOMUX_PULL_NONE _IOMUX_PULL_NONE # define IOMUX_PULL_KEEP _IOMUX_PULL_KEEP # define IOMUX_PULL_UP_22K (_IOMUX_PULL_ENABLE | _IOMUX_PULL_UP_22K) # define IOMUX_PULL_UP_47K (_IOMUX_PULL_ENABLE | _IOMUX_PULL_UP_47K) # define IOMUX_PULL_UP_100K (_IOMUX_PULL_ENABLE | _IOMUX_PULL_UP_100K) # define IOMUX_PULL_DOWN_100K (_IOMUX_PULL_ENABLE | _IOMUX_PULL_DOWN_100K) /* Open Drain Output: * * .... .... O... .... */ #define IOMUX_CMOS_OUTPUT (0) /* Bit 7: 0=CMOS output */ #define IOMUX_OPENDRAIN (1 << 7) /* Bit 7: 1=Enable open-drain output */ /* Output Drive Strength: * * .... .... .DDD .... 
*/ #define IOMUX_DRIVE_SHIFT (4) /* Bits 4-6: Output Drive Strength */ #define IOMUX_DRIVE_MASK (7 << IOMUX_DRIVE_SHIFT) # define IOMUX_DRIVE_HIZ (DRIVE_HIZ << IOMUX_DRIVE_SHIFT) /* HI-Z */ # define IOMUX_DRIVE_260OHM (DRIVE_260OHM << IOMUX_DRIVE_SHIFT) /* 150 Ohm @3.3V, 260 Ohm @1.8V */ # define IOMUX_DRIVE_130OHM (DRIVE_130OHM << IOMUX_DRIVE_SHIFT) /* 75 Ohm @3.3V, 130 Ohm @1.8V */ # define IOMUX_DRIVE_90OHM (DRIVE_90OHM << IOMUX_DRIVE_SHIFT) /* 50 Ohm @3.3V, 90 Ohm @1.8V */ # define IOMUX_DRIVE_60OHM (DRIVE_60OHM << IOMUX_DRIVE_SHIFT) /* 37 Ohm @3.3V, 60 Ohm @1.8V */ # define IOMUX_DRIVE_50OHM (DRIVE_50OHM << IOMUX_DRIVE_SHIFT) /* 30 Ohm @3.3V, 50 Ohm @1.8V */ # define IOMUX_DRIVE_40OHM (DRIVE_40OHM << IOMUX_DRIVE_SHIFT) /* 25 Ohm @3.3V, 40 Ohm @1.8V */ # define IOMUX_DRIVE_33OHM (DRIVE_33OHM << IOMUX_DRIVE_SHIFT) /* 20 Ohm @3.3V, 33 Ohm @1.8V */ /* Output Slew Rate: * * .... .... .... L... */ #define IOMUX_SLEW_SLOW (0) /* Bit 3: 0=Slow Slew Rate */ #define IOMUX_SLEW_FAST (1 << 3) /* Bit 3: 1=Fast Slew Rate */ /* Output Speed: * * .... .... .... .SS. */ #define IOMUX_SPEED_SHIFT (1) /* Bits 1-2: Speed */ #define IOMUX_SPEED_MASK (3 << IOMUX_SPEED_SHIFT) # define IOMUX_SPEED_LOW (SPEED_LOW << IOMUX_SPEED_SHIFT) /* Low frequency (50 MHz) */ # define IOMUX_SPEED_MEDIUM (SPEED_MEDIUM << IOMUX_SPEED_SHIFT) /* Medium frequency (100, 150 MHz) */ # define IOMUX_SPEED_MAX (SPEED_MAX << IOMUX_SPEED_SHIFT) /* Maximum frequency (100, 150, 200 MHz) */ /* Input Schmitt Trigger: * * .... .... .... ...T */ #define IOMUX_CMOS_INPUT (0) /* Bit 0: 0=CMOS input */ #define IOMUX_SCHMITT_TRIGGER (1 << 0) /* Bit 0: 1=Enable Schmitt trigger if input */ /**************************************************************************** * Public Types ****************************************************************************/ /* The smallest integer type that can hold the IOMUX encoding */ typedef uint16_t iomux_pinset_t; /**************************************************************************** * Public Function Prototypes ****************************************************************************/ /**************************************************************************** * Name: imxrt_padmux_map * * Description: * This function map a Pad Mux register index to the corresponding Pad * Control register index. * ****************************************************************************/ unsigned int imxrt_padmux_map(unsigned int padmux); /**************************************************************************** * Name: imxrt_iomux_configure * * Description: * This function writes the encoded pad configuration to the Pad Control * register. * ****************************************************************************/ int imxrt_iomux_configure(uintptr_t padctl, iomux_pinset_t ioset); #endif /* __ARCH_ARM_SRC_IMXRT_IMX_IOMUXC_H */
2,478
2,151
<reponame>cohortfsllc/cohort-cocl2-sandbox
/*
 * Copyright 2012 The Native Client Authors. All rights reserved.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_V2_DECODE_H
#define NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_V2_DECODE_H

#include "native_client/src/trusted/validator_arm/model.h"
#include "native_client/src/trusted/validator_arm/inst_classes.h"

namespace nacl_arm_dec {

// Models an ARM instruction parser that returns the decoder to use
// to decode an instruction.
struct DecoderState {
  explicit DecoderState() {}
  virtual ~DecoderState() {}

  // Parses the given instruction, returning the decoder to use.
  virtual const class ClassDecoder &decode(const Instruction) const = 0;
};

}  // namespace

#endif  // NATIVE_CLIENT_SRC_TRUSTED_VALIDATOR_ARM_V2_DECODE_H
305
418
../../../Bolts/Bolts/Common/BFCancellationTokenSource.h
22
473
/* * Author: <NAME> <<EMAIL>> * * Copyright (c) 2014 Kaprica Security, Inc. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. * */ #include "libcgc.h" #include "cgc_stdarg.h" #include "cgc_stdlib.h" #include "cgc_string.h" #include "cgc_pkk.h" #define STEG_TAG 0xD86B74D1 #define PPM_TAG 0x03259036 #define MODE_TAG 0xBB85A71C #define TEXT_TAG 0xBFEFDDE9 #define END_TAG 0xAB660AF0 #define SECRET_TAG 0xB58333C6 #define SECRET_END_TAG 0x0507A018 #define MODE_EM 0x1337 #define MODE_EX 0x7331 #define MAX_PPM_SIZE (1 * 1024 * 1024) #define MAX_TEXT_LEN 1000 #define GET_BIT(b, i) (((b) >> (8 - (i))) & 1) typedef struct input { unsigned int total_size; unsigned int pkk_size; pkk_t *pkk_data; unsigned short mode; unsigned short text_size; char *text_data; } input_t; int cgc_read_n(int fd, char *buf, cgc_size_t len) { cgc_size_t i; char *c = buf; for (i = 0; i < len; ++i) { cgc_size_t rx; if (cgc_receive(fd, c, 1, &rx) != 0 || rx == 0) break; c++; } return c - buf; } int cgc_readuntil(int fd, char *buf, cgc_size_t len, char delim) { cgc_size_t i; char *c = buf; for (i = 0; i < len; ++i) { cgc_size_t rx; if (cgc_receive(fd, c, 1, &rx) != 0 || rx == 0) break; if (*(c++) == delim) break; } *(c-1) = '\0'; return c - buf; } void cgc_cleanup_input(input_t *input) { if (input) { if (input->pkk_data) cgc_free_pkk(input->pkk_data); if (input->text_data) cgc_free(input->text_data); cgc_free(input); } } input_t* cgc_parse_input() { unsigned int dword; unsigned short word; input_t *input = NULL; unsigned int total_bytes = 0; input = (input_t *) cgc_malloc(sizeof(input_t)); if (input == NULL) goto fail; if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword) || dword != STEG_TAG) goto fail; total_bytes += sizeof(dword); if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword)) goto fail; total_bytes += sizeof(dword); input->total_size = dword; if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword) || dword != PPM_TAG) goto fail; total_bytes += sizeof(dword); if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword)) goto fail; total_bytes += sizeof(dword); input->pkk_size = dword; if (input->pkk_size > 0 && input->pkk_size < MAX_PPM_SIZE) { char *pkk_data = cgc_malloc(input->pkk_size); if (cgc_read_n(STDIN, pkk_data, input->pkk_size) != input->pkk_size) goto fail; input->pkk_data = cgc_parse_pkk(pkk_data, input->pkk_size); if (input->pkk_data == NULL) goto fail; total_bytes += input->pkk_size; } if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword) || dword 
!= MODE_TAG) goto fail; total_bytes += sizeof(dword); if (cgc_read_n(STDIN, (char *)&word, sizeof(word)) != sizeof(word)) goto fail; total_bytes += sizeof(word); input->mode = word; if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword) || dword != TEXT_TAG) goto fail; total_bytes += sizeof(dword); if (cgc_read_n(STDIN, (char *)&word, sizeof(word)) != sizeof(word)) goto fail; total_bytes += sizeof(word); input->text_size = word; if (input->text_size > 0) { if (input->text_size > MAX_TEXT_LEN) goto fail; input->text_data = cgc_malloc(input->text_size); if (cgc_read_n(STDIN, (char *)input->text_data, input->text_size) != input->text_size) goto fail; total_bytes += input->text_size; } if (cgc_read_n(STDIN, (char *)&dword, sizeof(dword)) != sizeof(dword) || dword != END_TAG) goto fail; total_bytes += sizeof(dword); if (total_bytes != input->total_size) goto fail; return input; fail: cgc_cleanup_input(input); return NULL; } int cgc_embed_text(pkk_t *pkk, char *text, unsigned short len) { int size; char *message, *cur; char *pixel; if (pkk && text && pkk->pixels) { pixel = (char *) pkk->pixels; size = sizeof(int) * 2 + sizeof(short) + len; if (pkk->width * pkk->height * sizeof(pixel_t) / 8 < size) return -1; message = cgc_malloc(size); if (message == NULL) return -1; cur = message; *(int *)cur = SECRET_TAG; cur += sizeof(int); *(short *)cur = len; cur += sizeof(short); cgc_memcpy(cur, text, len); cur += len; *(int *)cur = SECRET_END_TAG; int i, j; for (i = 0; i < size; ++i) { char c = message[i]; for (j = 1; j <= 8; ++j) { int lsb = *pixel & 1; if (lsb != GET_BIT(c, j)) { if (lsb) *pixel = *pixel & ~1; else *pixel = *pixel | 1; } pixel++; } } return 0; } return -1; } char cgc_recover_byte(char **pixel) { int j; char c = '\0'; for (j = 0; j < 8; ++j) { c = c << 1; int lsb = **pixel & 1; c |= lsb; (*pixel)++; } return c; } int cgc_extract_text(pkk_t *pkk, char *buf) { char c; char *pixel; if (pkk && buf && pkk->pixels) { int i, j, tag = 0; short text_size; pixel = (char *) pkk->pixels; for (i = 0; i < 4; ++i) { c = cgc_recover_byte(&pixel); tag |= ((c << 8*i) & (0xFF << 8*i)); } if (tag != SECRET_TAG) return -1; for (i = 0; i < 2; ++i) { c = cgc_recover_byte(&pixel); text_size |= ((c << 8*i) & (0xFF << 8*i)); } /* Bug: Not checking text_size */ #if PATCHED for (i = 0; i < text_size && i < MAX_TEXT_LEN; ++i) #else for (i = 0; i < text_size; ++i) #endif { c = cgc_recover_byte(&pixel); buf[i] = c; } tag = 0; for (i = 0; i < 4; ++i) { c = cgc_recover_byte(&pixel); tag |= ((c << 8*i) & (0xFF << 8*i)); } if (tag != SECRET_END_TAG) return -1; return 0; } return -1; } int main(int cgc_argc, char *cgc_argv[]) { char text[MAX_TEXT_LEN]; int out_len; char *output; input_t *input; if ((input = cgc_parse_input()) == NULL) { cgc_printf("[ERROR] Failed to parse input.\n"); return -1; } switch (input->mode) { case MODE_EM: if (cgc_embed_text(input->pkk_data, input->text_data, input->text_size) != 0) cgc_printf("[ERROR] Failed to embed your message.\n"); else { output = cgc_output_pkk(input->pkk_data, &out_len); if (output) { cgc_transmit(STDOUT, output, out_len, NULL); cgc_free(output); } } break; case MODE_EX: if (cgc_extract_text(input->pkk_data, text) != 0) cgc_printf("[ERROR] Failed to extract the message.\n"); else { cgc_printf("Secret Text: %s\n", text); } break; default: cgc_printf("[ERROR] Invalid mode.\n"); break; } cgc_cleanup_input(input); return 0; }
3,569
2,728
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- try: from ._models_py3 import CertificateRequest from ._models_py3 import CheckNameAvailabilityParameters from ._models_py3 import CheckNameAvailabilityResult from ._models_py3 import ClientDiscoveryDisplay from ._models_py3 import ClientDiscoveryForLogSpecification from ._models_py3 import ClientDiscoveryForProperties from ._models_py3 import ClientDiscoveryForServiceSpecification from ._models_py3 import ClientDiscoveryResponse from ._models_py3 import ClientDiscoveryValueForSingleApi from ._models_py3 import CmkKekIdentity from ._models_py3 import CmkKeyVaultProperties from ._models_py3 import Error from ._models_py3 import ErrorAdditionalInfo from ._models_py3 import IdentityData from ._models_py3 import JobsSummary from ._models_py3 import MonitoringSummary from ._models_py3 import NameInfo from ._models_py3 import OperationResource from ._models_py3 import PatchTrackedResource from ._models_py3 import PatchVault from ._models_py3 import PrivateEndpoint from ._models_py3 import PrivateEndpointConnection from ._models_py3 import PrivateEndpointConnectionVaultProperties from ._models_py3 import PrivateLinkResource from ._models_py3 import PrivateLinkResources from ._models_py3 import PrivateLinkServiceConnectionState from ._models_py3 import RawCertificateData from ._models_py3 import ReplicationUsage from ._models_py3 import ReplicationUsageList from ._models_py3 import Resource from ._models_py3 import ResourceCertificateAndAadDetails from ._models_py3 import ResourceCertificateAndAcsDetails from ._models_py3 import ResourceCertificateDetails from ._models_py3 import Sku from ._models_py3 import SystemData from ._models_py3 import TrackedResource from ._models_py3 import UpgradeDetails from ._models_py3 import UserIdentity from ._models_py3 import Vault from ._models_py3 import VaultCertificateResponse from ._models_py3 import VaultExtendedInfoResource from ._models_py3 import VaultList from ._models_py3 import VaultProperties from ._models_py3 import VaultPropertiesEncryption from ._models_py3 import VaultUsage from ._models_py3 import VaultUsageList except (SyntaxError, ImportError): from ._models import CertificateRequest # type: ignore from ._models import CheckNameAvailabilityParameters # type: ignore from ._models import CheckNameAvailabilityResult # type: ignore from ._models import ClientDiscoveryDisplay # type: ignore from ._models import ClientDiscoveryForLogSpecification # type: ignore from ._models import ClientDiscoveryForProperties # type: ignore from ._models import ClientDiscoveryForServiceSpecification # type: ignore from ._models import ClientDiscoveryResponse # type: ignore from ._models import ClientDiscoveryValueForSingleApi # type: ignore from ._models import CmkKekIdentity # type: ignore from ._models import CmkKeyVaultProperties # type: ignore from ._models import Error # type: ignore from ._models import ErrorAdditionalInfo # type: ignore from ._models import IdentityData # type: ignore from ._models import JobsSummary # type: ignore from ._models import MonitoringSummary # type: ignore from ._models 
import NameInfo # type: ignore from ._models import OperationResource # type: ignore from ._models import PatchTrackedResource # type: ignore from ._models import PatchVault # type: ignore from ._models import PrivateEndpoint # type: ignore from ._models import PrivateEndpointConnection # type: ignore from ._models import PrivateEndpointConnectionVaultProperties # type: ignore from ._models import PrivateLinkResource # type: ignore from ._models import PrivateLinkResources # type: ignore from ._models import PrivateLinkServiceConnectionState # type: ignore from ._models import RawCertificateData # type: ignore from ._models import ReplicationUsage # type: ignore from ._models import ReplicationUsageList # type: ignore from ._models import Resource # type: ignore from ._models import ResourceCertificateAndAadDetails # type: ignore from ._models import ResourceCertificateAndAcsDetails # type: ignore from ._models import ResourceCertificateDetails # type: ignore from ._models import Sku # type: ignore from ._models import SystemData # type: ignore from ._models import TrackedResource # type: ignore from ._models import UpgradeDetails # type: ignore from ._models import UserIdentity # type: ignore from ._models import Vault # type: ignore from ._models import VaultCertificateResponse # type: ignore from ._models import VaultExtendedInfoResource # type: ignore from ._models import VaultList # type: ignore from ._models import VaultProperties # type: ignore from ._models import VaultPropertiesEncryption # type: ignore from ._models import VaultUsage # type: ignore from ._models import VaultUsageList # type: ignore from ._recovery_services_client_enums import ( AuthType, CreatedByType, InfrastructureEncryptionState, PrivateEndpointConnectionStatus, ProvisioningState, ResourceIdentityType, SkuName, TriggerType, UsagesUnit, VaultPrivateEndpointState, VaultUpgradeState, ) __all__ = [ 'CertificateRequest', 'CheckNameAvailabilityParameters', 'CheckNameAvailabilityResult', 'ClientDiscoveryDisplay', 'ClientDiscoveryForLogSpecification', 'ClientDiscoveryForProperties', 'ClientDiscoveryForServiceSpecification', 'ClientDiscoveryResponse', 'ClientDiscoveryValueForSingleApi', 'CmkKekIdentity', 'CmkKeyVaultProperties', 'Error', 'ErrorAdditionalInfo', 'IdentityData', 'JobsSummary', 'MonitoringSummary', 'NameInfo', 'OperationResource', 'PatchTrackedResource', 'PatchVault', 'PrivateEndpoint', 'PrivateEndpointConnection', 'PrivateEndpointConnectionVaultProperties', 'PrivateLinkResource', 'PrivateLinkResources', 'PrivateLinkServiceConnectionState', 'RawCertificateData', 'ReplicationUsage', 'ReplicationUsageList', 'Resource', 'ResourceCertificateAndAadDetails', 'ResourceCertificateAndAcsDetails', 'ResourceCertificateDetails', 'Sku', 'SystemData', 'TrackedResource', 'UpgradeDetails', 'UserIdentity', 'Vault', 'VaultCertificateResponse', 'VaultExtendedInfoResource', 'VaultList', 'VaultProperties', 'VaultPropertiesEncryption', 'VaultUsage', 'VaultUsageList', 'AuthType', 'CreatedByType', 'InfrastructureEncryptionState', 'PrivateEndpointConnectionStatus', 'ProvisioningState', 'ResourceIdentityType', 'SkuName', 'TriggerType', 'UsagesUnit', 'VaultPrivateEndpointState', 'VaultUpgradeState', ]
2,231
2,541
<reponame>chemzqm/wept
//
//  WANavigationController.h
//  WeAppExample
//
//  Created by lionvoom on 2020/11/16.
//  Copyright © 2020 wept. All rights reserved.
//

#import <UIKit/UIKit.h>

@class WAAppTask;

NS_ASSUME_NONNULL_BEGIN

@interface WANavigationController : UINavigationController

@property (nonatomic, weak) WAAppTask *appTask;

@end

NS_ASSUME_NONNULL_END
145
643
package com.hellokoding.algorithm;

import java.util.HashMap;
import java.util.Map;

public class WindowSliding_GivenString_FindMinSubstr {
    public static String findMinSubstr(String s, String t) {
        Map<Character, Integer> tChars = new HashMap<>();
        for (char c : t.toCharArray()) {
            tChars.compute(c, (key, value) -> value == null ? 1 : ++value);
        }

        Map<Character, Integer> sChars = new HashMap<>();
        String windowSubstr = "";
        String minSubstr = "";
        int i = 0;
        int j = 0;
        int matched = 0;
        while (i < s.length()) {
            Character c;
            while (j < s.length() && matched < tChars.size()) {
                c = s.charAt(j++);
                sChars.compute(c, (key, value) -> value == null ? 1 : ++value);
                windowSubstr += c;

                if (tChars.containsKey(c) && tChars.get(c).equals(sChars.get(c))) matched++;
            }

            if (matched == tChars.size() && (minSubstr.equals("") || windowSubstr.length() < minSubstr.length())) {
                minSubstr = windowSubstr;
            }

            c = windowSubstr.charAt(0);
            sChars.compute(c, (key, value) -> --value);
            if (tChars.containsKey(c) && tChars.get(c) > sChars.get(c)) matched--;
            windowSubstr = windowSubstr.substring(1);
            i++;
        }

        return minSubstr;
    }

    public static void main(String[] args) {
        System.out.println(findMinSubstr("ADOBECODEBANC", "ABC"));
        System.out.println(findMinSubstr("aaaaaaaaaaaabbbbbcdd", "abcdd"));
        System.out.println(findMinSubstr("a", "aa"));
    }
}
816
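The record above tracks the sliding window by repeatedly concatenating and slicing a String, which copies characters on every step. Below is a minimal index-based sketch of the same minimum-window-substring technique (illustrative only, not part of any dataset record); it keeps the window as a pair of indices so each character is added and removed at most once.

import java.util.HashMap;
import java.util.Map;

public class MinWindowSketch {
    public static String minWindow(String s, String t) {
        if (t.isEmpty()) return "";

        // Required character counts from t.
        Map<Character, Integer> need = new HashMap<>();
        for (char c : t.toCharArray()) need.merge(c, 1, Integer::sum);

        Map<Character, Integer> have = new HashMap<>();
        int matched = 0, bestLen = Integer.MAX_VALUE, bestStart = 0;
        int left = 0;
        for (int right = 0; right < s.length(); right++) {
            char c = s.charAt(right);
            have.merge(c, 1, Integer::sum);
            // matched counts how many distinct characters have reached their quota.
            if (need.containsKey(c) && have.get(c).equals(need.get(c))) matched++;

            // Shrink from the left while the window still covers t.
            while (matched == need.size()) {
                if (right - left + 1 < bestLen) {
                    bestLen = right - left + 1;
                    bestStart = left;
                }
                char d = s.charAt(left++);
                have.merge(d, -1, Integer::sum);
                if (need.containsKey(d) && have.get(d) < need.get(d)) matched--;
            }
        }
        return bestLen == Integer.MAX_VALUE ? "" : s.substring(bestStart, bestStart + bestLen);
    }

    public static void main(String[] args) {
        System.out.println(minWindow("ADOBECODEBANC", "ABC")); // BANC
    }
}

Both variants answer the same question; the index form just trades the readable String window for linear character work.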
634
<filename>server/data/data-cassandra/src/main/java/org/apache/james/rrt/cassandra/CassandraRecipientRewriteTable.java /**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.rrt.cassandra; import java.util.Map; import java.util.stream.Stream; import javax.inject.Inject; import org.apache.james.core.Domain; import org.apache.james.rrt.api.RecipientRewriteTableException; import org.apache.james.rrt.lib.AbstractRecipientRewriteTable; import org.apache.james.rrt.lib.Mapping; import org.apache.james.rrt.lib.MappingSource; import org.apache.james.rrt.lib.Mappings; import org.apache.james.rrt.lib.MappingsImpl; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; public class CassandraRecipientRewriteTable extends AbstractRecipientRewriteTable { private final CassandraRecipientRewriteTableDAO cassandraRecipientRewriteTableDAO; private final CassandraMappingsSourcesDAO cassandraMappingsSourcesDAO; @Inject CassandraRecipientRewriteTable(CassandraRecipientRewriteTableDAO cassandraRecipientRewriteTableDAO, CassandraMappingsSourcesDAO cassandraMappingsSourcesDAO) { this.cassandraRecipientRewriteTableDAO = cassandraRecipientRewriteTableDAO; this.cassandraMappingsSourcesDAO = cassandraMappingsSourcesDAO; } @Override public void addMapping(MappingSource source, Mapping mapping) { cassandraRecipientRewriteTableDAO.addMapping(source, mapping) .then(cassandraMappingsSourcesDAO.addMapping(mapping, source)) .block(); } @Override public void removeMapping(MappingSource source, Mapping mapping) { cassandraRecipientRewriteTableDAO.removeMapping(source, mapping) .then(cassandraMappingsSourcesDAO.removeMapping(mapping, source)) .block(); } @Override public Mappings getStoredMappings(MappingSource source) { return cassandraRecipientRewriteTableDAO.retrieveMappings(source) .blockOptional() .orElse(MappingsImpl.empty()); } @Override public Map<MappingSource, Mappings> getAllMappings() { return cassandraRecipientRewriteTableDAO.getAllMappings() .collect(ImmutableMap.toImmutableMap( pair -> pair.getLeft(), pair -> MappingsImpl.fromMappings(pair.getRight()), Mappings::union)) .block(); } @Override protected Mappings mapAddress(String user, Domain domain) { return cassandraRecipientRewriteTableDAO.retrieveMappings(MappingSource.fromUser(user, domain)).blockOptional() .or(() -> cassandraRecipientRewriteTableDAO.retrieveMappings(MappingSource.fromDomain(domain)).blockOptional()) .orElse(MappingsImpl.empty()); } @Override public Stream<MappingSource> listSources(Mapping mapping) throws RecipientRewriteTableException { 
Preconditions.checkArgument(listSourcesSupportedType.contains(mapping.getType()), "Not supported mapping of type %s", mapping.getType()); return cassandraMappingsSourcesDAO.retrieveSources(mapping).toStream(); } }
1,626
777
<reponame>google-ar/chromium<filename>components/policy/core/common/cloud/user_cloud_policy_store.h // Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef COMPONENTS_POLICY_CORE_COMMON_CLOUD_USER_CLOUD_POLICY_STORE_H_ #define COMPONENTS_POLICY_CORE_COMMON_CLOUD_USER_CLOUD_POLICY_STORE_H_ #include <string> #include "base/compiler_specific.h" #include "base/files/file_path.h" #include "base/macros.h" #include "base/memory/weak_ptr.h" #include "components/policy/core/common/cloud/user_cloud_policy_store_base.h" #include "components/policy/policy_export.h" #include "components/policy/proto/policy_signing_key.pb.h" namespace base { class SequencedTaskRunner; } namespace policy { // Implements a cloud policy store that is stored in a simple file in the user's // profile directory. This is used on (non-chromeos) platforms that do not have // a secure storage implementation. // // The public key, which is used to verify signatures of policy, is also // persisted in a file. During the load operation, the key is loaded from the // file and is itself verified against the verification public key before using // it to verify the policy signature. During the store operation, the key cache // file is updated whenever the key rotation happens. class POLICY_EXPORT UserCloudPolicyStore : public UserCloudPolicyStoreBase { public: // Creates a policy store associated with a signed-in (or in the progress of // it) user. UserCloudPolicyStore( const base::FilePath& policy_file, const base::FilePath& key_file, scoped_refptr<base::SequencedTaskRunner> background_task_runner); ~UserCloudPolicyStore() override; // Factory method for creating a UserCloudPolicyStore for a profile with path // |profile_path|. static std::unique_ptr<UserCloudPolicyStore> Create( const base::FilePath& profile_path, scoped_refptr<base::SequencedTaskRunner> background_task_runner); // The username from signin for validation of the policy. std::string signin_username() const { return signin_username_; } // Sets the username from signin for validation of the policy. void SetSigninUsername(const std::string& username); // Loads policy immediately on the current thread. Virtual for mocks. virtual void LoadImmediately(); // Deletes any existing policy blob and notifies observers via OnStoreLoaded() // that the blob has changed. Virtual for mocks. virtual void Clear(); // CloudPolicyStore implementation. void Load() override; void Store(const enterprise_management::PolicyFetchResponse& policy) override; private: // Callback invoked when a new policy has been loaded from disk. If // |validate_in_background| is true, then policy is validated via a background // thread. void PolicyLoaded(bool validate_in_background, struct PolicyLoadResult policy_load_result); // Starts policy blob validation. |callback| is invoked once validation is // complete. If |validate_in_background| is true, then the validation work // occurs on a background thread (results are sent back to the calling // thread). void Validate( std::unique_ptr<enterprise_management::PolicyFetchResponse> policy, std::unique_ptr<enterprise_management::PolicySigningKey> key, bool validate_in_background, const UserCloudPolicyValidator::CompletionCallback& callback); // Callback invoked to install a just-loaded policy after validation has // finished. 
void InstallLoadedPolicyAfterValidation(bool doing_key_rotation, const std::string& signing_key, UserCloudPolicyValidator* validator); // Callback invoked to store the policy after validation has finished. void StorePolicyAfterValidation(UserCloudPolicyValidator* validator); // The current key used to verify signatures of policy. This value is // eventually consistent with the one persisted in the key cache file. This // is, generally, different from |policy_signature_public_key_|, which always // corresponds to the currently effective policy. std::string persisted_policy_key_; // Path to file where we store persisted policy. base::FilePath policy_path_; // Path to file where we store the signing key for the policy blob. base::FilePath key_path_; // The username from signin for validation of the policy. std::string signin_username_; // WeakPtrFactory used to create callbacks for validating and storing policy. base::WeakPtrFactory<UserCloudPolicyStore> weak_factory_; DISALLOW_COPY_AND_ASSIGN(UserCloudPolicyStore); }; } // namespace policy #endif // COMPONENTS_POLICY_CORE_COMMON_CLOUD_USER_CLOUD_POLICY_STORE_H_
1,461
353
<gh_stars>100-1000 package com.wepay.waltz.test.mock; import com.wepay.zktools.clustermgr.ClusterManager; import com.wepay.zktools.clustermgr.ClusterManagerException; import com.wepay.zktools.clustermgr.Endpoint; import com.wepay.zktools.clustermgr.ManagedClient; import com.wepay.zktools.clustermgr.ManagedServer; import com.wepay.zktools.clustermgr.internal.PartitionAssignment; import com.wepay.zktools.clustermgr.internal.ServerDescriptor; import com.wepay.zktools.zookeeper.ZNode; import java.security.SecureRandom; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.IdentityHashMap; import java.util.LinkedList; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; public class MockClusterManager implements ClusterManager { private static final String CLUSTER_NAME = "mock cluster"; private final int numPartitions; private final LinkedList<ManagedClient> managedClients = new LinkedList<>(); private final IdentityHashMap<ManagedServer, ZNode> managedServers = new IdentityHashMap<>(); private final HashMap<ZNode, ServerDescriptor> serverDescriptors = new HashMap<>(); private final SecureRandom rnd = new SecureRandom(); private AtomicInteger serverId = new AtomicInteger(rnd.nextInt()); private AtomicInteger clientId = new AtomicInteger(rnd.nextInt()); public MockClusterManager(int numPartitions) { this.numPartitions = numPartitions; } @Override public void close() { } @Override public String clusterName() { return CLUSTER_NAME; } @Override public int numPartitions() { return numPartitions; } @Override public Set<ServerDescriptor> serverDescriptors() throws ClusterManagerException { return new HashSet<>(serverDescriptors.values()); } @Override public PartitionAssignment partitionAssignment() throws ClusterManagerException { throw new UnsupportedOperationException(this.getClass().getSimpleName() + "does not assign partitions"); } @Override public void manage(ManagedClient client) throws ClusterManagerException { synchronized (managedClients) { client.setClientId(clientId.incrementAndGet()); managedClients.add(client); } } @Override public void manage(ManagedServer server) throws ClusterManagerException { synchronized (managedServers) { if (managedServers.containsKey(server)) { throw new ClusterManagerException("server already managed"); } int id = serverId.incrementAndGet(); try { // Fake znode ZNode znode = new ZNode("/serverDescriptors/s_" + id); managedServers.put(server, znode); serverDescriptors.put(znode, new ServerDescriptor(id, server.endpoint(), Collections.emptyList())); server.setServerId(id); } catch (Exception ex) { throw new ClusterManagerException("unable to manage server", ex); } } } @Override public void unmanage(ManagedClient client) throws ClusterManagerException { synchronized (managedClients) { managedClients.remove(client); } } @Override public void unmanage(ManagedServer server) throws ClusterManagerException { synchronized (managedServers) { ZNode znode = managedServers.remove(server); if (znode != null) { serverDescriptors.remove(znode); } } } public Collection<ManagedClient> managedClients() { return Collections.unmodifiableList(managedClients); } public Collection<ManagedServer> managedServers() { return Collections.unmodifiableSet(managedServers.keySet()); } public Endpoint endPoint(ManagedServer server) { synchronized (managedServers) { ServerDescriptor descriptor = serverDescriptors.get(managedServers.get(server)); return descriptor != null ? descriptor.endpoint : null; } } }
1,580
550
<reponame>RalfRalf/java-sdk<filename>src/main/java/com/qiniu/storage/UploadOptions.java
package com.qiniu.storage;

import com.qiniu.util.StringMap;

public final class UploadOptions {

    /**
     * Custom parameters for the server-side upload callback notification;
     * parameter keys must start with x: e.g. x:foo
     */
    public final StringMap params;

    /**
     * Used to set meta data; parameter keys must start with x-qn-meta- e.g. x-qn-meta-key
     */
    public final StringMap metaDataParam;

    /**
     * Specifies the MIME type of the uploaded file
     */
    public final String mimeType;

    /**
     * Enables CRC32 verification of the uploaded content
     */
    public final boolean checkCrc;

    public static UploadOptions defaultOptions() {
        return new UploadOptions.Builder().build();
    }

    private UploadOptions(StringMap params, StringMap metaDataParam, String mimeType, boolean checkCrc) {
        this.params = params;
        this.metaDataParam = metaDataParam;
        this.mimeType = mimeType;
        this.checkCrc = checkCrc;
    }

    public static class Builder {
        private StringMap params;
        private StringMap metaDataParam;
        private String mimeType;
        private boolean checkCrc;

        /**
         * Custom parameters for the server-side upload callback notification;
         * parameter keys must start with x: e.g. x:foo
         */
        public Builder params(StringMap params) {
            this.params = params;
            return this;
        }

        /**
         * Used to set meta data; parameter keys must start with x-qn-meta- e.g. x-qn-meta-key
         */
        public Builder metaData(StringMap params) {
            this.metaDataParam = params;
            return this;
        }

        /**
         * Specifies the MIME type of the uploaded file
         */
        public Builder mimeType(String mimeType) {
            this.mimeType = mimeType;
            return this;
        }

        /**
         * Enables CRC32 verification of the uploaded content
         */
        public Builder checkCrc(boolean checkCrc) {
            this.checkCrc = checkCrc;
            return this;
        }

        public UploadOptions build() {
            mimeType = mime(mimeType);
            params = filterParam(params);
            metaDataParam = filterMetaData(metaDataParam);
            return new UploadOptions(params, metaDataParam, mimeType, checkCrc);
        }

        private String mime(String mimeType) {
            if (mimeType == null || mimeType.equals("")) {
                return "application/octet-stream";
            }
            return mimeType;
        }

        /**
         * Filters user-defined parameters; only parameters whose names start with <code>x:</code> are used
         *
         * @param params user-defined parameters to filter
         * @return the filtered user-defined parameters
         */
        private StringMap filterParam(StringMap params) {
            final StringMap ret = new StringMap();
            if (params == null) {
                return ret;
            }

            params.forEach(new StringMap.Consumer() {
                @Override
                public void accept(String key, Object value) {
                    ret.putWhen(key, value, key != null && key.startsWith("x:")
                            && value != null && !value.equals(""));
                }
            });

            return ret;
        }

        /**
         * Filters meta data parameters; only parameters whose names start with <code>x-qn-meta-</code> are used
         *
         * @param params parameters to filter
         * @return the filtered parameters
         */
        private StringMap filterMetaData(StringMap params) {
            final StringMap ret = new StringMap();
            if (params == null) {
                return ret;
            }

            params.forEach(new StringMap.Consumer() {
                @Override
                public void accept(String key, Object value) {
                    ret.putWhen(key, value, key != null && key.startsWith("x-qn-meta-")
                            && value != null && !value.equals(""));
                }
            });

            return ret;
        }
    }
}
2,264
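A short usage sketch for the UploadOptions builder in the previous record (illustrative only: the parameter values are made up, and it assumes StringMap.put is chainable as in the Qiniu Java SDK).

import com.qiniu.storage.UploadOptions;
import com.qiniu.util.StringMap;

public class UploadOptionsDemo {
    public static void main(String[] args) {
        // Only keys prefixed with "x:" / "x-qn-meta-" survive the builder's filters.
        StringMap params = new StringMap().put("x:uid", "42").put("ignored", "dropped");
        StringMap meta = new StringMap().put("x-qn-meta-origin", "demo");

        UploadOptions options = new UploadOptions.Builder()
                .params(params)
                .metaData(meta)
                .mimeType("image/png")
                .checkCrc(true)
                .build();

        System.out.println(options.mimeType); // image/png
    }
}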
3,508
package com.fishercoder.solutions;

import java.util.Arrays;

public class _1979 {
    public static class Solution1 {
        public int findGCD(int[] nums) {
            Arrays.sort(nums);
            return getGcd(nums[0], nums[nums.length - 1]);
        }

        int getGcd(int a, int b) {
            return b == 0 ? a : getGcd(b, a % b);
        }
    }
}
185
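The previous record solves LeetCode 1979 by sorting to obtain the smallest and largest elements and running the Euclidean algorithm on that pair. A minimal driver follows (the input arrays are assumed test values, not part of the record).

import com.fishercoder.solutions._1979;

public class FindGcdDemo {
    public static void main(String[] args) {
        _1979.Solution1 solution = new _1979.Solution1();
        // smallest = 2, largest = 10, gcd(2, 10) = 2
        System.out.println(solution.findGCD(new int[]{2, 5, 6, 9, 10}));
        // smallest = 3, largest = 8, gcd(3, 8) = 1
        System.out.println(solution.findGCD(new int[]{7, 5, 6, 8, 3}));
    }
}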
442
#!/usr/bin/env python
# BEGIN ALL
import rospy
from sensor_msgs.msg import LaserScan

# BEGIN MEASUREMENT
def scan_callback(msg):
    range_ahead = msg.ranges[len(msg.ranges)/2]
    print "range ahead: %0.1f" % range_ahead
# END MEASUREMENT

rospy.init_node('range_ahead')
scan_sub = rospy.Subscriber('scan', LaserScan, scan_callback)
rospy.spin()
# END ALL
139
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef SD_SDCOMMANDS_HRC #define SD_SDCOMMANDS_HRC #define CMD_SID_OBJECT_MIRROR ".uno:Mirror" #define CMD_SID_OBJECT_CHOOSE_MODE ".uno:AdvancedMode" #define CMD_SID_ANIMATION_EFFECTS ".uno:AnimationEffects" #define CMD_SID_ANIMATION_OBJECTS ".uno:AnimationObjects" #define CMD_SID_ANIMATOR_ADD ".uno:AnimatorAddObject" #define CMD_SID_ANIMATOR_CREATE ".uno:AnimatorCreateObject" #define CMD_SID_ANIMATOR_INIT ".uno:AnimatorInit" #define CMD_SID_ANIMATOR_STATE ".uno:AnimatorState" #define CMD_SID_DRAWTBX_ARROWS ".uno:ArrowsToolbox" #define CMD_SID_MOREBACK ".uno:Backward" #define CMD_SID_BEFORE_OBJ ".uno:BeforeObject" #define CMD_SID_BEHIND_OBJ ".uno:BehindObject" #define CMD_SID_BIG_HANDLES ".uno:BigHandles" #define CMD_SID_BREAK ".uno:Break" #define CMD_SID_CAPTUREPOINT ".uno:CapturePoint" #define CMD_SID_CHANGEBEZIER ".uno:ChangeBezier" #define CMD_SID_CHANGEPOLYGON ".uno:ChangePolygon" #define CMD_SID_CLICK_CHANGE_ROTATION ".uno:ClickChangeRotation" #define CMD_SID_OBJECT_CLOSE ".uno:CloseObject" #define CMD_SID_COLORVIEW ".uno:ColorView" #define CMD_SID_COMBINE ".uno:Combine" #define CMD_SID_3D_CONE ".uno:Cone" #define CMD_SID_CONNECT ".uno:Connect" #define CMD_SID_TOOL_CONNECTOR ".uno:Connector" #define CMD_SID_CONNECTOR_ARROW_END ".uno:ConnectorArrowEnd" #define CMD_SID_CONNECTOR_ARROWS ".uno:ConnectorArrows" #define CMD_SID_CONNECTOR_ARROW_START ".uno:ConnectorArrowStart" #define CMD_SID_CONNECTION_DLG ".uno:ConnectorAttributes" #define CMD_SID_CONNECTOR_CIRCLE_END ".uno:ConnectorCircleEnd" #define CMD_SID_CONNECTOR_CIRCLES ".uno:ConnectorCircles" #define CMD_SID_CONNECTOR_CIRCLE_START ".uno:ConnectorCircleStart" #define CMD_SID_CONNECTOR_CURVE ".uno:ConnectorCurve" #define CMD_SID_CONNECTOR_CURVE_ARROW_END ".uno:ConnectorCurveArrowEnd" #define CMD_SID_CONNECTOR_CURVE_ARROWS ".uno:ConnectorCurveArrows" #define CMD_SID_CONNECTOR_CURVE_ARROW_START ".uno:ConnectorCurveArrowStart" #define CMD_SID_CONNECTOR_CURVE_CIRCLE_END ".uno:ConnectorCurveCircleEnd" #define CMD_SID_CONNECTOR_CURVE_CIRCLES ".uno:ConnectorCurveCircles" #define CMD_SID_CONNECTOR_CURVE_CIRCLE_START ".uno:ConnectorCurveCircleStart" #define CMD_SID_CONNECTOR_LINE ".uno:ConnectorLine" #define CMD_SID_CONNECTOR_LINE_ARROW_END ".uno:ConnectorLineArrowEnd" #define CMD_SID_CONNECTOR_LINE_ARROWS ".uno:ConnectorLineArrows" #define CMD_SID_CONNECTOR_LINE_ARROW_START ".uno:ConnectorLineArrowStart" #define CMD_SID_CONNECTOR_LINE_CIRCLE_END ".uno:ConnectorLineCircleEnd" #define CMD_SID_CONNECTOR_LINE_CIRCLES ".uno:ConnectorLineCircles" #define CMD_SID_CONNECTOR_LINE_CIRCLE_START ".uno:ConnectorLineCircleStart" #define CMD_SID_CONNECTOR_LINES 
".uno:ConnectorLines" #define CMD_SID_CONNECTOR_LINES_ARROW_END ".uno:ConnectorLinesArrowEnd" #define CMD_SID_CONNECTOR_LINES_ARROWS ".uno:ConnectorLinesArrows" #define CMD_SID_CONNECTOR_LINES_ARROW_START ".uno:ConnectorLinesArrowStart" #define CMD_SID_CONNECTOR_LINES_CIRCLE_END ".uno:ConnectorLinesCircleEnd" #define CMD_SID_CONNECTOR_LINES_CIRCLES ".uno:ConnectorLinesCircles" #define CMD_SID_CONNECTOR_LINES_CIRCLE_START ".uno:ConnectorLinesCircleStart" #define CMD_SID_DRAWTBX_CONNECTORS ".uno:ConnectorToolbox" #define CMD_SID_CONVERT_TO_CONTOUR ".uno:convert_to_contour" #define CMD_SID_CONVERT_TO_3D ".uno:ConvertInto3D" #define CMD_SID_CONVERT_TO_3D_LATHE ".uno:ConvertInto3DLathe" #define CMD_SID_CONVERT_TO_3D_LATHE_FAST ".uno:ConvertInto3DLatheFast" #define CMD_SID_CONVERT_TO_BITMAP ".uno:ConvertIntoBitmap" #define CMD_SID_CONVERT_TO_METAFILE ".uno:ConvertIntoMetaFile" #define CMD_SID_CONVERT_TO_1BIT_MATRIX ".uno:ConvertTo1BitMatrix" #define CMD_SID_CONVERT_TO_1BIT_THRESHOLD ".uno:ConvertTo1BitThreshold" #define CMD_SID_CONVERT_TO_4BIT_COLORS ".uno:ConvertTo4BitColors" #define CMD_SID_CONVERT_TO_4BIT_GRAYS ".uno:ConvertTo4BitGrays" #define CMD_SID_CONVERT_TO_8BIT_COLORS ".uno:ConvertTo8BitColors" #define CMD_SID_CONVERT_TO_8BIT_GRAYS ".uno:ConvertTo8BitGrays" #define CMD_SID_CONVERT_TO_24BIT ".uno:ConvertToTrueColor" #define CMD_SID_COPYOBJECTS ".uno:CopyObjects" #define CMD_SID_OBJECT_CROOK_ROTATE ".uno:CrookRotate" #define CMD_SID_OBJECT_CROOK_SLANT ".uno:CrookSlant" #define CMD_SID_OBJECT_CROOK_STRETCH ".uno:CrookStretch" #define CMD_SID_3D_CUBE ".uno:Cube" #define CMD_SID_CUSTOMSHOW_DLG ".uno:CustomShowDialog" #define CMD_SID_3D_CYLINDER ".uno:Cylinder" #define CMD_SID_3D_PYRAMID ".uno:Cyramid" #define CMD_SID_DELETE_LAYER ".uno:DeleteLayer" #define CMD_SID_DELETE_PAGE ".uno:DeletePage" #define CMD_SID_DELETE_SNAPITEM ".uno:DeleteSnapItem" #define CMD_SID_UNSELECT ".uno:DeSelect" #define CMD_SID_DIAMODE ".uno:DiaMode" #define CMD_SID_DISMANTLE ".uno:Dismantle" #define CMD_SID_DOUBLECLICK_TEXTEDIT ".uno:DoubleClickTextEdit" #define CMD_SID_DRAWINGMODE ".uno:DrawingMode" #define CMD_SID_DUPLICATE_PAGE ".uno:DuplicatePage" #define CMD_SID_EDIT_HYPERLINK ".uno:EditHyperlink" #define CMD_SID_EDIT_OUTLINER ".uno:EditOutline" #define CMD_SID_CUSTOM_ANIMATION_PANEL ".uno:CustomAnimation" #define CMD_SID_CUSTOM_ANIMATION_SCHEMES_PANEL ".uno:CustomAnimationSchemes" #define CMD_SID_DRAWTBX_ELLIPSES ".uno:EllipseToolbox" #define CMD_SID_EXPAND_PAGE ".uno:ExpandPage" #define CMD_SID_SETFILLCOLOR ".uno:SetFillColor" #define CMD_SID_MOREFRONT ".uno:Forward" #define CMD_SID_GETBLUE ".uno:GetBlue" #define CMD_SID_GETFILLSTYLE ".uno:GetFillStyle" #define CMD_SID_GETGREEN ".uno:GetGreen" #define CMD_SID_GETLINESTYLE ".uno:GetLineStyle" #define CMD_SID_GETLINEWIDTH ".uno:GetLineWidth" #define CMD_SID_GETRED ".uno:GetRed" #define CMD_SID_GLUE_EDITMODE ".uno:GlueEditMode" #define CMD_SID_GLUE_ESCDIR ".uno:GlueEscapeDirection" #define CMD_SID_GLUE_ESCDIR_BOTTOM ".uno:GlueEscapeDirectionBottom" #define CMD_SID_GLUE_ESCDIR_LEFT ".uno:GlueEscapeDirectionLeft" #define CMD_SID_GLUE_ESCDIR_RIGHT ".uno:GlueEscapeDirectionRight" #define CMD_SID_GLUE_ESCDIR_TOP ".uno:GlueEscapeDirectionTop" #define CMD_SID_GLUE_HORZALIGN_CENTER ".uno:GlueHorzAlignCenter" #define CMD_SID_GLUE_HORZALIGN_LEFT ".uno:GlueHorzAlignLeft" #define CMD_SID_GLUE_HORZALIGN_RIGHT ".uno:GlueHorzAlignRight" #define CMD_SID_GLUE_INSERT_POINT ".uno:GlueInsertPoint" #define CMD_SID_GLUE_PERCENT ".uno:GluePercent" #define 
CMD_SID_GLUE_VERTALIGN_BOTTOM ".uno:GlueVertAlignBottom" #define CMD_SID_GLUE_VERTALIGN_CENTER ".uno:GlueVertAlignCenter" #define CMD_SID_GLUE_VERTALIGN_TOP ".uno:GlueVertAlignTop" #define CMD_SID_GRADIENT ".uno:Gradient" #define CMD_SID_SETGRADENDCOLOR ".uno:GradientEndColor" #define CMD_SID_SETGRADSTARTCOLOR ".uno:GradientStartColor" #define CMD_SID_GRAPHIC_EXPORT ".uno:GraphicExport" #define CMD_SID_GRID_FRONT ".uno:GridFront" #define CMD_SID_3D_HALF_SPHERE ".uno:HalfSphere" #define CMD_SID_HANDLES_DRAFT ".uno:HandlesDraft" #define CMD_SID_HANDOUT_MASTERPAGE ".uno:HandoutMasterPage" #define CMD_SID_HANDOUTMODE ".uno:HandoutMode" #define CMD_SID_HATCH ".uno:Hatch" #define CMD_SID_SETHATCHCOLOR ".uno:HatchColor" #define CMD_SID_HELPLINES_FRONT ".uno:HelplinesFront" #define CMD_SID_HELPLINES_USE ".uno:HelplinesUse" #define CMD_SID_HELPLINES_VISIBLE ".uno:HelplinesVisible" #define CMD_SID_HIDE_SLIDE ".uno:HideSlide" #define CMD_SID_SHOW_SLIDE ".uno:ShowSlide" #define CMD_SID_HYPHENATION ".uno:Hyphenation" #define CMD_SID_INSERTFILE ".uno:ImportFromFile" #define CMD_SID_INSERT_FLD_AUTHOR ".uno:InsertAuthorField" #define CMD_SID_INSERT_FLD_DATE_FIX ".uno:InsertDateFieldFix" #define CMD_SID_INSERT_FLD_DATE_VAR ".uno:InsertDateFieldVar" #define CMD_SID_INSERT_FLD_FILE ".uno:InsertFileField" #define CMD_SID_INSERTLAYER ".uno:InsertLayer" #define CMD_SID_INSERTPAGE ".uno:InsertPage" #define CMD_SID_INSERT_FLD_PAGE ".uno:InsertPageField" #define CMD_SID_INSERT_FLD_PAGES ".uno:InsertPagesField" #define CMD_SID_INSERTPAGE_QUICK ".uno:InsertPageQuick" #define CMD_SID_INSERT_FLD_TIME_FIX ".uno:InsertTimeFieldFix" #define CMD_SID_INSERT_FLD_TIME_VAR ".uno:InsertTimeFieldVar" #define CMD_SID_DRAWTBX_INSERT ".uno:InsertToolbox" #define CMD_SID_OBJECT_GRADIENT ".uno:InteractiveGradient" #define CMD_SID_OBJECT_TRANSPARENCE ".uno:InteractiveTransparence" #define CMD_SID_LAYERMODE ".uno:LayerMode" #define CMD_SID_STATUS_LAYOUT ".uno:LayoutStatus" #define CMD_SID_LEAVE_ALL_GROUPS ".uno:LeaveAllGroups" #define CMD_SID_LINE_ARROW_CIRCLE ".uno:LineArrowCircle" #define CMD_SID_LINE_ARROW_END ".uno:LineArrowEnd" #define CMD_SID_LINE_ARROWS ".uno:LineArrows" #define CMD_SID_LINE_ARROW_SQUARE ".uno:LineArrowSquare" #define CMD_SID_LINE_ARROW_START ".uno:LineArrowStart" #define CMD_SID_LINE_CIRCLE_ARROW ".uno:LineCircleArrow" #define CMD_SID_SETLINECOLOR ".uno:LineColor" #define CMD_SID_LINEEND_POLYGON ".uno:LineEndPolygon" #define CMD_SID_LINE_SQUARE_ARROW ".uno:LineSquareArrow" #define CMD_SID_LINETO ".uno:LineTo" #define CMD_SID_DRAWTBX_LINES ".uno:LineToolbox" #define CMD_SID_MANAGE_LINKS ".uno:ManageLinks" #define CMD_SID_MASTERPAGE ".uno:MasterPage" #define CMD_SID_MEASURE_DLG ".uno:MeasureAttributes" #define CMD_SID_DRAW_MEASURELINE ".uno:MeasureLine" #define CMD_SID_HORIZONTAL ".uno:MirrorHorz" #define CMD_SID_VERTICAL ".uno:MirrorVert" #define CMD_SID_MODIFY_FIELD ".uno:ModifyField" #define CMD_SID_MODIFYLAYER ".uno:ModifyLayer" #define CMD_SID_MODIFYPAGE ".uno:ModifyPage" #define CMD_SID_ASSIGN_LAYOUT ".uno:AssignLayout" #define CMD_SID_PRESENTATIONOBJECT ".uno:ModifyPresentationObject" #define CMD_SID_POLYGON_MORPHING ".uno:Morphing" #define CMD_SID_MOVETO ".uno:MoveTo" #define CMD_SID_NAME_GROUP ".uno:NameGroup" #define CMD_SID_OBJECT_TITLE_DESCRIPTION ".uno:ObjectTitleDescription" #define CMD_SID_NAVIGATOR_INIT ".uno:NavigatorInit" #define CMD_SID_NAVIGATOR_OBJECT ".uno:NavigatorObject" #define CMD_SID_NAVIGATOR_PAGE ".uno:NavigatorPage" #define CMD_SID_NAVIGATOR_PAGENAME ".uno:NavigatorPageName" #define 
CMD_SID_NAVIGATOR_PEN ".uno:NavigatorPen" #define CMD_SID_NAVIGATOR_STATE ".uno:NavigatorState" #define CMD_SID_CONNECTION_NEW_ROUTING ".uno:NewRouting" #define CMD_SID_NOTES_MASTERPAGE ".uno:NotesMasterPage" #define CMD_SID_NOTESMODE ".uno:NotesMode" #define CMD_SID_POSITION ".uno:ObjectPosition" #define CMD_SID_DRAWTBX_3D_OBJECTS ".uno:Objects3DToolbox" #define CMD_SID_ORIGINAL_SIZE ".uno:OriginalSize" #define CMD_SID_SAVEGRAPHIC ".uno:SaveGraphic" #define CMD_SID_OUTLINEMODE ".uno:OutlineMode" #define CMD_SID_OUTPUT_QUALITY_BLACKWHITE ".uno:OutputQualityBlackWhite" #define CMD_SID_OUTPUT_QUALITY_COLOR ".uno:OutputQualityColor" #define CMD_SID_OUTPUT_QUALITY_CONTRAST ".uno:OutputQualityContrast" #define CMD_SID_OUTPUT_QUALITY_GRAYSCALE ".uno:OutputQualityGrayscale" #define CMD_SID_PACKNGO ".uno:PackAndGo" #define CMD_SID_PAGEMARGIN ".uno:PageMargin" #define CMD_SID_PAGEMODE ".uno:PageMode" #define CMD_SID_PAGESETUP ".uno:PageSetup" #define CMD_SID_PAGESIZE ".uno:PageSize" #define CMD_SID_PAGES_PER_ROW ".uno:PagesPerRow" #define CMD_SID_STATUS_PAGE ".uno:PageStatus" #define CMD_SID_PARASPACE_DECREASE ".uno:ParaspaceDecrease" #define CMD_SID_PARASPACE_INCREASE ".uno:ParaspaceIncrease" #define CMD_SID_PICK_THROUGH ".uno:PickThrough" #define CMD_SID_DRAW_POLYGON ".uno:Polygon" #define CMD_SID_PRESENTATION ".uno:Presentation" #define CMD_SID_PRESENTATION_DLG ".uno:PresentationDialog" #define CMD_SID_PRESENTATION_END ".uno:PresentationEnd" #define CMD_SID_CLEAR_UNDO_STACK ".uno:ClearUndoStack" #define CMD_SID_PRESENTATION_LAYOUT ".uno:PresentationLayout" #define CMD_SID_PREVIEW_STATE ".uno:PreviewState" #define CMD_SID_PREVIEW_WIN ".uno:PreviewWindow" #define CMD_SID_QUICKEDIT ".uno:QuickEdit" #define CMD_SID_DRAWTBX_RECTANGLES ".uno:RectangleToolbox" #define CMD_SID_REHEARSE_TIMINGS ".uno:RehearseTimings" #define CMD_SID_RENAMELAYER ".uno:RenameLayer" #define CMD_SID_RENAMEPAGE ".uno:RenamePage" #define CMD_SID_RENAMEPAGE_QUICK ".uno:RenamePageQuick" #define CMD_SID_REVERSE_ORDER ".uno:ReverseOrder" #define CMD_SID_RULER_OBJECT ".uno:RulerObject" #define CMD_SID_SELECTAT ".uno:SelectAt" #define CMD_SID_SELECTGRADIENT ".uno:SelectGradient" #define CMD_SID_SELECTHATCH ".uno:SelectHatch" #define CMD_SID_SETFILLSTYLE ".uno:SetFillStyle" #define CMD_SID_SETLINESTYLE ".uno:SetLineStyle" #define CMD_SID_SETLINEWIDTH ".uno:SetLineWidth" #define CMD_SID_SET_SNAPITEM ".uno:SetSnapItem" #define CMD_SID_OBJECT_SHEAR ".uno:Shear" #define CMD_SID_3D_SHELL ".uno:Shell3D" #define CMD_SID_SLIDE_TRANSITIONS_PANEL ".uno:SlideChangeWindow" #define CMD_SID_SLIDE_MASTERPAGE ".uno:SlideMasterPage" #define CMD_SID_SNAP_BORDER ".uno:SnapBorder" #define CMD_SID_SNAP_FRAME ".uno:SnapFrame" #define CMD_SID_SNAP_POINTS ".uno:SnapPoints" #define CMD_SID_SOLID_CREATE ".uno:SolidCreate" #define CMD_SID_3D_SPHERE ".uno:Sphere" #define CMD_SID_STARTAPP ".uno:StartApplication" #define CMD_SID_SUMMARY_PAGE ".uno:SummaryPage" #define CMD_SID_SWITCHLAYER ".uno:SwitchLayer" #define CMD_SID_SWITCHPAGE ".uno:SwitchPage" #define CMD_SID_SWITCH_POINTEDIT ".uno:SwitchPointEdit" #define CMD_SID_TEXTALIGNMENT ".uno:TextAlignment" #define CMD_SID_TEXTATTR_DLG ".uno:TextAttributes" #define CMD_SID_TEXT_FITTOSIZE ".uno:TextFitToSizeTool" #define CMD_SID_DRAWTBX_TEXT ".uno:TextToolbox" #define CMD_SID_TITLE_MASTERPAGE ".uno:TitleMasterPage" #define CMD_SID_3D_TORUS ".uno:Torus" #define CMD_SID_TEXT_FITTOSIZE_VERTICAL ".uno:VerticalTextFitToSizeTool" #define CMD_SID_ZOOM_PANNING ".uno:ZoomPanning" #define CMD_SID_OBJECT_CROP ".uno:Crop" 
#define CMD_SID_HEADER_AND_FOOTER ".uno:HeaderAndFooter" #define CMD_SID_INSERT_PAGE_NUMBER ".uno:InsertPageNumber" #define CMD_SID_INSERT_DATE_TIME ".uno:InsertDateAndTime" #define CMD_SID_MASTER_LAYOUTS ".uno:MasterLayouts" #define CMD_SID_DRAW_FONTWORK ".uno:DrawFontwork" #define CMD_SID_DRAW_FONTWORK_VERTICAL ".uno:DrawFontworkVertical" #define CMD_SID_LAYER_DIALOG_WIN ".uno:LayerDialogChildWindow" #define CMD_SID_INSERT_MASTER_PAGE ".uno:InsertMasterPage" #define CMD_SID_DELETE_MASTER_PAGE ".uno:DeleteMasterPage" #define CMD_SID_RENAME_MASTER_PAGE ".uno:RenameMasterPage" #define CMD_SID_CLOSE_MASTER_VIEW ".uno:CloseMasterView" #define CMD_SID_SELECT_BACKGROUND ".uno:SelectBackground" #define CMD_SID_DISPLAY_MASTER_BACKGROUND ".uno:DisplayMasterBackground" #define CMD_SID_DISPLAY_MASTER_OBJECTS ".uno:DisplayMasterObjects" #define CMD_SID_TABLE_TOOLBOX ".uno:TableToolBox" #define CMD_SID_TABLE_DISTRIBUTE_COLUMNS ".uno:DistributeColumns" #define CMD_SID_TABLE_DISTRIBUTE_ROWS ".uno:DistributeRows" #define CMD_SID_TP_APPLY_TO_ALL_SLIDES ".uno:TaskPaneApplyToAllSlides" #define CMD_SID_TP_APPLY_TO_SELECTED_SLIDES ".uno:TaskPaneApplyToSelectedSlides" #define CMD_SID_TP_USE_FOR_NEW_PRESENTATIONS ".uno:TaskPaneUseForNewPresentations" #define CMD_SID_TP_SHOW_SMALL_PREVIEW ".uno:TaskPaneShowSmallPreview" #define CMD_SID_TP_SHOW_LARGE_PREVIEW ".uno:TaskPaneShowLargePreview" #define CMD_SID_TP_EDIT_MASTER ".uno:TaskPaneEditMaster" #define CMD_SID_INSERTPAGE_LAYOUT_MENU ".uno:TaskPaneInsertPage" #define CMD_SID_SWITCH_SHELL ".uno:SwitchViewShell" #define CMD_SID_SWITCH_SHELL_PANE ".uno:SwitchViewShellPane" #define CMD_SID_LEFT_PANE_IMPRESS ".uno:LeftPaneImpress" #define CMD_SID_LEFT_PANE_DRAW ".uno:LeftPaneDraw" #define CMD_SID_RIGHT_PANE ".uno:RightPane" #define CMD_SID_NOTES_WINDOW ".uno:NotesChildWindow" #define CMD_SID_NORMAL_MULTI_PANE_GUI ".uno:NormalMultiPaneGUI" #define CMD_SID_SLIDE_SORTER_MULTI_PANE_GUI ".uno:SlideSorterMultiPaneGUI" #define CMD_SID_TASK_PANE ".uno:TaskPaneShowPanel" #endif
12,357
831
/* * Copyright 2013 MongoDB, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #if defined(__linux__) # include <sys/syscall.h> #elif defined(_WIN32) # include <process.h> #else # include <unistd.h> #endif #include <stdarg.h> #include <time.h> #include "mongoc-log.h" #include "mongoc-log-private.h" #include "mongoc-thread-private.h" static mongoc_mutex_t gLogMutex; static mongoc_log_func_t gLogFunc = mongoc_log_default_handler; #ifdef MONGOC_TRACE static bool gLogTrace = true; #endif static void *gLogData; static MONGOC_ONCE_FUN( _mongoc_ensure_mutex_once) { mongoc_mutex_init(&gLogMutex); MONGOC_ONCE_RETURN; } void mongoc_log_set_handler (mongoc_log_func_t log_func, void *user_data) { static mongoc_once_t once = MONGOC_ONCE_INIT; mongoc_once(&once, &_mongoc_ensure_mutex_once); mongoc_mutex_lock(&gLogMutex); gLogFunc = log_func; gLogData = user_data; mongoc_mutex_unlock(&gLogMutex); } /* just for testing */ void _mongoc_log_get_handler (mongoc_log_func_t *log_func, void **user_data) { *log_func = gLogFunc; *user_data = gLogData; } void mongoc_log (mongoc_log_level_t log_level, const char *log_domain, const char *format, ...) { va_list args; char *message; static mongoc_once_t once = MONGOC_ONCE_INIT; int stop_logging; mongoc_once(&once, &_mongoc_ensure_mutex_once); stop_logging = !gLogFunc; #ifdef MONGOC_TRACE stop_logging = stop_logging || (log_level == MONGOC_LOG_LEVEL_TRACE && !gLogTrace); #endif if (stop_logging) { return; } BSON_ASSERT (format); va_start(args, format); message = bson_strdupv_printf(format, args); va_end(args); mongoc_mutex_lock(&gLogMutex); gLogFunc(log_level, log_domain, message, gLogData); mongoc_mutex_unlock(&gLogMutex); bson_free(message); } const char * mongoc_log_level_str (mongoc_log_level_t log_level) { switch (log_level) { case MONGOC_LOG_LEVEL_ERROR: return "ERROR"; case MONGOC_LOG_LEVEL_CRITICAL: return "CRITICAL"; case MONGOC_LOG_LEVEL_WARNING: return "WARNING"; case MONGOC_LOG_LEVEL_MESSAGE: return "MESSAGE"; case MONGOC_LOG_LEVEL_INFO: return "INFO"; case MONGOC_LOG_LEVEL_DEBUG: return "DEBUG"; case MONGOC_LOG_LEVEL_TRACE: return "TRACE"; default: return "UNKNOWN"; } } void mongoc_log_default_handler (mongoc_log_level_t log_level, const char *log_domain, const char *message, void *user_data) { struct timeval tv; struct tm tt; time_t t; FILE *stream; char nowstr[32]; int pid; bson_gettimeofday(&tv); t = tv.tv_sec; #ifdef _WIN32 # ifdef _MSC_VER localtime_s(&tt, &t); # else tt = *(localtime(&t)); # endif #else localtime_r(&t, &tt); #endif strftime (nowstr, sizeof nowstr, "%Y/%m/%d %H:%M:%S", &tt); switch (log_level) { case MONGOC_LOG_LEVEL_ERROR: case MONGOC_LOG_LEVEL_CRITICAL: case MONGOC_LOG_LEVEL_WARNING: stream = stderr; break; case MONGOC_LOG_LEVEL_MESSAGE: case MONGOC_LOG_LEVEL_INFO: case MONGOC_LOG_LEVEL_DEBUG: case MONGOC_LOG_LEVEL_TRACE: default: stream = stdout; } #ifdef __linux__ pid = syscall (SYS_gettid); #elif defined(_WIN32) pid = (int)_getpid (); #else pid = (int)getpid (); #endif fprintf (stream, "%s.%04ld: [%5d]: %8s: %12s: %s\n", nowstr, tv.tv_usec 
/ 1000L, pid, mongoc_log_level_str(log_level), log_domain, message); } bool _mongoc_log_trace_is_enabled (void) { #ifdef MONGOC_TRACE return gLogTrace; #else return false; #endif } void mongoc_log_trace_enable (void) { #ifdef MONGOC_TRACE gLogTrace = true; #endif } void mongoc_log_trace_disable (void) { #ifdef MONGOC_TRACE gLogTrace = false; #endif } void mongoc_log_trace_bytes (const char *domain, const uint8_t *_b, size_t _l) { bson_string_t *str, *astr; int32_t _i; uint8_t _v; #ifdef MONGOC_TRACE if (!gLogTrace) { return; } #endif str = bson_string_new(NULL); astr = bson_string_new(NULL); for (_i = 0; _i < _l; _i++) { _v = *(_b + _i); if ((_i % 16) == 0) { bson_string_append_printf(str, "%05x: ", _i); } bson_string_append_printf(str, " %02x", _v); if (isprint(_v)) { bson_string_append_printf(astr, " %c", _v); } else { bson_string_append(astr, " ."); } if ((_i % 16) == 15) { mongoc_log(MONGOC_LOG_LEVEL_TRACE, domain, "%s %s", str->str, astr->str); bson_string_truncate(str, 0); bson_string_truncate(astr, 0); } else if ((_i % 16) == 7) { bson_string_append(str, " "); bson_string_append(astr, " "); } } if (_i != 16) { mongoc_log(MONGOC_LOG_LEVEL_TRACE, domain, "%-56s %s", str->str, astr->str); } bson_string_free(str, true); bson_string_free(astr, true); } void mongoc_log_trace_iovec (const char *domain, const mongoc_iovec_t *_iov, size_t _iovcnt) { bson_string_t *str, *astr; const char *_b; unsigned _i = 0; unsigned _j = 0; unsigned _k = 0; size_t _l = 0; uint8_t _v; #ifdef MONGOC_TRACE if (!gLogTrace) { return; } #endif for (_i = 0; _i < _iovcnt; _i++) { _l += _iov[_i].iov_len; } _i = 0; str = bson_string_new(NULL); astr = bson_string_new(NULL); for (_j = 0; _j < _iovcnt; _j++) { _b = (char *)_iov[_j].iov_base; _l = _iov[_j].iov_len; for (_k = 0; _k < _l; _k++, _i++) { _v = *(_b + _k); if ((_i % 16) == 0) { bson_string_append_printf(str, "%05x: ", _i); } bson_string_append_printf(str, " %02x", _v); if (isprint(_v)) { bson_string_append_printf(astr, " %c", _v); } else { bson_string_append(astr, " ."); } if ((_i % 16) == 15) { mongoc_log(MONGOC_LOG_LEVEL_TRACE, domain, "%s %s", str->str, astr->str); bson_string_truncate(str, 0); bson_string_truncate(astr, 0); } else if ((_i % 16) == 7) { bson_string_append(str, " "); bson_string_append(astr, " "); } } } if (_i != 16) { mongoc_log(MONGOC_LOG_LEVEL_TRACE, domain, "%-56s %s", str->str, astr->str); } bson_string_free(str, true); bson_string_free(astr, true); }
3,667
318
<reponame>dreameverything/Gaea<gh_stars>100-1000
/*
 *  Copyright Beijing 58 Information Technology Co.,Ltd.
 *
 *  Licensed to the Apache Software Foundation (ASF) under one
 *  or more contributor license agreements.  See the NOTICE file
 *  distributed with this work for additional information
 *  regarding copyright ownership.  The ASF licenses this file
 *  to you under the Apache License, Version 2.0 (the
 *  "License"); you may not use this file except in compliance
 *  with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an
 *  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 *  KIND, either express or implied.  See the License for the
 *  specific language governing permissions and limitations
 *  under the License.
 */
package com.bj58.spat.gaea.server.util.tools;

import java.util.Map;

import com.bj58.spat.gaea.server.util.config.PropertiesHelper;

public class UnityFactory {

    private static Map<String, Object> mapConfig = null;

    private UnityFactory(String configPath) {
        try {
            PropertiesHelper ph = new PropertiesHelper(configPath);
            mapConfig = ph.getAllKeyValue();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static Object lockHelper = new Object();
    private static UnityFactory unityFactory = null;

    public static UnityFactory getIntrance(String configPath) {
        if (unityFactory == null) {
            synchronized (lockHelper) {
                if (unityFactory == null) {
                    System.out.println("UnityFactory:" + configPath);
                    unityFactory = new UnityFactory(configPath);
                }
            }
        }
        return unityFactory;
    }

    @SuppressWarnings("unchecked")
    public <T> T create(Class<?> clazz) throws Exception {
        String value = mapConfig.get(clazz.getName()).toString();
        System.out.println("create:" + value);
        return (T) Class.forName(value).newInstance();
    }
}
685
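A usage sketch for the UnityFactory singleton above (illustrative only: the properties path and the List-to-ArrayList mapping are assumptions). The factory loads key/value pairs from the given properties file, looks up the requested class's fully-qualified name as the key, and reflectively instantiates the mapped implementation.

import java.util.List;

import com.bj58.spat.gaea.server.util.tools.UnityFactory;

public class UnityFactoryDemo {
    public static void main(String[] args) throws Exception {
        // Assumed contents of conf/unity.properties:
        //   java.util.List=java.util.ArrayList
        UnityFactory factory = UnityFactory.getIntrance("conf/unity.properties");
        List<String> list = factory.create(List.class);
        list.add("gaea");
        System.out.println(list);
    }
}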
1,269
<reponame>cminusQAQ/graph4nlp<gh_stars>1000+ import copy import torch import torch.nn as nn import torch.nn.functional as F import torch.nn.init as init from graph4nlp.pytorch.models.base import Graph2XBase from graph4nlp.pytorch.modules.prediction.generation.decoder_strategy import DecoderStrategy from graph4nlp.pytorch.modules.prediction.generation.TreeBasedDecoder import StdTreeDecoder from graph4nlp.pytorch.modules.utils.tree_utils import Tree, to_cuda class Graph2Tree(Graph2XBase): """ Graph2Tree is a general end-to-end neural encoder-decoder model that maps an input graph to a tree structure. The graph2tree model consists the following components: 1) node embedding 2) graph embedding 3) tree decoding. Since the full pipeline will consist all parameters, so we will add prefix to the original parameters in each component as follows (except the listed four parameters): 1) emb_ + parameter_name (eg: ``emb_input_size``) 2) gnn_ + parameter_name (eg: ``gnn_direction_option``) 3) dec_ + parameter_name (eg: ``dec_max_decoder_step``) Considering neatness, we will only present the four hyper-parameters which don't meet regulations. Parameters ---------- vocab_model: VocabModel The vocabulary. graph_name: str The graph type. Excepted in ["dependency", "constituency", "node_emb", "node_emb_refined"]. gnn: str The graph neural network type. Expected in ["gcn", "gat", "graphsage", "ggnn"] embedding_style: dict The options used in the embedding module. """ def __init__( self, vocab_model, embedding_style, graph_name, # embedding emb_input_size, emb_hidden_size, emb_word_dropout, emb_rnn_dropout, emb_fix_word_emb, emb_fix_bert_emb, # gnn gnn, gnn_num_layers, gnn_direction_option, gnn_input_size, gnn_hidden_size, gnn_output_size, gnn_feat_drop, gnn_attn_drop, # decoder dec_use_copy, dec_hidden_size, dec_dropout, dec_teacher_forcing_rate, dec_max_decoder_step, dec_max_tree_depth, dec_attention_type, dec_use_sibling, # optional criterion=None, share_vocab=False, **kwargs ): super(Graph2Tree, self).__init__( vocab_model=vocab_model, emb_input_size=emb_input_size, emb_hidden_size=emb_hidden_size, graph_name=graph_name, gnn_direction_option=gnn_direction_option, gnn=gnn, gnn_num_layers=gnn_num_layers, embedding_style=embedding_style, gnn_feats_dropout=gnn_feat_drop, gnn_attn_dropout=gnn_attn_drop, emb_rnn_dropout=emb_rnn_dropout, emb_fix_word_emb=emb_fix_word_emb, emb_fix_bert_emb=emb_fix_bert_emb, emb_word_dropout=emb_word_dropout, gnn_hidden_size=gnn_hidden_size, gnn_input_size=gnn_input_size, gnn_output_size=gnn_output_size, **kwargs ) self.src_vocab, self.tgt_vocab = vocab_model.in_word_vocab, vocab_model.out_word_vocab self.gnn_hidden_size = gnn_hidden_size self.dec_hidden_size = dec_hidden_size self.use_copy = dec_use_copy self.input_size = self.src_vocab.vocab_size self.output_size = self.tgt_vocab.vocab_size self.criterion = ( nn.NLLLoss( size_average=False, ignore_index=self.src_vocab.get_symbol_idx(self.src_vocab.pad_token), ) if criterion is None else criterion ) self.use_share_vocab = share_vocab if self.use_share_vocab == 0: self.tgt_word_embedding = nn.Embedding( self.tgt_vocab.vocab_size, dec_hidden_size, padding_idx=self.tgt_vocab.get_symbol_idx(self.tgt_vocab.pad_token), _weight=torch.from_numpy(self.tgt_vocab.embeddings).float(), ) self.decoder = StdTreeDecoder( attn_type=dec_attention_type, embeddings=self.enc_word_emb.word_emb_layer if self.use_share_vocab else self.tgt_word_embedding, enc_hidden_size=gnn_hidden_size, dec_emb_size=self.tgt_vocab.embedding_dims, 
dec_hidden_size=dec_hidden_size, output_size=self.output_size, criterion=self.criterion, teacher_force_ratio=dec_teacher_forcing_rate, use_sibling=dec_use_sibling, use_copy=self.use_copy, dropout_for_decoder=dec_dropout, max_dec_seq_length=dec_max_decoder_step, max_dec_tree_depth=dec_max_tree_depth, tgt_vocab=self.tgt_vocab, ) def forward(self, batch_graph, tgt_tree_batch, oov_dict=None): batch_graph = self.graph_initializer(batch_graph) if hasattr(self, "graph_topology") and hasattr(self.graph_topology, "dynamic_topology"): batch_graph = self.graph_topology.dynamic_topology(batch_graph) batch_graph = self.gnn_encoder(batch_graph) batch_graph.node_features["rnn_emb"] = batch_graph.node_features["node_feat"] loss = self.decoder(g=batch_graph, tgt_tree_batch=tgt_tree_batch, oov_dict=oov_dict) return loss def translate(self, input_graph, use_beam_search=True, beam_size=4, oov_dict=None): device = input_graph.device prev_c = torch.zeros((1, self.dec_hidden_size), requires_grad=False) prev_h = torch.zeros((1, self.dec_hidden_size), requires_grad=False) batch_graph = self.graph_initializer(input_graph) if hasattr(self, "graph_topology") and hasattr(self.graph_topology, "dynamic_topology"): batch_graph = self.graph_topology.dynamic_topology(batch_graph) batch_graph = self.gnn_encoder(batch_graph) batch_graph.node_features["rnn_emb"] = batch_graph.node_features["node_feat"] params = self.decoder._extract_params(batch_graph) graph_node_embedding = params["graph_node_embedding"] if self.decoder.graph_pooling_strategy == "max": graph_level_embedding = torch.max(graph_node_embedding, 1)[0] rnn_node_embedding = params["rnn_node_embedding"] # graph_node_mask = params["graph_node_mask"] enc_w_list = params["enc_batch"] enc_outputs = graph_node_embedding prev_c = graph_level_embedding prev_h = graph_level_embedding # decode queue_decode = [] queue_decode.append({"s": (prev_c, prev_h), "parent": 0, "child_index": 1, "t": Tree()}) head = 1 while head <= len(queue_decode) and head <= self.decoder.max_dec_tree_depth: s = queue_decode[head - 1]["s"] parent_h = s[1] t = queue_decode[head - 1]["t"] # sibling_state = torch.zeros( # (1, self.dec_hidden_size), dtype=torch.float, requires_grad=False # ).to(device) # flag_sibling = False # for q_index in range(len(queue_decode)): # if ( # (head <= len(queue_decode)) # and (q_index < head - 1) # and (queue_decode[q_index]["parent"] == queue_decode[head - 1]["parent"]) # and ( # queue_decode[q_index]["child_index"] < queue_decode[head - 1]["child_index"] # ) # ): # flag_sibling = True # sibling_index = q_index # if flag_sibling: # sibling_state = queue_decode[sibling_index]["s"][1] if head == 1: prev_word = torch.tensor( [self.tgt_vocab.get_symbol_idx(self.tgt_vocab.start_token)], dtype=torch.long ) else: prev_word = torch.tensor([self.tgt_vocab.get_symbol_idx("(")], dtype=torch.long) prev_word = to_cuda(prev_word, device) i_child = 1 if not use_beam_search: while True: prediction, (curr_c, curr_h), _ = self.decoder.decode_step( tgt_batch_size=1, dec_single_input=prev_word, dec_single_state=s, memory=enc_outputs, parent_state=parent_h, oov_dict=oov_dict, enc_batch=enc_w_list, ) s = (curr_c, curr_h) prev_word = torch.log(prediction + 1e-31) prev_word = prev_word.argmax(1) if ( int(prev_word[0]) == self.tgt_vocab.get_symbol_idx(self.tgt_vocab.end_token) or t.num_children >= self.decoder.max_dec_seq_length ): break elif int(prev_word[0]) == self.tgt_vocab.get_symbol_idx( self.tgt_vocab.non_terminal_token ): queue_decode.append( { "s": (s[0].clone(), s[1].clone()), 
"parent": head, "child_index": i_child, "t": Tree(), } ) t.add_child(int(prev_word[0])) else: t.add_child(int(prev_word[0])) i_child = i_child + 1 else: topk = 1 # decoding goes sentence by sentence assert graph_node_embedding.size(0) == 1 beam_search_generator = DecoderStrategy( beam_size=beam_size, vocab=self.tgt_vocab, decoder=self.decoder, rnn_type="lstm", use_copy=True, use_coverage=False, ) decoded_results = beam_search_generator.beam_search_for_tree_decoding( decoder_initial_state=(s[0], s[1]), decoder_initial_input=prev_word, parent_state=parent_h, graph_node_embedding=enc_outputs, rnn_node_embedding=rnn_node_embedding, device=device, topk=topk, oov_dict=oov_dict, enc_batch=enc_w_list, ) generated_sentence = decoded_results[0][0] for node_i in generated_sentence: if int(node_i.wordid.item()) == self.tgt_vocab.get_symbol_idx( self.tgt_vocab.non_terminal_token ): queue_decode.append( { "s": (node_i.h[0].clone(), node_i.h[1].clone()), "parent": head, "child_index": i_child, "t": Tree(), } ) t.add_child(int(node_i.wordid.item())) i_child = i_child + 1 elif ( int(node_i.wordid.item()) != self.tgt_vocab.get_symbol_idx(self.tgt_vocab.end_token) and int(node_i.wordid.item()) != self.tgt_vocab.get_symbol_idx(self.tgt_vocab.start_token) and int(node_i.wordid.item()) != self.tgt_vocab.get_symbol_idx("(") ): t.add_child(int(node_i.wordid.item())) i_child = i_child + 1 head = head + 1 for i in range(len(queue_decode) - 1, 0, -1): cur = queue_decode[i] queue_decode[cur["parent"] - 1]["t"].children[cur["child_index"] - 1] = cur["t"] return queue_decode[0]["t"].to_list(self.tgt_vocab) def init(self, init_weight): for name, param in self.named_parameters(): if param.requires_grad: if ( ("word_embedding" in name) or ("word_emb_layer" in name) or ("bert_embedding" in name) ): pass else: if len(param.size()) >= 2: if "rnn" in name: init.orthogonal_(param) else: init.xavier_uniform_(param, gain=1.0) else: init.uniform_(param, -init_weight, init_weight) def post_process(self, decode_results, vocab): candidate = [int(c) for c in decode_results] pred_str = " ".join(self.tgt_vocab.get_idx_symbol_for_list(candidate)) return [pred_str] def inference_forward(self, batch_graph, beam_size, topk=1, oov_dict=None): """ Decoding with the support of beam_search. Specifically, when ``beam_size`` is 1, it is equal to greedy search. Parameters ---------- batch_graph: GraphData The graph input beam_size: int The beam width. When it is 1, the output is equal to greedy search's output. topk: int, default=1 The number of decoded output to be reserved. Usually, ``topk`` should be smaller or equal to ``beam_size`` oov_dict: VocabModel, default=None The vocabulary for copy. Returns ------- results: torch.Tensor The results with the shape of ``[batch_size, topk, max_decoder_step]`` containing the word indexes. # noqa """ return self.translate( input_graph=batch_graph["graph_data"], use_beam_search=(beam_size > 1), beam_size=beam_size, oov_dict=oov_dict, ) @classmethod def from_args(cls, opt, vocab_model): """ The function for building ``Graph2Tree`` model. Parameters ---------- opt: dict The configuration dict. It should has the same hierarchy and keys as the template. vocab_model: VocabModel The vocabulary. 
Returns ------- model: Graph2Tree """ initializer_args = cls._get_node_initializer_params(opt) gnn_args = cls._get_gnn_params(opt) dec_args = cls._get_decoder_params(opt) args = copy.deepcopy(initializer_args) args.update(gnn_args) args.update(dec_args) args["share_vocab"] = opt["graph_construction_args"]["graph_construction_share"][ "share_vocab" ] return cls(vocab_model=vocab_model, **args) @staticmethod def _get_decoder_params(opt): dec_args = opt["decoder_args"] shared_args = copy.deepcopy(dec_args["rnn_decoder_share"]) private_args = copy.deepcopy(dec_args["rnn_decoder_private"]) ret = copy.deepcopy(dict(shared_args, **private_args)) dec_ret = {"dec_" + key: value for key, value in ret.items()} return dec_ret @staticmethod def _get_gnn_params(opt): args = opt["graph_embedding_args"] shared_args = copy.deepcopy(args["graph_embedding_share"]) private_args = copy.deepcopy(args["graph_embedding_private"]) if "activation" in private_args.keys(): private_args["activation"] = ( getattr(F, private_args["activation"]) if private_args["activation"] else None ) if "norm" in private_args.keys(): private_args["norm"] = ( getattr(F, private_args["norm"]) if private_args["norm"] else None ) gnn_shared_args = {"gnn_" + key: value for key, value in shared_args.items()} pri_shared_args = {"gnn_" + key: value for key, value in private_args.items()} ret = copy.deepcopy(dict(gnn_shared_args, **pri_shared_args)) ret["gnn"] = opt["graph_embedding_name"] return ret @staticmethod def _get_node_initializer_params(opt): # Dynamic graph construction related params are stored here init_args = opt["graph_construction_args"]["graph_construction_private"] ret: dict = copy.deepcopy(init_args) args = opt["graph_initialization_args"] ret.update(args) ret.pop("embedding_style") emb_ret = {"emb_" + key: value for key, value in ret.items()} emb_ret["embedding_style"] = args["embedding_style"] emb_ret["graph_name"] = opt["graph_construction_name"] return emb_ret
9,130
890
<gh_stars>100-1000
/*
 *
 * Copyright 2018 Asylo authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

#include "asylo/platform/primitives/util/status_conversions.h"

#include "absl/status/status.h"
#include "asylo/platform/primitives/primitive_status.h"

namespace asylo {
namespace primitives {

PrimitiveStatus MakePrimitiveStatus(const Status& status) {
  return PrimitiveStatus{static_cast<int>(status.code()),
                         status.message().data(), status.message().size()};
}

Status MakeStatus(const PrimitiveStatus& primitive_status) {
  return Status{static_cast<absl::StatusCode>(primitive_status.error_code()),
                primitive_status.error_message()};
}

}  // namespace primitives
}  // namespace asylo
396
1,279
<reponame>shoes22/openpilot bool flash_is_locked(void) { return (FLASH->CR & FLASH_CR_LOCK); } void flash_unlock(void) { FLASH->KEYR = 0x45670123; FLASH->KEYR = 0xCDEF89AB; } bool flash_erase_sector(uint8_t sector, bool unlocked) { // don't erase the bootloader(sector 0) if (sector != 0 && sector < 12 && unlocked) { FLASH->CR = (sector << 3) | FLASH_CR_SER; FLASH->CR |= FLASH_CR_STRT; while (FLASH->SR & FLASH_SR_BSY); return true; } return false; } void flash_write_word(void *prog_ptr, uint32_t data) { uint32_t *pp = prog_ptr; FLASH->CR = FLASH_CR_PSIZE_1 | FLASH_CR_PG; *pp = data; while (FLASH->SR & FLASH_SR_BSY); } void flush_write_buffer(void) { }
308
5,169
<reponame>Gantios/Specs { "name": "LinkV-Communication", "version": "0.0.1", "summary": "RTC + IM 优化接口", "description": "RTC + IM you can send im message", "homepage": "https://github.com/linkv-io/LinkV-Communication-iOS", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "wangyansnow": "<EMAIL>" }, "source": { "git": "https://github.com/linkv-io/LinkV-Communication-iOS.git", "tag": "0.0.1" }, "platforms": { "ios": "9.0" }, "source_files": "LVCEngine/LVCEngine/**/*", "public_header_files": "LVCEngine/LVCEngine/**/*.h", "vendored_frameworks": [ "LVCEngine/framework/LinkV.framework", "LVCEngine/framework/LVIMLib.framework" ], "vendored_libraries": [ "LVCEngine/framework/libcares.a", "LVCEngine/framework/libopencore-amrnb.a", "LVCEngine/framework/libprotobuf-lite.a" ], "resources": "LVCEngine/resources/default.metallib", "libraries": [ "c++", "resolv.9", "sqlite3", "z" ], "xcconfig": { "ENABLE_BITCODE": "NO" } }
490
718
<filename>examples/Aviato/package.json { "name": "Aviato", "version": "0.0.1", "private": true, "scripts": { "start": "./node_modules/.bin/react-native start", "ios": "./node_modules/.bin/react-native run-ios", "android": "./node_modules/.bin/react-native run-android" }, "dependencies": { "react": "~15.3.1", "react-native": "0.36.0-rc.1", "react-native-side-menu": "^0.20.0", "react-router-native": "file:../../" } }
204
301
package com.sap.iot.starterkit.cert.type; import java.util.List; public class Envelope { private String messageType; private List<Message> messages; private String mode; public String getMessageType() { return messageType; } public void setMessageType(String messageType) { this.messageType = messageType; } public List<Message> getMessages() { return messages; } public void setMessages(List<Message> messages) { this.messages = messages; } public String getMode() { return mode; } public void setMode(String mode) { this.mode = mode; } }
193
14,668
<reponame>zealoussnow/chromium<filename>ui/gfx/animation/slide_animation.cc // Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ui/gfx/animation/slide_animation.h" #include <math.h> #include "base/cxx17_backports.h" #include "ui/gfx/animation/animation_delegate.h" namespace gfx { SlideAnimation::SlideAnimation(AnimationDelegate* target) : LinearAnimation(target), target_(target) {} SlideAnimation::~SlideAnimation() = default; void SlideAnimation::Reset(double value) { direction_ = absl::nullopt; value_current_ = value; Stop(); } void SlideAnimation::Show() { BeginAnimating(Direction::kShowing); } void SlideAnimation::Hide() { BeginAnimating(Direction::kHiding); } void SlideAnimation::SetSlideDuration(base::TimeDelta duration) { slide_duration_ = duration; } void SlideAnimation::SetDampeningValue(double dampening_value) { dampening_value_ = dampening_value; } double SlideAnimation::GetCurrentValue() const { return value_current_; } base::TimeDelta SlideAnimation::GetDuration() { const double current_progress = direction_ == Direction::kShowing ? value_current_ : 1.0 - value_current_; return slide_duration_ * (1 - pow(current_progress, dampening_value_)); } void SlideAnimation::BeginAnimating(Direction direction) { if (direction_ == direction) return; direction_ = direction; value_start_ = value_current_; value_end_ = (direction_ == Direction::kShowing) ? 1.0 : 0.0; // Make sure we actually have something to do. if (slide_duration_.is_zero()) { AnimateToState(1.0); // Skip to the end of the animation. if (delegate()) { delegate()->AnimationProgressed(this); delegate()->AnimationEnded(this); } } else if (value_current_ != value_end_) { // This will also reset the currently-occurring animation. SetDuration(GetDuration()); Start(); } } void SlideAnimation::AnimateToState(double state) { state = Tween::CalculateValue(tween_type_, base::clamp(state, 0.0, 1.0)); if (state == 1.0) direction_ = absl::nullopt; value_current_ = value_start_ + (value_end_ - value_start_) * state; // Correct for any overshoot (while state may be capped at 1.0, let's not // take any rounding error chances. if ((value_end_ >= value_start_) ? (value_current_ > value_end_) : (value_current_ < value_end_)) { value_current_ = value_end_; } } } // namespace gfx
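// --- Hedged usage sketch (not part of the original file) --------------------
// A minimal illustration of how SlideAnimation is typically driven, assuming a
// view-like class that implements gfx::AnimationDelegate (whose
// AnimationProgressed/AnimationEnded callbacks are invoked above). The
// FadingView class, its members and the availability of base::Milliseconds in
// this tree are assumptions for illustration only.
//
//   class FadingView : public gfx::AnimationDelegate {
//    public:
//     FadingView() : slide_(this) {
//       slide_.SetSlideDuration(base::Milliseconds(150));
//     }
//     void FadeIn() { slide_.Show(); }    // animates GetCurrentValue() -> 1.0
//     void FadeOut() { slide_.Hide(); }   // animates GetCurrentValue() -> 0.0
//
//     // gfx::AnimationDelegate:
//     void AnimationProgressed(const gfx::Animation* animation) override {
//       opacity_ = slide_.GetCurrentValue();  // eased value in [0, 1]
//     }
//
//    private:
//     gfx::SlideAnimation slide_;
//     double opacity_ = 0.0;
//   };
// -----------------------------------------------------------------------------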
893
3,301
package com.alibaba.alink.operator.batch.recommendation; import org.apache.flink.ml.api.misc.param.Params; import com.alibaba.alink.operator.common.recommendation.ItemCfRecommKernel; import com.alibaba.alink.operator.common.recommendation.RecommType; import com.alibaba.alink.params.recommendation.BaseRateRecommParams; /** * Rating for user-item pair with item CF model. */ public class ItemCfRateRecommBatchOp extends BaseRecommBatchOp <ItemCfRateRecommBatchOp> implements BaseRateRecommParams <ItemCfRateRecommBatchOp> { private static final long serialVersionUID = 6828072772516197481L; public ItemCfRateRecommBatchOp() { this(null); } public ItemCfRateRecommBatchOp(Params params) { super(ItemCfRecommKernel::new, RecommType.RATE, params); } }
264
5,169
{ "name": "TJMGameBoard", "version": "1.0.0", "summary": "A simple game board written in Objective-C.", "description": "I wrote this to implement a simple game board model object.", "homepage": "https://github.com/Cubd/Game-Board", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "Cubd": "<EMAIL>" }, "platforms": { "ios": "5.0" }, "source": { "git": "https://github.com/Cubd/Game-Board.git", "tag": "1.0.0" }, "source_files": "Source/**/*.{h,m}", "exclude_files": "Test Project", "requires_arc": true, "dependencies": { "TJMTwoDimensionalArray": [ "~> 1.0" ] } }
281
4,535
<gh_stars>1000+ // Copyright 2017-2018 Intel Corporation. #include "tile/hal/opencl/device_memory.h" #include <utility> #include "tile/hal/opencl/cl_mem_arena.h" #include "tile/hal/opencl/cl_mem_buffer.h" namespace vertexai { namespace tile { namespace hal { namespace opencl { DeviceMemory::DeviceMemory(const std::shared_ptr<DeviceState>& device_state) : device_state_{device_state} {} std::shared_ptr<hal::Buffer> DeviceMemory::MakeBuffer(std::uint64_t size, BufferAccessMask /* access */) { Err err; CLObj<cl_mem> mem = ocl::CreateBuffer(device_state_->cl_ctx().get(), CL_MEM_READ_WRITE, size, nullptr, err.ptr()); Err::Check(err, "Unable to allocate device-local memory"); return std::make_shared<CLMemBuffer>(device_state_, size, std::move(mem)); } std::shared_ptr<hal::Arena> DeviceMemory::MakeArena(std::uint64_t size, BufferAccessMask /* access */) { Err err; CLObj<cl_mem> mem = ocl::CreateBuffer(device_state_->cl_ctx().get(), CL_MEM_READ_WRITE, size, nullptr, err.ptr()); Err::Check(err, "Unable to allocate device-local memory"); return std::make_shared<CLMemArena>(device_state_, size, std::move(mem)); } } // namespace opencl } // namespace hal } // namespace tile } // namespace vertexai
433
357
x = 48
y = 180


def gcd(x, y):
    # Euclid's algorithm: repeatedly replace (x, y) with (y, x mod y).
    while y != 0:
        x, y = y, x % y
    return x


def lcm(a, b):
    # Integer division keeps the result an int (gcd(a, b) always divides a * b).
    return (a * b) // gcd(a, b)


print("GCD is %d" % gcd(x, y))
print("LCM is %d" % lcm(x, y))
164
428
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.surfnet.oaaas.auth; import com.sun.jersey.api.client.Client; import com.sun.jersey.api.client.ClientResponse; import com.sun.jersey.api.client.config.ClientConfig; import com.sun.jersey.api.client.config.DefaultClientConfig; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.codehaus.jackson.map.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.core.io.ClassPathResource; import org.springframework.util.Assert; import org.surfnet.oaaas.model.TokenResponseCache; import org.surfnet.oaaas.model.TokenResponseCacheImpl; import org.surfnet.oaaas.model.VerifyTokenResponse; import javax.servlet.*; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.core.HttpHeaders; import java.io.IOException; import java.util.Properties; /** * {@link Filter} which can be used to protect all relevant resources by * validating the oauth access token with the Authorization server. This is an * example configuration: * <p/> * <pre> * {@code * <filter> * <filter-name>authorization-server</filter-name> * <filter-class>org.surfnet.oaaas.auth.AuthorizationServerFilter</filter-class> * <init-param> * <param-name>resource-server-key</param-name> * <param-value>university-foo</param-value> * </init-param> * <init-param> * <param-name>resource-server-secret</param-name> * <param-value><KEY></param-value> * </init-param> * <init-param> * <param-name>authorization-server-url</param-name> * <param-value>http://<host-name>/v1/tokeninfo</param-value> * </init-param> * <init-param> * <param-name>type-information-is-included</param-name> * <param-value>true</param-value> * </init-param> * </filter> * <filter-mapping> * <filter-name>authorization-server</filter-name> * <url-pattern>/*</url-pattern> * </filter-mapping> * } * </pre> * <p/> * The response of the Authorization Server is put on the * {@link HttpServletRequest} with the name * {@link AuthorizationServerFilter#VERIFY_TOKEN_RESPONSE}. * <p/> * Of course it might be better to use a properties file depending on the * environment (e.g. OTAP) to get the name, secret and url. This can be achieved * simple to provide an apis.application.properties file on the classpath or configure a * properties file name as init-param (to have multiple resource servers in the same tomcat instance). * <p/> * See {@link AuthorizationServerFilter#init(FilterConfig)} * <p/> * <p/> * Also note that by default the responses from the Authorization Server are not * cached. This in configurable in the properties file used by this Filter. 
Again * see {@link AuthorizationServerFilter#init(FilterConfig)} * <p/> * The cache behaviour can also be changed if you override * {@link AuthorizationServerFilter#cacheAccessTokens()} and to configure the * cache differently override {@link AuthorizationServerFilter#buildCache()} */ public class AuthorizationServerFilter implements Filter { private static final Logger LOG = LoggerFactory.getLogger(AuthorizationServerFilter.class); /* * Endpoint of the authorization server (e.g. something like * http://<host-name>/v1/tokeninfo) */ private String authorizationServerUrl; /* * Base64-encoded concatenation of the name of the resource server and the * secret separated with a colon */ private String authorizationValue; /* * Client to make GET calls to the authorization server */ private Client client; /* * Constant for the access token (oauth2 spec) */ private static final String BEARER = "bearer"; /* * Constant name of the request attribute where the response is stored */ public static final String VERIFY_TOKEN_RESPONSE = "VERIFY_TOKEN_RESPONSE"; /* * If not overridden by a subclass / configured otherwise we don't cache the answers from the authorization * server */ private boolean cacheEnabled; private TokenResponseCache cache; /* * By default we respond to preflight CORS requests and have a lenient policy as we are secured by OAuth2 */ private boolean allowCorsRequests = true; /* * Key and secret obtained out-of-band to authenticate against the * authorization server */ private String resourceServerKey; private String resourceServerSecret; /** * Whether (java) type information is included in the VerifyTokenResponse. */ private boolean typeInformationIsIncluded = false; private ObjectMapper objectMapper; @Override public void init(FilterConfig filterConfig) throws ServletException { /* * First check on the presence of a init-param where to look for the properties to support * multiple resource servers in the same war. Then look for second best apis-resource-server.properties file, then * try to use the filter config if parameters are present. If this also * fails trust on the setters (e.g. 
probably in test modus), but apply * fail-fast strategy */ ClassPathResource res = null; String propertiesFile = filterConfig.getInitParameter("apis-resource-server.properties.file"); if (StringUtils.isNotEmpty(propertiesFile)) { res = new ClassPathResource(propertiesFile); } if (res == null || !res.exists()) { res = new ClassPathResource("apis-resource-server.properties"); } if (res != null && res.exists()) { Properties prop = new Properties(); try { prop.load(res.getInputStream()); } catch (IOException e) { throw new RuntimeException("Error in reading the apis-resource-server.properties file", e); } resourceServerKey = prop.getProperty("adminService.resourceServerKey"); resourceServerSecret = prop.getProperty("adminService.resourceServerSecret"); authorizationServerUrl = prop.getProperty("adminService.tokenVerificationUrl"); cacheEnabled = Boolean.valueOf(prop.getProperty("adminService.cacheEnabled")); String allowCorsRequestsProperty = prop.getProperty("adminService.allowCorsRequests"); if (StringUtils.isNotEmpty(allowCorsRequestsProperty)) { allowCorsRequests = Boolean.valueOf(allowCorsRequestsProperty); } String typeInformationIsIncludedProperty = prop.getProperty("adminService.jsonTypeInfoIncluded"); if (StringUtils.isNotEmpty(typeInformationIsIncludedProperty)) { typeInformationIsIncluded = Boolean.valueOf(typeInformationIsIncludedProperty); } } else if (filterConfig.getInitParameter("resource-server-key") != null) { resourceServerKey = filterConfig.getInitParameter("resource-server-key"); resourceServerSecret = filterConfig.getInitParameter("resource-server-secret"); authorizationServerUrl = filterConfig.getInitParameter("authorization-server-url"); typeInformationIsIncluded = Boolean.valueOf(filterConfig.getInitParameter("type-information-is-included")); } Assert.hasText(resourceServerKey, "Must provide a resource server key"); Assert.hasText(resourceServerSecret, "Must provide a resource server secret"); Assert.hasText(authorizationServerUrl, "Must provide a authorization server url"); this.authorizationValue = new String(Base64.encodeBase64(resourceServerKey.concat(":").concat(resourceServerSecret) .getBytes())); if (cacheAccessTokens()) { this.cache = buildCache(); Assert.notNull(this.cache); } this.client = createClient(); this.objectMapper = createObjectMapper(typeInformationIsIncluded); } protected ObjectMapper createObjectMapper(boolean typeInformationIsIncluded) { ObjectMapper mapper = new ObjectMapperProvider().getContext(ObjectMapper.class); if (typeInformationIsIncluded) { mapper.enableDefaultTyping(ObjectMapper.DefaultTyping.NON_FINAL); } else { mapper.disableDefaultTyping(); } return mapper; } /** * @return Client */ protected Client createClient() { ClientConfig cc = new DefaultClientConfig(); cc.getClasses().add(ObjectMapperProvider.class); return Client.create(cc); } @SuppressWarnings({"rawtypes", "unchecked"}) protected TokenResponseCache buildCache() { return new TokenResponseCacheImpl(1000, 60 * 5); } @Override public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain chain) throws IOException, ServletException { HttpServletRequest request = (HttpServletRequest) servletRequest; HttpServletResponse response = (HttpServletResponse) servletResponse; if (handledCorsPreflightRequest(request, response)) { return; } /* * The Access Token from the Client app as documented in * http://tools.ietf.org/html/draft-ietf-oauth-v2#section-7 */ final String accessToken = getAccessToken(request); if (accessToken != null) { 
VerifyTokenResponse tokenResponse = getVerifyTokenResponse(accessToken); if (isValidResponse(tokenResponse)) { request.setAttribute(VERIFY_TOKEN_RESPONSE, tokenResponse); chain.doFilter(request, response); return; } } sendError(response, HttpServletResponse.SC_FORBIDDEN, "OAuth2 endpoint"); } protected VerifyTokenResponse getVerifyTokenResponse(String accessToken) { VerifyTokenResponse verifyTokenResponse = null; if (cacheAccessTokens()) { verifyTokenResponse = cache.getVerifyToken(accessToken); if (verifyTokenResponse != null) { return verifyTokenResponse; } } if (verifyTokenResponse == null) { ClientResponse res = client.resource(String.format("%s?access_token=%s", authorizationServerUrl, accessToken)) .header(HttpHeaders.AUTHORIZATION, "Basic " + authorizationValue).accept("application/json") .get(ClientResponse.class); try { String responseString = res.getEntity(String.class); int statusCode = res.getClientResponseStatus().getStatusCode(); LOG.debug("Got verify token response (status: {}): '{}'", statusCode, responseString); if (statusCode == HttpServletResponse.SC_OK) { verifyTokenResponse = objectMapper.readValue(responseString, VerifyTokenResponse.class); } } catch (Exception e) { LOG.error("Exception in reading result from AuthorizationServer", e); // anti-pattern, but null case is explicitly handled } } if (isValidResponse(verifyTokenResponse) && cacheAccessTokens()) { cache.storeVerifyToken(accessToken, verifyTokenResponse); } return verifyTokenResponse; } protected void sendError(HttpServletResponse response, int statusCode, String reason) { LOG.warn("No valid access-token on request. Will respond with error response: {} {}", statusCode, reason); try { response.sendError(statusCode, reason); response.flushBuffer(); } catch (IOException e) { throw new RuntimeException(reason, e); } } protected boolean cacheAccessTokens() { return cacheEnabled; } /* * http://www.w3.org/TR/cors/#resource-preflight-requests */ protected boolean handledCorsPreflightRequest(HttpServletRequest request, HttpServletResponse response) throws IOException { if (!this.allowCorsRequests || StringUtils.isBlank(request.getHeader("Origin"))) { return false; } /* * We must do this anyway, this being (probably) a CORS request */ response.setHeader("Access-Control-Allow-Origin", "*"); if (StringUtils.isNotBlank(request.getHeader("Access-Control-Request-Method")) && request.getMethod().equalsIgnoreCase("OPTIONS")) { /* * We don't want to propogate the request any further */ response.setHeader("Access-Control-Allow-Methods", getAccessControlAllowedMethods()); String requestHeaders = request.getHeader("Access-Control-Request-Headers"); if (StringUtils.isNotBlank(requestHeaders)) { response.setHeader("Access-Control-Allow-Headers", getAllowedHeaders(requestHeaders)); } response.setHeader("Access-Control-Max-Age", getAccessControlMaxAge()); response.setStatus(HttpServletResponse.SC_OK); response.flushBuffer(); return true; } return false; } protected String getAllowedHeaders(String requestHeaders) { return requestHeaders; } protected String getAccessControlMaxAge() { return "86400"; } protected String getAccessControlAllowedMethods() { return "GET, OPTIONS, HEAD, PUT, PATCH, POST, DELETE"; } private boolean isValidResponse(VerifyTokenResponse tokenResponse) { return tokenResponse != null && tokenResponse.getPrincipal() != null && tokenResponse.getError() == null; } private String getAccessToken(HttpServletRequest request) { String accessToken = null; String header = request.getHeader(HttpHeaders.AUTHORIZATION); if 
(header != null) { int space = header.indexOf(' '); if (space > 0) { String method = header.substring(0, space); if (BEARER.equalsIgnoreCase(method)) { accessToken = header.substring(space + 1); } } } return accessToken; } @Override public void destroy() { } public void setAuthorizationServerUrl(String authorizationServerUrl) { this.authorizationServerUrl = authorizationServerUrl; } public void setResourceServerSecret(String resourceServerSecret) { this.resourceServerSecret = resourceServerSecret; } public void setResourceServerKey(String resourceServerKey) { this.resourceServerKey = resourceServerKey; } public void setCacheEnabled(boolean cacheEnabled) { this.cacheEnabled = cacheEnabled; } public void setAllowCorsRequests(boolean allowCorsRequests) { this.allowCorsRequests = allowCorsRequests; } public void setTypeInformationIsIncluded(boolean typeInformationIsIncluded) { this.typeInformationIsIncluded = typeInformationIsIncluded; } }
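// --- Hedged usage sketch (not part of the original file) --------------------
// A minimal illustration of configuring the filter programmatically (for
// example in a test), using only the setters defined above instead of web.xml
// init-params or a properties file. The key, secret, URL and FilterConfig
// below are placeholders supplied by the surrounding test harness, not real
// credentials.
//
//   AuthorizationServerFilter filter = new AuthorizationServerFilter();
//   filter.setResourceServerKey("university-foo");
//   filter.setResourceServerSecret("<secret>");
//   filter.setAuthorizationServerUrl("http://localhost:8080/v1/tokeninfo");
//   filter.setCacheEnabled(false);
//   filter.init(filterConfigStub);   // stub/mock FilterConfig with no init-params
//   // Requests carrying "Authorization: bearer <token>" are then verified
//   // against the configured tokeninfo endpoint inside doFilter(...).
// -----------------------------------------------------------------------------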
4,783
729
<gh_stars>100-1000 /* * Copyright (c) 2011-2021, The DART development contributors * All rights reserved. * * The list of contributors can be found at: * https://github.com/dartsim/dart/blob/master/LICENSE * * This file is provided under the following "BSD-style" License: * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ #ifndef DART_UTILS_MJCF_DETAIL_TYPES_HPP_ #define DART_UTILS_MJCF_DETAIL_TYPES_HPP_ #include "dart/common/Platform.hpp" namespace dart { namespace utils { namespace MjcfParser { namespace detail { /// This attribute specifies whether the angles in the MJCF model are /// expressed in units of degrees or radians. The compiler converts degrees /// into radians, and mjModel always uses radians. For URDF models the parser /// sets this attribute to "radian" internally, regardless of the XML setting. enum class Angle { /// [Default] Always use for MJCF DEGREE = 0, /// Always use for URDF RADIAN = 1, }; /// This attribute specifies whether the frame positions and orientations in the /// MJCF model are expressed in local or global coordinates; recall Coordinate /// frames. The compiler converts global into local coordinates, and mjModel /// always uses local coordinates. For URDF models the parser sets this /// attribute to "local" internally, regardless of the XML setting. enum class Coordinate { /// [Default] LOCAL = 0, GLOBAL = 1, }; /// This attribute selects the numerical integrator to be used. Currently the /// available integrators are the semi-implicit Euler method and the fixed-step /// 4-th order Runge Kutta method. enum class Integrator { /// [Default] EULER = 0, RK4 = 1, }; enum class InertiaFromGeom { #if DART_OS_WINDOWS IFG_FALSE, IFG_TRUE, IFG_AUTO, #else FALSE, TRUE, AUTO, #endif }; enum class CollisionType { ALL, PREDEFINED, DYNAMIC, }; enum class ConeType { PYRAMIDAL, ELLIPTIC, }; enum class JacobianType { DENSE, SPARSE, AUTO, }; enum class SolverType { PGS, CG, NEWTON, }; enum class GeomType { PLANE, HFIELD, /// [Default] SPHERE, CAPSULE, ELLIPSOID, CYLINDER, BOX, MESH, }; enum class JointType { FREE, BALL, SLIDE, /// [Default] HINGE, }; } // namespace detail } // namespace MjcfParser } // namespace utils } // namespace dart #endif // #ifndef DART_UTILS_MJCF_DETAIL_TYPES_HPP_
1,213
547
<gh_stars>100-1000 { "$meta": { "label": "Spanish", "humanizer_language": "es" }, "restarter": { "start_timeout": "Se ha agotado el tiempo de espera para el arranque.", "crash_detected": "Caída del servidor detectada.", "hang_detected": "Suspensión detectada", "partial_hang_warn": "Debido a una suspensión parcial, el servidor se reiniciará en 1 minutos. Por favor, desconectate ahora.", "partial_hang_warn_discord": "Debido a una suspensión parcial, **%{servername}** se reiniciará en un minuto.", "schedule_reason": "Reinicio programado a las %{time}", "schedule_warn": "El servidor se va a reiniciar en %{smart_count} minutos. Por favor desconéctense. |||| El servidor se va a reiniciar en %{smart_count} minutos.", "schedule_warn_discord": "**%{servername}** El servidor se va a reiniciar en %{smart_count} minutos. |||| **%{servername}** El servidor se va a reiniciar en %{smart_count} minutos." }, "ban_messages": { "kick_temporary": "(%{author}) Has sido baneado del servidor por \"%{reason}\". Tu baneo expirará en: %{expiration}.", "kick_permanent": "(%{author}) Has sido baneado permanentemente del servidor por \"%{reason}\".", "reject_temporary": "Has sido baneado del servidor.\nTu baneo expirará en: %{expiration}.\nID del baneo: %{id}.\nRazón del baneo: %{reason}.\nBaneado por: %{author}.", "reject_permanent": "Has sido baneado permanentemente del servidor.\nID del baneo: %{id}.\nRazón del baneo: %{reason}.\nBaneado por: %{author}." }, "server_actions": { "restarting": "Reiniciando Servidor (%{reason}).", "restarting_discord": "**%{servername}** se está reiniciando: (%{reason}).", "stopping": "Apagando el servidor: (%{reason}).", "stopping_discord": "**%{servername}** se está apagando (%{reason}).", "spawning_discord": "**%{servername}** está iniciandose." }, "discord": { "status_online": "**%{servername}** se encuentra **Online**!", "status_offline": "**%{servername}** se encuentra **Offline**!" }, "nui_warning": { "title": "ADVERTENCIA", "warned_by": "Advertido por:", "instruction": "Presione [ESPACIO] durante 10 segundos para descartar este mensaje." } }
1,027
450
<filename>forest-common/src/main/java/com/zhizus/forest/common/codec/Response.java package com.zhizus.forest.common.codec; import com.zhizus.forest.common.exception.ForestErrorMsg; import java.io.Serializable; /** * Created by Dempe on 2016/12/7. */ public class Response implements Serializable { private int code = 0; private String errMsg = ""; private Object result; public Object getResult() { return result; } public void setResult(Object result) { this.result = result; } public String getErrMsg() { return errMsg; } public void setErrMsg(String errMsg) { this.errMsg = errMsg; } public void setForestErrorMsg(ForestErrorMsg forestErrorMsg) { this.errMsg = forestErrorMsg.getMessage(); this.code = forestErrorMsg.getErrorCode(); } public int getCode() { return code; } public void setCode(int code) { this.code = code; } @Override public String toString() { return "Response{" + "code=" + code + ", errMsg='" + errMsg + '\'' + ", result=" + result + '}'; } }
506
841
<reponame>jgoestl/cgeo package cgeo.geocaching.maps.mapsforge.v6.layers; import cgeo.geocaching.maps.mapsforge.v6.TapHandler; import org.mapsforge.core.graphics.Canvas; import org.mapsforge.core.model.BoundingBox; import org.mapsforge.core.model.LatLong; import org.mapsforge.core.model.Point; import org.mapsforge.map.layer.Layer; public class TapHandlerLayer extends Layer { private final TapHandler tapHandler; public TapHandlerLayer(final TapHandler tapHandler) { this.tapHandler = tapHandler; } @Override public void draw(final BoundingBox arg0, final byte arg1, final Canvas arg2, final Point arg3) { // nothing visible here } @Override public boolean onTap(final LatLong tapLatLong, final Point layerXY, final Point tapXY) { tapHandler.finished(); return true; } @Override public boolean onLongPress(final LatLong tapLatLong, final Point layerXY, final Point tapXY) { tapHandler.onLongPress(tapLatLong); tapHandler.finished(); return true; } }
387
531
// Fill out your copyright notice in the Description page of Project Settings. #pragma once #include "CoreMinimal.h" struct FMathStruct { static int32 Add(int32 A, int32 B); static float Add(float A, float B); };
69
521
<reponame>Fimbure/icebox-1
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is the Netscape Portable Runtime (NSPR).
 *
 * The Initial Developer of the Original Code is
 * Netscape Communications Corporation.
 * Portions created by the Initial Developer are Copyright (C) 1999-2000
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either the GNU General Public License Version 2 or later (the "GPL"), or
 * the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

/*
 * File: str2addr.c
 * Description: a test for PR_StringToNetAddr
 */

#include "nspr.h"
#include <stdio.h>
#include <stdlib.h>

/* Address string to convert */
#define DEFAULT_IPV4_ADDR_STR "172.16.17.32"

/* Expected conversion result, in network byte order */
static unsigned char default_ipv4_addr[] = {172, 16, 17, 32};

int main(int argc, char **argv)
{
    PRNetAddr addr;
    const char *addrStr;
    unsigned char *bytes;
    int idx;

    addrStr = DEFAULT_IPV4_ADDR_STR;
    if (PR_StringToNetAddr(addrStr, &addr) == PR_FAILURE) {
        fprintf(stderr, "PR_StringToNetAddr failed\n");
        exit(1);
    }
    if (addr.inet.family != PR_AF_INET) {
        fprintf(stderr, "addr.inet.family should be %d but is %d\n",
                PR_AF_INET, addr.inet.family);
        exit(1);
    }
    bytes = (unsigned char *) &addr.inet.ip;
    for (idx = 0; idx < 4; idx++) {
        if (bytes[idx] != default_ipv4_addr[idx]) {
            fprintf(stderr, "byte %d of IPv4 addr should be %d but is %d\n",
                    idx, default_ipv4_addr[idx], bytes[idx]);
            exit(1);
        }
    }
    printf("PASS\n");
    return 0;
}
1,043
621
<reponame>fengjixuchui/VivienneVMM #include "tests.h" #include <cstdio> #include "arbitrary_code.h" #include "test_util.h" #include "..\common\time_util.h" #include "..\VivienneCL\driver_io.h" #include "..\VivienneCL\ntdll.h" #include "..\VivienneCL\token_parser.h" //============================================================================= // Constants //============================================================================= // // This value must be synchronized with arbitrary_code!g_CecmFpuStateSentinel. // #define SENTINEL_FLOAT_VALUE 77777.75f #define FLOAT_PRNG_VALUE 1348.151251f #define CECM_TARGET_TEXT "rcx+rax*4" #define NUMBER_OF_UNIQUE_RANDOM_VALUES 16 #define NUMBER_OF_THREADS 4 #define DEBUG_REGISTER_INDEX 1 #define CONTEXT_BUFFER_SIZE (PAGE_SIZE) #define REQUEST_DURATION_MS (SECONDS_TO_MILLISECONDS(10)) #define WAIT_TIMEOUT_MS (SECONDS_TO_MILLISECONDS(10)) //============================================================================= // Types //============================================================================= typedef struct _FPU_STATE_CONTEXT { HANDLE BarrierEvent; BOOLEAN Active; FLOAT RandomValues[NUMBER_OF_UNIQUE_RANDOM_VALUES]; } FPU_STATE_CONTEXT, *PFPU_STATE_CONTEXT; //============================================================================= // Internal Interface //============================================================================= // // ExerciseCecm // static DWORD WINAPI ExerciseCecm( _In_ LPVOID lpParameter ) { PFPU_STATE_CONTEXT pContext = (PFPU_STATE_CONTEXT)lpParameter; FLOAT SentinelCheck = 0; SIZE_T ValueIndex = 0; DWORD waitstatus = 0; DWORD status = ERROR_SUCCESS; // // Wait until all threads have been created. // waitstatus = WaitForSingleObject(pContext->BarrierEvent, WAIT_TIMEOUT_MS); if (WAIT_OBJECT_0 != waitstatus) { FAIL_TEST("WaitForSingleObject failed: %u\n", GetLastError()); } while (pContext->Active) { ValueIndex = (ValueIndex + 1) % ARRAYSIZE(pContext->RandomValues); SentinelCheck = AcCaptureMemoryFpuState( pContext->RandomValues, ARRAYSIZE(pContext->RandomValues), ValueIndex); if (SENTINEL_FLOAT_VALUE != SentinelCheck) { FAIL_TEST( "Invalid sentinel float: %f (tid = %u)\n", SentinelCheck, GetCurrentThreadId()); } (VOID)NtYieldExecution(); } return status; } // // InitializeFpuStateContext // _Check_return_ static BOOL InitializeFpuStateContext( _Out_ PFPU_STATE_CONTEXT pContext ) { HANDLE BarrierEvent = NULL; BOOL status = TRUE; // Zero out parameters. RtlSecureZeroMemory(pContext, sizeof(*pContext)); BarrierEvent = CreateEventW(NULL, TRUE, FALSE, NULL); if (!BarrierEvent) { printf("CreateEvent failed: %u\n", GetLastError()); status = FALSE; goto exit; } // // Manually generate float values. // for (ULONG i = 0; i < ARRAYSIZE(pContext->RandomValues); ++i) { pContext->RandomValues[i] = (FLOAT)((i + 1) * FLOAT_PRNG_VALUE); printf(" %02u: %f\n", i, pContext->RandomValues[i]); } // Set out parameters. 
pContext->BarrierEvent = BarrierEvent; pContext->Active = TRUE; exit: if (!status) { if (BarrierEvent) { if (!CloseHandle(BarrierEvent)) { printf("CloseHandle failed: %u\n", GetLastError()); } } } return status; } // // ReleaseFpuStateContext // _Check_return_ static BOOL ReleaseFpuStateContext( _In_ PFPU_STATE_CONTEXT pContext ) { BOOL status = TRUE; if (pContext->BarrierEvent) { if (!CloseHandle(pContext->BarrierEvent)) { printf("CloseHandle failed: %u\n", GetLastError()); status = FALSE; } } return status; } // // ExecuteCecmForInstructionAddress // static BOOL ExecuteCecmForInstructionAddress( _In_ ULONG_PTR Address, _In_ PCEC_MEMORY_DESCRIPTION pMemoryDescription ) { FPU_STATE_CONTEXT Context = {}; PCEC_MEMORY_VALUES pValuesCtx = NULL; DWORD ThreadIds[NUMBER_OF_THREADS] = {}; HANDLE hThreads[NUMBER_OF_THREADS] = {}; PMEMORY_DATA_VALUE pMemoryDataValue = NULL; DWORD waitstatus = 0; BOOL status = FALSE; printf("Executing CECM fpu state validation for 0x%IX\n", Address); // // Initialize the exercise thread context. // status = InitializeFpuStateContext(&Context); if (!status) { printf("InitializeFpuStateContext failed: %u\n", GetLastError()); goto exit; } // // Allocate the captured context buffer. // pValuesCtx = (PCEC_MEMORY_VALUES)HeapAlloc( GetProcessHeap(), HEAP_ZERO_MEMORY, CONTEXT_BUFFER_SIZE); if (!pValuesCtx) { printf("HeapAlloc failed: %u\n", GetLastError()); status = FALSE; goto exit; } printf("Creating %Iu exercise threads...\n", ARRAYSIZE(hThreads)); // // Create threads which exercise the code from which we will be capturing // memory context. // for (ULONG i = 0; i < ARRAYSIZE(hThreads); ++i) { hThreads[i] = CreateThread( NULL, 0, ExerciseCecm, &Context, 0, &ThreadIds[i]); if (!hThreads[i]) { printf("CreateThread failed: %u\n", GetLastError()); status = FALSE; goto exit; } printf(" tid: %u (0x%X)\n", ThreadIds[i], ThreadIds[i]); } // // Activate the exercise threads. // status = SetEvent(Context.BarrierEvent); if (!status) { printf("SetEvent failed: %u\n", GetLastError()); goto exit; } printf( "Requesting memory values for [%s] at 0x%IX\n", CECM_TARGET_TEXT, Address); // // Issue the synchronous CECM request. // status = VivienneIoCaptureMemoryValues( GetCurrentProcessId(), DEBUG_REGISTER_INDEX, Address, HWBP_TYPE::Execute, HWBP_SIZE::Byte, pMemoryDescription, REQUEST_DURATION_MS, pValuesCtx, CONTEXT_BUFFER_SIZE); if (!status) { printf("VivienneIoCaptureMemoryValues failed: %u\n", GetLastError()); goto exit; } // // Lazily signal that all threads should terminate. // Context.Active = FALSE; // // Wait for all threads to terminate. // waitstatus = WaitForMultipleObjects( ARRAYSIZE(hThreads), hThreads, TRUE, WAIT_TIMEOUT_MS); if (waitstatus < WAIT_OBJECT_0 || waitstatus >= ARRAYSIZE(hThreads)) { printf("WaitForMultipleObjects failed: %u\n", GetLastError()); status = FALSE; goto exit; } // // Print the results. // printf( "Cecm request completed with %u unique values:\n", pValuesCtx->NumberOfValues); for (ULONG i = 0; i < pValuesCtx->NumberOfValues; ++i) { pMemoryDataValue = (PMEMORY_DATA_VALUE)(&pValuesCtx->Values[i]); printf(" %02u: %f\n", i, pMemoryDataValue->Float); } // // Examine the captured unique register values. The returned list of values // should be equal to the random value array generated earlier. 
// if (ARRAYSIZE(Context.RandomValues) != pValuesCtx->NumberOfValues) { printf( "Unexpected number of captured values: actual = %u, expected = %Iu\n", pValuesCtx->NumberOfValues, ARRAYSIZE(Context.RandomValues)); status = FALSE; goto exit; } // // Verify that the returned list of values matches the random values in the // thread context. // for (SIZE_T i = 0; i < pValuesCtx->NumberOfValues; ++i) { PMEMORY_DATA_VALUE pValuesMdt = (PMEMORY_DATA_VALUE)&pValuesCtx->Values[i]; BOOLEAN ValueFound = FALSE; for (SIZE_T j = 0; j < ARRAYSIZE(Context.RandomValues); ++j) { PMEMORY_DATA_VALUE pRandomMdt = (PMEMORY_DATA_VALUE)&Context.RandomValues[j]; if (pRandomMdt->Float == pValuesMdt->Float) { // // Check for duplicate matches. // if (ValueFound) { printf("Duplicate value found.\n"); status = FALSE; goto exit; } ValueFound = TRUE; } } // // If we do not match a value then our captured context is bugged. // if (!ValueFound) { printf("Unmatched value: %f.\n", pValuesMdt->Double); status = FALSE; goto exit; } } // // Verify that there were no validation errors. // if (pValuesCtx->Statistics.ValidationErrors) { printf( "Unexpected number of validation errors: %Iu\n", pValuesCtx->Statistics.ValidationErrors); status = FALSE; goto exit; } // // Verify that all debug registers on all processors were cleared. // status = AreAllHardwareBreakpointsCleared(); if (!status) { printf("Failed to clear a hardware breakpoint.\n"); goto exit; } exit: for (ULONG i = 0; i < ARRAYSIZE(hThreads); ++i) { if (hThreads[i]) { #pragma warning(suppress : 6001) // Using uninitialized memory. if (!CloseHandle(hThreads[i])) { printf("CloseHandle failed: %u\n", GetLastError()); status = FALSE; } } } if (pValuesCtx) { if (!HeapFree(GetProcessHeap(), 0, pValuesCtx)) { printf("HeapFree failed: %u\n", GetLastError()); status = FALSE; } } if (!ReleaseFpuStateContext(&Context)) { printf("ReleaseFpuStateContext failed: %u\n", GetLastError()); status = FALSE; } return status; } //============================================================================= // Test Interface //============================================================================= // // TestCaptureMemoryValuesFpuState // // This test verifies that installing hardware breakpoints (via a CECM request) // does not corrupt SSE/AVX registers. // VOID TestCaptureMemoryValuesFpuState() { CEC_MEMORY_DESCRIPTION MemoryDescription = {}; BOOL status = TRUE; PRINT_TEST_HEADER; // // Initialize the target memory description. // status = ParseMemoryDescriptionToken( CECM_TARGET_TEXT, MDT_FLOAT, &MemoryDescription); if (!status) { FAIL_TEST("ParseMemoryDescriptionToken failed.\n"); } // // Execute a CECM request for a few different addresses. // status = ExecuteCecmForInstructionAddress( (ULONG_PTR)&g_AcCecmFpuStateCaptureAddress1, &MemoryDescription); if (!status) { FAIL_TEST("ExecuteCecmForInstructionAddress failed. (1)\n"); } status = ExecuteCecmForInstructionAddress( (ULONG_PTR)&g_AcCecmFpuStateCaptureAddress2, &MemoryDescription); if (!status) { FAIL_TEST("ExecuteCecmForInstructionAddress failed. (2)\n"); } status = ExecuteCecmForInstructionAddress( (ULONG_PTR)&g_AcCecmFpuStateCaptureAddress3, &MemoryDescription); if (!status) { FAIL_TEST("ExecuteCecmForInstructionAddress failed. (3)\n"); } PRINT_TEST_FOOTER; }
5,295
338
package com.tvd12.ezyfoxserver.testing; import java.util.List; import org.testng.annotations.Test; import com.tvd12.ezyfox.collect.Lists; import com.tvd12.test.base.BaseTest; import com.tvd12.test.performance.Performance; public class ForeachPerformanceTest extends BaseTest { @SuppressWarnings("unused") @Test public void test() { List<String> list = Lists.newArrayList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"); long time1 = Performance.create() .test(()-> { list.forEach(s -> {String s1 = s + 1;}); }) .getTime(); long time2 = Performance.create() .test(()-> { for(String s : list) { String s1 = s + 1; } }) .getTime(); System.out.println("time1 = " + time1); System.out.println("time2 = " + time2); } @Test public void test2() { List<String> list1 = Lists.newArrayList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"); List<String> list2 = Lists.newArrayList("0", "1", "2", "3", "4", "5", "6", "7", "8", "9"); long time1 = Performance.create() .test(()-> { list1.addAll(list2); }) .getTime(); long time2 = Performance.create() .test(()-> { list2.forEach(s -> list1.add(s)); }) .getTime(); System.out.println("time1 = " + time1); System.out.println("time2 = " + time2); } }
886
1,045
package com.amazonaws.kinesisvideo.demoapp;

import android.content.Context;

import org.junit.Test;
import org.junit.runner.RunWith;

import static org.junit.Assert.assertEquals;

/**
 * Instrumentation test, which will execute on an Android device.
 *
 * @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
 */
public class ExampleInstrumentedTest {
    /**
     * Instrumented test example.
     */
    @Test
    public void useAppContext() {
        // Context of the app under test.
    }
}
195
1,108
<reponame>jaffrey007/sql2o package org.sql2o.issues.pojos; /** * Created by IntelliJ IDEA. * User: lars * Date: 10/17/11 * Time: 9:08 PM * To change this template use File | Settings | File Templates. */ public class Issue1Pojo { public int val; public void setVal(int val) { this.val = val + 1; } }
133
309
# -*- coding: utf-8 -*- r""" The kl-UCB-switch policy, for bounded distributions. - Reference: [Garivier et al, 2018](https://arxiv.org/abs/1805.05071) """ from __future__ import division, print_function # Python 2 compatibility __author__ = "<NAME>" __version__ = "0.9" from math import log, sqrt import numpy as np np.seterr(divide='ignore') # XXX dangerous in general, controlled here! try: from .kullback import klucbBern from .klUCB import klUCB, c except ImportError: from kullback import klucbBern from klUCB import klUCB, c #: Default value for the tolerance for computing numerical approximations of the kl-UCB indexes. TOLERANCE = 1e-4 # --- different threshold functions def threshold_switch_bestchoice(T, K, gamma=1.0/5): r""" The threshold function :math:`f(T, K)`, to know when to switch from using :math:`I^{KL}_k(t)` (kl-UCB index) to using :math:`I^{MOSS}_k(t)` (MOSS index). .. math:: f(T, K) := \lfloor (T / K)^{\gamma} \rfloor, \gamma = 1/5. """ return np.floor((T / float(K)) ** gamma) def threshold_switch_delayed(T, K, gamma=8.0/9): r""" Another threshold function :math:`f(T, K)`, to know when to switch from using :math:`I^{KL}_k(t)` (kl-UCB index) to using :math:`I^{MOSS}_k(t)` (MOSS index). .. math:: f(T, K) := \lfloor (T / K)^{\gamma} \rfloor, \gamma = 8/9. """ return np.floor((T / float(K)) ** gamma) threshold_switch_default = threshold_switch_bestchoice # --- Numerical functions required for the indexes for kl-UCB-switch def klucbplus_index(reward, pull, horizon, nbArms, klucb=klucbBern, c=c, tolerance=TOLERANCE): r""" One kl-UCB+ index, from [Cappé et al. 13](https://arxiv.org/pdf/1210.1136.pdf): .. math:: \hat{\mu}_k(t) &= \frac{X_k(t)}{N_k(t)}, \\ I^{KL+}_k(t) &= \sup\limits_{q \in [a, b]} \left\{ q : \mathrm{kl}(\hat{\mu}_k(t), q) \leq \frac{c \log(T / (K * N_k(t)))}{N_k(t)} \right\}. """ return klucb(reward / pull, c * log(horizon / (nbArms * pull)) / pull, tolerance) # def klucbplus_indexes(rewards, pulls, horizon, nbArms, klucb=klucbBern, c=c, tolerance=TOLERANCE): # r""" The kl-UCB+ indexes, from [Cappé et al. 13](https://arxiv.org/pdf/1210.1136.pdf): # .. math:: # \hat{\mu}_k(t) &= \frac{X_k(t)}{N_k(t)}, \\ # I^{KL+}_k(t) &= \sup\limits_{q \in [a, b]} \left\{ q : \mathrm{kl}(\hat{\mu}_k(t), q) \leq \frac{c \log(T / (K * N_k(t)))}{N_k(t)} \right\}. # """ # return klucb(rewards / pulls, c * np.log(horizon / (nbArms * pulls)) / pulls, tolerance) def mossplus_index(reward, pull, horizon, nbArms): r""" One MOSS+ index, from [Audibert & Bubeck, 2010](http://www.jmlr.org/papers/volume11/audibert10a/audibert10a.pdf): .. math:: I^{MOSS+}_k(t) = \frac{X_k(t)}{N_k(t)} + \sqrt{\max\left(0, \frac{\log\left(\frac{T}{K N_k(t)}\right)}{N_k(t)}\right)}. """ return (reward / pull) + sqrt(max(0, log(horizon / (nbArms * pull))) / (2 * pull)) # def mossplus_indexes(rewards, pulls, horizon, nbArms): # r""" The MOSS+ indexes, from [Audibert & Bubeck, 2010](http://www.jmlr.org/papers/volume11/audibert10a/audibert10a.pdf): # .. math:: # I^{MOSS+}_k(t) = \frac{X_k(t)}{N_k(t)} + \sqrt{\max\left(0, \frac{\log\left(\frac{T}{K N_k(t)}\right)}{N_k(t)}\right)}. # """ # return (rewards / pulls) + np.sqrt(np.maximum(0, np.log(horizon / (nbArms * pulls))) / (2 * pulls)) # --- Classes class klUCBswitch(klUCB): """ The kl-UCB-switch policy, for bounded distributions. - Reference: [Garivier et al, 2018](https://arxiv.org/abs/1805.05071) """ def __init__(self, nbArms, horizon=None, threshold="best", tolerance=TOLERANCE, klucb=klucbBern, c=c, lower=0., amplitude=1. 
                 ):
        super(klUCBswitch, self).__init__(nbArms, tolerance=tolerance, klucb=klucb, c=c, lower=lower, amplitude=amplitude)
        assert horizon is not None, "Error: the klUCBswitch policy requires knowledge of the horizon T. Use klUCBswitchAnytime if you need an anytime variant."  # DEBUG
        assert horizon >= 1, "Error: the horizon T should be >= 1."  # DEBUG
        self.horizon = horizon  #: Parameter :math:`T` = known horizon of the experiment.
        # A function, like :func:`threshold_switch`, of T and K, to decide when to switch from kl-UCB indexes to MOSS indexes (for each arm).
        self._threshold_switch_name = "?"
        if isinstance(threshold, str):
            self._threshold_switch_name = ""
            if "best" in threshold:
                threshold_switch = threshold_switch_bestchoice
            elif "delayed" in threshold:
                threshold_switch = threshold_switch_delayed
                self._threshold_switch_name = "delayed f"
            else:
                threshold_switch = threshold_switch_default
        else:
            threshold_switch = threshold
            self._threshold_switch_name = threshold.__name__
        #: For klUCBswitch (not the anytime variant), we can precompute the threshold as it is constant, :math:`= f(T, K)`.
        self.constant_threshold_switch = threshold_switch(self.horizon, self.nbArms)
        #: Initialize internal memory: at first, every arm uses the kl-UCB index, then some will switch to MOSS. (Array of K bool).
        self.use_MOSS_index = np.zeros(nbArms, dtype=bool)

    def __str__(self):
        name = "" if self.klucb.__name__[5:] == "Bern" else self.klucb.__name__[5:] + ", "
        complement = "$T={}${}{}{}".format(self.horizon, name, "" if self.c == 1 else r", $c={:.3g}$".format(self.c), "" if self._threshold_switch_name == "" else ", {}".format(self._threshold_switch_name))
        return r"kl-UCB-switch({})".format(complement)

    def computeIndex(self, arm):
        r""" Compute the current index, at time t and after :math:`N_k(t)` pulls of arm k:

        .. math::

            U_k(t) = \begin{cases}
                U^{KL+}_k(t) & \text{if } N_k(t) \leq f(T, K), \\
                U^{MOSS+}_k(t) & \text{if } N_k(t) > f(T, K).
            \end{cases}.

        - It starts by using :func:`klucbplus_index`, then it calls :func:`threshold_switch` to know when to stop and start using :func:`mossplus_index`.
        """
        if self.pulls[arm] < 1:
            return float('+inf')
        elif self.use_MOSS_index[arm]:
            # no need to compute the threshold, we already use the MOSS index
            return mossplus_index(self.rewards[arm], self.pulls[arm], self.horizon, self.nbArms)
        else:
            if self.pulls[arm] > self.constant_threshold_switch:
                self.use_MOSS_index[arm] = True
                return mossplus_index(self.rewards[arm], self.pulls[arm], self.horizon, self.nbArms)
            else:
                # default is to use kl-UCB index
                return klucbplus_index(self.rewards[arm], self.pulls[arm], self.horizon, self.nbArms, klucb=self.klucb, c=self.c, tolerance=self.tolerance)

    # def computeAllIndex(self):
    #     """ Compute the current indexes for all arms, in a vectorized manner."""
    #     # XXX I don't think I could hack numpy operations to be faster than a loop for this algorithm
    #     indexes = FIXME
    #     indexes[self.pulls < 1] = float('+inf')
    #     self.index[:] = indexes


# --- Numerical functions required for the indexes for anytime variant kl-UCB-switch

def logplus(x):
    r""" The :math:`\log_+` function.

    .. math:: \log_+(x) := \max(0, \log(x)).
    """
    return max(0, log(x))


# def logplus_vect(x):
#     r""" The :math:`\log_+` function.
#     .. math:: \log_+(x) := \max(0, \log(x)).
#     """
#     return np.maximum(0, np.log(x))


def phi(x):
    r""" The :math:`\phi(x)` function defined in equation (6) in their paper.

    .. math:: \phi(x) := \log_+(x (1 + (\log_+(x))^2)).
""" return logplus(x * (1 + (logplus(x))**2)) # def phi_vect(x): # r""" The :math:`\phi(x)` function defined in equation (6) in their paper. # .. math:: \phi(x) := \log_+(x (1 + (\log_+(x))^2)). # """ # return logplus_vect(x * (1 + (logplus_vect(x))**2)) def klucb_index(reward, pull, t, nbArms, klucb=klucbBern, c=c, tolerance=TOLERANCE): r""" One kl-UCB index, from [Garivier & Cappé - COLT, 2011](https://arxiv.org/pdf/1102.2490.pdf): .. math:: \hat{\mu}_k(t) &= \frac{X_k(t)}{N_k(t)}, \\ I^{KL}_k(t) &= \sup\limits_{q \in [a, b]} \left\{ q : \mathrm{kl}(\hat{\mu}_k(t), q) \leq \frac{c \log(t / N_k(t))}{N_k(t)} \right\}. """ return klucb(reward / pull, c * phi(t / (nbArms * pull)) / pull, tolerance) # def klucb_indexes(rewards, pulls, t, nbArms, klucb=klucbBern, c=c, tolerance=TOLERANCE): # r""" The kl-UCB indexes, from [Garivier & Cappé - COLT, 2011](https://arxiv.org/pdf/1102.2490.pdf): # .. math:: # \hat{\mu}_k(t) &= \frac{X_k(t)}{N_k(t)}, \\ # I^{KL}_k(t) &= \sup\limits_{q \in [a, b]} \left\{ q : \mathrm{kl}(\hat{\mu}_k(t), q) \leq \frac{c \log(t / N_k(t))}{N_k(t)} \right\}. # """ # return klucb(rewards / pulls, c * phi_vect(t / (nbArms * pulls)) / pulls, tolerance) def moss_index(reward, pull, t, nbArms): r""" One MOSS index, from [<NAME>, 2010](http://www.jmlr.org/papers/volume11/audibert10a/audibert10a.pdf): .. math:: I^{MOSS}_k(t) = \frac{X_k(t)}{N_k(t)} + \sqrt{\max\left(0, \frac{\log\left(\frac{t}{K N_k(t)}\right)}{N_k(t)}\right)}. """ return (reward / pull) + sqrt(phi(log(t / (nbArms * pull))) / (2 * pull)) # def moss_indexes(rewards, pulls, t, nbArms): # r""" The MOSS indexes, from [<NAME>, 2010](http://www.jmlr.org/papers/volume11/audibert10a/audibert10a.pdf): # .. math:: # I^{MOSS}_k(t) &= \frac{X_k(t)}{N_k(t)} + \sqrt{\max\left(0, \frac{\log\left(\frac{t}{K N_k(t)}\right)}{N_k(t)}\right)}. # """ # return (rewards / pulls) + np.sqrt(phi_vect(np.log(t / (nbArms * pulls))) / (2 * pulls)) # --- Anytime variant class klUCBswitchAnytime(klUCBswitch): r""" The anytime variant of the kl-UCB-switch policy, for bounded distributions. - It does not use a doubling trick, but an augmented exploration function (replaces the :math:`\log_+` by :math:`\phi` in both :func:`klucb_index` and :func:`moss_index` from :func:`klucbplus_index` and :func:`mossplus_index`). - Reference: [Garivier et al, 2018](https://arxiv.org/abs/1805.05071) """ def __init__(self, nbArms, threshold="delayed", tolerance=TOLERANCE, klucb=klucbBern, c=c, lower=0., amplitude=1. ): super(klUCBswitchAnytime, self).__init__(nbArms, horizon=float('+inf'), threshold=threshold, tolerance=tolerance, klucb=klucb, c=c, lower=lower, amplitude=amplitude) self._threshold_switch_name = "?" if isinstance(threshold, str): self._threshold_switch_name = "" if "best" in threshold: threshold_switch = threshold_switch_bestchoice elif "delayed" in threshold: threshold_switch = threshold_switch_delayed self._threshold_switch_name = "delayed f" else: threshold_switch = threshold_switch_default else: threshold_switch = threshold self._threshold_switch_name = threshold.__name__ #: A function, like :func:`threshold_switch`, of T and K, to decide when to switch from kl-UCB indexes to MOSS indexes (for each arm). 
        self.threshold_switch = threshold_switch

    def __str__(self):
        name = "" if self.klucb.__name__[5:] == "Bern" else self.klucb.__name__[5:] + ", "
        complement = "{}{}{}".format(name, "" if self.c == 1 else r", $c={:.3g}$".format(self.c), "" if self._threshold_switch_name == "" else ", {}".format(self._threshold_switch_name))
        if complement.startswith(", "):
            complement = complement.replace(", ", "", 1)
        complement = "({})".format(complement) if complement != "" else ""
        return r"kl-UCB-switch{}".format(complement)

    def computeIndex(self, arm):
        r""" Compute the current index, at time t and after :math:`N_k(t)` pulls of arm k:

        .. math::

            U_k(t) = \begin{cases}
                U^{KL}_k(t) & \text{if } N_k(t) \leq f(t, K), \\
                U^{MOSS}_k(t) & \text{if } N_k(t) > f(t, K).
            \end{cases}.

        - It starts by using :func:`klucb_index`, then it calls :func:`threshold_switch` to know when to stop and start using :func:`moss_index`.
        """
        if self.pulls[arm] < 1:
            return float('+inf')
        elif self.use_MOSS_index[arm]:
            # no need to compute the threshold, we already use the MOSS index
            return moss_index(self.rewards[arm], self.pulls[arm], self.t, self.nbArms)
        else:
            if self.pulls[arm] > self.threshold_switch(self.t, self.nbArms):
                self.use_MOSS_index[arm] = True
                return moss_index(self.rewards[arm], self.pulls[arm], self.t, self.nbArms)
            else:
                # default is to use kl-UCB index
                return klucb_index(self.rewards[arm], self.pulls[arm], self.t, self.nbArms, klucb=self.klucb, c=self.c, tolerance=self.tolerance)
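# --- Hedged usage sketch (not part of the original file) --------------------
# A minimal illustration of exercising these index policies, assuming the
# SMPyBandits-style package layout implied by the imports at the top of this
# module. The Bernoulli arm means and the reward loop are made-up numbers for
# illustration only; a real experiment would use the framework's Evaluator and
# environment classes.
#
#   import numpy.random as rn
#   means = [0.1, 0.5, 0.9]
#   policy = klUCBswitch(nbArms=len(means), horizon=1000)
#   # the klUCB/IndexPolicy base class is assumed to provide startGame(),
#   # choice() and getReward(arm, reward) to update self.pulls / self.rewards
#   policy.startGame()
#   for t in range(1000):
#       arm = policy.choice()
#       reward = float(rn.random() < means[arm])
#       policy.getReward(arm, reward)
#   # per-arm indexes switch from kl-UCB+ to MOSS+ once N_k(t) > f(T, K)
#   print([policy.computeIndex(k) for k in range(len(means))])
# -----------------------------------------------------------------------------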
6,054
421
{ "featured_image_for_themes": { "name": "Featured image for themes", "description": "Featured image for posts and pages if it does not function by default", "value": "0" }, "flush_rewrite_rules": { "name": "Flush rewrite rules", "description": "flush rewrite rules", "value": "1" } }
147
3,570
<reponame>Diffblue-benchmarks/Microsoft-malmo<filename>scripts/python-wheel/backwards-compatible-imports/MalmoPython.py<gh_stars>1000+ from malmo.MalmoPython import *
55
511
/**************************************************************************** * * Copyright 2019 Samsung Electronics All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific * language governing permissions and limitations under the License. * ****************************************************************************/ #ifndef __UI_WIDGET_INTERNAL_H__ #define __UI_WIDGET_INTERNAL_H__ #include <tinyara/config.h> #include <stdint.h> #include <stdbool.h> #include <vec/vec.h> #include <araui/ui_commons.h> #include <araui/ui_widget.h> #include <araui/ui_animation.h> #include "ui_asset_internal.h" #include "ui_widget_internal.h" #include "ui_commons_internal.h" #include "ui_renderer.h" #define CONFIG_UI_MAX_WIDGET_NUM 100 enum { UV_TOP_LEFT, UV_BOTTOM_LEFT, UV_BOTTOM_RIGHT, UV_TOP_RIGHT }; typedef struct ui_widget_body_s ui_widget_body_t; typedef void (*add_callback)(ui_widget_t widget); typedef void (*remove_callback)(ui_widget_t widget); typedef void (*draw_callback)(ui_widget_t widget, uint32_t dt); typedef float (*easing_callback)(float t, float b, float c, float d); typedef void (*touch_callback)(ui_widget_body_t *widget, ui_touch_event_t event, ui_coord_t coord); typedef void (*update_callback)(ui_widget_t widget, uint32_t dt); typedef enum { UI_EMPTY_WIDGET, UI_IMAGE_WIDGET, UI_TEXT_WIDGET, UI_BUTTON_WIDGET, UI_SCROLL_WIDGET, UI_PAGINATOR_WIDGET, UI_QUICK_PANEL } ui_widget_type_t; typedef struct { uint32_t timeout; uint32_t current; } interval_info_t; typedef struct { ui_coord_t down_coord; ui_coord_t move_coord; } ui_touch_info_t; struct ui_widget_body_s { ui_widget_type_t type; bool visible; #if defined(CONFIG_UI_ENABLE_TOUCH) bool touchable; bool is_hooker; #endif ui_rect_t local_rect; //!< Widget's local position and size ui_rect_t global_rect; //!< Widget's global(absolute) position and size (size is same with the local's) float scale_x; float scale_y; int32_t degree; int32_t pivot_x; int32_t pivot_y; ui_mat3_t trans_mat; bool update_flag; struct ui_widget_body_s *parent; vec_void_t children; /** * @brief Public callback functions */ tick_callback tick_cb; interval_callback interval_cb; /** * @brief Internal callback functions */ add_callback add_cb; remove_callback remove_cb; draw_callback render_cb; update_callback update_cb; anim_finished_callback anim_finished_cb; ui_anim_t *anim; #if defined(CONFIG_UI_ENABLE_TOUCH) ui_touch_info_t touch_info; touch_callback touch_cb; #endif interval_info_t interval_info; void *userdata; }; typedef struct { ui_widget_body_t base; ui_image_asset_body_t *image; ui_uv_t uv[4]; // top-left, bottom-left, bottom-right, top-right } ui_image_widget_body_t; typedef struct { ui_widget_body_t base; ui_font_asset_body_t *font; size_t font_size; ui_color_t font_color; uint32_t *utf_code; uint32_t *width_array; size_t text_length; size_t line_num; ui_align_t align; bool word_wrap; } ui_text_widget_body_t; typedef struct { ui_widget_body_t base; bool pressed; #if defined(CONFIG_UI_ENABLE_TOUCH) button_touched_callback touched_cb; #endif } ui_button_widget_body_t; typedef enum { UI_SCROLL_STATE_NONE, //!< If touch down, state will 
be changed to UI_SCROLL_STATE_THRESHOLD UI_SCROLL_STATE_THRESHOLD, //!< If the moving delta position is over the threshold, state be changed to UI_SCROLL_STATE_SCROLLING UI_SCROLL_STATE_SCROLLING //!< If touch up, state will be changed to UI_SCROLL_STATE_NONE } ui_scroll_state_t; typedef struct { ui_widget_body_t base; //!< Scroll widget's primitive variables int32_t content_width; int32_t content_height; ui_direction_t direction; ui_scroll_state_t state; ui_coord_t offset; ui_coord_t prev_offset; /** * @brief All children of the scroll widget can be moved (min_offset.x ~ 0), (min_offset.y ~ 0) * min_offset values are negative integers. */ ui_coord_t min_offset; ui_reach_offset_action_type_t min_offset_reach; ui_reach_offset_action_type_t max_offset_reach; #if defined(CONFIG_UI_ENABLE_TOUCH) ui_coord_t prev_touch; //!< This variables are updated when every frame by using touch delta value and the delta time float scroll_velocity_x; float scroll_velocity_y; #endif // CONFIG_UI_ENABLE_TOUCH } ui_scroll_widget_body_t; typedef enum { UI_PAGINATOR_STATE_NONE, UI_PAGINATOR_STATE_THRESHOLD, UI_PAGINATOR_STATE_SCROLLING, UI_PAGINATOR_STATE_TWEEN_NEXT, UI_PAGINATOR_STATE_TWEEN_PREV, UI_PAGINATOR_STATE_TWEEN_CUR } ui_paginator_state_t; typedef struct { ui_widget_body_t base; ui_direction_t direction; ui_paginator_state_t state; uint32_t page_count; int32_t cur_page_num; ui_widget_body_t **pages; ui_widget_body_t *cur_page; ui_widget_body_t *next_page; ui_widget_body_t *prev_page; int32_t offset; #if defined(CONFIG_UI_ENABLE_TOUCH) ui_coord_t prev_touch; #endif // CONFIG_UI_ENABLE_TOUCH } ui_paginator_widget_body_t; typedef enum { UI_QUICK_PANEL_STATE_NONE, UI_QUICK_PANEL_STATE_THRESHOLD, UI_QUICK_PANEL_STATE_TRANSITION, UI_QUICK_PANEL_STATE_AT_SCREEN } ui_quick_panel_state_t; typedef struct { ui_widget_body_t base; bool press; ui_coord_t touch_down; ui_quick_panel_state_t state; ui_quick_panel_event_type_t event_type; ui_transition_type_t transition_type; ui_widget_body_t *focus; } ui_quick_panel_body_t; typedef struct { ui_widget_body_t *queue[CONFIG_UI_MAX_WIDGET_NUM]; int start; int end; } ui_widget_body_queue_t; #ifdef __cplusplus extern "C" { #endif bool ui_widget_check_widget_type(ui_widget_t widget, ui_widget_type_t type); ui_error_t ui_widget_update_position_info(ui_widget_body_t *widget); void ui_widget_init(ui_widget_body_t *body, int32_t width, int32_t height); void ui_widget_deinit(ui_widget_body_t *body); void ui_widget_update_global_rect(ui_widget_body_t *widget); ui_error_t ui_widget_destroy_sync(ui_widget_body_t *body); ui_error_t ui_widget_set_position_sync(ui_widget_body_t *body, int32_t x, int32_t y); ui_error_t ui_widget_set_rotation_sync(ui_widget_body_t *body, int32_t degree); ui_error_t ui_widget_set_scale_sync(ui_widget_body_t *body, uint32_t scale_x, uint32_t scale_y); ui_widget_body_t *ui_widget_search_by_coord(ui_widget_body_t *widget, ui_coord_t coord); void ui_quick_panel_disappear_tween_end_func(ui_widget_t widget, ui_anim_t anim); void ui_quick_panel_appear_tween_end_func(ui_widget_t widget, ui_anim_t anim); void ui_widget_queue_init(void); bool ui_widget_is_queue_empty(void); void ui_widget_queue_enqueue(ui_widget_body_t *body); ui_widget_body_t *ui_widget_queue_dequeue(void); #ifdef __cplusplus } #endif #endif
2,829
1,587
<gh_stars>1000+ package io.reflectoring.solid.isp; interface IAdapterOrderForBurger { void orderBurger(int quantity); }
42
2,073
<reponame>dvhvsekhar/activemq<filename>activemq-client/src/main/java/org/apache/activemq/blob/FTPBlobDownloadStrategy.java /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.blob; import java.io.FilterInputStream; import java.io.IOException; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import javax.jms.JMSException; import org.apache.activemq.command.ActiveMQBlobMessage; import org.apache.commons.net.ftp.FTPClient; /** * A FTP implementation for {@link BlobDownloadStrategy}. */ public class FTPBlobDownloadStrategy extends FTPStrategy implements BlobDownloadStrategy { public FTPBlobDownloadStrategy(BlobTransferPolicy transferPolicy) throws MalformedURLException { super(transferPolicy); } public InputStream getInputStream(ActiveMQBlobMessage message) throws IOException, JMSException { // Do some checks on the received URL against the transfer policy URL uploadURL = new URL(super.transferPolicy.getUploadUrl()); String protocol = message.getURL().getProtocol(); if (!protocol.equals(uploadURL.getProtocol())) { throw new IOException("The message URL protocol is incorrect"); } String host = message.getURL().getHost(); if (!host.equals(uploadURL.getHost())) { throw new IOException("The message URL host is incorrect"); } int port = message.getURL().getPort(); if (uploadURL.getPort() != 0 && port != uploadURL.getPort()) { throw new IOException("The message URL port is incorrect"); } url = message.getURL(); final FTPClient ftp = createFTP(); String path = url.getPath(); String workingDir = path.substring(0, path.lastIndexOf("/")); String file = path.substring(path.lastIndexOf("/") + 1); ftp.changeWorkingDirectory(workingDir); ftp.setFileType(FTPClient.BINARY_FILE_TYPE); InputStream input = new FilterInputStream(ftp.retrieveFileStream(file)) { public void close() throws IOException { in.close(); ftp.quit(); ftp.disconnect(); } }; return input; } public void deleteFile(ActiveMQBlobMessage message) throws IOException, JMSException { url = message.getURL(); final FTPClient ftp = createFTP(); String path = url.getPath(); try { if (!ftp.deleteFile(path)) { throw new JMSException("Delete file failed: " + ftp.getReplyString()); } } finally { ftp.quit(); ftp.disconnect(); } } }
1,262
675
/* * Copyright 2016 The Bazel Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.idea.blaze.base.lang.buildfile.references; import com.google.common.collect.Lists; import com.google.idea.blaze.base.lang.buildfile.completion.NamedBuildLookupElement; import com.google.idea.blaze.base.lang.buildfile.psi.Argument; import com.google.idea.blaze.base.lang.buildfile.psi.FuncallExpression; import com.google.idea.blaze.base.lang.buildfile.psi.FunctionStatement; import com.google.idea.blaze.base.lang.buildfile.psi.Parameter; import com.intellij.codeInsight.lookup.LookupElement; import com.intellij.openapi.util.TextRange; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiReferenceBase; import com.intellij.psi.util.PsiTreeUtil; import java.util.List; import javax.annotation.Nullable; /** * Only keyword arguments resolve, but we include this class for code completion purposes. As the * user is typing a keyword arg, they'll start with a positional arg element. */ public class ArgumentReference<T extends Argument> extends PsiReferenceBase<T> { public ArgumentReference(T element, TextRange rangeInElement, boolean soft) { super(element, rangeInElement, false); } @Nullable protected FunctionStatement resolveFunction() { FuncallExpression call = PsiTreeUtil.getParentOfType(myElement, FuncallExpression.class); if (call == null) { return null; } PsiElement callee = call.getReferencedElement(); return callee instanceof FunctionStatement ? (FunctionStatement) callee : null; } @Nullable @Override public PsiElement resolve() { return null; } @Override public Object[] getVariants() { FunctionStatement function = resolveFunction(); if (function == null) { return EMPTY_ARRAY; } List<LookupElement> params = Lists.newArrayList(); for (Parameter param : function.getParameters()) { params.add(new NamedBuildLookupElement(param, QuoteType.NoQuotes)); } return params.toArray(); } }
798
772
<reponame>ryansloan/code-dot-or<gh_stars>100-1000 { "bn-BD": { "data": { "short_instructions": { "Course 4 Maze 2": "TNT বর্জন করুন!", "Crack Random Substitution": "নির্দেশনা", "Crack a Caesar Cipher": "নির্দেশনা", "NEW Course 4 Artist Functions 14": "মুক্ত-খেলার সময়! ", "frozen circle": "একটি বৃত্ত একটি বিশেষ আকৃতি । আপনি কি বলতে পারবেন প্রশ্নবোধক চিহ্নের পরিবর্তে কোন সংখ্যা বসালে একটি বৃত্ত তৈরি হবে ?", "frozen circle function": "নতুন \"Create a circle\" ব্লক ব্যবহার করে ১০ টি উপরিপাতিত বৃত্ত তৈরি করেন । বৃত্তের মাঝে লাফ দিয়ে এগিয়ে যেতে ভুলবেন না।", "frozen circle function in circle": "এখন চলুন 18 ডিগ্রি তফাতে 20 টি উপরিপাতিত বৃত্ত তৈরি করা যাক।", "frozen circle function with parameter": "\"Create Circle\" একটি ব্লক যা বিভিন্ন আকারের বৃত্ত তৈরি করে । আপনি এই ব্লক ব্যবহার করে কি একটি ছোটো বৃত্ত যার আকার ৫ এবং একটি বড় বৃত্ত যার আকার ১০ বানাতে পারবেন ?", "frozen freeplay": "আপনি আনুষ্ঠানিক ভাবে এখন একজন শিল্পী। আপনি আপনার শীতকালীন ওয়ান্ডারল্যান্ড তৈরি করুন।" } } } }
1,321
5,411
<reponame>thorium-cfx/fivem // Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "base/task/single_thread_task_executor.h" #include "base/run_loop.h" #include "base/task/post_task.h" #include "base/test/bind_test_util.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" using ::testing::IsNull; using ::testing::NotNull; namespace base { TEST(SingleThreadTaskExecutorTest, GetTaskExecutorForCurrentThread) { EXPECT_THAT(GetTaskExecutorForCurrentThread(), IsNull()); { SingleThreadTaskExecutor single_thread_task_executor; EXPECT_THAT(GetTaskExecutorForCurrentThread(), NotNull()); } EXPECT_THAT(GetTaskExecutorForCurrentThread(), IsNull()); } TEST(SingleThreadTaskExecutorTest, GetTaskExecutorForCurrentThreadInPostedTask) { SingleThreadTaskExecutor single_thread_task_executor; TaskExecutor* task_executor = GetTaskExecutorForCurrentThread(); EXPECT_THAT(task_executor, NotNull()); RunLoop run_loop; single_thread_task_executor.task_runner()->PostTask( FROM_HERE, BindLambdaForTesting([&]() { EXPECT_EQ(GetTaskExecutorForCurrentThread(), task_executor); run_loop.Quit(); })); run_loop.Run(); } } // namespace base
482
5,941
/** * FreeRDP: A Remote Desktop Protocol Implementation * X11 Input * * Copyright 2013 <NAME> <<EMAIL>> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifdef HAVE_CONFIG_H #include "config.h" #endif #include <X11/Xlib.h> #include <X11/Xutil.h> #ifdef WITH_XCURSOR #include <X11/Xcursor/Xcursor.h> #endif #ifdef WITH_XI #include <X11/extensions/XInput2.h> #endif #include <math.h> #include <float.h> #include <limits.h> #include "xf_event.h" #include "xf_input.h" #include <winpr/assert.h> #include <freerdp/log.h> #define TAG CLIENT_TAG("x11") #ifdef WITH_XI #define PAN_THRESHOLD 50 #define ZOOM_THRESHOLD 10 #define MIN_FINGER_DIST 5 static const char* xf_input_get_class_string(int class) { if (class == XIKeyClass) return "XIKeyClass"; else if (class == XIButtonClass) return "XIButtonClass"; else if (class == XIValuatorClass) return "XIValuatorClass"; else if (class == XIScrollClass) return "XIScrollClass"; else if (class == XITouchClass) return "XITouchClass"; return "XIUnknownClass"; } int xf_input_init(xfContext* xfc, Window window) { int i, j; int nmasks = 0; int ndevices = 0; int major = 2; int minor = 2; XIDeviceInfo* info; XIEventMask evmasks[64]; int opcode, event, error; BYTE masks[8][XIMaskLen(XI_LASTEVENT)] = { 0 }; WINPR_ASSERT(xfc); memset(xfc->contacts, 0, sizeof(xfc->contacts)); xfc->firstDist = -1.0; xfc->z_vector = 0; xfc->px_vector = 0; xfc->py_vector = 0; xfc->active_contacts = 0; if (!XQueryExtension(xfc->display, "XInputExtension", &opcode, &event, &error)) { WLog_WARN(TAG, "XInput extension not available."); return -1; } xfc->XInputOpcode = opcode; XIQueryVersion(xfc->display, &major, &minor); if (major * 1000 + minor < 2002) { WLog_WARN(TAG, "Server does not support XI 2.2"); return -1; } if (xfc->context.settings->MultiTouchInput) xfc->use_xinput = TRUE; info = XIQueryDevice(xfc->display, XIAllDevices, &ndevices); for (i = 0; i < ndevices; i++) { BOOL touch = FALSE; XIDeviceInfo* dev = &info[i]; for (j = 0; j < dev->num_classes; j++) { XIAnyClassInfo* class = dev->classes[j]; XITouchClassInfo* t = (XITouchClassInfo*)class; if ((class->type == XITouchClass) && (t->mode == XIDirectTouch) && (strcmp(dev->name, "Virtual core pointer") != 0)) { touch = TRUE; } } for (j = 0; j < dev->num_classes; j++) { XIAnyClassInfo* class = dev->classes[j]; XITouchClassInfo* t = (XITouchClassInfo*)class; if (xfc->context.settings->MultiTouchInput) { WLog_INFO(TAG, "%s (%d) \"%s\" id: %d", xf_input_get_class_string(class->type), class->type, dev->name, dev->deviceid); } evmasks[nmasks].mask = masks[nmasks]; evmasks[nmasks].mask_len = sizeof(masks[0]); ZeroMemory(masks[nmasks], sizeof(masks[0])); evmasks[nmasks].deviceid = dev->deviceid; if ((class->type == XITouchClass) && (t->mode == XIDirectTouch) && (strcmp(dev->name, "Virtual core pointer") != 0)) { if (xfc->context.settings->MultiTouchInput) { WLog_INFO(TAG, "%s %s touch device (id: %d, mode: %d), supporting %d touches.", dev->name, (t->mode == XIDirectTouch) ? 
"direct" : "dependent", dev->deviceid, t->mode, t->num_touches); } XISetMask(masks[nmasks], XI_TouchBegin); XISetMask(masks[nmasks], XI_TouchUpdate); XISetMask(masks[nmasks], XI_TouchEnd); nmasks++; } if (xfc->use_xinput) { if (!touch && (class->type == XIButtonClass) && strcmp(dev->name, "Virtual core pointer")) { WLog_INFO(TAG, "%s button device (id: %d, mode: %d)", dev->name, dev->deviceid, t->mode); XISetMask(masks[nmasks], XI_ButtonPress); XISetMask(masks[nmasks], XI_ButtonRelease); XISetMask(masks[nmasks], XI_Motion); nmasks++; } } } } XIFreeDeviceInfo(info); if (nmasks > 0) { Status xstatus = XISelectEvents(xfc->display, window, evmasks, nmasks); if (xstatus != 0) WLog_WARN(TAG, "XISelectEvents returned %d", xstatus); } return 0; } static BOOL xf_input_is_duplicate(xfContext* xfc, const XGenericEventCookie* cookie) { const XIDeviceEvent* event; WINPR_ASSERT(xfc); WINPR_ASSERT(cookie); event = cookie->data; WINPR_ASSERT(event); if ((xfc->lastEvent.time == event->time) && (xfc->lastEvType == cookie->evtype) && (xfc->lastEvent.detail == event->detail) && (fabs(xfc->lastEvent.event_x - event->event_x) < DBL_EPSILON) && (fabs(xfc->lastEvent.event_y - event->event_y) < DBL_EPSILON)) { return TRUE; } return FALSE; } static void xf_input_save_last_event(xfContext* xfc, const XGenericEventCookie* cookie) { const XIDeviceEvent* event; WINPR_ASSERT(xfc); WINPR_ASSERT(cookie); event = cookie->data; WINPR_ASSERT(event); xfc->lastEvType = cookie->evtype; xfc->lastEvent.time = event->time; xfc->lastEvent.detail = event->detail; xfc->lastEvent.event_x = event->event_x; xfc->lastEvent.event_y = event->event_y; } static void xf_input_detect_pan(xfContext* xfc) { double dx[2]; double dy[2]; double px; double py; double dist_x; double dist_y; rdpContext* ctx; WINPR_ASSERT(xfc); ctx = &xfc->context; WINPR_ASSERT(ctx); if (xfc->active_contacts != 2) { return; } dx[0] = xfc->contacts[0].pos_x - xfc->contacts[0].last_x; dx[1] = xfc->contacts[1].pos_x - xfc->contacts[1].last_x; dy[0] = xfc->contacts[0].pos_y - xfc->contacts[0].last_y; dy[1] = xfc->contacts[1].pos_y - xfc->contacts[1].last_y; px = fabs(dx[0]) < fabs(dx[1]) ? dx[0] : dx[1]; py = fabs(dy[0]) < fabs(dy[1]) ? 
dy[0] : dy[1]; xfc->px_vector += px; xfc->py_vector += py; dist_x = fabs(xfc->contacts[0].pos_x - xfc->contacts[1].pos_x); dist_y = fabs(xfc->contacts[0].pos_y - xfc->contacts[1].pos_y); if (dist_y > MIN_FINGER_DIST) { if (xfc->px_vector > PAN_THRESHOLD) { { PanningChangeEventArgs e; EventArgsInit(&e, "xfreerdp"); e.dx = 5; e.dy = 0; PubSub_OnPanningChange(ctx->pubSub, xfc, &e); } xfc->px_vector = 0; xfc->py_vector = 0; xfc->z_vector = 0; } else if (xfc->px_vector < -PAN_THRESHOLD) { { PanningChangeEventArgs e; EventArgsInit(&e, "xfreerdp"); e.dx = -5; e.dy = 0; PubSub_OnPanningChange(ctx->pubSub, xfc, &e); } xfc->px_vector = 0; xfc->py_vector = 0; xfc->z_vector = 0; } } if (dist_x > MIN_FINGER_DIST) { if (xfc->py_vector > PAN_THRESHOLD) { { PanningChangeEventArgs e; EventArgsInit(&e, "xfreerdp"); e.dx = 0; e.dy = 5; PubSub_OnPanningChange(ctx->pubSub, xfc, &e); } xfc->py_vector = 0; xfc->px_vector = 0; xfc->z_vector = 0; } else if (xfc->py_vector < -PAN_THRESHOLD) { { PanningChangeEventArgs e; EventArgsInit(&e, "xfreerdp"); e.dx = 0; e.dy = -5; PubSub_OnPanningChange(ctx->pubSub, xfc, &e); } xfc->py_vector = 0; xfc->px_vector = 0; xfc->z_vector = 0; } } } static void xf_input_detect_pinch(xfContext* xfc) { double dist; double delta; ZoomingChangeEventArgs e; rdpContext* ctx; WINPR_ASSERT(xfc); ctx = &xfc->context; WINPR_ASSERT(ctx); if (xfc->active_contacts != 2) { xfc->firstDist = -1.0; return; } /* first calculate the distance */ dist = sqrt(pow(xfc->contacts[1].pos_x - xfc->contacts[0].last_x, 2.0) + pow(xfc->contacts[1].pos_y - xfc->contacts[0].last_y, 2.0)); /* if this is the first 2pt touch */ if (xfc->firstDist <= 0) { xfc->firstDist = dist; xfc->lastDist = xfc->firstDist; xfc->z_vector = 0; xfc->px_vector = 0; xfc->py_vector = 0; } else { delta = xfc->lastDist - dist; if (delta > 1.0) delta = 1.0; if (delta < -1.0) delta = -1.0; /* compare the current distance to the first one */ xfc->z_vector += delta; xfc->lastDist = dist; if (xfc->z_vector > ZOOM_THRESHOLD) { EventArgsInit(&e, "xfreerdp"); e.dx = e.dy = -10; PubSub_OnZoomingChange(ctx->pubSub, xfc, &e); xfc->z_vector = 0; xfc->px_vector = 0; xfc->py_vector = 0; } if (xfc->z_vector < -ZOOM_THRESHOLD) { EventArgsInit(&e, "xfreerdp"); e.dx = e.dy = 10; PubSub_OnZoomingChange(ctx->pubSub, xfc, &e); xfc->z_vector = 0; xfc->px_vector = 0; xfc->py_vector = 0; } } } static void xf_input_touch_begin(xfContext* xfc, const XIDeviceEvent* event) { int i; WINPR_UNUSED(xfc); for (i = 0; i < MAX_CONTACTS; i++) { if (xfc->contacts[i].id == 0) { xfc->contacts[i].id = event->detail; xfc->contacts[i].count = 1; xfc->contacts[i].pos_x = event->event_x; xfc->contacts[i].pos_y = event->event_y; xfc->active_contacts++; break; } } } static void xf_input_touch_update(xfContext* xfc, const XIDeviceEvent* event) { int i; WINPR_ASSERT(xfc); WINPR_ASSERT(event); for (i = 0; i < MAX_CONTACTS; i++) { if (xfc->contacts[i].id == event->detail) { xfc->contacts[i].count++; xfc->contacts[i].last_x = xfc->contacts[i].pos_x; xfc->contacts[i].last_y = xfc->contacts[i].pos_y; xfc->contacts[i].pos_x = event->event_x; xfc->contacts[i].pos_y = event->event_y; xf_input_detect_pinch(xfc); xf_input_detect_pan(xfc); break; } } } static void xf_input_touch_end(xfContext* xfc, const XIDeviceEvent* event) { int i; WINPR_UNUSED(xfc); for (i = 0; i < MAX_CONTACTS; i++) { if (xfc->contacts[i].id == event->detail) { xfc->contacts[i].id = 0; xfc->contacts[i].count = 0; xfc->active_contacts--; break; } } } static int xf_input_handle_event_local(xfContext* xfc, const XEvent* event) { 
union { const XGenericEventCookie* cc; XGenericEventCookie* vc; } cookie; cookie.cc = &event->xcookie; XGetEventData(xfc->display, cookie.vc); if ((cookie.cc->type == GenericEvent) && (cookie.cc->extension == xfc->XInputOpcode)) { switch (cookie.cc->evtype) { case XI_TouchBegin: if (xf_input_is_duplicate(xfc, cookie.cc) == FALSE) xf_input_touch_begin(xfc, cookie.cc->data); xf_input_save_last_event(xfc, cookie.cc); break; case XI_TouchUpdate: if (xf_input_is_duplicate(xfc, cookie.cc) == FALSE) xf_input_touch_update(xfc, cookie.cc->data); xf_input_save_last_event(xfc, cookie.cc); break; case XI_TouchEnd: if (xf_input_is_duplicate(xfc, cookie.cc) == FALSE) xf_input_touch_end(xfc, cookie.cc->data); xf_input_save_last_event(xfc, cookie.cc); break; default: WLog_ERR(TAG, "unhandled xi type= %d", cookie.cc->evtype); break; } } XFreeEventData(xfc->display, cookie.vc); return 0; } #ifdef WITH_DEBUG_X11 static char* xf_input_touch_state_string(DWORD flags) { if (flags & RDPINPUT_CONTACT_FLAG_DOWN) return "RDPINPUT_CONTACT_FLAG_DOWN"; else if (flags & RDPINPUT_CONTACT_FLAG_UPDATE) return "RDPINPUT_CONTACT_FLAG_UPDATE"; else if (flags & RDPINPUT_CONTACT_FLAG_UP) return "RDPINPUT_CONTACT_FLAG_UP"; else if (flags & RDPINPUT_CONTACT_FLAG_INRANGE) return "RDPINPUT_CONTACT_FLAG_INRANGE"; else if (flags & RDPINPUT_CONTACT_FLAG_INCONTACT) return "RDPINPUT_CONTACT_FLAG_INCONTACT"; else if (flags & RDPINPUT_CONTACT_FLAG_CANCELED) return "RDPINPUT_CONTACT_FLAG_CANCELED"; else return "RDPINPUT_CONTACT_FLAG_UNKNOWN"; } #endif static void xf_input_hide_cursor(xfContext* xfc) { #ifdef WITH_XCURSOR if (!xfc->cursorHidden) { XcursorImage ci; XcursorPixel xp = 0; static Cursor nullcursor = None; xf_lock_x11(xfc); ZeroMemory(&ci, sizeof(ci)); ci.version = XCURSOR_IMAGE_VERSION; ci.size = sizeof(ci); ci.width = ci.height = 1; ci.xhot = ci.yhot = 0; ci.pixels = &xp; nullcursor = XcursorImageLoadCursor(xfc->display, &ci); if ((xfc->window) && (nullcursor != None)) XDefineCursor(xfc->display, xfc->window->handle, nullcursor); xfc->cursorHidden = TRUE; xf_unlock_x11(xfc); } #endif } static void xf_input_show_cursor(xfContext* xfc) { #ifdef WITH_XCURSOR xf_lock_x11(xfc); if (xfc->cursorHidden) { if (xfc->window) { if (!xfc->pointer) XUndefineCursor(xfc->display, xfc->window->handle); else XDefineCursor(xfc->display, xfc->window->handle, xfc->pointer->cursor); } xfc->cursorHidden = FALSE; } xf_unlock_x11(xfc); #endif } static int xf_input_touch_remote(xfContext* xfc, XIDeviceEvent* event, int evtype) { int x, y; int touchId; int contactId; RdpeiClientContext* rdpei = xfc->rdpei; if (!rdpei) return 0; xf_input_hide_cursor(xfc); touchId = event->detail; x = (int)event->event_x; y = (int)event->event_y; xf_event_adjust_coordinates(xfc, &x, &y); if (evtype == XI_TouchBegin) { WLog_DBG(TAG, "TouchBegin: %d", touchId); rdpei->TouchBegin(rdpei, touchId, x, y, &contactId); } else if (evtype == XI_TouchUpdate) { WLog_DBG(TAG, "TouchUpdate: %d", touchId); rdpei->TouchUpdate(rdpei, touchId, x, y, &contactId); } else if (evtype == XI_TouchEnd) { WLog_DBG(TAG, "TouchEnd: %d", touchId); rdpei->TouchEnd(rdpei, touchId, x, y, &contactId); } return 0; } static int xf_input_event(xfContext* xfc, XIDeviceEvent* event, int evtype) { xf_input_show_cursor(xfc); switch (evtype) { case XI_ButtonPress: xf_generic_ButtonEvent(xfc, (int)event->event_x, (int)event->event_y, event->detail, event->event, xfc->remote_app, TRUE); break; case XI_ButtonRelease: xf_generic_ButtonEvent(xfc, (int)event->event_x, (int)event->event_y, event->detail, event->event, 
xfc->remote_app, FALSE); break; case XI_Motion: xf_generic_MotionNotify(xfc, (int)event->event_x, (int)event->event_y, event->detail, event->event, xfc->remote_app); break; } return 0; } static int xf_input_handle_event_remote(xfContext* xfc, const XEvent* event) { union { const XGenericEventCookie* cc; XGenericEventCookie* vc; } cookie; cookie.cc = &event->xcookie; XGetEventData(xfc->display, cookie.vc); if ((cookie.cc->type == GenericEvent) && (cookie.cc->extension == xfc->XInputOpcode)) { switch (cookie.cc->evtype) { case XI_TouchBegin: xf_input_touch_remote(xfc, cookie.cc->data, XI_TouchBegin); break; case XI_TouchUpdate: xf_input_touch_remote(xfc, cookie.cc->data, XI_TouchUpdate); break; case XI_TouchEnd: xf_input_touch_remote(xfc, cookie.cc->data, XI_TouchEnd); break; default: xf_input_event(xfc, cookie.cc->data, cookie.cc->evtype); break; } } XFreeEventData(xfc->display, cookie.vc); return 0; } #else int xf_input_init(xfContext* xfc, Window window) { return 0; } #endif int xf_input_handle_event(xfContext* xfc, const XEvent* event) { #ifdef WITH_XI if (xfc->context.settings->MultiTouchInput) { return xf_input_handle_event_remote(xfc, event); } if (xfc->context.settings->MultiTouchGestures) { return xf_input_handle_event_local(xfc, event); } #endif return 0; }
7,061
653
package org.itstack.naive.chat.domain.user.repository;

import org.itstack.naive.chat.domain.user.model.*;
import org.itstack.naive.chat.infrastructure.po.UserFriend;

import java.util.List;

/**
 * Blog: http://bugstack.cn
 * WeChat official account: bugstack虫洞栈 | Accumulate, share and grow, so that you and others can all gain something!
 * create by 小傅哥 on @2020
 */
public interface IUserRepository {

    String queryUserPassword(String userId);

    /**
     * Query user information
     *
     * @param userId user ID
     * @return user information
     */
    UserInfo queryUserInfo(String userId);

    /**
     * Query the personal user's talk box list
     *
     * @param userId personal user ID
     * @return talk box list
     */
    List<TalkBoxInfo> queryTalkBoxInfoList(String userId);

    /**
     * Add a talk box
     *
     * @param userId   user ID
     * @param talkId   friend ID
     * @param talkType talk box type [0 friend, 1 group]
     */
    void addTalkBoxInfo(String userId, String talkId, Integer talkType);

    /**
     * Query the personal user's friend list
     *
     * @param userId personal user ID
     * @return friend list
     */
    List<UserFriendInfo> queryUserFriendInfoList(String userId);

    /**
     * Query the personal user's group list
     *
     * @param userId personal user ID
     * @return group list
     */
    List<GroupsInfo> queryUserGroupInfoList(String userId);

    /**
     * Fuzzy search for users
     *
     * @param userId    user ID
     * @param searchKey user name or user ID
     * @return collection of fewer than 10 users
     */
    List<LuckUserInfo> queryFuzzyUserInfoList(String userId, String searchKey);

    /**
     * Add friends to the database
     *
     * @param userFriendList friend collection
     */
    void addUserFriend(List<UserFriend> userFriendList);

    /**
     * Append a chat record
     *
     * @param chatRecordInfo chat record information
     */
    void appendChatRecord(ChatRecordInfo chatRecordInfo);

    /**
     * Query chat records
     *
     * @param talkId   talk box ID
     * @param userId   friend ID
     * @param talkType talk box type; 0 friend, 1 group
     * @return chat records (10 entries)
     */
    List<ChatRecordInfo> queryChatRecordInfoList(String talkId, String userId, Integer talkType);

    /**
     * Delete a user's talk box
     *
     * @param userId user ID
     * @param talkId talk box ID
     */
    void deleteUserTalk(String userId, String talkId);

    /**
     * Query the collection of the user's group IDs
     *
     * @param userId user ID
     * @return collection of the user's group IDs
     */
    List<String> queryUserGroupsIdList(String userId);

    /**
     * Query the user's group talk boxes
     *
     * @param userId user Id
     * @return group Ids
     */
    List<String> queryTalkBoxGroupsIdList(String userId);

}
1,495
11,699
from taichi.types.annotations import * from taichi.types.compound_types import CompoundType, matrix, struct, vector from taichi.types.primitive_types import *
45
1,056
<reponame>arusinha/incubator-netbeans<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.bugtracking.spi; import java.beans.PropertyChangeListener; import javax.swing.JComponent; import org.openide.util.HelpCtx; /** * Provides access to a Queries UI. * <p> * Typically a Query UI should provide at least a query criteria * editor available when creating new queries or modifying existing ones. * In case it isn't possible to create or modify a query on the client it is * possible to provide no QueryController and no UI at all - e.g. an immutable * server defined query with no remote api to modify the criteria. * </p> * * <p> * When editing or creating a Query, the UI is presented in an * TopComponent in the editor area. Fire <code>PROP_CHANGED</code> to notify the Query * TopComponent that the UI state changed, {@link #isChanged()} will be called * accordingly to determine if the IDE-s general SaveAction should be enabled. * On save or TopComponent close are then the <code>saveChanges()</code> * and <code>discardUnsavedChanges()</code> methods called accordingly. * </p> * * <p> * Please <b>note</b>, that the results of an query * are always presented in the TaskDashboard, but eventually, in case the need appears, * it is also possible for the bugtracking plugin implementation to provide a * customized result view - e.g a table listing more attributes than then TasksDashboard does. * </p> * * @author <NAME> * @since 1.85 */ public interface QueryController { /** * Fired when the data presented in the Query UI were changed by the user. * @since 1.85 */ public static String PROP_CHANGED = "bugtracking.query.changed"; /** * The mode in which this controllers component is shown. * * @see #providesMode(org.netbeans.modules.bugtracking.spi.QueryController.QueryMode) * @since 1.85 */ public enum QueryMode { /** * Determines the Controller Component to create or edit a Query. * @since 1.85 */ EDIT, /** * Determines the Controller Component to view the Query results. * @since 1.85 */ VIEW } /** * Determines if the Query provides an Editor or a Result view. * Depending on the returned value the Query Open (view) and Edit actions will be * enabled on a query node in the TasksDashboard. * * @param mode * @return <code>true</code> if the given mode is provided by the particular * implementation, otherwise false</code> * @since 1.85 */ public boolean providesMode(QueryMode mode); /** * Returns a visual Query component. 
* * @param mode * @return a visual component representing a bugtracking query * @since 1.85 */ public JComponent getComponent(QueryMode mode); /** * Returns the help context associated with this controllers visual component * @return help context * @since 1.85 */ public HelpCtx getHelpCtx(); /** * Called when the component returned by this controller was opened. * @since 1.85 */ public void opened(); /** * Called when the component returned by this controller was closed. * @since 1.85 */ public void closed(); /** * This method is called when the general IDE Save button is pressed or when * Save was chosen on close of an Query TopComponent. * * @param name in case the Query wasn't saved yet a new name is provided. Otherwise might be null. * @return <code>true</code> in case the save worked, otherwise <code>false</code> * @since 1.85 */ public boolean saveChanges(String name); /** * This method is called when Discard was chosen on close of an Query TopComponent. * * @return <code>true</code> in case the discard worked, otherwise <code>false</code> * @since 1.85 */ public boolean discardUnsavedChanges(); /** * Determines whether the state of the UI has changed and is supposed to be saved. * * @return <code>true</code> in case there are changes to be saved, otherwise <code>false</code> */ public boolean isChanged(); /** * Registers a PropertyChangeListener. * * @param l a PropertyChangeListener * @since 1.85 */ public void addPropertyChangeListener(PropertyChangeListener l); /** * Unregisters a PropertyChangeListener. * * @param l a PropertyChangeListener * @since 1.85 */ public void removePropertyChangeListener(PropertyChangeListener l); }
1,781
1,109
<filename>modules/experiment/src/test/java/com/intuit/wasabi/experiment/BucketsImplTest.java /******************************************************************************* * Copyright 2016 Intuit * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *******************************************************************************/ package com.intuit.wasabi.experiment; import com.intuit.wasabi.assignmentobjects.RuleCache; import com.intuit.wasabi.authenticationobjects.UserInfo; import com.intuit.wasabi.eventlog.EventLog; import com.intuit.wasabi.exceptions.BucketNotFoundException; import com.intuit.wasabi.exceptions.ConstraintViolationException; import com.intuit.wasabi.exceptions.ExperimentNotFoundException; import com.intuit.wasabi.experiment.impl.BucketsImpl; import com.intuit.wasabi.experimentobjects.Application; import com.intuit.wasabi.experimentobjects.Bucket; import com.intuit.wasabi.experimentobjects.BucketList; import com.intuit.wasabi.experimentobjects.Experiment; import com.intuit.wasabi.experimentobjects.ExperimentValidator; import com.intuit.wasabi.experimentobjects.exceptions.InvalidExperimentStateException; import com.intuit.wasabi.repository.ExperimentRepository; import com.intuit.wasabi.repository.MutexRepository; import com.intuit.wasabi.repository.RepositoryException; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.ExpectedException; import org.junit.runner.RunWith; import org.mockito.Matchers; import org.mockito.Mock; import org.mockito.runners.MockitoJUnitRunner; import java.util.ArrayList; import java.util.List; import static com.googlecode.catchexception.CatchException.verifyException; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @RunWith(MockitoJUnitRunner.class) public class BucketsImplTest { @Rule public ExpectedException thrown = ExpectedException.none(); @Mock private ExperimentRepository databaseRepository; @Mock private ExperimentRepository cassandraRepository; @Mock private MutexRepository mutexRepository; @Mock private ExperimentValidator validator; @Mock private RuleCache ruleCache; @Mock private Experiments experiments; @Mock private Buckets buckets; @Mock private EventLog eventLog; private final static Application.Name testApp = Application.Name.valueOf("testApp"); private Experiment.ID experimentID; private Bucket.Label bucketLabel; private UserInfo changeUser = UserInfo.from(UserInfo.Username.valueOf("userinfo")).build(); @Before public void setup() { experimentID = Experiment.ID.newInstance(); bucketLabel = Bucket.Label.valueOf("aLabel"); } @Test public void testCreateBucket() throws Exception { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog) { @Override public Bucket getBucket(Experiment.ID experimentID, Bucket.Label bucketLabel) { return 
Bucket.newInstance(experimentID, bucketLabel).withAllocationPercent(.3).build(); } }; final Bucket newBucket = Bucket.newInstance(experimentID, bucketLabel).withAllocationPercent(.3).build(); Bucket bucket = Bucket.newInstance(experimentID, bucketLabel).withAllocationPercent(.3).build(); Experiment experiment = Experiment.withID(experimentID) .withApplicationName(testApp) .withState(Experiment.State.DELETED) .build(); when(experiments.getExperiment(experiment.getID())).thenReturn(null); verifyException(bucketsImpl, ExperimentNotFoundException.class) .createBucket(experiment.getID(), newBucket, changeUser); when(experiments.getExperiment(experiment.getID())).thenReturn(experiment); verifyException(bucketsImpl, InvalidExperimentStateException.class) .createBucket(experimentID, newBucket, changeUser); experiment.setState(Experiment.State.DRAFT); when(cassandraRepository.getBucket(newBucket.getExperimentID(), newBucket.getLabel())).thenReturn(bucket); verifyException(bucketsImpl, ConstraintViolationException.class) .createBucket(experimentID, newBucket, changeUser); when(cassandraRepository.getBucket(newBucket.getExperimentID(), newBucket.getLabel())).thenReturn(null); Bucket result = bucketsImpl.createBucket(experimentID, newBucket, changeUser); assert result.getLabel() == newBucket.getLabel(); assert result.getExperimentID() == experimentID; experiment.setState(Experiment.State.RUNNING); result = bucketsImpl.createBucket(experimentID, newBucket, changeUser); assert result.getLabel() == newBucket.getLabel(); assert result.getExperimentID() == experimentID; doThrow(RepositoryException.class).when(databaseRepository).createBucket(newBucket); verifyException(bucketsImpl, RepositoryException.class).createBucket(experimentID, newBucket, changeUser); } @Test public void testAdjustAllocationPercentages() { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Bucket newBucket = Bucket.newInstance(experimentID, bucketLabel).withAllocationPercent(.3).build(); Bucket bucket = Bucket.newInstance(experimentID, Bucket.Label.valueOf("a")).withAllocationPercent(.4).build(); Bucket bucket2 = Bucket.newInstance(experimentID, Bucket.Label.valueOf("b")).withAllocationPercent(.6).build(); BucketList bucketList = new BucketList(); bucketList.addBucket(bucket); bucketList.addBucket(bucket2); Experiment experiment = Experiment.withID(experimentID) .withApplicationName(testApp) .withState(Experiment.State.DRAFT) .build(); when(buckets.getBuckets(experimentID, false)).thenReturn(bucketList); BucketList newBuckets = bucketsImpl.adjustAllocationPercentages(experiment, newBucket); bucket.setAllocationPercent(.42); bucket2.setAllocationPercent(.28); bucketList = new BucketList(); bucketList.addBucket(bucket); bucketList.addBucket(bucket2); for (Bucket b1 : bucketList.getBuckets()) { for (Bucket b2 : bucketList.getBuckets()) { if (b1.getLabel().equals(b2.getLabel())) { assertTrue(b1.equals(b2)); } } } assertTrue(bucketList.getBuckets().size() == newBuckets.getBuckets().size()); } @Test public void testValidateBucketChanges() throws Exception { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Bucket bucket = Bucket.newInstance(experimentID, Bucket.Label.valueOf("a")).withAllocationPercent(.3) .withState(Bucket.State.valueOf("OPEN")).build(); Bucket bucket2 = Bucket.newInstance(Experiment.ID.newInstance(), Bucket.Label.valueOf("a")) 
.withAllocationPercent(.3).withState(Bucket.State.valueOf("CLOSED")).build(); try { bucketsImpl.validateBucketChanges(bucket, bucket2); fail(); } catch (IllegalArgumentException ignored) { } bucket2.setExperimentID(experimentID); bucket2.setLabel(Bucket.Label.valueOf("b")); try { bucketsImpl.validateBucketChanges(bucket, bucket2); fail(); } catch (IllegalArgumentException ignored) { } bucket2.setExperimentID(experimentID); bucket2.setLabel(Bucket.Label.valueOf("a")); try { bucketsImpl.validateBucketChanges(bucket, bucket2); fail(); } catch (IllegalArgumentException ignored) { } bucket2.setState(Bucket.State.valueOf("OPEN")); bucketsImpl.validateBucketChanges(bucket, bucket2); } @Test public void testGetBucketChangeList() throws Exception { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Bucket bucket = Bucket.newInstance(experimentID, bucketLabel) .withControl(true).withAllocationPercent(.5).withDescription("one").withPayload("pay1").build(); Bucket bucket2 = Bucket.newInstance(Experiment.ID.newInstance(), bucketLabel) .withControl(false).withAllocationPercent(.6).withDescription("two").withPayload("pay2").build(); Bucket.Builder builder = Bucket.newInstance(experimentID, bucketLabel); ArrayList<Bucket.BucketAuditInfo> changes = new ArrayList<>(); Bucket.BucketAuditInfo changeData; changeData = new Bucket.BucketAuditInfo("is_control", bucket.isControl().toString(), bucket2.isControl().toString()); changes.add(changeData); changeData = new Bucket.BucketAuditInfo("allocation", bucket.getAllocationPercent().toString(), bucket2.getAllocationPercent().toString()); changes.add(changeData); changeData = new Bucket.BucketAuditInfo("description", bucket.getDescription(), bucket2.getDescription()); changes.add(changeData); changeData = new Bucket.BucketAuditInfo("payload", bucket.getPayload(), bucket2.getPayload()); changes.add(changeData); List<Bucket.BucketAuditInfo> returned = bucketsImpl.getBucketChangeList(bucket, bucket2, builder); assert returned.equals(changes); } @Test public void testUpdateBucket() throws Exception { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Experiment experiment = Experiment.withID(experimentID) .withApplicationName(testApp) .withState(Experiment.State.DRAFT) .build(); Bucket bucket = Bucket.newInstance(experimentID, bucketLabel) .withControl(true).withAllocationPercent(.5).withDescription("one").withState(Bucket.State.OPEN).build(); Bucket updates = Bucket.newInstance(experimentID, bucketLabel) .withControl(true).withAllocationPercent(.5).withDescription("one").build(); when(experiments.getExperiment(experimentID)).thenReturn(null); verifyException(bucketsImpl, ExperimentNotFoundException.class) .updateBucket(experiment.getID(), bucketLabel, updates, changeUser); when(experiments.getExperiment(experimentID)).thenReturn(experiment); when(cassandraRepository.getBucket(experimentID, bucketLabel)).thenReturn(null); verifyException(bucketsImpl, BucketNotFoundException.class) .updateBucket(experiment.getID(), bucketLabel, updates, changeUser); updates.setAllocationPercent(0.7); List<Bucket.BucketAuditInfo> changeList = new ArrayList<>(); Bucket.BucketAuditInfo changeData = new Bucket.BucketAuditInfo("allocation", bucket.getAllocationPercent().toString(), updates.getAllocationPercent().toString()); changeList.add(changeData); when(cassandraRepository.getBucket(experimentID, bucketLabel)).thenReturn(bucket); // The 
method instantiates an object and sends it as an argument so we have to use matchers when(buckets.getBucketChangeList(Matchers.<Bucket>any(), Matchers.<Bucket>any(), Matchers.<Bucket.Builder>any())).thenReturn(changeList); when(cassandraRepository.updateBucket(bucket)).thenReturn(updates); Bucket result = bucketsImpl.updateBucket(experimentID, bucketLabel, updates, changeUser); assert result.getLabel().equals(updates.getLabel()); } @Test public void testUpdateBucketBatch() throws Exception { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Bucket bucket = Bucket.newInstance(experimentID, bucketLabel) .withControl(true).withAllocationPercent(.5) .withDescription("one").build(); bucket.setLabel(Bucket.Label.valueOf("a")); BucketList bucketList = new BucketList(); bucketList.addBucket(bucket); Experiment experiment = Experiment.withID(experimentID) .withApplicationName(testApp) .withState(Experiment.State.DRAFT) .build(); when(experiments.getExperiment(experimentID)).thenReturn(null); try { bucketsImpl.updateBucketBatch(experimentID, bucketList, changeUser); fail(); } catch (ExperimentNotFoundException ignored) { } when(experiments.getExperiment(experimentID)).thenReturn(experiment); try { bucketsImpl.updateBucketBatch(experimentID, bucketList, changeUser); fail(); } catch (IllegalStateException ignored) { } } @Test public void testCombineOldAndNewBuckets() throws Exception { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Bucket bucket = Bucket.newInstance(experimentID, Bucket.Label.valueOf("a")) .withControl(true).withAllocationPercent(.5).withDescription("one").build(); Bucket bucket2 = Bucket.newInstance(experimentID, Bucket.Label.valueOf("b")) .withControl(false).withAllocationPercent(.5).withDescription("two").build(); Bucket bucketNew = Bucket.newInstance(experimentID, Bucket.Label.valueOf("b")) .withControl(false).withAllocationPercent(.5).withDescription("three").build(); BucketList oldBucketList = new BucketList(); oldBucketList.addBucket(bucket); oldBucketList.addBucket(bucket2); BucketList newBucketList = new BucketList(); newBucketList.addBucket(bucketNew); BucketList expected = new BucketList(); expected.addBucket(bucket); expected.addBucket(bucketNew); BucketList returned = bucketsImpl.combineOldAndNewBuckets(oldBucketList, newBucketList); assert returned.equals(expected); } @Test public void testDeleteBucket() { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Experiment experiment = Experiment.withID(experimentID) .withApplicationName(testApp) .withState(Experiment.State.DRAFT) .build(); Bucket bucket = Bucket.newInstance(experimentID, Bucket.Label.valueOf("awesomeBucket")) .withControl(true).withAllocationPercent(1.0).withDescription("one").build(); //verify that an not available experiment is not processed verifyException(bucketsImpl, ExperimentNotFoundException.class) .deleteBucket(experimentID, bucket.getLabel(), changeUser); when(experiments.getExperiment(experimentID)).thenReturn(experiment); when(cassandraRepository.getBucket(experimentID, bucket.getLabel())).thenReturn(bucket); bucketsImpl.deleteBucket(experimentID, bucket.getLabel(), changeUser); //verify that Bucket gets deleted in both repositories verify(cassandraRepository, times(1)).deleteBucket(experimentID, bucket.getLabel()); verify(databaseRepository, 
times(1)).deleteBucket(experimentID, bucket.getLabel()); verify(cassandraRepository, times(0)).createBucket(bucket); doThrow(new RepositoryException()).when(databaseRepository).deleteBucket(experimentID, bucket.getLabel()); try { bucketsImpl.deleteBucket(experimentID, bucket.getLabel(), changeUser); fail("Delete Bucket should throw RepositoryException!"); //fail in case no exception is thrown! } catch (RepositoryException e) { //this exception is expected! } //verify that the bucket gets recreated verify(cassandraRepository, times(1)).createBucket(bucket); } @Test public void testGetBucketBuilder() { BucketsImpl bucketsImpl = new BucketsImpl(databaseRepository, cassandraRepository, experiments, buckets, validator, eventLog); Experiment experiment = Experiment.withID(experimentID) .withApplicationName(testApp) .withState(Experiment.State.DRAFT) .build(); verifyException(bucketsImpl, BucketNotFoundException.class).getBucketBuilder(experiment.getID(), bucketLabel); } }
6,590
576
package japicmp.test.binarycompatiblity; public class SuperclassWithStaticField { public static int STATIC_FIELD = 42; }
39
9,425
<reponame>ifraixedes/saltstack-salt """ :codeauthor: :email:`<NAME> <<EMAIL>>` """ import salt.grains.lvm as lvm from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase class LvmGrainsTestCase(TestCase, LoaderModuleMockMixin): """ Test cases for LVM grains """ def setup_loader_modules(self): return { lvm: {"__salt__": {}}, } def test__linux_lvm(self): """ Test grains._linux_lvm, normal return Should return a populated dictionary """ vgs_out = " vg00\n vg01" lvs_out_vg00 = " root\n swap\n tmp \n usr \n var" lvs_out_vg01 = " opt \n" cmd_out = MagicMock( autospec=True, side_effect=[vgs_out, lvs_out_vg00, lvs_out_vg01] ) patch_which = patch( "salt.utils.path.which", autospec=True, return_value="/usr/sbin/lvm" ) patch_cmd_lvm = patch.dict(lvm.__salt__, {"cmd.run": cmd_out}) with patch_which, patch_cmd_lvm: ret = lvm._linux_lvm() assert ret == { "lvm": {"vg00": ["root", "swap", "tmp", "usr", "var"], "vg01": ["opt"]} }, ret def test__linux_lvm_no_lvm(self): """ Test grains._linux_lvm, no lvm installed Should return nothing """ vgs_out = " vg00\n vg01" lvs_out_vg00 = " root\n swap\n tmp \n usr \n var" lvs_out_vg01 = " opt \n" cmd_out = MagicMock( autospec=True, side_effect=[vgs_out, lvs_out_vg00, lvs_out_vg01] ) patch_which = patch("salt.utils.path.which", autospec=True, return_value="") patch_cmd_lvm = patch.dict(lvm.__salt__, {"cmd.run": cmd_out}) with patch_which, patch_cmd_lvm: ret = lvm._linux_lvm() assert ret is None, ret def test__linux_lvm_no_logical_volumes(self): """ Test grains._linux_lvm, lvm is installed but no volumes Should return a dictionary only with the header """ vgs_out = "" cmd_out = MagicMock(autospec=True, side_effect=[vgs_out]) patch_which = patch( "salt.utils.path.which", autospec=True, return_value="/usr/sbin/lvm" ) patch_cmd_lvm = patch.dict(lvm.__salt__, {"cmd.run": cmd_out}) with patch_which, patch_cmd_lvm: ret = lvm._linux_lvm() assert ret == {"lvm": {}}, ret def test__aix_lvm(self): """ Test grains._aix_lvm, normal return Should return a populated dictionary """ lsvg_out = "rootvg\nothervg" lsvg_out_rootvg = ( "rootvg:\nLV NAME TYPE LPs PPs PVs LV STATE " " MOUNT POINT\nhd5 boot 1 1 1 " " closed/syncd N/A\nhd6 paging 32 32 1 " " open/syncd N/A\nhd8 jfs2log 1 1 1 " " open/syncd N/A\nhd4 jfs2 32 32 1 " " open/syncd /\nhd2 jfs2 16 16 1 " " open/syncd /usr\nhd9var jfs2 32 32 1 " " open/syncd /var\nhd3 jfs2 32 32 1 " " open/syncd /tmp\nhd1 jfs2 16 16 1 " " open/syncd /home\nhd10opt jfs2 16 16 1 " " open/syncd /opt" ) lsvg_out_othervg = ( "othervg:\nLV NAME TYPE LPs PPs PVs LV STATE " " MOUNT POINT\nloglv01 jfs2log 1 1 1 " " open/syncd N/A\ndatalv jfs2 16 16 1 " " open/syncd /data" ) cmd_out = MagicMock( autospec=True, side_effect=[lsvg_out, lsvg_out_rootvg, lsvg_out_othervg] ) patch_which = patch( "salt.utils.path.which", autospec=True, return_value="/usr/sbin/lsvg" ) patch_cmd_lvm = patch.dict(lvm.__salt__, {"cmd.run": cmd_out}) with patch_which, patch_cmd_lvm: ret = lvm._aix_lvm() assert ret == { "lvm": { "rootvg": [ "hd5", "hd6", "hd8", "hd4", "hd2", "hd9var", "hd3", "hd1", "hd10opt", ], "othervg": ["loglv01", "datalv"], } }, ret
2,805
443
#pragma once

namespace inexor::vulkan_renderer::wrapper {

/// @brief A small helper function that returns Vulkan create infos with sType already set
/// @code{.cpp}
/// auto render_pass_ci = make_info<VkRenderPassCreateInfo>();
/// @endcode
/// @note Also zeros the returned struct
template <typename T>
[[nodiscard]] T make_info();

} // namespace inexor::vulkan_renderer::wrapper
122
369
<filename>cdap-api/src/main/java/io/cdap/cdap/api/app/ApplicationUpdateContext.java /* * Copyright © 2020 Cask Data, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package io.cdap.cdap.api.app; import io.cdap.cdap.api.Config; import io.cdap.cdap.api.artifact.ArtifactId; import io.cdap.cdap.api.artifact.ArtifactScope; import io.cdap.cdap.api.artifact.ArtifactVersionRange; import java.lang.reflect.Type; import java.util.List; import java.util.Set; import javax.annotation.Nullable; /** * Context for updating Application configs. Provides helper methods for application to support operations like config * upgrade. */ public interface ApplicationUpdateContext { /** * @return All update actions application should perform on the config. */ List<ApplicationConfigUpdateAction> getUpdateActions(); /** * Get the old config as an object of the given type. The platform would perform the json deserialization based on * the provided type. This is for the case where an application has the same/compatible/old config class. Application * should decide on how they want to convert config from old to current type. * * @param configType type of the config platform should deserialize to. * @return application config serialized to an object of given configType. */ <C extends Config> C getConfig(Type configType); /** * Get the application configuration as json string. */ String getConfigAsString(); /** * Returns list of available plugin artifacts based on given parameters. * * @param pluginType the plugin type. * @param pluginName the plugin name. * @param pluginRange the range of the version candidate plugins should be in. * @return artifact list of plugins which matches with given parameters, sorted in ascending order. * Returns empty list if no artifact for the plugin found. */ default List<ArtifactId> getPluginArtifacts(String pluginType, String pluginName, @Nullable ArtifactVersionRange pluginRange) throws Exception { return getPluginArtifacts(pluginType, pluginName, pluginRange, Integer.MAX_VALUE); } /** * Returns list of available plugin artifacts based on given parameters. * * @param pluginType the plugin type. * @param pluginName the plugin name. * @param pluginRange the range of the version candidate plugins should be in. * @param limit number of results to return at max, if null, default will be INT_MAX. * @return artifact list of plugins which matches with given parameters, sorted in ascending order. * Returns empty list if no artifact for the plugin found. */ List<ArtifactId> getPluginArtifacts(String pluginType, String pluginName, @Nullable ArtifactVersionRange pluginRange, int limit) throws Exception; }
983
1,178
<filename>imagepy/menus/Plugins/StackReg/__init__.py catlog = ['stackreg_plgs', '-', 'StackReg License']
39
627
<filename>board/hx20/board.h /* Copyright 2017 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ /* * Microchip Evaluation Board (EVB) with * MEC1701H 144-pin processor card. * EVB connected to Intel SKL RVP3 configured * for eSPI with Kabylake silicon. */ #ifndef __CROS_EC_BOARD_H #define __CROS_EC_BOARD_H /* * Initial board bringup and prevent power button task from * generating event to exit G3 state. * * #define CONFIG_BRINGUP */ /* Keyboard features */ #define CONFIG_KEYBOARD_CUSTOMIZATION /* #define CONFIG_PWM_KBLIGHT */ /* #define CONFIG_KEYBOARD_DEBUG */ /* #define CONFIG_CUSTOMER_PORT80 */ /* * Mouse emulation */ #define CONFIG_8042_AUX #define CONFIG_CUSTOMER_PORT80 #define CONFIG_IGNORED_BTN_SCANCODE /* * Combination key */ #define CONFIG_KEYBOARD_CUSTOMIZATION_COMBINATION_KEY /* The Fn key function not ready yet undefined it until the function finish */ #define CONFIG_KEYBOARD_SCANCODE_CALLBACK #define CONFIG_KEYBOARD_BACKLIGHT /*Assume we should move to CONFIG_PWM_KBLIGHT later*/ /* * Debug on EVB with CONFIG_CHIPSET_DEBUG * Keep WDG disabled and JTAG enabled. * CONFIG_BOARD_PRE_INIT enables JTAG early */ /* #define CONFIG_CHIPSET_DEBUG */ #define CONFIG_BOARD_PRE_INIT /* Add commands to read/write ec serial data structure */ #ifdef CONFIG_CHIPSET_DEBUG #define CONFIG_SYSTEMSERIAL_DEBUG #endif /* * DEBUG: Add CRC32 in last 4 bytes of EC_RO/RW binaries * in SPI. LFW will use DMA CRC32 HW to check data integrity. * #define CONFIG_MCHP_LFW_DEBUG */ /* * Override Boot-ROM JTAG mode * 0x01 = 4-pin standard JTAG * 0x03 = ARM 2-pin SWD + 1-pin SWV * 0x05 = ARM 2-pin SWD no SWV */ #define CONFIG_MCHP_JTAG_MODE 0x03 /* * Enable Trace FIFO Debug port * When this is undefined all TRACEn() and tracen() * macros are defined as blank. * Uncomment this define to enable these messages. * Only enable if GPIO's 0171 & 0171 are available therefore * define this at the board level. */ /* #define CONFIG_MCHP_TFDP */ /* * Enable MCHP specific GPIO EC UART commands * for debug. */ #ifdef CONFIG_CHIPSET_DEBUG #define CONFIG_MEC_GPIO_EC_CMDS #endif /* * Enable CPRINT in chip eSPI module * and EC UART test command. */ /* #define CONFIG_MCHP_ESPI_DEBUG */ /* * Enable board specific ISR on ALL_SYS_PWRGD signal. * Requires for handling Kabylake/Skylake RVP3 board's * ALL_SYS_PWRGD signal. */ /* #define CONFIG_BOARD_EC_HANDLES_ALL_SYS_PWRGD */ /* * EVB eSPI test mode (no eSPI master connected) */ /* * #define EVB_NO_ESPI_TEST_MODE */ /* * DEBUG * Disable ARM Cortex-M4 write buffer so * exceptions become synchronous. * * #define CONFIG_DEBUG_DISABLE_WRITE_BUFFER */ /* New eSPI slave configuration items */ /* * Maximum clock frequence eSPI EC slave advertises * Values in MHz are 20, 25, 33, 50, and 66 */ /* KBL + EVB fly-wire hook up only supports 20MHz */ #define CONFIG_HOSTCMD_ESPI_EC_MAX_FREQ 20 /* * EC eSPI slave advertises IO lanes * 0 = Single * 1 = Single and Dual * 2 = Single and Quad * 3 = Single, Dual, and Quad */ /* KBL + EVB fly-wire hook up only support Single mode */ #define CONFIG_HOSTCMD_ESPI_EC_MODE 0 /* * Bit map of eSPI channels EC advertises * bit[0] = 1 Peripheral channel * bit[1] = 1 Virtual Wire channel * bit[2] = 1 OOB channel * bit[3] = 1 Flash channel */ #define CONFIG_HOSTCMD_ESPI_EC_CHAN_BITMAP 0x0F #define CONFIG_MCHP_ESPI_VW_SAVE_ON_SLEEP /* * Allow dangerous commands. * TODO(shawnn): Remove this config before production. 
*/ #define CONFIG_SYSTEM_UNLOCKED /* Optional features */ #define CONFIG_BATTERY_CUT_OFF #define CONFIG_BATTERY_SMART #define CONFIG_BATTERY_PRESENT_CUSTOM #define CONFIG_BOARD_VERSION_CUSTOM #define CONFIG_CHARGE_MANAGER /* #define CONFIG_CHARGE_RAMP_SW */ #undef CONFIG_HOSTCMD_LOCATE_CHIP #define CONFIG_CHARGER #define CONFIG_USB_PD_PORT_MAX_COUNT 4 #define CONFIG_USB_PD_REV30 #define CONFIG_USB_PD_EXTENDED_MESSAGES #define CONFIG_CHARGER_DISCHARGE_ON_AC /* Charger parameter */ #define CONFIG_CHARGER_ISL9241 #define CONFIG_CHARGER_SENSE_RESISTOR_AC 20 /* BOARD_RS1 */ #define CONFIG_CHARGER_SENSE_RESISTOR 10 /* BOARD_RS2 */ #define CONFIG_CHARGER_INPUT_CURRENT 500 /* Minimum for USB - will negociate higher */ #define CONFIG_CHARGER_MIN_POWER_MW_FOR_POWER_ON 55000 /* only if battery is not present*/ #define CONFIG_CHARGER_CUSTOMER_SETTING #define CONFIG_CMD_CHARGER_DUMP /* * MCHP disable this for Kabylake eSPI bring up * #define CONFIG_CHARGER_MIN_BAT_PCT_FOR_POWER_ON 1 */ /* #define CONFIG_CHIPSET_SKYLAKE */ /* #define CONFIG_CHIPSET_TIGERLAKE */ #define CONFIG_CHIPSET_RESET_HOOK #define CONFIG_HOSTCMD_ESPI #define CONFIG_HOSTCMD_ESPI_VW_SLP_S3 #define CONFIG_HOSTCMD_ESPI_VW_SLP_S4 #define CONFIG_HOSTCMD_ESPI_VW_SLP_S5 #define CONFIG_POWER_S0IX #define CONFIG_POWER_TRACK_HOST_SLEEP_STATE #define CONFIG_CLOCK_CRYSTAL #define CONFIG_EXTPOWER_GPIO /* #define CONFIG_HOSTCMD_PD */ /* #define CONFIG_HOSTCMD_PD_PANIC */ #define CONFIG_I2C #define CONFIG_I2C_MASTER #define CONFIG_KEYBOARD_BOARD_CONFIG #define CONFIG_KEYBOARD_PROTOCOL_8042 #define CONFIG_SIMULATE_KEYCODE /* i2c hid interface for HID mediakeys (brightness, airplane mode) */ #define CONFIG_I2C_SLAVE #define CONFIG_I2C_HID_MEDIAKEYS /* Leds configuration */ #define CONFIG_LED_COMMON #define CONFIG_CMD_LEDTEST #define CONFIG_LED_PWM_COUNT 3 #define CONFIG_LED_PWM_TASK_DISABLED #define CONFIG_CAPSLED_SUPPORT #ifdef CONFIG_ACCEL_KX022 #define CONFIG_LID_ANGLE #define CONFIG_LID_ANGLE_SENSOR_LID LID_ACCEL #define CONFIG_LID_ANGLE_SENSOR_BASE BASE_ACCEL #endif /* CONFIG_ACCEL_KX022 */ #define CONFIG_LID_SWITCH #define LID_DEBOUNCE_US (200 * MSEC) #define CONFIG_POWER_BUTTON_IGNORE_LID #define CONFIG_CPU_PROCHOT_ACTIVE_LOW /* * Enable MCHP Low Power Idle support * and API to power down pins * #define CONFIG_LOW_POWER_IDLE */ /* #define CONFIG_GPIO_POWER_DOWN */ /* * Turn off pin modules during deep sleep. * Requires CONFIG_GPIO_POWER_DOWN */ /* #define CONFIG_MCHP_DEEP_SLP_GPIO_PWR_DOWN */ /* * DEBUG: Configure MEC17xx GPIO060 as 48MHZ_OUT to * verify & debug clock is shutdown in heavy sleep. */ /*#define CONFIG_MCHP_48MHZ_OUT*/ /* * DEBUG: Save and print out PCR sleep enables, * clock required, and interrupt aggregator result * registers. 
*/ #define CONFIG_MCHP_DEEP_SLP_DEBUG #ifdef CONFIG_CHIPSET_DEBUG /* if we are built with debug mode flags the chip * will never halt, so never properly sleep * otherwise the ec will stop responding to commands */ #undef CONFIG_HIBERNATE_DELAY_SEC #define CONFIG_HIBERNATE_DELAY_SEC (60*60*24*365) #endif /* CONFIG_CHIPSET_DEBUG */ /* * MCHP debug EC code turn off GCC link-time-optimization * #define CONFIG_LTO */ #define CONFIG_POWER_BUTTON #define CONFIG_POWER_BUTTON_CUSTOM #define CONFIG_POWER_COMMON #define CONFIG_POWER_SIGNAL_INTERRUPT_STORM_DETECT_THRESHOLD 30 /* * Use for customer boot from G3 */ #define CONFIG_CUSTOM_BOOT_G3 /* * MEC1701H SCI is virtual wire on eSPI *#define CONFIG_SCI_GPIO GPIO_PCH_SCI_L */ #if 0 /* MCHP EVB + KBL/SKL RVP3 no USB charging hardware */ #define CONFIG_USB_CHARGER #define CONFIG_USB_MUX_PI3USB30532 #define CONFIG_USB_MUX_PS8740 #define CONFIG_USB_POWER_DELIVERY #define CONFIG_USB_PD_TCPMV1 #define CONFIG_USB_PD_ALT_MODE #define CONFIG_USB_PD_ALT_MODE_DFP #define CONFIG_USB_PD_DP_HPD_GPIO #define CONFIG_USB_PD_DUAL_ROLE #define CONFIG_USB_PD_LOGGING #define CONFIG_USB_PD_TCPM_TCPCI #endif /* * #define CONFIG_USB_PD_TCPC * #define CONFIG_USB_PD_TCPM_STUB */ #if 0 #define CONFIG_USB_PD_TRY_SRC #define CONFIG_USB_PD_VBUS_DETECT_GPIO #define CONFIG_BC12_DETECT_PI3USB9281 #define CONFIG_BC12_DETECT_PI3USB9281_CHIP_COUNT 2 #define CONFIG_USBC_SS_MUX #define CONFIG_USBC_SS_MUX_DFP_ONLY #define CONFIG_USBC_VCONN #define CONFIG_USBC_VCONN_SWAP #endif /* TODO FRAMEWORK #define CONFIG_VBOOT_HASH */ /* * MEC1701H loads firmware using QMSPI controller * CONFIG_SPI_FLASH_PORT is the index into * spi_devices[] in board.c */ #define CONFIG_SPI_FLASH_PORT 0 #define CONFIG_SPI_FLASH /* * MB use W25Q80 SPI ROM * Size : 1M */ #define CONFIG_FLASH_SIZE 0x100000 #define CONFIG_SPI_FLASH_W25Q80 /* * Enable extra SPI flash and generic SPI * commands via EC UART */ #define CONFIG_CMD_SPI_FLASH #define CONFIG_CMD_SPI_XFER /* common software SHA256 required by vboot and rollback */ #define CONFIG_SHA256 /* Enable EMI0 Region 1 */ #define CONFIG_EMI_REGION1 #ifdef CONFIG_EMI_REGION1 #define EC_EMEMAP_ER1_POWER_STATE 0x01 /* Power state from host*/ #define EC_MEMMAP_ER1_BATT_AVER_TEMP 0x03 /* Battery Temp */ #define EC_MEMMAP_ER1_BATT_PERCENTAGE 0x06 /* Battery Percentage */ #define EC_MEMMAP_ER1_BATT_STATUS 0x07 /* Battery information */ #define EC_MEMMAP_ER1_BATT_MANUF_DAY 0x44 /* Manufacturer date - day */ #define EC_MEMMAP_ER1_BATT_MANUF_MONTH 0x45 /* Manufacturer date - month */ #define EC_MEMMAP_ER1_BATT_MANUF_YEAR 0x46 /* Manufacturer date - year */ #define EC_BATT_FLAG_FULL BIT(0) /* Full Charged */ #define EC_BATT_TYPE BIT(1) /* (0: NiMh,1: LION) */ #define EC_BATT_MODE BIT(2) /* (0=mW, 1=mA) */ #define EC_PS_ENTER_S3 BIT(0) #define EC_PS_RESUME_S3 BIT(1) #define EC_PS_ENTER_S4 BIT(2) #define EC_PS_RESUME_S4 BIT(3) #define EC_PS_ENTER_S5 BIT(4) #define EC_PS_RESUME_S5 BIT(5) #define EC_PS_ENTER_S0ix BIT(6) #define EC_PS_RESUME_S0ix BIT(7) #endif /* * Battery Protect */ #define CONFIG_BATTERY_REQUESTS_NIL_WHEN_DEAD /* EC's thresholds. 3%: boot, 2%: no boot. Required for soft sync. */ #define CONFIG_CHARGER_MIN_BAT_PCT_FOR_POWER_ON 3 #define CONFIG_BATTERY_CHECK_CHARGE_TEMP_LIMITS #undef CONFIG_BATTERY_CRITICAL_SHUTDOWN_TIMEOUT #define CONFIG_BATTERY_CRITICAL_SHUTDOWN_TIMEOUT 5 #define CHARGE_MAX_SLEEP_USEC (100 * MSEC) /* * Enable MCHP SHA256 hardware accelerator module. 
* API is same as software SHA256 but prefixed with "chip_" * #define CONFIG_SHA256_HW */ /* enable console command to test HW Hash engine * #define CONFIG_CMD_SHA256_TEST */ /* Support PWM */ #define CONFIG_PWM /* Support FAN */ #define CONFIG_FANS 1 #undef CONFIG_FAN_INIT_SPEED #define CONFIG_FAN_INIT_SPEED 15 #define FAN_HARDARE_MAX 7100 #define CONFIG_TEMP_SENSOR #define CONFIG_DPTF #define CONFIG_TEMP_SENSOR_F75303 #define F75303_I2C_ADDR_FLAGS 0x4D #define CONFIG_CHIPSET_CAN_THROTTLE /* Enable EC_PROCHOT_L control */ #define CONFIG_THROTTLE_AP /* Factory mode support */ #define CONFIG_FACTORY_SUPPORT #define CONFIG_PECI #define CONFIG_PECI_COMMON #define CONFIG_PECI_TJMAX 100 /* SPI Accelerometer * CONFIG_SPI_FLASH_PORT is the index into * spi_devices[] in board.c */ /*#define CONFIG_SPI_ACCEL_PORT 1*/ /* * Enable EC UART commands to read/write * motion sensor. */ /*#define CONFIG_CMD_ACCELS*/ /* * Enable 1 slot of secure temporary storage to support * suspend/resume with read/write memory training. */ #define CONFIG_VSTORE #define CONFIG_VSTORE_SLOT_COUNT 1 #define CONFIG_WATCHDOG_HELP #if 0 /* TODO - No wireless on EVB */ #define CONFIG_WIRELESS #define CONFIG_WIRELESS_SUSPEND \ (EC_WIRELESS_SWITCH_WLAN | EC_WIRELESS_SWITCH_WLAN_POWER) /* Wireless signals */ #define WIRELESS_GPIO_WLAN GPIO_WLAN_OFF_L #define WIRELESS_GPIO_WLAN_POWER GPIO_PP3300_WLAN_EN #endif /* * Macros for GPIO signals used in common code that don't match the * schematic names. Signal names in gpio.inc match the schematic and are * then redefined here to so it's more clear which signal is being used for * which purpose. */ #define CONFIG_WP_ACTIVE_HIGH /* LED signals */ /* #define GPIO_BAT_LED_RED GPIO_BATT_LOW_LED_L #define GPIO_BAT_LED_GREEN GPIO_BATT_CHG_LED_L */ /* Power signals */ #define GPIO_AC_PRESENT GPIO_ADP_IN #define GPIO_POWER_BUTTON_L GPIO_ON_OFF_FP_L #define GPIO_PCH_SLP_SUS_L GPIO_SLP_SUS_L #define GPIO_PCH_SLP_S3_L GPIO_PM_SLP_S3_L #define GPIO_PCH_SLP_S4_L GPIO_PM_SLP_S4_L #define GPIO_PCH_PWRBTN_L GPIO_PBTN_OUT_L #define GPIO_PCH_ACOK GPIO_AC_PRESENT_OUT #define GPIO_PCH_RSMRST_L GPIO_EC_RSMRST_L #define GPIO_CPU_PROCHOT GPIO_VCOUT1_PROCHOT_L #define GPIO_LID_OPEN GPIO_LID_SW_L #define GPIO_ENABLE_BACKLIGHT GPIO_EC_BKOFF_L /* SMBus signals */ #define GPIO_I2C_0_SDA GPIO_EC_SMB_SDA0 #define GPIO_I2C_0_SCL GPIO_EC_SMB_CLK0 #define GPIO_I2C_1_SDA GPIO_EC_SMB_SDA1 #define GPIO_I2C_1_SCL GPIO_EC_SMB_CLK1 #define GPIO_I2C_2_SDA GPIO_EC_I2C_3_SDA #define GPIO_I2C_2_SCL GPIO_EC_I2C_3_SCL #define GPIO_I2C_3_SDA GPIO_EC_SMB_SDA3 #define GPIO_I2C_3_SCL GPIO_EC_SMB_CLK3 #define GPIO_I2C_6_SDA GPIO_EC_I2C06_PD_SDA #define GPIO_I2C_6_SCL GPIO_EC_I2C06_PD_CLK /* EVT - DVT cover */ #define GPIO_EC_KBL_PWR_EN GPIO_TYPEC_G_DRV2_EN /* I2C ports */ #define I2C_CONTROLLER_COUNT 5 #define I2C_SLAVE_CONTROLLER_COUNT 1 #define I2C_PORT_COUNT 5 /* * Map I2C Ports to Controllers for this board. 
* * I2C Controller 0 ---- Port 0 -> PMIC, USB Charger 2 * |-- Port 2 -> USB Charger 1, USB Mux * * I2C Controller 1 ---- Port 3 -> PD MCU, TCPC * I2C Controller 2 ---- Port 4 -> ALS, Accel * I2C Controller 3 ---- Port 5 -> Battery, Charger * * All other ports set to 0xff (not used) */ #define I2C_PORT_TOUCHPAD MCHP_I2C_PORT2 #define I2C_PORT_PD_MCU MCHP_I2C_PORT6 #define I2C_PORT_TCPC MCHP_I2C_PORT3 #define I2C_PORT_BATTERY MCHP_I2C_PORT1 #define I2C_PORT_CHARGER MCHP_I2C_PORT1 #define I2C_PORT_THERMAL MCHP_I2C_PORT3 /* GPIO for power signal */ #ifdef CONFIG_HOSTCMD_ESPI_VW_SLP_S3 #define SLP_S3_SIGNAL_L VW_SLP_S3_L #else #define SLP_S3_SIGNAL_L GPIO_PCH_SLP_S3_L #endif #ifdef CONFIG_HOSTCMD_ESPI_VW_SLP_S4 #define SLP_S4_SIGNAL_L VW_SLP_S4_L #else #define SLP_S4_SIGNAL_L GPIO_PCH_SLP_S4_L #endif #ifdef CONFIG_HOSTCMD_ESPI_VW_SLP_S5 #define SLP_S5_SIGNAL_L VW_SLP_S5_L #else #define SLP_S5_SIGNAL_L GPIO_PCH_SLP_S5_L #endif #define IN_PGOOD_PWR_VR POWER_SIGNAL_MASK(X86_VR_PWRGD) #define IN_PGOOD_PWR_3V5V POWER_SIGNAL_MASK(X86_PWR_3V5V_PG) #define IN_PGOOD_VCCIN_AUX_VR POWER_SIGNAL_MASK(X86_VCCIN_AUX_VR_PG) #define IN_PCH_SLP_S3_DEASSERTED POWER_SIGNAL_MASK(X86_SLP_S3_DEASSERTED) #define IN_PCH_SLP_S4_DEASSERTED POWER_SIGNAL_MASK(X86_SLP_S4_DEASSERTED) #define IN_PCH_SLP_S5_DEASSERTED POWER_SIGNAL_MASK(X86_SLP_S5_DEASSERTED) #define IN_PCH_SLP_SUS_DEASSERTED POWER_SIGNAL_MASK(X86_SLP_SUS_DEASSERTED) #define IN_ALL_PM_SLP_DEASSERTED (IN_PCH_SLP_S3_DEASSERTED | \ IN_PCH_SLP_S4_DEASSERTED | \ IN_PCH_SLP_SUS_DEASSERTED) /* Thermal sensors read through PMIC ADC interface */ #define SCI_HOST_EVENT_MASK \ (EC_HOST_EVENT_MASK(EC_HOST_EVENT_LID_CLOSED) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_LID_OPEN) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_POWER_BUTTON) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_AC_CONNECTED) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_AC_DISCONNECTED) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_BATTERY_LOW) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_BATTERY_CRITICAL) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_BATTERY) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_BATTERY_SHUTDOWN) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_HANG_DETECT) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_HANG_REBOOT) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_UCSI) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_BATT_BTP)) #define SCI_HOST_WAKE_EVENT_MASK \ (EC_HOST_EVENT_MASK(EC_HOST_EVENT_LID_CLOSED) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_LID_OPEN) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_POWER_BUTTON) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_AC_CONNECTED) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_AC_DISCONNECTED) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_BATT_BTP) | \ EC_HOST_EVENT_MASK(EC_HOST_EVENT_KEY_PRESSED)) /* Ambient Light Sensor address */ #define OPT3001_I2C_ADDR_FLAGS OPT3001_I2C_ADDR1_FLAGS /* Modules we want to exclude */ #undef CONFIG_CMD_HASH #undef CONFIG_CMD_TEMP_SENSOR #undef CONFIG_CMD_TIMERINFO /* #undef CONFIG_CONSOLE_CMDHELP */ #ifndef __ASSEMBLER__ #include "gpio_signal.h" #include "registers.h" /* ADC signal */ enum adc_channel { ADC_I_ADP, ADC_I_SYS, ADC_VCIN1_BATT_TEMP, ADC_TP_BOARD_ID, ADC_AD_BID, ADC_AUDIO_BOARD_ID, ADC_PROCHOT_L, /* Number of ADC channels */ ADC_CH_COUNT }; enum hx20_board_version { BOARD_VERSION_UNKNOWN = -1, BOARD_VERSION_0, BOARD_VERSION_1, BOARD_VERSION_2, BOARD_VERSION_3, BOARD_VERSION_4, BOARD_VERSION_5, BOARD_VERSION_6, BOARD_VERSION_7, BOARD_VERSION_8, BOARD_VERSION_9, BOARD_VERSION_10, BOARD_VERSION_11, BOARD_VERSION_12, BOARD_VERSION_13, BOARD_VERSION_14, BOARD_VERSION_15, BOARD_VERSION_COUNT, }; enum pwm_channel { PWM_CH_FAN, PWM_CH_KBL, 
PWM_CH_DB0_LED_RED, PWM_CH_DB0_LED_GREEN, PWM_CH_DB0_LED_BLUE, PWM_CH_DB1_LED_RED, PWM_CH_DB1_LED_GREEN, PWM_CH_DB1_LED_BLUE, PWM_CH_FPR_LED_RED_EVT, PWM_CH_FPR_LED_GREEN_EVT, PWM_CH_FPR_LED_RED, PWM_CH_FPR_LED_GREEN, PWM_CH_FPR_LED_BLUE, PWM_CH_COUNT }; enum fan_channel { FAN_CH_0 = 0, /* Number of FAN channels */ FAN_CH_COUNT, }; enum temp_sensor_id { TEMP_SENSOR_LOCAL, TEMP_SENSOR_CPU, TEMP_SENSOR_DDR, TEMP_SENSOR_BATTERY, #ifdef CONFIG_PECI TEMP_SENSOR_PECI, #endif /* CONFIG_PECI */ TEMP_SENSOR_COUNT }; /* Power signals list */ enum power_signal { #ifdef CONFIG_POWER_S0IX X86_SLP_S0_DEASSERTED, #endif X86_SLP_S3_DEASSERTED, X86_SLP_S4_DEASSERTED, X86_SLP_S5_DEASSERTED, X86_SLP_SUS_DEASSERTED, X86_PWR_3V5V_PG, X86_VCCIN_AUX_VR_PG, X86_VR_PWRGD, /* Number of X86 signals */ POWER_SIGNAL_COUNT }; enum sensor_id { BASE_ACCEL, BASE_GYRO, #ifdef CONFIG_ACCEL_KX022 LID_ACCEL, #endif SENSOR_COUNT, }; /* Light sensors */ enum als_id { ALS_OPT3001 = 0, ALS_COUNT }; /* TODO: determine the following board specific type-C power constants */ /* * delay to turn on the power supply max is ~16ms. * delay to turn off the power supply max is about ~180ms. */ #define PD_POWER_SUPPLY_TURN_ON_DELAY 30000 /* us */ #define PD_POWER_SUPPLY_TURN_OFF_DELAY 250000 /* us */ /* delay to turn on/off vconn */ #define PD_VCONN_SWAP_DELAY 5000 /* us */ /* Define typical operating power and max power */ #define PD_OPERATING_POWER_MW 15000 #define PD_MAX_POWER_MW 60000 #define PD_MAX_CURRENT_MA 5000 #define PD_MAX_VOLTAGE_MV 20000 /* #define PD_VERBOSE_LOGGING */ #undef CONFIG_UART_TX_BUF_SIZE #define CONFIG_UART_TX_BUF_SIZE 2048 /* * include TFDP macros from mchp chip level */ #include "tfdp_chip.h" /* Map I2C port to controller */ int board_i2c_p2c(int port); /* Return the two slave addresses the specified * controller will respond to when controller * is acting as a slave. * b[6:0] = b[7:1] of I2C address 1 * b[14:8] = b[7:1] of I2C address 2 */ uint16_t board_i2c_slave_addrs(int controller); /* Reset PD MCU */ void board_reset_pd_mcu(void); /* P sensor */ void psensor_interrupt(enum gpio_signal signal); /* SOC */ void soc_signal_interrupt(enum gpio_signal signal); /* chassis function */ void chassis_control_interrupt(enum gpio_signal signal); /* Touchpad process */ void touchpad_interrupt(enum gpio_signal signal); void touchpad_i2c_interrupt(enum gpio_signal signal); /* Mainboard power button handler*/ void mainboard_power_button_interrupt(enum gpio_signal signal); /* fingerprint power button handler*/ void fingerprint_power_button_interrupt(enum gpio_signal signal); void board_power_off(void); void cancel_board_power_off(void); /* power sequence */ int board_chipset_power_on(void); int board_get_version(void); void boot_ap_on_g3(void); void power_button_enable_led(int enable); void s5_power_up_control(int control); int pos_get_state(void); void me_gpio_change(uint32_t flags); int get_hardware_id(enum adc_channel channel); int ac_boot_status(void); void update_me_change(int change); int poweron_reason_powerbtn(void); #ifdef CONFIG_LOW_POWER_IDLE void board_prepare_for_deep_sleep(void); void board_resume_from_deep_sleep(void); #endif #endif /* !__ASSEMBLER__ */ #endif /* __CROS_EC_BOARD_H */
8,614
1,238
<reponame>ishine/SMRCToolkit # coding: utf-8 import tensorflow as tf from sogou_mrc.nn.ops import dropout, add_seq_mask from collections import defaultdict import tensorflow_hub as hub from sogou_mrc.libraries import modeling import os VERY_NEGATIVE_NUMBER = -1e29 class Layer(object): _name_dict = defaultdict(int) def __init__(self, name=None): if name is None: name = "layer" self.name = name + "_" + str(self._name_dict[name] + 1) self._name_dict[name] += 1 class Highway(Layer): def __init__(self, affine_activation=tf.nn.relu, trans_gate_activation=tf.nn.sigmoid, hidden_units=0, keep_prob=1.0, name="highway"): super(Highway, self).__init__(name) self.affine_activation = affine_activation self.trans_gate_activation = trans_gate_activation self.affine_layer = None self.trans_gate_layer = None self.dropout = Dropout(keep_prob) if hidden_units > 0: self.affine_layer = tf.keras.layers.Dense(hidden_units, activation=self.affine_activation) self.trans_gate_layer = tf.keras.layers.Dense(hidden_units, activation=self.trans_gate_activation) def __call__(self, x, training=True): if self.trans_gate_layer is None: hidden_units = x.shape.as_list()[-1] self.affine_layer = tf.keras.layers.Dense(hidden_units, activation=self.affine_activation) self.trans_gate_layer = tf.keras.layers.Dense(hidden_units, activation=self.trans_gate_activation) gate = self.trans_gate_layer(x) trans = self.dropout(self.affine_layer(x), training=training) return gate * trans + (1. - gate) * x class Dropout(Layer): def __init__(self, keep_prob=1.0, name="dropout"): super(Dropout, self).__init__(name) self.keep_prob = keep_prob def __call__(self, x, training): return dropout(x, self.keep_prob, training) class VariationalDropout(Layer): def __init__(self, keep_prob=1.0, name="variational_dropout"): super(VariationalDropout, self).__init__(name) self.keep_prob = keep_prob def __call__(self, x, training): input_shape = tf.shape(x) return dropout(x, self.keep_prob, training, noise_shape=[input_shape[0], 1, input_shape[2]]) class ReduceSequence(Layer): def __init__(self, reduce="mean", name="reduce_sequence"): super(ReduceSequence, self).__init__(name) self.reduce = reduce def __call__(self, x, mask=None): if mask is not None: valid_mask = tf.expand_dims( tf.cast(tf.sequence_mask(mask, tf.shape(x)[1]), tf.float32), 2) if self.reduce == "max": if mask is not None: x += (1.0 - valid_mask) * tf.float32.min return tf.reduce_max(x, axis=1) elif self.reduce == "mean": if mask is not None: x *= valid_mask return tf.reduce_sum(x, axis=1) / (tf.cast(tf.expand_dims(mask, 1), tf.float32) + 1e-16) else: return tf.reduce_mean(x, axis=1) elif self.reduce == "sum": if valid_mask is not None: x *= valid_mask return tf.reduce_sum(x, axis=1) else: raise ValueError() class Conv1DAndMaxPooling(Layer): """ Conv1D for 3D or 4D input tensor, the second-to-last dimension is regarded as timestep """ def __init__(self, filters, kernel_size, strides=1, padding='valid', activation=tf.nn.relu, name="conv1d_and_max_pooling"): super(Conv1DAndMaxPooling, self).__init__(name) self.conv_layer = tf.keras.layers.Conv1D(filters, kernel_size, strides=strides, padding=padding, activation=activation) def __call__(self, x, seq_len=None): input_shape = x.shape.as_list() batch_size = None if len(input_shape) == 4: batch_size = tf.shape(x)[0] seq_length = tf.shape(x)[1] x = tf.reshape(x, (-1, tf.shape(x)[-2], input_shape[-1])) x = self.conv_layer(x) if seq_len is not None: hidden_units = x.shape.as_list()[-1] x = tf.reshape(x, (batch_size, seq_length, tf.shape(x)[1], 
hidden_units)) x = self.max_pooling(x, seq_len) else: x = tf.reduce_max(x, axis=1) x = tf.reshape(x, (batch_size, -1, x.shape.as_list()[-1])) elif len(input_shape) == 3: x = self.conv_layer(x) x = tf.reduce_max(x, axis=1) else: raise ValueError() return x def max_pooling(self, inputs, seq_len=None): rank = len(inputs.shape) - 2 if seq_len is not None: shape = tf.shape(inputs) mask = tf.sequence_mask(tf.reshape(seq_len, (-1,)), shape[-2]) mask = tf.cast(tf.reshape(mask, (shape[0], shape[1], shape[2], 1)), tf.float32) inputs = inputs * mask + (1 - mask) * VERY_NEGATIVE_NUMBER return tf.reduce_max(inputs, axis=rank) class MultiConv1DAndMaxPooling(Layer): def __init__(self, filters, kernel_sizes, strides=1, padding='valid', activation=None, name="multi_conv1d_and_max_pooling"): super(MultiConv1DAndMaxPooling, self).__init__(name) self.conv_layers = [Conv1DAndMaxPooling(filters, kernel_size, strides=strides, padding=padding, activation=activation, name="conv1d_and_max_pooling" + str(kernel_size)) for kernel_size in kernel_sizes] def __call__(self, x): return tf.concat([layer(x) for layer in self.conv_layers], axis=-1) class MultiLayerRNN(Layer): def __init__(self, layers=None, concat_layer_out=True, input_keep_prob=1.0, name='multi_layer_rnn'): super(MultiLayerRNN, self).__init__(name) self.concat_layer_output = concat_layer_out self.dropout = VariationalDropout(input_keep_prob) self.rnn_layers = layers def __call__(self, x, x_len, training): output = x outputs = [] for layer in self.rnn_layers: output, _ = layer(self.dropout(output, training), x_len) outputs.append(output) if self.concat_layer_output: return tf.concat(outputs, axis=-1) return outputs[-1] class MultiHeadAttention(Layer): def __init__(self, heads, units, attention_on_itself=True, name='encoder_block'): super(MultiHeadAttention, self).__init__(name) self.heads = heads self.units = units self.attention_on_itself = attention_on_itself # only workable when query==key self.dense_layers = [tf.keras.layers.Dense(units) for _ in range(3)] def __call__(self, query, key, value, mask=None): batch_size = tf.shape(query)[0] max_query_len = tf.shape(query)[1] max_key_len = tf.shape(key)[1] wq = tf.transpose( tf.reshape(self.dense_layers[0](query), [batch_size, max_query_len, self.heads, self.units // self.heads]), [2, 0, 1, 3]) # Head*B*QL*(U/Head) wk = tf.transpose( tf.reshape(self.dense_layers[1](key), [batch_size, max_key_len, self.heads, self.units // self.heads]), [2, 0, 1, 3]) # Head*B*KL*(U/Head) wv = tf.transpose( tf.reshape(self.dense_layers[2](value), [batch_size, max_key_len, self.heads, self.units // self.heads]), [2, 0, 1, 3]) # Head*B*KL*(U/Head) attention_score = tf.matmul(wq, wk, transpose_b=True) / tf.sqrt(float(self.units) / self.heads) # Head*B*QL*KL if query == key and not self.attention_on_itself: attention_score += tf.matrix_diag(tf.zeros(max_key_len) - 100.0) if mask is not None: attention_score += tf.expand_dims(mask, 1) similarity = tf.nn.softmax(attention_score, -1) # Head*B*QL*KL return tf.reshape(tf.transpose(tf.matmul(similarity, wv), [1, 2, 0, 3]), [batch_size, max_query_len, self.units]) # B*QL*U class EncoderBlock(Layer): def __init__(self, kernel_size, filters, conv_layers, heads, keep_prob=1.0, name='encoder_block'): super(EncoderBlock, self).__init__(name) self.kernel_size = kernel_size self.filters = filters self.convs = [tf.keras.layers.SeparableConv1D(filters, kernel_size, padding='same', activation=tf.nn.relu) for _ in range(conv_layers)] self.dense1 = tf.keras.layers.Dense(filters, 
activation=tf.nn.relu) self.dense2 = tf.keras.layers.Dense(filters) self.keep_prob = keep_prob self.multihead_attention = MultiHeadAttention(heads, filters) self.dropout = Dropout(self.keep_prob) def __call__(self, x, training, mask=None): for conv in self.convs: norm_x = tf.contrib.layers.layer_norm(x, begin_norm_axis=-1) x += self.dropout(conv(norm_x), training) norm_x = tf.contrib.layers.layer_norm(x, begin_norm_axis=-1) x += self.dropout(self.multihead_attention(norm_x, norm_x, norm_x, mask), training) norm_x = tf.contrib.layers.layer_norm(x, begin_norm_axis=-1) x += self.dropout(self.dense2(self.dropout(self.dense1(norm_x), training)), training) return x class ElmoEmbedding(Layer): def __init__(self, elmo_url='https://tfhub.dev/google/elmo/2', local_path=None, trainable=True, name='elmo_embedding'): super(ElmoEmbedding, self).__init__(name) self.module_path = elmo_url if local_path is None else local_path self.elmo = hub.Module(self.module_path, trainable=trainable) def __call__(self, tokens_input, tokens_length): embedding = self.elmo(inputs={"tokens": tokens_input, 'sequence_len': tokens_length}, signature="tokens", as_dict=True)["elmo"] return embedding class PartiallyTrainableEmbedding(Layer): """ Special embedding layer of which the top K embeddings are trainable and the rest is fixed. A technique used in DrQA and FusionNet. Note that the trainable K embeddings are the first K rows of the embedding matrix for convenience. """ def __init__(self, trainable_num=1000, pretrained_embedding=None, embedding_shape=None, init_scale=0.02, name="partially_trainable_embedding"): # If pretrained embedding is None, embedding_shape must be specified and # embedding matrix will be randomly initialized. super(PartiallyTrainableEmbedding, self).__init__(name) if pretrained_embedding is None and embedding_shape is None: raise ValueError("At least one of pretrained_embedding and embedding_shape must be specified!") input_shape = pretrained_embedding.shape if pretrained_embedding is not None else embedding_shape if not (0 < trainable_num < input_shape[0]): raise ValueError("trainable_num must be greater that 0 and less than vocabulary size!") with tf.variable_scope(self.name): trainable_embedding_init = tf.constant_initializer(pretrained_embedding[:trainable_num, :]) \ if pretrained_embedding is not None else tf.random_uniform_initializer(-init_scale, init_scale) fixed_embedding_init = tf.constant_initializer(pretrained_embedding[trainable_num:, :]) \ if pretrained_embedding is not None else tf.random_uniform_initializer(-init_scale, init_scale) trainable_embedding = tf.get_variable('trainable_embedding', shape=(trainable_num, input_shape[1]), initializer=trainable_embedding_init, trainable=True) fixed_embeding = tf.get_variable('fix_embedding', shape=(input_shape[0] - trainable_num, input_shape[1]), initializer=fixed_embedding_init, trainable=False) self.embedding = tf.concat([trainable_embedding, fixed_embeding], axis=0) def __call__(self, indices): return tf.nn.embedding_lookup(self.embedding, indices) class Embedding(Layer): def __init__(self, pretrained_embedding=None, embedding_shape=None, trainable=True, init_scale=0.02, name="embedding"): super(Embedding, self).__init__(name) if pretrained_embedding is None and embedding_shape is None: raise ValueError("At least one of pretrained_embedding and embedding_shape must be specified!") input_shape = pretrained_embedding.shape if pretrained_embedding is not None else embedding_shape with tf.variable_scope(self.name): embedding_init = 
tf.constant_initializer(pretrained_embedding) \ if pretrained_embedding is not None else tf.random_uniform_initializer(-init_scale, init_scale) self.embedding = tf.get_variable('embedding', shape=input_shape, initializer=embedding_init, trainable=trainable) def __call__(self, indices): return tf.nn.embedding_lookup(self.embedding, indices) class CoveEmbedding(Layer): def __init__(self, cove_path, pretrained_word_embedding=None, vocab=None, word_embedding_size=300, name='cove_embedding'): super(CoveEmbedding, self).__init__(name) if pretrained_word_embedding is None: raise ValueError("pretrained glove word embedding must be specified ! ") self.word_embedding_for_cove = Embedding(pretrained_word_embedding, embedding_shape=(len(vocab.get_word_vocab()) + 1, word_embedding_size), trainable=False) self.cove_model = tf.keras.models.load_model(cove_path) self.cove_model.trainable = False def __call__(self, input, input_len): word_embedding_repr = self.word_embedding_for_cove(input) return tf.stop_gradient(self.cove_model(word_embedding_repr, input_len)) class SumMaxEncoder(Layer): def __init__(self, name="sum_max_encoder"): super(SumMaxEncoder, self).__init__(name) def __call__(self, x, seq_len, max_len=None): mask_x1 = add_seq_mask(x, seq_len, 'mul', max_len) mask_x2 = add_seq_mask(x, seq_len, 'add', max_len) a = tf.reduce_sum(mask_x1, 1) b = tf.reduce_max(mask_x2, 1) ret = tf.concat([a, b], axis=1) ret = tf.expand_dims(ret, 1) return ret class BertEmbedding(Layer): def __init__(self, BERT_PRETRAINED_DIR='/uncased_L-12_H-768_A-12/', name='bert_model_helper'): super(BertEmbedding, self).__init__(name) CONFIG_FILE = os.path.join(BERT_PRETRAINED_DIR, 'bert_config.json') self.bert_config = modeling.BertConfig.from_json_file(CONFIG_FILE) self.init_checkpoint = os.path.join(BERT_PRETRAINED_DIR, 'bert_model.ckpt') def __call__(self, input_ids, input_mask, segment_ids, is_training,use_one_hot_embeddings=True,return_pool_output=False): """Creates a classification model.""" self.model = modeling.BertModel( config=self.bert_config, is_training=is_training, input_ids=input_ids, input_mask=input_mask, token_type_ids=segment_ids, use_one_hot_embeddings=use_one_hot_embeddings) return self.model.get_sequence_output() if not return_pool_output else (self.model.get_sequence_output(),self.model.get_pooled_output()) def init_bert(self): tvars = tf.trainable_variables() initialized_variable_names = {} init_checkpoint = self.init_checkpoint if init_checkpoint: (assignment_map, initialized_variable_names) = modeling.get_assignment_map_from_checkpoint(tvars, init_checkpoint) tf.train.init_from_checkpoint(init_checkpoint, assignment_map) tf.logging.info("**** Trainable Variables ****") for var in tvars: init_string = "" if var.name in initialized_variable_names: init_string = ", *INIT_FROM_CKPT*" tf.logging.info(" name = %s, shape = %s%s", var.name, var.shape, init_string)
7,828
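A minimal NumPy sketch of the highway combination that the Highway layer above computes (gate * transform + (1 - gate) * input); the weights and shapes below are illustrative placeholders standing in for the Keras Dense layers, not values from the source.

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

# Toy highway combination: y = g * T(x) + (1 - g) * x
rng = np.random.default_rng(0)
x = rng.normal(size=(2, 8))                 # (batch, hidden_units)
W_a = rng.normal(size=(8, 8)); b_a = np.zeros(8)
W_g = rng.normal(size=(8, 8)); b_g = np.zeros(8)

trans = np.maximum(x @ W_a + b_a, 0.0)      # affine_activation = relu
gate = sigmoid(x @ W_g + b_g)               # trans_gate_activation = sigmoid
y = gate * trans + (1.0 - gate) * x         # same combination as Highway.__call__
assert y.shape == x.shape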
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Joigny","circ":"3ème circonscription","dpt":"Yonne","inscrits":5629,"abs":3252,"votants":2377,"blancs":179,"nuls":48,"exp":2150,"res":[{"nuance":"REM","nom":"<NAME>","voix":1384},{"nuance":"FN","nom":"<NAME>","voix":766}]}
113
1,375
# Copyright lowRISC contributors. # Licensed under the Apache License, Version 2.0, see LICENSE for details. # SPDX-License-Identifier: Apache-2.0 import random from typing import List, Optional, Tuple def _extended_euclidean_algorithm(a: int, b: int) -> Tuple[int, int, int]: '''The extended Euclidean algorithm. Returns a tuple (r, s, t) so that gcd is the GCD of the two inputs and r = a s + b t. ''' r, r_nxt = a, b s, s_nxt = 1, 0 t, t_nxt = 0, 1 while r_nxt: q = r // r_nxt r, r_nxt = r_nxt, r - q * r_nxt s, s_nxt = s_nxt, s - q * s_nxt t, t_nxt = t_nxt, t - q * t_nxt # If both inputs are non-positive, the result comes out negative and we # should flip all the signs. if r < 0: r, s, t = - r, - s, - t return (r, s, t) def _intersect_ranges(a: List[Tuple[int, int]], b: List[Tuple[int, int]]) -> List[Tuple[int, int]]: ret = [] paired = ([(r, False) for r in a] + [(r, True) for r in b]) arng = None # type: Optional[Tuple[int, int]] brng = None # type: Optional[Tuple[int, int]] for (lo, hi), is_b in sorted(paired): if is_b: if arng is not None: a0, a1 = arng if a0 <= hi and lo <= a1: ret.append((max(a0, lo), min(a1, hi))) brng = (lo, hi) else: if brng is not None: b0, b1 = brng if b0 <= hi and lo <= b1: ret.append((max(lo, b0), min(hi, b1))) arng = (lo, hi) return ret class KnownMem: '''A representation of what memory/CSRs have architectural values''' def __init__(self, top_addr: int): assert top_addr > 0 self.top_addr = top_addr # A list of pairs of addresses. If the pair (lo, hi) is in the list # then each byte in the address range {lo..hi - 1} has a known value. self.known_ranges = [] # type: List[Tuple[int, int]] def copy(self) -> 'KnownMem': '''Return a shallow copy of the object''' ret = KnownMem(self.top_addr) ret.known_ranges = self.known_ranges.copy() return ret def merge(self, other: 'KnownMem') -> None: '''Merge in values from another KnownMem object''' assert self.top_addr == other.top_addr self.known_ranges = _intersect_ranges(self.known_ranges, other.known_ranges) def touch_range(self, base: int, width: int) -> None: '''Mark {base .. base + width - 1} as known''' assert 0 <= width assert 0 <= base <= self.top_addr - width for off in range(width): self.touch_addr(base + off) def touch_addr(self, addr: int) -> None: '''Mark word starting at addr as known''' assert 0 <= addr < self.top_addr # Find the index of the last range that starts below us, if there is # one, and the index of the first range that starts above us, if there # is one. last_idx_below = None first_idx_above = None for idx, (lo, hi) in enumerate(self.known_ranges): if lo <= addr: last_idx_below = idx continue first_idx_above = idx break # Are we below all other ranges? if last_idx_below is None: # Are we one address below the next range above? In which case, we # need to shuffle it back one. if first_idx_above is not None: lo, hi = self.known_ranges[first_idx_above] assert addr < lo if addr == lo - 1: self.known_ranges[first_idx_above] = (lo - 1, hi) return # Otherwise, we're disjoint. Add a one-element range at the start. self.known_ranges = [(addr, addr + 1)] + self.known_ranges return # If not, are we inside a range? In that case, there's nothing to do. left_lo, left_hi = self.known_ranges[last_idx_below] if addr < left_hi: return left = self.known_ranges[:last_idx_below] # Are we just above it? if addr == left_hi: # If there is no range above, we can just extend the last range by # one. 
if first_idx_above is None: self.known_ranges = left + [(left_lo, left_hi + 1)] return # Otherwise, does this new address glue two ranges together? assert first_idx_above == last_idx_below + 1 right_lo, right_hi = self.known_ranges[first_idx_above] assert addr < right_lo if addr == right_lo - 1: self.known_ranges = (left + [(left_lo, right_hi)] + self.known_ranges[first_idx_above + 1:]) return # Otherwise, we still extend the range by one (but have to put the # right hand list back too). self.known_ranges = (left + [(left_lo, left_hi + 1)] + self.known_ranges[first_idx_above:]) return # We are miles above the left range. If there is no range above, we can # just append a new 1-element range. left_inc = self.known_ranges[:first_idx_above] if first_idx_above is None: self.known_ranges.append((addr, addr + 1)) return # Otherwise, are we just below the next range? assert first_idx_above == last_idx_below + 1 right_lo, right_hi = self.known_ranges[first_idx_above] assert addr < right_lo if addr == right_lo - 1: self.known_ranges = (left_inc + [(right_lo - 1, right_hi)] + self.known_ranges[first_idx_above + 1:]) return # If not, we just insert a 1-element range in between self.known_ranges = (left_inc + [(addr, addr + 1)] + self.known_ranges[first_idx_above:]) return def pick_lsu_target(self, loads_value: bool, base_addr: int, offset_range: Tuple[int, int], offset_align: int, width: int, addr_align: int) -> Optional[Tuple[int, int]]: '''Try to pick an address with base and offset. If loads_value is true, the memory needs a known value for at least width bytes starting at that address. The address should be encodable as base_addr + offset where offset is in offset_range (inclusive) and is a multiple of offset_align. The address must be a multiple of addr_align. On failure, returns None. On success, returns (addr, offset) where addr is the chosen address and offset is the signed value that should be added to base_addr to get that address. ''' assert 0 <= base_addr < (1 << 32) assert offset_range[0] <= offset_range[1] assert 1 <= offset_align assert 1 <= width assert 1 <= addr_align # The code below assumes signed integers and no overflows. That doesn't # allow us to handle things like when base_addr = 0xffffffff, where # adding an offset of 1 would get us back to zero. # # Convert to a signed 32-bit representation here to make that work. ibase_addr = base_addr - (1 << 32) if base_addr >> 31 else base_addr # We're trying to pick an offset and an address so that # # ibase_addr + offset = addr # # Let's ignore offset_range and questions about valid memory addresses # for a second. We have two alignment requirements from offset and # addr, which mean we're really trying to satisfy something that looks # like # # a = b i + c j # # for a = ibase_addr; b = -offset_align; c = addr_align: find solutions # i, j. # # This is a 2-variable linear Diophantine equation. If gcd(b, c) does # not divide a, there is no solution. Otherwise, the extended Euclidean # algorithm yields x0, y0 such that # # gcd(b, c) = b x0 + c y0. # # Multiplying up by a / gcd(b, c) gives # # a = b i0 + c j0 # # where i0 = x0 * a / gcd(b, c) and j0 = y0 * a / gcd(b, c). # # This is the "inhomogeneous part". It's a solution to the equation, # and every other solution, (i, j) is a translate of the form # # i = i0 + k v # j = j0 - k u # # for some k, where u = b / gcd(b, c) and v = c / gcd(b, c). 
gcd, x0, y0 = _extended_euclidean_algorithm(-offset_align, addr_align) assert gcd == -offset_align * x0 + addr_align * y0 assert 0 < gcd if ibase_addr % gcd: return None # If gcd divides ibase_addr, we convert x0 and y0 to an initial # solution (i0, j0) as described above by multiplying up by ibase_addr # / gcd. # # Note: the floor divisions below for scale_factor, minus_u and v are # actually exact scale_factor = ibase_addr // gcd i0 = x0 * scale_factor j0 = y0 * scale_factor minus_u = offset_align // gcd v = addr_align // gcd assert 0 < v assert 0 < minus_u # offset_range gives the possible values of offset, which is - b i # in the equations above. Re-arranging the equation for i gives: # # k v = i - i0 # # so # # b k v = b i - b i0 = - offset - b i0 # # or # # k = (- offset - b i0) / (b v) # # Since b < 0 and v > 0, the denominator is negative and this is an # increasing function of offset, so we can get the allowed range for k # by evaluating it at the endpoints of offset_range. # # Round down in the division when computing k_max and round up when # computing k_min (because we're interested in the range of integers # that we can choose). Since b is negative, we negate top and bottom # when rounding up to allow the usual "(num + den - 1) // den" trick to # work properly. bv = - offset_align * v k_max = (-offset_range[1] + offset_align * i0) // bv k_min_num = -offset_range[0] + offset_align * i0 k_min = (- k_min_num + ((- bv) - 1)) // (- bv) # If k_min > k_max, this means b*v gives such big steps that none # landed in the range of allowed offsets if k_max < k_min: return None # Now, we need to consider which memory locations we can actually use. # If we're writing memory, we have a single range of allowed addresses # (all of memory!). If reading, we need to use self.known_ranges. In # either case, adjust for the fact that we need a width-byte access and # then rescale everything into "k units". # # To do that rescaling, we know that c j = addr and that j = j0 - k u. # So # # j0 - k u = addr / c # k u = j0 - addr / c # k = (j0 - addr / c) / u # = (addr / c - j0) / (- u) # # Since u is negative, this is an increasing function of addr, so we # can use address endpoints to get (disjoint) ranges for k. k_ranges = [] k_weights = [] byte_ranges = (self.known_ranges if loads_value else [(0, self.top_addr - 1)]) for byte_lo, byte_top in byte_ranges: # Since we're doing an access of width bytes, we round byte_top # down to the largest base address where the access lies completely # in the range. base_hi = byte_top - width if base_hi < byte_lo: continue # Compute the valid range for addr/c, rounding inwards. word_lo = (byte_lo + addr_align - 1) // addr_align word_hi = base_hi // addr_align # If word_hi < word_lo, there are no multiples of addr_align in the # range [byte_lo, base_hi]. if word_hi < word_lo: continue # Now translate by -j0 and divide through by -u, rounding inwards. k_hi = (word_hi - j0) // minus_u k_lo = (word_lo - j0 + (minus_u - 1)) // minus_u # If k_hi < k_lo, that means there are no multiples of u in the # range [word_lo - j0, word_hi - j0]. if k_hi < k_lo: continue # Finally, take the intersection with [k_min, k_max]. The # intersection is non-empty so long as k_lo <= k_max and k_min <= # k_hi. if k_lo > k_max or k_min > k_hi: continue k_lo = max(k_lo, k_min) k_hi = min(k_hi, k_max) k_ranges.append((k_lo, k_hi)) k_weights.append(k_hi - k_lo + 1) if not k_ranges: return None # We can finally pick a value of k. 
Pick the range (weighted by # k_weights) and then pick uniformly from in that range. k_lo, k_hi = random.choices(k_ranges, weights=k_weights)[0] k = random.randrange(k_lo, k_hi + 1) # Convert back to a solution to the original problem i = i0 + k * v j = j0 + k * minus_u offset = offset_align * i addr = addr_align * j assert addr == ibase_addr + offset return addr, offset def pick_bad_addr(self) -> Optional[int]: '''Pick bad addresses from gaps present in known addresses.''' gap_list = [] gap_vma = 0 for low, high in self.known_ranges: assert gap_vma <= low if gap_vma < low: gap_list.append((gap_vma, low - 1)) gap_vma = high + 1 if gap_vma <= self.top_addr: gap_list.append((gap_vma, self.top_addr)) if not gap_list: return None gap_len = [1 + hi - lo for lo, hi in gap_list] bad_addr_lo, bad_addr_hi = random.choices(gap_list, weights=gap_len)[0] return random.randint(bad_addr_lo, bad_addr_hi)
6,875
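A small worked check of the Bézout identity that `_extended_euclidean_algorithm` returns and that `pick_lsu_target` builds its Diophantine solution on; the concrete values of b, c, and a below are illustrative assumptions, not taken from the source.

import math

def extended_gcd(a, b):
    # Returns (g, s, t) with g = gcd(a, b) and g == a*s + b*t (Bezout identity),
    # mirroring the helper above.
    r, r_nxt, s, s_nxt, t, t_nxt = a, b, 1, 0, 0, 1
    while r_nxt:
        q = r // r_nxt
        r, r_nxt = r_nxt, r - q * r_nxt
        s, s_nxt = s_nxt, s - q * s_nxt
        t, t_nxt = t_nxt, t - q * t_nxt
    if r < 0:
        r, s, t = -r, -s, -t
    return r, s, t

# a = b*i + c*j is solvable iff gcd(b, c) divides a.
b, c = -4, 6                      # b = -offset_align, c = addr_align
g, x0, y0 = extended_gcd(b, c)
assert g == math.gcd(4, 6) == b * x0 + c * y0
a = 10                            # ibase_addr; divisible by gcd(b, c) = 2
scale = a // g
i0, j0 = x0 * scale, y0 * scale
assert a == b * i0 + c * j0       # one particular solution of the equation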
303
<reponame>ofZach/landlinesApp {"id":6545,"line-1":"Ash Sharqiyah North Governorate","line-2":"Oman","attribution":"©2015 CNES / Astrium, Cnes/Spot Image, DigitalGlobe","url":"https://www.google.com/maps/@20.667963,58.692055,16z/data=!3m1!1e3"}
98
683
/* * Copyright The OpenTelemetry Authors * SPDX-License-Identifier: Apache-2.0 */ package io.opentelemetry.javaagent.instrumentation.spring.scheduling; import static io.opentelemetry.javaagent.instrumentation.api.Java8BytecodeBridge.currentContext; import static io.opentelemetry.javaagent.instrumentation.spring.scheduling.SpringSchedulingSingletons.instrumenter; import io.opentelemetry.context.Context; import io.opentelemetry.context.Scope; public class SpringSchedulingRunnableWrapper implements Runnable { private final Runnable runnable; private SpringSchedulingRunnableWrapper(Runnable runnable) { this.runnable = runnable; } @Override public void run() { if (runnable == null) { return; } Context parentContext = currentContext(); if (!instrumenter().shouldStart(parentContext, runnable)) { runnable.run(); return; } Context context = instrumenter().start(parentContext, runnable); try (Scope ignored = context.makeCurrent()) { runnable.run(); instrumenter().end(context, runnable, null, null); } catch (Throwable throwable) { instrumenter().end(context, runnable, null, throwable); throw throwable; } } public static Runnable wrapIfNeeded(Runnable task) { // We wrap only lambdas' anonymous classes and if given object has not already been wrapped. // Anonymous classes have '/' in class name which is not allowed in 'normal' classes. if (task instanceof SpringSchedulingRunnableWrapper) { return task; } return new SpringSchedulingRunnableWrapper(task); } }
544
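A Python analogue of the wrapIfNeeded guard above, showing the idempotent-wrapper pattern (never wrap an already-wrapped task twice); the class and function names are hypothetical, and the span start/end calls are reduced to a comment.

class TracedRunnable:
    """Idempotent wrapper: wrapping an already-wrapped task returns it unchanged."""
    def __init__(self, fn):
        self.fn = fn
    def __call__(self):
        # A real wrapper would start a span here and end it after the call.
        return self.fn()

def wrap_if_needed(task):
    if isinstance(task, TracedRunnable):
        return task
    return TracedRunnable(task)

def job():
    return "done"

wrapped = wrap_if_needed(job)
assert wrap_if_needed(wrapped) is wrapped   # no double wrapping
assert wrapped() == "done"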
474
// // TuSDKGPULightGlareFilter.h // TuSDK // // Created by <NAME> on 15/1/17. // Copyright (c) 2015 tusdk.com. All rights reserved. // #import "TuSDKFilterAdapter.h" #import "TuSDKFilterParameter.h" /** * Light glare blending */ @interface TuSDKGPULightGlareFilter : TuSDKTwoInputFilter<TuSDKFilterParameterProtocol> /** * Blend amount (valid range 0.0-1.0) */ @property(readwrite, nonatomic) CGFloat mix; @end
180
304
package io.jenkins.plugins.analysis.core.charts; import java.util.List; import org.junit.jupiter.api.Test; import edu.hm.hafner.analysis.Report; import edu.hm.hafner.echarts.Palette; import edu.hm.hafner.echarts.PieChartModel; import edu.hm.hafner.echarts.PieData; import static io.jenkins.plugins.analysis.core.charts.Messages.*; import static org.assertj.core.api.Assertions.*; import static org.mockito.Mockito.*; /** * Tests the class {@link NewVersusFixedPieChart}. * * @author <NAME> */ class NewVersusFixedPieChartTest { @Test void testCreate() { int[] sizes = {2, 3, 4}; String[] names = {New_Warnings_Short(), Outstanding_Warnings_Short(), Fixed_Warnings_Short()}; Palette[] colors = {Palette.RED, Palette.YELLOW, Palette.GREEN}; NewVersusFixedPieChart chart = new NewVersusFixedPieChart(); PieChartModel model = chart.create(createReportStub(sizes[0]), createReportStub(sizes[1]), createReportStub(sizes[2])); List<PieData> data = model.getData(); assertThat(model.getData().size()).isEqualTo(3); assertThat(model.getColors().size()).isEqualTo(3); for (int i = 0; i < 3; i++) { assertThat(data.get(i).getName()).isEqualTo(names[i]); assertThat(data.get(i).getValue()).isEqualTo(sizes[i]); assertThat(model.getColors().get(i)).isEqualTo(colors[i].getNormal()); } } private Report createReportStub(final int size) { Report report = mock(Report.class); when(report.size()).thenReturn(size); return report; } }
663
542
/** * Modified MIT License * * Copyright 2017 OneSignal * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * 1. The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * 2. All copies of substantial portions of the Software may only be used in connection * with services provided by OneSignal. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.onesignal; import com.google.android.gms.common.api.GoogleApiClient; // Allows compatibility with pre-8.1.0 of GMS via reflection. // This allows the methods below to be used even if the app developer is using an old version Google Play services. class GoogleApiClientCompatProxy { private final GoogleApiClient googleApiClient; private final Class googleApiClientListenerClass; GoogleApiClientCompatProxy(GoogleApiClient googleApiClient) { this.googleApiClient = googleApiClient; googleApiClientListenerClass = googleApiClient.getClass(); } void connect() { try { googleApiClientListenerClass.getMethod("connect").invoke(googleApiClient); } catch (Throwable t) { t.printStackTrace(); } } void disconnect() { try { googleApiClientListenerClass.getMethod("disconnect").invoke(googleApiClient); } catch (Throwable t) { t.printStackTrace(); } } GoogleApiClient realInstance() { return googleApiClient; } }
697
2,151
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.support.v4.view; import android.view.WindowInsets; class WindowInsetsCompatApi20 { public static Object consumeSystemWindowInsets(Object insets) { return ((WindowInsets) insets).consumeSystemWindowInsets(); } public static int getSystemWindowInsetBottom(Object insets) { return ((WindowInsets) insets).getSystemWindowInsetBottom(); } public static int getSystemWindowInsetLeft(Object insets) { return ((WindowInsets) insets).getSystemWindowInsetLeft(); } public static int getSystemWindowInsetRight(Object insets) { return ((WindowInsets) insets).getSystemWindowInsetRight(); } public static int getSystemWindowInsetTop(Object insets) { return ((WindowInsets) insets).getSystemWindowInsetTop(); } public static boolean hasInsets(Object insets) { return ((WindowInsets) insets).hasInsets(); } public static boolean hasSystemWindowInsets(Object insets) { return ((WindowInsets) insets).hasSystemWindowInsets(); } public static boolean isRound(Object insets) { return ((WindowInsets) insets).isRound(); } public static Object replaceSystemWindowInsets(Object insets, int left, int top, int right, int bottom) { return ((WindowInsets) insets).replaceSystemWindowInsets(left, top, right, bottom); } public static Object getSourceWindowInsets(Object src) { return new WindowInsets((WindowInsets) src); } }
682
327
<reponame>jtravee/neuvector #include <string.h> #include <ctype.h> #include "dpi/dpi_module.h" #define HTTP2_FRAME_HEADER_LENGTH 9 #define HTTP2_MAGIC_FRAME_LENGTH 24 /* Header Type Code */ #define HTTP2_DATA 0 #define HTTP2_HEADERS 1 #define HTTP2_PRIORITY 2 #define HTTP2_RST_STREAM 3 #define HTTP2_SETTINGS 4 #define HTTP2_PUSH_PROMISE 5 #define HTTP2_PING 6 #define HTTP2_GOAWAY 7 #define HTTP2_WINDOW_UPDATE 8 #define HTTP2_CONTINUATION 9 #define HTTP2_ALTSVC 0xA #define HTTP2_BLOCKED 0xB #define HTTP2_SETTING_SENT 0x01 #define HTTP2_SETTING_ACKED 0x02 /* Flags */ #define HTTP2_FLAGS_ACK 0x01 /* for SETTINGS */ #define HTTP2_HEADER_CONTENT_TYPE "content-type"/*12 bytes*/ #define HTTP2_HDR_CONT_TYPE_LEN 12 #define HTTP2_HEADER_APP_GRPC "application/grpc"/*16 bytes*/ #define HTTP2_HDR_APP_GRPC_LEN 16 #define HTTP2_GRPC_CONTYPE_LEN 36 /*4+12+4+16=36 bytes*/ #define HTTP2_LEN_MAX (10*1024) /* Magic Header : PRI * HTTP/2.0\r\n\r\nSM\r\n\r\n */ static uint8_t http2clientmagic[] = { 0x50, 0x52, 0x49, 0x20, 0x2a, 0x20, 0x48, 0x54, 0x54, 0x50, 0x2f, 0x32, 0x2e, 0x30, 0x0d, 0x0a, 0x0d, 0x0a, 0x53, 0x4d, 0x0d, 0x0a, 0x0d, 0x0a }; static const char* http2_type_vals[] = { [HTTP2_DATA] "DATA", [HTTP2_HEADERS] "HEADERS", [HTTP2_PRIORITY] "PRIORITY", [HTTP2_RST_STREAM] "RST_STREAM", [HTTP2_SETTINGS] "SETTINGS", [HTTP2_PUSH_PROMISE] "PUSH_PROMISE", [HTTP2_PING] "PING", [HTTP2_GOAWAY] "GOAWAY", [HTTP2_WINDOW_UPDATE] "WINDOW_UPDATE", [HTTP2_CONTINUATION] "CONTINUATION", [HTTP2_ALTSVC] "ALTSVC", [HTTP2_BLOCKED] "BLOCKED", }; typedef struct grpc_wing_ { uint32_t seq; uint32_t h2_strm_hdr_cnt; uint32_t pktcnt; uint8_t setting_flag; } grpc_wing_t; typedef struct grpc_data_ { grpc_wing_t client, server; bool isgrpc; } grpc_data_t; static bool grpc_find_grpc_content_type(dpi_packet_t *p, uint8_t *ptr, uint32_t hdrstrmlen) { uint32_t pos = 0; DEBUG_LOG(DBG_PARSER, p, "c2s stream length(%u)\n", hdrstrmlen); while (pos < hdrstrmlen) { switch (*(ptr + pos)) { case '@': pos += 2; if ((hdrstrmlen - pos > HTTP2_HDR_CONT_TYPE_LEN) && (*(ptr + pos) == 'c' || *(ptr + pos) == 'C') && strncasecmp((char *)(ptr + pos), HTTP2_HEADER_CONTENT_TYPE, HTTP2_HDR_CONT_TYPE_LEN) == 0) { pos += HTTP2_HDR_CONT_TYPE_LEN; pos += 1;//name/value separator if((hdrstrmlen - pos > HTTP2_HDR_APP_GRPC_LEN) && (*(ptr + pos) == 'a' || *(ptr + pos) == 'A') && strncasecmp((char *)(ptr + pos), HTTP2_HEADER_APP_GRPC, HTTP2_HDR_APP_GRPC_LEN) == 0){ DEBUG_LOG(DBG_PARSER, p, "c2s application/grpc found\n"); return true; } } break; default: break; } pos++; } return false; } static void grpc_parser(dpi_packet_t *p) { dpi_session_t *s = p->session; grpc_data_t *data; grpc_wing_t *w; uint8_t *ptr; uint32_t len; uint32_t h2_hdr_len = 0; uint32_t h2_strm_len = 0; uint8_t h2_strm_type = 0; uint8_t h2_strm_flags = 0; DEBUG_LOG(DBG_PARSER, p, "session_id=%u\n", p->session->id); if (unlikely((data = dpi_get_parser_data(p)) == NULL)) { if ((data = calloc(1, sizeof(*data))) == NULL) { dpi_fire_parser(p); return; } data->client.seq = s->client.init_seq; data->client.setting_flag = 0; data->client.h2_strm_hdr_cnt = 0; data->client.pktcnt = 0; data->server.seq = s->server.init_seq; data->server.setting_flag = 0; data->server.h2_strm_hdr_cnt = 0; data->server.pktcnt = 0; data->isgrpc = false; dpi_put_parser_data(p, data); } w = dpi_is_client_pkt(p) ? 
&data->client : &data->server; if (w->seq == p->this_wing->init_seq) { ptr = dpi_pkt_ptr(p); len = dpi_pkt_len(p); } else if (dpi_is_seq_in_pkt(p, w->seq)) { uint32_t shift = u32_distance(dpi_pkt_seq(p), w->seq); ptr = dpi_pkt_ptr(p) + shift; len = dpi_pkt_len(p) - shift; } else { dpi_fire_parser(p); return; } /* * There is not enough space to hold http2 frame * header or initial c2s magic frame sequences */ if (len < HTTP2_FRAME_HEADER_LENGTH || (dpi_is_client_pkt(p) && data->client.h2_strm_hdr_cnt == 0 && len < HTTP2_MAGIC_FRAME_LENGTH)) { dpi_fire_parser(p); return; } while (len >= HTTP2_FRAME_HEADER_LENGTH) { if (dpi_is_client_pkt(p) && data->client.h2_strm_hdr_cnt == 0 && memcmp(ptr, http2clientmagic, HTTP2_MAGIC_FRAME_LENGTH) != 0){ dpi_fire_parser(p); return; } if (dpi_is_client_pkt(p) && data->client.h2_strm_hdr_cnt == 0) { h2_hdr_len = 0; h2_strm_len = HTTP2_MAGIC_FRAME_LENGTH; DEBUG_LOG(DBG_PARSER, p, "c2s magic sent\n"); } else { h2_hdr_len = HTTP2_FRAME_HEADER_LENGTH; h2_strm_len = GET_BIG_INT24(ptr); ptr += 3; h2_strm_type = *ptr; ptr += 1; h2_strm_flags = *ptr; ptr += 5; } DEBUG_LOG(DBG_PARSER, p, "%s http2 pkt len(%u) header len(%u), stream len(%u), type(%s), flag(0x%02x), stream cnt(%u)\n", dpi_is_client_pkt(p) ? "c2s" : "s2c", len, h2_hdr_len, h2_strm_len, h2_strm_type > HTTP2_BLOCKED ? "unknown type" : http2_type_vals[h2_strm_type], h2_strm_flags, dpi_is_client_pkt(p) ? data->client.h2_strm_hdr_cnt : data->server.h2_strm_hdr_cnt); /* tcp segmentation */ if (h2_hdr_len + h2_strm_len > len) { if (h2_hdr_len + h2_strm_len > HTTP2_LEN_MAX) { /* abnormal http2 length */ dpi_fire_parser(p); return; } return; } if (dpi_is_client_pkt(p)){ if (data->client.h2_strm_hdr_cnt == 1) { if (h2_strm_type != HTTP2_SETTINGS) { /* * In http2 preface, a HTTP2 SETTING should * immediately follow magic. */ dpi_fire_parser(p); return; } data->client.setting_flag |= HTTP2_SETTING_SENT; DEBUG_LOG(DBG_PARSER, p, "c2s HTTP2_SETTING_SENT\n"); } else if (data->client.h2_strm_hdr_cnt != 0){ if ((h2_strm_type == HTTP2_SETTINGS) && (h2_strm_flags & HTTP2_FLAGS_ACK) && !(data->server.setting_flag & HTTP2_SETTING_ACKED)) { data->server.setting_flag |= HTTP2_SETTING_ACKED; DEBUG_LOG(DBG_PARSER, p, "c2s server HTTP2_SETTING_ACKED\n"); } if (h2_strm_type == HTTP2_HEADERS) { if (!data->isgrpc) { data->isgrpc = grpc_find_grpc_content_type(p, ptr, h2_strm_len); } } } data->client.h2_strm_hdr_cnt++; } else {//s2c if (data->server.h2_strm_hdr_cnt == 0) { if (h2_strm_type != HTTP2_SETTINGS) { /* * In http2 preface, a HTTP2 SETTING should * be first packet from server. 
*/ dpi_fire_parser(p); return; } data->server.setting_flag |= HTTP2_SETTING_SENT; DEBUG_LOG(DBG_PARSER, p, "s2c server HTTP2_SETTING_SENT\n"); } else { if (!(data->client.setting_flag & HTTP2_SETTING_ACKED) && h2_strm_type == HTTP2_SETTINGS && (h2_strm_flags & HTTP2_FLAGS_ACK)){ data->client.setting_flag |= HTTP2_SETTING_ACKED; DEBUG_LOG(DBG_PARSER, p, "s2c client HTTP2_SETTING_ACKED\n"); } } data->server.h2_strm_hdr_cnt++; } ptr += h2_strm_len; len -= (h2_strm_len+h2_hdr_len); w->seq = dpi_ptr_2_seq(p, ptr); dpi_set_asm_seq(p, w->seq); } if (dpi_is_client_pkt(p)) { data->client.pktcnt++; DEBUG_LOG(DBG_PARSER, p, "c2s pktcnt(%u)\n", data->client.pktcnt); } else { data->server.pktcnt++; DEBUG_LOG(DBG_PARSER, p, "s2c pktcnt(%u)\n", data->server.pktcnt); } /*HTTP2 preface fail*/ if (!(data->client.setting_flag & HTTP2_SETTING_ACKED) && data->server.pktcnt >= 2) { dpi_fire_parser(p); return; } /*HTTP2 preface fail*/ if (!(data->server.setting_flag & HTTP2_SETTING_ACKED) && data->client.pktcnt >= 2) { dpi_fire_parser(p); return; } if ((data->client.setting_flag & HTTP2_SETTING_ACKED) && (data->server.setting_flag & HTTP2_SETTING_ACKED) && data->isgrpc) { DEBUG_LOG(DBG_PARSER, p, "HTTP2 PREFACE ESTABLISHED, GRPC IDENTIFIED\n"); dpi_finalize_parser(p); dpi_ignore_parser(p); } } static void grpc_new_session(dpi_packet_t *p) { dpi_hire_parser(p); } static void grpc_delete_data(void *data) { free(data); } static dpi_parser_t dpi_parser_grpc = { new_session: grpc_new_session, delete_data: grpc_delete_data, parser: grpc_parser, name: "grpc", ip_proto: IPPROTO_TCP, type: DPI_PARSER_GRPC, }; dpi_parser_t *dpi_grpc_tcp_parser(void) { return &dpi_parser_grpc; }
5,756
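A Python sketch of the 9-byte HTTP/2 frame header layout (24-bit length, 8-bit type, 8-bit flags, 31-bit stream id per RFC 7540 section 4.1) that the C parser above reads with GET_BIG_INT24 and the following byte accesses; the helper name and the sample SETTINGS+ACK frame are illustrative, not from the source.

import struct

def parse_http2_frame_header(buf: bytes):
    # 24-bit length, 8-bit type, 8-bit flags, 1 reserved bit + 31-bit stream id.
    if len(buf) < 9:
        raise ValueError("need at least 9 bytes")
    length = int.from_bytes(buf[0:3], "big")        # GET_BIG_INT24 in the C parser
    frame_type = buf[3]
    flags = buf[4]
    stream_id = int.from_bytes(buf[5:9], "big") & 0x7FFFFFFF
    return length, frame_type, flags, stream_id

# A SETTINGS frame with the ACK flag set and an empty payload, the shape the
# parser expects when it marks HTTP2_SETTING_ACKED.
hdr = struct.pack(">I", 0)[1:] + bytes([0x04, 0x01]) + struct.pack(">I", 0)
assert parse_http2_frame_header(hdr) == (0, 0x04, 0x01, 0)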
771
{ "name": "CRX Viewer", "desc": "Inspect what's included in Chrome/Firefox extensions.", "url": "https://robwu.nl/crxviewer/", "tags": [ "Misc" ], "maintainers": [ "Rob--W" ], "addedAt": "2021-10-09" }
103
1,133
<gh_stars>1000+ #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # United States Government Sponsorship acknowledged. This software is subject to # U.S. export control laws and regulations and has been classified as 'EAR99 NLR' # (No [Export] License Required except when exporting to an embargoed country, # end user, or in support of a prohibited end use). By downloading this software, # the user agrees to comply with all applicable U.S. export laws and regulations. # The user has the responsibility to obtain export licenses, or other export # authority as may be required before exporting this software to any 'EAR99' # embargoed foreign country or citizen of those countries. # # Author: <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ from __future__ import print_function from iscesys.Component.Component import Component from iscesys.Compatibility import Compatibility from stdproc.stdproc.resamp_amps import resamp_amps class Resamp_amps(Component): def resamp_amps(self,imageIn,imageOut): if not (imageIn == None): self.imageIn = imageIn if (self.imageIn == None): self.logger.error("Input slc image not set.") raise Exception if not (imageOut == None): self.imageOut = imageOut if (self.imageOut == None): self.logger.error("Output slc image not set.") raise Exception self.setDefaults() self.imageInAccessor = self.imageIn.getLineAccessorPointer() self.imageOutAccessor = self.imageOut.getLineAccessorPointer() self.computeSecondLocation() self.allocateArrays() self.setState() resamp_amps.resamp_amps_Py(self.imageInAccessor,self.imageOutAccessor) self.getState() self.deallocateArrays() return def setDefaults(self): if (self.numberLines == None): self.numberLines = self.image1.getFileLength() self.logger.warning('The variable NUMBER_LINES has been set to the default value %d which is the number of lines in the slc image.'% (self.numberLines)) if (self.numberFitCoefficients == None): self.numberFitCoefficients = 6 self.logger.warning('The variable NUMBER_FIT_COEFFICIENTS has been set to the default value %s' % (self.numberFitCoefficients)) if (self.firstLineOffset == None): self.firstLineOffset = 1 self.logger.warning('The variable FIRST_LINE_OFFSET has been set to the default value %s' % (self.firstLineOffset)) def computeSecondLocation(self): #this part was previously done in the fortran code self.locationAcross2 = [0]*len(self.locationAcross1) self.locationAcrossOffset2 = [0]*len(self.locationAcross1) self.locationDown2 = [0]*len(self.locationAcross1) self.locationDownOffset2 = [0]*len(self.locationAcross1) self.snr2 = [0]*len(self.locationAcross1) for i in range(len(self.locationAcross1)): self.locationAcross2[i] = self.locationAcross1[i] + self.locationAcrossOffset1[i] self.locationAcrossOffset2[i] = self.locationAcrossOffset1[i] self.locationDown2[i] = self.locationDown1[i] + self.locationDownOffset1[i] 
self.locationDownOffset2[i] = self.locationDownOffset1[i] self.snr2[i] = self.snr1[i] def setState(self): resamp_amps.setNumberFitCoefficients_Py(int(self.numberFitCoefficients)) resamp_amps.setNumberRangeBin_Py(int(self.numberRangeBin)) resamp_amps.setNumberLines_Py(int(self.numberLines)) resamp_amps.setFirstLineOffset_Py(int(self.firstLineOffset)) resamp_amps.setRadarWavelength_Py(float(self.radarWavelength)) resamp_amps.setSlantRangePixelSpacing_Py(float(self.slantRangePixelSpacing)) resamp_amps.setDopplerCentroidCoefficients_Py(self.dopplerCentroidCoefficients, self.dim1_dopplerCentroidCoefficients) resamp_amps.setLocationAcross1_Py(self.locationAcross1, self.dim1_locationAcross1) resamp_amps.setLocationAcrossOffset1_Py(self.locationAcrossOffset1, self.dim1_locationAcrossOffset1) resamp_amps.setLocationDown1_Py(self.locationDown1, self.dim1_locationDown1) resamp_amps.setLocationDownOffset1_Py(self.locationDownOffset1, self.dim1_locationDownOffset1) resamp_amps.setSNR1_Py(self.snr1, self.dim1_snr1) resamp_amps.setLocationAcross2_Py(self.locationAcross2, self.dim1_locationAcross2) resamp_amps.setLocationAcrossOffset2_Py(self.locationAcrossOffset2, self.dim1_locationAcrossOffset2) resamp_amps.setLocationDown2_Py(self.locationDown2, self.dim1_locationDown2) resamp_amps.setLocationDownOffset2_Py(self.locationDownOffset2, self.dim1_locationDownOffset2) resamp_amps.setSNR2_Py(self.snr2, self.dim1_snr2) return def setNumberFitCoefficients(self,var): self.numberFitCoefficients = int(var) return def setNumberRangeBin(self,var): self.numberRangeBin = int(var) return def setNumberLines(self,var): self.numberLines = int(var) return def setFirstLineOffset(self,var): self.firstLineOffset = int(var) return def setRadarWavelength(self,var): self.radarWavelength = float(var) return def setSlantRangePixelSpacing(self,var): self.slantRangePixelSpacing = float(var) return def setDopplerCentroidCoefficients(self,var): self.dopplerCentroidCoefficients = var return def setLocationAcross1(self,var): self.locationAcross1 = var return def setLocationAcrossOffset1(self,var): self.locationAcrossOffset1 = var return def setLocationDown1(self,var): self.locationDown1 = var return def setLocationDownOffset1(self,var): self.locationDownOffset1 = var return def setSNR1(self,var): self.snr1 = var return def setLocationAcross2(self,var): self.locationAcross2 = var return def setLocationAcrossOffset2(self,var): self.locationAcrossOffset2 = var return def setLocationDown2(self,var): self.locationDown2 = var return def setLocationDownOffset2(self,var): self.locationDownOffset2 = var return def setSNR2(self,var): self.snr2 = var return def getState(self): self.ULRangeOffset = resamp_amps.getULRangeOffset_Py() self.ULAzimuthOffset = resamp_amps.getULAzimuthOffset_Py() self.URRangeOffset = resamp_amps.getURRangeOffset_Py() self.URAzimuthOffset = resamp_amps.getURAzimuthOffset_Py() self.LLRangeOffset = resamp_amps.getLLRangeOffset_Py() self.LLAzimuthOffset = resamp_amps.getLLAzimuthOffset_Py() self.LRRangeOffset = resamp_amps.getLRRangeOffset_Py() self.LRAzimuthOffset = resamp_amps.getLRAzimuthOffset_Py() self.CenterRangeOffset = resamp_amps.getCenterRangeOffset_Py() self.CenterAzimuthOffset = resamp_amps.getCenterAzimuthOffset_Py() return def getULRangeOffset(self): return self.ULRangeOffset def getULAzimuthOffset(self): return self.ULAzimuthOffset def getURRangeOffset(self): return self.URRangeOffset def getURAzimuthOffset(self): return self.URAzimuthOffset def getLLRangeOffset(self): return self.LLRangeOffset def 
getLLAzimuthOffset(self): return self.LLAzimuthOffset def getLRRangeOffset(self): return self.LRRangeOffset def getLRAzimuthOffset(self): return self.LRAzimuthOffset def getCenterRangeOffset(self): return self.CenterRangeOffset def getCenterAzimuthOffset(self): return self.CenterAzimuthOffset def allocateArrays(self): if (self.dim1_dopplerCentroidCoefficients == None): self.dim1_dopplerCentroidCoefficients = len(self.dopplerCentroidCoefficients) if (not self.dim1_dopplerCentroidCoefficients): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_dopplerCoefficients_Py(self.dim1_dopplerCentroidCoefficients) if (self.dim1_locationAcross1 == None): self.dim1_locationAcross1 = len(self.locationAcross1) if (not self.dim1_locationAcross1): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_ranpos_Py(self.dim1_locationAcross1) if (self.dim1_locationAcrossOffset1 == None): self.dim1_locationAcrossOffset1 = len(self.locationAcrossOffset1) if (not self.dim1_locationAcrossOffset1): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_ranoff_Py(self.dim1_locationAcrossOffset1) if (self.dim1_locationDown1 == None): self.dim1_locationDown1 = len(self.locationDown1) if (not self.dim1_locationDown1): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_azpos_Py(self.dim1_locationDown1) if (self.dim1_locationDownOffset1 == None): self.dim1_locationDownOffset1 = len(self.locationDownOffset1) if (not self.dim1_locationDownOffset1): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_azoff_Py(self.dim1_locationDownOffset1) if (self.dim1_snr1 == None): self.dim1_snr1 = len(self.snr1) if (not self.dim1_snr1): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_sig_Py(self.dim1_snr1) if (self.dim1_locationAcross2 == None): self.dim1_locationAcross2 = len(self.locationAcross2) if (not self.dim1_locationAcross2): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_ranpos2_Py(self.dim1_locationAcross2) if (self.dim1_locationAcrossOffset2 == None): self.dim1_locationAcrossOffset2 = len(self.locationAcrossOffset2) if (not self.dim1_locationAcrossOffset2): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_ranoff2_Py(self.dim1_locationAcrossOffset2) if (self.dim1_locationDown2 == None): self.dim1_locationDown2 = len(self.locationDown2) if (not self.dim1_locationDown2): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_azpos2_Py(self.dim1_locationDown2) if (self.dim1_locationDownOffset2 == None): self.dim1_locationDownOffset2 = len(self.locationDownOffset2) if (not self.dim1_locationDownOffset2): print("Error. Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_azoff2_Py(self.dim1_locationDownOffset2) if (self.dim1_snr2 == None): self.dim1_snr2 = len(self.snr2) if (not self.dim1_snr2): print("Error. 
Trying to allocate zero size array") raise Exception resamp_amps.allocate_r_sig2_Py(self.dim1_snr2) return def deallocateArrays(self): resamp_amps.deallocate_dopplerCoefficients_Py() resamp_amps.deallocate_r_ranpos_Py() resamp_amps.deallocate_r_ranoff_Py() resamp_amps.deallocate_r_azpos_Py() resamp_amps.deallocate_r_azoff_Py() resamp_amps.deallocate_r_sig_Py() resamp_amps.deallocate_r_ranpos2_Py() resamp_amps.deallocate_r_ranoff2_Py() resamp_amps.deallocate_r_azpos2_Py() resamp_amps.deallocate_r_azoff2_Py() resamp_amps.deallocate_r_sig2_Py() return None logging_name = 'isce.stdproc.resamp_amps' def __init__(self): super(Resamp_amps, self).__init__() self.numberFitCoefficients = None self.numberRangeBin = None self.numberLines = None self.firstLineOffset = None self.radarWavelength = None self.slantRangePixelSpacing = None self.dopplerCentroidCoefficients = [] self.dim1_dopplerCentroidCoefficients = None self.locationAcross1 = [] self.dim1_locationAcross1 = None self.locationAcrossOffset1 = [] self.dim1_locationAcrossOffset1 = None self.locationDown1 = [] self.dim1_locationDown1 = None self.locationDownOffset1 = [] self.dim1_locationDownOffset1 = None self.snr1 = [] self.dim1_snr1 = None self.locationAcross2 = [] self.dim1_locationAcross2 = None self.locationAcrossOffset2 = [] self.dim1_locationAcrossOffset2 = None self.locationDown2 = [] self.dim1_locationDown2 = None self.locationDownOffset2 = [] self.dim1_locationDownOffset2 = None self.snr2 = [] self.dim1_snr2 = None self.ULRangeOffset = None self.ULAzimuthOffset = None self.URRangeOffset = None self.URAzimuthOffset = None self.LLRangeOffset = None self.LLAzimuthOffset = None self.LRRangeOffset = None self.LRAzimuthOffset = None self.CenterRangeOffset = None self.CenterAzimuthOffset = None # self.logger = logging.getLogger('isce.stdproc.resamp_amps') self.dictionaryOfVariables = { 'NUMBER_FIT_COEFFICIENTS' : ['self.numberFitCoefficients', 'int','optional'], 'NUMBER_RANGE_BIN' : ['self.numberRangeBin', 'int','mandatory'], 'NUMBER_LINES' : ['self.numberLines', 'int','optional'], 'FIRST_LINE_OFFSET' : ['self.firstLineOffset', 'int','optional'], 'RADAR_WAVELENGTH' : ['self.radarWavelength', 'float','mandatory'], 'SLANT_RANGE_PIXEL_SPACING' : ['self.slantRangePixelSpacing', 'float','mandatory'], 'DOPPLER_CENTROID_COEFFICIENTS' : ['self.dopplerCentroidCoefficients', 'float','mandatory'], 'LOCATION_ACROSS1' : ['self.locationAcross1', 'float','mandatory'], 'LOCATION_ACROSS_OFFSET1' : ['self.locationAcrossOffset1', 'float','mandatory'], 'LOCATION_DOWN1' : ['self.locationDown1', 'float','mandatory'], 'LOCATION_DOWN_OFFSET1' : ['self.locationDownOffset1', 'float','mandatory'], 'SNR1' : ['self.snr1', 'float','mandatory'], 'LOCATION_ACROSS2' : ['self.locationAcross2', 'float','mandatory'], 'LOCATION_ACROSS_OFFSET2' : ['self.locationAcrossOffset2', 'float','mandatory'], 'LOCATION_DOWN2' : ['self.locationDown2', 'float','mandatory'], 'LOCATION_DOWN_OFFSET2' : ['self.locationDownOffset2', 'float','mandatory'], 'SNR2' : ['self.snr2', 'float','mandatory'] } self.dictionaryOfOutputVariables = { 'UL_RANGE_OFFSET': 'self.ULRangeOffset', 'UL_AZIMUTH_OFFSET' : 'self.ULAzimuthOffset', 'UR_RANGE_OFFSET' : 'self.URRangeOffset' , 'UR_AZIMUTH_OFFSET' : 'self.URAzimuthOffset', 'LL_RANGE_OFFSET' : 'self.LLRangeOffset', 'LL_AZIMUTH_OFFSET' : 'self.LLAzimuthOffset', 'LR_RANGE_OFFSET' : 'self.LRRangeOffset', 'LR_AZIMUTH_OFFSET' : 'self.LRAzimuthOffset', 'CENTER_RANGE_OFFSET' : 'self.CenterRangeOffset', 'CENTER_AZIMUTH_OFFSET' : 'self.CenterAzimuthOffset' } 
self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables = [] typePos = 2 for key , val in self.dictionaryOfVariables.items(): if val[typePos] == 'mandatory': self.mandatoryVariables.append(key) elif val[typePos] == 'optional': self.optionalVariables.append(key) else: print('Error. Variable can only be optional or mandatory') raise Exception return None pass
7,287
3,442
/*
 * Jitsi, the OpenSource Java VoIP and Instant Messaging client.
 *
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.java.sip.communicator.plugin.generalconfig;

import net.java.sip.communicator.plugin.desktoputil.*;
import org.jitsi.service.configuration.*;
import org.jitsi.service.neomedia.codec.*;

import javax.swing.*;
import java.awt.*;
import java.awt.event.*;

/**
 * Implements the Silk configuration panel.
 *
 * @author <NAME>
 */
public class SilkConfigForm
    extends TransparentPanel
{
    /**
     * The default value for the SAT setting
     */
    private static final String FEC_SAT_DEFAULT = "0.5";

    /**
     * The default value for the FEC setting
     */
    private static final boolean FEC_DEFAULT = true;

    /**
     * The default value for the FEC force packet loss setting
     */
    private static final boolean FEC_FORCE_PL_DEFAULT = true;

    /**
     * The default value for the 'advertise FEC' setting
     */
    private static final boolean FEC_ADVERTISE_DEFAULT = false;

    /**
     * The "restore defaults" button
     */
    private final JButton restoreButton = new JButton(Resources.getString(
            "plugin.generalconfig.RESTORE"));

    /**
     * The "use fec" checkbox
     */
    private final JCheckBox fecCheckbox = new SIPCommCheckBox();

    /**
     * The "force packet loss" checkbox
     */
    private final JCheckBox assumePLCheckbox = new SIPCommCheckBox();

    /**
     * The " advertise FEC" checkbox
     */
    private final JCheckBox advertiseFECCheckbox = new SIPCommCheckBox();

    /**
     * The "speech activity threshold" field
     */
    private final JTextField SATField = new JTextField(6);

    /**
     * The <tt>ConfigurationService</tt> to be used to access configuration
     */
    private final ConfigurationService configurationService
            = GeneralConfigPluginActivator.getConfigurationService();

    /**
     * Initialize a new <tt>OpusConfigForm</tt> instance.
     */
    public SilkConfigForm()
    {
        super(new BorderLayout());
        Box box = Box.createVerticalBox();
        add(box, BorderLayout.NORTH);

        TransparentPanel contentPanel = new TransparentPanel();
        contentPanel.setLayout(new BorderLayout(10, 10));

        box.add(contentPanel);

        TransparentPanel labelPanel
                = new TransparentPanel(new GridLayout(0, 1, 2, 2));
        TransparentPanel valuePanel
                = new TransparentPanel(new GridLayout(0, 1, 2, 2));
        TransparentPanel southPanel
                = new TransparentPanel(new GridLayout(0, 1, 2, 2));

        contentPanel.add(labelPanel, BorderLayout.WEST);
        contentPanel.add(valuePanel, BorderLayout.CENTER);
        contentPanel.add(southPanel, BorderLayout.SOUTH);

        labelPanel.add(new JLabel(Resources.getString(
                "plugin.generalconfig.SILK_USE_FEC")));
        labelPanel.add(new JLabel(Resources.getString(
                "plugin.generalconfig.SILK_ALWAYS_ASSUME_PACKET_LOSS")));
        labelPanel.add(new JLabel(Resources.getString(
                "plugin.generalconfig.SILK_SAT")));
        labelPanel.add(new JLabel(Resources.getString(
                "plugin.generalconfig.SILK_ADVERTISE_FEC")));

        fecCheckbox.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent actionEvent)
            {
                configurationService.setProperty(Constants.PROP_SILK_FEC,
                        fecCheckbox.isSelected());
            }
        });
        fecCheckbox.setSelected(configurationService.getBoolean(
                Constants.PROP_SILK_FEC, FEC_DEFAULT));
        valuePanel.add(fecCheckbox);

        assumePLCheckbox.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent actionEvent)
            {
                configurationService.setProperty(Constants.PROP_SILK_ASSUME_PL,
                        assumePLCheckbox.isSelected());
            }
        });
        assumePLCheckbox.setSelected(configurationService.getBoolean(
                Constants.PROP_SILK_ASSUME_PL, FEC_FORCE_PL_DEFAULT));
        valuePanel.add(assumePLCheckbox);

        SATField.addFocusListener(new FocusListener()
        {
            public void focusGained(FocusEvent focusEvent) {}

            public void focusLost(FocusEvent focusEvent)
            {
                configurationService.setProperty(Constants.PROP_SILK_FEC_SAT,
                        SATField.getText());
            }
        });
        SATField.setText(configurationService.getString(
                Constants.PROP_SILK_FEC_SAT, FEC_SAT_DEFAULT));
        valuePanel.add(SATField);

        advertiseFECCheckbox.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent actionEvent)
            {
                configurationService.setProperty(
                        Constants.PROP_SILK_ADVERSISE_FEC,
                        advertiseFECCheckbox.isSelected());
            }
        });
        advertiseFECCheckbox.setSelected(configurationService.getBoolean(
                Constants.PROP_SILK_ADVERSISE_FEC, FEC_ADVERTISE_DEFAULT));
        valuePanel.add(advertiseFECCheckbox);

        southPanel.add(restoreButton);
        restoreButton.addActionListener(new ActionListener()
        {
            public void actionPerformed(ActionEvent e)
            {
                restoreDefaults();
            }
        });

        southPanel.add(new JLabel(Resources.getString(
                "plugin.generalconfig.DEFAULT_LANGUAGE_RESTART_WARN")));
    }

    /**
     * Restores the UI components and the configuration to their default state
     */
    private void restoreDefaults()
    {
        fecCheckbox.setSelected(FEC_DEFAULT);
        configurationService.setProperty(Constants.PROP_SILK_FEC,
                FEC_DEFAULT);

        assumePLCheckbox.setSelected(FEC_FORCE_PL_DEFAULT);
        configurationService.setProperty(
                Constants.PROP_SILK_ASSUME_PL, FEC_FORCE_PL_DEFAULT);

        SATField.setText(FEC_SAT_DEFAULT);
        configurationService.setProperty(
                Constants.PROP_SILK_FEC_SAT, FEC_SAT_DEFAULT);

        advertiseFECCheckbox.setSelected(FEC_ADVERTISE_DEFAULT);
        configurationService.setProperty(
                Constants.PROP_SILK_ADVERSISE_FEC, FEC_ADVERTISE_DEFAULT);
    }
}
2,906
831
<filename>gradle-dsl/src/com/android/tools/idea/gradle/dsl/api/GradleFileModel.java
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tools.idea.gradle.dsl.api;

import com.android.tools.idea.gradle.dsl.api.util.GradleDslModel;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import java.util.List;
import java.util.Map;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

public interface GradleFileModel extends GradleDslModel {
  @NotNull
  Project getProject();

  void reparse();

  boolean isModified();

  void resetState();

  @NotNull
  VirtualFile getVirtualFile();

  void applyChanges();

  @NotNull
  Map<String, List<BuildModelNotification>> getNotifications();

  /**
   * @return the psi file representing this GradleFileModel. In order to continue using this instance of the model no modifications should
   * be made to the underlying psi tree of the file for the models lifetime. This method is exposed to allow the PsiFile to be passed into
   * Intellij IDEA APIs. This method makes no guarantees about the validity of the returned element, callers should perform the necessary
   * checks before using.
   */
  @Nullable
  PsiFile getPsiFile();
}
542
375
/* * Copyright 2019 Nokia Solutions and Networks * Licensed under the Apache License, Version 2.0, * see license.txt file for details. */ package org.robotframework.ide.eclipse.main.plugin.tableeditor; import static java.util.stream.Collectors.joining; import static java.util.stream.Collectors.toList; import java.util.AbstractMap.SimpleImmutableEntry; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentHashMap; import java.util.function.Function; import java.util.function.Supplier; import java.util.stream.Stream; import javax.inject.Inject; import org.eclipse.e4.core.services.events.IEventBroker; import org.eclipse.e4.ui.di.UIEventTopic; import org.osgi.service.event.Event; import org.rf.ide.core.libraries.ArgumentsDescriptor; import org.rf.ide.core.testdata.model.AModelElement; import org.rf.ide.core.testdata.model.ExecutableSetting; import org.rf.ide.core.testdata.model.RobotFile; import org.rf.ide.core.testdata.model.table.RobotExecutableRow; import org.rf.ide.core.testdata.model.table.SettingTable; import org.rf.ide.core.testdata.model.table.exec.descs.IExecutableRowDescriptor; import org.rf.ide.core.testdata.model.table.exec.descs.IExecutableRowDescriptor.RowType; import org.rf.ide.core.testdata.model.table.keywords.UserKeyword; import org.rf.ide.core.testdata.model.table.keywords.names.QualifiedKeywordName; import org.rf.ide.core.testdata.model.table.tasks.Task; import org.rf.ide.core.testdata.model.table.testcases.TestCase; import org.rf.ide.core.testdata.text.read.recognizer.RobotToken; import org.rf.ide.core.testdata.text.read.recognizer.RobotTokenType; import org.rf.ide.core.validation.SpecialKeywords; import org.robotframework.ide.eclipse.main.plugin.RedPlugin; import org.robotframework.ide.eclipse.main.plugin.assist.RedKeywordProposal; import org.robotframework.ide.eclipse.main.plugin.assist.RedKeywordProposals; import org.robotframework.ide.eclipse.main.plugin.model.RobotFileInternalElement; import org.robotframework.ide.eclipse.main.plugin.model.RobotModelEvents; import org.robotframework.ide.eclipse.main.plugin.model.RobotSuiteFile; import org.robotframework.ide.eclipse.main.plugin.model.locators.AccessibleKeywordsEntities; import org.robotframework.red.swt.SwtThread; import org.robotframework.services.event.Events; import com.google.common.collect.RangeSet; import com.google.common.collect.Streams; import com.google.common.collect.TreeRangeSet; public class KeywordUsagesFinder { private final Object mutex = new Object(); private final Supplier<RobotSuiteFile> robotModelSupplier; private final RangeSet<Integer> libKwRanges = TreeRangeSet.create(); private final Set<String> libKwTokens = new HashSet<>(); private final Map<String, RedKeywordProposal> foundKeywords = new HashMap<>(); public KeywordUsagesFinder(final Supplier<RobotSuiteFile> fileModel) { this.robotModelSupplier = fileModel; } public CompletableFuture<Void> refresh() { return refresh(() -> {}); } public CompletableFuture<Void> refresh(final Runnable viewerRefresher) { return CompletableFuture.supplyAsync(this::calculateUsedKeywordFutures) .thenApply(futures -> CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])) .thenApply(v -> futures.stream() .map(CompletableFuture::join) .flatMap(List::stream) .collect(toList()))) .thenApply(CompletableFuture::join) 
.thenAccept(this::storeUsedKeywordData) .thenRun(() -> SwtThread.asyncExec(viewerRefresher)); } private List<CompletableFuture<List<Entry<RedKeywordProposal, RobotToken>>>> calculateUsedKeywordFutures() { final ExecutablesFinder finder = new ExecutablesFinder(robotModelSupplier.get()); final List<CompletableFuture<List<Entry<RedKeywordProposal, RobotToken>>>> futures = new ArrayList<>(); for (final IExecutableRowDescriptor<?> desc : finder.getExecutablesDescriptors()) { futures.add(CompletableFuture.supplyAsync(() -> finder.getExecutablesTokens(desc))); } for (final AModelElement<?> template : finder.getTemplates()) { futures.add(CompletableFuture.supplyAsync(() -> finder.getTemplateTokens(template))); } return futures; } private void storeUsedKeywordData(final List<Entry<RedKeywordProposal, RobotToken>> entries) { synchronized (mutex) { libKwRanges.clear(); libKwTokens.clear(); foundKeywords.clear(); entries.forEach(entry -> { final RedKeywordProposal proposal = entry.getKey(); final RobotToken token = entry.getValue(); if (proposal.isLibraryKeyword()) { libKwRanges.add(token.getRange()); libKwTokens.add(token.getText()); } foundKeywords.put(token.getText(), proposal); }); } } public boolean isLibraryKeyword(final int offset) { synchronized (mutex) { return libKwRanges.contains(offset); } } public boolean isLibraryKeyword(final String token) { synchronized (mutex) { return libKwTokens.contains(token); } } public Optional<ArgumentsDescriptor> getArgumentsDescriptor(final String keywordName) { synchronized (mutex) { return Optional.ofNullable(foundKeywords.get(keywordName)).map(RedKeywordProposal::getArgumentsDescriptor); } } public Optional<QualifiedKeywordName> getQualifiedName(final String keywordName) { synchronized (mutex) { return Optional.ofNullable(foundKeywords.get(keywordName)) .map(p -> QualifiedKeywordName.create(p.getKeywordName(), p.getSourceName())); } } @Inject @org.eclipse.e4.core.di.annotations.Optional private void whenSomeElementWasChangedUsingAnyTable( @UIEventTopic(RobotModelEvents.ROBOT_SUITE_FILE_ALL) final Event event) { final RobotFileInternalElement changedElement = Events.get(event, IEventBroker.DATA, RobotFileInternalElement.class); if (changedElement.getSuiteFile() == robotModelSupplier.get() && (RedPlugin.getDefault().getPreferences().isLibraryKeywordsColoringEnabled() || RedPlugin.getDefault().getPreferences().isKeywordArgumentCellsColoringEnabled())) { refresh(); } } private static class ExecutablesFinder { private final RobotSuiteFile suiteFile; private final RobotFile model; private final Function<String, RedKeywordProposal> proposalCache; private ExecutablesFinder(final RobotSuiteFile suiteFile) { this.suiteFile = suiteFile; this.model = suiteFile.getLinkedElement(); this.proposalCache = findKeywordFunction(); } private Function<String, RedKeywordProposal> findKeywordFunction() { final RedKeywordProposals proposals = new RedKeywordProposals(suiteFile); final AccessibleKeywordsEntities accessibleKwEntities = proposals.getAccessibleKeywordsEntities(suiteFile); final Map<String, RedKeywordProposal> cache = new ConcurrentHashMap<>(); return kw -> cache.computeIfAbsent(kw, kw2 -> proposals.getBestMatchingKeywordProposal(accessibleKwEntities, kw2).orElse(null)); } private List<IExecutableRowDescriptor<?>> getExecutablesDescriptors() { return Streams.concat(getExecutableRows(), getLocalExecutableSettings(), getGeneralExecutableSettings()) .map(RobotExecutableRow::buildLineDescription) .filter(desc -> desc.getRowType() == RowType.SIMPLE || desc.getRowType() == 
RowType.FOR_CONTINUE) .collect(toList()); } private List<Entry<RedKeywordProposal, RobotToken>> getExecutablesTokens( final IExecutableRowDescriptor<?> desc) { final List<Entry<RedKeywordProposal, RobotToken>> kwTokens = new ArrayList<>(); final RobotToken kwToken = desc.getKeywordAction(); final RedKeywordProposal proposal = proposalCache.apply(kwToken.getText()); if (proposal != null) { kwTokens.add(new SimpleImmutableEntry<>(proposal, kwToken)); final QualifiedKeywordName qualifiedKwName = QualifiedKeywordName.create(proposal.getKeywordName(), proposal.getSourceName()); SpecialKeywords.findNestedExecutableRows(desc, qualifiedKwName) .stream() .map(RobotExecutableRow::buildLineDescription) .map(this::getExecutablesTokens) .forEach(kwTokens::addAll); } return kwTokens; } private Stream<RobotExecutableRow<?>> getExecutableRows() { final List<RobotExecutableRow<?>> rows = new ArrayList<>(); model.getTestCaseTable() .getTestCases() .stream() .filter(t -> !t.getTemplateKeywordName().isPresent()) .flatMap(t -> t.getExecutionContext().stream()) .forEach(rows::add); model.getTasksTable() .getTasks() .stream() .filter(t -> !t.getTemplateKeywordName().isPresent()) .flatMap(t -> t.getExecutionContext().stream()) .forEach(rows::add); model.getKeywordTable() .getKeywords() .stream() .flatMap(t -> t.getExecutionContext().stream()) .forEach(rows::add); return rows.stream(); } private Stream<RobotExecutableRow<?>> getLocalExecutableSettings() { final List<ExecutableSetting> settings = new ArrayList<>(); for (final TestCase t : model.getTestCaseTable().getTestCases()) { settings.addAll(t.getSetupExecutables()); settings.addAll(t.getTeardownExecutables()); } for (final Task t : model.getTasksTable().getTasks()) { settings.addAll(t.getSetupExecutables()); settings.addAll(t.getTeardownExecutables()); } for (final UserKeyword k : model.getKeywordTable().getKeywords()) { settings.addAll(k.getTeardownExecutables()); } return settings.stream().filter(setting -> !setting.isDisabled()).map(ExecutableSetting::asExecutableRow); } private Stream<RobotExecutableRow<?>> getGeneralExecutableSettings() { final SettingTable settingsTable = model.getSettingTable(); final List<ExecutableSetting> settings = new ArrayList<>(); settings.addAll(settingsTable.getTestSetups()); settings.addAll(settingsTable.getTestTeardowns()); settings.addAll(settingsTable.getTaskSetups()); settings.addAll(settingsTable.getTaskTeardowns()); settings.addAll(settingsTable.getSuiteSetups()); settings.addAll(settingsTable.getSuiteTeardowns()); return settings.stream().filter(setting -> !setting.isDisabled()).map(ExecutableSetting::asExecutableRow); } private List<AModelElement<?>> getTemplates() { final List<AModelElement<?>> templates = new ArrayList<>(); templates.addAll(model.getSettingTable().getTestTemplates()); templates.addAll(model.getSettingTable().getTaskTemplates()); for (final TestCase t : model.getTestCaseTable().getTestCases()) { templates.addAll(t.getTemplates()); } for (final Task t : model.getTasksTable().getTasks()) { templates.addAll(t.getTemplates()); } return templates; } private List<Entry<RedKeywordProposal, RobotToken>> getTemplateTokens(final AModelElement<?> template) { final List<RobotToken> keywordTokens = getTemplateKeywordTokens(template); final String keywordName = keywordTokens.stream().map(RobotToken::getText).collect(joining(" ")); final RedKeywordProposal proposal = proposalCache.apply(keywordName); return proposal != null ? 
keywordTokens.stream() .map(kwToken -> new SimpleImmutableEntry<>(proposal, kwToken)) .collect(toList()) : new ArrayList<>(); } private List<RobotToken> getTemplateKeywordTokens(final AModelElement<?> template) { return template.getElementTokens() .stream() .skip(1) .filter(t -> !t.getTypes().contains(RobotTokenType.COMMENT)) .collect(toList()); } } }
6,041
303
<reponame>ofZach/landlinesApp {"id":5630,"line-1":"Ohio","line-2":"United States","attribution":"©2015 DigitalGlobe, Sanborn, U.S. Geological Survey","url":"https://www.google.com/maps/@39.095413,-84.516095,20z/data=!3m1!1e3"}
90
552
<filename>Engine/source/EtEditor/Import/ColladaImporter.cpp<gh_stars>100-1000 #include "stdafx.h" #include "ColladaImporter.h" #include <gtkmm/label.h> #include <gtkmm/checkbutton.h> #include <gtkmm/separator.h> #include <gtkmm/box.h> #include <EtCore/FileSystem/FileUtil.h> #include <EtPipeline/Assets/EditableMeshAsset.h> #include <EtEditor/Util/GtkUtil.h> #include "ColladaParser.h" #include "MeshDataContainer.h" namespace et { namespace edit { //================== // Collada Importer //================== //------------------------ // ColladaImporter::c-tor // ColladaImporter::ColladaImporter() : ImporterBase() { m_SupportedExtensions.push_back("dae"); } //------------------------------- // ColladaImporter::SetupOptions // void ColladaImporter::SetupOptions(Gtk::Frame* const frame, T_SensitiveFn& sensitiveFn) { Gtk::Box* const vbox = Gtk::make_managed<Gtk::Box>(Gtk::ORIENTATION_VERTICAL); frame->add(*vbox); // at least one asset type should be checked for the import buttonns to be active // create a checkbox and label combo auto makeOptionFn = [vbox](Glib::ustring const& label, bool& option, bool const isSensitive) -> Gtk::CheckButton* { Gtk::Box* const hbox = Gtk::make_managed<Gtk::Box>(Gtk::ORIENTATION_HORIZONTAL); vbox->pack_start(*hbox, false, true, 3u); Gtk::CheckButton* const check = Gtk::make_managed<Gtk::CheckButton>(); check->set_active(option); check->set_sensitive(isSensitive); check->signal_toggled().connect([&option, check]() { option = check->get_active(); }); hbox->pack_start(*check, false, false, 3u); hbox->pack_start(*Gtk::make_managed<Gtk::Label>(label), false, false, 3u); hbox->set_sensitive(isSensitive); return check; }; vbox->pack_start(*Gtk::make_managed<Gtk::Label>("Asset types"), false, true, 3u); std::vector<Gtk::CheckButton*> assetButtons; assetButtons.push_back(makeOptionFn("Import Meshes", m_ImportMeshes, true)); assetButtons.push_back(makeOptionFn("Import Skeletons", m_ImportSkeletons, false)); assetButtons.push_back(makeOptionFn("Import Animations", m_ImportAnimations, false)); for (Gtk::CheckButton* const checkButton : assetButtons) { checkButton->signal_toggled().connect([assetButtons, &sensitiveFn]() { sensitiveFn(std::find_if(assetButtons.cbegin(), assetButtons.cend(), [](Gtk::CheckButton const* const btn) { return btn->get_active(); }) != assetButtons.cend()); }); } vbox->pack_start(*Gtk::make_managed<Gtk::Separator>(Gtk::ORIENTATION_HORIZONTAL), false, true, 3u); vbox->pack_start(*Gtk::make_managed<Gtk::Label>("Mesh options"), false, true, 3u); makeOptionFn("Calculate Tangent Space", m_CalculateTangentSpace, true); makeOptionFn("Pre Transform Vertices", m_PreTransformVertices, true); makeOptionFn("Remove duplicate vertices", m_RemoveDuplicateVertices, true); makeOptionFn("Include Skeletal data", m_IncludeSkeletalData, false); vbox->pack_start(*Gtk::make_managed<Gtk::Separator>(Gtk::ORIENTATION_HORIZONTAL), false, true, 3u); } //------------------------- // ColladaImporter::Import // bool ColladaImporter::Import(std::vector<uint8> const& importData, std::string const& filePath, std::vector<pl::EditorAssetBase*>& outAssets) const { UNUSED(importData); // read the barebones document ColladaParser const parser(importData); if (!parser.IsValid()) { LOG("Collada parser didn't complete due to invalid document", core::LogLevel::Warning); return false; } // read components if (m_ImportMeshes) { std::vector<dae::Node> nodes; std::vector<dae::VisualScene> scenes; parser.IterateNodes([&nodes](core::XML::Element const& nodeEl, dae::Asset const&) { 
nodes.emplace_back(); ColladaParser::ReadNode(nodes[nodes.size() - 1u], nodeEl); }); parser.IterateVisualScenes([&scenes](core::XML::Element const& sceneEl, dae::Asset const&) { scenes.emplace_back(); ColladaParser::ReadScene(scenes[scenes.size() - 1u], sceneEl); }); std::vector<MeshDataContainer> containers; parser.IterateGeometries([this, &containers, &nodes, &scenes](core::XML::Element const& geometryEl, dae::Asset const& asset) { core::XML::Element const* const meshEl = ColladaParser::GetMeshElFromGeometry(geometryEl); if (meshEl == nullptr) { return; // for now we only support mesh geometries } // read mesh and ensure we can use it //------------------------------------ dae::Mesh mesh; if (!ColladaParser::ReadMesh(mesh, *meshEl)) { return; } // figure out which inputs we actually want to use std::vector<size_t> inputIndices; for (size_t inputIdx = 0u; inputIdx < mesh.m_ResolvedInputs.size(); ++inputIdx) { switch (mesh.m_ResolvedInputs[inputIdx].m_Input.m_Semantic) { case dae::E_Semantic::Position: case dae::E_Semantic::Normal: case dae::E_Semantic::Binormal: case dae::E_Semantic::Tangent: case dae::E_Semantic::Color: case dae::E_Semantic::Texcoord: auto foundIt = std::find_if(inputIndices.begin(), inputIndices.end(), [inputIdx, &mesh](size_t const idx) { return (mesh.m_ResolvedInputs[idx].m_Input.m_Semantic == mesh.m_ResolvedInputs[inputIdx].m_Input.m_Semantic); }); if (foundIt == inputIndices.cend()) { inputIndices.push_back(inputIdx); } else if (mesh.m_ResolvedInputs[*foundIt].m_Input.m_Set > mesh.m_ResolvedInputs[inputIdx].m_Input.m_Set) { *foundIt = inputIdx; } break; } } // figure out transform of the mesh //---------------------------------- mat4 meshTransform; bool hasNode = false; std::string nodeName; { core::HashString const meshId = ColladaParser::GetElementId(geometryEl); if (!meshId.IsEmpty()) { for (dae::Node const& node : nodes) { if (node.GetGeometryTransformName(meshTransform, nodeName, meshId)) { hasNode = true; break; } } for (dae::VisualScene const& scene : scenes) { if (hasNode) { break; } for (dae::Node const& node : scene.m_Nodes) { if (node.GetGeometryTransformName(meshTransform, nodeName, meshId)) { hasNode = true; break; } } } } } mat3 stationary; if (hasNode) { meshTransform = math::inverse(meshTransform); stationary = math::CreateFromMat4(meshTransform); } bool const useTransform = hasNode && m_PreTransformVertices; // create mesh container //----------------------- containers.emplace_back(); MeshDataContainer& meshContainer = containers[containers.size() - 1u]; size_t usedSet = dae::Input::s_InvalidIndex; size_t const increment = mesh.m_MaxInputOffset + 1u; for (size_t const inputIdx : inputIndices) { ET_ASSERT(mesh.m_ResolvedInputs[inputIdx].m_Accessor != nullptr); ET_ASSERT(mesh.m_ResolvedInputs[inputIdx].m_Source != nullptr); dae::Input const& input = mesh.m_ResolvedInputs[inputIdx].m_Input; dae::Accessor const& accessor = *mesh.m_ResolvedInputs[inputIdx].m_Accessor; dae::Source& source = *mesh.m_ResolvedInputs[inputIdx].m_Source; // validate input sets if (usedSet == dae::Input::s_InvalidIndex) { usedSet = input.m_Set; } else { if ((input.m_Set != dae::Input::s_InvalidIndex) && (input.m_Set != usedSet)) { LOG(FS("COLLADA using multiple input sets: [" ET_FMT_SIZET "], [" ET_FMT_SIZET "]", usedSet, input.m_Set), core::Warning); } } // validate data types if (source.m_Type != dae::Source::E_Type::Float) { LOG("COLLADA geometry source was not made of floats, can't access data", core::Warning); containers.pop_back(); return; } for (dae::Accessor::Param 
const& param : accessor.m_Parameters) { if (param.m_Type != dae::Accessor::E_ParamType::Float) { LOG("COLLADA geometry accessor parameters where not made of floats, can't access data", core::Warning); containers.pop_back(); return; } } // ensure the source data is parsed if (!source.m_IsResolved) { if (!ColladaParser::ResolveSource(source)) { LOG(FS("Failed to resolve COLLADA source '%s'r", source.m_Id.ToStringDbg()), core::Warning); containers.pop_back(); return; } } // access data auto const getVecFn = [&meshContainer, &input]() -> std::vector<vec3>& { switch (input.m_Semantic) { case dae::E_Semantic::Position: return meshContainer.m_Positions; case dae::E_Semantic::Normal: return meshContainer.m_Normals; case dae::E_Semantic::Binormal: return meshContainer.m_BiNormals; case dae::E_Semantic::Tangent: return meshContainer.m_Tangents; default: ET_ASSERT(false, "Unhandled input semantic"); return *reinterpret_cast<std::vector<vec3>*>(nullptr); } }; switch (input.m_Semantic) { case dae::E_Semantic::Position: { // coordinate conversion ivec3 axisIndices = asset.Get3DIndices(); vec3 multiplier = asset.Get3DAxisMultipliers() * asset.m_UnitToMeter; std::vector<vec3>& vec = getVecFn(); for (size_t idx = input.m_Offset; idx < mesh.m_PrimitiveIndices.size(); idx += increment) { size_t const accessorIdx = mesh.m_PrimitiveIndices[idx]; if (accessorIdx >= accessor.m_Count) { LOG("COLLADA failed to read vector from accessor, index out of bounds", core::Warning); containers.pop_back(); return; } vec3 inVec = accessor.ReadVector<3>(source, accessorIdx); if (useTransform) { inVec = (meshTransform * vec4(inVec, 1.f)).xyz; } vec.push_back(math::swizzle(inVec * multiplier, axisIndices)); } } break; case dae::E_Semantic::Normal: case dae::E_Semantic::Binormal: case dae::E_Semantic::Tangent: { // coordinate conversion ivec3 axisIndices = asset.Get3DIndices(); vec3 multiplier = asset.Get3DAxisMultipliers(); std::vector<vec3>& vec = getVecFn(); for (size_t idx = input.m_Offset; idx < mesh.m_PrimitiveIndices.size(); idx += increment) { size_t const accessorIdx = mesh.m_PrimitiveIndices[idx]; if (accessorIdx >= accessor.m_Count) { LOG("COLLADA failed to read vector from accessor, index out of bounds", core::Warning); containers.pop_back(); return; } vec3 inVec = accessor.ReadVector<3>(source, accessorIdx); if (useTransform) { inVec = stationary * inVec; } vec.push_back(math::swizzle(inVec * multiplier, axisIndices)); } } break; case dae::E_Semantic::Color: { std::vector<vec4>& vec = meshContainer.m_Colors; for (size_t idx = input.m_Offset; idx < mesh.m_PrimitiveIndices.size(); idx += increment) { size_t const accessorIdx = mesh.m_PrimitiveIndices[idx]; if (accessorIdx >= accessor.m_Count) { LOG("COLLADA failed to read vector from accessor, index out of bounds", core::Warning); containers.pop_back(); return; } vec.push_back(accessor.ReadVector<4>(source, accessorIdx)); } } break; case dae::E_Semantic::Texcoord: { std::vector<vec2>& vec = meshContainer.m_TexCoords; for (size_t idx = input.m_Offset; idx < mesh.m_PrimitiveIndices.size(); idx += increment) { size_t const accessorIdx = mesh.m_PrimitiveIndices[idx]; if (accessorIdx >= accessor.m_Count) { LOG("COLLADA failed to read vector from accessor, index out of bounds", core::Warning); containers.pop_back(); return; } vec2 tc = accessor.ReadVector<2>(source, accessorIdx); tc.y = 1.f - tc.y; // we need to flip texcoords for rendering in this engine vec.push_back(tc); } } break; default: ET_ASSERT(false, "Unhandled input semantic"); } } // other container data 
//---------------------- if (!nodeName.empty()) { meshContainer.m_Name = nodeName; } else { meshContainer.m_Name = ColladaParser::GetLibraryElementName(geometryEl); } meshContainer.m_VertexCount = meshContainer.m_Positions.size(); // generate index buffer - this should later be optimized by removing duplicates meshContainer.m_Indices.reserve(meshContainer.m_VertexCount); for (size_t idx = 0; idx < meshContainer.m_VertexCount; ++idx) { meshContainer.m_Indices.push_back(idx); } if (m_RemoveDuplicateVertices) { meshContainer.RemoveDuplicateVertices(); } if (!mesh.m_VertexCounts.empty()) { if (!meshContainer.Triangulate(mesh.m_VertexCounts)) { LOG("Failed to triangulate collada mesh, skipping", core::Warning); containers.pop_back(); return; } } // derived tangent space if (m_CalculateTangentSpace && (meshContainer.m_Normals.size() == meshContainer.m_VertexCount)) { if (meshContainer.m_Tangents.empty() && meshContainer.m_BiNormals.empty()) { std::vector<vec4> tangentVec; meshContainer.ConstructTangentSpace(tangentVec); } else if (meshContainer.m_Tangents.empty() && (meshContainer.m_BiNormals.size() == meshContainer.m_VertexCount)) { meshContainer.m_Tangents.reserve(meshContainer.m_VertexCount); for (size_t idx = 0u; idx < meshContainer.m_VertexCount; ++idx) { meshContainer.m_Tangents.push_back(-math::cross(meshContainer.m_Normals[idx], meshContainer.m_BiNormals[idx])); } } else if (meshContainer.m_BiNormals.empty() && (meshContainer.m_Tangents.size() == meshContainer.m_VertexCount)) { meshContainer.m_BiNormals.reserve(meshContainer.m_VertexCount); for (size_t idx = 0u; idx < meshContainer.m_VertexCount; ++idx) { meshContainer.m_BiNormals.push_back(math::cross(meshContainer.m_Normals[idx], meshContainer.m_Tangents[idx])); } } } }); // convert mesh containers to mesh assets //---------------------------------------- for (MeshDataContainer const& meshContainer : containers) { pl::EditableMeshAsset* const editableMeshAsset = new pl::EditableMeshAsset(); outAssets.push_back(editableMeshAsset); render::MeshAsset* const meshAsset = new render::MeshAsset(); editableMeshAsset->SetAsset(meshAsset); meshContainer.WriteToEtMesh(meshAsset->GetLoadData()); if (containers.size() == 1u) { meshAsset->SetName(core::FileUtil::RemoveExtension(core::FileUtil::ExtractName(filePath)) + "." + pl::EditableMeshAsset::s_EtMeshExt); } else { meshAsset->SetName(meshContainer.m_Name + "." + pl::EditableMeshAsset::s_EtMeshExt); } } containers.clear(); } return true; } } // namespace edit } // namespace et
6,309
2,594
<reponame>deenu713/bundletool
/*
 * Copyright (C) 2018 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License
 */
package com.android.tools.build.bundletool.model.utils.xmlproto;

import com.android.aapt.Resources.XmlElementOrBuilder;
import com.android.aapt.Resources.XmlNode.NodeCase;
import com.android.aapt.Resources.XmlNodeOrBuilder;

/**
 * Internal interface ensuring that {@link XmlProtoNode} and {@link XmlProtoNodeBuilder} have the
 * same getters.
 */
abstract class XmlProtoNodeOrBuilder<
    ElementProtoT extends XmlElementOrBuilder,
    ElementWrapperT extends
        XmlProtoElementOrBuilder<NodeProtoT, ?, ElementProtoT, ElementWrapperT, ?, ?>,
    NodeProtoT extends XmlNodeOrBuilder> {

  protected abstract NodeProtoT getProto();

  protected abstract ElementProtoT getProtoElement();

  protected abstract ElementWrapperT newElement(ElementProtoT element);

  public final boolean isElement() {
    return getProto().getNodeCase().equals(NodeCase.ELEMENT);
  }

  public final boolean isText() {
    return getProto().getNodeCase().equals(NodeCase.TEXT);
  }

  public final ElementWrapperT getElement() {
    if (!isElement()) {
      throw new XmlProtoException(
          "Expected node of type 'element' but found: %s", getProto().getNodeCase());
    }
    return newElement(getProtoElement());
  }

  public final String getText() {
    if (!isText()) {
      throw new XmlProtoException(
          "Expected node of type 'text' but found: %s", getProto().getNodeCase());
    }
    return getProto().getText();
  }

  @Override
  public String toString() {
    return getProto().toString();
  }
}
691
21,382
package io.ray.runtime.exception;

import io.ray.api.id.ActorId;
import io.ray.runtime.util.NetworkUtil;
import io.ray.runtime.util.SystemUtil;

/**
 * Indicates that the actor died unexpectedly before finishing a task.
 *
 * <p>This exception could happen either because the actor process dies while executing a task, or
 * because a task is submitted to a dead actor.
 *
 * <p>If the actor died because of an exception thrown in its creation tasks, RayActorError will
 * contains this exception as the cause exception.
 */
public class RayActorException extends RayException {
  public ActorId actorId;

  public RayActorException() {
    super("The actor died unexpectedly before finishing this task.");
  }

  public RayActorException(ActorId actorId) {
    super(String.format("The actor %s died unexpectedly before finishing this task.", actorId));
    this.actorId = actorId;
  }

  public RayActorException(ActorId actorId, Throwable cause) {
    super(
        String.format(
            "(pid=%d, ip=%s) The actor %s died because of it's creation task failed",
            SystemUtil.pid(), NetworkUtil.getIpAddress(null), actorId.toString()),
        cause);
    this.actorId = actorId;
  }

  public RayActorException(Throwable cause) {
    super(
        String.format(
            "(pid=%d, ip=%s) The actor died because of it's creation task failed",
            SystemUtil.pid(), NetworkUtil.getIpAddress(null)),
        cause);
  }
}
479
672
<filename>contrib/depends/SDKs/MacOSX10.11.sdk/usr/include/printf.h /*- * Copyright (c) 2005 <NAME> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * $FreeBSD: src/include/printf.h,v 1.5 2011/03/06 17:45:37 pjd Exp $ */ #ifndef _PRINTF_H_ #define _PRINTF_H_ /**************************************************************************** * This is the header file for extensible printf, a set of APIs that allow * adding/modifying conversion specifier(s) for stdio formatted printing. * It is based on the GLIBC API documented in: * * http://www.gnu.org/software/libc/manual/html_node/Customizing-Printf.html * * Because that API affects printf behavior process-wide and so is unsafe, * we adapt a modified form, based on the concept of printf domains in which * changes to conversion specifiers can be made independent of one another * and which don't affect the normal printf behavior. In addition, there * is now a set of printf variants that take a printf domain as an argument. * * See xprintf(5) for more details. ****************************************************************************/ #include <stdio.h> #include <wchar.h> #include <xlocale.h> #include <Availability.h> #ifdef __GNUC__ #define __XPRINTF_ATTR(x) __attribute__(x) #else /* !__GNUC__ */ #define __XPRINTF_ATTR(x) /* nothing */ #endif /* !__GNUC__ */ /* * The API defined by GLIBC allows a renderer to take multiple arguments * This is obviously usable for things like (ptr+len) pairs etc. * The current limit is to deal with up to __PRINTFMAXARG arguments (any * above this limit are ignored). 
*/ #define __PRINTFMAXARG 2 struct printf_info { /* Mac OS X extensions */ void *context; /* User context pointer */ locale_t loc; /* Extended locale */ wchar_t vsep; /* Vector separator char */ /* one of ,:;_ flag or X by default */ /* GLIBC compatible */ int prec; /* precision */ int width; /* Width */ wchar_t spec; /* Format letter */ wchar_t pad; /* Padding char */ /* 0 if 0 flag set, otherwise space */ /* FreeBSD extensions */ wchar_t signchar; /* Sign char */ /* GLIBC compatible flags */ unsigned is_long_double :1; /* L or ll flag */ unsigned is_char :1; /* hh flag */ unsigned is_short :1; /* h flag */ unsigned is_long :1; /* l flag */ unsigned alt :1; /* # flag */ unsigned space :1; /* Space flag */ unsigned left :1; /* - flag */ unsigned showsign :1; /* + flag */ unsigned group :1; /* ' flag */ unsigned extra :1; /* For special use (currently unused) */ unsigned wide :1; /* Nonzero for wide character streams (currently unused) */ /* FreeBSD flags */ unsigned is_quad :1; /* q flag */ unsigned is_intmax :1; /* j flag */ unsigned is_ptrdiff :1; /* t flag */ unsigned is_size :1; /* z flag */ /* Mac OS X flags */ unsigned is_vec :1; /* v flag */ /* private */ int sofar; unsigned get_width; unsigned get_prec; const char *begin; const char *end; void *arg[__PRINTFMAXARG]; }; enum { PA_INT = (1 << 0), /* int */ PA_CHAR = (1 << 1), /* int, cast to char */ PA_WCHAR = (1 << 2), /* wide char */ PA_STRING = (1 << 3), /* const char * (with '\0') */ PA_WSTRING = (1 << 4), /* const wchar_t * */ PA_POINTER = (1 << 5), /* void * */ PA_FLOAT = (1 << 6), /* float (Defined but unused; best to avoid.) */ PA_DOUBLE = (1 << 7), /* double */ PA_VECTOR = (1 << 8), /* vector */ }; #define PA_FLAG_MASK 0xff0000 #define PA_FLAG_LONG_LONG (1 << 16) #define PA_FLAG_LONG (1 << 17) #define PA_FLAG_SHORT (1 << 18) #define PA_FLAG_PTR (1 << 19) #define PA_FLAG_QUAD (1 << 20) #define PA_FLAG_INTMAX (1 << 21) #define PA_FLAG_SIZE (1 << 22) #define PA_FLAG_PTRDIFF (1 << 23) #define PA_FLAG_LONG_DOUBLE PA_FLAG_LONG_LONG /************************ Basic Extensible Printf APIs ************************/ typedef int printf_arginfo_function(const struct printf_info *__info, size_t __n, int *__argtypes); typedef int printf_function(FILE *__stream, const struct printf_info *__info, const void *const *__args); /* * We don't support the GLIBC register_printf_function() or FreeBSD * register_printf_render_std(), because they affect printf globally * and are unsafe. 
*/ /*************** Extensible Printf Domains APIs ****************/ struct _printf_domain; /* forward reference */ typedef struct _printf_domain *printf_domain_t; __BEGIN_DECLS printf_domain_t copy_printf_domain(printf_domain_t __domain) __XPRINTF_ATTR((__nonnull__(1))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); void free_printf_domain(printf_domain_t __domain) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); printf_domain_t new_printf_domain(void) __XPRINTF_ATTR((__malloc__)) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int register_printf_domain_function(printf_domain_t __domain, int __spec, printf_function *__render, printf_arginfo_function *__arginfo, void *__context) __XPRINTF_ATTR((__nonnull__(1))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int register_printf_domain_render_std(printf_domain_t __domain, const char *__specs) __XPRINTF_ATTR((__nonnull__(1))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); /**** All-in-one extensible printf variants ****/ int asxprintf(char ** __restrict __ret, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, ...) __XPRINTF_ATTR((__nonnull__(1, 2, 4))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int dxprintf(int __fd, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, ...) __XPRINTF_ATTR((__nonnull__(2, 4))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int fxprintf(FILE * __restrict __stream, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, ...) __XPRINTF_ATTR((__nonnull__(1, 2, 4))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int sxprintf(char * __restrict __str, size_t __size, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, ...) __XPRINTF_ATTR((__nonnull__(1, 3, 5))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int xprintf(printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, ...) 
__XPRINTF_ATTR((__nonnull__(1, 3))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vasxprintf(char ** __restrict __ret, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 2, 4))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vdxprintf(int __fd, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, va_list __ap) __XPRINTF_ATTR((__nonnull__(2, 4))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vfxprintf(FILE * __restrict __stream, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 2, 4))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vsxprintf(char * __restrict __str, size_t __size, printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 3, 5))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vxprintf(printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __format, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 3))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); __END_DECLS /******** Extensible Printf Compilation/Execution APIs *********/ struct _printf_compiled; /* forward reference */ typedef struct _printf_compiled *printf_comp_t; __BEGIN_DECLS void free_printf_comp(printf_comp_t __pc) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); printf_comp_t new_printf_comp(printf_domain_t __restrict __domain, locale_t __restrict __loc, const char * __restrict __fmt) __XPRINTF_ATTR((__nonnull__(1, 3))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); /**** Extensible printf execution ****/ int asxprintf_exec(char ** __restrict __ret, printf_comp_t __restrict __pc, ...) __XPRINTF_ATTR((__nonnull__(1, 2))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int dxprintf_exec(int __fd, printf_comp_t __restrict __pc, ...) __XPRINTF_ATTR((__nonnull__(2))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int fxprintf_exec(FILE * __restrict __stream, printf_comp_t __restrict __pc, ...) __XPRINTF_ATTR((__nonnull__(1, 2))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int sxprintf_exec(char * __restrict __str, size_t __size, printf_comp_t __restrict __pc, ...) __XPRINTF_ATTR((__nonnull__(1, 3))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int xprintf_exec(printf_comp_t __restrict __pc, ...) __XPRINTF_ATTR((__nonnull__(1))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vasxprintf_exec(char ** __restrict __ret, printf_comp_t __restrict __pc, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 2))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vdxprintf_exec(int __fd, printf_comp_t __restrict __pc, va_list __ap) __XPRINTF_ATTR((__nonnull__(2))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vfxprintf_exec(FILE * __restrict __stream, printf_comp_t __restrict __pc, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 2))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vsxprintf_exec(char * __restrict __str, size_t __size, printf_comp_t __restrict __pc, va_list __ap) __XPRINTF_ATTR((__nonnull__(1, 3))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); int vxprintf_exec(printf_comp_t __restrict __pc, va_list __ap) __XPRINTF_ATTR((__nonnull__(1))) __OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0); __END_DECLS #endif /* !_PRINTF_H */
4,434
16,461
<filename>ios/versioned/sdk43/EXFont/EXFont/ABI43_0_0EXFontLoaderProcessor.h
// Copyright 2015-present 650 Industries. All rights reserved.

#import <Foundation/Foundation.h>
#import <ABI43_0_0ExpoModulesCore/ABI43_0_0EXFontProcessorInterface.h>
#import <ABI43_0_0EXFont/ABI43_0_0EXFontManager.h>

@interface ABI43_0_0EXFontLoaderProcessor : NSObject <ABI43_0_0EXFontProcessorInterface>

- (instancetype)initWithFontFamilyPrefix:(NSString *)prefix
                                 manager:(ABI43_0_0EXFontManager *)manager;

- (instancetype)initWithManager:(ABI43_0_0EXFontManager *)manager;

@end
249
1,338
/* * Copyright 2002-2006, Haiku, Inc. * Distributed under the terms of the MIT license. * * Authors: * <NAME> * <NAME>, <EMAIL> * <NAME> */ #ifndef TRANSLATOR_LIST_VIEW_H #define TRANSLATOR_LIST_VIEW_H #include <ListView.h> #include <String.h> #include <TranslationDefs.h> class TranslatorItem : public BStringItem { public: TranslatorItem(translator_id id, const char* name); virtual ~TranslatorItem(); translator_id ID() const { return fID; } const BString& Supertype() const { return fSupertype; } private: translator_id fID; BString fSupertype; }; class TranslatorListView : public BListView { public: TranslatorListView(const char* name, list_view_type type = B_SINGLE_SELECTION_LIST); virtual ~TranslatorListView(); TranslatorItem* TranslatorAt(int32 index) const; virtual void MessageReceived(BMessage* message); virtual void MouseMoved(BPoint point, uint32 transit, const BMessage* msg); void SortItems(); }; #endif // TRANSLATOR_LIST_VIEW_H
398
353
<reponame>xiaoxiang1238/junixsocket /* * junixsocket * * Copyright 2009-2021 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.newsclub.net.unix; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; import java.nio.ByteBuffer; import org.junit.jupiter.api.Test; public class AFUNIXSocketPairTest { // CPD-OFF @Test public void testSocketPair() throws Exception { AFUNIXSocketPair<AFUNIXSocketChannel> pair = AFUNIXSocketPair.open(); AFUNIXSocketChannel sc1 = pair.getSocket1(); AFUNIXSocketChannel sc2 = pair.getSocket2(); assertTrue(sc1.isConnected()); assertTrue(sc2.isConnected()); assertNotEquals(pair.getSocket1(), pair.getSocket2()); assertNotEquals(pair.getSocket1().socket(), pair.getSocket2().socket()); assertEquals(sc1.getPeerCredentials(), sc2.getPeerCredentials()); ByteBuffer bb = ByteBuffer.allocate(4096); bb.putInt(0x04030201); bb.flip(); sc1.write(bb); ByteBuffer bb2 = ByteBuffer.allocate(4096); sc2.read(bb2); bb2.flip(); assertEquals(0x04030201, bb2.getInt()); assertNull(pair.getSocket1().getLocalAddress()); assertNull(pair.getSocket2().getLocalAddress()); assertNull(pair.getSocket1().getRemoteAddress()); assertNull(pair.getSocket1().getRemoteAddress()); } @Test @AFUNIXSocketCapabilityRequirement(AFUNIXSocketCapability.CAPABILITY_DATAGRAMS) public void testDatagramPair() throws Exception { AFUNIXSocketPair<AFUNIXDatagramChannel> pair = AFUNIXSocketPair.openDatagram(); AFUNIXDatagramChannel sc1 = pair.getSocket1(); AFUNIXDatagramChannel sc2 = pair.getSocket2(); assertTrue(sc1.isConnected()); assertTrue(sc2.isConnected()); assertNotEquals(pair.getSocket1(), pair.getSocket2()); assertNotEquals(pair.getSocket1().socket(), pair.getSocket2().socket()); assertEquals(sc1.getPeerCredentials(), sc2.getPeerCredentials()); ByteBuffer bb = ByteBuffer.allocate(4096); bb.putInt(0x04030201); bb.flip(); sc1.write(bb); ByteBuffer bb2 = ByteBuffer.allocate(4096); sc2.read(bb2); bb2.flip(); assertEquals(0x04030201, bb2.getInt()); assertNull(pair.getSocket1().getLocalAddress()); assertNull(pair.getSocket2().getLocalAddress()); assertNull(pair.getSocket1().getRemoteAddress()); assertNull(pair.getSocket1().getRemoteAddress()); } }
1,098
854
<filename>core/src/main/java/dev/morphia/mapping/conventions/package-info.java @NonNullApi package dev.morphia.mapping.conventions; import com.mongodb.lang.NonNullApi;
63
1,167
<filename>form/form.priv.h /**************************************************************************** * Copyright (c) 1998-2012,2014 Free Software Foundation, Inc. * * * * Permission is hereby granted, free of charge, to any person obtaining a * * copy of this software and associated documentation files (the * * "Software"), to deal in the Software without restriction, including * * without limitation the rights to use, copy, modify, merge, publish, * * distribute, distribute with modifications, sublicense, and/or sell * * copies of the Software, and to permit persons to whom the Software is * * furnished to do so, subject to the following conditions: * * * * The above copyright notice and this permission notice shall be included * * in all copies or substantial portions of the Software. * * * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * * IN NO EVENT SHALL THE ABOVE COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, * * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR * * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR * * THE USE OR OTHER DEALINGS IN THE SOFTWARE. * * * * Except as contained in this notice, the name(s) of the above copyright * * holders shall not be used in advertising or otherwise to promote the * * sale, use or other dealings in this Software without prior written * * authorization. * ****************************************************************************/ /**************************************************************************** * Author: <NAME>, 1995,1997 * ****************************************************************************/ /* $Id: form.priv.h,v 0.38 2014/11/01 13:56:14 tom Exp $ */ #ifndef FORM_PRIV_H #define FORM_PRIV_H 1 /* *INDENT-OFF*/ #include "curses.priv.h" #include "mf_common.h" #if USE_WIDEC_SUPPORT #if HAVE_WCTYPE_H #include <wctype.h> #endif #ifndef MB_LEN_MAX #define MB_LEN_MAX 8 /* should be >= MB_CUR_MAX, but that may be a function */ #endif #define FIELD_CELL NCURSES_CH_T #define NCURSES_FIELD_INTERNALS char** expanded; WINDOW *working; #define NCURSES_FIELD_EXTENSION , (char **)0, (WINDOW *)0 #else #define FIELD_CELL char #define NCURSES_FIELD_EXTENSION /* nothing */ #endif #include "form.h" /*********************** * Default objects * ***********************/ extern NCURSES_EXPORT_VAR(FORM *) _nc_Default_Form; extern NCURSES_EXPORT_VAR(FIELD *) _nc_Default_Field; extern NCURSES_EXPORT_VAR(FIELDTYPE *) _nc_Default_FieldType; /* form status values */ #define _OVLMODE (0x04U) /* Form is in overlay mode */ #define _WINDOW_MODIFIED (0x10U) /* Current field window has been modified */ #define _FCHECK_REQUIRED (0x20U) /* Current field needs validation */ /* field status values */ #define _CHANGED (0x01U) /* Field has been changed */ #define _NEWTOP (0x02U) /* Vertical scrolling occurred */ #define _NEWPAGE (0x04U) /* field begins new page of form */ #define _MAY_GROW (0x08U) /* dynamic field may still grow */ /* fieldtype status values */ #define _LINKED_TYPE (0x01U) /* Type is a linked type */ #define _HAS_ARGS (0x02U) /* Type has arguments */ #define _HAS_CHOICE (0x04U) /* Type has choice methods */ #define _RESIDENT (0x08U) /* Type is built-in */ #define _GENERIC (0x10U) /* A generic field type */ /* This are the field options required to be a selectable field in field navigation requests */ #define O_SELECTABLE (O_ACTIVE | 
O_VISIBLE) /* If form is NULL replace form argument by default-form */ #define Normalize_Form(form) \ ((form) = (form != 0) ? (form) : _nc_Default_Form) /* If field is NULL replace field argument by default-field */ #define Normalize_Field(field) \ ((field) = (field != 0) ? (field) : _nc_Default_Field) #if NCURSES_SP_FUNCS #define Get_Form_Screen(form) \ ((form)->win ? _nc_screen_of((form->win)):CURRENT_SCREEN) #else #define Get_Form_Screen(form) CURRENT_SCREEN #endif /* Retrieve forms window */ #define Get_Form_Window(form) \ ((form)->sub \ ? (form)->sub \ : ((form)->win \ ? (form)->win \ : StdScreen(Get_Form_Screen(form)))) /* Calculate the size for a single buffer for this field */ #define Buffer_Length(field) ((field)->drows * (field)->dcols) /* Calculate the total size of all buffers for this field */ #define Total_Buffer_Size(field) \ ( (size_t)(Buffer_Length(field) + 1) * (size_t)(1+(field)->nbuf) * sizeof(FIELD_CELL) ) /* Logic to determine whether or not a field is single lined */ #define Single_Line_Field(field) \ (((field)->rows + (field)->nrow) == 1) #define Field_Has_Option(f,o) ((((unsigned)(f)->opts) & o) != 0) /* Logic to determine whether or not a field is selectable */ #define Field_Is_Selectable(f) (((unsigned)((f)->opts) & O_SELECTABLE)==O_SELECTABLE) #define Field_Is_Not_Selectable(f) (((unsigned)((f)->opts) & O_SELECTABLE)!=O_SELECTABLE) typedef struct typearg { struct typearg *left; struct typearg *right; } TypeArgument; /* This is a dummy request code (normally invalid) to be used internally with the form_driver() routine to position to the first active field on the form */ #define FIRST_ACTIVE_MAGIC (-291056) #define ALL_FORM_OPTS ( \ O_NL_OVERLOAD |\ O_BS_OVERLOAD ) #define STD_FIELD_OPTS (Field_Options)( \ O_VISIBLE |\ O_ACTIVE |\ O_PUBLIC |\ O_EDIT |\ O_WRAP |\ O_BLANK |\ O_AUTOSKIP|\ O_NULLOK |\ O_PASSOK |\ O_STATIC) #define ALL_FIELD_OPTS (Field_Options)( \ STD_FIELD_OPTS |\ O_DYNAMIC_JUSTIFY) #define C_BLANK ' ' #define is_blank(c) ((c)==C_BLANK) #define C_ZEROS '\0' extern NCURSES_EXPORT(TypeArgument *) _nc_Make_Argument (const FIELDTYPE*, va_list*, int*); extern NCURSES_EXPORT(TypeArgument *) _nc_Copy_Argument (const FIELDTYPE*, const TypeArgument*, int*); extern NCURSES_EXPORT(void) _nc_Free_Argument (const FIELDTYPE*, TypeArgument*); extern NCURSES_EXPORT(bool) _nc_Copy_Type (FIELD*, FIELD const *); extern NCURSES_EXPORT(void) _nc_Free_Type (FIELD *); extern NCURSES_EXPORT(int) _nc_Synchronize_Attributes (FIELD*); extern NCURSES_EXPORT(int) _nc_Synchronize_Options (FIELD*, Field_Options); extern NCURSES_EXPORT(int) _nc_Set_Form_Page (FORM*, int, FIELD*); extern NCURSES_EXPORT(int) _nc_Refresh_Current_Field (FORM*); extern NCURSES_EXPORT(FIELD *) _nc_First_Active_Field (FORM*); extern NCURSES_EXPORT(bool) _nc_Internal_Validation (FORM*); extern NCURSES_EXPORT(int) _nc_Set_Current_Field (FORM*, FIELD*); extern NCURSES_EXPORT(int) _nc_Position_Form_Cursor (FORM*); #if NCURSES_INTEROP_FUNCS extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_INTEGER(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_ALNUM(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_ALPHA(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_ENUM(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_NUMERIC(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_REGEXP(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_TYPE_IPV4(void); extern NCURSES_EXPORT(FIELDTYPE *) _nc_generic_fieldtype(bool (*const field_check) (FORM*, FIELD *, const void *), bool (*const char_check) (int, FORM*, FIELD*, const void *), 
bool (*const next)(FORM*,FIELD*,const void*), bool (*const prev)(FORM*,FIELD*,const void*), void (*freecallback)(void*)); extern NCURSES_EXPORT(int) _nc_set_generic_fieldtype(FIELD*, FIELDTYPE*, int (*)(void**)); extern NCURSES_EXPORT(WINDOW*) _nc_form_cursor(const FORM* , int* , int* ); #define INIT_FT_FUNC(func) {func} #else #define INIT_FT_FUNC(func) func #endif extern NCURSES_EXPORT(void) _nc_get_fieldbuffer(FORM*, FIELD*, FIELD_CELL*); #if USE_WIDEC_SUPPORT extern NCURSES_EXPORT(wchar_t *) _nc_Widen_String(char *, int *); #endif #ifdef TRACE #define returnField(code) TRACE_RETURN1(code,field) #define returnFieldPtr(code) TRACE_RETURN1(code,field_ptr) #define returnForm(code) TRACE_RETURN1(code,form) #define returnFieldType(code) TRACE_RETURN1(code,field_type) #define returnFormHook(code) TRACE_RETURN1(code,form_hook) extern NCURSES_EXPORT(FIELD **) _nc_retrace_field_ptr (FIELD **); extern NCURSES_EXPORT(FIELD *) _nc_retrace_field (FIELD *); extern NCURSES_EXPORT(FIELDTYPE *) _nc_retrace_field_type (FIELDTYPE *); extern NCURSES_EXPORT(FORM *) _nc_retrace_form (FORM *); extern NCURSES_EXPORT(Form_Hook) _nc_retrace_form_hook (Form_Hook); #else /* !TRACE */ #define returnFieldPtr(code) return code #define returnFieldType(code) return code #define returnField(code) return code #define returnForm(code) return code #define returnFormHook(code) return code #endif /* TRACE/!TRACE */ /* * Use Check_CTYPE_Field() to simplify FIELDTYPE's that use only the ccheck() * function. */ #if USE_WIDEC_SUPPORT #define Check_CTYPE_Field(result, buffer, width, ccheck) \ while (*buffer && *buffer == ' ') \ buffer++; \ if (*buffer) \ { \ bool blank = FALSE; \ int len; \ int n; \ wchar_t *list = _nc_Widen_String((char *)buffer, &len); \ if (list != 0) \ { \ result = TRUE; \ for (n = 0; n < len; ++n) \ { \ if (blank) \ { \ if (list[n] != ' ') \ { \ result = FALSE; \ break; \ } \ } \ else if (list[n] == ' ') \ { \ blank = TRUE; \ result = (n + 1 >= width); \ } \ else if (!ccheck(list[n], NULL)) \ { \ result = FALSE; \ break; \ } \ } \ free(list); \ } \ } #else #define Check_CTYPE_Field(result, buffer, width, ccheck) \ while (*buffer && *buffer == ' ') \ buffer++; \ if (*buffer) \ { \ unsigned char *s = buffer; \ int l = -1; \ while (*buffer && ccheck(*buffer, NULL)) \ buffer++; \ l = (int)(buffer - s); \ while (*buffer && *buffer == ' ') \ buffer++; \ result = ((*buffer || (l < width)) ? FALSE : TRUE); \ } #endif /* *INDENT-ON*/ #endif /* FORM_PRIV_H */
4,619
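A minimal sketch of the selectability logic in the private form header above, using placeholder bit values rather than libform's real O_ACTIVE/O_VISIBLE constants: Field_Is_Selectable() accepts a field only when every bit of O_SELECTABLE is set in its options word.

/* Illustration only: placeholder option bits, not ncurses' actual values,
 * and a plain unsigned word standing in for FIELD->opts. */
#include <stdio.h>

#define MY_O_VISIBLE    0x01U
#define MY_O_ACTIVE     0x02U
#define MY_O_SELECTABLE (MY_O_ACTIVE | MY_O_VISIBLE)

/* Mirrors Field_Is_Selectable(): all O_SELECTABLE bits must be present. */
static int is_selectable(unsigned opts)
{
    return (opts & MY_O_SELECTABLE) == MY_O_SELECTABLE;
}

int main(void)
{
    printf("%d\n", is_selectable(MY_O_ACTIVE));                 /* 0: active but hidden */
    printf("%d\n", is_selectable(MY_O_ACTIVE | MY_O_VISIBLE));  /* 1: selectable */
    return 0;
}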
1,144
<gh_stars>1000+ package de.metas.invoicecandidate.api.impl; /* * #%L * de.metas.swat.base * %% * Copyright (C) 2015 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ import java.math.BigDecimal; import java.util.Properties; import org.adempiere.util.lang.ObjectUtils; import de.metas.i18n.IMsgBL; import de.metas.invoicecandidate.api.IInvoiceCandidateEnqueueResult; import de.metas.lock.api.ILock; import de.metas.util.Check; import de.metas.util.Services; /** * @author al */ /* package */final class InvoiceCandidateEnqueueResult implements IInvoiceCandidateEnqueueResult { private static final String MSG_INVOICE_CANDIDATE_ENQUEUE = "InvoiceCandidateEnqueue"; private final int invoiceCandidateEnqueuedCount; private final int workpackageEnqueuedCount; private final int workpackageQueueSizeBeforeEnqueueing; private final BigDecimal totalNetAmtToInvoiceChecksum; private final ILock lock; /* package */ InvoiceCandidateEnqueueResult(final int invoiceCandidateEnqueuedCount, final int enqueuedWorkpackageCount, final int workpackageQueueSizeBeforeEnqueueing, final BigDecimal totalNetAmtToInvoiceChecksum, final ILock lock) { super(); Check.assume(invoiceCandidateEnqueuedCount >= 0, "invoiceCandidateEnqueuedCount > 0"); this.invoiceCandidateEnqueuedCount = invoiceCandidateEnqueuedCount; Check.assume(enqueuedWorkpackageCount >= 0, "Expected positive amount of enqueuedWorkpackageCount, but got {}", enqueuedWorkpackageCount); this.workpackageEnqueuedCount = enqueuedWorkpackageCount; this.workpackageQueueSizeBeforeEnqueueing = workpackageQueueSizeBeforeEnqueueing; this.totalNetAmtToInvoiceChecksum = totalNetAmtToInvoiceChecksum; Check.assumeNotNull(lock, "lock not null"); this.lock = lock; } @Override public String toString() { return ObjectUtils.toString(this); } @Override public String getSummaryTranslated(final Properties ctx) { final IMsgBL msgBL = Services.get(IMsgBL.class); final int countWorkpackages = getWorkpackageEnqueuedCount(); final int countUnprocessedWorkPackages = getWorkpackageQueueSizeBeforeEnqueueing(); return msgBL.getMsg(ctx, MSG_INVOICE_CANDIDATE_ENQUEUE, new Object[] { countWorkpackages, countUnprocessedWorkPackages }); } @Override public int getInvoiceCandidateEnqueuedCount() { return invoiceCandidateEnqueuedCount; } @Override public int getWorkpackageEnqueuedCount() { return workpackageEnqueuedCount; } @Override public int getWorkpackageQueueSizeBeforeEnqueueing() { return workpackageQueueSizeBeforeEnqueueing; } @Override public BigDecimal getTotalNetAmtToInvoiceChecksum() { return totalNetAmtToInvoiceChecksum; } @Override public ILock getLock() { return lock; } }
1,092
577
package org.python.util.install; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.FocusEvent; import java.awt.event.FocusListener; import java.io.File; import java.io.IOException; import javax.swing.JButton; import javax.swing.JComponent; import javax.swing.JFileChooser; import javax.swing.JLabel; import javax.swing.JPanel; import javax.swing.JTextField; public class DirectorySelectionPage extends AbstractWizardPage { private static final long serialVersionUID = -3672273150338356549L; private JLabel _label; private JTextField _directory; private JButton _browse; private JarInfo _jarInfo; public DirectorySelectionPage(JarInfo jarInfo) { super(); _jarInfo = jarInfo; initComponents(); } private void initComponents() { // label _label = new JLabel(); // directory _directory = new JTextField(40); _directory.addFocusListener(new DirectoryFocusListener()); // browse button _browse = new JButton(); _browse.addActionListener(new BrowseButtonListener()); JPanel panel = new JPanel(); GridBagLayout gridBagLayout = new GridBagLayout(); panel.setLayout(gridBagLayout); GridBagConstraints gridBagConstraints = newGridBagConstraints(); gridBagConstraints.gridx = 0; gridBagConstraints.gridy = 0; panel.add(_label, gridBagConstraints); gridBagConstraints.gridx = 0; gridBagConstraints.gridy = 1; panel.add(_directory, gridBagConstraints); gridBagConstraints.gridx = 1; gridBagConstraints.gridy = 1; panel.add(_browse, gridBagConstraints); add(panel); } JTextField getDirectory() { return _directory; } protected String getTitle() { return getText(TARGET_DIRECTORY_PROPERTY); } protected String getDescription() { return getText(CHOOSE_LOCATION); } protected boolean isCancelVisible() { return true; } protected boolean isPreviousVisible() { return true; } protected boolean isNextVisible() { return true; } protected JComponent getFocusField() { return _directory; } protected void activate() { _label.setText(getText(SELECT_TARGET_DIRECTORY) + ": "); _browse.setText(getText(BROWSE)); String directory = FrameInstaller.getTargetDirectory(); if (directory == null || directory.length() <= 0) { File defaultDirectory = getDefaultDirectory(); try { directory = defaultDirectory.getCanonicalPath(); } catch (IOException e) { directory = "?"; } FrameInstaller.setTargetDirectory(directory); } _directory.setText(FrameInstaller.getTargetDirectory()); _directory.setToolTipText(_directory.getText()); } protected void passivate() { } protected void beforeValidate() { } private File getDefaultDirectory() { String directory = ""; File defaultDirectory = null; // 1st try (on windows): root if (Installation.isWindows()) { JavaHomeHandler handler = new JavaHomeHandler(); if (handler.isValidHome()) { directory = handler.getHome().getAbsolutePath(); if (directory.length() > 2) { directory = directory.substring(0, 2); } } else { directory = "C:"; } defaultDirectory = makeJythonSubDirectory(directory); } // 2st try: user.home if (defaultDirectory == null) { directory = System.getProperty("user.home", ""); if (directory.length() > 0) { defaultDirectory = makeJythonSubDirectory(directory); } } // 3rd try: user.dir if (defaultDirectory == null) { directory = System.getProperty("user.dir", ""); if (directory.length() > 0) { defaultDirectory = makeJythonSubDirectory(directory); } } // 4th try: current directory if (defaultDirectory == null) { defaultDirectory = makeJythonSubDirectory(new File(new File("dummy").getAbsolutePath()).getParent()); 
} return defaultDirectory; } private File makeJythonSubDirectory(String directory) { File defaultDirectory = null; File parentDirectory = new File(directory); if (parentDirectory.exists() && parentDirectory.isDirectory()) { String jythonSubDirectoryName = "jython" + (_jarInfo.getVersion()).replaceAll("\\+", ""); defaultDirectory = new File(parentDirectory, jythonSubDirectoryName); } return defaultDirectory; } private class BrowseButtonListener implements ActionListener { public void actionPerformed(ActionEvent e) { String directoryName = _directory.getText(); File directory = new File(directoryName); if (directory.exists()) { if (!directory.isDirectory()) { // switch to parent directory if user typed the name of a file directory = directory.getParentFile(); } } JFileChooser fileChooser = new JFileChooser(directory); fileChooser.setDialogTitle(getText(SELECT_TARGET_DIRECTORY)); // the filter is at the moment only used for the title of the dialog: fileChooser.setFileFilter(new DirectoryFilter()); fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); if (fileChooser.isAcceptAllFileFilterUsed()) { if (Installation.isMacintosh() && Installation.isJDK141()) { // work around ArrayIndexOutOfBoundsExceptio on Mac OS X, java version 1.4.1 } else { fileChooser.setAcceptAllFileFilterUsed(false); } } int returnValue = fileChooser.showDialog(_browse, getText(SELECT)); if (returnValue == JFileChooser.APPROVE_OPTION) { _directory.setText(fileChooser.getSelectedFile().getAbsolutePath()); _directory.setToolTipText(_directory.getText()); FrameInstaller.setTargetDirectory(_directory.getText()); } } } private class DirectoryFocusListener implements FocusListener { public void focusGained(FocusEvent e) { } public void focusLost(FocusEvent e) { FrameInstaller.setTargetDirectory(_directory.getText()); _directory.setToolTipText(_directory.getText()); } } }
2,940
2,671
def wee(waa, woo=False, wii=True): print "OK", waa, woo, wii wee("stuff") wee("stuff", "dog") wee("stuff", "dog", "cat") wee("stuff", wii="lamma") wee(wii="lamma", waa="pocky") wee(wii="lamma", waa="pocky", woo="blorp")
110