Columns: max_stars_count (int64, 301 to 224k), text (string, lengths 6 to 1.05M), token_count (int64, 3 to 727k)
max_stars_count: 436
<gh_stars>100-1000
// Copyright 2018 Schibsted Marketplaces Products & Technology As
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.schibsted.spt.data.jslt.impl;

import java.util.List;
import java.util.Collections;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.NullNode;

public class DotExpression extends AbstractNode {
  private String key;
  private ExpressionNode parent;

  public DotExpression(Location location) {
    super(location);
  }

  public DotExpression(String key, ExpressionNode parent, Location location) {
    super(location);
    this.key = key;
    this.parent = parent;
  }

  public JsonNode apply(Scope scope, JsonNode input) {
    // if there is no key we just return the input
    if (key == null)
      return input;

    // if we have a parent, get the input from the parent (preceding expr)
    if (parent != null)
      input = parent.apply(scope, input);

    // okay, do the keying
    JsonNode value = input.get(key);
    if (value == null)
      value = NullNode.instance;
    return value;
  }

  public List<ExpressionNode> getChildren() {
    if (parent == null)
      return Collections.EMPTY_LIST;
    else
      return Collections.singletonList(parent);
  }

  public void dump(int level) {
    System.out.println(NodeUtils.indent(level) + this);
  }

  public String toString() {
    String me = "." + (key == null ? "" : key);
    if (parent != null)
      return "" + parent + me;
    else
      return me;
  }

  // verify that we've built a correct DotExpression for our object
  // matcher (only used for that)
  public void checkOk(Location matcher) {
    // this object is OK, but might be a FailDotExpression higher up,
    // so check for that
    if (parent != null)
      ((DotExpression) parent).checkOk(matcher);
  }

  public ExpressionNode optimize() {
    if (parent != null)
      parent = parent.optimize();
    return this;
  }
}
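For orientation, here is a minimal usage sketch of the class above: it evaluates the key "name" against a small JSON document. The Jackson ObjectMapper setup and the null Location/Scope arguments are assumptions made for illustration, not taken from the JSLT sources.

// Hypothetical usage sketch (imports and exception handling elided for brevity):
//   import com.fasterxml.jackson.databind.ObjectMapper;
JsonNode input = new ObjectMapper().readTree("{\"name\": \"demo\"}");
// key ".name", no parent expression, no source location
DotExpression expr = new DotExpression("name", null, null);
JsonNode result = expr.apply(null, input);  // scope is not consulted when there is no parent
System.out.println(result);                 // "demo"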
token_count: 803
max_stars_count: 583
#include <memory> #include "base_test.hpp" #include "utils/assert.hpp" #include "concurrency/transaction_context.hpp" #include "expression/expression_functional.hpp" #include "expression/pqp_column_expression.hpp" #include "hyrise.hpp" #include "operators/get_table.hpp" #include "operators/maintenance/create_table.hpp" #include "operators/projection.hpp" #include "operators/table_wrapper.hpp" #include "operators/validate.hpp" #include "storage/table.hpp" namespace opossum { using namespace opossum::expression_functional; // NOLINT class CreateTableTest : public BaseTest { public: void SetUp() override { column_definitions.emplace_back("a", DataType::Int, false); column_definitions.emplace_back("b", DataType::Float, true); dummy_table_wrapper = std::make_shared<TableWrapper>(Table::create_dummy_table(column_definitions)); dummy_table_wrapper->never_clear_output(); dummy_table_wrapper->execute(); create_table = std::make_shared<CreateTable>("t", false, dummy_table_wrapper); } TableColumnDefinitions column_definitions; std::shared_ptr<TableWrapper> dummy_table_wrapper; std::shared_ptr<CreateTable> create_table; }; TEST_F(CreateTableTest, NameAndDescription) { EXPECT_EQ(create_table->name(), "CreateTable"); EXPECT_EQ(create_table->description(DescriptionMode::SingleLine), "CreateTable 't' ('a' int NOT NULL, 'b' float NULL)"); const auto context = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); create_table->set_transaction_context(context); create_table->execute(); context->commit(); dummy_table_wrapper->clear_output(); EXPECT_EQ(create_table->description(DescriptionMode::MultiLine), "CreateTable 't' ('a' int NOT NULL\n'b' float NULL)"); } TEST_F(CreateTableTest, Execute) { const auto context = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); create_table->set_transaction_context(context); create_table->execute(); context->commit(); EXPECT_TRUE(Hyrise::get().storage_manager.has_table("t")); const auto table = Hyrise::get().storage_manager.get_table("t"); EXPECT_EQ(table->row_count(), 0); EXPECT_EQ(table->column_definitions(), column_definitions); } TEST_F(CreateTableTest, TableAlreadyExists) { const auto context = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); create_table->set_transaction_context(context); create_table->execute(); // Table name "t" is taken now context->commit(); const auto create_different_table = std::make_shared<CreateTable>("t2", false, dummy_table_wrapper); const auto create_same_table = std::make_shared<CreateTable>("t", false, dummy_table_wrapper); const auto context_2 = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto context_3 = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); create_different_table->set_transaction_context(context_2); create_same_table->set_transaction_context(context_3); EXPECT_NO_THROW(create_different_table->execute()); context_2->commit(); EXPECT_THROW(create_same_table->execute(), std::logic_error); context_3->rollback(RollbackReason::Conflict); } TEST_F(CreateTableTest, ExecuteWithIfNotExists) { const auto context = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto ct_if_not_exists_1 = std::make_shared<CreateTable>("t", true, dummy_table_wrapper); ct_if_not_exists_1->set_transaction_context(context); ct_if_not_exists_1->execute(); context->commit(); EXPECT_TRUE(Hyrise::get().storage_manager.has_table("t")); const auto table = 
Hyrise::get().storage_manager.get_table("t"); EXPECT_EQ(table->row_count(), 0); EXPECT_EQ(table->column_definitions(), column_definitions); const auto context_2 = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto ct_if_not_exists_2 = std::make_shared<CreateTable>("t", true, dummy_table_wrapper); ct_if_not_exists_2->set_transaction_context(context_2); EXPECT_NO_THROW(ct_if_not_exists_2->execute()); context_2->commit(); } TEST_F(CreateTableTest, CreateTableAsSelect) { const auto table = load_table("resources/test_data/tbl/10_ints.tbl"); Hyrise::get().storage_manager.add_table("test", table); const auto context = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto get_table = std::make_shared<GetTable>("test"); get_table->set_transaction_context(context); get_table->execute(); const auto validate = std::make_shared<Validate>(get_table); validate->set_transaction_context(context); validate->execute(); const auto create_table_as = std::make_shared<CreateTable>("test_2", false, validate); create_table_as->set_transaction_context(context); EXPECT_NO_THROW(create_table_as->execute()); context->commit(); const auto created_table = Hyrise::get().storage_manager.get_table("test_2"); EXPECT_TABLE_EQ_ORDERED(created_table, table); Hyrise::get().storage_manager.drop_table("test"); EXPECT_TABLE_EQ_ORDERED(created_table, table); } TEST_F(CreateTableTest, CreateTableAsSelectWithProjection) { const auto table = load_table("resources/test_data/tbl/int_float.tbl"); Hyrise::get().storage_manager.add_table("test", table); const auto context = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto get_table = std::make_shared<GetTable>("test"); get_table->set_transaction_context(context); get_table->execute(); const auto validate = std::make_shared<Validate>(get_table); validate->set_transaction_context(context); validate->execute(); const std::shared_ptr<AbstractExpression> expr = add_(PQPColumnExpression::from_table(*table, "a"), PQPColumnExpression::from_table(*table, "b")); const auto projection = std::make_shared<Projection>(validate, expression_vector(expr)); projection->set_transaction_context(context); projection->execute(); const auto create_table_as = std::make_shared<CreateTable>("test_2", false, projection); create_table_as->set_transaction_context(context); EXPECT_NO_THROW(create_table_as->execute()); context->commit(); const auto created_table = Hyrise::get().storage_manager.get_table("test_2"); EXPECT_TABLE_EQ_ORDERED(created_table, load_table("resources/test_data/tbl/projection/int_float_add.tbl")); } TEST_F(CreateTableTest, CreateTableWithDifferentTransactionContexts) { const auto table = load_table("resources/test_data/tbl/10_ints.tbl"); Hyrise::get().storage_manager.add_table("test", table); const auto context_1 = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto context_2 = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); const auto context_3 = Hyrise::get().transaction_manager.new_transaction_context(AutoCommit::No); // Create table 1 with second context const auto get_table_1 = std::make_shared<GetTable>("test"); get_table_1->set_transaction_context(context_2); get_table_1->execute(); const auto validate_1 = std::make_shared<Validate>(get_table_1); validate_1->set_transaction_context(context_2); validate_1->execute(); const auto create_table_as_1 = std::make_shared<CreateTable>("test_2", false, validate_1); 
create_table_as_1->set_transaction_context(context_2); EXPECT_NO_THROW(create_table_as_1->execute()); // Create table 2 with first context, which should not see the rows of table 1 const auto get_table_2 = std::make_shared<GetTable>("test_2"); get_table_2->set_transaction_context(context_1); get_table_2->execute(); const auto validate_2 = std::make_shared<Validate>(get_table_2); validate_2->set_transaction_context(context_1); validate_2->execute(); const auto create_table_as_2 = std::make_shared<CreateTable>("test_3", false, validate_2); create_table_as_2->set_transaction_context(context_1); EXPECT_NO_THROW(create_table_as_2->execute()); context_1->commit(); const auto table_3 = Hyrise::get().storage_manager.get_table("test_3"); EXPECT_EQ(table_3->row_count(), 0); context_2->rollback(RollbackReason::User); const auto get_table_3 = std::make_shared<GetTable>("test_2"); get_table_3->set_transaction_context(context_3); get_table_3->execute(); const auto validate_3 = std::make_shared<Validate>(get_table_3); validate_3->set_transaction_context(context_3); validate_3->execute(); context_3->commit(); EXPECT_EQ(validate_3->get_output()->row_count(), 0); } } // namespace opossum
token_count: 2,927
max_stars_count: 4,845
package cn.dev33.satoken.context.model;

/**
 * [Storage] wrapper interface.
 * <p> Reads and writes values within the Request scope.
 * @author kong
 */
public interface SaStorage {

    /**
     * Get the underlying source object.
     * @return see note
     */
    public Object getSource();

    /**
     * Write a value into the [Request scope].
     * @param key   the key
     * @param value the value
     */
    public void set(String key, Object value);

    /**
     * Read a value from the [Request scope].
     * @param key the key
     * @return the value
     */
    public Object get(String key);

    /**
     * Remove a value from the [Request scope].
     * @param key the key
     */
    public void delete(String key);

}
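As a point of reference, a minimal in-memory implementation of this interface might look like the sketch below. It simply backs the Request scope with a HashMap and is an illustration only, not one of sa-token's bundled SaStorage implementations.

// Illustrative in-memory implementation (not part of sa-token).
import cn.dev33.satoken.context.model.SaStorage;
import java.util.HashMap;
import java.util.Map;

public class MapSaStorage implements SaStorage {
    private final Map<String, Object> data = new HashMap<>();

    @Override
    public Object getSource() { return data; }

    @Override
    public void set(String key, Object value) { data.put(key, value); }

    @Override
    public Object get(String key) { return data.get(key); }

    @Override
    public void delete(String key) { data.remove(key); }
}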
token_count: 317
max_stars_count: 594
<gh_stars>100-1000
#pragma once

#include <Engine/Material/BSDF.h>
#include <Engine/Material/GGX.h>

namespace Ubpa {
    class BSDF_FrostedGlass : public BSDF {
    public:
        BSDF_FrostedGlass(const rgbf& colorFactor = rgbf(1.f), float roughnessFactor = 1.f, float ior = 1.5f)
            : colorFactor(colorFactor), roughnessFactor(roughnessFactor), ior(ior),
            colorTexture(nullptr), roughnessTexture(nullptr), aoTexture(nullptr), normalTexture(nullptr) { }

    public:
        static const Ptr<BSDF_FrostedGlass> New(const rgbf& colorFactor = rgbf(1.f), float roughnessFactor = 1.f, float ior = 1.5f) {
            return Ubpa::New<BSDF_FrostedGlass>(colorFactor, roughnessFactor, ior);
        }

    protected:
        virtual ~BSDF_FrostedGlass() = default;

    public:
        virtual const rgbf F(const normalf& wo, const normalf& wi, const pointf2& texcoord) override;

        // probability density function
        virtual float PDF(const normalf& wo, const normalf& wi, const pointf2& texcoord) override;

        // PD is probability density
        // return albedo
        virtual const rgbf Sample_f(const normalf& wo, const pointf2& texcoord, normalf& wi, float& PD) override;

        virtual void ChangeNormal(const pointf2& texcoord, const normalf& tangent, normalf& normal) const override;

    private:
        static float Fr(const normalf& v, const normalf& h, float ior);

    private:
        const rgbf GetColor(const pointf2& texcoord) const;
        float GetRoughness(const pointf2& texcoord) const;
        float GetAO(const pointf2& texcoord) const;

    public:
        GGX ggx;

        rgbf colorFactor;
        Ptr<Image> colorTexture;

        float roughnessFactor;
        Ptr<Image> roughnessTexture;

        Ptr<Image> aoTexture;
        Ptr<Image> normalTexture;

        float ior;
    };
}
token_count: 609
max_stars_count: 423
// {{{ MIT License // Copyright 2017 <NAME> // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. // }}} #ifndef _GRINGO_BACKTRACE_HH #define _GRINGO_BACKTRACE_HH #include <libunwind.h> #include <cstring> #include <cxxabi.h> #include <cstdio> #include <unistd.h> #include <unordered_map> #include <string> #include <cstdlib> // TODO: given a little time this could be cleaned up // to remove the ugly fixed size strings // anyway this is just debug code... namespace Gringo { inline const char* getExecutableName() { static char* exe = 0; if (!exe) { char link[4096]; static char _exe[4096]; _exe[0] = '\0'; snprintf(link, sizeof(link), "/proc/%d/exe", getpid()); readlink(link, _exe, sizeof(link)); exe = _exe; } return exe; } inline int getFileAndLine (unw_word_t addr, char *file, int *line) { char buf[4096]; static std::unordered_map<unw_word_t, std::string> cache; std::string &s = cache[addr]; if (!s.empty()) { strncpy(buf, s.c_str(), sizeof(buf)); } else { snprintf (buf, sizeof(buf), "/home/wv/bin/linux/64/binutils-2.23.1/bin/addr2line -C -e %s -f -i %lx", getExecutableName(), addr); FILE* f = popen (buf, "r"); if (f == NULL) { perror (buf); return 0; } fgets (buf, sizeof(buf), f); fgets (buf, sizeof(buf), f); pclose(f); s = buf; const char *pref = "/home/kaminski/svn/wv/Programming/gringo/trunk/"; if (strncmp(pref, s.c_str(), strlen(pref)) == 0) { s = buf + strlen(pref); strncpy(buf, s.c_str(), sizeof(buf)); } } if (buf[0] != '?') { char *p = buf; while (*p != ':') { p++; } *p++ = 0; strncpy (file , buf, 4096); sscanf (p,"%d", line); } else { strncpy (file,"unkown", 4096); *line = 0; } return 1; } inline void showBacktrace (void) { char name[4096]; int status; unw_cursor_t cursor; unw_context_t uc; unw_word_t ip, sp, offp; unw_getcontext(&uc); unw_init_local(&cursor, &uc); while (unw_step(&cursor) > 0) { char file[4096]; int line = 0; name[0] = '\0'; unw_get_proc_name(&cursor, name, sizeof(name), &offp); unw_get_reg(&cursor, UNW_REG_IP, &ip); unw_get_reg(&cursor, UNW_REG_SP, &sp); if (strcmp(name, "__libc_start_main") == 0 || strcmp(name, "_start") == 0) { continue; } char *realname = abi::__cxa_demangle(name, 0, 0, &status); getFileAndLine((long)ip, file, &line); printf("%s in %s:%d\n", !status ? realname : name, file, line); free(realname); } } } // namespace Gringo #endif // _GRINGO_BACKTRACE_HH
token_count: 1,621
max_stars_count: 6,304
<reponame>mohad12211/skia
/*
 * Copyright 2012 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef BenchLogger_DEFINED
#define BenchLogger_DEFINED

#include <stdio.h>

#include "include/core/SkString.h"
#include "include/core/SkTypes.h"

class SkFILEWStream;

/**
 * Class that allows logging to a file while simultaneously logging to stdout/stderr.
 */
class BenchLogger {
public:
    BenchLogger();

    /**
     * Not virtual, since this class is not intended to be subclassed.
     */
    ~BenchLogger();

    /**
     * Specify a file to write progress logs to. Unless this is called with a valid file path,
     * BenchLogger will only write to stdout/stderr.
     */
    bool SetLogFile(const char file[]);

    /**
     * Log an error to stderr, taking a C style string as input.
     */
    void logError(const char msg[]) { this->nativeLogError(msg); }

    /**
     * Log an error to stderr, taking an SkString as input.
     */
    void logError(const SkString& str) { this->nativeLogError(str.c_str()); }

    /**
     * Log the progress of the bench tool to both stdout and the log file specified by SetLogFile,
     * if any, taking a C style string as input.
     */
    void logProgress(const char msg[]) {
        this->nativeLogProgress(msg);
        this->fileWrite(msg, strlen(msg));
    }

    /**
     * Log the progress of the bench tool to both stdout and the log file specified by SetLogFile,
     * if any, taking an SkString as input.
     */
    void logProgress(const SkString& str) {
        this->nativeLogProgress(str.c_str());
        this->fileWrite(str.c_str(), str.size());
    }

private:
#ifdef SK_BUILD_FOR_ANDROID
    void nativeLogError(const char msg[]) { SkDebugf("%s", msg); }
#else
    void nativeLogError(const char msg[]) { fprintf(stderr, "%s", msg); }
#endif
    void nativeLogProgress(const char msg[]) { SkDebugf("%s", msg); }

    void fileWrite(const char msg[], size_t size);

    SkFILEWStream* fFileStream;
};

#endif // BenchLogger_DEFINED
token_count: 765
max_stars_count: 6,098
<reponame>ahmedengu/h2o-3
package water.rapids.ast.prims.matrix;

import hex.DMatrix;
import water.fvec.Frame;
import water.rapids.Env;
import water.rapids.vals.ValFrame;
import water.rapids.ast.AstPrimitive;
import water.rapids.ast.AstRoot;

/**
 * Matrix multiplication
 */
public class AstMMult extends AstPrimitive {
  @Override
  public String[] args() {
    return new String[]{"ary", "ary2"};
  }

  @Override
  public int nargs() {
    return 1 + 2;
  } // (x X1 X2)

  @Override
  public String str() {
    return "x";
  }

  @Override
  public ValFrame apply(Env env, Env.StackHelp stk, AstRoot asts[]) {
    Frame X1 = stk.track(asts[1].exec(env)).getFrame();
    Frame X2 = stk.track(asts[2].exec(env)).getFrame();
    return new ValFrame(DMatrix.mmul(X1, X2));
  }
}
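The primitive wires the Rapids expression (x X1 X2) to DMatrix.mmul. Below is a small hypothetical helper showing the same underlying call, assuming two conformable Frames obtained elsewhere; it is a sketch, not part of H2O.

// Sketch: the call AstMMult ultimately performs, factored into a helper.
// Frame parsing/creation is outside this sketch; X1's column count must equal X2's row count.
import hex.DMatrix;
import water.fvec.Frame;

public final class MMulSketch {
    public static Frame multiply(Frame X1, Frame X2) {
        return DMatrix.mmul(X1, X2); // same call the (x X1 X2) primitive makes
    }
}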
token_count: 303
max_stars_count: 611
// DejaLu
// Copyright (c) 2015 <NAME>. All rights reserved.

#ifndef dejalu_DJLSingleton_h
#define dejalu_DJLSingleton_h

#define DJLSINGLETON(className) \
    static className * singleton = nil; \
    static dispatch_once_t onceToken; \
    dispatch_once(&onceToken, ^{ \
        singleton = [[className alloc] init]; \
    }); \
    return singleton;

#endif
token_count: 129
max_stars_count: 988
//------------------------------------------------------------------------------
// GB_assign_zombie.h: definitions for GB_assign_zombie* functions
//------------------------------------------------------------------------------

// SuiteSparse:GraphBLAS, <NAME>, (c) 2017-2021, All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

//------------------------------------------------------------------------------

#ifndef GB_ASSIGN_ZOMBIE_H
#define GB_ASSIGN_ZOMBIE_H

#include "GB_ij.h"

void GB_assign_zombie1
(
    GrB_Matrix C,
    const int64_t j,
    GB_Context Context
) ;

void GB_assign_zombie2
(
    GrB_Matrix C,
    const int64_t i,
    GB_Context Context
) ;

void GB_assign_zombie3
(
    GrB_Matrix C,
    const GrB_Matrix M,
    const bool Mask_comp,
    const bool Mask_struct,     // if true, use only the structure of M
    const int64_t j,
    const GrB_Index *I,
    const int64_t nI,
    const int Ikind,
    const int64_t Icolon [3],
    GB_Context Context
) ;

void GB_assign_zombie4
(
    GrB_Matrix C,
    const GrB_Matrix M,
    const bool Mask_comp,
    const bool Mask_struct,     // if true, use only the structure of M
    const int64_t i,
    const GrB_Index *J,
    const int64_t nJ,
    const int Jkind,
    const int64_t Jcolon [3],
    GB_Context Context
) ;

GrB_Info GB_assign_zombie5
(
    GrB_Matrix C,
    const GrB_Matrix M,
    const bool Mask_comp,
    const bool Mask_struct,     // if true, use only the structure of M
    const GrB_Index *I,
    const int64_t nI,
    const int Ikind,
    const int64_t Icolon [3],
    const GrB_Index *J,
    const int64_t nJ,
    const int Jkind,
    const int64_t Jcolon [3],
    GB_Context Context
) ;

#endif
token_count: 678
max_stars_count: 1,444
<gh_stars>1000+ package mage.cards.d; import java.util.UUID; import mage.abilities.Ability; import mage.abilities.DelayedTriggeredAbility; import mage.abilities.common.BeginningOfUpkeepTriggeredAbility; import mage.abilities.common.delayed.AtTheBeginOfNextEndStepDelayedTriggeredAbility; import mage.abilities.effects.ContinuousEffect; import mage.abilities.effects.OneShotEffect; import mage.abilities.effects.common.ExileTargetEffect; import mage.abilities.effects.common.LoseLifeSourceControllerEffect; import mage.abilities.effects.common.continuous.GainAbilityTargetEffect; import mage.abilities.keyword.HasteAbility; import mage.cards.Card; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.*; import mage.filter.StaticFilters; import mage.game.Game; import mage.game.permanent.Permanent; import mage.players.Player; import mage.target.common.TargetCardInYourGraveyard; import mage.target.targetpointer.FixedTarget; /** * * @author fireshoes */ public final class DawnOfTheDead extends CardImpl { public DawnOfTheDead(UUID ownerId, CardSetInfo setInfo) { super(ownerId, setInfo, new CardType[]{CardType.ENCHANTMENT}, "{2}{B}{B}{B}"); // At the beginning of your upkeep, you lose 1 life. this.addAbility(new BeginningOfUpkeepTriggeredAbility(Zone.BATTLEFIELD, new LoseLifeSourceControllerEffect(1), TargetController.YOU, false)); // At the beginning of your upkeep, you may return target creature card from your graveyard to the battlefield. // That creature gains haste until end of turn. Exile it at the beginning of the next end step. Ability ability = new BeginningOfUpkeepTriggeredAbility(Zone.BATTLEFIELD, new DawnOfTheDeadEffect(), TargetController.YOU, true); ability.addTarget(new TargetCardInYourGraveyard(StaticFilters.FILTER_CARD_CREATURE_YOUR_GRAVEYARD)); this.addAbility(ability); } private DawnOfTheDead(final DawnOfTheDead card) { super(card); } @Override public DawnOfTheDead copy() { return new DawnOfTheDead(this); } } class DawnOfTheDeadEffect extends OneShotEffect { public DawnOfTheDeadEffect() { super(Outcome.PutCreatureInPlay); this.staticText = "return target creature card from your graveyard to the battlefield. That creature gains haste until end of turn. Exile it at the beginning of the next end step"; } public DawnOfTheDeadEffect(final DawnOfTheDeadEffect effect) { super(effect); } @Override public DawnOfTheDeadEffect copy() { return new DawnOfTheDeadEffect(this); } @Override public boolean apply(Game game, Ability source) { Card card = game.getCard(this.getTargetPointer().getFirst(game, source)); Player controller = game.getPlayer(source.getControllerId()); if (controller != null && card != null) { if (controller.moveCards(card, Zone.BATTLEFIELD, source, game)) { Permanent creature = game.getPermanent(card.getId()); if (creature != null) { // gains haste ContinuousEffect effect = new GainAbilityTargetEffect(HasteAbility.getInstance(), Duration.EndOfTurn); effect.setTargetPointer(new FixedTarget(creature, game)); game.addEffect(effect, source); // Exile at begin of next end step ExileTargetEffect exileEffect = new ExileTargetEffect(null, null, Zone.BATTLEFIELD); exileEffect.setTargetPointer(new FixedTarget(creature, game)); DelayedTriggeredAbility delayedAbility = new AtTheBeginOfNextEndStepDelayedTriggeredAbility(exileEffect); game.addDelayedTriggeredAbility(delayedAbility, source); } } return true; } return false; } }
token_count: 1,415
max_stars_count: 5,871
<filename>src/passes/OnceReduction.cpp /* * Copyright 2021 WebAssembly Community Group participants * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // // Reduces the amount of calls to functions that only run once. A "run-once" // or "once" function is a function guarded by a global to make sure it runs a // single time: // // global foo$once = 0; // // function foo() { // if (foo$once) return; // foo$once = 1; // ..do some work.. // } // // If we verify that there are no other kind of sets to that global - that is, // it is only used to guard this code - then we can remove subsequent calls to // the function, // // foo(); // ..stuff.. // foo(); // this call can be removed // // The latter call can be removed since it has definitely run by then. // // TODO: "Once" globals are effectively boolean in that all non-zero values are // indistinguishable, and so we could rewrite them all to be 1. // #include <atomic> #include "cfg/domtree.h" #include "ir/utils.h" #include "pass.h" #include "support/unique_deferring_queue.h" #include "wasm-builder.h" #include "wasm.h" namespace wasm { namespace { struct OptInfo { // Maps global names to whether they are possible indicators of "once" // functions. A "once" global has these properties: // // * They are only ever written to with non-zero values. // * They are never read from except in the beginning of a "once" function // (otherwise, execution might be affected by the specific values of the // global, instead of just using it to guard the "once" function). // // Those properties ensure that the global is monotonic in the sense that it // begins at zero and, if they are written to, will only receive a non-zero // value - they never return to 0. std::unordered_map<Name, std::atomic<bool>> onceGlobals; // Maps functions to whether they are "once", by indicating the global that // they use for that purpose. An empty name means they are not "once". std::unordered_map<Name, Name> onceFuncs; // For each function, the "once" globals that are definitely set after calling // it. If the function is "once" itself, that is included, but it also // includes any other "once" functions we definitely call, and so forth. // The "new" version is written to in each iteration, and then swapped with // the main one (to avoid reading and writing in parallel). std::unordered_map<Name, std::unordered_set<Name>> onceGlobalsSetInFuncs, newOnceGlobalsSetInFuncs; }; struct Scanner : public WalkerPass<PostWalker<Scanner>> { bool isFunctionParallel() override { return true; } Scanner(OptInfo& optInfo) : optInfo(optInfo) {} Scanner* create() override { return new Scanner(optInfo); } // All the globals we read from. Any read of a global prevents us from // optimizing, unless it is the single read at the top of an "only" function // (as other reads might be used to check for the value of the global in // complex ways that we do not want to try to reason about). 
std::unordered_map<Name, Index> readGlobals; void visitGlobalGet(GlobalGet* curr) { readGlobals[curr->name]++; } void visitGlobalSet(GlobalSet* curr) { if (!curr->value->type.isInteger()) { // This is either a type we don't care about, or an unreachable set which // we also don't care about. return; } if (auto* c = curr->value->dynCast<Const>()) { if (c->value.getInteger() > 0) { // This writes a non-zero constant, which is what we hoped for. return; } } // This is not a constant, or it is zero - failure. optInfo.onceGlobals.at(curr->name) = false; } void visitFunction(Function* curr) { // TODO: support params and results? if (curr->getParams() == Type::none && curr->getResults() == Type::none) { auto global = getOnceGlobal(curr->body); if (global.is()) { // This is a "once" function, as best we can tell for now. Further // information may cause a problem, say, if the global is used in a bad // way in another function, so we may undo this. optInfo.onceFuncs.at(curr->name) = global; // We can ignore the get in the "once" pattern at the top of the // function. readGlobals[global]--; } } for (auto& kv : readGlobals) { auto global = kv.first; auto count = kv.second; if (count > 0) { // This global has reads we cannot reason about, so do not optimize it. optInfo.onceGlobals.at(global) = false; } } } // Check if a function body is in the "once" pattern. Return the name of the // global if so, or an empty name otherwise. // // TODO: This pattern can show up in random places and not just at the top of // the "once" function - say, if that function was inlined somewhere - // so it might be good to look for the more general pattern everywhere. // TODO: Handle related patterns like if (!once) { .. }, but other opts will // tend to normalize to this form anyhow. Name getOnceGlobal(Expression* body) { // Look the pattern mentioned above: // // function foo() { // if (foo$once) return; // foo$once = 1; // ... // auto* block = body->dynCast<Block>(); if (!block) { return Name(); } auto& list = block->list; if (list.size() < 2) { return Name(); } auto* iff = list[0]->dynCast<If>(); if (!iff) { return Name(); } auto* get = iff->condition->dynCast<GlobalGet>(); if (!get) { return Name(); } if (!iff->ifTrue->is<Return>() || iff->ifFalse) { return Name(); } auto* set = list[1]->dynCast<GlobalSet>(); // Note that we have already checked the set's value earlier - that if it is // reached then it writes a non-zero constant. Those are properties that we // need from all sets. For this specific set, we also need it to actually // perform the write, that is, to not be unreachable (otherwise, the global // is not written here, and the function can execute more than once). if (!set || set->name != get->name || set->type == Type::unreachable) { return Name(); } return get->name; } private: OptInfo& optInfo; }; // Information in a basic block. We track relevant expressions, which are calls // calls to "once" functions, and writes to "once" globals. struct BlockInfo { std::vector<Expression*> exprs; }; // Performs optimization in all functions. This reads onceGlobalsSetInFuncs in // order to know what "once" globals are written by each function (so that when // we see a call, we can infer things), and when it finishes with a function it // has learned which "once" globals it must set, and it then writes out // newOnceGlobalsSetInFuncs with that result. Later iterations will then use // those values in place of onceGlobalsSetInFuncs, which propagate things to // callers. 
This in effect mixes local optimization with the global propagation // - as we need to run the full local optimization in order to infer the new // values for onceGlobalsSetInFuncs, that is unavoidable (in principle, we could // also do a full propagation to a fixed point in between running local // optimization, but that would require more code - it might be more efficient, // though). struct Optimizer : public WalkerPass<CFGWalker<Optimizer, Visitor<Optimizer>, BlockInfo>> { bool isFunctionParallel() override { return true; } Optimizer(OptInfo& optInfo) : optInfo(optInfo) {} Optimizer* create() override { return new Optimizer(optInfo); } void visitGlobalSet(GlobalSet* curr) { if (currBasicBlock) { currBasicBlock->contents.exprs.push_back(curr); } } void visitCall(Call* curr) { if (currBasicBlock) { currBasicBlock->contents.exprs.push_back(curr); } } void doWalkFunction(Function* func) { using Parent = WalkerPass<CFGWalker<Optimizer, Visitor<Optimizer>, BlockInfo>>; // Walk the function to builds the CFG. Parent::doWalkFunction(func); if (basicBlocks.empty()) { return; } // Build a dominator tree, which then tells us what to remove: if a call // appears in block A, then we do not need to make any calls in any blocks // dominated by A. DomTree<Parent::BasicBlock> domTree(basicBlocks); // Perform the work by going through the blocks in reverse postorder and // filling out which "once" globals have been written to. // Each index in this vector is the set of "once" globals written to in the // basic block with the same index. std::vector<std::unordered_set<Name>> onceGlobalsWrittenVec; auto numBlocks = basicBlocks.size(); onceGlobalsWrittenVec.resize(numBlocks); for (Index i = 0; i < numBlocks; i++) { auto* block = basicBlocks[i].get(); // Note that we take a reference here, which is how the data we accumulate // ends up stored. The blocks we dominate will see it later. auto& onceGlobalsWritten = onceGlobalsWrittenVec[i]; // Note information from our immediate dominator. // TODO: we could also intersect information from all of our preds. auto parent = domTree.iDoms[i]; if (parent == domTree.nonsense) { // This is either the entry node (which we need to process), or an // unreachable block (which we do not need to process - we leave that to // DCE). if (i > 0) { continue; } } else { // This block has an immediate dominator, so we know that everything // written to there can be assumed written. onceGlobalsWritten = onceGlobalsWrittenVec[parent]; } // Process the block's expressions. auto& exprs = block->contents.exprs; for (auto* expr : exprs) { // Given the name of a "once" global that is written by this // instruction, optimize. auto optimizeOnce = [&](Name globalName) { assert(optInfo.onceGlobals.at(globalName)); if (onceGlobalsWritten.count(globalName)) { // This global has already been written, so this expr is not needed, // regardless of whether it is a global.set or a call. // // Note that assertions below verify that there are no children that // we need to keep around, and so we can just nop the entire node. ExpressionManipulator::nop(expr); } else { // From here on, this global is set, hopefully allowing us to // optimize away others. onceGlobalsWritten.insert(globalName); } }; if (auto* set = expr->dynCast<GlobalSet>()) { if (optInfo.onceGlobals.at(set->name)) { // This global is written. 
assert(set->value->is<Const>()); optimizeOnce(set->name); } } else if (auto* call = expr->dynCast<Call>()) { if (optInfo.onceFuncs.at(call->target).is()) { // The global used by the "once" func is written. assert(call->operands.empty()); optimizeOnce(optInfo.onceFuncs.at(call->target)); continue; } // This is not a call to a "once" func. However, we may have inferred // that it definitely sets some "once" globals before it returns, and // we can use that information. for (auto globalName : optInfo.onceGlobalsSetInFuncs.at(call->target)) { onceGlobalsWritten.insert(globalName); } } else { WASM_UNREACHABLE("invalid expr"); } } } // As a result of the above optimization, we know which "once" globals are // definitely written in this function. Regardless of whether this is a // "once" function itself, that set of globals can be used in further // optimizations, as any call to this function must set those. // TODO: Aside from the entry block, we could intersect all the exit blocks. optInfo.newOnceGlobalsSetInFuncs[func->name] = std::move(onceGlobalsWrittenVec[0]); } private: OptInfo& optInfo; }; } // anonymous namespace struct OnceReduction : public Pass { void run(PassRunner* runner, Module* module) override { OptInfo optInfo; // Fill out the initial data. for (auto& global : module->globals) { // For a global to possibly be "once", it must be an integer, and to not // be imported (as a mutable import may be read and written to from the // outside). As we scan code we will turn this into false if we see // anything that proves the global is not "once". // TODO: This limitation could perhaps only be on mutable ones, but // immutable globals will not be considered "once" anyhow as they do // not fit the pattern of being written after the first call. // TODO: non-integer types? optInfo.onceGlobals[global->name] = global->type.isInteger() && !global->imported(); } for (auto& func : module->functions) { // Fill in the map so that it can be operated on in parallel. optInfo.onceFuncs[func->name] = Name(); } for (auto& ex : module->exports) { if (ex->kind == ExternalKind::Global) { // An exported global cannot be "once" since the outside may read and // write to it in ways we are unaware. // TODO: See comment above on mutability. optInfo.onceGlobals[ex->value] = false; } } // Scan the module to find out which globals and functions are "once". Scanner(optInfo).run(runner, module); // Combine the information. We found which globals appear to be "once", but // other information may have proven they are not so, in fact. Specifically, // for a function to be "once" we need its global to also be such. for (auto& kv : optInfo.onceFuncs) { Name& onceGlobal = kv.second; if (onceGlobal.is() && !optInfo.onceGlobals[onceGlobal]) { onceGlobal = Name(); } } // Optimize using what we found. Keep iterating while we find things to // optimize, which we estimate using a counter of the total number of once // globals set by functions: as that increases, it means we are propagating // useful information. // TODO: limit # of iterations? Index lastOnceGlobalsSet = 0; // First, initialize onceGlobalsSetInFuncs for the first iteration, by // ensuring each item is present, and adding the "once" global for "once" // funcs. bool foundOnce = false; for (auto& func : module->functions) { // Either way, at least fill the data structure for parallel operation. 
auto& set = optInfo.onceGlobalsSetInFuncs[func->name]; auto global = optInfo.onceFuncs[func->name]; if (global.is()) { set.insert(global); foundOnce = true; } } if (!foundOnce) { // Nothing to optimize. return; } while (1) { // Initialize all the items in the new data structure that will be // populated. for (auto& func : module->functions) { optInfo.newOnceGlobalsSetInFuncs[func->name]; } Optimizer(optInfo).run(runner, module); optInfo.onceGlobalsSetInFuncs = std::move(optInfo.newOnceGlobalsSetInFuncs); // Count how many once globals are set, and see if we have any more work // to do. Index currOnceGlobalsSet = 0; for (auto& kv : optInfo.onceGlobalsSetInFuncs) { auto& globals = kv.second; currOnceGlobalsSet += globals.size(); } assert(currOnceGlobalsSet >= lastOnceGlobalsSet); if (currOnceGlobalsSet > lastOnceGlobalsSet) { lastOnceGlobalsSet = currOnceGlobalsSet; continue; } return; } } }; Pass* createOnceReductionPass() { return new OnceReduction(); } } // namespace wasm
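To make the guarded pattern concrete outside of WebAssembly, the sketch below restates it in Java: a flag that starts at zero, is only ever written with a non-zero value, and is read once at the top of the function. This is purely illustrative and is not part of Binaryen.

// Illustrative only: the "run once" shape this pass recognizes, in Java rather than wasm.
public class OnceExample {
    private static int fooOnce = 0;   // plays the role of the "once" global

    static void foo() {
        if (fooOnce != 0) return;     // guard read at the top of the function
        fooOnce = 1;                  // only ever written with a non-zero value
        // ... do some one-time work ...
    }

    public static void main(String[] args) {
        foo();   // first call does the work
        // ... stuff ...
        foo();   // a later, dominated call is provably redundant; the pass removes
                 // the equivalent wasm call
    }
}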
token_count: 5,811
max_stars_count: 764
<reponame>freeunion1possible/sia-gateway /*- * << * sag * == * Copyright (C) 2019 sia * == * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * >> */ package com.creditease.gateway.admin.controller; import java.io.IOException; import java.util.Map; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.bind.annotation.CrossOrigin; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import org.springframework.web.bind.annotation.RestController; import com.creditease.gateway.admin.controller.base.BaseAdminController; import com.creditease.gateway.admin.service.FilterService; import com.creditease.gateway.domain.BwfilterObj; import com.creditease.gateway.excpetion.GatewayException; import com.creditease.gateway.excpetion.GatewayException.ExceptionType; import com.creditease.gateway.helper.JsonHelper; import com.creditease.gateway.message.Message; import com.creditease.gateway.message.Message.ResponseCode; /** * 黑白名单管理 * * @author peihua */ @RestController public class AdminFilterController extends BaseAdminController { public static final Logger LOGGER = LoggerFactory.getLogger(AdminFilterController.class); @Autowired FilterService filterService; @RequestMapping(value = "/addWBList2Route", produces = "application/json;charset=UTF-8") @CrossOrigin(methods = { RequestMethod.GET, RequestMethod.POST }, origins = "*") @ResponseBody public String addWBList2Route(@RequestBody String req) throws IOException { try { LOGGER.info("addWBList2Route by user:{}", authCheckor.getCurrentUser()); BwfilterObj rs = JsonHelper.toObject(req, BwfilterObj.class); String zuulGroupName = rs.getGroupId(); String id = rs.getRouteid(); boolean rst = filterService.addWhiteList2Route(zuulGroupName, id, rs); Message msg = null; if (rst) { msg = new Message(rst, ResponseCode.SUCCESS_CODE.getCode()); } else { msg = new Message(rst, ResponseCode.SERVER_ERROR_CODE.getCode()); } return JsonHelper.toString(msg); } catch (Exception e) { LOGGER.error("exception:{}", e.getLocalizedMessage()); new GatewayException(ExceptionType.AdminException, e); return returnErrorMsg("Admin黑白名单新增异常" + e.getMessage(), ResponseCode.SERVER_ERROR_CODE); } } /** * 修改路由黑/白名单 * * @param * @return * @throws IOException */ @RequestMapping(value = "/updateWBList2Route", produces = "application/json;charset=UTF-8") @CrossOrigin(methods = { RequestMethod.GET, RequestMethod.POST }, origins = "*") @ResponseBody public String updateWBList2Route(@RequestBody String req) throws IOException { Message msg = null; try { LOGGER.info("updateWBList2Route by user:{}", authCheckor.getCurrentUser()); BwfilterObj rs = JsonHelper.toObject(req, BwfilterObj.class); String zuulGroupName = rs.getGroupId(); String routeid = rs.getRouteid(); boolean rst = filterService.updateWBList2Route(zuulGroupName, routeid, rs); 
if (rst) { msg = new Message(rst, ResponseCode.SUCCESS_CODE.getCode()); } else { msg = new Message(rst, ResponseCode.SERVER_ERROR_CODE.getCode()); } msg.setCode(ResponseCode.SUCCESS_CODE.getCode()); msg.setResponse(rst); return JsonHelper.toString(msg); } catch (Exception e) { LOGGER.error("updateWBList2Route exception:{}", e.getLocalizedMessage()); new GatewayException(ExceptionType.AdminException, e); return returnErrorMsg("Admin黑白名单新增异常" + e.getMessage(), ResponseCode.SERVER_ERROR_CODE); } } /** * 根據zuulGroupName查詢所有白名單 * * @param * @return */ @RequestMapping(value = "/queryWBList", produces = "application/json;charset=UTF-8") @CrossOrigin(methods = { RequestMethod.GET, RequestMethod.POST }, origins = "*") @ResponseBody public String queryWhiteList(@RequestBody String req) throws IOException { try { LOGGER.info("QueryWhiteList by user:{}", authCheckor.getCurrentUser()); Message msg = new Message(); msg.setCode(ResponseCode.SUCCESS_CODE.getCode()); @SuppressWarnings("unchecked") Map<String, String> rs = JsonHelper.toObject(req, Map.class); String groupId = rs.get("groupId"); String routeid = rs.get("routeid"); Object rst = filterService.queryWhiteList(groupId, routeid); if (rst != null) { BwfilterObj obj = JsonHelper.toObject(rst.toString(), BwfilterObj.class); msg.setResponse(obj); } return JsonHelper.toString(msg); } catch (Exception e) { LOGGER.error("queryWhiteList exception:{}", e.getMessage()); new GatewayException(ExceptionType.AdminException, e); return returnErrorMsg("Admin黑白名单新增异常" + e.getMessage(), ResponseCode.SERVER_ERROR_CODE); } } @RequestMapping(value = "/deleteWBList2Route", produces = "application/json;charset=UTF-8") @CrossOrigin(methods = { RequestMethod.GET, RequestMethod.POST }, origins = "*") @ResponseBody public String deleteWBList2Route(@RequestBody String req) throws IOException { try { LOGGER.info("DeleteWBList2Route by user:", authCheckor.getCurrentUser()); BwfilterObj rs = JsonHelper.toObject(req, BwfilterObj.class); String zuulGroupName = rs.getGroupId(); String routeid = rs.getRouteid(); boolean rst = filterService.deleteWBList2Route(zuulGroupName, routeid, rs); Message msg = null; if (rst) { msg = new Message(rst, ResponseCode.SUCCESS_CODE.getCode()); } else { msg = new Message(rst, ResponseCode.SERVER_ERROR_CODE.getCode()); } return JsonHelper.toString(msg); } catch (Exception e) { LOGGER.error("Exception:{}", e.getMessage()); new GatewayException(ExceptionType.AdminException, e); return returnErrorMsg("Admin黑白名单新增异常" + e.getMessage(), ResponseCode.SERVER_ERROR_CODE); } } }
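For orientation, here is a rough sketch of the request handling these endpoints share: parse the JSON body into a BwfilterObj and read the group and route identifiers. The field names and values in the sample body are assumptions for illustration; only JsonHelper.toObject, getGroupId and getRouteid are taken from the code above.

// Hypothetical request body; other BwfilterObj fields are omitted.
String req = "{\"groupId\": \"demo-group\", \"routeid\": \"route-1\"}";
BwfilterObj rs = JsonHelper.toObject(req, BwfilterObj.class);
String zuulGroupName = rs.getGroupId();   // "demo-group"
String routeid = rs.getRouteid();         // "route-1"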
token_count: 2,338
max_stars_count: 4,716
from xonsh.completers.man import complete_from_man
from xonsh.completers.path import complete_dir
from xonsh.completers.tools import contextual_command_completer_for
from xonsh.parsers.completion_context import (
    CompletionContext,
    CommandContext,
)


@contextual_command_completer_for("cd")
def complete_cd(command: CommandContext):
    """
    Completion for "cd", includes only valid directory names.
    """
    results, lprefix = complete_dir(command)
    if len(results) == 0:
        raise StopIteration
    return results, lprefix


@contextual_command_completer_for("rmdir")
def complete_rmdir(command: CommandContext):
    """
    Completion for "rmdir", includes only valid directory names.
    """
    opts = complete_from_man(CompletionContext(command))
    comps, lp = complete_dir(command)
    if len(comps) == 0 and len(opts) == 0:
        raise StopIteration
    return comps | opts, lp
token_count: 327
max_stars_count: 475
// Copyright (c) 2015-2016 <NAME>
// License: Academic Free License ("AFL") v. 3.0
// AFL License page: http://opensource.org/licenses/AFL-3.0
// http://vittorioromeo.info | <EMAIL>

#pragma once

#include <ecst/config.hpp>
#include <boost/hana.hpp>

ECST_NAMESPACE
{
    /// @brief Alias for the `boost::hana` namespace.
    namespace bh = boost::hana;

    /// @brief Alias for `typename boost::hana::decay<...>::type`.
    template <typename... Ts>
    using decay_t = typename bh::detail::decay<Ts...>::type;
}
ECST_NAMESPACE_END

/// @brief Alias for `decay_t<decltype(...)>`.
#define ECST_DECAY_DECLTYPE(...) ::ecst::decay_t<decltype(__VA_ARGS__)>
token_count: 275
max_stars_count: 2,151
// Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/system/toast/toast_manager.h" #include "ash/screen_util.h" #include "ash/shelf/shelf.h" #include "ash/shelf/shelf_constants.h" #include "ash/shell.h" #include "ash/test/ash_test_base.h" #include "base/run_loop.h" #include "base/strings/string16.h" #include "base/strings/string_number_conversions.h" #include "base/strings/utf_string_conversions.h" #include "ui/compositor/scoped_animation_duration_scale_mode.h" #include "ui/display/manager/display_manager.h" #include "ui/views/widget/widget.h" namespace ash { class DummyEvent : public ui::Event { public: DummyEvent() : Event(ui::ET_UNKNOWN, base::TimeTicks(), 0) {} ~DummyEvent() override = default; }; class ToastManagerTest : public AshTestBase { public: ToastManagerTest() = default; ~ToastManagerTest() override = default; private: void SetUp() override { AshTestBase::SetUp(); manager_ = Shell::Get()->toast_manager(); manager_->ResetSerialForTesting(); EXPECT_EQ(0, GetToastSerial()); } protected: ToastManager* manager() { return manager_; } int GetToastSerial() { return manager_->serial_for_testing(); } ToastOverlay* GetCurrentOverlay() { return manager_->GetCurrentOverlayForTesting(); } views::Widget* GetCurrentWidget() { ToastOverlay* overlay = GetCurrentOverlay(); return overlay ? overlay->widget_for_testing() : nullptr; } ToastOverlayButton* GetDismissButton() { ToastOverlay* overlay = GetCurrentOverlay(); DCHECK(overlay); return overlay->dismiss_button_for_testing(); } base::string16 GetCurrentText() { ToastOverlay* overlay = GetCurrentOverlay(); return overlay ? overlay->text_ : base::string16(); } base::Optional<base::string16> GetCurrentDismissText() { ToastOverlay* overlay = GetCurrentOverlay(); return overlay ? 
overlay->dismiss_text_ : base::string16(); } void ClickDismissButton() { ToastOverlay* overlay = GetCurrentOverlay(); if (overlay) overlay->ClickDismissButtonForTesting(DummyEvent()); } std::string ShowToast(const std::string& text, int32_t duration) { std::string id = "TOAST_ID_" + base::UintToString(serial_++); manager()->Show( ToastData(id, base::ASCIIToUTF16(text), duration, base::string16())); return id; } std::string ShowToastWithDismiss( const std::string& text, int32_t duration, const base::Optional<std::string>& dismiss_text) { base::Optional<base::string16> localized_dismiss; if (dismiss_text.has_value()) localized_dismiss = base::ASCIIToUTF16(dismiss_text.value()); std::string id = "TOAST_ID_" + base::UintToString(serial_++); manager()->Show( ToastData(id, base::ASCIIToUTF16(text), duration, localized_dismiss)); return id; } void CancelToast(const std::string& id) { manager()->Cancel(id); } private: ToastManager* manager_ = nullptr; unsigned int serial_ = 0; DISALLOW_COPY_AND_ASSIGN(ToastManagerTest); }; TEST_F(ToastManagerTest, ShowAndCloseAutomatically) { ShowToast("DUMMY", 10); EXPECT_EQ(1, GetToastSerial()); while (GetCurrentOverlay() != nullptr) base::RunLoop().RunUntilIdle(); } TEST_F(ToastManagerTest, ShowAndCloseManually) { ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_EQ(1, GetToastSerial()); EXPECT_FALSE(GetCurrentWidget()->GetLayer()->GetAnimator()->is_animating()); ClickDismissButton(); EXPECT_EQ(nullptr, GetCurrentOverlay()); } TEST_F(ToastManagerTest, ShowAndCloseManuallyDuringAnimation) { ui::ScopedAnimationDurationScaleMode slow_animation_duration( ui::ScopedAnimationDurationScaleMode::SLOW_DURATION); ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_TRUE(GetCurrentWidget()->GetLayer()->GetAnimator()->is_animating()); base::RunLoop().RunUntilIdle(); EXPECT_EQ(1, GetToastSerial()); EXPECT_TRUE(GetCurrentWidget()->GetLayer()->GetAnimator()->is_animating()); // Try to close it during animation. ClickDismissButton(); while (GetCurrentWidget()->GetLayer()->GetAnimator()->is_animating()) base::RunLoop().RunUntilIdle(); // Toast isn't closed. 
EXPECT_TRUE(GetCurrentOverlay() != nullptr); } TEST_F(ToastManagerTest, NullMessageHasNoDismissButton) { ShowToastWithDismiss("DUMMY", 10, base::Optional<std::string>()); base::RunLoop().RunUntilIdle(); EXPECT_FALSE(GetDismissButton()); } TEST_F(ToastManagerTest, QueueMessage) { ShowToast("DUMMY1", 10); ShowToast("DUMMY2", 10); ShowToast("DUMMY3", 10); EXPECT_EQ(1, GetToastSerial()); EXPECT_EQ(base::ASCIIToUTF16("DUMMY1"), GetCurrentText()); while (GetToastSerial() != 2) base::RunLoop().RunUntilIdle(); EXPECT_EQ(base::ASCIIToUTF16("DUMMY2"), GetCurrentText()); while (GetToastSerial() != 3) base::RunLoop().RunUntilIdle(); EXPECT_EQ(base::ASCIIToUTF16("DUMMY3"), GetCurrentText()); } TEST_F(ToastManagerTest, PositionWithVisibleBottomShelf) { Shelf* shelf = GetPrimaryShelf(); EXPECT_EQ(SHELF_ALIGNMENT_BOTTOM, shelf->alignment()); EXPECT_EQ(SHELF_VISIBLE, shelf->GetVisibilityState()); ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_EQ(1, GetToastSerial()); gfx::Rect toast_bounds = GetCurrentWidget()->GetWindowBoundsInScreen(); gfx::Rect root_bounds = screen_util::GetDisplayBoundsWithShelf(shelf->GetWindow()); EXPECT_TRUE(toast_bounds.Intersects(shelf->GetUserWorkAreaBounds())); EXPECT_NEAR(root_bounds.CenterPoint().x(), toast_bounds.CenterPoint().x(), 1); gfx::Rect shelf_bounds = shelf->GetIdealBounds(); EXPECT_FALSE(toast_bounds.Intersects(shelf_bounds)); EXPECT_EQ(shelf_bounds.y() - ToastOverlay::kOffset, toast_bounds.bottom()); EXPECT_EQ( root_bounds.bottom() - shelf_bounds.height() - ToastOverlay::kOffset, toast_bounds.bottom()); } TEST_F(ToastManagerTest, PositionWithAutoHiddenBottomShelf) { std::unique_ptr<aura::Window> window( CreateTestWindowInShellWithBounds(gfx::Rect(1, 2, 3, 4))); Shelf* shelf = GetPrimaryShelf(); EXPECT_EQ(SHELF_ALIGNMENT_BOTTOM, shelf->alignment()); shelf->SetAutoHideBehavior(SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS); EXPECT_EQ(SHELF_AUTO_HIDE_HIDDEN, shelf->GetAutoHideState()); ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_EQ(1, GetToastSerial()); gfx::Rect toast_bounds = GetCurrentWidget()->GetWindowBoundsInScreen(); gfx::Rect root_bounds = screen_util::GetDisplayBoundsWithShelf(shelf->GetWindow()); EXPECT_TRUE(toast_bounds.Intersects(shelf->GetUserWorkAreaBounds())); EXPECT_NEAR(root_bounds.CenterPoint().x(), toast_bounds.CenterPoint().x(), 1); EXPECT_EQ(root_bounds.bottom() - kShelfAutoHideSize - ToastOverlay::kOffset, toast_bounds.bottom()); } TEST_F(ToastManagerTest, PositionWithHiddenBottomShelf) { Shelf* shelf = GetPrimaryShelf(); EXPECT_EQ(SHELF_ALIGNMENT_BOTTOM, shelf->alignment()); shelf->SetAutoHideBehavior(SHELF_AUTO_HIDE_ALWAYS_HIDDEN); EXPECT_EQ(SHELF_HIDDEN, shelf->GetVisibilityState()); ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_EQ(1, GetToastSerial()); gfx::Rect toast_bounds = GetCurrentWidget()->GetWindowBoundsInScreen(); gfx::Rect root_bounds = screen_util::GetDisplayBoundsWithShelf(shelf->GetWindow()); EXPECT_TRUE(toast_bounds.Intersects(shelf->GetUserWorkAreaBounds())); EXPECT_NEAR(root_bounds.CenterPoint().x(), toast_bounds.CenterPoint().x(), 1); EXPECT_EQ(root_bounds.bottom() - ToastOverlay::kOffset, toast_bounds.bottom()); } TEST_F(ToastManagerTest, PositionWithVisibleLeftShelf) { Shelf* shelf = GetPrimaryShelf(); EXPECT_EQ(SHELF_VISIBLE, shelf->GetVisibilityState()); shelf->SetAlignment(SHELF_ALIGNMENT_LEFT); ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_EQ(1, GetToastSerial()); gfx::Rect toast_bounds = GetCurrentWidget()->GetWindowBoundsInScreen(); gfx::RectF precise_toast_bounds(toast_bounds); 
gfx::Rect root_bounds = screen_util::GetDisplayBoundsWithShelf(shelf->GetWindow()); EXPECT_TRUE(toast_bounds.Intersects(shelf->GetUserWorkAreaBounds())); EXPECT_EQ(root_bounds.bottom() - ToastOverlay::kOffset, toast_bounds.bottom()); gfx::Rect shelf_bounds = shelf->GetIdealBounds(); EXPECT_FALSE(toast_bounds.Intersects(shelf_bounds)); EXPECT_NEAR( shelf_bounds.right() + (root_bounds.width() - shelf_bounds.width()) / 2.0, precise_toast_bounds.CenterPoint().x(), 1.f /* accepted error */); } TEST_F(ToastManagerTest, PositionWithUnifiedDesktop) { display_manager()->SetUnifiedDesktopEnabled(true); UpdateDisplay("1000x500,0+600-100x500"); Shelf* shelf = GetPrimaryShelf(); EXPECT_EQ(SHELF_ALIGNMENT_BOTTOM, shelf->alignment()); EXPECT_EQ(SHELF_VISIBLE, shelf->GetVisibilityState()); ShowToast("DUMMY", ToastData::kInfiniteDuration); EXPECT_EQ(1, GetToastSerial()); gfx::Rect toast_bounds = GetCurrentWidget()->GetWindowBoundsInScreen(); gfx::Rect root_bounds = screen_util::GetDisplayBoundsWithShelf(shelf->GetWindow()); EXPECT_TRUE(toast_bounds.Intersects(shelf->GetUserWorkAreaBounds())); EXPECT_TRUE(root_bounds.Contains(toast_bounds)); EXPECT_NEAR(root_bounds.CenterPoint().x(), toast_bounds.CenterPoint().x(), 1); gfx::Rect shelf_bounds = shelf->GetIdealBounds(); EXPECT_FALSE(toast_bounds.Intersects(shelf_bounds)); EXPECT_EQ(shelf_bounds.y() - ToastOverlay::kOffset, toast_bounds.bottom()); EXPECT_EQ( root_bounds.bottom() - shelf_bounds.height() - ToastOverlay::kOffset, toast_bounds.bottom()); } TEST_F(ToastManagerTest, CancelToast) { std::string id1 = ShowToast("TEXT1", ToastData::kInfiniteDuration); std::string id2 = ShowToast("TEXT2", ToastData::kInfiniteDuration); std::string id3 = ShowToast("TEXT3", ToastData::kInfiniteDuration); // Confirm that the first toast is shown. EXPECT_EQ(base::ASCIIToUTF16("TEXT1"), GetCurrentText()); // Cancel the queued toast. CancelToast(id2); // Confirm that the shown toast is still visible. EXPECT_EQ(base::ASCIIToUTF16("TEXT1"), GetCurrentText()); // Cancel the shown toast. CancelToast(id1); // Confirm that the next toast is visible. EXPECT_EQ(base::ASCIIToUTF16("TEXT3"), GetCurrentText()); // Cancel the shown toast. CancelToast(id3); // Confirm that the shown toast disappears. EXPECT_FALSE(GetCurrentOverlay()); // Confirm that only 1 toast is shown. EXPECT_EQ(2, GetToastSerial()); } } // namespace ash
token_count: 3,910
max_stars_count: 428
/*
** 2012 June 2
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**    May you do good and not evil.
**    May you find forgiveness for yourself and forgive others.
**    May you share freely, never taking more than you give.
*/
package info.ata4.bspinfo.gui.models;

import info.ata4.bsplib.BspFileReader;
import info.ata4.bsplib.entity.Entity;
import info.ata4.util.gui.ListTableModel;
import java.util.*;

/**
 *
 * @author <NAME> <barracuda415 at yahoo.de>
 */
public class EntityTableModel extends ListTableModel {

    public EntityTableModel() {
        super(2);
        columnNames = Arrays.asList(new String[]{"Class", "Entities"});
        columnClasses = new Class[] {String.class, Integer.class};
    }

    public EntityTableModel(BspFileReader bspReader) {
        this();

        Set<String> classes = bspReader.getEntityClassSet();
        List<Entity> entities = bspReader.getData().entities;
        List<String> entityStrings = new ArrayList<>();

        // create non-unique list of all entity classes
        for (Entity ent : entities) {
            entityStrings.add(ent.getClassName());
        }

        // create rows and count occurrences of all unique entity classes
        for (String cls : classes) {
            List<Object> row = new ArrayList<>();
            row.add(cls);
            row.add(Collections.frequency(entityStrings, cls));
            addRow(row);
        }
    }

    @Override
    public boolean isCellEditable(int row, int column) {
        return false;
    }
}
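The per-class counting above boils down to Collections.frequency over a flattened list of class names. A self-contained sketch of that idiom, using made-up entity class names:

// Standalone illustration of the counting idiom used above (class names are invented).
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class FrequencyDemo {
    public static void main(String[] args) {
        List<String> classNames = Arrays.asList("light", "prop_static", "light", "info_player_start", "light");
        Set<String> unique = new LinkedHashSet<>(classNames);
        for (String cls : unique) {
            // prints: light -> 3, prop_static -> 1, info_player_start -> 1
            System.out.println(cls + " -> " + Collections.frequency(classNames, cls));
        }
    }
}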
671
335
<reponame>Safal08/Hacktoberfest-1<gh_stars>100-1000 { "word": "Kibble", "definitions": [ "Grind or chop (beans, grain, etc.) coarsely." ], "parts-of-speech": "Verb" }
91
379
<reponame>3090559154/oim-im-api-tools package com.onlyxiahui.im.message; /** * @author Only * @date 2016-05-19 04:11:12 */ public class Head { private String key; private String name; private String action; private String method; private String version; private long time; public static final String code_fail = "0"; public static final String code_success = "1"; public Head() { name = ""; version = "1"; } public String getKey() { return key; } public void setKey(String key) { this.key = key; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getAction() { return action; } public void setAction(String action) { this.action = action; } public String getMethod() { return method; } public void setMethod(String method) { this.method = method; } public long getTime() { return time; } public void setTime(long time) { this.time = time; } public String getVersion() { return version; } public void setVersion(String version) { this.version = version; } }
390
674
// // Enum.hpp // Clock Signal // // Created by <NAME> on 17/02/2020. // Copyright © 2020 <NAME>. All rights reserved. // #ifndef Enum_hpp #define Enum_hpp #include <algorithm> #include <cctype> #include <string> #include <typeindex> #include <typeinfo> #include <vector> #include <unordered_map> namespace Reflection { #define ReflectableEnum(Name, ...) \ enum class Name { __VA_ARGS__ }; \ constexpr static const char *__declaration##Name = #__VA_ARGS__; #define EnumDeclaration(Name) #Name, __declaration##Name #define AnnounceEnum(Name) ::Reflection::Enum::declare<Name>(EnumDeclaration(Name)) #define AnnounceEnumNS(Namespace, Name) ::Reflection::Enum::declare<Namespace::Name>(#Name, Namespace::__declaration##Name) /*! This provides a very slight version of enum reflection; you can introspect only: * enums have been registered, along with the text of their declarations; * provided that those enums do not declare specific values for their members. The macros above help avoid duplication of the declaration, making this just mildly less terrible than it might have been. No guarantees of speed or any other kind of efficiency are offered. */ class Enum { public: /*! Registers @c name and the entries within @c declaration for the enum type @c Type. Assuming the caller used the macros above, a standard pattern where both things can be placed in the same namespace might look like: ReflectableEnum(MyEnum, int, A, B, C); ... AnnounceEnum(MyEnum) If AnnounceEnum cannot be placed into the same namespace as ReflectableEnum, see the EnumDeclaration macro. */ template <typename Type> static void declare(const char *name, const char *declaration) { const char *d_ptr = declaration; std::vector<std::string> result; while(true) { // Skip non-alphas, and exit if the terminator is found. while(*d_ptr && !isalpha(*d_ptr)) ++d_ptr; if(!*d_ptr) break; // Note the current location and proceed for all alphas and digits. const auto start = d_ptr; while(isalpha(*d_ptr) || isdigit(*d_ptr)) ++d_ptr; // Add a string view. result.emplace_back(std::string(start, size_t(d_ptr - start))); } members_by_type_.emplace(std::make_pair(std::type_index(typeid(Type)), result)); names_by_type_.emplace(std::make_pair(std::type_index(typeid(Type)), std::string(name))); } /*! @returns the declared name of the enum @c Type if it has been registered; the empty string otherwise. */ template <typename Type> static const std::string &name() { return name(typeid(Type)); } /*! @returns the declared name of the enum with type_info @c type if it has been registered; the empty string otherwise. */ static const std::string &name(std::type_index type) { const auto entry = names_by_type_.find(type); if(entry == names_by_type_.end()) return empty_string_; return entry->second; } /*! @returns the number of members of the enum @c Type if it has been registered; 0 otherwise. */ template <typename Type> static size_t size() { return size(typeid(Type)); } /*! @returns the number of members of the enum with type_info @c type if it has been registered; @c std::string::npos otherwise. */ static size_t size(std::type_index type) { const auto entry = members_by_type_.find(type); if(entry == members_by_type_.end()) return std::string::npos; return entry->second.size(); } /*! @returns A @c std::string name for the enum value @c e. */ template <typename Type> static const std::string &to_string(Type e) { return to_string(typeid(Type), int(e)); } /*! @returns A @c std::string name for the enum value @c e from the enum with type_info @c type. 
*/ static const std::string &to_string(std::type_index type, int e) { const auto entry = members_by_type_.find(type); if(entry == members_by_type_.end()) return empty_string_; return entry->second[size_t(e)]; } /*! @returns a vector naming the members of the enum with type_info @c type if it has been registered; an empty vector otherwise. */ static const std::vector<std::string> &all_values(std::type_index type) { const auto entry = members_by_type_.find(type); if(entry == members_by_type_.end()) return empty_vector_; return entry->second; } /*! @returns a vector naming the members of the enum @c Type type if it has been registered; an empty vector otherwise. */ template <typename Type> static const std::vector<std::string> &all_values() { return all_values(typeid(Type)); } /*! @returns A value of @c Type for the name @c str, or @c EnumType(std::string::npos) if the name is not found. */ template <typename Type> static Type from_string(const std::string &str) { return Type(from_string(typeid(Type), str)); } /*! @returns A value for the name @c str in the enum with type_info @c type , or @c -1 if the name is not found. */ static int from_string(std::type_index type, const std::string &str) { const auto entry = members_by_type_.find(type); if(entry == members_by_type_.end()) return -1; const auto iterator = std::find(entry->second.begin(), entry->second.end(), str); if(iterator == entry->second.end()) return -1; return int(iterator - entry->second.begin()); } private: static inline std::unordered_map<std::type_index, std::vector<std::string>> members_by_type_; static inline std::unordered_map<std::type_index, std::string> names_by_type_; static inline const std::string empty_string_; static inline const std::vector<std::string> empty_vector_; }; } #endif /* Enum_hpp */
1,986
769
// // MapViewController.h // // Created by <NAME> on 10/12/16. // Copyright © 2016 <NAME>. All rights reserved. // #import <UIKit/UIKit.h> #import "TangramMap/TangramMap.h" @interface MapViewController : UIViewController <TGMapViewDelegate, TGRecognizerDelegate> - (void)mapView:(nonnull TGMapView *)mapView didLoadScene:(int)sceneID withError:(nullable NSError *)sceneError; - (void)mapViewDidCompleteLoading:(nonnull TGMapView *)mapView; - (void)mapView:(nonnull TGMapView *)mapView didSelectFeature:(nullable NSDictionary *)feature atScreenPosition:(CGPoint)position; - (void)mapView:(nonnull TGMapView *)mapView didSelectLabel:(nullable TGLabelPickResult *)labelPickResult atScreenPosition:(CGPoint)position; - (void)mapView:(nonnull TGMapView *)mapView didSelectMarker:(nullable TGMarkerPickResult *)markerPickResult atScreenPosition:(CGPoint)position; - (void)mapView:(nonnull TGMapView *)view didCaptureScreenshot:(nonnull UIImage *)screenshot; - (void)mapView:(nonnull TGMapView *)view recognizer:(nonnull UIGestureRecognizer *)recognizer didRecognizeSingleTapGesture:(CGPoint)location; - (void)mapView:(nonnull TGMapView *)view recognizer:(nonnull UIGestureRecognizer *)recognizer didRecognizeLongPressGesture:(CGPoint)location; @end
393
6,684
{ "pagination": { "DescribeBackups": { "input_token": "NextToken", "output_token": "NextToken", "limit_key": "MaxResults" }, "DescribeClusters": { "input_token": "NextToken", "output_token": "NextToken", "limit_key": "MaxResults" }, "ListTags": { "input_token": "NextToken", "output_token": "NextToken", "limit_key": "MaxResults" } } }
196
373
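The pagination config above only names the request/response keys; as a rough Python sketch (illustrative only — call_api is a hypothetical stand-in for an operation such as DescribeBackups), the NextToken/MaxResults loop it describes looks like this:

def paginate(call_api, page_size=50):
    # Illustrative only: call_api is a hypothetical callable standing in for an
    # AWS-style operation; the key names come from the pagination config above.
    token = None
    while True:
        kwargs = {"MaxResults": page_size}      # limit_key
        if token:
            kwargs["NextToken"] = token         # input_token
        page = call_api(**kwargs)
        yield page
        token = page.get("NextToken")           # output_token
        if not token:
            break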
package net.finmath.montecarlo.interestrate.products.components; import java.util.SortedMap; import java.util.TreeMap; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import net.finmath.montecarlo.interestrate.models.FundingCapacity; import net.finmath.montecarlo.interestrate.models.FundingCapacity.DefaultFactors; import net.finmath.stochastic.RandomVariable; import net.finmath.stochastic.Scalar; /** * @author <NAME> * */ public class FundingCapacityTest { @Test public void test() { final SortedMap<Double, Double> instSurvProb = new TreeMap<Double, Double>(); instSurvProb.put(1.5, 0.9); instSurvProb.put(2.0, 0.8); instSurvProb.put(5.0, 0.7); instSurvProb.put(10.0, 0.5); final FundingCapacity fc = new FundingCapacity("EUR", new Scalar(0.0), instSurvProb); final DefaultFactors df = fc.getDefaultFactors(1.0, new Scalar(1.0)); final RandomVariable sp = df.getSurvivalProbability(); Assertions.assertEquals(0.9, sp.doubleValue()); final RandomVariable dc = df.getDefaultCompensation(); Assertions.assertEquals(1.0/0.9, dc.doubleValue()); final DefaultFactors df2 = fc.getDefaultFactors(1.0, new Scalar(1.0)); final RandomVariable sp2 = df2.getSurvivalProbability(); Assertions.assertEquals(0.85, sp2.doubleValue(), 1E-12); final RandomVariable dc2 = df2.getDefaultCompensation(); Assertions.assertEquals((1.5-1.0/0.9)+0.5+(1-(1.5-1.0/0.9)*0.9-0.5*0.8)/0.7, dc2.doubleValue(), 1E-12); final RandomVariable sp3 = fc.getDefaultFactors(1.0, new Scalar(3.0)).getSurvivalProbability(); Assertions.assertEquals(0.7, sp3.doubleValue(), 1E-12); final RandomVariable sp4 = fc.getDefaultFactors(1.0, new Scalar(-4.0)).getSurvivalProbability(); Assertions.assertEquals((3*0.7+0.5*0.8+0.5*0.9)/4, sp4.doubleValue(), 1E-12); final RandomVariable level = fc.getCurrentFundingLevel(); Assertions.assertEquals(1.0, level.doubleValue(), 1E-12); } }
769
1,253
package it.cosenonjaviste.daggermock.subcomponenterror; import org.junit.Test; import it.cosenonjaviste.daggermock.DaggerMockRule; import static org.assertj.core.api.Assertions.assertThat; import static org.junit.Assert.fail; public class SubComponentErrorTest { String s = "BBBB"; Integer i = 1; Long l = 1L; Short sh = 1; @Test public void testSubComponentError() throws Throwable { try { DaggerMockRule<MyComponent> rule = new DaggerMockRule<>(MyComponent.class, new MyModule()); rule.apply(null, null, this).evaluate(); fail(); } catch (Throwable e) { assertThat(e.getMessage()) .contains("Error while trying to override subComponents objects") .contains("java.lang.Integer") .contains("java.lang.Short") .doesNotContain("java.lang.Long"); } } }
419
5,169
<reponame>Gantios/Specs { "name": "ZYThumbnailTableView", "version": "0.2.1", "summary": "A TableView not only the thumbnail Cell and it can be expanded.", "description": "You can use panGesture let the tableviewCell(thumbnail) expanded, expand topView or Bottom which DIY by yourself.\nand you can configure the tableview cell, topView, bottomView and update cell certainly with the Block in ZYThumbnailTableView.\nhave fun!", "homepage": "https://github.com/liuzhiyi1992/ZYThumbnailTableView", "license": "MIT", "authors": { "liuzhiyi1992": "<EMAIL>" }, "platforms": { "ios": "8.0" }, "source": { "git": "https://github.com/liuzhiyi1992/ZYThumbnailTableView.git", "tag": "0.2.1" }, "source_files": "ZYThumbnailTableView/Classes/**/*.{h,m,swift}", "frameworks": "UIKit", "requires_arc": true }
310
570
<reponame>pnwamk/souffle /* * Souffle - A Datalog Compiler * Copyright (c) 2020 The Souffle Developers. All rights reserved * Licensed under the Universal Permissive License v 1.0 as shown at: * - https://opensource.org/licenses/UPL * - <souffle root>/licenses/SOUFFLE-UPL.txt */ /************************************************************************ * * @file Utils.cpp * * A collection of utilities used in translation * ***********************************************************************/ #include "ast2ram/utility/Utils.h" #include "ast/Atom.h" #include "ast/Clause.h" #include "ast/QualifiedName.h" #include "ast/Relation.h" #include "ast/UnnamedVariable.h" #include "ast/Variable.h" #include "ast/utility/Utils.h" #include "ast2ram/utility/Location.h" #include "ram/Clear.h" #include "ram/Condition.h" #include "ram/Conjunction.h" #include "ram/TupleElement.h" #include "souffle/utility/ContainerUtil.h" #include "souffle/utility/StringUtil.h" #include <string> #include <vector> namespace souffle::ast2ram { std::string getConcreteRelationName(const ast::QualifiedName& name, const std::string prefix) { return prefix + getRelationName(name); } std::string getDeltaRelationName(const ast::QualifiedName& name) { return getConcreteRelationName(name, "@delta_"); } std::string getNewRelationName(const ast::QualifiedName& name) { return getConcreteRelationName(name, "@new_"); } std::string getRejectRelationName(const ast::QualifiedName& name) { return getConcreteRelationName(name, "@reject_"); } std::string getDeleteRelationName(const ast::QualifiedName& name) { return getConcreteRelationName(name, "@delete_"); } std::string getRelationName(const ast::QualifiedName& name) { return toString(join(name.getQualifiers(), ".")); } std::string getBaseRelationName(const ast::QualifiedName& name) { return stripPrefix("@new_", stripPrefix("@delta_", stripPrefix("@info_", name.toString()))); } void appendStmt(VecOwn<ram::Statement>& stmtList, Own<ram::Statement> stmt) { if (stmt) { stmtList.push_back(std::move(stmt)); } } void nameUnnamedVariables(ast::Clause* clause) { // the node mapper conducting the actual renaming struct Instantiator : public ast::NodeMapper { mutable int counter = 0; Instantiator() = default; Own<ast::Node> operator()(Own<ast::Node> node) const override { // apply recursive node->apply(*this); // replace unknown variables if (isA<ast::UnnamedVariable>(node)) { auto name = " _unnamed_var" + toString(++counter); return mk<ast::Variable>(name); } // otherwise nothing return node; } }; // name all variables in the atoms Instantiator init; for (auto& atom : ast::getBodyLiterals<ast::Atom>(*clause)) { atom->apply(init); } } Own<ram::TupleElement> makeRamTupleElement(const Location& loc) { return mk<ram::TupleElement>(loc.identifier, loc.element); } Own<ram::Condition> addConjunctiveTerm(Own<ram::Condition> curCondition, Own<ram::Condition> newTerm) { return curCondition ? mk<ram::Conjunction>(std::move(curCondition), std::move(newTerm)) : std::move(newTerm); } } // namespace souffle::ast2ram
1,256
1,163
<reponame>qingyunqu/iree // Copyright 2019 The IREE Authors // // Licensed under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception #include "iree/compiler/Dialect/Shape/IR/ShapeOps.h" #include "iree/compiler/Dialect/Shape/IR/ShapeTypes.h" #include "llvm/ADT/STLExtras.h" #include "llvm/ADT/SmallString.h" #include "llvm/ADT/SmallVector.h" #include "llvm/Support/Casting.h" #include "llvm/Support/SMLoc.h" #include "mlir/Dialect/StandardOps/IR/Ops.h" #include "mlir/IR/Attributes.h" #include "mlir/IR/Builders.h" #include "mlir/IR/BuiltinTypes.h" #include "mlir/IR/Diagnostics.h" #include "mlir/IR/Matchers.h" #include "mlir/IR/OpImplementation.h" #include "mlir/IR/OperationSupport.h" #include "mlir/IR/PatternMatch.h" #include "mlir/IR/TypeUtilities.h" #include "mlir/IR/Value.h" #include "mlir/Support/LLVM.h" #include "mlir/Support/LogicalResult.h" namespace mlir { namespace iree_compiler { namespace Shape { //===----------------------------------------------------------------------===// // shapex.tie_shape //===----------------------------------------------------------------------===// static LogicalResult verifyTieShapeOp(TieShapeOp op) { // Validate shapedType and ranked_shape_type conservatively in this // case (tie_shape supports arbitrary operand() but we constrain it if // it is specific enough. auto shapedType = op.operand().getType().dyn_cast<ShapedType>(); auto rsType = op.shape().getType().dyn_cast<RankedShapeType>(); if (shapedType && shapedType.hasRank() && rsType) { for (auto it : llvm::zip(shapedType.getShape(), rsType.getAllDims())) { if ((std::get<0>(it) != -1 && std::get<1>(it) != -1) && std::get<0>(it) != std::get<1>(it)) { return op.emitOpError("dims must match between tensor and shape"); } } } return success(); } Value TieShapeOp::getViewSource() { return operand(); } //===----------------------------------------------------------------------===// // shapex.get_ranked_shape //===----------------------------------------------------------------------===// void GetRankedShapeOp::build(OpBuilder &builder, OperationState &result, Value operand) { auto rankedOperandType = operand.getType().dyn_cast<RankedTensorType>(); if (rankedOperandType) { result.types.push_back(RankedShapeType::get(rankedOperandType.getShape(), builder.getContext())); } result.addOperands(operand); } static LogicalResult verifyGetRankedShapeOp(GetRankedShapeOp op) { auto tensorType = op.operand().getType().cast<TensorType>(); auto rsType = op.shape().getType().cast<RankedShapeType>(); if (tensorType.getRank() != rsType.getRank()) { return op.emitOpError("operand and result must be of same rank"); } auto rsDims = rsType.getAllDims(); if (!std::equal(rsDims.begin(), rsDims.end(), tensorType.getShape().begin())) { return op.emitOpError("operand tensor and result shape must be equal"); } return success(); } //===----------------------------------------------------------------------===// // shapex.const_ranked_shape //===----------------------------------------------------------------------===// void ConstRankedShapeOp::build(OpBuilder &builder, OperationState &result, Type type) { assert(type.cast<RankedShapeType>().isFullyStatic()); result.types.push_back(type); } static LogicalResult verifyConstRankedShapeOp(ConstRankedShapeOp op) { auto rsType = op.result().getType().dyn_cast<RankedShapeType>(); if (!rsType || !rsType.isFullyStatic()) { return op.emitOpError("must be a fully static ranked_shape"); } 
return success(); } void ConstRankedShapeOp::getAsmResultNames( function_ref<void(Value, StringRef)> setNameFn) { auto rankedShape = result().getType().cast<RankedShapeType>(); SmallString<32> buffer; llvm::raw_svector_ostream os(buffer); os << "rs"; interleave( rankedShape.getAllDims(), os, [&](int64_t dim) { os << dim; }, "_"); setNameFn(getResult(), os.str()); } //===----------------------------------------------------------------------===// // shapex.make_ranked_shape //===----------------------------------------------------------------------===// static LogicalResult verifyMakeRankedShapeOp(MakeRankedShapeOp op) { if (op.getRankedShapeType().getNumDynamicDims() != op.getNumOperands()) { return op.emitError() << "number of dynamic dims doesn't match number of operands"; } return success(); } //===----------------------------------------------------------------------===// // shapex.ranked_dim //===----------------------------------------------------------------------===// void RankedDimOp::build(OpBuilder &builder, OperationState &result, Type dimType, Value shape, int index) { result.addOperands(shape); result.addAttribute("index", builder.getIntegerAttr(builder.getIndexType(), index)); result.addTypes(dimType); } void RankedDimOp::build(OpBuilder &builder, OperationState &result, Value shape, int index) { RankedDimOp::build(builder, result, builder.getIndexType(), shape, index); } ParseResult parseRankedDimOp(OpAsmParser &parser, OperationState &state) { OpAsmParser::OperandType operand; Type operandType; IntegerAttr indexAttr; Type indexType = parser.getBuilder().getIndexType(); SmallVector<Type, 1> resultTypes; if (parser.parseOperand(operand) || parser.parseLSquare() || parser.parseAttribute(indexAttr, indexType, "index", state.attributes) || parser.parseRSquare() || parser.parseColonType(operandType) || parser.parseArrowTypeList(resultTypes) || resultTypes.empty() || parser.resolveOperand(operand, operandType, state.operands)) { return failure(); } auto rsType = operandType.dyn_cast<RankedShapeType>(); if (!rsType) { return parser.emitError(parser.getNameLoc()); } state.types.push_back(resultTypes[0]); return success(); } static void printRankedDimOp(OpAsmPrinter &p, RankedDimOp op) { p << " "; p.printOperand(op.shape()); p << "[" << op.getIndex() << "]"; p << " : "; p.printType(op.shape().getType()); p << " -> "; p.printType(op.getType()); } static LogicalResult verifyRankedDimOp(RankedDimOp op) { auto rsType = op.shape().getType().dyn_cast<RankedShapeType>(); auto index = static_cast<int64_t>(op.getIndex()); if (index < 0 || index >= rsType.getRank()) { return op.emitOpError() << "index out of bounds of shape"; } return success(); } } // namespace Shape } // namespace iree_compiler } // namespace mlir #define GET_OP_CLASSES #include "iree/compiler/Dialect/Shape/IR/ShapeOps.cpp.inc"
2,361
436
""" This model defines our core SQLite database interface. """ from __future__ import annotations import sqlite3 class Database: """Proxy class to access sqlite3.connect method.""" def __init__(self, *args, **kwargs) -> None: self.args = args self.kwargs = kwargs self._connection: sqlite3.Connection | None = None @property def connection(self) -> sqlite3.Connection: """Returns an existing SQL connection or creates a new one.""" if self._connection: return self._connection else: connection = sqlite3.connect(*self.args, **self.kwargs) connection.row_factory = sqlite3.Row self._connection = connection return connection def close(self) -> None: """Closes the SQL connection.""" if self._connection: self._connection.close() self._connection = None def commit(self) -> None: """Commits SQL changes.""" self.connection.commit() def execute(self, sql: str, *args) -> sqlite3.Cursor: """ Creates a cursor and executes the given SQL statement. :param sql: SQL statement to execute. :param args: Parameters to substitute for placeholders in SQL statement. :returns: The created cursor. """ with self.connection: return self.connection.execute(sql, args) def executescript(self, script: str) -> None: """ Creates a cursor and executes the given SQL script. :param script: SQL script to execute. :returns: The created cursor. """ with self.connection: self.connection.cursor().executescript(script)
679
1,582
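A minimal usage sketch for the Database proxy above (not part of the original module; it assumes the class is importable and uses an in-memory SQLite database):

db = Database(":memory:")                                   # connection is opened lazily on first use
db.executescript("CREATE TABLE users (id INTEGER PRIMARY KEY, name TEXT);")
db.execute("INSERT INTO users (name) VALUES (?)", "alice")  # *args become the parameter tuple
row = db.execute("SELECT id, name FROM users").fetchone()
print(dict(row))                                            # sqlite3.Row factory -> {'id': 1, 'name': 'alice'}
db.close()

Because execute() wraps each call in "with self.connection:", successful statements are committed automatically; commit() is only needed when driving the connection directly.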
// // ECDeviceGeneric.hpp // SMCSuperIO // // Copyright © 2021 vit9696. All rights reserved. // #ifndef ECDeviceGeneric_hpp #define ECDeviceGeneric_hpp #include "SuperIODevice.hpp" #include "ECDevice.hpp" namespace EC { class ECDeviceGeneric : public ECDevice { const char *modelName {"Generic EC"}; IORegistryEntry *device {nullptr}; uint32_t tachometerCount {1}; static constexpr uint32_t MaxTachometerCount {5}; struct Tachometer { const char *name {"FAN"}; uint32_t addr {0}; uint32_t mul {1}; uint32_t div {1}; uint32_t dividend {0}; uint8_t size {sizeof(uint8_t)}; bool bigEndian {false}; bool inverse {false}; }; Tachometer tachometers[MaxTachometerCount] {}; protected: const char* getModelName() override; uint8_t getTachometerCount() override; uint16_t updateTachometer(uint8_t index) override; const char* getTachometerName(uint8_t index) override; uint8_t getVoltageCount() override; float updateVoltage(uint8_t index) override; const char* getVoltageName(uint8_t index) override; /** * Ctor */ ECDeviceGeneric(IORegistryEntry *lpc); public: /** * Device factory */ static ECDevice* detect(SMCSuperIO* sio, const char *name, IORegistryEntry *lpc); }; } #endif /* ECDeviceGeneric_hpp */
520
1,004
<reponame>Andrew-Chen-Wang/asgiref import asyncio import socket as sock from functools import partial import pytest from asgiref.server import StatelessServer async def sock_recvfrom(sock, n): while True: try: return sock.recvfrom(n) except BlockingIOError: await asyncio.sleep(0) class Server(StatelessServer): def __init__(self, application, max_applications=1000): super().__init__( application, max_applications=max_applications, ) self._sock = sock.socket(sock.AF_INET, sock.SOCK_DGRAM | sock.SOCK_NONBLOCK) self._sock.bind(("127.0.0.1", 0)) @property def address(self): return self._sock.getsockname() async def handle(self): while True: data, addr = await sock_recvfrom(self._sock, 4096) data = data.decode("utf-8") if data.startswith("Register"): _, usr_name = data.split(" ") input_quene = self.get_or_create_application_instance(usr_name, addr) input_quene.put_nowait(b"Welcome") elif data.startswith("To"): _, usr_name, msg = data.split(" ", 2) input_quene = self.get_or_create_application_instance(usr_name, addr) input_quene.put_nowait(msg.encode("utf-8")) async def application_send(self, scope, message): self._sock.sendto(message, scope) def close(self): self._sock.close() for details in self.application_instances.values(): details["future"].cancel() class Client: def __init__(self, name): self._sock = sock.socket(sock.AF_INET, sock.SOCK_DGRAM | sock.SOCK_NONBLOCK) self.name = name async def register(self, server_addr, name=None): name = name or self.name self._sock.sendto(f"Register {name}".encode("utf-8"), server_addr) async def send(self, server_addr, to, msg): self._sock.sendto(f"To {to} {msg}".encode("utf-8"), server_addr) async def get_msg(self): msg, server_addr = await sock_recvfrom(self._sock, 4096) return msg, server_addr def close(self): self._sock.close() @pytest.fixture(scope="function") def server(): async def app(scope, receive, send): while True: msg = await receive() await send(msg) server = Server(app, 10) yield server server.close() async def check_client_msg(client, expected_address, expected_msg): msg, server_addr = await asyncio.wait_for(client.get_msg(), timeout=1.0) assert msg == expected_msg assert server_addr == expected_address async def server_auto_close(fut, timeout): """Server run based on run_until_complete. It will block forever with handle function because it is a while True loop without break. Use this method to close server automatically.""" loop = asyncio.get_event_loop() task = asyncio.ensure_future(fut, loop=loop) await asyncio.sleep(timeout) task.cancel() def test_stateless_server(server): """StatelessServer can be instantiated with an ASGI 3 application.""" """Create a UDP Server can register instance based on name from message of client. 
Clients can communicate to other client by name through server""" loop = asyncio.get_event_loop() server.handle = partial(server_auto_close, fut=server.handle(), timeout=1.0) client1 = Client(name="client1") client2 = Client(name="client2") async def check_client1_behavior(): await client1.register(server.address) await check_client_msg(client1, server.address, b"Welcome") await client1.send(server.address, "client2", "Hello") async def check_client2_behavior(): await client2.register(server.address) await check_client_msg(client2, server.address, b"Welcome") await check_client_msg(client2, server.address, b"Hello") task1 = loop.create_task(check_client1_behavior()) task2 = loop.create_task(check_client2_behavior()) server.run() assert task1.done() assert task2.done() def test_server_delete_instance(server): """The max_applications of Server is 10. After 20 times register, application number should be 10.""" loop = asyncio.get_event_loop() server.handle = partial(server_auto_close, fut=server.handle(), timeout=1.0) client1 = Client(name="client1") async def client1_multiple_register(): for i in range(20): await client1.register(server.address, name=f"client{i}") print(f"client{i}") await check_client_msg(client1, server.address, b"Welcome") task = loop.create_task(client1_multiple_register()) server.run() assert task.done()
1,946
825
<reponame>jiangkang/Hummer<filename>android/hummer-core/src/main/jni/qjs/hummer/HummerRecycler.cpp #include <map> #include "HummerJNI.h" #include "QuickJSCache.h" #include "HummerRecycler.h" static std::map<long, jobject> HUMMER_RECYCLER_MAP; static jmethodID HUMMER_RECYCLER_INVOKE_ID = nullptr; extern "C" JNIEXPORT void JNICALL Java_com_didi_hummer_core_engine_jsc_jni_HummerRecycler_init(JNIEnv *env, jobject thiz, jlong js_context) { jobject recycler = env->NewGlobalRef(thiz); HUMMER_RECYCLER_MAP[js_context] = recycler; HUMMER_RECYCLER_INVOKE_ID = env->GetMethodID(env->GetObjectClass(thiz), "recycle", "(J)V"); } extern "C" JNIEXPORT void JNICALL Java_com_didi_hummer_core_engine_jsc_jni_HummerRecycler_release(JNIEnv *env, jobject thiz, jlong js_context) { env->DeleteGlobalRef(HUMMER_RECYCLER_MAP[js_context]); HUMMER_RECYCLER_MAP.erase(js_context); } void HummerRecycler::recycle(long ctxId, int64_t objId) { jobject bridge = HUMMER_RECYCLER_MAP[ctxId]; if (bridge != nullptr) { JNIEnv *env = JNI_GetEnv(); env->CallVoidMethod(bridge, HUMMER_RECYCLER_INVOKE_ID, objId); JNI_DetachEnv(); } }
524
852
import FWCore.ParameterSet.Config as cms cleanPatPhotons = cms.EDProducer("PATPhotonCleaner", ## Input collection of Photons src = cms.InputTag("selectedPatPhotons"), # preselection (any string-based cut for pat::Photon) preselection = cms.string(''), # overlap checking configurables checkOverlaps = cms.PSet( electrons = cms.PSet( src = cms.InputTag("cleanPatElectrons"), algorithm = cms.string("bySuperClusterSeed"), requireNoOverlaps = cms.bool(False), # mark photons that overlap with electrons # for further studies, but DO NOT discard # them ), ), # finalCut (any string-based cut for pat::Photon) finalCut = cms.string(''), )
377
439
enum Relationship { EQUAL, SUBLIST, SUPERLIST, UNEQUAL }
25
12,252
<filename>testsuite/performance/tests/src/main/java/org/keycloak/performance/dataset/attr/StringListAttribute.java<gh_stars>1000+ package org.keycloak.performance.dataset.attr; import org.keycloak.performance.dataset.Entity; /** * * @author tkyjovsk * @param <PE> owner entity */ public class StringListAttribute<PE extends Entity> extends Attribute<PE, StringListAttributeRepresentation> { public StringListAttribute(PE attributeOwner, int index) { super(attributeOwner, index); } @Override public StringListAttributeRepresentation newRepresentation() { return new StringListAttributeRepresentation(); } }
208
1,170
<reponame>Nightonke/Gitee // // VHSecureTextField.h // VHGithubNotifier // // Created by viktorhuang on 2017/2/25. // Copyright © 2017年 黄伟平. All rights reserved. // @class VHSecureTextField; @protocol VHSecureTextFieldDelegate <NSObject> @required - (void)onSecureTextFieldEnterButtonClicked:(VHSecureTextField *)textField; @end @interface VHSecureTextField : NSSecureTextField @property (nonatomic, weak) id<VHSecureTextFieldDelegate> secureTextFieldDelegate; @end
178
351
import subprocess import os ROOT = os.path.dirname(os.path.abspath(__file__)) + "/../../bin" def test_tools(): """ Checks everything that is in /bin/ and tries to run it """ if os.path.isdir(ROOT): for _file in os.listdir(ROOT): print(_file) if os.path.isfile(os.path.join(ROOT, _file)): subprocess.check_call("{}/{} --version".format(ROOT, _file).split())
200
964
# # Plex - Transition Maps # # This version represents state sets direcly as dicts # for speed. # from copy import copy import string from sys import maxint from types import TupleType class TransitionMap: """ A TransitionMap maps an input event to a set of states. An input event is one of: a range of character codes, the empty string (representing an epsilon move), or one of the special symbols BOL, EOL, EOF. For characters, this implementation compactly represents the map by means of a list: [code_0, states_0, code_1, states_1, code_2, states_2, ..., code_n-1, states_n-1, code_n] where |code_i| is a character code, and |states_i| is a set of states corresponding to characters with codes |c| in the range |code_i| <= |c| <= |code_i+1|. The following invariants hold: n >= 1 code_0 == -maxint code_n == maxint code_i < code_i+1 for i in 0..n-1 states_0 == states_n-1 Mappings for the special events '', BOL, EOL, EOF are kept separately in a dictionary. """ map = None # The list of codes and states special = None # Mapping for special events def __init__(self, map = None, special = None): if not map: map = [-maxint, {}, maxint] if not special: special = {} self.map = map self.special = special #self.check() ### def add(self, event, new_state, TupleType = TupleType): """ Add transition to |new_state| on |event|. """ if type(event) == TupleType: code0, code1 = event i = self.split(code0) j = self.split(code1) map = self.map while i < j: map[i + 1][new_state] = 1 i = i + 2 else: self.get_special(event)[new_state] = 1 def add_set(self, event, new_set, TupleType = TupleType): """ Add transitions to the states in |new_set| on |event|. """ if type(event) == TupleType: code0, code1 = event i = self.split(code0) j = self.split(code1) map = self.map while i < j: map[i + 1].update(new_set) i = i + 2 else: self.get_special(event).update(new_set) def get_epsilon(self): """ Return the mapping for epsilon, or None. """ return self.special.get('') def items(self, len = len): """ Return the mapping as a list of ((code1, code2), state_set) and (special_event, state_set) pairs. """ result = [] map = self.map else_set = map[1] i = 0 n = len(map) - 1 code0 = map[0] while i < n: set = map[i + 1] code1 = map[i + 2] if set or else_set: result.append(((code0, code1), set)) code0 = code1 i = i + 2 for event, set in self.special.items(): if set: result.append((event, set)) return result # ------------------- Private methods -------------------- def split(self, code, len = len, maxint = maxint): """ Search the list for the position of the split point for |code|, inserting a new split point if necessary. Returns index |i| such that |code| == |map[i]|. """ # We use a funky variation on binary search. map = self.map hi = len(map) - 1 # Special case: code == map[-1] if code == maxint: return hi # General case lo = 0 # loop invariant: map[lo] <= code < map[hi] and hi - lo >= 2 while hi - lo >= 4: # Find midpoint truncated to even index mid = ((lo + hi) / 2) & ~1 if code < map[mid]: hi = mid else: lo = mid # map[lo] <= code < map[hi] and hi - lo == 2 if map[lo] == code: return lo else: map[hi:hi] = [code, map[hi - 1].copy()] #self.check() ### return hi def get_special(self, event): """ Get state set for special event, adding a new entry if necessary. 
""" special = self.special set = special.get(event, None) if not set: set = {} special[event] = set return set # --------------------- Conversion methods ----------------------- def __str__(self): map_strs = [] map = self.map n = len(map) i = 0 while i < n: code = map[i] if code == -maxint: code_str = "-inf" elif code == maxint: code_str = "inf" else: code_str = str(code) map_strs.append(code_str) i = i + 1 if i < n: map_strs.append(state_set_str(map[i])) i = i + 1 special_strs = {} for event, set in self.special.items(): special_strs[event] = state_set_str(set) return "[%s]+%s" % ( string.join(map_strs, ","), special_strs ) # --------------------- Debugging methods ----------------------- def check(self): """Check data structure integrity.""" if not self.map[-3] < self.map[-1]: print self assert 0 def dump(self, file): map = self.map i = 0 n = len(map) - 1 while i < n: self.dump_range(map[i], map[i + 2], map[i + 1], file) i = i + 2 for event, set in self.special.items(): if set: if not event: event = 'empty' self.dump_trans(event, set, file) def dump_range(self, code0, code1, set, file): if set: if code0 == -maxint: if code1 == maxint: k = "any" else: k = "< %s" % self.dump_char(code1) elif code1 == maxint: k = "> %s" % self.dump_char(code0 - 1) elif code0 == code1 - 1: k = self.dump_char(code0) else: k = "%s..%s" % (self.dump_char(code0), self.dump_char(code1 - 1)) self.dump_trans(k, set, file) def dump_char(self, code): if 0 <= code <= 255: return repr(chr(code)) else: return "chr(%d)" % code def dump_trans(self, key, set, file): file.write(" %s --> %s\n" % (key, self.dump_set(set))) def dump_set(self, set): return state_set_str(set) # # State set manipulation functions # #def merge_state_sets(set1, set2): # for state in set2.keys(): # set1[state] = 1 def state_set_str(set): state_list = set.keys() str_list = [] for state in state_list: str_list.append("S%d" % state.number) return "[%s]" % string.join(str_list, ",")
2,787
453
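To make the TransitionMap interface above concrete, a small hedged sketch (it assumes a Python 2 interpreter, since the module depends on sys.maxint, types.TupleType and print statements):

tm = TransitionMap()
tm.add((ord('a'), ord('z') + 1), 1)   # characters 'a'..'z' transition to state 1
tm.add('', 2)                         # epsilon move to state 2
for event, states in tm.items():
    print("%s -> %s" % (event, sorted(states.keys())))
# prints the (97, 123) range mapping to [1] and the epsilon ('') event mapping to [2]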
<gh_stars>100-1000 #include "headers/powd2.h" static __inline double _pow(double x, double y) { return spu_extract(_powd2(spu_promote(x, 0), spu_promote(y, 0)), 0); }
78
882
package water; import jsr166y.CountedCompleter; import jsr166y.ForkJoinPool; import jsr166y.ForkJoinTask; import jsr166y.RecursiveAction; import water.H2O.H2OCountedCompleter; import water.api.DocGen; import water.fvec.Vec; import water.util.Log; import water.util.Utils; import java.util.Arrays; import java.util.Random; import java.util.concurrent.atomic.AtomicBoolean; public class UDPDropTest extends Func { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. @API(help = "Message sizes", filter = Default.class, json=true) public int[] msg_sizes = new int[]{1,32,64,128,256,512,1024,AutoBuffer.MTU-100}; //INPUT @API(help = "Nodes", json=true) public String[] nodes; //OUTPUT @API(help = "Drop rates between each (ordered) pair of nodes for different message sizes", json = true) public UDPDropMatrix [] dropRates; private static class UDPPing extends DTask<UDPPing>{ boolean _done; int _retries = -1; final long _t1; long _t2; byte [] _payload; public UDPPing(){_t1 = -1;} public UDPPing(int sz){ assert sz <= AutoBuffer.MTU:"msg size does not fit into UDP"; _payload = MemoryManager.malloc1(sz); Random rnd = new Random(); for(int i = 0; i < _payload.length; ++i) _payload[i] = (byte)rnd.nextInt(); _t1 = System.currentTimeMillis(); } @Override public void compute2() { tryComplete();} @Override public synchronized UDPPing read(AutoBuffer ab){ if(_done)return this; _done = true; _t2 = System.currentTimeMillis(); _retries = ab.get4(); byte [] bs = ab.getA1(); _payload = bs; return this; } @Override public synchronized AutoBuffer write(AutoBuffer ab){ if(!_done) ++_retries; ab.put4(_retries); // count the number of retries as number of serialization calls ab.putA1(_payload); return ab; } @Override public void copyOver(Freezable f){ UDPPing u = (UDPPing)f; _retries = u._retries; _payload = u._payload; } } private static class TCPTester extends DTask<TCPTester> { public final int _srcId; public final int _tgtId; public final int _N; private final int[] _msgSzs; private transient RPC<UDPPing>[][] _pings; double[] _dropRates; int[] _droppedPackets; public TCPTester(H2ONode src, H2ONode tgt, int[] msgSzs, int ntests) { _srcId = src.index(); _tgtId = tgt.index(); _msgSzs = msgSzs; _N = ntests; } private transient boolean _done; private final void doTest() { _droppedPackets = new int[_N]; Arrays.fill(_droppedPackets, -1); _pings = new RPC[_msgSzs.length][_N]; // addToPendingCount(_msgSzs.length*_N - 1); for (int i = 0; i < _msgSzs.length; ++i) for (int j = 0; j < _N; ++j) // instead of synchronization, just wait for predetermined amount of time _pings[i][j] = new RPC(H2O.CLOUD._memary[_tgtId], new UDPPing(_msgSzs[i]))/*.addCompleter(this)*/.call(); try { Thread.sleep(5000); } catch (InterruptedException e) { } // if not done yet, finish no matter what (racy but we don't care here - only a debug tool, does not have to be precise) // setPendingCount(0); } @Override public synchronized void onCompletion(CountedCompleter caller) { if (!_done) { // only one completion _done = true; _dropRates = MemoryManager.malloc8d(_msgSzs.length); // compute the drop rates for (int i = 0; i < _msgSzs.length; ++i) { double sum = 0; for (int j = 0; j < _N; ++j) { RPC<UDPPing> rpc = _pings[i][j]; sum += (rpc._dt._retries == -1 ? 
Double.POSITIVE_INFINITY : rpc._dt._retries); } _dropRates[i] = 1 - _N / (_N + sum); } } } @Override public void compute2() { } } private static class UDPDropTester extends DTask<UDPDropTester> { public final int _srcId; public final int _tgtId; public final int _N; private final int [] _msgSzs; private transient RPC<UDPPing> [][] _pings; double [] _dropRates; int [] _droppedPackets; public UDPDropTester(H2ONode src, H2ONode tgt, int [] msgSzs, int ntests){ _srcId = src.index(); _tgtId = tgt.index(); _msgSzs = msgSzs; _N = ntests; } private transient boolean _done; private final void doTest(){ _droppedPackets = new int[_N]; Arrays.fill(_droppedPackets,-1); _pings = new RPC[_msgSzs.length][_N]; // addToPendingCount(_msgSzs.length*_N - 1); for(int i = 0; i < _msgSzs.length; ++i) for(int j = 0; j < _N; ++j) // instead of synchronization, just wait for predetermined amount of time _pings[i][j] = new RPC(H2O.CLOUD._memary[_tgtId],new UDPPing(_msgSzs[i]))/*.addCompleter(this)*/.call(); try { Thread.sleep(5000); } catch (InterruptedException e) {} // if not done yet, finish no matter what (racy but we don't care here - only a debug tool, does not have to be precise) // setPendingCount(0); } @Override public synchronized void onCompletion(CountedCompleter caller){ if(!_done){ // only one completion _done = true; _dropRates = MemoryManager.malloc8d(_msgSzs.length); // compute the drop rates for(int i = 0; i < _msgSzs.length; ++i) { double sum = 0; for (int j = 0; j < _N; ++j) { RPC<UDPPing> rpc = _pings[i][j]; sum += (rpc._dt._retries == -1 ? Double.POSITIVE_INFINITY : rpc._dt._retries); } _dropRates[i] = 1 - _N/(_N+sum); } } } @Override public void compute2() { if(_srcId == H2O.SELF.index()) { doTest(); tryComplete(); } else { _done = true; final UDPDropTester t = (UDPDropTester) clone(); new RPC(H2O.CLOUD._memary[_srcId], t).addCompleter(new H2OCountedCompleter(this) { @Override public void compute2() { } @Override public void onCompletion(CountedCompleter cc) { copyOver(t); } }).call(); } } } private static class UDPDropMatrix extends Iced { static final int API_WEAVER=1; // This file has auto-gen'd doc & json fields static public DocGen.FieldDoc[] DOC_FIELDS; // Initialized from Auto-Gen code. 
@API(help="message size") public final int messageSz; @API(help="meassured drop rates") public final double [][] dropRates; public UDPDropMatrix(int msgSz, double [][] dropRates){ messageSz = msgSz; this.dropRates = dropRates; } @Override public String toString(){ return " drop rates at " + messageSz + " bytes\n" + Utils.pprint(dropRates); } } @Override protected void execImpl() { logStart(); Log.debug("NetworkTester testing udp drops"); final UDPDropTester [] dropTests = new UDPDropTester[H2O.CLOUD.size()*H2O.CLOUD.size()-H2O.CLOUD.size()]; H2O.submitTask(new H2OCountedCompleter() { @Override public void compute2() { int k = 0; for(int i = 0; i < H2O.CLOUD.size(); ++i) for(int j = 0; j < H2O.CLOUD.size(); ++j){ if(i == j) continue; dropTests[k++] = new UDPDropTester(H2O.CLOUD._memary[i],H2O.CLOUD._memary[j],msg_sizes,10); } ForkJoinTask.invokeAll(dropTests); tryComplete(); } }).join(); dropRates = new UDPDropMatrix[msg_sizes.length]; for(int m = 0; m < msg_sizes.length; ++m){ double [][] ds = new double[H2O.CLOUD.size()][H2O.CLOUD.size()]; int k = 0; for(int i = 0; i < H2O.CLOUD.size(); ++i) for(int j = 0; j < H2O.CLOUD.size(); ++j){ if(i == j) continue; ds[i][j] = dropTests[k++]._dropRates[m]; } dropRates[m] = new UDPDropMatrix(msg_sizes[m],ds); } Log.debug("Network test udp drop rates: "); for(UDPDropMatrix m:dropRates) Log.debug(m.toString()); // now do the tcp bandwith test // print out } @Override public boolean toHTML(StringBuilder sb) { try { DocGen.HTML.section(sb, "UDP Drop rates"); for(int i = 0; i < msg_sizes.length; ++i){ sb.append("<h4>" + "Message size = " + msg_sizes[i] + " bytes</h4>"); sb.append("<div>"); UDPDropMatrix d = dropRates[i]; sb.append("<table class='table table-bordered table-condensed'>\n"); sb.append("<tr>"); sb.append("<th></th>"); for(int j = 0 ; j < H2O.CLOUD.size(); ++j) sb.append("<th>" + j + "</th>"); sb.append("</tr>\n"); for(int j = 0 ; j < H2O.CLOUD.size(); ++j){ sb.append("<tr><td>" + j + "</td>"); for(int k = 0; k < d.dropRates[j].length; ++k){ sb.append("<td>" + (int)(100*d.dropRates[j][k]) + "&#37;</td>"); } sb.append("</tr>\n"); } sb.append("</table>"); sb.append("</div>"); } } catch(Throwable t){ t.printStackTrace(); } return true; } }
4,223
2,453
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 30 2020 21:18:12). // // Copyright (C) 1997-2019 <NAME>. // #import <Foundation/NSKeyedArchiver.h> @interface NSKeyedArchiver (DVTKeyedArchiverAdditions) + (id)dvt_archivedDataWithSecureRootObject:(id)arg1 error:(id *)arg2; + (id)dvt_archiveDataFromRunningBlock:(CDUnknownBlockType)arg1; @end
138
458
<gh_stars>100-1000 /* DO NOT EDIT THIS FILE - it is machine generated */ #include <jni.h> /* Header for class com_mousebird_maply_ParticleSystem */ #ifndef _Included_com_mousebird_maply_ParticleSystem #define _Included_com_mousebird_maply_ParticleSystem #ifdef __cplusplus extern "C" { #endif /* * Class: com_mousebird_maply_ParticleSystem * Method: setName * Signature: (Ljava/lang/String;)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setName (JNIEnv *, jobject, jstring); /* * Class: com_mousebird_maply_ParticleSystem * Method: getID * Signature: ()J */ JNIEXPORT jlong JNICALL Java_com_mousebird_maply_ParticleSystem_getID (JNIEnv *, jobject); /* * Class: com_mousebird_maply_ParticleSystem * Method: setParticleSystemTypeNative * Signature: (I)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setParticleSystemTypeNative (JNIEnv *, jobject, jint); /* * Class: com_mousebird_maply_ParticleSystem * Method: setPositionShaderID * Signature: (J)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setPositionShaderID (JNIEnv *, jobject, jlong); /* * Class: com_mousebird_maply_ParticleSystem * Method: setRenderShaderID * Signature: (J)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setRenderShaderID (JNIEnv *, jobject, jlong); /* * Class: com_mousebird_maply_ParticleSystem * Method: setLifetime * Signature: (D)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setLifetime (JNIEnv *, jobject, jdouble); /* * Class: com_mousebird_maply_ParticleSystem * Method: setBasetime * Signature: (D)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setBasetime (JNIEnv *, jobject, jdouble); /* * Class: com_mousebird_maply_ParticleSystem * Method: getBasetime * Signature: ()D */ JNIEXPORT jdouble JNICALL Java_com_mousebird_maply_ParticleSystem_getBasetime (JNIEnv *, jobject); /* * Class: com_mousebird_maply_ParticleSystem * Method: setTotalParticles * Signature: (I)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setTotalParticles (JNIEnv *, jobject, jint); /* * Class: com_mousebird_maply_ParticleSystem * Method: getTotalParticles * Signature: ()I */ JNIEXPORT jint JNICALL Java_com_mousebird_maply_ParticleSystem_getTotalParticles (JNIEnv *, jobject); /* * Class: com_mousebird_maply_ParticleSystem * Method: setBatchSize * Signature: (I)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setBatchSize (JNIEnv *, jobject, jint); /* * Class: com_mousebird_maply_ParticleSystem * Method: getBatchSize * Signature: ()I */ JNIEXPORT jint JNICALL Java_com_mousebird_maply_ParticleSystem_getBatchSize (JNIEnv *, jobject); /* * Class: com_mousebird_maply_ParticleSystem * Method: setContinuousUpdate * Signature: (Z)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setContinuousUpdate (JNIEnv *, jobject, jboolean); /* * Class: com_mousebird_maply_ParticleSystem * Method: setDrawPriority * Signature: (I)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setDrawPriority (JNIEnv *, jobject, jint); /* * Class: com_mousebird_maply_ParticleSystem * Method: setPointSize * Signature: (F)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setPointSize (JNIEnv *, jobject, jfloat); /* * Class: com_mousebird_maply_ParticleSystem * Method: addAttributeNative * Signature: (Ljava/lang/String;I)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_addAttributeNative (JNIEnv *, jobject, jstring, jint); /* * Class: 
com_mousebird_maply_ParticleSystem * Method: addVaryingNative * Signature: (Ljava/lang/String;Ljava/lang/String;I)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_addVaryingNative (JNIEnv *, jobject, jstring, jstring, jint); /* * Class: com_mousebird_maply_ParticleSystem * Method: addTextureID * Signature: (J)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_addTextureID (JNIEnv *, jobject, jlong); /* * Class: com_mousebird_maply_ParticleSystem * Method: setZBufferRead * Signature: (Z)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setZBufferRead (JNIEnv *, jobject, jboolean); /* * Class: com_mousebird_maply_ParticleSystem * Method: setZBufferWrite * Signature: (Z)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setZBufferWrite (JNIEnv *, jobject, jboolean); /* * Class: com_mousebird_maply_ParticleSystem * Method: setRenderTargetNative * Signature: (J)V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_setRenderTargetNative (JNIEnv *, jobject, jlong); /* * Class: com_mousebird_maply_ParticleSystem * Method: nativeInit * Signature: ()V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_nativeInit (JNIEnv *, jclass); /* * Class: com_mousebird_maply_ParticleSystem * Method: initialise * Signature: ()V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_initialise (JNIEnv *, jobject); /* * Class: com_mousebird_maply_ParticleSystem * Method: dispose * Signature: ()V */ JNIEXPORT void JNICALL Java_com_mousebird_maply_ParticleSystem_dispose (JNIEnv *, jobject); #ifdef __cplusplus } #endif #endif
2,172
1,374
<filename>core-lib/es6/src/main/java/def/js/ProxyConstructor.java package def.js; @jsweet.lang.Interface public abstract class ProxyConstructor extends def.js.Object { native public <T> Revocable<T> revocable(T target, ProxyHandler<T> handler); public <T> ProxyConstructor(T target, ProxyHandler<T> handler){} /** This is an automatically generated object type (see the source definition). */ @jsweet.lang.ObjectType public static class Revocable<T> extends def.js.Object { public T proxy; public java.lang.Runnable revoke; } protected ProxyConstructor(){} }
204
711
<gh_stars>100-1000 package com.java110.utils.util; import com.alibaba.fastjson.JSONObject; import junit.framework.TestCase; import java.lang.reflect.InvocationTargetException; import java.util.Date; import java.util.HashMap; import java.util.Map; public class BeanConvertUtilTest extends TestCase { /** * bean 转为bean * * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public void testCovertBean() throws IllegalAccessException, NoSuchMethodException, InvocationTargetException { PersonDto personDto = new PersonDto(); personDto.setId(1); personDto.setName("wuxw"); personDto.setCreateTime(new Date()); personDto.setPage("1"); personDto.setAge(1); PersonVo personVo = new PersonVo(); personVo = BeanConvertUtil.covertBean(personDto, personVo); System.out.println("dto 转 vo" + JSONObject.toJSONString(personVo)); } /** * bean 转为bean * * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public void testCovertBeanAgent() throws IllegalAccessException, NoSuchMethodException, InvocationTargetException { PersonVo personVo = new PersonVo(); personVo.setId("2"); personVo.setName("wuxw"); personVo.setCreateTime("2020-01-28 12:12:12"); PersonDto personDto = new PersonDto(); personDto = BeanConvertUtil.covertBean(personVo, personDto); System.out.println("dto 转 vo" + JSONObject.toJSONString(personDto)); } /** * bean 转为bean * * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public void testBeanCovertMap() throws IllegalAccessException, NoSuchMethodException, InvocationTargetException { PersonDto personVo = new PersonDto(); personVo.setName("wuxw"); personVo.setCreateTime(new Date()); Map map = BeanConvertUtil.beanCovertMap(personVo); System.out.println("bean 转 map" + JSONObject.toJSONString(map)); } /** * bean 转为bean * * @throws IllegalAccessException * @throws NoSuchMethodException * @throws InvocationTargetException */ public void testMapCovertBean() throws IllegalAccessException, NoSuchMethodException, InvocationTargetException { Map info = new HashMap(); info.put("name", "wuxw"); info.put("createTime", new Date()); info.put("page",1); PersonDto personDto = null; personDto = BeanConvertUtil.covertBean(info,PersonDto.class); System.out.println("map 转 bean" + JSONObject.toJSONString(personDto)); } }
1,107
335
{ "word": "Junk", "definitions": [ "Old or discarded articles that are considered useless or of little value.", "Worthless writing, talk, or ideas.", "A person's belongings.", "Junk bonds.", "Heroin.", "The lump of oily fibrous tissue in a sperm whale's head, containing spermaceti.", "A man's genitals." ], "parts-of-speech": "Noun" }
165
373
/** @file Copyright (c) 2020 <NAME> <<EMAIL>> Copyright (c) 2020, ARM Limited. All rights reserved. Copyright (c) 2020 <NAME> <<EMAIL>> SPDX-License-Identifier: BSD-2-Clause-Patent **/ #ifndef BCM_GENET_DXE_H__ #define BCM_GENET_DXE_H__ #include <Uefi.h> #include <Library/UefiLib.h> #include <Protocol/BcmGenetPlatformDevice.h> #include <Protocol/AdapterInformation.h> #include <Protocol/ComponentName.h> #include <Protocol/ComponentName2.h> #include <Protocol/SimpleNetwork.h> #include "GenericPhy.h" #define LOWEST_SET_BIT(__mask) ((((__mask) - 1) & (__mask)) ^ (__mask)) #define SHIFTOUT(__x, __mask) (((__x) & (__mask)) / LOWEST_SET_BIT (__mask)) #define SHIFTIN(__x, __mask) ((__x) * LOWEST_SET_BIT (__mask)) /* * Aux control shadow register, bits 0-2 select function (0x00 to * 0x07). */ #define BRGPHY_MII_AUXCTL 0x18 /* AUX control */ #define BRGPHY_AUXCTL_SHADOW_MISC 0x07 #define BRGPHY_AUXCTL_MISC_DATA_MASK 0x7ff8 #define BRGPHY_AUXCTL_MISC_READ_SHIFT 12 #define BRGPHY_AUXCTL_MISC_WRITE_EN 0x8000 #define BRGPHY_AUXCTL_MISC_RGMII_SKEW_EN 0x0200 /* * Shadow register 0x1C, bit 15 is write enable, * bits 14-10 select function (0x00 to 0x1F). */ #define BRGPHY_MII_SHADOW_1C 0x1C #define BRGPHY_SHADOW_1C_WRITE_EN 0x8000 #define BRGPHY_SHADOW_1C_SELECT_MASK 0x7C00 #define BRGPHY_SHADOW_1C_DATA_MASK 0x03FF /* Shadow 0x1C Clock Alignment Control Register (select value 0x03) */ #define BRGPHY_SHADOW_1C_CLK_CTRL (0x03 << 10) #define BRGPHY_SHADOW_1C_GTXCLK_EN 0x0200 #define MAX_ETHERNET_PKT_SIZE 1500 #define GENET_VERSION 0x0a #define GENET_MAX_PACKET_SIZE 1536 #define GENET_SYS_REV_CTRL 0x000 #define SYS_REV_MAJOR (BIT27|BIT26|BIT25|BIT24) #define SYS_REV_MINOR (BIT19|BIT18|BIT17|BIT16) #define GENET_SYS_PORT_CTRL 0x004 #define GENET_SYS_PORT_MODE_EXT_GPHY 3 #define GENET_SYS_RBUF_FLUSH_CTRL 0x008 #define GENET_SYS_RBUF_FLUSH_RESET BIT1 #define GENET_SYS_TBUF_FLUSH_CTRL 0x00c #define GENET_EXT_RGMII_OOB_CTRL 0x08c #define GENET_EXT_RGMII_OOB_ID_MODE_DISABLE BIT16 #define GENET_EXT_RGMII_OOB_RGMII_MODE_EN BIT6 #define GENET_EXT_RGMII_OOB_OOB_DISABLE BIT5 #define GENET_EXT_RGMII_OOB_RGMII_LINK BIT4 #define GENET_INTRL2_CPU_STAT 0x200 #define GENET_INTRL2_CPU_CLEAR 0x208 #define GENET_INTRL2_CPU_STAT_MASK 0x20c #define GENET_INTRL2_CPU_SET_MASK 0x210 #define GENET_INTRL2_CPU_CLEAR_MASK 0x214 #define GENET_IRQ_MDIO_ERROR BIT24 #define GENET_IRQ_MDIO_DONE BIT23 #define GENET_IRQ_TXDMA_DONE BIT16 #define GENET_IRQ_RXDMA_DONE BIT13 #define GENET_RBUF_CTRL 0x300 #define GENET_RBUF_BAD_DIS BIT2 #define GENET_RBUF_ALIGN_2B BIT1 #define GENET_RBUF_64B_EN BIT0 #define GENET_RBUF_TBUF_SIZE_CTRL 0x3b4 #define GENET_UMAC_CMD 0x808 #define GENET_UMAC_CMD_LCL_LOOP_EN BIT15 #define GENET_UMAC_CMD_SW_RESET BIT13 #define GENET_UMAC_CMD_HD_EN BIT10 #define GENET_UMAC_CMD_PROMISC BIT4 #define GENET_UMAC_CMD_SPEED (BIT3|BIT2) #define GENET_UMAC_CMD_SPEED_10 0 #define GENET_UMAC_CMD_SPEED_100 1 #define GENET_UMAC_CMD_SPEED_1000 2 #define GENET_UMAC_CMD_RXEN BIT1 #define GENET_UMAC_CMD_TXEN BIT0 #define GENET_UMAC_MAC0 0x80c #define GENET_UMAC_MAC1 0x810 #define GENET_UMAC_MAX_FRAME_LEN 0x814 #define GENET_UMAC_TX_FLUSH 0xb34 #define GENET_UMAC_MIB_CTRL 0xd80 #define GENET_UMAC_MIB_RESET_TX BIT2 #define GENET_UMAC_MIB_RESET_RUNT BIT1 #define GENET_UMAC_MIB_RESET_RX BIT0 #define GENET_MDIO_CMD 0xe14 #define GENET_MDIO_START_BUSY BIT29 #define GENET_MDIO_READ BIT27 #define GENET_MDIO_WRITE BIT26 #define GENET_MDIO_PMD (BIT25|BIT24|BIT23|BIT22|BIT21) #define GENET_MDIO_REG (BIT20|BIT19|BIT18|BIT17|BIT16) #define 
GENET_UMAC_MDF_CTRL 0xe50 #define GENET_UMAC_MDF_ADDR0(n) (0xe54 + (n) * 0x8) #define GENET_UMAC_MDF_ADDR1(n) (0xe58 + (n) * 0x8) #define GENET_MAX_MDF_FILTER 17 #define GENET_DMA_DESC_COUNT 256 #define GENET_DMA_DESC_SIZE 12 #define GENET_DMA_DEFAULT_QUEUE 16 #define GENET_DMA_RING_SIZE 0x40 #define GENET_DMA_RINGS_SIZE (GENET_DMA_RING_SIZE * (GENET_DMA_DEFAULT_QUEUE + 1)) #define GENET_RX_BASE 0x2000 #define GENET_TX_BASE 0x4000 #define GENET_RX_DMA_RINGBASE(qid) (GENET_RX_BASE + 0xc00 + GENET_DMA_RING_SIZE * (qid)) #define GENET_RX_DMA_WRITE_PTR_LO(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x00) #define GENET_RX_DMA_WRITE_PTR_HI(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x04) #define GENET_RX_DMA_PROD_INDEX(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x08) #define GENET_RX_DMA_CONS_INDEX(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x0c) #define GENET_RX_DMA_RING_BUF_SIZE(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x10) #define GENET_RX_DMA_RING_BUF_SIZE_DESC_COUNT 0xffff0000 #define GENET_RX_DMA_RING_BUF_SIZE_BUF_LENGTH 0x0000ffff #define GENET_RX_DMA_START_ADDR_LO(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x14) #define GENET_RX_DMA_START_ADDR_HI(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x18) #define GENET_RX_DMA_END_ADDR_LO(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x1c) #define GENET_RX_DMA_END_ADDR_HI(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x20) #define GENET_RX_DMA_XON_XOFF_THRES(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x28) #define GENET_RX_DMA_XON_XOFF_THRES_LO 0xffff0000 #define GENET_RX_DMA_XON_XOFF_THRES_HI 0x0000ffff #define GENET_RX_DMA_READ_PTR_LO(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x2c) #define GENET_RX_DMA_READ_PTR_HI(qid) (GENET_RX_DMA_RINGBASE(qid) + 0x30) #define GENET_TX_DMA_RINGBASE(qid) (GENET_TX_BASE + 0xc00 + GENET_DMA_RING_SIZE * (qid)) #define GENET_TX_DMA_READ_PTR_LO(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x00) #define GENET_TX_DMA_READ_PTR_HI(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x04) #define GENET_TX_DMA_CONS_INDEX(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x08) #define GENET_TX_DMA_PROD_INDEX(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x0c) #define GENET_TX_DMA_RING_BUF_SIZE(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x10) #define GENET_TX_DMA_RING_BUF_SIZE_DESC_COUNT 0xffff0000 #define GENET_TX_DMA_RING_BUF_SIZE_BUF_LENGTH 0x0000ffff #define GENET_TX_DMA_START_ADDR_LO(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x14) #define GENET_TX_DMA_START_ADDR_HI(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x18) #define GENET_TX_DMA_END_ADDR_LO(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x1c) #define GENET_TX_DMA_END_ADDR_HI(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x20) #define GENET_TX_DMA_MBUF_DONE_THRES(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x24) #define GENET_TX_DMA_FLOW_PERIOD(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x28) #define GENET_TX_DMA_WRITE_PTR_LO(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x2c) #define GENET_TX_DMA_WRITE_PTR_HI(qid) (GENET_TX_DMA_RINGBASE(qid) + 0x30) #define GENET_RX_DESC_STATUS(idx) (GENET_RX_BASE + GENET_DMA_DESC_SIZE * (idx) + 0x00) #define GENET_RX_DESC_STATUS_BUFLEN (BIT27|BIT26|BIT25|BIT24|BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16) #define GENET_RX_DESC_STATUS_OWN BIT15 #define GENET_RX_DESC_STATUS_EOP BIT14 #define GENET_RX_DESC_STATUS_SOP BIT13 #define GENET_RX_DESC_STATUS_RX_ERROR BIT2 #define GENET_RX_DESC_ADDRESS_LO(idx) (GENET_RX_BASE + GENET_DMA_DESC_SIZE * (idx) + 0x04) #define GENET_RX_DESC_ADDRESS_HI(idx) (GENET_RX_BASE + GENET_DMA_DESC_SIZE * (idx) + 0x08) #define GENET_TX_DESC_STATUS(idx) (GENET_TX_BASE + GENET_DMA_DESC_SIZE * (idx) + 0x00) #define GENET_TX_DESC_STATUS_BUFLEN (BIT27|BIT26|BIT25|BIT24|BIT23|BIT22|BIT21|BIT20|BIT19|BIT18|BIT17|BIT16) #define 
GENET_TX_DESC_STATUS_OWN BIT15 #define GENET_TX_DESC_STATUS_EOP BIT14 #define GENET_TX_DESC_STATUS_SOP BIT13 #define GENET_TX_DESC_STATUS_QTAG (BIT12|BIT11|BIT10|BIT9|BIT8|BIT7) #define GENET_TX_DESC_STATUS_CRC BIT6 #define GENET_TX_DESC_ADDRESS_LO(idx) (GENET_TX_BASE + GENET_DMA_DESC_SIZE * (idx) + 0x04) #define GENET_TX_DESC_ADDRESS_HI(idx) (GENET_TX_BASE + GENET_DMA_DESC_SIZE * (idx) + 0x08) #define GENET_RX_DMA_RING_CFG (GENET_RX_BASE + 0x1040 + 0x00) #define GENET_RX_DMA_CTRL (GENET_RX_BASE + 0x1040 + 0x04) #define GENET_RX_DMA_CTRL_RBUF_EN(qid) (BIT1 << (qid)) #define GENET_RX_DMA_CTRL_EN BIT0 #define GENET_RX_SCB_BURST_SIZE (GENET_RX_BASE + 0x1040 + 0x0c) #define GENET_TX_DMA_RING_CFG (GENET_TX_BASE + 0x1040 + 0x00) #define GENET_TX_DMA_CTRL (GENET_TX_BASE + 0x1040 + 0x04) #define GENET_TX_DMA_CTRL_RBUF_EN(qid) (BIT1 << (qid)) #define GENET_TX_DMA_CTRL_EN BIT0 #define GENET_TX_SCB_BURST_SIZE (GENET_TX_BASE + 0x1040 + 0x0c) typedef struct { EFI_PHYSICAL_ADDRESS PhysAddress; VOID * Mapping; } GENET_MAP_INFO; typedef enum { GENET_PHY_MODE_MII, GENET_PHY_MODE_RGMII, GENET_PHY_MODE_RGMII_RXID, GENET_PHY_MODE_RGMII_TXID, GENET_PHY_MODE_RGMII_ID, } GENET_PHY_MODE; typedef struct { UINT32 Signature; EFI_HANDLE ControllerHandle; EFI_LOCK Lock; EFI_EVENT ExitBootServicesEvent; EFI_SIMPLE_NETWORK_PROTOCOL Snp; EFI_SIMPLE_NETWORK_MODE SnpMode; EFI_ADAPTER_INFORMATION_PROTOCOL Aip; BCM_GENET_PLATFORM_DEVICE_PROTOCOL *Dev; GENERIC_PHY_PRIVATE_DATA Phy; UINT8 *TxBuffer[GENET_DMA_DESC_COUNT]; VOID *TxBufferMap[GENET_DMA_DESC_COUNT]; UINT8 TxQueued; UINT16 TxNext; UINT16 TxConsIndex; UINT16 TxProdIndex; EFI_PHYSICAL_ADDRESS RxBuffer; GENET_MAP_INFO RxBufferMap[GENET_DMA_DESC_COUNT]; UINT16 RxConsIndex; UINT16 RxProdIndex; GENET_PHY_MODE PhyMode; UINTN RegBase; } GENET_PRIVATE_DATA; extern EFI_COMPONENT_NAME_PROTOCOL gGenetComponentName; extern EFI_COMPONENT_NAME2_PROTOCOL gGenetComponentName2; extern EFI_DRIVER_BINDING_PROTOCOL mGenetDriverBinding; extern CONST EFI_SIMPLE_NETWORK_PROTOCOL gGenetSimpleNetworkTemplate; extern CONST EFI_ADAPTER_INFORMATION_PROTOCOL gGenetAdapterInfoTemplate; #define GENET_DRIVER_SIGNATURE SIGNATURE_32('G', 'N', 'E', 'T') #define GENET_PRIVATE_DATA_FROM_SNP_THIS(a) CR(a, GENET_PRIVATE_DATA, Snp, GENET_DRIVER_SIGNATURE) #define GENET_PRIVATE_DATA_FROM_AIP_THIS(a) CR(a, GENET_PRIVATE_DATA, Aip, GENET_DRIVER_SIGNATURE) #define GENET_RX_BUFFER(g, idx) ((UINT8 *)(UINTN)(g)->RxBuffer + GENET_MAX_PACKET_SIZE * (idx)) EFI_STATUS EFIAPI GenetPhyRead ( IN VOID *Priv, IN UINT8 PhyAddr, IN UINT8 Reg, OUT UINT16 *Data ); EFI_STATUS EFIAPI GenetPhyWrite ( IN VOID *Priv, IN UINT8 PhyAddr, IN UINT8 Reg, IN UINT16 Data ); EFI_STATUS EFIAPI GenetPhyResetAction ( IN VOID *Priv ); VOID EFIAPI GenetPhyConfigure ( IN VOID *Priv, IN GENERIC_PHY_SPEED Speed, IN GENERIC_PHY_DUPLEX Duplex ); VOID GenetReset ( IN GENET_PRIVATE_DATA *Genet ); VOID EFIAPI GenetSetMacAddress ( IN GENET_PRIVATE_DATA *Genet, IN EFI_MAC_ADDRESS *MacAddr ); VOID GenetSetPhyMode ( IN GENET_PRIVATE_DATA *Genet, IN GENET_PHY_MODE PhyMode ); VOID GenetEnableTxRx ( IN GENET_PRIVATE_DATA *Genet ); VOID GenetDisableTxRx ( IN GENET_PRIVATE_DATA *Genet ); VOID GenetSetPromisc ( IN GENET_PRIVATE_DATA *Genet, IN BOOLEAN Enable ); VOID GenetEnableBroadcastFilter ( IN GENET_PRIVATE_DATA *Genet, IN BOOLEAN Enable ); VOID GenetDmaInitRings ( IN GENET_PRIVATE_DATA *Genet ); EFI_STATUS GenetDmaAlloc ( IN GENET_PRIVATE_DATA *Genet ); VOID GenetDmaFree ( IN GENET_PRIVATE_DATA *Genet ); VOID GenetDmaTriggerTx ( IN GENET_PRIVATE_DATA 
*Genet, IN UINT8 DescIndex, IN EFI_PHYSICAL_ADDRESS PhysAddr, IN UINTN NumberOfBytes ); EFI_STATUS GenetDmaMapRxDescriptor ( IN GENET_PRIVATE_DATA *Genet, IN UINT8 DescIndex ); VOID GenetDmaUnmapRxDescriptor ( IN GENET_PRIVATE_DATA *Genet, IN UINT8 DescIndex ); VOID GenetTxIntr ( IN GENET_PRIVATE_DATA *Genet, OUT VOID **TxBuf ); UINT32 GenetRxPending ( IN GENET_PRIVATE_DATA *Genet ); UINT32 GenetTxPending ( IN GENET_PRIVATE_DATA *Genet ); EFI_STATUS GenetRxIntr ( IN GENET_PRIVATE_DATA *Genet, OUT UINT8 *DescIndex, OUT UINTN *FrameLength ); VOID GenetRxComplete ( IN GENET_PRIVATE_DATA *Genet ); #endif /* GENET_UTIL_H__ */
8,880
680
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional test for GradientDescent."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf


class GradientDescentOptimizerTest(tf.test.TestCase):

  def testBasic(self):
    for dtype in [tf.half, tf.float32, tf.float64]:
      with self.test_session():
        var0 = tf.Variable([1.0, 2.0], dtype=dtype)
        var1 = tf.Variable([3.0, 4.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.1], dtype=dtype)
        grads1 = tf.constant([0.01, 0.01], dtype=dtype)
        sgd_op = tf.train.GradientDescentOptimizer(3.0).apply_gradients(zip(
            [grads0, grads1], [var0, var1]))
        tf.initialize_all_variables().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([1.0, 2.0], var0.eval())
        self.assertAllCloseAccordingToType([3.0, 4.0], var1.eval())
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params
        self.assertAllCloseAccordingToType(
            [1.0 - 3.0 * 0.1, 2.0 - 3.0 * 0.1], var0.eval())
        self.assertAllCloseAccordingToType(
            [3.0 - 3.0 * 0.01, 4.0 - 3.0 * 0.01], var1.eval())

  def testTensorLearningRate(self):
    for dtype in [tf.half, tf.float32, tf.float64]:
      with self.test_session():
        var0 = tf.Variable([1.0, 2.0], dtype=dtype)
        var1 = tf.Variable([3.0, 4.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.1], dtype=dtype)
        grads1 = tf.constant([0.01, 0.01], dtype=dtype)
        lrate = tf.constant(3.0)
        sgd_op = tf.train.GradientDescentOptimizer(lrate).apply_gradients(zip(
            [grads0, grads1], [var0, var1]))
        tf.initialize_all_variables().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([1.0, 2.0], var0.eval())
        self.assertAllCloseAccordingToType([3.0, 4.0], var1.eval())
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params
        self.assertAllCloseAccordingToType(
            [1.0 - 3.0 * 0.1, 2.0 - 3.0 * 0.1], var0.eval())
        self.assertAllCloseAccordingToType(
            [3.0 - 3.0 * 0.01, 4.0 - 3.0 * 0.01], var1.eval())

  def testGradWrtRef(self):
    for dtype in [tf.half, tf.float32, tf.float64]:
      with self.test_session():
        opt = tf.train.GradientDescentOptimizer(3.0)
        values = [1.0, 3.0]
        vars_ = [tf.Variable([v], dtype=dtype) for v in values]
        grads_and_vars = opt.compute_gradients(vars_[0].ref() + vars_[1], vars_)
        tf.initialize_all_variables().run()
        for grad, _ in grads_and_vars:
          self.assertAllCloseAccordingToType([1.0], grad.eval())

  def testWithGlobalStep(self):
    for dtype in [tf.half, tf.float32, tf.float64]:
      with self.test_session():
        global_step = tf.Variable(0, trainable=False)
        var0 = tf.Variable([1.0, 2.0], dtype=dtype)
        var1 = tf.Variable([3.0, 4.0], dtype=dtype)
        grads0 = tf.constant([0.1, 0.1], dtype=dtype)
        grads1 = tf.constant([0.01, 0.01], dtype=dtype)
        sgd_op = tf.train.GradientDescentOptimizer(3.0).apply_gradients(
            zip([grads0, grads1], [var0, var1]), global_step=global_step)
        tf.initialize_all_variables().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([1.0, 2.0], var0.eval())
        self.assertAllCloseAccordingToType([3.0, 4.0], var1.eval())
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params and global_step
        self.assertAllCloseAccordingToType(
            [1.0 - 3.0 * 0.1, 2.0 - 3.0 * 0.1], var0.eval())
        self.assertAllCloseAccordingToType(
            [3.0 - 3.0 * 0.01, 4.0 - 3.0 * 0.01], var1.eval())
        self.assertAllCloseAccordingToType(1, global_step.eval())

  def testSparseBasic(self):
    for dtype in [tf.half, tf.float32, tf.float64]:
      with self.test_session():
        var0 = tf.Variable([[1.0], [2.0]], dtype=dtype)
        var1 = tf.Variable([[3.0], [4.0]], dtype=dtype)
        grads0 = tf.IndexedSlices(
            tf.constant([0.1], shape=[1, 1], dtype=dtype),
            tf.constant([0]),
            tf.constant([2, 1]))
        grads1 = tf.IndexedSlices(
            tf.constant([0.01], shape=[1, 1], dtype=dtype),
            tf.constant([1]),
            tf.constant([2, 1]))
        sgd_op = tf.train.GradientDescentOptimizer(3.0).apply_gradients(
            zip([grads0, grads1], [var0, var1]))
        tf.initialize_all_variables().run()
        # Fetch params to validate initial values
        self.assertAllCloseAccordingToType([[1.0], [2.0]], var0.eval())
        self.assertAllCloseAccordingToType([[3.0], [4.0]], var1.eval())
        # Run 1 step of sgd
        sgd_op.run()
        # Validate updated params
        self.assertAllCloseAccordingToType(
            [[1.0 - 3.0 * 0.1], [2.0]], var0.eval())
        self.assertAllCloseAccordingToType(
            [[3.0], [4.0 - 3.0 * 0.01]], var1.eval())


if __name__ == "__main__":
  tf.test.main()
2,650
778
import KratosMultiphysics as KM
import KratosMultiphysics.KratosUnittest as KratosUnittest


class TestPoint(KratosUnittest.TestCase):

    def test_point_constructor_with_kratos_array(self):
        coords = [1.0, -2.5, 3.3]
        arr = KM.Array3(coords)
        point = KM.Point(arr)
        self.assertAlmostEqual(point.X, coords[0])
        self.assertAlmostEqual(point.Y, coords[1])
        self.assertAlmostEqual(point.Z, coords[2])

    def test_point_constructor_with_kratos_vector(self):
        coords = [1.0, -2.5, 3.3]
        vec = KM.Vector(coords)
        point = KM.Point(vec)
        self.assertAlmostEqual(point.X, coords[0])
        self.assertAlmostEqual(point.Y, coords[1])
        self.assertAlmostEqual(point.Z, coords[2])


if __name__ == '__main__':
    KratosUnittest.main()
378
346
<filename>scripts/demo_datasets.py
import logging

from open3d.ml.datasets import (SemanticKITTI, ParisLille3D, Semantic3D,
                                S3DIS, Toronto3D)

import argparse
import numpy as np


def parse_args():
    parser = argparse.ArgumentParser(description='Read from datasets')
    parser.add_argument('--path_semantickitti', help='path to SemanticKITTI')
    parser.add_argument('--path_semantick3d', help='path to Semantic3D')
    parser.add_argument('--path_parislille3d', help='path to ParisLille3D')
    parser.add_argument('--path_toronto3d', help='path to Toronto3D')
    parser.add_argument('--path_s3dis', help='path to S3DIS')

    args, _ = parser.parse_known_args()

    dict_args = vars(args)
    for k in dict_args:
        v = dict_args[k]
        print("{}: {}".format(k, v) if v is not None else
              "{} not given".format(k))

    return args


def demo_dataset(args):
    # read data from the datasets whose paths were given on the command line
    datasets = []
    if args.path_semantickitti is not None:
        datasets.append(
            SemanticKITTI(dataset_path=args.path_semantickitti,
                          use_cache=False))
    if args.path_parislille3d is not None:
        datasets.append(
            ParisLille3D(dataset_path=args.path_parislille3d, use_cache=False))
    if args.path_toronto3d is not None:
        datasets.append(
            Toronto3D(dataset_path=args.path_toronto3d, use_cache=False))
    if args.path_semantick3d is not None:
        datasets.append(
            Semantic3D(dataset_path=args.path_semantick3d, use_cache=False))
    if args.path_s3dis is not None:
        datasets.append(S3DIS(dataset_path=args.path_s3dis, use_cache=False))

    # count how many points of each label occur in the training split
    for dataset in datasets:
        print(dataset.name)
        cat_num = len(dataset.label_to_names)
        num_labels = np.zeros([cat_num])

        split = dataset.get_split('train')
        for i in range(len(split)):
            data = split.get_data(i)
            labels = data['label']
            for l in range(cat_num):
                num_labels[l] += (labels == l).sum()

        print(num_labels)

    for dataset in datasets:
        print(dataset.label_to_names)

        # print names of all point clouds in the test split
        split = dataset.get_split('test')
        for i in range(len(split)):
            attr = split.get_attr(i)
            print(attr['name'])

        split = dataset.get_split('train')
        for i in range(len(split)):
            data = split.get_data(i)
            print(data['point'].shape)


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.INFO,
        format='%(levelname)s - %(asctime)s - %(module)s - %(message)s',
    )

    args = parse_args()
    demo_dataset(args)
1,293
2,023
<reponame>tdiprima/code
import os
import re
from urllib2 import urlopen

base_url_qacandrot = "https://qacand.sflab.ondemand.com/sf-version.properties"
base_url_qapatch = "http://qapatch.successfactors.com/sf-version.properties"
base_url_qacand = "http://qacand.successfactors.com/sf-version.properties"

# get test environment
print "Before use this tool, please make sure you connect to VPN!"
env = raw_input("Please enter your test env: qacand, qacandrot or qapatch (Non case sensitive): ")
if env.strip().lower() == "qacand":
    html_resource = urlopen(base_url_qacand).read()
elif env.strip().lower() == "qapatch":
    html_resource = urlopen(base_url_qapatch).read()
elif env.strip().lower() == "qacandrot":
    html_resource = urlopen(base_url_qacandrot).read()

# map each module name to the SVN revision included in the current build
module_svn_map = {}
pattern = "(.*?)-(.*?)-(.*?)sion=(\d+$)"
p = re.compile(pattern)
for strofmodule in html_resource.split():
    if re.match(pattern, strofmodule):
        results = re.findall(pattern, strofmodule)
        module = results[0][1]
        svn_number = results[0][3]
        module_svn_map[module] = svn_number

build_num_pattern = "com.successfactors.sf-packages.version="
build_num_len = html_resource.index(build_num_pattern) + len(build_num_pattern)
build_version = html_resource[build_num_len:]
print "build version is " + build_version, "please make sure it is same as that on " + env.strip().lower() + "."

while 1:
    build_by_module = raw_input("Is your module build by module: (y or n, Non case sensitive) ")
    if build_by_module.strip().lower() == "n":
        your_module = "V4"
        your_svn_number = raw_input("Please enter your svn number (6 digit): ")
        if int(your_svn_number) <= int(module_svn_map[your_module]):
            print "Your svn number is included in current build on " + env.strip().lower() + "."
        else:
            print "Your svn number is NOT included on " + env.strip().lower() + "."
        os.system("pause")
    elif build_by_module.strip().lower() == "y":
        print "Please make sure your input is exactly same as one of module above!"
        your_module = raw_input("Please enter your module name: ")
        your_svn_number = raw_input("Please enter your svn number (6 digit): ")
        if your_module.strip().lower() in module_svn_map.keys():
            if int(your_svn_number) <= int(module_svn_map[your_module]):
                print "Your svn number is included in current build on " + env.strip().lower() + "."
            else:
                print "Your svn number is NOT included on " + env.strip().lower() + "."
        os.system("pause")
1,030
836
<filename>espresso/web/java/androidx/test/espresso/web/model/ModelCodec.java /* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package androidx.test.espresso.web.model; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import android.os.Build; import android.util.JsonReader; import android.util.Log; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import java.io.IOException; import java.io.StringReader; import java.math.BigDecimal; import java.math.BigInteger; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import org.json.JSONArray; import org.json.JSONException; import org.json.JSONObject; import org.json.JSONStringer; import org.json.JSONTokener; /** Encodes/Decodes JSON. */ public final class ModelCodec { private static final String TAG = "JS_CODEC"; private static final ImmutableSet<Class<?>> VALUEABLE_CLASSES = ImmutableSet.of(Boolean.class, Number.class, String.class, JSONObject.class, JSONArray.class); private static final ImmutableSet<Class<?>> TOP_LEVEL_CLASSES = ImmutableSet.of( JSONObject.class, JSONArray.class, Iterable.class, Object[].class, Map.class, JSONAble.class); private static final CopyOnWriteArrayList<JSONAble.DeJSONFactory> DEJSONIZERS = new CopyOnWriteArrayList<JSONAble.DeJSONFactory>( Lists.newArrayList( Evaluation.DEJSONIZER, WindowReference.DEJSONIZER, ElementReference.DEJSONIZER)); private ModelCodec() {} /** Transforms a JSON string to an evaluation. */ public static Evaluation decodeEvaluation(String json) { Object obj = decode(json); if (obj instanceof Evaluation) { return (Evaluation) obj; } else { throw new IllegalArgumentException( String.format( "Document: \"%s\" did not decode to an evaluation. Instead: \"%s\"", json, obj)); } } /** Encodes a Java Object into a JSON string. */ public static String encode(Object javaObject) { checkNotNull(javaObject); try { if (javaObject instanceof JSONObject) { return javaObject.toString(); } else if (javaObject instanceof JSONArray) { return javaObject.toString(); } else if (javaObject instanceof JSONAble) { return new JSONObject(((JSONAble) javaObject).toJSONString()).toString(); } else if ((javaObject instanceof Iterable) || (javaObject instanceof Map) || (javaObject instanceof Object[])) { JSONStringer stringer = new JSONStringer(); return encodeHelper(javaObject, stringer).toString(); } throw new IllegalArgumentException( String.format( "%s: not a valid top level class. Want one of: %s", javaObject.getClass(), TOP_LEVEL_CLASSES)); } catch (JSONException je) { throw new RuntimeException("Encode failed: " + javaObject, je); } } /** * Removes a DeJSONFactory from the list of factories that transform JSONObjects to java objects. 
*/ public static void removeDeJSONFactory(JSONAble.DeJSONFactory dejson) { DEJSONIZERS.remove(dejson); } /** Adds a DeJSONFactory to intercept JSONObjects and replace them with more suitable types. */ public static void addDeJSONFactory(JSONAble.DeJSONFactory dejson) { DEJSONIZERS.add(checkNotNull(dejson)); } static Object decode(String json) { checkNotNull(json); checkArgument(!"".equals(json), "Empty docs not supported."); try { if (Build.VERSION.SDK_INT < 13) { // After API 13, there is the JSONReader API - which is nicer to work with. return decodeViaJSONObject(json); } else { return decodeViaJSONReader(json); } } catch (JSONException je) { throw new RuntimeException(String.format("Could not parse: %s", json), je); } catch (IOException ioe) { throw new RuntimeException(String.format("Could not parse: %s", json), ioe); } } private static Object decodeViaJSONObject(String json) throws JSONException { JSONTokener tokener = new JSONTokener(json); Object value = tokener.nextValue(); if (value instanceof JSONArray) { return decodeArray((JSONArray) value); } else if (value instanceof JSONObject) { return decodeObject((JSONObject) value); } else { throw new IllegalArgumentException("No top level object or array: " + json); } } private static List<Object> decodeArray(JSONArray array) throws JSONException { List<Object> data = Lists.newArrayList(); for (int i = 0; i < array.length(); i++) { if (array.isNull(i)) { data.add(null); } else { Object value = array.get(i); if (value instanceof JSONObject) { data.add(decodeObject((JSONObject) value)); } else if (value instanceof JSONArray) { data.add(decodeArray((JSONArray) value)); } else { // boolean / string / or number. data.add(value); } } } return data; } private static Object decodeObject(JSONObject jsonObject) throws JSONException { List<String> nullKeys = Lists.newArrayList(); Map<String, Object> obj = Maps.newHashMap(); Iterator<String> keys = jsonObject.keys(); while (keys.hasNext()) { String key = keys.next(); if (jsonObject.isNull(key)) { nullKeys.add(key); obj.put(key, JSONObject.NULL); } else { Object value = jsonObject.get(key); if (value instanceof JSONObject) { obj.put(key, decodeObject((JSONObject) value)); } else if (value instanceof JSONArray) { obj.put(key, decodeArray((JSONArray) value)); } else { // boolean / string / or number. 
obj.put(key, value); } } } Object replacement = maybeReplaceMap(obj); if (replacement != null) { return replacement; } else { for (String key : nullKeys) { obj.remove(key); } return obj; } } private static Object decodeViaJSONReader(String json) throws IOException { JsonReader reader = null; try { reader = new JsonReader(new StringReader(json)); while (true) { switch (reader.peek()) { case BEGIN_OBJECT: return decodeObject(reader); case BEGIN_ARRAY: return decodeArray(reader); default: throw new IllegalStateException("Bogus document: " + json); } } } finally { if (null != reader) { try { reader.close(); } catch (IOException ioe) { Log.i(TAG, "json reader - close exception", ioe); } } } } private static List<Object> decodeArray(JsonReader reader) throws IOException { List<Object> array = Lists.newArrayList(); reader.beginArray(); while (reader.hasNext()) { switch (reader.peek()) { case BEGIN_OBJECT: array.add(decodeObject(reader)); break; case NULL: reader.nextNull(); array.add(null); break; case STRING: array.add(reader.nextString()); break; case BOOLEAN: array.add(reader.nextBoolean()); break; case BEGIN_ARRAY: array.add(decodeArray(reader)); break; case NUMBER: array.add(decodeNumber(reader.nextString())); break; default: throw new IllegalStateException(String.format("%s: bogus token", reader.peek())); } } reader.endArray(); return array; } private static Number decodeNumber(String value) { try { return Integer.valueOf(value); } catch (NumberFormatException i) { try { return Long.valueOf(value); } catch (NumberFormatException i2) { try { return Double.valueOf(value); } catch (NumberFormatException i3) { try { return new BigInteger(value); } catch (NumberFormatException i4) { return new BigDecimal(value); } } } } } private static Object decodeObject(JsonReader reader) throws IOException { Map<String, Object> obj = Maps.newHashMap(); List<String> nullKeys = Lists.newArrayList(); reader.beginObject(); while (reader.hasNext()) { String key = reader.nextName(); Object value = null; switch (reader.peek()) { case BEGIN_OBJECT: obj.put(key, decodeObject(reader)); break; case NULL: reader.nextNull(); nullKeys.add(key); obj.put(key, JSONObject.NULL); break; case STRING: obj.put(key, reader.nextString()); break; case BOOLEAN: obj.put(key, reader.nextBoolean()); break; case NUMBER: obj.put(key, decodeNumber(reader.nextString())); break; case BEGIN_ARRAY: obj.put(key, decodeArray(reader)); break; default: throw new IllegalStateException(String.format("%s: bogus token.", reader.peek())); } } reader.endObject(); Object replacement = maybeReplaceMap(obj); if (null != replacement) { return replacement; } else { for (String key : nullKeys) { obj.remove(key); } } return obj; } private static Object maybeReplaceMap(Map<String, Object> obj) { for (JSONAble.DeJSONFactory dejsonizer : DEJSONIZERS) { Object maybe = dejsonizer.attemptDeJSONize(obj); if (null != maybe) { return maybe; } } return null; } private static JSONStringer encodeHelper(Object javaObject, JSONStringer stringer) throws JSONException { if (null == javaObject) { stringer.value(javaObject); } else if (javaObject instanceof Map) { stringer.object(); Set<Map.Entry> entries = ((Map) javaObject).entrySet(); for (Map.Entry entry : entries) { stringer.key(entry.getKey().toString()); encodeHelper(entry.getValue(), stringer); } stringer.endObject(); } else if (javaObject instanceof Iterable) { stringer.array(); for (Object obj : ((Iterable) javaObject)) { encodeHelper(obj, stringer); } stringer.endArray(); } else if (javaObject instanceof Object[]) { 
stringer.array(); for (Object obj : ((Object[]) javaObject)) { encodeHelper(obj, stringer); } stringer.endArray(); } else if (javaObject instanceof JSONAble) { JSONObject jsonObj = new JSONObject(((JSONAble) javaObject).toJSONString()); stringer.value(jsonObj); } else { boolean converted = false; for (Class valuableClazz : VALUEABLE_CLASSES) { if (valuableClazz.isAssignableFrom(javaObject.getClass())) { converted = true; stringer.value(javaObject); } } checkState( converted, "%s: not encodable. Want one of: %s", javaObject.getClass(), VALUEABLE_CLASSES); } return stringer; } }
4,779
1,056
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.netbeans.modules.javascript2.editor;

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.netbeans.modules.csl.api.InstantRenamer;
import org.netbeans.modules.csl.api.OffsetRange;
import org.netbeans.modules.csl.spi.ParserResult;
import org.netbeans.modules.javascript2.lexer.api.LexUtilities;
import org.netbeans.modules.javascript2.model.api.Occurrence;
import org.netbeans.modules.javascript2.editor.navigation.OccurrencesSupport;
import org.netbeans.modules.javascript2.editor.navigation.OccurrencesFinderImpl;
import org.netbeans.modules.javascript2.editor.parser.JsParserResult;
import org.netbeans.modules.javascript2.model.api.Model;

/**
 *
 * @author <NAME>
 */
public class JsInstantRenamer implements InstantRenamer {

    @Override
    public boolean isRenameAllowed(ParserResult info, int caretOffset, String[] explanationRetValue) {
        JsParserResult jsInfo = (JsParserResult) info;
        OccurrencesSupport os = new OccurrencesSupport(Model.getModel(jsInfo, false));
        Occurrence occurrence = os.getOccurrence(jsInfo.getSnapshot().getEmbeddedOffset(caretOffset));
        return occurrence != null;
    }

    @Override
    public Set<OffsetRange> getRenameRegions(ParserResult info, int caretOffset) {
        if (info instanceof JsParserResult) {
            JsParserResult pResult = (JsParserResult) info;
            Set<OffsetRange> findOccurrenceRanges = OccurrencesFinderImpl.findOccurrenceRanges(
                    pResult, info.getSnapshot().getEmbeddedOffset(caretOffset));
            HashSet<OffsetRange> sourceRanges = new HashSet(findOccurrenceRanges.size());
            for (OffsetRange range : findOccurrenceRanges) {
                sourceRanges.add(LexUtilities.getLexerOffsets(pResult, range));
            }
            return sourceRanges;
        } else {
            return Collections.EMPTY_SET;
        }
    }
}
888
3,553
<filename>lib/onigmo/enc/iso_8859.h
#define SHARP_s 0xdf
28
535
<reponame>Nemo157/mynewt-core
/*
 * string.h
 */

#ifndef _STRING_H
#define _STRING_H

#include <klibc/extern.h>
#include <stddef.h>

#ifdef __cplusplus
extern "C" {
#endif

__extern void *memccpy(void *, const void *, int, size_t);
__extern void *memchr(const void *, int, size_t);
__extern void *memrchr(const void *, int, size_t);
__extern int memcmp(const void *, const void *, size_t);
__extern void *memcpy(void *, const void *, size_t);
__extern void *memmove(void *, const void *, size_t);
__extern void *memset(void *, int, size_t);
__extern void *memmem(const void *, size_t, const void *, size_t);
__extern void memswap(void *, void *, size_t);
__extern void bzero(void *, size_t);
__extern int strcasecmp(const char *, const char *);
__extern int strncasecmp(const char *, const char *, size_t);
__extern char *strcat(char *, const char *);
__extern char *strchr(const char *, int);
__extern char *index(const char *, int);
__extern char *strrchr(const char *, int);
__extern char *rindex(const char *, int);
__extern int strcmp(const char *, const char *);
__extern char *strcpy(char *, const char *);
__extern size_t strcspn(const char *, const char *);
__extern char *strdup(const char *);
__extern char *strndup(const char *, size_t);
__extern size_t strlen(const char *);
__extern size_t strnlen(const char *, size_t);
__extern char *strncat(char *, const char *, size_t);
__extern size_t strlcat(char *, const char *, size_t);
__extern int strncmp(const char *, const char *, size_t);
__extern char *strncpy(char *, const char *, size_t);
__extern size_t strlcpy(char *, const char *, size_t);
__extern char *strpbrk(const char *, const char *);
__extern char *strsep(char **, const char *);
__extern size_t strspn(const char *, const char *);
__extern char *strstr(const char *, const char *);
__extern char *strnstr(const char *, const char *, size_t);
__extern char *strtok(char *, const char *);
__extern char *strtok_r(char *, const char *, char **);

/* Some dummy functions to avoid errors with C++ cstring */
inline static int strcoll(const char *s1, const char *s2)
{
    return strcmp(s1, s2);
}

inline static size_t strxfrm(char *dest, const char *src, size_t n)
{
    strncpy(dest, src, n);
    return strlen(src);
}

#ifdef __cplusplus
}
#endif

#endif /* _STRING_H */
893
733
/** @file
    @brief Fake header to allow GHOST 4.09 use with MSVC 2005

    @date 2012

    @author
    <NAME>
    <<EMAIL>> and <<EMAIL>>
    http://academic.cleardefinition.com/
    Iowa State University Virtual Reality Applications Center
    Human-Computer Interaction Graduate Program
*/

// Copyright 2012, Iowa State University
// SPDX-License-Identifier: BSL-1.0

#pragma once
116
2,151
<filename>net/filter/filter_source_stream_unittest.cc // Copyright 2016 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <algorithm> #include <string> #include "base/bind.h" #include "base/callback.h" #include "base/macros.h" #include "base/numerics/safe_conversions.h" #include "net/base/io_buffer.h" #include "net/base/net_errors.h" #include "net/base/test_completion_callback.h" #include "net/filter/filter_source_stream.h" #include "net/filter/mock_source_stream.h" #include "testing/gtest/include/gtest/gtest.h" namespace net { namespace { const size_t kDefaultBufferSize = 4096; const size_t kSmallBufferSize = 1; class TestFilterSourceStreamBase : public FilterSourceStream { public: TestFilterSourceStreamBase(std::unique_ptr<SourceStream> upstream) : FilterSourceStream(SourceStream::TYPE_NONE, std::move(upstream)) {} ~TestFilterSourceStreamBase() override { DCHECK(buffer_.empty()); } std::string GetTypeAsString() const override { return type_string_; } void set_type_string(const std::string& type_string) { type_string_ = type_string; } protected: // Writes contents of |buffer_| to |output_buffer| and returns the number of // bytes written or an error code. Additionally removes consumed data from // |buffer_|. int WriteBufferToOutput(IOBuffer* output_buffer, int output_buffer_size) { size_t bytes_to_filter = std::min(buffer_.length(), static_cast<size_t>(output_buffer_size)); memcpy(output_buffer->data(), buffer_.data(), bytes_to_filter); buffer_.erase(0, bytes_to_filter); return base::checked_cast<int>(bytes_to_filter); } // Buffer used by subclasses to hold data that is yet to be passed to the // caller. std::string buffer_; private: std::string type_string_; DISALLOW_COPY_AND_ASSIGN(TestFilterSourceStreamBase); }; // A FilterSourceStream that needs all input data before it can return non-zero // bytes read. class NeedsAllInputFilterSourceStream : public TestFilterSourceStreamBase { public: NeedsAllInputFilterSourceStream(std::unique_ptr<SourceStream> upstream, size_t expected_input_bytes) : TestFilterSourceStreamBase(std::move(upstream)), expected_input_bytes_(expected_input_bytes) {} int FilterData(IOBuffer* output_buffer, int output_buffer_size, IOBuffer* input_buffer, int input_buffer_size, int* consumed_bytes, bool upstream_eof_reached) override { buffer_.append(input_buffer->data(), input_buffer_size); EXPECT_GE(expected_input_bytes_, input_buffer_size); expected_input_bytes_ -= input_buffer_size; *consumed_bytes = input_buffer_size; if (!upstream_eof_reached) { // Keep returning 0 bytes read until all input has been consumed. return 0; } EXPECT_EQ(0, expected_input_bytes_); return WriteBufferToOutput(output_buffer, output_buffer_size); } private: // Expected remaining bytes to be received from |upstream|. int expected_input_bytes_; DISALLOW_COPY_AND_ASSIGN(NeedsAllInputFilterSourceStream); }; // A FilterSourceStream that repeat every input byte by |multiplier| amount of // times. 
class MultiplySourceStream : public TestFilterSourceStreamBase { public: MultiplySourceStream(std::unique_ptr<SourceStream> upstream, int multiplier) : TestFilterSourceStreamBase(std::move(upstream)), multiplier_(multiplier) {} int FilterData(IOBuffer* output_buffer, int output_buffer_size, IOBuffer* input_buffer, int input_buffer_size, int* consumed_bytes, bool /*upstream_eof_reached*/) override { for (int i = 0; i < input_buffer_size; i++) { for (int j = 0; j < multiplier_; j++) buffer_.append(input_buffer->data() + i, 1); } *consumed_bytes = input_buffer_size; return WriteBufferToOutput(output_buffer, output_buffer_size); } private: int multiplier_; DISALLOW_COPY_AND_ASSIGN(MultiplySourceStream); }; // A FilterSourceStream passes through data unchanged to consumer. class PassThroughFilterSourceStream : public TestFilterSourceStreamBase { public: explicit PassThroughFilterSourceStream(std::unique_ptr<SourceStream> upstream) : TestFilterSourceStreamBase(std::move(upstream)) {} int FilterData(IOBuffer* output_buffer, int output_buffer_size, IOBuffer* input_buffer, int input_buffer_size, int* consumed_bytes, bool /*upstream_eof_reached*/) override { buffer_.append(input_buffer->data(), input_buffer_size); *consumed_bytes = input_buffer_size; return WriteBufferToOutput(output_buffer, output_buffer_size); } private: DISALLOW_COPY_AND_ASSIGN(PassThroughFilterSourceStream); }; // A FilterSourceStream passes throttle input data such that it returns them to // caller only one bytes at a time. class ThrottleSourceStream : public TestFilterSourceStreamBase { public: explicit ThrottleSourceStream(std::unique_ptr<SourceStream> upstream) : TestFilterSourceStreamBase(std::move(upstream)) {} int FilterData(IOBuffer* output_buffer, int output_buffer_size, IOBuffer* input_buffer, int input_buffer_size, int* consumed_bytes, bool /*upstream_eof_reached*/) override { buffer_.append(input_buffer->data(), input_buffer_size); *consumed_bytes = input_buffer_size; int bytes_to_read = std::min(1, static_cast<int>(buffer_.size())); memcpy(output_buffer->data(), buffer_.data(), bytes_to_read); buffer_.erase(0, bytes_to_read); return bytes_to_read; } private: DISALLOW_COPY_AND_ASSIGN(ThrottleSourceStream); }; // A FilterSourceStream that consumes all input data but return no output. class NoOutputSourceStream : public TestFilterSourceStreamBase { public: NoOutputSourceStream(std::unique_ptr<SourceStream> upstream, size_t expected_input_size) : TestFilterSourceStreamBase(std::move(upstream)), expected_input_size_(expected_input_size), consumed_all_input_(false) {} int FilterData(IOBuffer* output_buffer, int output_buffer_size, IOBuffer* input_buffer, int input_buffer_size, int* consumed_bytes, bool /*upstream_eof_reached*/) override { expected_input_size_ -= input_buffer_size; *consumed_bytes = input_buffer_size; EXPECT_LE(0, expected_input_size_); consumed_all_input_ = (expected_input_size_ == 0); return OK; } bool consumed_all_input() const { return consumed_all_input_; } private: // Expected remaining bytes to be received from |upstream|. int expected_input_size_; bool consumed_all_input_; DISALLOW_COPY_AND_ASSIGN(NoOutputSourceStream); }; // A FilterSourceStream return an error code in FilterData(). 
class ErrorFilterSourceStream : public FilterSourceStream { public: explicit ErrorFilterSourceStream(std::unique_ptr<SourceStream> upstream) : FilterSourceStream(SourceStream::TYPE_NONE, std::move(upstream)) {} int FilterData(IOBuffer* output_buffer, int output_buffer_size, IOBuffer* input_buffer, int input_buffer_size, int* consumed_bytes, bool /*upstream_eof_reached*/) override { return ERR_CONTENT_DECODING_FAILED; } std::string GetTypeAsString() const override { return ""; } private: DISALLOW_COPY_AND_ASSIGN(ErrorFilterSourceStream); }; } // namespace class FilterSourceStreamTest : public ::testing::TestWithParam<MockSourceStream::Mode> { protected: // If MockSourceStream::Mode is ASYNC, completes |num_reads| from // |mock_stream| and wait for |callback| to complete. If Mode is not ASYNC, // does nothing and returns |previous_result|. int CompleteReadIfAsync(int previous_result, TestCompletionCallback* callback, MockSourceStream* mock_stream, size_t num_reads) { if (GetParam() == MockSourceStream::ASYNC) { EXPECT_EQ(ERR_IO_PENDING, previous_result); while (num_reads > 0) { mock_stream->CompleteNextRead(); num_reads--; } return callback->WaitForResult(); } return previous_result; } }; INSTANTIATE_TEST_CASE_P(FilterSourceStreamTests, FilterSourceStreamTest, ::testing::Values(MockSourceStream::SYNC, MockSourceStream::ASYNC)); // Tests that a FilterSourceStream subclass (NeedsAllInputFilterSourceStream) // can return 0 bytes for FilterData()s when it has not consumed EOF from the // upstream. In this case, FilterSourceStream should continue reading from // upstream to complete filtering. TEST_P(FilterSourceStreamTest, FilterDataReturnNoBytesExceptLast) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input("hello, world!"); size_t read_size = 2; size_t num_reads = 0; // Add a sequence of small reads. for (size_t offset = 0; offset < input.length(); offset += read_size) { source->AddReadResult(input.data() + offset, std::min(read_size, input.length() - offset), OK, GetParam()); num_reads++; } source->AddReadResult(input.data(), 0, OK, GetParam()); // EOF num_reads++; MockSourceStream* mock_stream = source.get(); NeedsAllInputFilterSourceStream stream(std::move(source), input.length()); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); if (rv == ERR_IO_PENDING) rv = CompleteReadIfAsync(rv, &callback, mock_stream, num_reads); if (rv == OK) break; ASSERT_GT(rv, OK); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ(input, actual_output); } // Tests that FilterData() returns 0 byte read because the upstream gives an // EOF. TEST_P(FilterSourceStreamTest, FilterDataReturnNoByte) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input; source->AddReadResult(input.data(), 0, OK, GetParam()); MockSourceStream* mock_stream = source.get(); PassThroughFilterSourceStream stream(std::move(source)); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); TestCompletionCallback callback; int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); rv = CompleteReadIfAsync(rv, &callback, mock_stream, 1); EXPECT_EQ(OK, rv); } // Tests that FilterData() returns 0 byte filtered even though the upstream // produces data. 
TEST_P(FilterSourceStreamTest, FilterDataOutputNoData) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; size_t read_size = 2; size_t num_reads = 0; // Add a sequence of small reads. for (size_t offset = 0; offset < input.length(); offset += read_size) { source->AddReadResult(input.data() + offset, std::min(read_size, input.length() - offset), OK, GetParam()); num_reads++; } // Add a 0 byte read to signal EOF. source->AddReadResult(input.data() + input.length(), 0, OK, GetParam()); num_reads++; MockSourceStream* mock_stream = source.get(); NoOutputSourceStream stream(std::move(source), input.length()); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); TestCompletionCallback callback; int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); rv = CompleteReadIfAsync(rv, &callback, mock_stream, num_reads); EXPECT_EQ(OK, rv); EXPECT_TRUE(stream.consumed_all_input()); } // Tests that FilterData() returns non-zero bytes because the upstream // returns data. TEST_P(FilterSourceStreamTest, FilterDataReturnData) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; size_t read_size = 2; // Add a sequence of small reads. for (size_t offset = 0; offset < input.length(); offset += read_size) { source->AddReadResult(input.data() + offset, std::min(read_size, input.length() - offset), OK, GetParam()); } // Add a 0 byte read to signal EOF. source->AddReadResult(input.data() + input.length(), 0, OK, GetParam()); MockSourceStream* mock_stream = source.get(); PassThroughFilterSourceStream stream(std::move(source)); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/1); if (rv == OK) break; ASSERT_GE(static_cast<int>(read_size), rv); ASSERT_GT(rv, OK); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ(input, actual_output); } // Tests that FilterData() returns more data than what it consumed. TEST_P(FilterSourceStreamTest, FilterDataReturnMoreData) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; size_t read_size = 2; // Add a sequence of small reads. for (size_t offset = 0; offset < input.length(); offset += read_size) { source->AddReadResult(input.data() + offset, std::min(read_size, input.length() - offset), OK, GetParam()); } // Add a 0 byte read to signal EOF. source->AddReadResult(input.data() + input.length(), 0, OK, GetParam()); MockSourceStream* mock_stream = source.get(); int multiplier = 2; MultiplySourceStream stream(std::move(source), multiplier); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/1); if (rv == OK) break; ASSERT_GE(static_cast<int>(read_size) * multiplier, rv); ASSERT_GT(rv, OK); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ("hheelllloo,, wwoorrlldd!!", actual_output); } // Tests that FilterData() returns non-zero bytes and output buffer size is // smaller than the number of bytes read from the upstream. 
TEST_P(FilterSourceStreamTest, FilterDataOutputSpace) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; size_t read_size = 2; // Add a sequence of small reads. for (size_t offset = 0; offset < input.length(); offset += read_size) { source->AddReadResult(input.data() + offset, std::min(read_size, input.length() - offset), OK, GetParam()); } // Add a 0 byte read to signal EOF. source->AddReadResult(input.data() + input.length(), 0, OK, GetParam()); // Use an extremely small buffer size, so FilterData will need more output // space. scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kSmallBufferSize); MockSourceStream* mock_stream = source.get(); PassThroughFilterSourceStream stream(std::move(source)); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); if (rv == ERR_IO_PENDING) rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/1); if (rv == OK) break; ASSERT_GT(rv, OK); ASSERT_GE(kSmallBufferSize, static_cast<size_t>(rv)); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ(input, actual_output); } // Tests that FilterData() returns an error code, which is then surfaced as // the result of calling Read(). TEST_P(FilterSourceStreamTest, FilterDataReturnError) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input; source->AddReadResult(input.data(), 0, OK, GetParam()); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); MockSourceStream* mock_stream = source.get(); ErrorFilterSourceStream stream(std::move(source)); TestCompletionCallback callback; int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/1); EXPECT_EQ(ERR_CONTENT_DECODING_FAILED, rv); // Reading from |stream| again should return the same error. 
rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); EXPECT_EQ(ERR_CONTENT_DECODING_FAILED, rv); } TEST_P(FilterSourceStreamTest, FilterChaining) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; source->AddReadResult(input.data(), input.length(), OK, GetParam()); source->AddReadResult(input.data(), 0, OK, GetParam()); // EOF MockSourceStream* mock_stream = source.get(); std::unique_ptr<PassThroughFilterSourceStream> pass_through_source( new PassThroughFilterSourceStream(std::move(source))); pass_through_source->set_type_string("FIRST_PASS_THROUGH"); std::unique_ptr<NeedsAllInputFilterSourceStream> needs_all_input_source( new NeedsAllInputFilterSourceStream(std::move(pass_through_source), input.length())); needs_all_input_source->set_type_string("NEEDS_ALL"); std::unique_ptr<PassThroughFilterSourceStream> second_pass_through_source( new PassThroughFilterSourceStream(std::move(needs_all_input_source))); second_pass_through_source->set_type_string("SECOND_PASS_THROUGH"); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = second_pass_through_source->Read( output_buffer.get(), output_buffer->size(), callback.callback()); if (rv == ERR_IO_PENDING) rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/2); if (rv == OK) break; ASSERT_GT(rv, OK); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ(input, actual_output); // Type string (from left to right) should be the order of data flow. EXPECT_EQ("FIRST_PASS_THROUGH,NEEDS_ALL,SECOND_PASS_THROUGH", second_pass_through_source->Description()); } // Tests that FilterData() returns multiple times for a single MockStream // read, because there is not enough output space. TEST_P(FilterSourceStreamTest, OutputSpaceForOneRead) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; source->AddReadResult(input.data(), input.length(), OK, GetParam()); // Add a 0 byte read to signal EOF. source->AddReadResult(input.data() + input.length(), 0, OK, GetParam()); // Use an extremely small buffer size (1 byte), so FilterData will need more // output space. scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kSmallBufferSize); MockSourceStream* mock_stream = source.get(); PassThroughFilterSourceStream stream(std::move(source)); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); if (rv == ERR_IO_PENDING) rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/1); if (rv == OK) break; ASSERT_GT(rv, OK); ASSERT_GE(kSmallBufferSize, static_cast<size_t>(rv)); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ(input, actual_output); } // Tests that FilterData() returns multiple times for a single MockStream // read, because the filter returns one byte at a time. TEST_P(FilterSourceStreamTest, ThrottleSourceStream) { std::unique_ptr<MockSourceStream> source(new MockSourceStream); std::string input = "hello, world!"; source->AddReadResult(input.data(), input.length(), OK, GetParam()); // Add a 0 byte read to signal EOF. 
source->AddReadResult(input.data() + input.length(), 0, OK, GetParam()); scoped_refptr<IOBufferWithSize> output_buffer = new IOBufferWithSize(kDefaultBufferSize); MockSourceStream* mock_stream = source.get(); ThrottleSourceStream stream(std::move(source)); TestCompletionCallback callback; std::string actual_output; while (true) { int rv = stream.Read(output_buffer.get(), output_buffer->size(), callback.callback()); if (rv == ERR_IO_PENDING) rv = CompleteReadIfAsync(rv, &callback, mock_stream, /*num_reads=*/1); if (rv == OK) break; ASSERT_GT(rv, OK); // ThrottleSourceStream returns 1 byte at a time. ASSERT_GE(1u, static_cast<size_t>(rv)); actual_output.append(output_buffer->data(), rv); } EXPECT_EQ(input, actual_output); } } // namespace net
8,142
348
{"nom":"Saint-Pierre-lès-Franqueville","dpt":"Aisne","inscrits":45,"abs":4,"votants":41,"blancs":2,"nuls":3,"exp":36,"res":[{"panneau":"1","voix":18},{"panneau":"2","voix":18}]}
77
2,151
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "components/password_manager/core/browser/form_parsing/ios_form_parser.h" #include "base/strings/string16.h" #include "base/strings/string_number_conversions.h" #include "base/strings/utf_string_conversions.h" #include "components/autofill/core/common/form_data.h" #include "components/autofill/core/common/form_field_data.h" #include "components/autofill/core/common/password_form.h" #include "testing/gmock/include/gmock/gmock.h" #include "testing/gtest/include/gtest/gtest.h" #include "url/gurl.h" using autofill::FormData; using autofill::FormFieldData; using autofill::PasswordForm; using base::ASCIIToUTF16; using base::UintToString16; namespace password_manager { namespace { constexpr int kFieldNotFound = -1; struct ParseResultIndices { int username_index; int password_index; int new_password_index; int confirmation_password_index; bool IsEmpty() const { return username_index == kFieldNotFound && password_index == kFieldNotFound && new_password_index == kFieldNotFound && confirmation_password_index == kFieldNotFound; } }; struct TestFieldData { bool is_password; bool is_focusable = true; bool is_empty = true; const char* autocomplete_attribute = nullptr; // If |value| != nullptr then |is_empty| is ignored. // If |value| == nullptr and |is_empty| == false, then the exact field value // is assumed to be not important for a test and it will be set to some unique // value. const char* value = nullptr; const char* form_control_type = nullptr; }; struct FormParsingTestCase { const char* description; std::vector<TestFieldData> fields; ParseResultIndices fill_result; ParseResultIndices save_result; }; class IOSFormParserTest : public testing::Test { public: IOSFormParserTest() {} protected: void CheckTestData(const std::vector<FormParsingTestCase>& test_cases); }; FormData GetFormData(const FormParsingTestCase& test_form) { FormData form_data; form_data.action = GURL("http://example1.com"); form_data.origin = GURL("http://example2.com"); for (size_t i = 0; i < test_form.fields.size(); ++i) { const TestFieldData& field_data = test_form.fields[i]; FormFieldData field; // An exact id is not important, set id such that different fields have // different id. field.id = ASCIIToUTF16("field_id") + UintToString16(i); if (field_data.form_control_type) field.form_control_type = field_data.form_control_type; else field.form_control_type = field_data.is_password ? "password" : "text"; field.is_focusable = field_data.is_focusable; if (field_data.value) { field.value = ASCIIToUTF16(field_data.value); } else if (!field_data.is_empty) { // An exact value is not important, set a value with simple pattern, such // that different fields have different values. field.value = ASCIIToUTF16("field_value") + UintToString16(i); } if (field_data.autocomplete_attribute) field.autocomplete_attribute = field_data.autocomplete_attribute; form_data.fields.push_back(field); } return form_data; } // Check that field |fields[field_index]| has type |element_type| and value // |value|. |element| is the name of this element in parsing // ("username_element", "password_element" etc), that is used to show diagnostic // message. 
void CheckField(const std::vector<FormFieldData>& fields, int field_index, const char* element_type, const base::string16& element, const base::string16* value) { SCOPED_TRACE(testing::Message("CheckField, element_type = ") << element_type); base::string16 expected_element; base::string16 expected_value; if (field_index != kFieldNotFound) { const FormFieldData& field = fields[field_index]; expected_element = field.id; expected_value = field.value; } EXPECT_EQ(expected_element, element); if (value) EXPECT_EQ(expected_value, *value); } void CheckPasswordFormFields(const PasswordForm& password_form, const FormData& form_data, const ParseResultIndices& expected_fields) { CheckField(form_data.fields, expected_fields.username_index, "username", password_form.username_element, &password_form.username_value); CheckField(form_data.fields, expected_fields.password_index, "password", password_form.password_element, &password_form.password_value); CheckField(form_data.fields, expected_fields.new_password_index, "new_password", password_form.new_password_element, &password_form.new_password_value); CheckField(form_data.fields, expected_fields.confirmation_password_index, "confirmation_password", password_form.confirmation_password_element, nullptr); } void IOSFormParserTest::CheckTestData( const std::vector<FormParsingTestCase>& test_cases) { for (const FormParsingTestCase& test_case : test_cases) { const FormData form_data = GetFormData(test_case); for (auto mode : {FormParsingMode::FILLING, FormParsingMode::SAVING}) { SCOPED_TRACE( testing::Message("Test description: ") << test_case.description << ", parsing mode = " << (mode == FormParsingMode::FILLING ? "Filling" : "Saving")); std::unique_ptr<PasswordForm> parsed_form = ParseFormData(form_data, mode); const ParseResultIndices& expected_indices = mode == FormParsingMode::FILLING ? test_case.fill_result : test_case.save_result; if (expected_indices.IsEmpty() != (parsed_form == nullptr)) { if (expected_indices.IsEmpty()) EXPECT_FALSE(parsed_form) << "Expected no parsed results"; else EXPECT_TRUE(parsed_form) << "The form is expected to be parsed successfully"; } else if (!expected_indices.IsEmpty() && parsed_form) { EXPECT_TRUE(form_data.SameFormAs(parsed_form->form_data)); CheckPasswordFormFields(*parsed_form, form_data, expected_indices); } else { // Expected and parsed results are empty, everything is ok. 
} } } } TEST_F(IOSFormParserTest, NotPasswordForm) { std::vector<FormParsingTestCase> test_data = { { "No fields", {}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, { "No password fields", {{.is_password = false}, {.is_password = false}}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, }; CheckTestData(test_data); } TEST_F(IOSFormParserTest, SkipNotTextFields) { std::vector<FormParsingTestCase> test_data = { { "Select between username and password fields", {{.is_password = false, .is_empty = false}, {.form_control_type = "select", .is_empty = false}, {.is_password = true, .is_empty = false}}, {0, 2, kFieldNotFound, kFieldNotFound}, {0, 2, kFieldNotFound, kFieldNotFound}, }, }; CheckTestData(test_data); } TEST_F(IOSFormParserTest, OnlyPasswordFields) { std::vector<FormParsingTestCase> test_data = { { "1 password field", { {.is_password = true, .is_focusable = true, .is_empty = false}, }, {kFieldNotFound, 0, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, 0, kFieldNotFound, kFieldNotFound}, }, { "2 password fields, new and confirmation password", { {.is_password = true, .is_focusable = true, .value = "pw"}, {.is_password = true, .is_focusable = true, .value = "pw"}, }, {kFieldNotFound, kFieldNotFound, 0, 1}, {kFieldNotFound, kFieldNotFound, 0, 1}, }, { "2 password fields, current and new password", { {.is_password = true, .is_focusable = true, .value = "pw1"}, {.is_password = true, .is_focusable = true, .value = "pw2"}, }, {kFieldNotFound, 0, 1, kFieldNotFound}, {kFieldNotFound, 0, 1, kFieldNotFound}, }, { "3 password fields, current, new, confirm password", { {.is_password = true, .is_focusable = true, .value = "pw1"}, {.is_password = true, .is_focusable = true, .value = "pw2"}, {.is_password = true, .is_focusable = true, .value = "pw2"}, }, {kFieldNotFound, 0, 1, 2}, {kFieldNotFound, 0, 1, 2}, }, { "3 password fields with different values", { {.is_password = true, .is_focusable = true, .value = "pw1"}, {.is_password = true, .is_focusable = true, .value = "pw2"}, {.is_password = true, .is_focusable = true, .value = "pw3"}, }, {kFieldNotFound, 0, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, 0, kFieldNotFound, kFieldNotFound}, }, { "4 password fields, only the first 3 are considered", { {.is_password = true, .is_focusable = true, .value = "pw1"}, {.is_password = true, .is_focusable = true, .value = "pw2"}, {.is_password = true, .is_focusable = true, .value = "pw2"}, {.is_password = true, .is_focusable = true, .value = "pw3"}, }, {kFieldNotFound, 0, 1, 2}, {kFieldNotFound, 0, 1, 2}, }, }; CheckTestData(test_data); } TEST_F(IOSFormParserTest, TestFocusability) { std::vector<FormParsingTestCase> test_data = { { "non-focusable fields are considered when there are no focusable " "fields", { {.is_password = true, .is_focusable = false, .is_empty = false}, {.is_password = true, .is_focusable = false, .is_empty = false}, }, {kFieldNotFound, 0, 1, kFieldNotFound}, {kFieldNotFound, 0, 1, kFieldNotFound}, }, { "non-focusable should be skipped when there are focusable fields", { {.is_password = true, .is_focusable = false, .is_empty = false}, {.is_password = true, .is_focusable = true, .is_empty = false}, }, {kFieldNotFound, 1, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, 1, kFieldNotFound, kFieldNotFound}, }, { "non-focusable text fields before password", { {.is_password = false, .is_focusable = false, .is_empty = 
false}, {.is_password = false, .is_focusable = false, .is_empty = false}, {.is_password = true, .is_focusable = true, .is_empty = false}, }, {1, 2, kFieldNotFound, kFieldNotFound}, {1, 2, kFieldNotFound, kFieldNotFound}, }, { "focusable and non-focusable text fields before password", { {.is_password = false, .is_focusable = true, .is_empty = false}, {.is_password = false, .is_focusable = false, .is_empty = false}, {.is_password = true, .is_focusable = true, .is_empty = false}, }, {0, 2, kFieldNotFound, kFieldNotFound}, {0, 2, kFieldNotFound, kFieldNotFound}, }, }; CheckTestData(test_data); } TEST_F(IOSFormParserTest, TextAndPasswordFields) { std::vector<FormParsingTestCase> test_data = { { "Simple empty sign-in form", {{.is_password = false, .is_focusable = true, .is_empty = true}, {.is_password = true, .is_focusable = true, .is_empty = true}}, {0, 1, kFieldNotFound, kFieldNotFound}, // Form with empty fields on saving does not make any sense, so empty // parsing. {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, { "Simple sign-in form with filled data", {{.is_password = false, .is_focusable = true, .is_empty = false}, {.is_password = true, .is_focusable = true, .is_empty = false}}, {0, 1, kFieldNotFound, kFieldNotFound}, {0, 1, kFieldNotFound, kFieldNotFound}, }, { "Empty sign-in form with an extra text field", {{.is_password = false, .is_focusable = true, .is_empty = true}, {.is_password = false, .is_focusable = true, .is_empty = true}, {.is_password = true, .is_focusable = true, .is_empty = true}}, {1, 2, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, { "Non-empty sign-in form with an extra text field", {{.is_password = false, .is_focusable = true, .is_empty = false}, {.is_password = false, .is_focusable = true, .is_empty = true}, {.is_password = true, .is_focusable = true, .is_empty = false}}, {1, 2, kFieldNotFound, kFieldNotFound}, {0, 2, kFieldNotFound, kFieldNotFound}, }, { "Empty sign-in form with an extra invisible text field", {{.is_password = false, .is_focusable = true, .is_empty = true}, {.is_password = false, .is_focusable = false, .is_empty = true}, {.is_password = true, .is_focusable = true, .is_empty = true}}, {0, 2, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, { "Non-empty sign-in form with an extra invisible text field", {{.is_password = false, .is_focusable = true, .is_empty = false}, {.is_password = false, .is_focusable = false, .is_empty = false}, {.is_password = true, .is_focusable = true, .is_empty = false}}, {0, 2, kFieldNotFound, kFieldNotFound}, {0, 2, kFieldNotFound, kFieldNotFound}, }, { "Simple empty sign-in form with empty username", {{.is_password = false, .is_focusable = true, .is_empty = true}, {.is_password = true, .is_focusable = true, .is_empty = false}}, {0, 1, kFieldNotFound, kFieldNotFound}, // Form with empty username does not make sense, so username field // should not be found. 
{kFieldNotFound, 1, kFieldNotFound, kFieldNotFound}, }, }; CheckTestData(test_data); } TEST_F(IOSFormParserTest, TestAutocomplete) { std::vector<FormParsingTestCase> test_data = { { "All possible password autocomplete attributes and some fields " "without autocomplete", { {.is_password = false, .autocomplete_attribute = "username"}, {.is_password = false}, {.is_password = true}, {.is_password = true, .autocomplete_attribute = "current-password"}, {.is_password = true, .autocomplete_attribute = "new-password"}, {.is_password = true}, {.is_password = true, .autocomplete_attribute = "new-password"}, }, {0, 3, 4, 6}, {0, 3, 4, 6}, }, { "Non-password autocomplete attributes are skipped ", { {.is_password = false, .is_empty = false, .autocomplete_attribute = "email"}, { .is_password = false, .is_empty = false, }, { .is_password = true, .is_empty = false, }, {.is_password = true, .is_empty = false, .autocomplete_attribute = "password"}, }, {1, 2, 3, kFieldNotFound}, {1, 2, 3, kFieldNotFound}, }, { "Multiple autocomplete attributes for the same field", { {.is_password = false, .autocomplete_attribute = "email username"}, {.is_password = false}, {.is_password = true}, {.is_password = true, .autocomplete_attribute = "abc current-password xyz"}, }, {0, 3, kFieldNotFound, kFieldNotFound}, {0, 3, kFieldNotFound, kFieldNotFound}, }, { "Multiple username autocomplete attributes, fallback to base " "heuristics", { {.is_password = false, .autocomplete_attribute = "username"}, {.is_password = false, .autocomplete_attribute = "username"}, {.is_password = true}, {.is_password = true, .autocomplete_attribute = "current-password"}, }, {1, 2, 3, kFieldNotFound}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, }; CheckTestData(test_data); } TEST_F(IOSFormParserTest, SkippingFieldsWithCreditCardFields) { std::vector<FormParsingTestCase> test_data = { { "Simple form with all fields are credit card related", { {.is_password = false, .autocomplete_attribute = "cc-name"}, {.is_password = true, .autocomplete_attribute = "cc-any-string"}, }, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, kFieldNotFound, kFieldNotFound, kFieldNotFound}, }, { "Multiple autocomplete attributes for the same field", { // This field should be skipped. {.is_password = false, .autocomplete_attribute = "cc-name username"}, {.is_password = true, .is_empty = false}, }, {kFieldNotFound, 1, kFieldNotFound, kFieldNotFound}, {kFieldNotFound, 1, kFieldNotFound, kFieldNotFound}, }, }; CheckTestData(test_data); } } // namespace } // namespace password_manager
8,116
2,151
/* * Copyright (c) 2012 The Native Client Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ /* * Simple fault injection test. */ #include <stdio.h> #include <stdlib.h> #include <errno.h> #include "native_client/src/include/portability.h" #include "native_client/src/shared/platform/nacl_log.h" #include "native_client/src/trusted/fault_injection/fault_injection.h" int FunctionThatMightFail(size_t ix) { return (int) ix+1; } enum ErrorCode { PASS, FAIL, RETAKE_PREVIOUS_GRADE, GOTO_JAIL_DO_NOT_PASS_GO, }; enum ErrorCode SomeFunction(size_t ix) { switch (ix & 0x3) { case 0: return PASS; case 1: return FAIL; } return RETAKE_PREVIOUS_GRADE; } ssize_t fake_write(size_t ix) { return (ssize_t) ix; } int fake_fstat(size_t ix) { return (int) ix; } int main(int ac, char **av) { int opt; size_t ix; size_t limit = 10u; char *buffer; enum ErrorCode err; static enum ErrorCode expected[4] = { PASS, FAIL, RETAKE_PREVIOUS_GRADE, RETAKE_PREVIOUS_GRADE }; ssize_t write_result; NaClLogModuleInit(); while (-1 != (opt = getopt(ac, av, "l:v"))) { switch (opt) { case 'v': NaClLogIncrVerbosity(); break; case 'l': limit = strtoul(optarg, (char **) NULL, 0); break; default: fprintf(stderr, "Usage: nacl_fi_test [-v]\n"); return 1; } } NaClFaultInjectionModuleInit(); for (ix = 0; ix < limit; ++ix) { printf("%d\n", NACL_FI("test", FunctionThatMightFail(ix), -1)); buffer = NACL_FI("alloc", malloc(ix+1), NULL); if (NULL == buffer) { printf("allocation for %"NACL_PRIdS" bytes failed\n", ix+1); } else { free(buffer); buffer = NULL; } err = NACL_FI_VAL("ret", enum ErrorCode, SomeFunction(ix)); if (err != expected[ix & 0x3]) { printf("Unexpected return %d, expected %d\n", err, expected[ix & 0x3]); } if (-1 == NACL_FI_SYSCALL("fstat", fake_fstat(ix))) { printf("fstat failed, errno %d\n", errno); } if (-1 == (write_result = NACL_FI_TYPED_SYSCALL("write", ssize_t, fake_write(ix)))) { printf("write failed, errno %d\n", errno); } else if (write_result != (ssize_t) ix) { printf("unexpected write count %"NACL_PRIdS", expected %"NACL_PRIuS"\n", write_result, ix); } } return 0; }
1,221
30,023
"""Test the Steamist config flow.""" import asyncio from unittest.mock import patch import pytest from homeassistant import config_entries from homeassistant.components import dhcp from homeassistant.components.steamist.const import DOMAIN from homeassistant.const import CONF_DEVICE, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import ( RESULT_TYPE_ABORT, RESULT_TYPE_CREATE_ENTRY, RESULT_TYPE_FORM, ) from . import ( DEFAULT_ENTRY_DATA, DEVICE_30303_NOT_STEAMIST, DEVICE_HOSTNAME, DEVICE_IP_ADDRESS, DEVICE_MAC_ADDRESS, DEVICE_NAME, DISCOVERY_30303, FORMATTED_MAC_ADDRESS, MOCK_ASYNC_GET_STATUS_INACTIVE, _patch_discovery, _patch_status, ) from tests.common import MockConfigEntry MODULE = "homeassistant.components.steamist" DHCP_DISCOVERY = dhcp.DhcpServiceInfo( hostname=DEVICE_HOSTNAME, ip=DEVICE_IP_ADDRESS, macaddress=DEVICE_MAC_ADDRESS, ) async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["errors"] == {} with _patch_discovery(no_device=True), patch( "homeassistant.components.steamist.config_flow.Steamist.async_get_status" ), patch( "homeassistant.components.steamist.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { "host": "127.0.0.1", }, ) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_CREATE_ENTRY assert result2["title"] == "127.0.0.1" assert result2["data"] == { "host": "127.0.0.1", } assert len(mock_setup_entry.mock_calls) == 1 async def test_form_with_discovery(hass: HomeAssistant) -> None: """Test we can also discovery the device during manual setup.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["errors"] == {} with _patch_discovery(), patch( "homeassistant.components.steamist.config_flow.Steamist.async_get_status" ), patch( "homeassistant.components.steamist.async_setup_entry", return_value=True, ) as mock_setup_entry: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { "host": "127.0.0.1", }, ) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_CREATE_ENTRY assert result2["title"] == DEVICE_NAME assert result2["data"] == DEFAULT_ENTRY_DATA assert len(mock_setup_entry.mock_calls) == 1 async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.steamist.config_flow.Steamist.async_get_status", side_effect=asyncio.TimeoutError, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { "host": "127.0.0.1", }, ) assert result2["type"] == RESULT_TYPE_FORM assert result2["errors"] == {"base": "cannot_connect"} async def test_form_unknown_exception(hass: HomeAssistant) -> None: """Test we handle unknown exceptions.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) with patch( "homeassistant.components.steamist.config_flow.Steamist.async_get_status", side_effect=Exception, ): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { "host": "127.0.0.1", }, ) assert result2["type"] 
== RESULT_TYPE_FORM assert result2["errors"] == {"base": "unknown"} async def test_discovery(hass: HomeAssistant) -> None: """Test setting up discovery.""" with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" assert not result["errors"] result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_FORM assert result2["step_id"] == "pick_device" assert not result2["errors"] # test we can try again result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" assert not result["errors"] result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) await hass.async_block_till_done() assert result2["type"] == "form" assert result2["step_id"] == "pick_device" assert not result2["errors"] with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE), patch( f"{MODULE}.async_setup", return_value=True ) as mock_setup, patch( f"{MODULE}.async_setup_entry", return_value=True ) as mock_setup_entry: result3 = await hass.config_entries.flow.async_configure( result["flow_id"], {CONF_DEVICE: FORMATTED_MAC_ADDRESS}, ) await hass.async_block_till_done() assert result3["type"] == RESULT_TYPE_CREATE_ENTRY assert result3["title"] == DEVICE_NAME assert result3["data"] == DEFAULT_ENTRY_DATA mock_setup.assert_called_once() mock_setup_entry.assert_called_once() # ignore configured devices result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] == RESULT_TYPE_FORM assert result["step_id"] == "user" assert not result["errors"] with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_ABORT assert result2["reason"] == "no_devices_found" async def test_discovered_by_discovery_and_dhcp(hass: HomeAssistant) -> None: """Test we get the form with discovery and abort for dhcp source when we get both.""" with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data=DISCOVERY_30303, ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_FORM assert result["errors"] is None with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result2 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY, ) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_ABORT assert result2["reason"] == "already_in_progress" with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result3 = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=dhcp.DhcpServiceInfo( hostname="any", ip=DEVICE_IP_ADDRESS, macaddress="00:00:00:00:00:00", ), ) await hass.async_block_till_done() assert result3["type"] == RESULT_TYPE_ABORT assert result3["reason"] == "already_in_progress" async def test_discovered_by_discovery(hass: 
HomeAssistant) -> None: """Test we can setup when discovered from discovery.""" with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY}, data=DISCOVERY_30303, ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_FORM assert result["errors"] is None with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE), patch( f"{MODULE}.async_setup", return_value=True ) as mock_async_setup, patch( f"{MODULE}.async_setup_entry", return_value=True ) as mock_async_setup_entry: result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_CREATE_ENTRY assert result2["data"] == DEFAULT_ENTRY_DATA assert mock_async_setup.called assert mock_async_setup_entry.called async def test_discovered_by_dhcp(hass: HomeAssistant) -> None: """Test we can setup when discovered from dhcp.""" with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY, ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_FORM assert result["errors"] is None with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE), patch( f"{MODULE}.async_setup", return_value=True ) as mock_async_setup, patch( f"{MODULE}.async_setup_entry", return_value=True ) as mock_async_setup_entry: result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {}) await hass.async_block_till_done() assert result2["type"] == RESULT_TYPE_CREATE_ENTRY assert result2["data"] == DEFAULT_ENTRY_DATA assert mock_async_setup.called assert mock_async_setup_entry.called async def test_discovered_by_dhcp_discovery_fails(hass: HomeAssistant) -> None: """Test we can setup when discovered from dhcp but then we cannot get the device name.""" with _patch_discovery(no_device=True), _patch_status( MOCK_ASYNC_GET_STATUS_INACTIVE ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY, ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_ABORT assert result["reason"] == "cannot_connect" async def test_discovered_by_dhcp_discovery_finds_non_steamist_device( hass: HomeAssistant, ) -> None: """Test we can setup when discovered from dhcp but its not a steamist device.""" with _patch_discovery(device=DEVICE_30303_NOT_STEAMIST), _patch_status( MOCK_ASYNC_GET_STATUS_INACTIVE ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY, ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_ABORT assert result["reason"] == "not_steamist_device" @pytest.mark.parametrize( "source, data", [ (config_entries.SOURCE_DHCP, DHCP_DISCOVERY), (config_entries.SOURCE_INTEGRATION_DISCOVERY, DISCOVERY_30303), ], ) async def test_discovered_by_dhcp_or_discovery_adds_missing_unique_id( hass, source, data ): """Test we can setup when discovered from dhcp or discovery and add a missing unique id.""" config_entry = MockConfigEntry(domain=DOMAIN, data={CONF_HOST: DEVICE_IP_ADDRESS}) config_entry.add_to_hass(hass) with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE), patch( f"{MODULE}.async_setup", return_value=True ) as mock_setup, patch( f"{MODULE}.async_setup_entry", 
return_value=True ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": source}, data=data ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_ABORT assert result["reason"] == "already_configured" assert config_entry.unique_id == FORMATTED_MAC_ADDRESS assert mock_setup.called assert mock_setup_entry.called @pytest.mark.parametrize( "source, data", [ (config_entries.SOURCE_DHCP, DHCP_DISCOVERY), (config_entries.SOURCE_INTEGRATION_DISCOVERY, DISCOVERY_30303), ], ) async def test_discovered_by_dhcp_or_discovery_existing_unique_id_does_not_reload( hass, source, data ): """Test we can setup when discovered from dhcp or discovery and it does not reload.""" config_entry = MockConfigEntry( domain=DOMAIN, data=DEFAULT_ENTRY_DATA, unique_id=FORMATTED_MAC_ADDRESS ) config_entry.add_to_hass(hass) with _patch_discovery(), _patch_status(MOCK_ASYNC_GET_STATUS_INACTIVE), patch( f"{MODULE}.async_setup", return_value=True ) as mock_setup, patch( f"{MODULE}.async_setup_entry", return_value=True ) as mock_setup_entry: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": source}, data=data ) await hass.async_block_till_done() assert result["type"] == RESULT_TYPE_ABORT assert result["reason"] == "already_configured" assert not mock_setup.called assert not mock_setup_entry.called
6,063
17,085
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np from functools import partial from numpy import asarray from numpy.fft._pocketfft import _raw_fft, _raw_fftnd, _get_forward_norm, _get_backward_norm, _cook_nd_args def _fftc2c(a, n=None, axis=-1, norm=None, forward=None): a = asarray(a) if n is None: n = a.shape[axis] if forward: inv_norm = _get_forward_norm(n, norm) else: inv_norm = _get_backward_norm(n, norm) output = _raw_fft(a, n, axis, False, forward, inv_norm) return output def _fftr2c(a, n=None, axis=-1, norm=None, forward=None): a = asarray(a) if n is None: n = a.shape[axis] if forward: inv_norm = _get_forward_norm(n, norm) else: inv_norm = _get_backward_norm(n, norm) output = _raw_fft(a, n, axis, True, True, inv_norm) if not forward: output = output.conj() return output def _fftc2r(a, n=None, axis=-1, norm=None, forward=None): a = asarray(a) if n is None: n = (a.shape[axis] - 1) * 2 if forward: inv_norm = _get_forward_norm(n, norm) else: inv_norm = _get_backward_norm(n, norm) output = _raw_fft(a.conj() if forward else a, n, axis, True, False, inv_norm) return output def fft_c2c(x, axes, normalization, forward): f = partial(_fftc2c, forward=forward) y = _raw_fftnd(x, s=None, axes=axes, function=f, norm=normalization) return y def fft_c2c_backward(dy, axes, normalization, forward): f = partial(_fftc2c, forward=forward) dx = _raw_fftnd(dy, s=None, axes=axes, function=f, norm=normalization) return dx def fft_r2c(x, axes, normalization, forward, onesided): a = asarray(x) s, axes = _cook_nd_args(a, axes=axes) if onesided: a = _fftr2c(a, s[-1], axes[-1], normalization, forward) for ii in range(len(axes) - 1): a = _fftc2c(a, s[ii], axes[ii], normalization, forward) else: a = fft_c2c(x, axes, normalization, forward) return a def fft_r2c_backward(dy, x, axes, normalization, forward, onesided): a = dy if not onesided: a = fft_c2c_backward(a, axes, normalization, forward).real else: pad_widths = [(0, 0)] * a.ndim last_axis = axes[-1] if last_axis < 0: last_axis += a.ndim last_dim_size = a.shape[last_axis] pad_widths[last_axis] = (0, x.shape[last_axis] - last_dim_size) a = np.pad(a, pad_width=pad_widths) a = fft_c2c_backward(a, axes, normalization, forward).real return a def fft_c2r(x, axes, normalization, forward, last_dim_size): a = asarray(x) s, axes = _cook_nd_args(a, axes=axes, invreal=1) if last_dim_size is not None: s[-1] = last_dim_size for ii in range(len(axes) - 1): a = _fftc2c(a, s[ii], axes[ii], normalization, forward) a = _fftc2r(a, s[-1], axes[-1], normalization, forward) return a
1,548
4,054
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. #pragma once #include "status-callback.h" #include "peer-check.h" namespace config::sentinel { /** * Handles a checkConnectivity request by making an outgoing * ping request. When the ping finishes, fills an answer * into the parent request and send the answer back. **/ class CheckCompletionHandler : public StatusCallback { private: FRT_RPCRequest *_parentRequest; public: CheckCompletionHandler(FRT_RPCRequest *parentRequest); virtual ~CheckCompletionHandler(); void returnStatus(bool ok) override; }; }
187
641
// AT32UC3A0128 CPU configuration #ifndef __CPU_AT32UC3A0128_H__ #define __CPU_AT32UC3A0128_H__ #include "board.h" #include "stacks.h" #include "platform_ints.h" #include "sdramc.h" // Number of resources (0 if not available/not implemented) #define NUM_PIO 4 #define NUM_SPI 6 #define NUM_UART 2 #define NUM_TIMER 3 #define NUM_PWM 6 // PWM7 is on GPIO50 #define NUM_I2C 1 #define NUM_ADC 8 // Though ADC3 pin is the Ethernet IRQ #define NUM_CAN 0 #define ADC_BIT_RESOLUTION 10 #define CPU_FREQUENCY REQ_CPU_FREQ // PIO prefix ('0' for P0, P1, ... or 'A' for PA, PB, ...) #define PIO_PREFIX 'A' // Pins per port configuration: // #define PIO_PINS_PER_PORT (n) if each port has the same number of pins, or // #define PIO_PIN_ARRAY { n1, n2, ... } to define pins per port in an array // Use #define PIO_PINS_PER_PORT 0 if this isn't needed #define PIO_PIN_ARRAY { 31, 32, 32, 14 } #define AVR32_NUM_GPIO 110 // actually 109, but consider also PA31 #define RAM_SIZE 0x8000 #define INTERNAL_RAM1_FIRST_FREE end #define INTERNAL_RAM1_LAST_FREE ( RAM_SIZE - STACK_SIZE_TOTAL - 1 ) #define PLATFORM_CPU_CONSTANTS_INTS\ _C( INT_UART_RX ),\ _C( INT_TMR_MATCH ),\ _C( INT_GPIO_POSEDGE ),\ _C( INT_GPIO_NEGEDGE ), #endif // #ifndef __CPU_AT32UC3A0128_H__
688
1,755
<filename>Filters/Core/Testing/Cxx/TestDelaunay2DMeshes.cxx /*========================================================================= Program: Visualization Toolkit Module: TestDelaunay2DMeshes.cxx Copyright (c) <NAME>, <NAME>, <NAME> All rights reserved. See Copyright.txt or http://www.kitware.com/Copyright.htm for details. This software is distributed WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the above copyright notice for more information. =========================================================================*/ // Test meshes obtained with vtkDelaunay2D. #include "vtkCellArray.h" #include "vtkDelaunay2D.h" #include "vtkNew.h" #include "vtkPoints.h" #include "vtkPolyData.h" #include "vtkPolyDataReader.h" #include "vtkPolyDataWriter.h" #include "vtkSmartPointer.h" #include "vtkTestUtilities.h" #include "vtkTransform.h" #include "vtkTriangle.h" #include "vtkXMLPolyDataReader.h" #define VTK_FAILURE 1 bool CompareMeshes(vtkPolyData* p1, vtkPolyData* p2) { vtkIdType nbPoints1 = p1->GetNumberOfPoints(); vtkIdType nbPoints2 = p2->GetNumberOfPoints(); vtkIdType nbCells1 = p1->GetNumberOfCells(); vtkIdType nbCells2 = p2->GetNumberOfCells(); if (nbPoints1 != nbPoints2 || nbCells1 != nbCells2) { return false; } vtkCellArray* polys1 = p1->GetPolys(); vtkCellArray* polys2 = p2->GetPolys(); polys1->InitTraversal(); polys2->InitTraversal(); vtkIdType npts1; vtkIdType npts2; const vtkIdType* pts1; const vtkIdType* pts2; while (polys1->GetNextCell(npts1, pts1) && polys2->GetNextCell(npts2, pts2)) { if (npts1 != npts2) { return false; } for (vtkIdType i = 0; i < npts1; i++) { if (pts1[i] != pts2[i]) { return false; } } } return true; } void DumpMesh(vtkPolyData* mesh) { vtkNew<vtkPolyDataWriter> writer; writer->SetInputData(mesh); writer->WriteToOutputStringOn(); writer->Write(); std::cerr << writer->GetOutputString() << std::endl; } bool TriangulationTest(const std::string& filePath) { vtkNew<vtkPolyDataReader> inputReader; inputReader->SetFileName((filePath + "-Input.vtk").c_str()); inputReader->Update(); vtkNew<vtkDelaunay2D> delaunay2D; delaunay2D->SetInputConnection(inputReader->GetOutputPort()); delaunay2D->SetSourceConnection(inputReader->GetOutputPort()); delaunay2D->Update(); vtkPolyData* obtainedMesh = delaunay2D->GetOutput(); vtkNew<vtkPolyDataReader> outputReader; outputReader->SetFileName((filePath + "-Output.vtk").c_str()); outputReader->Update(); vtkPolyData* validMesh = outputReader->GetOutput(); if (!CompareMeshes(validMesh, obtainedMesh)) { std::cerr << "Obtained mesh is different from expected! " "Its VTK file follows:" << std::endl; DumpMesh(obtainedMesh); return false; } return true; } void GetTransform(vtkTransform* transform, vtkPoints* points) { double zaxis[3] = { 0., 0., 1. }; double pt0[3], pt1[3], pt2[3], normal[3]; points->GetPoint(0, pt0); points->GetPoint(1, pt1); points->GetPoint(2, pt2); vtkTriangle::ComputeNormal(pt0, pt1, pt2, normal); double rotationAxis[3], center[3], rotationAngle; double dotZAxis = vtkMath::Dot(normal, zaxis); if (fabs(1.0 - dotZAxis) < 1e-6) { // Aligned with z-axis rotationAxis[0] = 1.0; rotationAxis[1] = 0.0; rotationAxis[2] = 0.0; rotationAngle = 0.0; } else if (fabs(1.0 + dotZAxis) < 1e-6) { // Co-linear with z-axis, but reversed sense. 
// Aligned with z-axis rotationAxis[0] = 1.0; rotationAxis[1] = 0.0; rotationAxis[2] = 0.0; rotationAngle = 180.0; } else { // The general case vtkMath::Cross(normal, zaxis, rotationAxis); vtkMath::Normalize(rotationAxis); rotationAngle = vtkMath::DegreesFromRadians(acos(vtkMath::Dot(zaxis, normal))); } transform->PreMultiply(); transform->Identity(); transform->RotateWXYZ(rotationAngle, rotationAxis[0], rotationAxis[1], rotationAxis[2]); vtkTriangle::TriangleCenter(pt0, pt1, pt2, center); transform->Translate(-center[0], -center[1], -center[2]); } bool TessellationTestWithTransform(const std::string& dataPath) { std::string transformFilePath = dataPath + "-Transform.vtp"; std::string boundaryFilePath = dataPath + "-Input.vtp"; vtkNew<vtkXMLPolyDataReader> reader; reader->SetFileName(transformFilePath.c_str()); reader->Update(); vtkNew<vtkTransform> transform; vtkPoints* points = reader->GetOutput()->GetPoints(); GetTransform(transform, points); reader->SetFileName(boundaryFilePath.c_str()); reader->Update(); vtkPolyData* boundaryPoly = reader->GetOutput(); vtkNew<vtkDelaunay2D> del2D; del2D->SetInputData(boundaryPoly); del2D->SetSourceData(boundaryPoly); del2D->SetTolerance(0.0); del2D->SetAlpha(0.0); del2D->SetOffset(0); del2D->SetProjectionPlaneMode(VTK_SET_TRANSFORM_PLANE); del2D->SetTransform(transform); del2D->BoundingTriangulationOff(); del2D->Update(); vtkPolyData* outPoly = del2D->GetOutput(); if (outPoly->GetNumberOfCells() != boundaryPoly->GetNumberOfPoints() - 2) { std::cerr << "Bad triangulation for " << dataPath << "!" << std::endl; std::cerr << "Output has " << outPoly->GetNumberOfCells() << " cells instead of " << boundaryPoly->GetNumberOfPoints() - 2 << std::endl; return false; } return true; } int TestDelaunay2DMeshes(int argc, char* argv[]) { char* data_dir = vtkTestUtilities::GetDataRoot(argc, argv); if (!data_dir) { cerr << "Could not determine data directory." << endl; return VTK_FAILURE; } std::string dataPath = std::string(data_dir) + "/Data/Delaunay/"; delete[] data_dir; bool result = true; result &= TriangulationTest(dataPath + "DomainWithHole"); result &= TessellationTestWithTransform(dataPath + "Test1"); result &= TessellationTestWithTransform(dataPath + "Test2"); result &= TessellationTestWithTransform(dataPath + "Test3"); result &= TessellationTestWithTransform(dataPath + "Test4"); result &= TessellationTestWithTransform(dataPath + "Test5"); return result ? EXIT_SUCCESS : EXIT_FAILURE; }
2,408
1,455
/* * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.data.redis.connection.stream; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; /** * Value object representing a Stream Id with its offset. * * @author <NAME> * @see 2.2 */ public final class StreamOffset<K> { private final K key; private final ReadOffset offset; private StreamOffset(K key, ReadOffset offset) { Assert.notNull(key, "Key must not be null"); Assert.notNull(offset, "ReadOffset must not be null"); this.key = key; this.offset = offset; } /** * Create a {@link StreamOffset} given {@code key} and {@link ReadOffset}. * * @param stream the stream key. * @param readOffset the {@link ReadOffset} to use. * @return new instance of {@link StreamOffset}. */ public static <K> StreamOffset<K> create(K stream, ReadOffset readOffset) { return new StreamOffset<>(stream, readOffset); } /** * Create a {@link StreamOffset} given {@code key} starting at {@link ReadOffset#latest()}. * * @param stream the stream key. * @param <K> * @return new instance of {@link StreamOffset}. */ public static <K> StreamOffset<K> latest(K stream) { return new StreamOffset<>(stream, ReadOffset.latest()); } /** * Create a {@link StreamOffset} given {@code stream} starting at {@link ReadOffset#from(String) * ReadOffset#from("0-0")}. * * @param stream the stream key. * @param <K> * @return new instance of {@link StreamOffset}. */ public static <K> StreamOffset<K> fromStart(K stream) { return new StreamOffset<>(stream, ReadOffset.from("0-0")); } /** * Create a {@link StreamOffset} from the given {@link Record#getId() record id} as reference to create the * {@link ReadOffset#from(String)}. * * @param reference the record to be used as reference point. * @param <K> * @return new instance of {@link StreamOffset}. */ public static <K> StreamOffset<K> from(Record<K, ?> reference) { Assert.notNull(reference, "Reference record must not be null"); return create(reference.getStream(), ReadOffset.from(reference.getId())); } public K getKey() { return this.key; } public ReadOffset getOffset() { return this.offset; } @Override public String toString() { return "StreamOffset{" + "key=" + key + ", offset=" + offset + '}'; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; StreamOffset<?> that = (StreamOffset<?>) o; if (!ObjectUtils.nullSafeEquals(key, that.key)) { return false; } return ObjectUtils.nullSafeEquals(offset, that.offset); } @Override public int hashCode() { int result = ObjectUtils.nullSafeHashCode(key); result = 31 * result + ObjectUtils.nullSafeHashCode(offset); return result; } }
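A quick usage sketch of the factory methods defined above may help; it assumes the same package and imports as the class itself, and the stream key "payments" and the record id are illustrative placeholders rather than values implied by the class:

// Read everything from the start of the stream (ReadOffset "0-0").
StreamOffset<String> fromStart = StreamOffset.fromStart("payments");

// Only consume entries appended after the read begins.
StreamOffset<String> onlyNew = StreamOffset.latest("payments");

// Resume after an explicit id, e.g. one remembered from an earlier read.
StreamOffset<String> resumed = StreamOffset.create("payments", ReadOffset.from("1526919030474-55"));

System.out.println(resumed.getKey() + " @ " + resumed.getOffset());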
1,100
45,293
public enum Enum { A, B, C }
24
416
<reponame>ljz663/tencentcloud-sdk-java<gh_stars>100-1000 /* * Copyright (c) 2017-2018 THL A29 Limited, a Tencent company. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tencentcloudapi.dts.v20180330.models; import com.tencentcloudapi.common.AbstractModel; import com.google.gson.annotations.SerializedName; import com.google.gson.annotations.Expose; import java.util.HashMap; public class ActivateSubscribeRequest extends AbstractModel{ /** * Subscription instance ID. */ @SerializedName("SubscribeId") @Expose private String SubscribeId; /** * Database instance ID */ @SerializedName("InstanceId") @Expose private String InstanceId; /** * Data subscription type: 0 - full instance subscription, 1 - data subscription, 2 - structure subscription, 3 - data and structure subscription */ @SerializedName("SubscribeObjectType") @Expose private Long SubscribeObjectType; /** * Subscription object */ @SerializedName("Objects") @Expose private SubscribeObject Objects; /** * Subnet where the data subscription service resides. Defaults to the subnet of the database instance. */ @SerializedName("UniqSubnetId") @Expose private String UniqSubnetId; /** * Subscription service port; defaults to 7507 */ @SerializedName("Vport") @Expose private Long Vport; /** * Get Subscription instance ID. * @return SubscribeId Subscription instance ID. */ public String getSubscribeId() { return this.SubscribeId; } /** * Set Subscription instance ID. * @param SubscribeId Subscription instance ID. */ public void setSubscribeId(String SubscribeId) { this.SubscribeId = SubscribeId; } /** * Get Database instance ID * @return InstanceId Database instance ID */ public String getInstanceId() { return this.InstanceId; } /** * Set Database instance ID * @param InstanceId Database instance ID */ public void setInstanceId(String InstanceId) { this.InstanceId = InstanceId; } /** * Get Data subscription type: 0 - full instance subscription, 1 - data subscription, 2 - structure subscription, 3 - data and structure subscription * @return SubscribeObjectType Data subscription type: 0 - full instance subscription, 1 - data subscription, 2 - structure subscription, 3 - data and structure subscription */ public Long getSubscribeObjectType() { return this.SubscribeObjectType; } /** * Set Data subscription type: 0 - full instance subscription, 1 - data subscription, 2 - structure subscription, 3 - data and structure subscription * @param SubscribeObjectType Data subscription type: 0 - full instance subscription, 1 - data subscription, 2 - structure subscription, 3 - data and structure subscription */ public void setSubscribeObjectType(Long SubscribeObjectType) { this.SubscribeObjectType = SubscribeObjectType; } /** * Get Subscription object * @return Objects Subscription object */ public SubscribeObject getObjects() { return this.Objects; } /** * Set Subscription object * @param Objects Subscription object */ public void setObjects(SubscribeObject Objects) { this.Objects = Objects; } /** * Get Subnet where the data subscription service resides. Defaults to the subnet of the database instance. * @return UniqSubnetId Subnet where the data subscription service resides. Defaults to the subnet of the database instance. */ public String getUniqSubnetId() { return this.UniqSubnetId; } /** * Set Subnet where the data subscription service resides. Defaults to the subnet of the database instance. * @param UniqSubnetId Subnet where the data subscription service resides. Defaults to the subnet of the database instance. */ public void setUniqSubnetId(String UniqSubnetId) { this.UniqSubnetId = UniqSubnetId; } /** * Get Subscription service port; defaults to 7507 * @return Vport Subscription service port; defaults to 7507 */ public Long getVport() { return this.Vport; } /** * Set Subscription service port; defaults to 7507 * @param Vport Subscription service port; defaults to 7507 */ public void setVport(Long Vport) { this.Vport = Vport; } public ActivateSubscribeRequest() { } /** * NOTE: Any ambiguous key set via .set("AnyKey", "value") will be a shallow copy, * and any explicit key, i.e. Foo, set via .setFoo("value") will be a deep copy.
*/ public ActivateSubscribeRequest(ActivateSubscribeRequest source) { if (source.SubscribeId != null) { this.SubscribeId = new String(source.SubscribeId); } if (source.InstanceId != null) { this.InstanceId = new String(source.InstanceId); } if (source.SubscribeObjectType != null) { this.SubscribeObjectType = new Long(source.SubscribeObjectType); } if (source.Objects != null) { this.Objects = new SubscribeObject(source.Objects); } if (source.UniqSubnetId != null) { this.UniqSubnetId = new String(source.UniqSubnetId); } if (source.Vport != null) { this.Vport = new Long(source.Vport); } } /** * Internal implementation, normal users should not use it. */ public void toMap(HashMap<String, String> map, String prefix) { this.setParamSimple(map, prefix + "SubscribeId", this.SubscribeId); this.setParamSimple(map, prefix + "InstanceId", this.InstanceId); this.setParamSimple(map, prefix + "SubscribeObjectType", this.SubscribeObjectType); this.setParamObj(map, prefix + "Objects.", this.Objects); this.setParamSimple(map, prefix + "UniqSubnetId", this.UniqSubnetId); this.setParamSimple(map, prefix + "Vport", this.Vport); } }
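As a rough usage sketch of the request object above: the identifiers below are placeholders, and the DTS client class that would eventually consume the request is part of the wider SDK, not this file.

ActivateSubscribeRequest req = new ActivateSubscribeRequest();
req.setSubscribeId("subs-xxxxxxxx");   // placeholder subscription instance ID
req.setInstanceId("cdb-xxxxxxxx");     // placeholder database instance ID
req.setSubscribeObjectType(0L);        // 0 = full instance subscription
req.setVport(7507L);                   // default port per the field comment

// The copy constructor above produces an independent copy of the request.
ActivateSubscribeRequest copy = new ActivateSubscribeRequest(req);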
3,009
14,668
<reponame>zealoussnow/chromium // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "ash/public/cpp/clipboard_image_model_factory.h" namespace ash { namespace { ClipboardImageModelFactory* g_instance = nullptr; } // namespace ClipboardImageModelFactory::ClipboardImageModelFactory() { DCHECK_EQ(nullptr, g_instance); g_instance = this; } ClipboardImageModelFactory::~ClipboardImageModelFactory() { DCHECK_EQ(g_instance, this); g_instance = nullptr; } // static ClipboardImageModelFactory* ClipboardImageModelFactory::Get() { return g_instance; } } // namespace ash
232
364
<gh_stars>100-1000 package ca.uhn.fhir.jpa.search.reindex; /*- * #%L * HAPI FHIR Storage api * %% * Copyright (C) 2014 - 2021 Smile CDR, Inc. * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.util.concurrent.RejectedExecutionHandler; import java.util.concurrent.ThreadPoolExecutor; /** * A handler for rejected tasks that will have the caller block until space is available. * This was stolen from old Hibernate Search (5.X.X), as it has been removed in HS6. We can probably come up with a better solution though. */ // TODO KHS consolidate with the other BlockPolicy class; this looks like a duplicate of it public class BlockPolicy implements RejectedExecutionHandler { private static final Logger ourLog = LoggerFactory.getLogger(BlockPolicy.class); /** * Puts the Runnable into the blocking queue, effectively blocking the delegating thread until space is available. * * @param r the runnable task requested to be executed * @param e the executor attempting to execute this task */ @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor e) { try { e.getQueue().put(r); } catch (InterruptedException e1) { ourLog.error("Interrupted exception for task: {}", r, e1); Thread.currentThread().interrupt(); } } }
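For context, a handler like this is plugged into a JDK ThreadPoolExecutor as its RejectedExecutionHandler; the pool sizes and queue capacity below are arbitrary illustration values, not settings taken from HAPI FHIR.

ThreadPoolExecutor executor = new ThreadPoolExecutor(
    2, 2,                          // core and max pool size (illustrative)
    0L, java.util.concurrent.TimeUnit.MILLISECONDS,
    new java.util.concurrent.ArrayBlockingQueue<Runnable>(10), // bounded queue, so saturation is possible
    new BlockPolicy());            // caller blocks instead of seeing RejectedExecutionException

// Submitting more work than the queue can hold now blocks the submitting
// thread until a worker frees a slot.
executor.execute(() -> System.out.println("task"));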
550
777
<reponame>Sun-Joong/aifh /* * Artificial Intelligence for Humans * Volume 2: Nature Inspired Algorithms * Java Version * http://www.aifh.org * http://www.jeffheaton.com * * Code repository: * https://github.com/jeffheaton/aifh * * Copyright 2014 by <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * For more information on Heaton Research copyrights, licenses * and trademarks visit: * http://www.heatonresearch.com/copyright */ package com.heatonresearch.aifh.examples.capstone.model.milestone1; /** * Stats that were collected about the titanic passengers to help with normalization, interpolation * and modeling. */ public class TitanicStats { /** * Passengers with the title "master", mean age. */ private final CalcMean meanMaster = new CalcMean(); /** * Passengers with the title "mr", mean age. */ private final CalcMean meanMr = new CalcMean(); /** * Passengers with the title "miss", mean age. */ private final CalcMean meanMiss = new CalcMean(); /** * Passengers with the title "mrs", mean age. */ private final CalcMean meanMrs = new CalcMean(); /** * Passengers with a military title, mean age. */ private final CalcMean meanMilitary = new CalcMean(); /** * Passengers with a nobility title, mean age. */ private final CalcMean meanNobility = new CalcMean(); /** * Passengers with the title "dr". */ private final CalcMean meanDr = new CalcMean(); /** * Passengers with the title "rev". */ private final CalcMean meanClergy = new CalcMean(); /** * Total passengers. */ private final CalcMean meanTotal = new CalcMean(); /** * Total male passengers. */ private final CalcMean meanMale = new CalcMean(); /** * Total female passengers. */ private final CalcMean meanFemale = new CalcMean(); /** * Passengers in 1st class, average fare. */ private final CalcMean meanFare1 = new CalcMean(); /** * Passengers in 2st class, average fare. */ private final CalcMean meanFare2 = new CalcMean(); /** * Passengers in 3rd class, average fare. */ private final CalcMean meanFare3 = new CalcMean(); /** * Survival stats for passengers with a title of "master". */ private final CalcSurvival survivalMaster = new CalcSurvival(); /** * Survival stats for passengers with a title of "mr". */ private final CalcSurvival survivalMr = new CalcSurvival(); /** * Survival stats for passengers with a title of "miss". */ private final CalcSurvival survivalMiss = new CalcSurvival(); /** * Survival stats for passengers with a title of "mrs". */ private final CalcSurvival survivalMrs = new CalcSurvival(); /** * Survival stats for passengers with a military title. */ private final CalcSurvival survivalMilitary = new CalcSurvival(); /** * Survival stats for passengers with a nobility title. */ private final CalcSurvival survivalNobility = new CalcSurvival(); /** * Survival stats for passengers with a title of "dr". */ private final CalcSurvival survivalDr = new CalcSurvival(); /** * Survival stats for passengers with a title of "rev". */ private final CalcSurvival survivalClergy = new CalcSurvival(); /** * Survival stats for all passengers. 
*/ private final CalcSurvival survivalTotal = new CalcSurvival(); /** * Survival stats for passengers that embarked from Southampton, England. */ private final CalcSurvival embarkedS = new CalcSurvival(); /** * Survival stats for passengers that embarked from Cherbourg, France. */ private final CalcSurvival embarkedC = new CalcSurvival(); /** * Survival stats for passengers that embarked from Queenstown, England. */ private final CalcSurvival embarkedQ = new CalcSurvival(); /** * Histogram of embark locations. */ private final CalcHistogram embarkedHisto = new CalcHistogram(); /** * @return Passengers with the title "master", mean age. */ public CalcMean getMeanMaster() { return meanMaster; } /** * @return Passengers with the title "mr", mean age. */ public CalcMean getMeanMr() { return meanMr; } /** * @return Passengers with the title "miss", mean age. */ public CalcMean getMeanMiss() { return meanMiss; } /** * @return Passengers with the title "mrs", mean age. */ public CalcMean getMeanMrs() { return meanMrs; } /** * @return Passengers with a military title, mean age. */ public CalcMean getMeanMilitary() { return meanMilitary; } /** * @return Passengers with a noble title, mean age. */ public CalcMean getMeanNobility() { return meanNobility; } /** * @return Passengers with the title "dr", mean age. */ public CalcMean getMeanDr() { return meanDr; } /** * @return Passengers with the title "rev", mean age. */ public CalcMean getMeanClergy() { return meanClergy; } /** * @return Mean age for all passengers. */ public CalcMean getMeanTotal() { return meanTotal; } /** * @return Survival stats for passengers with a title of "master". */ public CalcSurvival getSurvivalMaster() { return survivalMaster; } /** * @return Survival stats for passengers with a title of "mr". */ public CalcSurvival getSurvivalMr() { return survivalMr; } /** * @return Survival stats for passengers with a title of "miss". */ public CalcSurvival getSurvivalMiss() { return survivalMiss; } /** * @return Survival stats for passengers with a title of "mrs". */ public CalcSurvival getSurvivalMrs() { return survivalMrs; } /** * @return Survival stats for passengers with a military title. */ public CalcSurvival getSurvivalMilitary() { return survivalMilitary; } /** * @return Survival stats for passengers with a noble title. */ public CalcSurvival getSurvivalNobility() { return survivalNobility; } /** * @return Survival stats for passengers with a title of "dr". */ public CalcSurvival getSurvivalDr() { return survivalDr; } /** * @return Survival stats for passengers with a title of "clergy". */ public CalcSurvival getSurvivalClergy() { return survivalClergy; } /** * @return Survival stats on the total number of passengers. */ public CalcSurvival getSurvivalTotal() { return survivalTotal; } /** * @return Survival stats for passengers that embarked from Southampton, England. */ public CalcSurvival getEmbarkedS() { return embarkedS; } /** * @return Survival stats for passengers that embarked from Cherbourg, France. */ public CalcSurvival getEmbarkedC() { return embarkedC; } /** * @return Survival stats for passengers that embarked from Queenstown, England. */ public CalcSurvival getEmbarkedQ() { return embarkedQ; } /** * @return Histogram of embark locations. */ public CalcHistogram getEmbarkedHisto() { return embarkedHisto; } /** * @return Mean age for male passengers. */ public CalcMean getMeanMale() { return meanMale; } /** * @return Mean age for female passengers. 
*/ public CalcMean getMeanFemale() { return meanFemale; } /** * @return Mean fare for first class. */ public CalcMean getMeanFare1() { return meanFare1; } /** * @return Mean fare for second class. */ public CalcMean getMeanFare2() { return meanFare2; } /** * @return Mean fare for third class. */ public CalcMean getMeanFare3() { return meanFare3; } /** * Dump all stats to stdout. */ public void dump() { System.out.println("Master: Mean Age: " + meanMaster.calculate() + " " + survivalMaster.toString()); System.out.println("Mr.: Mean Age: " + meanMr.calculate() + " " + survivalMr.toString()); System.out.println("Miss.: Mean Age: " + meanMiss.calculate() + " " + survivalMiss.toString()); System.out.println("Mrs.: Mean Age: " + meanMrs.calculate() + " " + survivalMrs.toString()); System.out.println("Military: Mean Age: " + meanMilitary.calculate() + " " + survivalMilitary.toString()); System.out.println("Clergy: Mean Age: " + meanClergy.calculate() + " " + survivalClergy.toString()); System.out.println("Nobility: Mean Age: " + meanNobility.calculate() + " " + survivalNobility.toString()); System.out.println("Dr: Mean Age: " + meanDr.calculate() + " " + survivalDr.toString()); System.out.println("Total known survival: Mean Age: " + meanTotal.calculate() + " " + survivalTotal.toString()); System.out.println(); System.out.println("Embarked Queenstown: " + embarkedQ.toString()); System.out.println("Embarked Southampton: " + embarkedS.toString()); System.out.println("Embarked Cherbourg: " + embarkedC.toString()); System.out.println("Most common embarked: " + this.embarkedHisto.max()); System.out.println(); System.out.println("Mean Age Male: " + this.meanMale.calculate()); System.out.println("Mean Age Female: " + this.meanFemale.calculate()); System.out.println(); System.out.println("Mean Fare 1st Class: " + this.meanFare1.calculate()); System.out.println("Mean Fare 2nd Class: " + this.meanFare2.calculate()); System.out.println("Mean Fare 3rd Class: " + this.meanFare3.calculate()); } }
3,961
5,169
<reponame>morizotter/Specs { "name": "QBFlatButton", "version": "1.1", "summary": "Flat-Style Button", "homepage": "https://github.com/questbeat/QBFlatButton", "license": "MIT", "authors": { "questbeat": "<EMAIL>" }, "platforms": { "ios": "6.0" }, "source": { "git": "https://github.com/questbeat/QBFlatButton.git", "tag": "v1.1" }, "source_files": "Pod/Classes/*.{h,m}", "requires_arc": true }
196
401
/* * Hedgewars, a free turn based strategy game * Copyright (c) 2004-2019 <NAME> <<EMAIL>> * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; version 2 of the License * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ /** * @file * @brief KeyMap class definition */ #ifndef HEDGEWARS_KEYMAP_H #define HEDGEWARS_KEYMAP_H #include <QFile> #include <QHash> #include "SDL.h" class KeyMap { public: /** * @brief Returns reference to the <i>singleton</i> instance of this class. * * @see <a href="https://en.wikipedia.org/wiki/Singleton_pattern">singleton pattern</a> * * @return reference to the instance. */ static KeyMap & instance(); SDL_Scancode getScancodeFromKeyname(QString keyname); QString getKeynameFromScancode(int scancode); QString getKeynameFromScancodeConverted(int scancode); QString getKeynameFromKeycode(int keycode); private: // TODO: Optimize data structures QHash<SDL_Scancode, QString> mapOfKeynames; QHash<QString, SDL_Scancode> mapOfScancodes; bool getKeyMap(); bool keyMapGenerated = false; }; #endif // HEDGEWARS_KEYMAP_H
638
679
<gh_stars>100-1000 /************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ import com.sun.star.uno.UnoRuntime; import com.sun.star.accessibility.XAccessible; import com.sun.star.accessibility.XAccessibleContext; import com.sun.star.accessibility.XAccessibleSelection; import com.sun.star.lang.IndexOutOfBoundsException; import javax.swing.*; import java.awt.*; import java.util.Vector; import java.awt.event.ActionListener; import java.awt.event.ActionEvent; class AccessibleSelectionHandler extends NodeHandler { public NodeHandler createHandler( XAccessibleContext xContext ) { XAccessibleSelection xSelection = (XAccessibleSelection) UnoRuntime.queryInterface( XAccessibleSelection.class, xContext); return (xSelection == null) ? null : new AccessibleSelectionHandler(xSelection); } public AccessibleSelectionHandler() { } public AccessibleSelectionHandler( XAccessibleSelection xSelection ) { if (xSelection != null) maChildList.setSize( 2 ); } public AccessibleTreeNode createChild( AccessibleTreeNode aParent, int nIndex ) { AccessibleTreeNode aChild = null; if( aParent instanceof AccTreeNode ) { XAccessibleSelection xSelection = ((AccTreeNode)aParent).getSelection(); if( xSelection != null ) { switch( nIndex ) { case 0: aChild = new StringNode( "getSelectedAccessibleChildCount: " + xSelection.getSelectedAccessibleChildCount(), aParent ); break; case 1: { VectorNode aVNode = new VectorNode( "Selected Children", aParent); int nSelected = 0; int nCount = ((AccTreeNode)aParent).getContext(). getAccessibleChildCount(); try { for( int i = 0; i < nCount; i++ ) { try { if( xSelection.isAccessibleChildSelected( i ) ) { XAccessible xSelChild = xSelection. getSelectedAccessibleChild(nSelected); XAccessible xNChild = ((AccTreeNode)aParent). getContext().getAccessibleChild( i ); aVNode.addChild( new StringNode( i + ": " + xNChild.getAccessibleContext(). getAccessibleDescription() + " (" + (xSelChild.equals(xNChild) ? "OK" : "XXX") + ")", aParent ) ); } } catch (com.sun.star.lang.DisposedException e) { aVNode.addChild( new StringNode( i + ": caught DisposedException while creating", aParent )); } } aChild = aVNode; } catch( IndexOutOfBoundsException e ) { aChild = new StringNode( "IndexOutOfBounds", aParent ); } } break; default: aChild = new StringNode( "ERROR", aParent ); break; } } } return aChild; } public String[] getActions (AccessibleTreeNode aNode) { if( aNode instanceof AccTreeNode ) { XAccessibleSelection xSelection = ((AccTreeNode)aNode).getSelection(); if( xSelection != null ) { return new String[] { "Select..." }; } } return new String[0]; } public void performAction (AccessibleTreeNode aNode, int nIndex) { new SelectionDialog( (AccTreeNode)aNode ).show(); } }
3,145
335
<gh_stars>100-1000 { "word": "Phrase", "definitions": [ "Put into a particular form of words.", "Divide (music) into phrases in a particular way, especially in performance." ], "parts-of-speech": "Verb" }
94
595
<filename>src/sdk_src/platform_config.h<gh_stars>100-1000 #ifndef __PLATFORM_CONFIG_H_ #define __PLATFORM_CONFIG_H_ #endif
56
746
package jp.mixi.assignment.actionbar; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; public class Actionbar2Activity extends AppCompatActivity { @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_actionbar2); } }
122
5,169
<gh_stars>1000+ { "name": "CECManager", "version": "0.9.9.1.1", "summary": "Gerenciamento de experiência de usuário", "homepage": "http://cinnecta.com/", "authors": { "<NAME>": "<EMAIL>" }, "license": { "type": "Copyright", "text": "Copyright Cinnecta do Brasil LTDA. All Rights Reserved." }, "platforms": { "ios": "9.0" }, "source": { "git": "https://github.com/lucasbut/cecmanager-pods.git", "branch": "master", "tag": "v0.9.9.1.1" }, "ios": { "vendored_frameworks": "CECManager.framework" } }
257
348
{"nom":"Villers-aux-Vents","circ":"1ère circonscription","dpt":"Meuse","inscrits":110,"abs":39,"votants":71,"blancs":5,"nuls":1,"exp":65,"res":[{"nuance":"UDI","nom":"<NAME>","voix":40},{"nuance":"REM","nom":"<NAME>","voix":25}]}
92
2,217
#!/usr/bin/env python from __future__ import print_function import warnings import sys def main(argv): if len(argv) != 2: exit(64) with open(argv[1]) as fp: contents = fp.read() exitcode = 0 syntax_err = None with warnings.catch_warnings(record=True) as wc: try: compile(contents, argv[1], "exec", 0, 1) except SyntaxError as exc: syntax_err = exc # Output any warnings (caught during `compile`). # This could/should maybe only handle SyntaxWarnings? for wm in wc: print( "%s:%s: W: %s (%s)" % (wm.filename, wm.lineno, wm.message, wm.category.__name__) ) exitcode |= 2 # Output any SyntaxError. if syntax_err: print( "%s:%s:%s: E: %s" % ( syntax_err.filename, syntax_err.lineno, syntax_err.offset, syntax_err.msg, ) ) exitcode |= 1 return exitcode if __name__ == "__main__": sys.exit(main(sys.argv))
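# A minimal usage sketch for the syntax-check script above; the helper name and
# the throwaway file created here are illustrative assumptions, not part of the
# original script. It feeds a deliberately broken file to the script's main().
import os
import tempfile

def demo_syntax_checker(main):
    # `main` is the function defined in the checker script above.
    with tempfile.NamedTemporaryFile("w", suffix=".py", delete=False) as fp:
        fp.write("def broken(:\n    pass\n")  # intentional SyntaxError
        path = fp.name
    try:
        # argv[0] is just a program name; argv[1] is the file to check.
        exit_code = main(["checker", path])
        print("exit code:", exit_code)  # expected: 1 (SyntaxError was reported)
    finally:
        os.remove(path)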
567
675
<reponame>bigplayszn/nCine<filename>tests/apptest_clones.h #ifndef CLASS_MYEVENTHANDLER #define CLASS_MYEVENTHANDLER #include <ncine/IAppEventHandler.h> #include <ncine/IInputEventHandler.h> #include <ncine/TimeStamp.h> #include <nctl/UniquePtr.h> namespace ncine { class AppConfiguration; class Texture; class Sprite; class AnimatedSprite; class MeshSprite; class Font; class TextNode; class ParticleSystem; } namespace nc = ncine; /// My nCine event handler class MyEventHandler : public nc::IAppEventHandler, public nc::IInputEventHandler { public: void onPreInit(nc::AppConfiguration &config) override; void onInit() override; void onFrameStart() override; void onKeyReleased(const nc::KeyboardEvent &event) override; private: static const unsigned int NumParticles = 50; nctl::UniquePtr<nc::Texture> spriteTexture_; nctl::UniquePtr<nc::Sprite> sprite_; nctl::UniquePtr<nc::Sprite> clonedSprite_; nctl::UniquePtr<nc::Texture> animSpriteTexture_; nctl::UniquePtr<nc::AnimatedSprite> animSprite_; nctl::UniquePtr<nc::AnimatedSprite> clonedAnimSprite_; nctl::UniquePtr<nc::Texture> meshSpriteTexture_; nctl::UniquePtr<nc::MeshSprite> meshSprite_; nctl::UniquePtr<nc::MeshSprite> clonedMeshSprite_; nctl::UniquePtr<nc::Font> font_; nctl::UniquePtr<nc::TextNode> textNode_; nctl::UniquePtr<nc::TextNode> clonedTextNode_; nctl::UniquePtr<nc::Texture> particleTexture_; nctl::UniquePtr<nc::ParticleSystem> particleSystem_; nctl::UniquePtr<nc::ParticleSystem> clonedParticleSystem_; nc::TimeStamp lastEmissionTime_; }; #endif
574
381
<reponame>joyrun/ActivityRouter
package router.activity;

import com.grouter.compiler.ActivityModel;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ActivitySourceFileParserHelper {

    /**
     * @return whether the file carries the @RouterActivity annotation
     */
    public static boolean hasRouterActivityAnnotationQuick(File file) {
        // Return false right away if this is not a Java or Kotlin file
        if (!file.getName().endsWith(".java") && !file.getName().endsWith(".kt")) {
            return false;
        }
        boolean hasRouterActivity = false;
        try {
            FileReader fr = new FileReader(file);
            BufferedReader bf = new BufferedReader(fr);
            String str;
            // Read the file line by line
            while ((str = bf.readLine()) != null) {
                str = str.trim();
                if (str.startsWith("@RouterActivity")) {
                    hasRouterActivity = true;
                    break;
                }
            }
            bf.close();
            fr.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        return hasRouterActivity;
    }

    private static ActivityKotlinSourceFileParser routerKotlinSourceFileParser = new ActivityKotlinSourceFileParser();
    private static ActivityJavaSourceFileParser routerJavaSourceFileParser = new ActivityJavaSourceFileParser();

    public static List<ActivityModel> parse(List<File> files) {
        List<ActivityModel> typeModels = new ArrayList<>();
        for (File file : files) {
            parse(typeModels, file);
        }
        Collections.sort(typeModels);
        return typeModels;
    }

    public static void parse(List<ActivityModel> typeModels, File file) {
        if (file.isFile()) {
            if (!hasRouterActivityAnnotationQuick(file)) {
                return;
            }
            int handleCount;
            if (file.getName().endsWith(".kt")) {
                handleCount = routerKotlinSourceFileParser.parse(typeModels, file);
            } else {
                handleCount = routerJavaSourceFileParser.parse(typeModels, file);
            }
            if (handleCount > 0) {
                System.out.println("GRouter fix: " + file.getAbsolutePath());
            }
        } else {
            File[] files = file.listFiles();
            if (files != null) {
                for (File item : files) {
                    try {
                        parse(typeModels, item);
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}
1,352
570
package com.hx.curtain.drawer.nineoldandroids.view; import android.view.View; import static com.hx.curtain.drawer.nineoldandroids.view.animation.AnimatorProxy.NEEDS_PROXY; import static com.hx.curtain.drawer.nineoldandroids.view.animation.AnimatorProxy.wrap; public final class ViewHelper { private ViewHelper() {} public static float getAlpha(View view) { return NEEDS_PROXY ? wrap(view).getAlpha() : Honeycomb.getAlpha(view); } public static void setAlpha(View view, float alpha) { if (NEEDS_PROXY) { wrap(view).setAlpha(alpha); } else { Honeycomb.setAlpha(view, alpha); } } public static float getPivotX(View view) { return NEEDS_PROXY ? wrap(view).getPivotX() : Honeycomb.getPivotX(view); } public static void setPivotX(View view, float pivotX) { if (NEEDS_PROXY) { wrap(view).setPivotX(pivotX); } else { Honeycomb.setPivotX(view, pivotX); } } public static float getPivotY(View view) { return NEEDS_PROXY ? wrap(view).getPivotY() : Honeycomb.getPivotY(view); } public static void setPivotY(View view, float pivotY) { if (NEEDS_PROXY) { wrap(view).setPivotY(pivotY); } else { Honeycomb.setPivotY(view, pivotY); } } public static float getRotation(View view) { return NEEDS_PROXY ? wrap(view).getRotation() : Honeycomb.getRotation(view); } public static void setRotation(View view, float rotation) { if (NEEDS_PROXY) { wrap(view).setRotation(rotation); } else { Honeycomb.setRotation(view, rotation); } } public static float getRotationX(View view) { return NEEDS_PROXY ? wrap(view).getRotationX() : Honeycomb.getRotationX(view); } public static void setRotationX(View view, float rotationX) { if (NEEDS_PROXY) { wrap(view).setRotationX(rotationX); } else { Honeycomb.setRotationX(view, rotationX); } } public static float getRotationY(View view) { return NEEDS_PROXY ? wrap(view).getRotationY() : Honeycomb.getRotationY(view); } public static void setRotationY(View view, float rotationY) { if (NEEDS_PROXY) { wrap(view).setRotationY(rotationY); } else { Honeycomb.setRotationY(view, rotationY); } } public static float getScaleX(View view) { return NEEDS_PROXY ? wrap(view).getScaleX() : Honeycomb.getScaleX(view); } public static void setScaleX(View view, float scaleX) { if (NEEDS_PROXY) { wrap(view).setScaleX(scaleX); } else { Honeycomb.setScaleX(view, scaleX); } } public static float getScaleY(View view) { return NEEDS_PROXY ? wrap(view).getScaleY() : Honeycomb.getScaleY(view); } public static void setScaleY(View view, float scaleY) { if (NEEDS_PROXY) { wrap(view).setScaleY(scaleY); } else { Honeycomb.setScaleY(view, scaleY); } } public static float getScrollX(View view) { return NEEDS_PROXY ? wrap(view).getScrollX() : Honeycomb.getScrollX(view); } public static void setScrollX(View view, int scrollX) { if (NEEDS_PROXY) { wrap(view).setScrollX(scrollX); } else { Honeycomb.setScrollX(view, scrollX); } } public static float getScrollY(View view) { return NEEDS_PROXY ? wrap(view).getScrollY() : Honeycomb.getScrollY(view); } public static void setScrollY(View view, int scrollY) { if (NEEDS_PROXY) { wrap(view).setScrollY(scrollY); } else { Honeycomb.setScrollY(view, scrollY); } } public static float getTranslationX(View view) { return NEEDS_PROXY ? wrap(view).getTranslationX() : Honeycomb.getTranslationX(view); } public static void setTranslationX(View view, float translationX) { if (NEEDS_PROXY) { wrap(view).setTranslationX(translationX); } else { Honeycomb.setTranslationX(view, translationX); } } public static float getTranslationY(View view) { return NEEDS_PROXY ? 
wrap(view).getTranslationY() : Honeycomb.getTranslationY(view); } public static void setTranslationY(View view, float translationY) { if (NEEDS_PROXY) { wrap(view).setTranslationY(translationY); } else { Honeycomb.setTranslationY(view, translationY); } } public static float getX(View view) { return NEEDS_PROXY ? wrap(view).getX() : Honeycomb.getX(view); } public static void setX(View view, float x) { if (NEEDS_PROXY) { wrap(view).setX(x); } else { Honeycomb.setX(view, x); } } public static float getY(View view) { return NEEDS_PROXY ? wrap(view).getY() : Honeycomb.getY(view); } public static void setY(View view, float y) { if (NEEDS_PROXY) { wrap(view).setY(y); } else { Honeycomb.setY(view, y); } } private static final class Honeycomb { static float getAlpha(View view) { return view.getAlpha(); } static void setAlpha(View view, float alpha) { view.setAlpha(alpha); } static float getPivotX(View view) { return view.getPivotX(); } static void setPivotX(View view, float pivotX) { view.setPivotX(pivotX); } static float getPivotY(View view) { return view.getPivotY(); } static void setPivotY(View view, float pivotY) { view.setPivotY(pivotY); } static float getRotation(View view) { return view.getRotation(); } static void setRotation(View view, float rotation) { view.setRotation(rotation); } static float getRotationX(View view) { return view.getRotationX(); } static void setRotationX(View view, float rotationX) { view.setRotationX(rotationX); } static float getRotationY(View view) { return view.getRotationY(); } static void setRotationY(View view, float rotationY) { view.setRotationY(rotationY); } static float getScaleX(View view) { return view.getScaleX(); } static void setScaleX(View view, float scaleX) { view.setScaleX(scaleX); } static float getScaleY(View view) { return view.getScaleY(); } static void setScaleY(View view, float scaleY) { view.setScaleY(scaleY); } static float getScrollX(View view) { return view.getScrollX(); } static void setScrollX(View view, int scrollX) { view.setScrollX(scrollX); } static float getScrollY(View view) { return view.getScrollY(); } static void setScrollY(View view, int scrollY) { view.setScrollY(scrollY); } static float getTranslationX(View view) { return view.getTranslationX(); } static void setTranslationX(View view, float translationX) { view.setTranslationX(translationX); } static float getTranslationY(View view) { return view.getTranslationY(); } static void setTranslationY(View view, float translationY) { view.setTranslationY(translationY); } static float getX(View view) { return view.getX(); } static void setX(View view, float x) { view.setX(x); } static float getY(View view) { return view.getY(); } static void setY(View view, float y) { view.setY(y); } } }
3,770
642
#include "RootRef.h" #include "Frontend.h" using namespace MaximCompiler; RootRef::RootRef(void *handle) : handle(handle) {} void RootRef::addSocket(MaximCompiler::VarType vartype) { MaximFrontend::maxim_build_root_socket(get(), vartype.release()); }
93
1,350
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.devtestlabs.implementation; import com.azure.core.http.rest.PagedIterable; import com.azure.core.http.rest.Response; import com.azure.core.http.rest.SimpleResponse; import com.azure.core.util.Context; import com.azure.core.util.logging.ClientLogger; import com.azure.resourcemanager.devtestlabs.fluent.ArtifactsClient; import com.azure.resourcemanager.devtestlabs.fluent.models.ArmTemplateInfoInner; import com.azure.resourcemanager.devtestlabs.fluent.models.ArtifactInner; import com.azure.resourcemanager.devtestlabs.models.ArmTemplateInfo; import com.azure.resourcemanager.devtestlabs.models.Artifact; import com.azure.resourcemanager.devtestlabs.models.Artifacts; import com.azure.resourcemanager.devtestlabs.models.GenerateArmTemplateRequest; import com.fasterxml.jackson.annotation.JsonIgnore; public final class ArtifactsImpl implements Artifacts { @JsonIgnore private final ClientLogger logger = new ClientLogger(ArtifactsImpl.class); private final ArtifactsClient innerClient; private final com.azure.resourcemanager.devtestlabs.DevTestLabsManager serviceManager; public ArtifactsImpl( ArtifactsClient innerClient, com.azure.resourcemanager.devtestlabs.DevTestLabsManager serviceManager) { this.innerClient = innerClient; this.serviceManager = serviceManager; } public PagedIterable<Artifact> list(String resourceGroupName, String labName, String artifactSourceName) { PagedIterable<ArtifactInner> inner = this.serviceClient().list(resourceGroupName, labName, artifactSourceName); return Utils.mapPage(inner, inner1 -> new ArtifactImpl(inner1, this.manager())); } public PagedIterable<Artifact> list( String resourceGroupName, String labName, String artifactSourceName, String expand, String filter, Integer top, String orderby, Context context) { PagedIterable<ArtifactInner> inner = this .serviceClient() .list(resourceGroupName, labName, artifactSourceName, expand, filter, top, orderby, context); return Utils.mapPage(inner, inner1 -> new ArtifactImpl(inner1, this.manager())); } public Artifact get(String resourceGroupName, String labName, String artifactSourceName, String name) { ArtifactInner inner = this.serviceClient().get(resourceGroupName, labName, artifactSourceName, name); if (inner != null) { return new ArtifactImpl(inner, this.manager()); } else { return null; } } public Response<Artifact> getWithResponse( String resourceGroupName, String labName, String artifactSourceName, String name, String expand, Context context) { Response<ArtifactInner> inner = this.serviceClient().getWithResponse(resourceGroupName, labName, artifactSourceName, name, expand, context); if (inner != null) { return new SimpleResponse<>( inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new ArtifactImpl(inner.getValue(), this.manager())); } else { return null; } } public ArmTemplateInfo generateArmTemplate( String resourceGroupName, String labName, String artifactSourceName, String name, GenerateArmTemplateRequest generateArmTemplateRequest) { ArmTemplateInfoInner inner = this .serviceClient() .generateArmTemplate(resourceGroupName, labName, artifactSourceName, name, generateArmTemplateRequest); if (inner != null) { return new ArmTemplateInfoImpl(inner, this.manager()); } else { return null; } } public Response<ArmTemplateInfo> generateArmTemplateWithResponse( String resourceGroupName, String labName, String artifactSourceName, 
String name, GenerateArmTemplateRequest generateArmTemplateRequest, Context context) { Response<ArmTemplateInfoInner> inner = this .serviceClient() .generateArmTemplateWithResponse( resourceGroupName, labName, artifactSourceName, name, generateArmTemplateRequest, context); if (inner != null) { return new SimpleResponse<>( inner.getRequest(), inner.getStatusCode(), inner.getHeaders(), new ArmTemplateInfoImpl(inner.getValue(), this.manager())); } else { return null; } } private ArtifactsClient serviceClient() { return this.innerClient; } private com.azure.resourcemanager.devtestlabs.DevTestLabsManager manager() { return this.serviceManager; } }
1,970
990
''' Given n pairs of parentheses, write a function to generate all combinations of well-formed parentheses. For example, given n = 3, a solution set is: [ "((()))", "(()())", "(())()", "()(())", "()()()" ] ''' class Solution(object): def generateParenthesis(self, n): """ :type n: int :rtype: List[str] """ result = [] def backtracking(S, left, right): if len(S) == 2*n: result.append(S) return if left < n: backtracking(S+'(', left+1, right) if right < left: backtracking(S+')', left, right+1) backtracking('', 0, 0) return result
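# A small usage sketch for the Solution class above, assuming it is available in
# the same module; the list order follows the backtracking order described in
# the docstring.
if __name__ == "__main__":
    solver = Solution()
    result = solver.generateParenthesis(3)
    print(len(result))  # 5 well-formed combinations for n = 3
    for combo in result:
        print(combo)    # ((())), (()()), (())(), ()(()), ()()()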
336
12,252
package org.keycloak.testsuite.util; import org.subethamail.smtp.MessageContext; import org.subethamail.smtp.MessageHandler; import org.subethamail.smtp.MessageHandlerFactory; import org.subethamail.smtp.RejectException; import javax.mail.MessagingException; import javax.mail.Session; import javax.mail.internet.MimeMessage; import java.io.BufferedReader; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Properties; public class MessageHandlerFactoryImpl implements MessageHandlerFactory { MimeMessage message; public MessageHandler create(MessageContext ctx) { return new Handler(ctx); } class Handler implements MessageHandler { MessageContext ctx; public Handler(MessageContext ctx) { this.ctx = ctx; } public void from(String from) throws RejectException { System.out.println("FROM:" + from); } public void recipient(String recipient) throws RejectException { System.out.println("RECIPIENT:" + recipient); } public void data(InputStream data) throws IOException { String rawMail = this.convertStreamToString(data); Session session = Session.getDefaultInstance(new Properties()); InputStream is = new ByteArrayInputStream(rawMail.getBytes()); try { message = new MimeMessage(session, is); setMessage(message); } catch (MessagingException e) { e.printStackTrace(); } } public void done() { System.out.println("Finished"); } public String convertStreamToString(InputStream is) { BufferedReader reader = new BufferedReader(new InputStreamReader(is)); StringBuilder sb = new StringBuilder(); String line = null; try { while ((line = reader.readLine()) != null) { sb.append(line + "\n"); } } catch (IOException e) { e.printStackTrace(); } return sb.toString(); } } public MimeMessage getMessage(){ return message; } public void setMessage(MimeMessage msg){ this.message = msg; } }
1,039
488
// 7.3.3h.cc // turn on overloading int dummy(); // line 4 void ddummy() { __testOverload(dummy(), 4); } asm("collectLookupResults f=18 g=19 x=10"); namespace A { int x; // line 10 } namespace B { int i; struct g { }; struct x { }; void f(int); void f(double); // line 18 void g(char); // OK: hides struct g (line 19) } void func() { int i; //ERROR(1): using B::i; // error: i declared twice void f(char); using B::f; // OK: each f is a function f(3.5); // calls B::f(double) using B::g; g('a'); // calls B::g(char) struct g g1; // g1 has class type B::g using B::x; using A::x; // OK: hides struct B::x x = 99; // assigns to A::x struct x x1; // x1 has class type B::x }
500
28,056
<filename>src/main/java/com/alibaba/fastjson/support/spring/annotation/FastJsonView.java
package com.alibaba.fastjson.support.spring.annotation;

import java.lang.annotation.*;

/**
 * <pre>
 * An annotation placed on a {@link org.springframework.stereotype.Controller Controller} method.
 * It declares which fields of the returned object should be excluded or included for a given class.
 * For example:
 * <code>&#064;FastJsonView(exclude = {&#064;FastJsonFilter(clazz = JSON.class,props = {"data"})})</code>
 *
 * </pre>
 * @author yanquanyu
 * @author liuming
 */
@Target({ElementType.METHOD})
@Retention(RetentionPolicy.RUNTIME)
@Documented
public @interface FastJsonView {
    FastJsonFilter[] include() default {};

    FastJsonFilter[] exclude() default {};
}
309
5,169
<filename>Specs/JxbPhotoView_Swift/1.1/JxbPhotoView_Swift.podspec.json<gh_stars>1000+ { "name": "JxbPhotoView_Swift", "version": "1.1", "license": "MIT", "summary": "A photo library viewer in Swift", "homepage": "http://www.jxb.name", "authors": { "Peter": "<EMAIL>" }, "source": { "git": "https://github.com/JxbSir/JxbPhotoView_Swift.git", "tag": "1.1" }, "platforms": { "ios": "8.0" }, "source_files": "JxbPhotoView/JxbPhotoView/*.swift", "requires_arc": true, "dependencies": { "Kingfisher": [ ] } }
250
619
<filename>tests/unit/test_task_sets.py import unittest from rlbench.tasks import (FS10_V1, FS25_V1, FS50_V1, FS95_V1, MT15_V1, MT30_V1, MT55_V1, MT100_V1) FS_V1 = [ (FS10_V1, 10, 5), (FS25_V1, 25, 5), (FS50_V1, 50, 5), (FS95_V1, 95, 5)] MT_V1 = [ (MT15_V1, 15), (MT30_V1, 30), (MT55_V1, 55), (MT100_V1, 100)] class TestTaskSet(unittest.TestCase): def test_fs_v1(self): for ts, train, test in FS_V1: with self.subTest(task_set='FS%d_V1' % train): self.assertEqual(len(ts['train']), train) self.assertEqual(len(ts['test']), test) # Test no duplicates self.assertEqual(len(ts['train'] + ts['test']), len(set(ts['train'] + ts['test']))) self.assertFalse(any(i in ts['test'] for i in ts['train'])) def test_mt_v1(self): for ts, train in MT_V1: with self.subTest(task_set='MT%d_V1' % train): self.assertEqual(len(ts['train']), train) # Test no duplicates self.assertEqual(len(ts['train']), len(set(ts['train'])))
669
1,178
<reponame>leozz37/makani /* * Copyright 2020 Makani Technologies LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef AVIONICS_COMMON_GILL_SERIAL_H_ #define AVIONICS_COMMON_GILL_SERIAL_H_ #include <stdbool.h> #include <stdint.h> #include "avionics/common/gill_binary.h" #include "avionics/common/nmea.h" #include "avionics/common/serial_parse.h" #define GILL_FIELDS_MAX 16 typedef enum { kGillProtoAscii, kGillProtoBinary, kGillProtoLine, kGillProtoPrompt, kGillProtoNmea, } GillProto; typedef struct { uint8_t checksum; int32_t checksum_index; int32_t fields; int32_t field_delim[GILL_FIELDS_MAX]; } GillAscii; typedef struct { GillAscii ascii; GillBinary binary; NmeaReceive nmea; GillProto proto; const uint8_t *data; int32_t data_length; } GillReceive; bool GillParse(SerialReceiveBuffer *buffer, GillReceive *rx); #endif // AVIONICS_COMMON_GILL_SERIAL_H_
523
805
<filename>lib/source/gainput/pad/GainputInputDevicePadMac.cpp #include <gainput/gainput.h> #ifdef GAINPUT_PLATFORM_MAC #include "GainputInputDevicePadImpl.h" #include <gainput/GainputInputDeltaState.h> #include <gainput/GainputHelpers.h> #include <gainput/GainputLog.h> #include "GainputInputDevicePadMac.h" #import <CoreFoundation/CoreFoundation.h> #import <IOKit/hid/IOHIDManager.h> #import <IOKit/hid/IOHIDUsageTables.h> namespace gainput { extern bool MacIsApplicationKey(); namespace { static const unsigned kMaxPads = 16; static bool usedPadIndices_[kMaxPads] = { false }; static inline float FixUpAnalog(float analog, const float minAxis, const float maxAxis, bool symmetric) { analog = analog < minAxis ? minAxis : analog > maxAxis ? maxAxis : analog; // clamp analog -= minAxis; analog /= (Abs(minAxis) + Abs(maxAxis))*(symmetric ? 0.5f : 1.0f); if (symmetric) { analog -= 1.0f; } return analog; } static void OnDeviceInput(void* inContext, IOReturn inResult, void* inSender, IOHIDValueRef value) { if (!MacIsApplicationKey()) { return; } IOHIDElementRef elem = IOHIDValueGetElement(value); InputDevicePadImplMac* device = reinterpret_cast<InputDevicePadImplMac*>(inContext); GAINPUT_ASSERT(device); uint32_t usagePage = IOHIDElementGetUsagePage(elem); uint32_t usage = IOHIDElementGetUsage(elem); if (IOHIDElementGetReportCount(elem) > 1 || (usagePage == kHIDPage_GenericDesktop && usage == kHIDUsage_GD_Pointer) ) { return; } if (usagePage >= kHIDPage_VendorDefinedStart) { return; } InputManager& manager = device->manager_; CFIndex state = (int)IOHIDValueGetIntegerValue(value); float analog = IOHIDValueGetScaledValue(value, kIOHIDValueScaleTypePhysical); if (usagePage == kHIDPage_Button && device->buttonDialect_.count(usage)) { const DeviceButtonId buttonId = device->buttonDialect_[usage]; manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, buttonId, state != 0); } else if (usagePage == kHIDPage_GenericDesktop) { if (usage == kHIDUsage_GD_Hatswitch) { int dpadX = 0; int dpadY = 0; switch(state) { case 0: dpadX = 0; dpadY = 1; break; case 1: dpadX = 1; dpadY = 1; break; case 2: dpadX = 1; dpadY = 0; break; case 3: dpadX = 1; dpadY = -1; break; case 4: dpadX = 0; dpadY = -1; break; case 5: dpadX = -1; dpadY = -1; break; case 6: dpadX = -1; dpadY = 0; break; case 7: dpadX = -1; dpadY = 1; break; default: dpadX = 0; dpadY = 0; break; } manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, PadButtonLeft, dpadX < 0); manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, PadButtonRight, dpadX > 0); manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, PadButtonUp, dpadY > 0); manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, PadButtonDown, dpadY < 0); } else if (device->axisDialect_.count(usage)) { const DeviceButtonId buttonId = device->axisDialect_[usage]; if (buttonId == PadButtonAxis4 || buttonId == PadButtonAxis5) { analog = FixUpAnalog(analog, device->minTriggerAxis_, device->maxTriggerAxis_, false); } else if (buttonId == PadButtonLeftStickY || buttonId == PadButtonRightStickY) { analog = -FixUpAnalog(analog, device->minAxis_, device->maxAxis_, true); } else { analog = FixUpAnalog(analog, device->minAxis_, device->maxAxis_, true); } manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, buttonId, analog); } else if (device->buttonDialect_.count(usage)) { const DeviceButtonId buttonId = 
device->buttonDialect_[usage]; manager.EnqueueConcurrentChange(device->device_, device->nextState_, device->delta_, buttonId, state != 0); } #ifdef GAINPUT_DEBUG else { GAINPUT_LOG("Unmapped button (generic): %d\n", usage); } #endif } #ifdef GAINPUT_DEBUG else { GAINPUT_LOG("Unmapped button: %d\n", usage); } #endif } static void OnDeviceConnected(void* inContext, IOReturn inResult, void* inSender, IOHIDDeviceRef inIOHIDDeviceRef) { InputDevicePadImplMac* device = reinterpret_cast<InputDevicePadImplMac*>(inContext); GAINPUT_ASSERT(device); if (device->deviceState_ != InputDevice::DS_UNAVAILABLE) { return; } for (unsigned i = 0; i < device->index_ && i < kMaxPads; ++i) { if (!usedPadIndices_[i]) { return; } } if (device->index_ < kMaxPads) { usedPadIndices_[device->index_] = true; } device->deviceState_ = InputDevice::DS_OK; long vendorId = 0; long productId = 0; if (CFTypeRef tCFTypeRef = IOHIDDeviceGetProperty(inIOHIDDeviceRef, CFSTR(kIOHIDVendorIDKey) )) { if (CFNumberGetTypeID() == CFGetTypeID(tCFTypeRef)) { CFNumberGetValue((CFNumberRef)tCFTypeRef, kCFNumberSInt32Type, &vendorId); } } if (CFTypeRef tCFTypeRef = IOHIDDeviceGetProperty(inIOHIDDeviceRef, CFSTR(kIOHIDProductIDKey) )) { if (CFNumberGetTypeID() == CFGetTypeID(tCFTypeRef)) { CFNumberGetValue((CFNumberRef)tCFTypeRef, kCFNumberSInt32Type, &productId); } } if (vendorId == 0x054c && (productId == 0x5c4 || productId == 0x9cc)) // Sony DualShock 4 { device->minAxis_ = 0; device->maxAxis_ = 256; device->minTriggerAxis_ = device->minAxis_; device->maxTriggerAxis_ = device->maxAxis_; device->axisDialect_[kHIDUsage_GD_X] = PadButtonLeftStickX; device->axisDialect_[kHIDUsage_GD_Y] = PadButtonLeftStickY; device->axisDialect_[kHIDUsage_GD_Z] = PadButtonRightStickX; device->axisDialect_[kHIDUsage_GD_Rz] = PadButtonRightStickY; device->axisDialect_[kHIDUsage_GD_Rx] = PadButtonAxis4; device->axisDialect_[kHIDUsage_GD_Ry] = PadButtonAxis5; device->buttonDialect_[0x09] = PadButtonSelect; device->buttonDialect_[0x0b] = PadButtonL3; device->buttonDialect_[0x0c] = PadButtonR3; device->buttonDialect_[0x0A] = PadButtonStart; device->buttonDialect_[0xfffffff0] = PadButtonUp; device->buttonDialect_[0xfffffff1] = PadButtonRight; device->buttonDialect_[0xfffffff2] = PadButtonDown; device->buttonDialect_[0xfffffff3] = PadButtonLeft; device->buttonDialect_[0x05] = PadButtonL1; device->buttonDialect_[0x07] = PadButtonL2; device->buttonDialect_[0x06] = PadButtonR1; device->buttonDialect_[0x08] = PadButtonR2; device->buttonDialect_[0x04] = PadButtonY; device->buttonDialect_[0x03] = PadButtonB; device->buttonDialect_[0x02] = PadButtonA; device->buttonDialect_[0x01] = PadButtonX; device->buttonDialect_[0x0d] = PadButtonHome; device->buttonDialect_[0x0e] = PadButton17; // Touch pad } else if (vendorId == 0x054c && productId == 0x268) // Sony DualShock 3 { device->minAxis_ = 0; device->maxAxis_ = 256; device->minTriggerAxis_ = device->minAxis_; device->maxTriggerAxis_ = device->maxAxis_; device->axisDialect_[kHIDUsage_GD_X] = PadButtonLeftStickX; device->axisDialect_[kHIDUsage_GD_Y] = PadButtonLeftStickY; device->axisDialect_[kHIDUsage_GD_Z] = PadButtonRightStickX; device->axisDialect_[kHIDUsage_GD_Rz] = PadButtonRightStickY; device->axisDialect_[kHIDUsage_GD_Rx] = PadButtonAxis4; device->axisDialect_[kHIDUsage_GD_Ry] = PadButtonAxis5; //device->buttonDialect_[0] = PadButtonSelect; device->buttonDialect_[2] = PadButtonL3; device->buttonDialect_[3] = PadButtonR3; device->buttonDialect_[4] = PadButtonStart; device->buttonDialect_[5] = PadButtonUp; 
device->buttonDialect_[6] = PadButtonRight; device->buttonDialect_[7] = PadButtonDown; device->buttonDialect_[8] = PadButtonLeft; device->buttonDialect_[11] = PadButtonL1; device->buttonDialect_[9] = PadButtonL2; device->buttonDialect_[12] = PadButtonR1; device->buttonDialect_[10] = PadButtonR2; device->buttonDialect_[13] = PadButtonY; device->buttonDialect_[14] = PadButtonB; device->buttonDialect_[15] = PadButtonA; device->buttonDialect_[16] = PadButtonX; device->buttonDialect_[17] = PadButtonHome; } else if (vendorId == 0x045e && (productId == 0x028E || productId == 0x028F || productId == 0x02D1)) // Microsoft 360 Controller wired/wireless, Xbox One Controller { device->minAxis_ = -(1<<15); device->maxAxis_ = 1<<15; device->minTriggerAxis_ = 0; device->maxTriggerAxis_ = 255; device->axisDialect_[kHIDUsage_GD_X] = PadButtonLeftStickX; device->axisDialect_[kHIDUsage_GD_Y] = PadButtonLeftStickY; device->axisDialect_[kHIDUsage_GD_Rx] = PadButtonRightStickX; device->axisDialect_[kHIDUsage_GD_Ry] = PadButtonRightStickY; device->axisDialect_[kHIDUsage_GD_Z] = PadButtonAxis4; device->axisDialect_[kHIDUsage_GD_Rz] = PadButtonAxis5; device->buttonDialect_[0x0a] = PadButtonSelect; device->buttonDialect_[0x07] = PadButtonL3; device->buttonDialect_[0x08] = PadButtonR3; device->buttonDialect_[0x09] = PadButtonStart; device->buttonDialect_[0x0c] = PadButtonUp; device->buttonDialect_[0x0f] = PadButtonRight; device->buttonDialect_[0x0d] = PadButtonDown; device->buttonDialect_[0x0e] = PadButtonLeft; device->buttonDialect_[0x05] = PadButtonL1; device->buttonDialect_[0x06] = PadButtonR1; device->buttonDialect_[0x04] = PadButtonY; device->buttonDialect_[0x02] = PadButtonB; device->buttonDialect_[0x01] = PadButtonA; device->buttonDialect_[0x03] = PadButtonX; device->buttonDialect_[0x0b] = PadButtonHome; } } static void OnDeviceRemoved(void* inContext, IOReturn inResult, void* inSender, IOHIDDeviceRef inIOHIDDeviceRef) { InputDevicePadImplMac* device = reinterpret_cast<InputDevicePadImplMac*>(inContext); GAINPUT_ASSERT(device); device->deviceState_ = InputDevice::DS_UNAVAILABLE; if (device->index_ < kMaxPads) { usedPadIndices_[device->index_] = true; } } } InputDevicePadImplMac::InputDevicePadImplMac(InputManager& manager, InputDevice& device, unsigned index, InputState& state, InputState& previousState) : buttonDialect_(manager.GetAllocator()), axisDialect_(manager.GetAllocator()), minAxis_(-FLT_MAX), maxAxis_(FLT_MAX), minTriggerAxis_(-FLT_MAX), maxTriggerAxis_(FLT_MAX), manager_(manager), device_(device), index_(index), state_(state), previousState_(previousState), nextState_(manager.GetAllocator(), PadButtonCount_ + PadButtonAxisCount_), deviceState_(InputDevice::DS_UNAVAILABLE), ioManager_(0) { IOHIDManagerRef ioManager = IOHIDManagerCreate(kCFAllocatorDefault, kIOHIDManagerOptionNone); if (CFGetTypeID(ioManager) != IOHIDManagerGetTypeID()) { return; } ioManager_ = ioManager; static const unsigned kKeyCount = 2; CFStringRef keys[kKeyCount] = { CFSTR(kIOHIDDeviceUsagePageKey), CFSTR(kIOHIDDeviceUsageKey), }; int usagePage = kHIDPage_GenericDesktop; int usage = kHIDUsage_GD_GamePad; CFNumberRef values[kKeyCount] = { CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &usagePage), CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &usage), }; CFMutableArrayRef matchingArray = CFArrayCreateMutable(kCFAllocatorDefault, 0, &kCFTypeArrayCallBacks); CFDictionaryRef matchingDict = CFDictionaryCreate(kCFAllocatorDefault, (const void **) keys, (const void **) values, kKeyCount, 
&kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); CFArrayAppendValue(matchingArray, matchingDict); CFRelease(matchingDict); CFRelease(values[1]); usage = kHIDUsage_GD_MultiAxisController; values[1] = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &usage); matchingDict = CFDictionaryCreate(kCFAllocatorDefault, (const void **) keys, (const void **) values, kKeyCount, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); CFArrayAppendValue(matchingArray, matchingDict); CFRelease(matchingDict); CFRelease(values[1]); usage = kHIDUsage_GD_Joystick; values[1] = CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt32Type, &usage); matchingDict = CFDictionaryCreate(kCFAllocatorDefault, (const void **) keys, (const void **) values, kKeyCount, &kCFTypeDictionaryKeyCallBacks, &kCFTypeDictionaryValueCallBacks); CFArrayAppendValue(matchingArray, matchingDict); CFRelease(matchingDict); for (unsigned i = 0; i < kKeyCount; ++i) { CFRelease(keys[i]); CFRelease(values[i]); } IOHIDManagerSetDeviceMatchingMultiple(ioManager, matchingArray); CFRelease(matchingArray); IOHIDManagerRegisterDeviceMatchingCallback(ioManager, OnDeviceConnected, this); IOHIDManagerRegisterDeviceRemovalCallback(ioManager, OnDeviceRemoved, this); IOHIDManagerRegisterInputValueCallback(ioManager, OnDeviceInput, this); IOHIDManagerOpen(ioManager, kIOHIDOptionsTypeNone); IOHIDManagerScheduleWithRunLoop(ioManager, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode); } InputDevicePadImplMac::~InputDevicePadImplMac() { IOHIDManagerRef ioManager = reinterpret_cast<IOHIDManagerRef>(ioManager_); IOHIDManagerUnscheduleFromRunLoop(ioManager, CFRunLoopGetCurrent(), kCFRunLoopDefaultMode); IOHIDManagerClose(ioManager, 0); CFRelease(ioManager); } void InputDevicePadImplMac::Update(InputDeltaState* delta) { delta_ = delta; state_ = nextState_; } bool InputDevicePadImplMac::IsValidButton(DeviceButtonId deviceButton) const { if (buttonDialect_.empty()) { return deviceButton < PadButtonMax_; } for (HashMap<unsigned, DeviceButtonId>::const_iterator it = buttonDialect_.begin(); it != buttonDialect_.end(); ++it) { if (it->second == deviceButton) { return true; } } for (HashMap<unsigned, DeviceButtonId>::const_iterator it = axisDialect_.begin(); it != axisDialect_.end(); ++it) { if (it->second == deviceButton) { return true; } } return false; } } #endif
5,384
480
/* * Copyright [2013-2021], Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.polardbx.optimizer.core.rel.ddl; import com.alibaba.polardbx.gms.metadb.table.IndexStatus; import com.alibaba.polardbx.optimizer.OptimizerContext; import com.alibaba.polardbx.optimizer.config.table.GsiMetaManager.GsiIndexMetaBean; import com.alibaba.polardbx.optimizer.config.table.GsiMetaManager.GsiMetaBean; import com.alibaba.polardbx.optimizer.config.table.GsiMetaManager.GsiTableMetaBean; import com.alibaba.polardbx.optimizer.core.rel.ddl.data.TruncateTablePreparedData; import com.alibaba.polardbx.optimizer.core.rel.ddl.data.gsi.TruncateGlobalIndexPreparedData; import com.alibaba.polardbx.optimizer.core.rel.ddl.data.gsi.TruncateTableWithGsiPreparedData; import org.apache.calcite.rel.ddl.TruncateTable; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlTruncateTable; import java.util.Map; public class LogicalTruncateTable extends BaseDdlOperation { private SqlTruncateTable sqlTruncateTable; private TruncateTablePreparedData truncateTablePreparedData; private TruncateTableWithGsiPreparedData truncateTableWithGsiPreparedData; public LogicalTruncateTable(TruncateTable truncateTable) { super(truncateTable); this.sqlTruncateTable = (SqlTruncateTable) truncateTable.sqlNode; } public static LogicalTruncateTable create(TruncateTable truncateTable) { return new LogicalTruncateTable(truncateTable); } public boolean isWithGsi() { return truncateTableWithGsiPreparedData != null && truncateTableWithGsiPreparedData.hasGsi(); } public TruncateTablePreparedData getTruncateTablePreparedData() { return truncateTablePreparedData; } public TruncateTableWithGsiPreparedData getTruncateTableWithGsiPreparedData() { return truncateTableWithGsiPreparedData; } public void prepareData() { // A normal logical table or a primary table with GSIs. truncateTablePreparedData = preparePrimaryData(); final GsiMetaBean gsiMetaBean = OptimizerContext.getContext(schemaName).getLatestSchemaManager().getGsi(tableName, IndexStatus.ALL); if (gsiMetaBean.withGsi(tableName)) { truncateTableWithGsiPreparedData = new TruncateTableWithGsiPreparedData(); truncateTableWithGsiPreparedData.setPrimaryTablePreparedData(truncateTablePreparedData); final GsiTableMetaBean gsiTableMeta = gsiMetaBean.getTableMeta().get(tableName); for (Map.Entry<String, GsiIndexMetaBean> gsiEntry : gsiTableMeta.indexMap.entrySet()) { TruncateGlobalIndexPreparedData indexTablePreparedData = prepareGsiData(truncateTablePreparedData.getTableName(), gsiEntry.getKey()); truncateTableWithGsiPreparedData.addIndexTablePreparedData(indexTablePreparedData); } } } public boolean isPurge() { // Forcibly truncate the table instead of putting it into the recycle bin if purge. 
return sqlTruncateTable.isPurge(); } public SqlNode getTargetTable() { return sqlTruncateTable.getTargetTable(); } private TruncateTablePreparedData preparePrimaryData() { TruncateTablePreparedData preparedData = new TruncateTablePreparedData(); preparedData.setSchemaName(schemaName); preparedData.setTableName(tableName); return preparedData; } private TruncateGlobalIndexPreparedData prepareGsiData(String primaryTableName, String indexTableName) { TruncateGlobalIndexPreparedData preparedData = new TruncateGlobalIndexPreparedData(); TruncateTablePreparedData indexTablePreparedData = new TruncateTablePreparedData(); indexTablePreparedData.setSchemaName(schemaName); indexTablePreparedData.setTableName(indexTableName); preparedData.setSchemaName(schemaName); preparedData.setTableName(indexTableName); preparedData.setIndexTablePreparedData(indexTablePreparedData); preparedData.setPrimaryTableName(primaryTableName); return preparedData; } }
1,696
2,151
<gh_stars>1000+ /* Find debugging and symbol information for a module in libdwfl. Copyright (C) 2005-2013 Red Hat, Inc. This file is part of elfutils. This file is free software; you can redistribute it and/or modify it under the terms of either * the GNU Lesser General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version or * the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version or both in parallel, as here. elfutils is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received copies of the GNU General Public License and the GNU Lesser General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "libdwflP.h" /* Returns the name of the symbol "closest" to ADDR. Never returns symbols at addresses above ADDR. */ const char * internal_function __libdwfl_addrsym (Dwfl_Module *mod, GElf_Addr addr, GElf_Off *off, GElf_Sym *closest_sym, GElf_Word *shndxp, Elf **elfp, Dwarf_Addr *biasp, bool adjust_st_value) { int syments = INTUSE(dwfl_module_getsymtab) (mod); if (syments < 0) return NULL; /* Return true iff we consider ADDR to lie in the same section as SYM. */ GElf_Word addr_shndx = SHN_UNDEF; Elf *addr_symelf = NULL; inline bool same_section (GElf_Addr value, Elf *symelf, GElf_Word shndx) { /* For absolute symbols and the like, only match exactly. */ if (shndx >= SHN_LORESERVE) return value == addr; /* If value might not be st_value, the shndx of the symbol might not match the section of the value. Explicitly look both up. */ if (! adjust_st_value) { Dwarf_Addr v; if (addr_shndx == SHN_UNDEF) { v = addr; addr_shndx = __libdwfl_find_section_ndx (mod, &v); } v = value; return addr_shndx == __libdwfl_find_section_ndx (mod, &v); } /* Figure out what section ADDR lies in. */ if (addr_shndx == SHN_UNDEF || addr_symelf != symelf) { GElf_Addr mod_addr = dwfl_deadjust_st_value (mod, symelf, addr); Elf_Scn *scn = NULL; addr_shndx = SHN_ABS; addr_symelf = symelf; while ((scn = elf_nextscn (symelf, scn)) != NULL) { GElf_Shdr shdr_mem; GElf_Shdr *shdr = gelf_getshdr (scn, &shdr_mem); if (likely (shdr != NULL) && mod_addr >= shdr->sh_addr && mod_addr < shdr->sh_addr + shdr->sh_size) { addr_shndx = elf_ndxscn (scn); break; } } } return shndx == addr_shndx && addr_symelf == symelf; } /* Keep track of the closest symbol we have seen so far. Here we store only symbols with nonzero st_size. */ const char *closest_name = NULL; GElf_Addr closest_value = 0; GElf_Word closest_shndx = SHN_UNDEF; Elf *closest_elf = NULL; /* Keep track of an eligible symbol with st_size == 0 as a fallback. */ const char *sizeless_name = NULL; GElf_Sym sizeless_sym = { 0, 0, 0, 0, 0, SHN_UNDEF }; GElf_Addr sizeless_value = 0; GElf_Word sizeless_shndx = SHN_UNDEF; Elf *sizeless_elf = NULL; /* Keep track of the lowest address a relevant sizeless symbol could have. */ GElf_Addr min_label = 0; /* Try one symbol and associated value from the search table. */ inline void try_sym_value (GElf_Addr value, GElf_Sym *sym, const char *name, GElf_Word shndx, Elf *elf, bool resolved) { /* Even if we don't choose this symbol, its existence excludes any sizeless symbol (assembly label) that is below its upper bound. 
*/ if (value + sym->st_size > min_label) min_label = value + sym->st_size; if (sym->st_size == 0 || addr - value < sym->st_size) { /* Return GELF_ST_BIND as higher-is-better integer. */ inline int binding_value (const GElf_Sym *symp) { switch (GELF_ST_BIND (symp->st_info)) { case STB_GLOBAL: return 3; case STB_WEAK: return 2; case STB_LOCAL: return 1; default: return 0; } } /* This symbol is a better candidate than the current one if it's closer to ADDR or is global when it was local. */ if (closest_name == NULL || closest_value < value || binding_value (closest_sym) < binding_value (sym)) { if (sym->st_size != 0) { *closest_sym = *sym; closest_value = value; closest_shndx = shndx; closest_elf = elf; closest_name = name; } else if (closest_name == NULL && value >= min_label && same_section (value, resolved ? mod->main.elf : elf, shndx)) { /* Handwritten assembly symbols sometimes have no st_size. If no symbol with proper size includes the address, we'll use the closest one that is in the same section as ADDR. */ sizeless_sym = *sym; sizeless_value = value; sizeless_shndx = shndx; sizeless_elf = elf; sizeless_name = name; } } /* When the beginning of its range is no closer, the end of its range might be. Otherwise follow GELF_ST_BIND preference. If all are equal prefer the first symbol found. */ else if (sym->st_size != 0 && closest_value == value && ((closest_sym->st_size > sym->st_size && (binding_value (closest_sym) <= binding_value (sym))) || (closest_sym->st_size >= sym->st_size && (binding_value (closest_sym) < binding_value (sym))))) { *closest_sym = *sym; closest_value = value; closest_shndx = shndx; closest_elf = elf; closest_name = name; } } } /* Look through the symbol table for a matching symbol. */ inline void search_table (int start, int end) { for (int i = start; i < end; ++i) { GElf_Sym sym; GElf_Addr value; GElf_Word shndx; Elf *elf; bool resolved; const char *name = __libdwfl_getsym (mod, i, &sym, &value, &shndx, &elf, NULL, &resolved, adjust_st_value); if (name != NULL && name[0] != '\0' && sym.st_shndx != SHN_UNDEF && value <= addr && GELF_ST_TYPE (sym.st_info) != STT_SECTION && GELF_ST_TYPE (sym.st_info) != STT_FILE && GELF_ST_TYPE (sym.st_info) != STT_TLS) { try_sym_value (value, &sym, name, shndx, elf, resolved); /* If this is an addrinfo variant and the value could be resolved then also try matching the (adjusted) st_value. */ if (resolved && mod->e_type != ET_REL) { GElf_Addr adjusted_st_value; adjusted_st_value = dwfl_adjusted_st_value (mod, elf, sym.st_value); if (value != adjusted_st_value && adjusted_st_value <= addr) try_sym_value (adjusted_st_value, &sym, name, shndx, elf, false); } } } } /* First go through global symbols. mod->first_global and mod->aux_first_global are setup by dwfl_module_getsymtab to the index of the first global symbol in those symbol tables. Both are non-zero when the table exist, except when there is only a dynsym table loaded through phdrs, then first_global is zero and there will be no auxiliary table. All symbols with local binding come first in the symbol table, then all globals. The zeroth, null entry, in the auxiliary table is skipped if there is a main table. */ int first_global = INTUSE (dwfl_module_getsymtab_first_global) (mod); if (first_global < 0) return NULL; search_table (first_global == 0 ? 1 : first_global, syments); /* If we found nothing searching the global symbols, then try the locals. Unless we have a global sizeless symbol that matches exactly. 
*/ if (closest_name == NULL && first_global > 1 && (sizeless_name == NULL || sizeless_value != addr)) search_table (1, first_global); /* If we found no proper sized symbol to use, fall back to the best candidate sizeless symbol we found, if any. */ if (closest_name == NULL && sizeless_name != NULL && sizeless_value >= min_label) { *closest_sym = sizeless_sym; closest_value = sizeless_value; closest_shndx = sizeless_shndx; closest_elf = sizeless_elf; closest_name = sizeless_name; } *off = addr - closest_value; if (shndxp != NULL) *shndxp = closest_shndx; if (elfp != NULL) *elfp = closest_elf; if (biasp != NULL) *biasp = dwfl_adjusted_st_value (mod, closest_elf, 0); return closest_name; } const char * dwfl_module_addrsym (Dwfl_Module *mod, GElf_Addr addr, GElf_Sym *closest_sym, GElf_Word *shndxp) { GElf_Off off; return __libdwfl_addrsym (mod, addr, &off, closest_sym, shndxp, NULL, NULL, true); } INTDEF (dwfl_module_addrsym) const char *dwfl_module_addrinfo (Dwfl_Module *mod, GElf_Addr address, GElf_Off *offset, GElf_Sym *sym, GElf_Word *shndxp, Elf **elfp, Dwarf_Addr *bias) { return __libdwfl_addrsym (mod, address, offset, sym, shndxp, elfp, bias, false); } INTDEF (dwfl_module_addrinfo)
3,748
892
{ "schema_version": "1.2.0", "id": "GHSA-w8h3-vpfx-2ggc", "modified": "2022-05-01T07:04:10Z", "published": "2022-05-01T07:04:10Z", "aliases": [ "CVE-2006-2942" ], "details": "TWiki 4.0.0, 4.0.1, and 4.0.2 allows remote attackers to gain Twiki administrator privileges via a TWiki.TWikiRegistration form with a modified action attribute that references the Sandbox web instead of the user web, which can then be used to associate the user's login name with the WikiName of a member of the TWikiAdminGroup.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-2942" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/27336" }, { "type": "WEB", "url": "http://archives.neohapsis.com/archives/vulnwatch/2006-q2/0032.html" }, { "type": "WEB", "url": "http://secunia.com/advisories/20596" }, { "type": "WEB", "url": "http://securitytracker.com/id?1016323" }, { "type": "WEB", "url": "http://twiki.org/cgi-bin/view/Codev/SecurityAlertTWiki4PrivilegeElevation" }, { "type": "WEB", "url": "http://www.osvdb.org/26623" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/18506" }, { "type": "WEB", "url": "http://www.vupen.com/english/advisories/2006/2415" } ], "database_specific": { "cwe_ids": [ ], "severity": "MODERATE", "github_reviewed": false } }
740
892
{ "schema_version": "1.2.0", "id": "GHSA-p6hh-9ppc-p972", "modified": "2022-05-13T01:52:11Z", "published": "2022-05-13T01:52:11Z", "aliases": [ "CVE-2018-3126" ], "details": "Vulnerability in the Oracle Retail Xstore Point of Service component of Oracle Retail Applications (subcomponent: Xenvironment). Supported versions that are affected are 15.0.2, 16.0.4 and 17.0.2. Difficult to exploit vulnerability allows high privileged attacker with network access via HTTP to compromise Oracle Retail Xstore Point of Service. Successful attacks of this vulnerability can result in takeover of Oracle Retail Xstore Point of Service. CVSS 3.0 Base Score 6.6 (Confidentiality, Integrity and Availability impacts). CVSS Vector: (CVSS:3.0/AV:N/AC:H/PR:H/UI:N/S:U/C:H/I:H/A:H).", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.0/AV:N/AC:H/PR:H/UI:N/S:U/C:H/I:H/A:H" } ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-3126" }, { "type": "WEB", "url": "http://www.oracle.com/technetwork/security-advisory/cpuoct2018-4428296.html" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/105596" } ], "database_specific": { "cwe_ids": [ ], "severity": "MODERATE", "github_reviewed": false } }
583
467
<filename>davinci.html/WebContent/metadata/html/iframe_oam.json
{
  "id": "http://www.w3.org/html/iframe",
  "name": "html.iframe",
  "spec": "1.0",
  "version": "1.0",
  "property": {
    "name": {
      "datatype": "string"
    },
    "src": {
      "datatype": "string",
      "format": "url"
    },
    "alt": {
      "datatype": "string"
    }
  },
  "content": "<iframe></iframe>",
  "title": {
    "type": "text/html",
    "value": "<p>HTML element for embedding a complete web document (e.g., another HTML file) inside the current document.</p>"
  },
  "description": {
    "type": "text/html",
    "value": "<p>The iframe element represents a nested browsing context.</p><p>The src attribute gives the address of a page that the nested browsing context is to contain. The attribute, if present, must be a valid non-empty URL potentially surrounded by spaces.</p>"
  }
}
310
807
///usr/bin/env jbang "$0" "$@" ; exit $? //REPOS jitpack // Using jitpack to get latest dev version of karate to be able // to use the just added fork() methods to test clis. // see https://github.com/intuit/karate/issues/1191 ////DEPS com.github.intuit.karate:karate-netty:develop-SNAPSHOT //DEPS com.github.intuit.karate:karate-netty:e2882c4 ////DEPS com.intuit.karate:karate-netty:2.0.0 ////DEPS com.github.maxandersen.karate:karate-netty:19e06766 class karate { public static void main(String... args) { com.intuit.karate.Main.main(args); } }
225
2,453
<filename>XVim2/XcodeHeader/IDEKit/IDENoteExplorationContext.h // // Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 30 2020 21:18:12). // // Copyright (C) 1997-2019 <NAME>. // #import <IDEKit/IDEExplorationContext.h> @protocol IDENoteAnnotationExplorableItem; @interface IDENoteExplorationContext : IDEExplorationContext { id <IDENoteAnnotationExplorableItem> _note; } - (void).cxx_destruct; @property(readonly) id <IDENoteAnnotationExplorableItem> note; // @synthesize note=_note; - (int)explorationType; - (id)initWithNote:(id)arg1; @end
210
1,259
<gh_stars>1000+ import logging from custom_auth.oauth.exceptions import GithubError logger = logging.getLogger(__name__) def convert_response_data_to_dictionary(text: str) -> dict: try: response_data = {} for key, value in [param.split("=") for param in text.split("&")]: response_data[key] = value return response_data except ValueError: logger.warning("Malformed data received from Github (%s)" % text) raise GithubError("Malformed data received from Github") def get_first_and_last_name(full_name: str) -> list: if not full_name: return ["", ""] names = full_name.strip().split(" ") return names if len(names) == 2 else [full_name, ""]
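# A brief, hedged sketch of how the two helpers above behave, assuming they are
# in scope in the same module; the token string is made up for illustration and
# is not a real GitHub response.
def demo_helpers():
    data = convert_response_data_to_dictionary(
        "access_token=abc123&scope=user&token_type=bearer")
    assert data == {"access_token": "abc123", "scope": "user", "token_type": "bearer"}

    # A two-word name splits into first and last; anything else falls back.
    assert get_first_and_last_name("Ada Lovelace") == ["Ada", "Lovelace"]
    assert get_first_and_last_name("Plato") == ["Plato", ""]
    assert get_first_and_last_name("") == ["", ""]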
276
1,296
/** * @file MInputStreamAdapter.h * @brief Adapter to use managed input streams on the SDK * * (c) 2013-2014 by Mega Limited, Auckland, New Zealand * * This file is part of the MEGA SDK - Client Access Engine. * * Applications using the MEGA API must present a valid application key * and comply with the the rules set forth in the Terms of Service. * * The MEGA SDK is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. * * @copyright Simplified (2-clause) BSD License. * * You should have received a copy of the license along with this * program. */ #include "MInputStreamAdapter.h" using namespace mega; using namespace Platform; MInputStreamAdapter::MInputStreamAdapter(MInputStream^ inputStream) { this->inputStream = inputStream; } int64_t MInputStreamAdapter::getSize() { if (inputStream != nullptr) return inputStream->Length(); return 0; } bool MInputStreamAdapter::read(char *buffer, size_t size) { if (inputStream == nullptr) return false; if (!buffer) return inputStream->Read(nullptr, size); return inputStream->Read(::Platform::ArrayReference<unsigned char>((unsigned char *)buffer, size), size); }
398
541
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.sword2; import org.dspace.content.Item; import org.dspace.core.Context; import org.swordapp.server.OREStatement; import org.swordapp.server.Statement; import org.swordapp.server.SwordError; import org.swordapp.server.SwordServerException; public class OreStatementDisseminator extends GenericStatementDisseminator implements SwordStatementDisseminator { public Statement disseminate(Context context, Item item) throws DSpaceSwordException, SwordError, SwordServerException { SwordUrlManager urlManager = new SwordUrlManager( new SwordConfigurationDSpace(), context); String aggUrl = urlManager.getAggregationUrl(item); String remUrl = urlManager.getOreStatementUri(item); Statement s = new OREStatement(remUrl, aggUrl); this.populateStatement(context, item, s); return s; } }
346