max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
428
<reponame>pabru/libgdx /******************************************************************************* * Copyright 2011 See AUTHORS file. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. ******************************************************************************/ package java.util.regex; import com.google.gwt.regexp.shared.RegExp; /** Emulation of the {@link Pattern} class, uses {@link RegExp} as internal implementation. * @author hneuer */ public class Pattern { final RegExp regExp; private Pattern (String regExp) { this.regExp = RegExp.compile(regExp); } public static Pattern compile (String regExp) { return new Pattern(regExp); } public Matcher matcher (CharSequence input) { return new Matcher(this, input); } }
338
2,406
<reponame>monroid/openvino<filename>inference-engine/thirdparty/clDNN/src/gemm.cpp // Copyright (C) 2018-2021 Intel Corporation // SPDX-License-Identifier: Apache-2.0 // /////////////////////////////////////////////////////////////////////////////////////////////////// #include "gemm_inst.h" #include "primitive_type_base.h" #include "cldnn/runtime/error_handler.hpp" #include "json_object.h" #include <string> #include <utility> #include <algorithm> namespace cldnn { primitive_type_id gemm::type_id() { static primitive_type_base<gemm> instance; return &instance; } layout gemm_inst::calc_output_layout(gemm_node const& node) { auto prim = node.get_primitive(); auto input0_layout = node.input(0).get_output_layout(); auto input1_layout = node.input(1).get_output_layout(); bool transpose_input0 = prim->transpose_input0; bool transpose_input1 = prim->transpose_input1; auto M = !transpose_input0 ? input0_layout.size.spatial[1] : input0_layout.size.spatial[0]; auto N = !transpose_input1 ? input1_layout.size.spatial[0] : input1_layout.size.spatial[1]; auto output_size = input0_layout.size; for (size_t i = 1; i < node.inputs_count(); ++i) { auto input_layout = node.input(i).get_output_layout(); output_size = tensor::max(output_size, input_layout.size); } output_size.spatial[0] = N; output_size.spatial[1] = M; auto output_type = input0_layout.data_type; if ((output_type == data_types::u8 || output_type == data_types::i8) && prim->output_data_type) output_type = *prim->output_data_type; if (node.has_fused_primitives()) { output_type = node.get_fused_output_layout().data_type; } auto output_format = input0_layout.format; return layout(output_type, output_format, output_size, prim->output_padding); } std::string gemm_inst::to_string(gemm_node const& node) { auto desc = node.get_primitive(); auto node_info = node.desc_to_json(); auto alpha = desc->alpha; auto beta = desc->beta; auto transpose_input0 = desc->transpose_input0 ? 
" true" : "false"; auto transpose_input1 = desc->transpose_input1 ? " true" : "false"; std::stringstream primitive_description; json_composite gemm_info; for (size_t i = 0; i < node.inputs_count(); i++) { gemm_info.add("input_" + std::to_string(i), node.input(i).id()); } gemm_info.add("alpha", alpha); gemm_info.add("beta", beta); gemm_info.add("trasnpose_input0", transpose_input0); gemm_info.add("transpose_input1", transpose_input1); node_info->add("gemm info", gemm_info); node_info->dump(primitive_description); return primitive_description.str(); } gemm_inst::typed_primitive_inst(network& network, gemm_node const& node) : parent(network, node) { auto input0_layout = node.input(0).get_output_layout(); auto input1_layout = node.input(1).get_output_layout(); bool transpose_input0 = node.get_primitive()->transpose_input0; bool transpose_input1 = node.get_primitive()->transpose_input1; auto transposed_x0 = input0_layout.size.spatial[0]; auto transposed_y0 = input0_layout.size.spatial[1]; if (transpose_input0) { std::swap(transposed_x0, transposed_y0); } auto transposed_x1 = input1_layout.size.spatial[0]; auto transposed_y1 = input1_layout.size.spatial[1]; if (transpose_input1) { std::swap(transposed_x1, transposed_y1); } CLDNN_ERROR_NOT_EQUAL(node.id(), "Input 0 internal dimension size", transposed_x0, "Input 1 internal dimension size", transposed_y1, ""); if (node.inputs_count() == 3) { auto input2_layout = node.input(2).get_output_layout(); CLDNN_ERROR_NOT_EQUAL(node.id(), "Input 0 external dimension size", transposed_y0, "Input 2 rows number", input2_layout.size.spatial[1], ""); CLDNN_ERROR_NOT_EQUAL(node.id(), "Input 1 external dimension size", transposed_x1, "Input 2 columns number", input2_layout.size.spatial[0], ""); } } } // namespace cldnn
1,986
454
<gh_stars>100-1000 package io.vertx.up.uca.cosmic; import io.vertx.core.json.JsonObject; import io.vertx.up.commune.config.IntegrationRequest; import org.apache.http.impl.client.CloseableHttpClient; /* * Rotator for http request of major for methods: * * - POST * - GET * - PUT * - DELETE */ public interface Rotator { /* * The rotator could bind to HttpClient ( core ) */ Rotator bind(CloseableHttpClient client); /* * Request data with `Json` parameters, get string response */ String request(IntegrationRequest request, JsonObject params); }
209
892
{ "schema_version": "1.2.0", "id": "GHSA-xjrw-4vxx-f2vq", "modified": "2022-05-13T01:10:53Z", "published": "2022-05-13T01:10:53Z", "aliases": [ "CVE-2018-9024" ], "details": "An improper authentication vulnerability in CA Privileged Access Manager 2.x allows attackers to spoof IP addresses in a log file.", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.0/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:L/A:N" } ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2018-9024" }, { "type": "WEB", "url": "https://support.ca.com/us/product-content/recommended-reading/security-notices/ca20180614-01--security-notice-for-ca-privileged-access-manager.html" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/104496" } ], "database_specific": { "cwe_ids": [ "CWE-287" ], "severity": "MODERATE", "github_reviewed": false } }
484
2,151
<filename>third_party/blink/renderer/core/animation/interpolation_effect_test.cc // Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include <memory> #include "testing/gtest/include/gtest/gtest.h" #include "third_party/blink/renderer/core/animation/animation_test_helper.h" #include "third_party/blink/renderer/core/animation/css_number_interpolation_type.h" #include "third_party/blink/renderer/core/animation/interpolation_effect.h" #include "third_party/blink/renderer/core/animation/transition_interpolation.h" namespace blink { namespace { const double kInterpolationTestDuration = 1.0; double GetInterpolableNumber(scoped_refptr<Interpolation> value) { TransitionInterpolation& interpolation = ToTransitionInterpolation(*value.get()); std::unique_ptr<TypedInterpolationValue> interpolated_value = interpolation.GetInterpolatedValue(); return ToInterpolableNumber(interpolated_value->GetInterpolableValue()) .Value(); } scoped_refptr<Interpolation> CreateInterpolation(int from, int to) { // We require a property that maps to CSSNumberInterpolationType. 'z-index' // suffices for this, and also means we can ignore the AnimatableValues for // the compositor (as z-index isn't compositor-compatible). 
PropertyHandle property_handle(GetCSSPropertyZIndex()); CSSNumberInterpolationType interpolation_type(property_handle); InterpolationValue start(InterpolableNumber::Create(from)); InterpolationValue end(InterpolableNumber::Create(to)); return TransitionInterpolation::Create(property_handle, interpolation_type, std::move(start), std::move(end), nullptr, nullptr); } } // namespace TEST(AnimationInterpolationEffectTest, SingleInterpolation) { InterpolationEffect interpolation_effect; interpolation_effect.AddInterpolation( CreateInterpolation(0, 10), scoped_refptr<TimingFunction>(), 0, 1, -1, 2); Vector<scoped_refptr<Interpolation>> active_interpolations; interpolation_effect.GetActiveInterpolations(-2, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(0ul, active_interpolations.size()); interpolation_effect.GetActiveInterpolations(-0.5, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(1ul, active_interpolations.size()); EXPECT_EQ(-5, GetInterpolableNumber(active_interpolations.at(0))); interpolation_effect.GetActiveInterpolations(0.5, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(1ul, active_interpolations.size()); EXPECT_FLOAT_EQ(5, GetInterpolableNumber(active_interpolations.at(0))); interpolation_effect.GetActiveInterpolations(1.5, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(1ul, active_interpolations.size()); EXPECT_FLOAT_EQ(15, GetInterpolableNumber(active_interpolations.at(0))); interpolation_effect.GetActiveInterpolations(3, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(0ul, active_interpolations.size()); } TEST(AnimationInterpolationEffectTest, MultipleInterpolations) { InterpolationEffect interpolation_effect; interpolation_effect.AddInterpolation( CreateInterpolation(10, 15), scoped_refptr<TimingFunction>(), 1, 2, 1, 3); interpolation_effect.AddInterpolation( CreateInterpolation(0, 1), LinearTimingFunction::Shared(), 0, 1, 0, 1); interpolation_effect.AddInterpolation( 
CreateInterpolation(1, 6), CubicBezierTimingFunction::Preset( CubicBezierTimingFunction::EaseType::EASE), 0.5, 1.5, 0.5, 1.5); Vector<scoped_refptr<Interpolation>> active_interpolations; interpolation_effect.GetActiveInterpolations(-0.5, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(0ul, active_interpolations.size()); interpolation_effect.GetActiveInterpolations(0, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(1ul, active_interpolations.size()); EXPECT_FLOAT_EQ(0, GetInterpolableNumber(active_interpolations.at(0))); interpolation_effect.GetActiveInterpolations(0.5, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(2ul, active_interpolations.size()); EXPECT_FLOAT_EQ(0.5f, GetInterpolableNumber(active_interpolations.at(0))); EXPECT_FLOAT_EQ(1, GetInterpolableNumber(active_interpolations.at(1))); interpolation_effect.GetActiveInterpolations(1, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(2ul, active_interpolations.size()); EXPECT_FLOAT_EQ(10, GetInterpolableNumber(active_interpolations.at(0))); EXPECT_FLOAT_EQ(5.0282884f, GetInterpolableNumber(active_interpolations.at(1))); interpolation_effect.GetActiveInterpolations( 1, kInterpolationTestDuration * 1000, active_interpolations); EXPECT_EQ(2ul, active_interpolations.size()); EXPECT_FLOAT_EQ(10, GetInterpolableNumber(active_interpolations.at(0))); EXPECT_FLOAT_EQ(5.0120168f, GetInterpolableNumber(active_interpolations.at(1))); interpolation_effect.GetActiveInterpolations(1.5, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(1ul, active_interpolations.size()); EXPECT_FLOAT_EQ(12.5f, GetInterpolableNumber(active_interpolations.at(0))); interpolation_effect.GetActiveInterpolations(2, kInterpolationTestDuration, active_interpolations); EXPECT_EQ(1ul, active_interpolations.size()); EXPECT_FLOAT_EQ(15, GetInterpolableNumber(active_interpolations.at(0))); } } // namespace blink
2,486
5,169
<filename>Specs/7/6/1/MaioSDK/1.5.8/MaioSDK.podspec.json { "name": "MaioSDK", "version": "1.5.8", "summary": "MaioSDK for iOS", "description": "MaioSDK is Ad SDK of fullscreen movie", "homepage": "https://maio.jp", "license": { "type": "Copyright", "text": "Copyright 2015 i-mobile" }, "authors": "i-mobile", "source": { "http": "https://github.com/imobile-maio/maio-iOS-SDK/releases/download/v1.5.8/Maio.xcframework.zip", "type": "zip" }, "vendored_frameworks": "Maio.xcframework", "platforms": { "ios": "9.0" }, "pod_target_xcconfig": { "OTHER_LDFLAGS": "-lObjC" }, "frameworks": [ "UIKit", "CoreMedia", "AVFoundation", "SystemConfiguration", "MobileCoreServices", "AdSupport", "StoreKit", "WebKit" ], "libraries": "z" }
375
875
package com.jeecg.p3.system.service; import java.util.List; import org.jeecgframework.p3.core.utils.common.PageList; import org.jeecgframework.p3.core.utils.common.PageQuery; import com.jeecg.p3.system.entity.JwSystemProject; /** * 描述:</b>JwSystemProjectService<br> * @author:pituo * @since:2015年12月21日 17时49分18秒 星期一 * @version:1.0 */ public interface JwSystemProjectService { public void doAdd(JwSystemProject jwSystemProject); public void doEdit(JwSystemProject jwSystemProject); public void doDelete(String id); public JwSystemProject queryById(String id); public PageList<JwSystemProject> queryPageList(PageQuery<JwSystemProject> pageQuery); public Boolean validReat(String code,String id); /** * 查询推荐不推荐的 * @param type * @return */ public List<JwSystemProject> queryListByType(String type); /** * 按照分类ID查询 * @param projectClassifyId * @return */ public List<JwSystemProject> queryListByProjectClassifyId(String projectClassifyId); /** * @功能:通过code查询单条记录 * @作者:liwenhui * @时间:2017-8-30 上午11:44:59 * @修改: * @param code * @return */ public JwSystemProject queryByCode(String code); public List<JwSystemProject> queryProjectCode(); //update-begin--Author:zhangweijian Date: 20180824 for:获取所有活动信息 /** * @功能:获取所有系统项目信息 * @return */ public List<JwSystemProject> getAllProject(); //update-end--Author:zhangweijian Date: 20180824 for:获取所有活动信息 public boolean changeUrl(String newUrl, String jwid); public boolean changeType(String domain); public String getOldHdurl(); }
718
1,014
package nl.anchormen.sql4es.model; import java.util.ArrayList; import java.util.List; import java.util.Properties; public class Utils { // statics public static final int ES_MAJOR_VERSION = 2; public static final int ES_MINOR_VERSION = 1; public static final String ELASTICSEARCH_NAME = "Elasticsearch"; public static final String ELASTICSEARCH_VERSION = "2.1"; public static final String CATALOG_SEPARATOR = "."; public static final int DRIVER_MAJOR_VERSION = 0; public static final int DRIVER_MINOR_VERSION = 5; // connection defaults public static final String PREFIX = "jdbc:sql4es:"; public static final int PORT = 9300; // defaults private static final int FETCH_SIZE = 10000; // 10K is current max for ES private static final int SCROLL_TIMEOUT_SEC = 10; private static final int QUERY_TIMEOUT_MS = 10000; private static final int DEFAULT_ROW_LENGTH = 250; // used during initialization of rows when querying all columns (Select *) private static final String QUERY_CACHE = "query_cache"; private static final String RESULT_NESTED_LATERAL = "true"; private static final int FRAGMENT_SIZE = 100; private static final int FRAGMENT_NUMBER = 1; // property keys public static final String PROP_FETCH_SIZE = "fetch.size"; public static final String PROP_SCROLL_TIMEOUT_SEC = "scroll.timeout.sec"; public static final String PROP_QUERY_TIMEOUT_MS = "query.timeout.ms"; public static final String PROP_DEFAULT_ROW_LENGTH = "default.row.length"; public static final String PROP_QUERY_CACHE_TABLE = "query.cache.table"; public static final String PROP_RESULT_NESTED_LATERAL = "result.nested.lateral"; public static final String PROP_TABLE_COLUMN_MAP = "table.column.info.map"; public static final String PROP_FRAGMENT_SIZE = "fragment.size"; public static final String PROP_FRAGMENT_NUMBER = "fragment.number"; public static String getLoggingInfo(){ StackTraceElement element = Thread.currentThread().getStackTrace()[2]; return element.getClassName()+"."+element.getMethodName()+" 
["+element.getLineNumber()+"]"; } public static List<Object> clone(List<Object> list){ List<Object> copy = new ArrayList<Object>(list.size()); for(Object o : list) copy.add(o); return copy; } public static Properties defaultProps(){ Properties defaults = new Properties(); defaults.put(PROP_FETCH_SIZE, FETCH_SIZE); defaults.put(PROP_SCROLL_TIMEOUT_SEC, SCROLL_TIMEOUT_SEC); defaults.put(PROP_DEFAULT_ROW_LENGTH, DEFAULT_ROW_LENGTH); defaults.put(PROP_QUERY_CACHE_TABLE, QUERY_CACHE); defaults.put(PROP_QUERY_TIMEOUT_MS, QUERY_TIMEOUT_MS); defaults.put(PROP_RESULT_NESTED_LATERAL, RESULT_NESTED_LATERAL); defaults.put(PROP_FRAGMENT_SIZE, FRAGMENT_SIZE); defaults.put(PROP_FRAGMENT_NUMBER, FRAGMENT_NUMBER); return defaults; } /** * Retrieves the integer property with given name from the properties * @param props * @param name * @param def * @return */ public static int getIntProp(Properties props, String name, int def){ if(!props.containsKey(name)) return def; try { return Integer.parseInt(props.getProperty(name)); } catch (Exception e) { return def; } } /** * Retrieves the integer property with given name from the properties * @param props * @param name * @param def * @return */ public static boolean getBooleanProp(Properties props, String name, boolean def){ if(!props.containsKey(name)) return def; try { return Boolean.parseBoolean( props.getProperty(name) ); } catch (Exception e) { return def; } } public static Object getObjectProperty(Properties props, String name) { return props.get(name); } public static void sleep(int millis) { try{ Thread.sleep(millis); }catch(Exception e){} } }
1,316
335
{ "word": "Hallucinogenic", "definitions": [ "A drug that causes hallucinations, such as LSD." ], "parts-of-speech": "Noun" }
64
1,738
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ // Original file Copyright Crytek GMBH or its affiliates, used under license. #include "StdAfx.h" #include "IShader.h" #include "MaterialHelpers.h" /* ----------------------------------------------------------------------- * These functions are used in Cry3DEngine, CrySystem, CryRenderD3D11, * Editor, ResourceCompilerMaterial and more */ ////////////////////////////////////////////////////////////////////////// namespace { static struct { EEfResTextures slot; const char* ename; bool adjustable; const char* name; const char* description; const char* suffix; } s_TexSlotSemantics[] = { // NOTE: must be in order with filled holes to allow direct lookup { EFTT_DIFFUSE, "EFTT_DIFFUSE", true, "Diffuse" , "Base surface color. Alpha mask is contained in alpha channel." , "_diff" }, { EFTT_NORMALS, "EFTT_NORMALS", true, "Bumpmap" , "Normal direction for each pixel simulating bumps on the surface. Smoothness map contained in alpha channel." , "_ddn" }, // Ideally "Normal" but need to keep backwards-compatibility { EFTT_SPECULAR, "EFTT_SPECULAR", true, "Specular" , "Reflective and shininess intensity and color of reflective highlights" , "_spec" }, { EFTT_ENV, "EFTT_ENV", true, "Environment" , "Deprecated" , "_cm" }, { EFTT_DETAIL_OVERLAY, "EFTT_DETAIL_OVERLAY", true, "Detail" , "Increases micro and macro surface bump, diffuse and gloss detail. To use, enable the 'Detail Mapping' shader gen param. 
" , "_detail" }, { EFTT_SECOND_SMOOTHNESS, "EFTT_SECOND_SMOOTHNESS", false, "SecondSmoothness" , "" , "" }, { EFTT_HEIGHT, "EFTT_HEIGHT", true, "Heightmap" , "Height for offset bump, POM, silhouette POM, and displacement mapping defined by a Grayscale texture" , "_displ" }, { EFTT_DECAL_OVERLAY, "EFTT_DECAL_OVERLAY", true, "Decal" , "" , "" }, // called "DecalOverlay" in the shaders { EFTT_SUBSURFACE, "EFTT_SUBSURFACE", true, "SubSurface" , "" , "_sss" }, // called "Subsurface" in the shaders { EFTT_CUSTOM, "EFTT_CUSTOM", true, "Custom" , "" , "" }, // called "CustomMap" in the shaders { EFTT_CUSTOM_SECONDARY, "EFTT_CUSTOM_SECONDARY", true, "[1] Custom" , "" , "" }, { EFTT_OPACITY, "EFTT_OPACITY", true, "Opacity" , "SubSurfaceScattering map to simulate thin areas for light to penetrate" , "" }, { EFTT_SMOOTHNESS, "EFTT_SMOOTHNESS", false, "Smoothness" , "" , "_ddna" }, { EFTT_EMITTANCE, "EFTT_EMITTANCE", true, "Emittance" , "Multiplies the emissive color with RGB texture. Emissive alpha mask is contained in alpha channel." 
, "_em" }, { EFTT_OCCLUSION, "EFTT_OCCLUSION", true, "Occlusion" , "Grayscale texture to mask diffuse lighting response and simulate darker areas" , "" }, { EFTT_SPECULAR_2, "EFTT_SPECULAR_2", true, "Specular2" , "" , "_spec" }, // Backwards compatible names are found here and mapped to the updated enum { EFTT_NORMALS, "EFTT_BUMP", false, "Normal" , "" , "" }, // called "Bump" in the shaders { EFTT_SMOOTHNESS, "EFTT_GLOSS_NORMAL_A", false, "GlossNormalA" , "" , "" }, { EFTT_HEIGHT, "EFTT_BUMPHEIGHT", false, "Height" , "" , "" }, // called "BumpHeight" in the shaders // This is the terminator for the name-search { EFTT_UNKNOWN, "EFTT_UNKNOWN", false, NULL , "" }, }; #if 0 static class Verify { public: Verify() { for (int i = 0; s_TexSlotSemantics[i].name; i++) { if (s_TexSlotSemantics[i].slot != i) { throw std::runtime_error("Invalid texture slot lookup array."); } } } } s_VerifyTexSlotSemantics; #endif } // This should be done per shader (hence, semantics lookup map should be constructed per shader type) EEfResTextures MaterialHelpers::FindTexSlot(const char* texName) const { for (int i = 0; s_TexSlotSemantics[i].name; i++) { if (azstricmp(s_TexSlotSemantics[i].name, texName) == 0) { return s_TexSlotSemantics[i].slot; } } return EFTT_UNKNOWN; } const char* MaterialHelpers::FindTexName(EEfResTextures texSlot) const { for (int i = 0; s_TexSlotSemantics[i].name; i++) { if (s_TexSlotSemantics[i].slot == texSlot) { return s_TexSlotSemantics[i].name; } } return NULL; } const char* MaterialHelpers::LookupTexName(EEfResTextures texSlot) const { assert((texSlot >= 0) && (texSlot < EFTT_MAX)); return s_TexSlotSemantics[texSlot].name; } const char* MaterialHelpers::LookupTexDesc(EEfResTextures texSlot) const { assert((texSlot >= 0) && (texSlot < EFTT_MAX)); return s_TexSlotSemantics[texSlot].description; } const char* MaterialHelpers::LookupTexEnum(EEfResTextures texSlot) const { assert((texSlot >= 0) && (texSlot < EFTT_MAX)); return s_TexSlotSemantics[texSlot].ename; } const 
char* MaterialHelpers::LookupTexSuffix(EEfResTextures texSlot) const { assert((texSlot >= 0) && (texSlot < EFTT_MAX)); return s_TexSlotSemantics[texSlot].suffix; } bool MaterialHelpers::IsAdjustableTexSlot(EEfResTextures texSlot) const { assert((texSlot >= 0) && (texSlot < EFTT_MAX)); return s_TexSlotSemantics[texSlot].adjustable; } ////////////////////////////////////////////////////////////////////////// // [Shader System TO DO] - automate these lookups to be data driven! bool MaterialHelpers::SetGetMaterialParamFloat(IRenderShaderResources& pShaderResources, const char* sParamName, float& v, bool bGet) const { EEfResTextures texSlot = EFTT_UNKNOWN; if (!azstricmp("emissive_intensity", sParamName)) { texSlot = EFTT_EMITTANCE; } else if (!azstricmp("shininess", sParamName)) { texSlot = EFTT_SMOOTHNESS; } else if (!azstricmp("opacity", sParamName)) { texSlot = EFTT_OPACITY; } if (!azstricmp("alpha", sParamName)) { if (bGet) { v = pShaderResources.GetAlphaRef(); } else { pShaderResources.SetAlphaRef(v); } return true; } else if (texSlot != EFTT_UNKNOWN) { if (bGet) { v = pShaderResources.GetStrengthValue(texSlot); } else { pShaderResources.SetStrengthValue(texSlot, v); } return true; } return false; } ////////////////////////////////////////////////////////////////////////// bool MaterialHelpers::SetGetMaterialParamVec3(IRenderShaderResources& pShaderResources, const char* sParamName, Vec3& v, bool bGet) const { EEfResTextures texSlot = EFTT_UNKNOWN; if (!azstricmp("diffuse", sParamName)) { texSlot = EFTT_DIFFUSE; } else if (!azstricmp("specular", sParamName)) { texSlot = EFTT_SPECULAR; } else if (!azstricmp("emissive_color", sParamName)) { texSlot = EFTT_EMITTANCE; } if (texSlot != EFTT_UNKNOWN) { if (bGet) { v = pShaderResources.GetColorValue(texSlot).toVec3(); } else { pShaderResources.SetColorValue(texSlot, ColorF(v, 1.0f)); } return true; } return false; } ////////////////////////////////////////////////////////////////////////// void 
MaterialHelpers::SetTexModFromXml(SEfTexModificator& pTextureModifier, const XmlNodeRef& modNode) const { // Modificators float f; uint8 c; modNode->getAttr("TexMod_RotateType", pTextureModifier.m_eRotType); modNode->getAttr("TexMod_TexGenType", pTextureModifier.m_eTGType); modNode->getAttr("TexMod_bTexGenProjected", pTextureModifier.m_bTexGenProjected); for (int baseu = 'U', u = baseu; u <= 'W'; u++) { char RT[] = "Rotate?"; RT[6] = u; if (modNode->getAttr(RT, f)) { pTextureModifier.m_Rot [u - baseu] = Degr2Word(f); } char RR[] = "TexMod_?RotateRate"; RR[7] = u; char RP[] = "TexMod_?RotatePhase"; RP[7] = u; char RA[] = "TexMod_?RotateAmplitude"; RA[7] = u; char RC[] = "TexMod_?RotateCenter"; RC[7] = u; if (modNode->getAttr(RR, f)) { pTextureModifier.m_RotOscRate [u - baseu] = Degr2Word(f); } if (modNode->getAttr(RP, f)) { pTextureModifier.m_RotOscPhase [u - baseu] = Degr2Word(f); } if (modNode->getAttr(RA, f)) { pTextureModifier.m_RotOscAmplitude[u - baseu] = Degr2Word(f); } if (modNode->getAttr(RC, f)) { pTextureModifier.m_RotOscCenter [u - baseu] = f; } if (u > 'V') { continue; } char TL[] = "Tile?"; TL[4] = u; char OF[] = "Offset?"; OF[6] = u; if (modNode->getAttr(TL, f)) { pTextureModifier.m_Tiling [u - baseu] = f; } if (modNode->getAttr(OF, f)) { pTextureModifier.m_Offs [u - baseu] = f; } char OT[] = "TexMod_?OscillatorType"; OT[7] = u; char OR[] = "TexMod_?OscillatorRate"; OR[7] = u; char OP[] = "TexMod_?OscillatorPhase"; OP[7] = u; char OA[] = "TexMod_?OscillatorAmplitude"; OA[7] = u; if (modNode->getAttr(OT, c)) { pTextureModifier.m_eMoveType [u - baseu] = c; } if (modNode->getAttr(OR, f)) { pTextureModifier.m_OscRate [u - baseu] = f; } if (modNode->getAttr(OP, f)) { pTextureModifier.m_OscPhase [u - baseu] = f; } if (modNode->getAttr(OA, f)) { pTextureModifier.m_OscAmplitude [u - baseu] = f; } } } ////////////////////////////////////////////////////////////////////////// static SEfTexModificator defaultTexMod; static bool defaultTexMod_Initialized = false; 
//////////////////////////////////////////////////////////////////////////
// Writes a texture modificator to XML as a <TexMod> child of 'node'.
// Values are diffed against a lazily-initialized default modificator
// (zeroed, tiling = 1); if the whole struct is bit-identical to the
// default, no <TexMod> node is emitted at all.
void MaterialHelpers::SetXmlFromTexMod(const SEfTexModificator& pTextureModifier, XmlNodeRef& node) const
{
    // One-time construction of the "all defaults" modificator used for diffing.
    if (!defaultTexMod_Initialized)
    {
        ZeroStruct(defaultTexMod);
        defaultTexMod.m_Tiling[0] = 1;
        defaultTexMod.m_Tiling[1] = 1;
        defaultTexMod_Initialized = true;
    }

    // Bitwise comparison against the defaults: nothing to save if identical.
    if (memcmp(&pTextureModifier, &defaultTexMod, sizeof(pTextureModifier)) == 0)
    {
        return;
    }

    XmlNodeRef modNode = node->newChild("TexMod");
    if (modNode)
    {
        // Modificators
        float f;
        uint16 s;
        uint8 c;

        modNode->setAttr("TexMod_RotateType", pTextureModifier.m_eRotType);
        modNode->setAttr("TexMod_TexGenType", pTextureModifier.m_eTGType);
        modNode->setAttr("TexMod_bTexGenProjected", pTextureModifier.m_bTexGenProjected);

        // Per-axis attributes: the '?' placeholder in each name template is
        // patched with 'U', 'V' or 'W'. Rotations are stored as degrees via
        // Word2Degr(); only values differing from the default are written.
        for (int baseu = 'U', u = baseu; u <= 'W'; u++)
        {
            char RT[] = "Rotate?";
            RT[6] = u;
            if ((s = pTextureModifier.m_Rot[u - baseu]) != defaultTexMod.m_Rot[u - baseu])
            {
                modNode->setAttr(RT, Word2Degr(s));
            }
            char RR[] = "TexMod_?RotateRate";
            RR[7] = u;
            char RP[] = "TexMod_?RotatePhase";
            RP[7] = u;
            char RA[] = "TexMod_?RotateAmplitude";
            RA[7] = u;
            char RC[] = "TexMod_?RotateCenter";
            RC[7] = u;
            if ((s = pTextureModifier.m_RotOscRate[u - baseu]) != defaultTexMod.m_RotOscRate[u - baseu])
            {
                modNode->setAttr(RR, Word2Degr(s));
            }
            if ((s = pTextureModifier.m_RotOscPhase[u - baseu]) != defaultTexMod.m_RotOscPhase[u - baseu])
            {
                modNode->setAttr(RP, Word2Degr(s));
            }
            if ((s = pTextureModifier.m_RotOscAmplitude[u - baseu]) != defaultTexMod.m_RotOscAmplitude[u - baseu])
            {
                modNode->setAttr(RA, Word2Degr(s));
            }
            if ((f = pTextureModifier.m_RotOscCenter[u - baseu]) != defaultTexMod.m_RotOscCenter[u - baseu])
            {
                modNode->setAttr(RC, f);
            }

            // Tiling/offset/oscillator attributes exist for U and V only.
            if (u > 'V')
            {
                continue;
            }

            char TL[] = "Tile?";
            TL[4] = u;
            char OF[] = "Offset?";
            OF[6] = u;
            if ((f = pTextureModifier.m_Tiling[u - baseu]) != defaultTexMod.m_Tiling[u - baseu])
            {
                modNode->setAttr(TL, f);
            }
            if ((f = pTextureModifier.m_Offs[u - baseu]) != defaultTexMod.m_Offs[u - baseu])
            {
                modNode->setAttr(OF, f);
            }
            char OT[] = "TexMod_?OscillatorType";
            OT[7] = u;
            char OR[] = "TexMod_?OscillatorRate";
            OR[7] = u;
            char OP[] = "TexMod_?OscillatorPhase";
            OP[7] = u;
            char OA[] = "TexMod_?OscillatorAmplitude";
            OA[7] = u;
            if ((c = pTextureModifier.m_eMoveType[u - baseu]) != defaultTexMod.m_eMoveType[u - baseu])
            {
                modNode->setAttr(OT, c);
            }
            if ((f = pTextureModifier.m_OscRate[u - baseu]) != defaultTexMod.m_OscRate[u - baseu])
            {
                modNode->setAttr(OR, f);
            }
            if ((f = pTextureModifier.m_OscPhase[u - baseu]) != defaultTexMod.m_OscPhase[u - baseu])
            {
                modNode->setAttr(OP, f);
            }
            if ((f = pTextureModifier.m_OscAmplitude[u - baseu]) != defaultTexMod.m_OscAmplitude[u - baseu])
            {
                modNode->setAttr(OA, f);
            }
        }
    }
}

//////////////////////////////////////////////////////////////////////////
// Loads texture assignments from the <Textures> child of 'node' into the
// shader resources' texture-resource map. Unknown slot names are skipped;
// legacy "engine/" prefixes and leading slashes are stripped from paths.
void MaterialHelpers::SetTexturesFromXml(SInputShaderResources& pShaderResources, const XmlNodeRef& node) const
{
    const char* texmap = "";
    const char* fileName = "";
    XmlNodeRef texturesNode = node->findChild("Textures");
    if (texturesNode)
    {
        for (int c = 0; c < texturesNode->getChildCount(); c++)
        {
            XmlNodeRef texNode = texturesNode->getChild(c);
            texmap = texNode->getAttr("Map");

            // [Shader System TO DO] - this must become per shader (and not global) according to the parser
            uint8 texSlot = MaterialHelpers::FindTexSlot(texmap);

            // [Shader System TO DO] - in the new system simply gather texture slot names, then identify name usage
            // and accordingly match the slot (dynamically associated per shader by the parser).
            if (texSlot == EFTT_UNKNOWN)
            {
                continue;
            }

            fileName = texNode->getAttr("File");

            // legacy. Some textures used to be referenced using "engine\\" or "engine/" - this is no longer valid
            if (
                (strlen(fileName) > 7) &&
                (azstrnicmp(fileName, "engine", 6) == 0) &&
                ((fileName[6] == '\\') || (fileName[6] == '/'))
                )
            {
                fileName = fileName + 7;
            }

            // legacy: Files were saved into a mtl with many leading forward or back slashes, we eat them all here. We want it to start with a rel path.
            const char* actualFileName = fileName;
            while ((actualFileName[0]) && ((actualFileName[0] == '\\') || (actualFileName[0] == '/')))
            {
                ++actualFileName;
            }
            fileName = actualFileName;

            // Next insert the texture resource if did not exist
            // (operator[] on the map creates the slot entry on first access).
            TexturesResourcesMap* pTextureReourcesMap = pShaderResources.GetTexturesResourceMap();
            SEfResTexture* pTextureRes = &(*pTextureReourcesMap)[texSlot];
            pTextureRes->m_Name = fileName;
            texNode->getAttr("IsTileU", pTextureRes->m_bUTile);
            texNode->getAttr("IsTileV", pTextureRes->m_bVTile);
            texNode->getAttr("TexType", pTextureRes->m_Sampler.m_eTexType);
            int filter = pTextureRes->m_Filter;
            if (texNode->getAttr("Filter", filter))
            {
                pTextureRes->m_Filter = filter;
            }

            // Next look for modulation node - add it only if exist
            XmlNodeRef modNode = texNode->findChild("TexMod");
            if (modNode)
                SetTexModFromXml( *(pTextureRes->AddModificator()), modNode);
        }
    }
}

//////////////////////////////////////////////////////////////////////////
// Default-constructed instances used below purely to diff against when
// deciding which attributes are worth writing out.
static SInputShaderResources defaultShaderResource;      // for comparison with the default values
static SEfResTexture defaultTextureResource;             // for comparison with the default values

// Writes every named, adjustable texture slot to a <Textures> child node.
// Per-texture attributes are only emitted when they differ from the
// defaults; modificators are delegated to SetXmlFromTexMod().
void MaterialHelpers::SetXmlFromTextures( SInputShaderResources& pShaderResources, XmlNodeRef& node) const
{
    // Save texturing data.
    XmlNodeRef texturesNode = node->newChild("Textures");
    for (auto& iter : *(pShaderResources.GetTexturesResourceMap()) )
    {
        EEfResTextures texId = static_cast<EEfResTextures>(iter.first);
        const SEfResTexture* pTextureRes = &(iter.second);
        if (pTextureRes && !pTextureRes->m_Name.empty() && IsAdjustableTexSlot(texId))
        {
            XmlNodeRef texNode = texturesNode->newChild("Texture");
            texNode->setAttr("Map", MaterialHelpers::LookupTexName(texId));
            texNode->setAttr("File", pTextureRes->m_Name.c_str());

            if (pTextureRes->m_Filter != defaultTextureResource.m_Filter)
            {
                texNode->setAttr("Filter", pTextureRes->m_Filter);
            }
            if (pTextureRes->m_bUTile != defaultTextureResource.m_bUTile)
            {
                texNode->setAttr("IsTileU", pTextureRes->m_bUTile);
            }
            if (pTextureRes->m_bVTile != defaultTextureResource.m_bVTile)
            {
                texNode->setAttr("IsTileV", pTextureRes->m_bVTile);
            }
            if (pTextureRes->m_Sampler.m_eTexType != defaultTextureResource.m_Sampler.m_eTexType)
            {
                texNode->setAttr("TexType", pTextureRes->m_Sampler.m_eTexType);
            }

            //////////////////////////////////////////////////////////////////////////
            // Save texture modificators Modificators
            //////////////////////////////////////////////////////////////////////////
            SetXmlFromTexMod( *pTextureRes->GetModificator(), texNode);
        }
        /* [Shader System] - TO DO: test to see if slots can be removed
        else
        {
            AZ_Assert(!pTextureRes->m_Name.empty(), "Shader resource texture error - Texture exists without a name");
        }
        */
    }
}

//////////////////////////////////////////////////////////////////////////
// Reads vertex-deformation settings from the <VertexDeform> child of
// 'node' into pShaderResources.m_DeformInfo.
// NOTE(review): despite being a "FromXml" loader this also calls
// node->setAttr("vertModifType", ...) on the input node when the type
// differs from the default - looks intentional (normalizing legacy data)
// but worth confirming against callers.
void MaterialHelpers::SetVertexDeformFromXml(SInputShaderResources& pShaderResources, const XmlNodeRef& node) const
{
    if (defaultShaderResource.m_DeformInfo.m_eType != pShaderResources.m_DeformInfo.m_eType)
    {
        node->setAttr("vertModifType", pShaderResources.m_DeformInfo.m_eType);
    }

    XmlNodeRef deformNode = node->findChild("VertexDeform");
    if (deformNode)
    {
        int deform_type = eDT_Unknown;
        deformNode->getAttr("Type", deform_type);
        pShaderResources.m_DeformInfo.m_eType = (EDeformType)deform_type;
        deformNode->getAttr("DividerX", pShaderResources.m_DeformInfo.m_fDividerX);
        deformNode->getAttr("NoiseScale", pShaderResources.m_DeformInfo.m_vNoiseScale);

        // Wave parameters for the X axis only; other axes are not persisted here.
        XmlNodeRef waveX = deformNode->findChild("WaveX");
        if (waveX)
        {
            int type = eWF_None;
            waveX->getAttr("Type", type);
            pShaderResources.m_DeformInfo.m_WaveX.m_eWFType = (EWaveForm)type;
            waveX->getAttr("Amp", pShaderResources.m_DeformInfo.m_WaveX.m_Amp);
            waveX->getAttr("Level", pShaderResources.m_DeformInfo.m_WaveX.m_Level);
            waveX->getAttr("Phase", pShaderResources.m_DeformInfo.m_WaveX.m_Phase);
            waveX->getAttr("Freq", pShaderResources.m_DeformInfo.m_WaveX.m_Freq);
        }
    }
}

//////////////////////////////////////////////////////////////////////////
// Writes vertex-deformation settings to XML. The type attribute is always
// written; the <VertexDeform> child (with WaveX parameters) is only
// emitted for a known deformation type.
void MaterialHelpers::SetXmlFromVertexDeform(const SInputShaderResources& pShaderResources, XmlNodeRef& node) const
{
    int vertModif = pShaderResources.m_DeformInfo.m_eType;
    node->setAttr("vertModifType", vertModif);

    if (pShaderResources.m_DeformInfo.m_eType != eDT_Unknown)
    {
        XmlNodeRef deformNode = node->newChild("VertexDeform");
        deformNode->setAttr("Type", pShaderResources.m_DeformInfo.m_eType);
        deformNode->setAttr("DividerX", pShaderResources.m_DeformInfo.m_fDividerX);
        deformNode->setAttr("NoiseScale", pShaderResources.m_DeformInfo.m_vNoiseScale);

        XmlNodeRef waveX = deformNode->newChild("WaveX");
        waveX->setAttr("Type", pShaderResources.m_DeformInfo.m_WaveX.m_eWFType);
        waveX->setAttr("Amp", pShaderResources.m_DeformInfo.m_WaveX.m_Amp);
        waveX->setAttr("Level", pShaderResources.m_DeformInfo.m_WaveX.m_Level);
        waveX->setAttr("Phase", pShaderResources.m_DeformInfo.m_WaveX.m_Phase);
        waveX->setAttr("Freq", pShaderResources.m_DeformInfo.m_WaveX.m_Freq);
    }
}

//////////////////////////////////////////////////////////////////////////
// Helper: RGB vector -> ColorF (alpha handling is up to ColorF's ctor).
static inline ColorF ToCFColor(const Vec3& col)
{
    return ColorF(col);
}

// Loads lighting/material attributes from 'node'. Color attributes accept
// either a Vec4 (RGBA) or, for backward compatibility, a Vec3 (RGB).
void MaterialHelpers::SetLightingFromXml(SInputShaderResources& pShaderResources, const XmlNodeRef& node) const
{
    // Load lighting data.
    Vec3 vColor;
    Vec4 vColor4;
    if (node->getAttr("Diffuse", vColor4))
    {
        pShaderResources.m_LMaterial.m_Diffuse = ColorF(vColor4.x, vColor4.y, vColor4.z, vColor4.w);
    }
    else if (node->getAttr("Diffuse", vColor))
    {
        pShaderResources.m_LMaterial.m_Diffuse = ToCFColor(vColor);
    }

    if (node->getAttr("Specular", vColor4))
    {
        pShaderResources.m_LMaterial.m_Specular = ColorF(vColor4.x, vColor4.y, vColor4.z, vColor4.w);
    }
    else if (node->getAttr("Specular", vColor))
    {
        pShaderResources.m_LMaterial.m_Specular = ToCFColor(vColor);
    }

    // Emittance has no legacy Vec3 fallback.
    if (node->getAttr("Emittance", vColor4))
    {
        pShaderResources.m_LMaterial.m_Emittance = ColorF(vColor4.x, vColor4.y, vColor4.z, vColor4.w);
    }

    node->getAttr("Shininess", pShaderResources.m_LMaterial.m_Smoothness);
    node->getAttr("Opacity", pShaderResources.m_LMaterial.m_Opacity);
    node->getAttr("AlphaTest", pShaderResources.m_AlphaRef);
    node->getAttr("VoxelCoverage", pShaderResources.m_VoxelCoverage);
}

//////////////////////////////////////////////////////////////////////////
// Helpers: ColorF -> plain vectors for XML attribute storage.
static inline Vec3 ToVec3(const ColorF& col)
{
    return Vec3(col.r, col.g, col.b);
}

static inline Vec4 ToVec4(const ColorF& col)
{
    return Vec4(col.r, col.g, col.b, col.a);
}

// Writes lighting/material attributes to 'node'; each attribute is only
// emitted when it differs from the default-constructed shader resource.
void MaterialHelpers::SetXmlFromLighting(const SInputShaderResources& pShaderResources, XmlNodeRef& node) const
{
    // Save lighting data.
    if (defaultShaderResource.m_LMaterial.m_Diffuse != pShaderResources.m_LMaterial.m_Diffuse)
    {
        node->setAttr("Diffuse", ToVec4(pShaderResources.m_LMaterial.m_Diffuse));
    }
    if (defaultShaderResource.m_LMaterial.m_Specular != pShaderResources.m_LMaterial.m_Specular)
    {
        node->setAttr("Specular", ToVec4(pShaderResources.m_LMaterial.m_Specular));
    }
    if (defaultShaderResource.m_LMaterial.m_Emittance != pShaderResources.m_LMaterial.m_Emittance)
    {
        node->setAttr("Emittance", ToVec4(pShaderResources.m_LMaterial.m_Emittance));
    }
    if (defaultShaderResource.m_LMaterial.m_Opacity != pShaderResources.m_LMaterial.m_Opacity)
    {
        node->setAttr("Opacity", pShaderResources.m_LMaterial.m_Opacity);
    }
    if (defaultShaderResource.m_LMaterial.m_Smoothness != pShaderResources.m_LMaterial.m_Smoothness)
    {
        node->setAttr("Shininess", pShaderResources.m_LMaterial.m_Smoothness);
    }
    if (defaultShaderResource.m_AlphaRef != pShaderResources.m_AlphaRef)
    {
        node->setAttr("AlphaTest", pShaderResources.m_AlphaRef);
    }
    if (defaultShaderResource.m_VoxelCoverage != pShaderResources.m_VoxelCoverage)
    {
        node->setAttr("VoxelCoverage", pShaderResources.m_VoxelCoverage);
    }
}

//////////////////////////////////////////////////////////////////////////
// Loads shader public parameters from the attributes of 'node'. A known
// parameter is updated in place according to its declared type; an unknown
// attribute is appended as a new param parsed as up to four floats.
void MaterialHelpers::SetShaderParamsFromXml(SInputShaderResources& pShaderResources, const XmlNodeRef& node) const
{
    int nA = node->getNumAttributes();
    if (!nA)
    {
        return;
    }

    for (int i = 0; i < nA; i++)
    {
        const char* key = NULL, * val = NULL;
        node->getAttributeByIndex(i, &key, &val);

        // try to set existing param first
        bool bFound = false;
        for (int j = 0; j < pShaderResources.m_ShaderParams.size(); j++)
        {
            SShaderParam* pParam = &pShaderResources.m_ShaderParams[j];
            if (pParam->m_Name == key)
            {
                bFound = true;
                switch (pParam->m_Type)
                {
                case eType_BYTE:
                    node->getAttr(key, pParam->m_Value.m_Byte);
                    break;
                case eType_SHORT:
                    node->getAttr(key, pParam->m_Value.m_Short);
                    break;
                case eType_INT:
                    node->getAttr(key, pParam->m_Value.m_Int);
                    break;
                case eType_FLOAT:
                    node->getAttr(key, pParam->m_Value.m_Float);
                    break;
                case eType_FCOLOR:
                case eType_FCOLORA:
                {
                    Vec3 vValue;
                    node->getAttr(key, vValue);
                    pParam->m_Value.m_Color[0] = vValue.x;
                    pParam->m_Value.m_Color[1] = vValue.y;
                    pParam->m_Value.m_Color[2] = vValue.z;
                }
                break;
                case eType_VECTOR:
                {
                    // Prefer a 4-component value; fall back to Vec3 with w = 1.
                    Vec4 vValue;
                    if (node->getAttr(key, vValue))
                    {
                        pParam->m_Value.m_Color[0] = vValue.x;
                        pParam->m_Value.m_Color[1] = vValue.y;
                        pParam->m_Value.m_Color[2] = vValue.z;
                        pParam->m_Value.m_Color[3] = vValue.w;
                    }
                    else
                    {
                        Vec3 vValue3;
                        if (node->getAttr(key, vValue3))
                        {
                            pParam->m_Value.m_Color[0] = vValue3.x;
                            pParam->m_Value.m_Color[1] = vValue3.y;
                            pParam->m_Value.m_Color[2] = vValue3.z;
                            pParam->m_Value.m_Color[3] = 1.0f;
                        }
                    }
                }
                break;
                default:
                    break;
                }
            }
        }

        if (!bFound)
        {
            assert(val && key);
            SShaderParam Param;
            Param.m_Name = key;
            Param.m_Value.m_Color[0] = Param.m_Value.m_Color[1] = Param.m_Value.m_Color[2] = Param.m_Value.m_Color[3] = 0;
            // Accepts 1-4 comma-separated floats; azsscanf fills what it can.
            int res = azsscanf(val, "%f,%f,%f,%f", &Param.m_Value.m_Color[0], &Param.m_Value.m_Color[1], &Param.m_Value.m_Color[2], &Param.m_Value.m_Color[3]);
            assert(res);
            pShaderResources.m_ShaderParams.push_back(Param);
        }
    }
}

//////////////////////////////////////////////////////////////////////////
// Writes shader public parameters as attributes of 'node', one attribute
// per parameter, formatted according to the declared parameter type.
// NOTE(review): FCOLOR reads m_Color while VECTOR reads m_Vector, and both
// serialize only three components (alpha/w is dropped) - confirm this
// asymmetry is intended before changing either side.
void MaterialHelpers::SetXmlFromShaderParams(const SInputShaderResources& pShaderResources, XmlNodeRef& node) const
{
    for (int i = 0; i < pShaderResources.m_ShaderParams.size(); i++)
    {
        const SShaderParam* pParam = &pShaderResources.m_ShaderParams[i];
        switch (pParam->m_Type)
        {
        case eType_BYTE:
            node->setAttr(pParam->m_Name.c_str(), (int)pParam->m_Value.m_Byte);
            break;
        case eType_SHORT:
            node->setAttr(pParam->m_Name.c_str(), (int)pParam->m_Value.m_Short);
            break;
        case eType_INT:
            node->setAttr(pParam->m_Name.c_str(), (int)pParam->m_Value.m_Int);
            break;
        case eType_FLOAT:
            node->setAttr(pParam->m_Name.c_str(), (float)pParam->m_Value.m_Float);
            break;
        case eType_FCOLOR:
            node->setAttr(pParam->m_Name.c_str(), Vec3(pParam->m_Value.m_Color[0], pParam->m_Value.m_Color[1], pParam->m_Value.m_Color[2]));
            break;
        case eType_VECTOR:
            node->setAttr(pParam->m_Name.c_str(), Vec3(pParam->m_Value.m_Vector[0], pParam->m_Value.m_Vector[1], pParam->m_Value.m_Vector[2]));
            break;
        default:
            break;
        }
    }
}

//------------------------------------------------------------------------------
// [Shader System TO DO] - the following function supports older version of data
// and converts them.
// This needs to go away soon!
//------------------------------------------------------------------------------
// One-shot migration of legacy material XML:
//  * pre-3.8.3 "GlowAmount" is converted into an emittance texture slot plus
//    an emissive intensity (clamped to EMISSIVE_INTENSITY_SOFT_MAX);
//  * pre-1.9 scalar "BlendLayer2Specular" is rewritten as a Vec4 color.
void MaterialHelpers::MigrateXmlLegacyData(SInputShaderResources& pShaderResources, const XmlNodeRef& node) const
{
    float glowAmount;

    // Migrate glow from 3.8.3 to emittance
    if (node->getAttr("GlowAmount", glowAmount) && glowAmount > 0)
    {
        SEfResTexture* pTextureRes = pShaderResources.GetTextureResource(EFTT_DIFFUSE);
        if (pTextureRes && (pTextureRes->m_Sampler.m_eTexType == eTT_2D))
        {
            // The following line will create and insert a new texture data slot if did not exist.
            pShaderResources.m_TexturesResourcesMap[EFTT_EMITTANCE].m_Name = pTextureRes->m_Name;
        }

        const float legacyHDRDynMult = 2.0f;
        const float legacyIntensityScale = 10.0f; // Legacy scale factor 10000 divided by 1000 for kilonits

        // Clamp this at EMISSIVE_INTENSITY_SOFT_MAX because some previous glow parameters become extremely bright.
        pShaderResources.m_LMaterial.m_Emittance.a = min(powf(glowAmount * legacyHDRDynMult, legacyHDRDynMult) * legacyIntensityScale, EMISSIVE_INTENSITY_SOFT_MAX);

        std::string materialName = node->getAttr("Name");
        CryWarning(VALIDATOR_MODULE_3DENGINE, VALIDATOR_WARNING, "Material %s has had legacy GlowAmount automatically converted to Emissive Intensity. The material parameters related to Emittance should be manually adjusted for this material.", materialName.c_str());
    }

    // In Lumberyard version 1.9 BlendLayer2Specular became a color instead of a single float, so it needs to be updated
    XmlNodeRef publicParamsNode = node->findChild("PublicParams");
    if (publicParamsNode && publicParamsNode->haveAttr("BlendLayer2Specular"))
    {
        // Check to see if the BlendLayer2Specular is a float
        AZStd::string blendLayer2SpecularString(publicParamsNode->getAttr("BlendLayer2Specular"));
        // If there are no commas in the string representation, it must be a single float instead of a color
        if (blendLayer2SpecularString.find(',') == AZStd::string::npos)
        {
            float blendLayer2SpecularFloat = 0.0f;
            publicParamsNode->getAttr("BlendLayer2Specular", blendLayer2SpecularFloat);
            publicParamsNode->setAttr("BlendLayer2Specular", Vec4(blendLayer2SpecularFloat, blendLayer2SpecularFloat, blendLayer2SpecularFloat, 0.0));
        }
    }
}
15,839
5,379
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

#include "brpc/details/health_check.h"
#include "brpc/socket.h"
#include "brpc/channel.h"
#include "brpc/controller.h"
#include "brpc/details/controller_private_accessor.h"
#include "brpc/global.h"
#include "brpc/log.h"
#include "bthread/unstable.h"
#include "bthread/bthread.h"

namespace brpc {

// Declared at socket.cpp
extern SocketVarsCollector* g_vars;

DEFINE_string(health_check_path, "",
              "Http path of health check call."
              "By default health check succeeds if the server is connectable."
              "If this flag is set, health check is not completed until a http "
              "call to the path succeeds within -health_check_timeout_ms(to make "
              "sure the server functions well).");
DEFINE_int32(health_check_timeout_ms, 500, "The timeout for both establishing "
             "the connection and the http call to -health_check_path over the connection");

// A Channel bound directly to a SocketId, used solely for issuing the
// application-level (HTTP) health check call against a failed socket.
class HealthCheckChannel : public brpc::Channel {
public:
    HealthCheckChannel() {}
    ~HealthCheckChannel() {}

    // Initializes the channel to talk to the given socket. Returns 0 on
    // success, -1 if channel options are invalid.
    int Init(SocketId id, const ChannelOptions* options);
};

int HealthCheckChannel::Init(SocketId id, const ChannelOptions* options) {
    brpc::GlobalInitializeOrDie();
    if (InitChannelOptions(options) != 0) {
        return -1;
    }
    // Bypass naming service resolution: target the socket directly.
    _server_id = id;
    return 0;
}

// Completion callback of one app-level health check RPC. Owns the channel
// and controller for the whole retry loop; deletes itself when the check
// finally succeeds or the socket goes away.
class OnAppHealthCheckDone : public google::protobuf::Closure {
public:
    virtual void Run();

    HealthCheckChannel channel;
    brpc::Controller cntl;
    SocketId id;                 // socket under check
    int64_t interval_s;          // configured health check interval
    int64_t last_check_time_ms;  // start time of the last attempt
};

// Entry points for the optional HTTP (-health_check_path) phase that runs
// after the raw connection check has succeeded.
class HealthCheckManager {
public:
    static void StartCheck(SocketId id, int64_t check_interval_s);
    static void* AppCheck(void* arg);
};

void HealthCheckManager::StartCheck(SocketId id, int64_t check_interval_s) {
    SocketUniquePtr ptr;
    const int rc = Socket::AddressFailedAsWell(id, &ptr);
    if (rc < 0) {
        RPC_VLOG << "SocketId=" << id
                 << " was abandoned during health checking";
        return;
    }
    LOG(INFO) << "Checking path=" << ptr->remote_side()
              << FLAGS_health_check_path;
    OnAppHealthCheckDone* done = new OnAppHealthCheckDone;
    done->id = id;
    done->interval_s = check_interval_s;
    brpc::ChannelOptions options;
    options.protocol = PROTOCOL_HTTP;
    options.max_retry = 0;
    // The single HTTP attempt must fit within one check interval.
    options.timeout_ms =
        std::min((int64_t)FLAGS_health_check_timeout_ms, check_interval_s * 1000);
    if (done->channel.Init(id, &options) != 0) {
        LOG(WARNING) << "Fail to init health check channel to SocketId=" << id;
        // Release the in-flight marker taken before StartCheck was called.
        ptr->_ninflight_app_health_check.fetch_sub(
                    1, butil::memory_order_relaxed);
        delete done;
        return;
    }
    AppCheck(done);
}

void* HealthCheckManager::AppCheck(void* arg) {
    OnAppHealthCheckDone* done = static_cast<OnAppHealthCheckDone*>(arg);
    done->cntl.Reset();
    done->cntl.http_request().uri() = FLAGS_health_check_path;
    // Mark the RPC so it is allowed through even though the socket is
    // in a failed state.
    ControllerPrivateAccessor(&done->cntl).set_health_check_call();
    done->last_check_time_ms = butil::gettimeofday_ms();
    // Async call; 'done->Run()' fires on completion and owns 'done'.
    done->channel.CallMethod(NULL, &done->cntl, NULL, NULL, done);
    return NULL;
}

void OnAppHealthCheckDone::Run() {
    std::unique_ptr<OnAppHealthCheckDone> self_guard(this);
    SocketUniquePtr ptr;
    const int rc = Socket::AddressFailedAsWell(id, &ptr);
    if (rc < 0) {
        RPC_VLOG << "SocketId=" << id
                 << " was abandoned during health checking";
        return;
    }
    if (!cntl.Failed() || ptr->Failed()) {
        LOG_IF(INFO, !cntl.Failed()) << "Succeeded to call "
            << ptr->remote_side() << FLAGS_health_check_path;
        // if ptr->Failed(), previous SetFailed would trigger next round
        // of hc, just return here.
        ptr->_ninflight_app_health_check.fetch_sub(
                    1, butil::memory_order_relaxed);
        return;
    }
    RPC_VLOG << "Fail to check path=" << FLAGS_health_check_path
             << ", " << cntl.ErrorText();

    // Sleep out the remainder of the check interval before retrying.
    int64_t sleep_time_ms =
        last_check_time_ms + interval_s * 1000 - butil::gettimeofday_ms();
    if (sleep_time_ms > 0) {
        // TODO(zhujiashun): we need to handle the case when timer fails
        // and bthread_usleep returns immediately. In most situations,
        // the possibility of this case is quite small, so currently we
        // just keep sending the hc call.
        bthread_usleep(sleep_time_ms * 1000);
    }
    // Hand 'this' back to AppCheck for the next attempt (ownership moves
    // out of self_guard).
    HealthCheckManager::AppCheck(self_guard.release());
}

// Periodic task driving the connection-level health check of one socket.
class HealthCheckTask : public PeriodicTask {
public:
    explicit HealthCheckTask(SocketId id);
    bool OnTriggeringTask(timespec* next_abstime) override;
    void OnDestroyingTask() override;

private:
    SocketId _id;
    bool _first_time;  // true until the first trigger (WaitAndReset gate)
};

HealthCheckTask::HealthCheckTask(SocketId id)
    : _id(id)
    , _first_time(true) {}

// Returns true to reschedule at *next_abstime, false to stop the task.
bool HealthCheckTask::OnTriggeringTask(timespec* next_abstime) {
    SocketUniquePtr ptr;
    const int rc = Socket::AddressFailedAsWell(_id, &ptr);
    CHECK(rc != 0);
    if (rc < 0) {
        RPC_VLOG << "SocketId=" << _id
                 << " was abandoned before health checking";
        return false;
    }
    // Note: Making a Socket re-addessable is hard. An alternative is
    // creating another Socket with selected internal fields to replace
    // failed Socket. Although it avoids concurrent issues with in-place
    // revive, it changes SocketId: many code need to watch SocketId
    // and update on change, which is impractical. Another issue with
    // this method is that it has to move "selected internal fields"
    // which may be accessed in parallel, not trivial to be moved.
    // Finally we choose a simple-enough solution: wait until the
    // reference count hits `expected_nref', which basically means no
    // one is addressing the Socket(except here). Because the Socket
    // is not addressable, the reference count will not increase
    // again. This solution is not perfect because the `expected_nref'
    // is implementation specific. In our case, one reference comes
    // from SocketMapInsert(socket_map.cpp), one reference is here.
    // Although WaitAndReset() could hang when someone is addressing
    // the failed Socket forever (also indicating bug), this is not an
    // issue in current code.
    if (_first_time) {  // Only check at first time.
        _first_time = false;
        if (ptr->WaitAndReset(2/*note*/) != 0) {
            LOG(INFO) << "Cancel checking " << *ptr;
            return false;
        }
    }

    // g_vars must not be NULL because it is newed at the creation of
    // first Socket. When g_vars is used, the socket is at health-checking
    // state, which means the socket must be created and then g_vars can
    // not be NULL.
    g_vars->nhealthcheck << 1;
    int hc = 0;
    if (ptr->_user) {
        // User-supplied SocketUser overrides the default check.
        hc = ptr->_user->CheckHealth(ptr.get());
    } else {
        hc = ptr->CheckHealth();
    }
    if (hc == 0) {
        if (ptr->CreatedByConnect()) {
            g_vars->channel_conn << -1;
        }
        if (!FLAGS_health_check_path.empty()) {
            // Marker released by the app-level check path (StartCheck/Run).
            ptr->_ninflight_app_health_check.fetch_add(
                    1, butil::memory_order_relaxed);
        }
        ptr->Revive();
        ptr->_hc_count = 0;
        if (!FLAGS_health_check_path.empty()) {
            HealthCheckManager::StartCheck(_id, ptr->_health_check_interval_s);
        }
        return false;
    } else if (hc == ESTOP) {
        LOG(INFO) << "Cancel checking " << *ptr;
        return false;
    }
    ++ ptr->_hc_count;
    *next_abstime = butil::seconds_from_now(ptr->_health_check_interval_s);
    return true;
}

void HealthCheckTask::OnDestroyingTask() {
    delete this;
}

// Schedules a health check task for the socket after 'delay_ms'.
void StartHealthCheck(SocketId id, int64_t delay_ms) {
    PeriodicTaskManager::StartTaskAt(
            new HealthCheckTask(id),
            butil::milliseconds_from_now(delay_ms));
}

} // namespace brpc
3,240
1,338
<reponame>Kirishikesan/haiku
/*
 * Copyright (c) 1999-2000, <NAME>.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 * notice, this list of conditions, and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions, and the following disclaimer in the
 * documentation and/or other materials provided with the distribution.
 *
 * 3. The name of the author may not be used to endorse or promote products
 * derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
 * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

// cortex::NodeManager::AddOnHostApp.h
// * PURPOSE
//   Definition of (and provisions for communication with)
//   a separate BApplication whose single responsibility is
//   to launch nodes.  NodeManager-launched nodes run in
//   another team, helping to lower the likelihood of a
//   socially maladjusted young node taking you out.
//
// * HISTORY
//   e.moon 6nov99

#ifndef NODE_MANAGER_ADD_ON_HOST_APP_H
#define NODE_MANAGER_ADD_ON_HOST_APP_H

#include "cortex_defs.h"

#include <Application.h>
#include <MediaAddOn.h>
#include <MediaDefs.h>

__BEGIN_CORTEX_NAMESPACE
namespace addon_host {

// The node-hosting application. Lives in its own team; the NodeManager
// communicates with it via BMessages (see MessageReceived).
class App : public BApplication {
    typedef BApplication _inherited;

public:
    ~App();
    App();

public:
    // BApplication hooks; implementations live in the matching .cpp
    // (not visible here).
    bool QuitRequested();
    void MessageReceived(BMessage* message);
};

} // addon_host
__END_CORTEX_NAMESPACE
#endif // NODE_MANAGER_ADD_ON_HOST_APP_H
758
348
<filename>docs/data/t2/047/47289.json {"nom":"<NAME>","dpt":"Lot-et-Garonne","inscrits":365,"abs":64,"votants":301,"blancs":38,"nuls":8,"exp":255,"res":[{"panneau":"2","voix":135},{"panneau":"1","voix":120}]}
89
852
# CMSSW configuration fragment: defines the e/gamma DQM offline
# post-processing (harvesting) sequence.
import FWCore.ParameterSet.Config as cms

from DQMOffline.EGamma.photonOfflineClient_cfi import *
from DQMOffline.EGamma.electronOfflineClientSequence_cff import *
from DQMOffline.EGamma.photonOfflineDQMClient_cff import *

# Photon DQM client runs before the electron client sequence
# ('*' builds an ordered cms.Sequence).
egammaPostProcessing = cms.Sequence(photonOfflineDQMClient*electronOfflineClientSequence)
109
1,233
<gh_stars>1000+
import json
import unittest
from uuid import uuid4

import requests_mock

from alerta.app import create_app, db, plugins
from alerta.models.enums import Scope
from alerta.models.key import ApiKey
from alerta.utils.response import base_url


class ForwarderTestCase(unittest.TestCase):
    """Tests for the 'forwarder' plugin: alerts/actions/deletes received by
    this server are relayed to the configured FWD_DESTINATIONS, with the
    X-Alerta-Loop header preventing forwarding cycles."""

    def setUp(self):
        # App under test listens (logically) on :8080; four mock remotes on
        # :9000-:9003 with different auth schemes and forwarding scopes.
        test_config = {
            'DEBUG': False,
            'TESTING': True,
            'AUTH_REQUIRED': True,
            'BASE_URL': 'http://localhost:8080',
            'PLUGINS': ['forwarder']
        }

        HMAC_AUTH_CREDENTIALS = [
            {  # http://localhost:9001
                'key': '<KEY>',
                'secret': '<KEY>',
                'algorithm': 'sha256'
            },
        ]

        # (url, auth config, forwarded message types); '*' forwards everything.
        FWD_DESTINATIONS = [
            ('http://localhost:9000', {'username': 'user', 'password': '<PASSWORD>', 'timeout': 10}, ['alerts', 'actions']),  # BasicAuth
            # ('https://httpbin.org/anything', dict(username='foo', password='<PASSWORD>', ssl_verify=False), ['*']),
            ('http://localhost:9001', {
                'key': '<KEY>',
                'secret': '<KEY>'
            }, ['actions']),  # Hawk HMAC
            ('http://localhost:9002', {'key': 'demo-key'}, ['delete']),  # API key
            ('http://localhost:9003', {'token': 'bearer-token'}, ['*']),  # Bearer token
        ]

        test_config['HMAC_AUTH_CREDENTIALS'] = HMAC_AUTH_CREDENTIALS
        test_config['FWD_DESTINATIONS'] = FWD_DESTINATIONS

        self.app = create_app(test_config)
        self.client = self.app.test_client()

        # Random resource name so fixture alerts don't correlate across runs.
        self.resource = str(uuid4()).upper()[:8]

        self.major_alert = {
            'id': 'b528c6f7-0925-4f6d-b930-fa6c0bba51dc',
            'event': 'node_marginal',
            'resource': self.resource,
            'environment': 'Production',
            'service': ['Network'],
            'severity': 'major',
            'correlate': ['node_down', 'node_marginal', 'node_up'],
            'timeout': 40
        }

        self.repeated_major_alert = {
            'id': '4ba2b0d6-ff4a-4fc0-8d93-45939c819465',
            'event': 'node_marginal',
            'resource': self.resource,
            'environment': 'Production',
            'service': ['Network'],
            'severity': 'major',
            'correlate': ['node_down', 'node_marginal', 'node_up'],
            'timeout': 40
        }

        self.warn_alert = {
            'id': '67344228-bd03-4660-9c45-ff9c8f1d53d0',
            'event': 'node_marginal',
            'resource': self.resource,
            'environment': 'Production',
            'service': ['Network'],
            'severity': 'warning',
            'correlate': ['node_down', 'node_marginal', 'node_up'],
            'timeout': 50
        }

        self.normal_alert = {
            'id': 'cb12250d-42ed-42cc-97ef-592f3a49618c',
            'event': 'node_up',
            'resource': self.resource,
            'environment': 'Production',
            'service': ['Network'],
            'severity': 'normal',
            'correlate': ['node_down', 'node_marginal', 'node_up'],
            'timeout': 100
        }

        # Create an admin API key inside a request context so the tests can
        # authenticate against the app.
        with self.app.test_request_context('/'):
            self.app.preprocess_request()
            self.api_key = ApiKey(
                user='<EMAIL>',
                scopes=[Scope.admin, Scope.read, Scope.write],
                text='demo-key'
            )
            self.api_key.create()

    def tearDown(self):
        # Plugins are registered globally; clear them so other test modules
        # don't inherit the forwarder.
        plugins.plugins.clear()
        db.destroy()

    @requests_mock.mock()
    def test_forward_alert(self, m):
        """A new alert is forwarded to the 'alerts' (:9000) and '*' (:9003)
        destinations only."""
        ok_response = """ {"status": "ok"} """
        m.post('http://localhost:9000/alert', text=ok_response)
        m.post('http://localhost:9001/alert', text=ok_response)
        m.post('http://localhost:9002/alert', text=ok_response)
        m.post('http://localhost:9003/alert', text=ok_response)

        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json',
            'Origin': 'http://localhost:5000',
            'X-Alerta-Loop': 'http://localhost:5000',
        }
        response = self.client.post('/alert', data=json.dumps(self.major_alert), headers=headers)
        self.assertEqual(response.status_code, 201)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['status'], 'ok')

        history = m.request_history
        self.assertEqual(history[0].port, 9000)
        self.assertEqual(history[1].port, 9003)

    @requests_mock.mock()
    def test_forward_action(self, m):
        """An ack action is forwarded to the 'actions' (:9000, :9001) and '*'
        (:9003) destinations, after the initial alert was forwarded."""
        ok_response = """ {"status": "ok"} """
        m.post('http://localhost:9000/alert', text=ok_response)
        m.post('http://localhost:9003/alert', text=ok_response)

        # create alert
        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json'
        }
        response = self.client.post('/alert', data=json.dumps(self.warn_alert), headers=headers)
        self.assertEqual(response.status_code, 201)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['alert']['status'], 'open')

        alert_id = data['id']

        m.put(f'http://localhost:9000/alert/{alert_id}/action', text=ok_response)
        m.put(f'http://localhost:9001/alert/{alert_id}/action', text=ok_response)
        m.put(f'http://localhost:9002/alert/{alert_id}/action', text=ok_response)
        m.put(f'http://localhost:9003/alert/{alert_id}/action', text=ok_response)

        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json',
            'Origin': 'http://localhost:8000'
        }
        response = self.client.put(f'/alert/{alert_id}/action', data=json.dumps({'action': 'ack'}), headers=headers)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['status'], 'ok')

        # First two requests are the alert creation, the rest the action.
        history = m.request_history
        self.assertEqual(history[0].port, 9000)
        self.assertEqual(history[1].port, 9003)
        self.assertEqual(history[2].port, 9000)
        self.assertEqual(history[3].port, 9001)
        self.assertEqual(history[4].port, 9003)

    @requests_mock.mock()
    def test_forward_delete(self, m):
        """A delete is forwarded to the 'delete' (:9002) and '*' (:9003)
        destinations."""
        ok_response = """ {"status": "ok"} """
        m.post('http://localhost:9000/alert', text=ok_response)
        m.post('http://localhost:9003/alert', text=ok_response)

        # create alert
        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json'
        }
        response = self.client.post('/alert', data=json.dumps(self.warn_alert), headers=headers)
        self.assertEqual(response.status_code, 201)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['alert']['status'], 'open')

        alert_id = data['id']

        m.delete(f'http://localhost:9002/alert/{alert_id}', text=ok_response)
        m.delete(f'http://localhost:9003/alert/{alert_id}', text=ok_response)

        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json',
            'Origin': 'http://localhost:8000'
        }
        response = self.client.delete(f'/alert/{alert_id}', headers=headers)
        self.assertEqual(response.status_code, 200)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['status'], 'ok')

        history = m.request_history
        self.assertEqual(history[0].port, 9000)
        self.assertEqual(history[1].port, 9003)
        self.assertEqual(history[2].port, 9002)
        self.assertEqual(history[3].port, 9003)

    @requests_mock.mock()
    def test_forward_heartbeat(self, m):
        # FIXME: currently not possible
        pass

    @requests_mock.mock()
    def test_already_processed(self, m):
        # Alert is not processed locally or forwarded when an Alerta server
        # receives an alert which it has already processed. This is
        # determined by checking to see if the BASE_URL of the server
        # is already in the X-Alerta-Loop header. A 202 is returned because
        # the alert was accepted, even though it wasn't processed.
        ok_response = """ {"status": "ok"} """
        m.post('http://localhost:9000/alert', text=ok_response)
        m.post('http://localhost:9001/alert', text=ok_response)
        m.post('http://localhost:9002/alert', text=ok_response)
        m.post('http://localhost:9003/alert', text=ok_response)

        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json',
            'Origin': 'http://localhost:5000',
            'X-Alerta-Loop': 'http://localhost:8080,http://localhost:5000',
        }
        response = self.client.post('/alert', data=json.dumps(self.major_alert), headers=headers)
        self.assertEqual(response.status_code, 202)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['status'], 'ok')
        self.assertEqual(data['message'], 'Alert forwarded by http://localhost:5000 already processed by http://localhost:8080')

        self.assertEqual(m.called, False)

    @requests_mock.mock()
    def test_forward_loop(self, m):
        # Alert is processed locally but not forwarded on to the remote
        # because it is already in the X-Alerta-Loop header. A 201 is
        # returned because the alert has been received and processed.
        ok_response = """ {"status": "ok"} """
        m.post('http://localhost:9000/alert', text=ok_response)
        m.post('http://localhost:9001/alert', text=ok_response)
        m.post('http://localhost:9002/alert', text=ok_response)
        m.post('http://localhost:9003/alert', text=ok_response)

        headers = {
            'Authorization': f'Key {self.api_key.key}',
            'Content-type': 'application/json',
            'X-Alerta-Loop': 'http://localhost:9000,http://localhost:9001,http://localhost:9002,http://localhost:9003',
        }
        response = self.client.post('/alert', data=json.dumps(self.warn_alert), headers=headers)
        self.assertEqual(response.status_code, 201)
        data = json.loads(response.data.decode('utf-8'))
        self.assertEqual(data['status'], 'ok')

        self.assertEqual(m.called, False)

    def test_do_not_forward(self):
        # check forwarding rule for remote
        pass

    def test_base_url(self):
        """base_url() reflects the configured BASE_URL inside a request context."""
        with self.app.test_request_context('/'):
            self.assertEqual(base_url(), 'http://localhost:8080')
712
#include "Colfer.h" #include "gen_test.h" #include <errno.h> #include <stdio.h> #include <stdlib.h> #include <inttypes.h> const unsigned char hex_table[] = "0123456789abcdef"; // hexstr maps data into buf as a null terminated hex string. void hexstr(char* buf, const void* data, size_t datalen) { const uint8_t* p = data; for (; datalen-- != 0; p++) { uint8_t c = *p; *buf++ = hex_table[c >> 4]; *buf++ = hex_table[c & 15]; } *buf = 0; } int gen_o_equal(const gen_o* pa, const gen_o* pb) { if (pa == NULL || pb == NULL) return pa == pb; const gen_o a = *pa, b = *pb; if (! ( a.b == b.b && a.u8 == b.u8 && a.u16 == b.u16 && a.u32 == b.u32 && a.u64 == b.u64 && a.i32 == b.i32 && a.i64 == b.i64 && (a.f32 == b.f32 || (a.f32 != a.f32 && b.f32 != b.f32)) && a.f32s.len == b.f32s.len && (a.f64 == b.f64 || (a.f64 != a.f64 && b.f64 != b.f64)) && a.f64s.len == b.f64s.len && !memcmp(&a.t, &b.t, sizeof(struct timespec)) && a.s.len == b.s.len && !memcmp(a.s.utf8, b.s.utf8, a.s.len) && a.ss.len == b.ss.len && a.a.len == b.a.len && !memcmp(a.a.octets, b.a.octets, a.a.len) && a.as.len == b.as.len && gen_o_equal(a.o, b.o) && a.os.len == b.os.len )) return 0; for (size_t i = 0, n = a.f32s.len; i < n; ++i) { float fa = a.f32s.list[i], fb = b.f32s.list[i]; if (fa != fb && (fa == fa || fb == fb)) return 0; } for (size_t i = 0, n = a.f64s.len; i < n; ++i) { double fa = a.f64s.list[i], fb = b.f64s.list[i]; if (fa != fb && (fa == fa || fb == fb)) return 0; } for (size_t i = 0, n = a.ss.len; i < n; ++i) { colfer_text sa = a.ss.list[i], sb = b.ss.list[i]; if (sa.len != sb.len || memcmp(sa.utf8, sb.utf8, sa.len)) return 0; } for (size_t i = 0, n = a.as.len; i < n; ++i) { colfer_binary ba = a.as.list[i], bb = b.as.list[i]; if (ba.len != bb.len || memcmp(ba.octets, bb.octets, ba.len)) return 0; } for (size_t i = 0, n = a.os.len; i < n; ++i) if (!gen_o_equal(&a.os.list[i], &b.os.list[i])) return 0; return 1; } void gen_o_dump(const gen_o o) { char* buf = malloc(colfer_size_max * 2 + 1); printf("{ "); if 
(o.b) printf("b=true "); if (o.u8) printf("u8=%" PRIu8 " ", o.u8); if (o.u16) printf("u16=%" PRIu16 " ", o.u16); if (o.u32) printf("u32=%" PRIu32 " ", o.u32); if (o.i64) printf("i64=%" PRId64 " ", o.i64); if (o.i32) printf("i32=%" PRId32 " ", o.i32); if (o.i64) printf("i64=%" PRId64 " ", o.i64); if (o.f32) printf("f32=%f ", o.f32); if (o.f32s.len) { printf("f32s=["); for (size_t i = 0; i < o.f32s.len; ++i) printf(" %f", o.f32s.list[i]); printf(" ] "); } if (o.f64) printf("f64=%f ", o.f64); if (o.f64s.len) { printf("f64s=["); for (size_t i = 0; i < o.f64s.len; ++i) printf(" %f", o.f64s.list[i]); printf(" ] "); } if (o.t.tv_sec) printf("t.tv_sec=%lld ", (long long) o.t.tv_sec); if (o.t.tv_nsec) printf("t.tv_nsec=%ld ", o.t.tv_nsec); if (o.s.len) { hexstr(buf, o.s.utf8, o.s.len); printf("s=0x%s", buf); } if (o.ss.len) { printf("ss=["); for (size_t i = 0; i < o.ss.len; ++i) { hexstr(buf, o.ss.list[i].utf8, o.ss.list[i].len); printf(" 0x%s", buf); } printf(" ] "); } if (o.a.len) { hexstr(buf, o.a.octets, o.a.len); printf("a=0x%s", buf); } if (o.as.len) { printf("as=["); for (size_t i = 0; i < o.as.len; ++i) { hexstr(buf, o.as.list[i].octets, o.as.list[i].len); printf(" 0x%s", buf); } printf(" ] "); } if (o.o) { printf("o="); gen_o_dump(*o.o); printf(" "); } if (o.os.len) { printf("os=["); for (size_t i = 0; i < o.os.len; ++i) { putchar(' '); gen_o_dump(o.os.list[i]); } printf("] "); } putchar('}'); free(buf); } int main() { const int n = sizeof(golden_cases) / sizeof(golden); printf("got %d golden cases\n", n); printf("TEST equality...\n"); for (int i = 0; i < n; ++i) { const gen_o* a = &golden_cases[i].o; for (int j = 0; j < n; ++j) { const gen_o* b = &golden_cases[j].o; if (i == j) { if (!gen_o_equal(a, b)) printf("0x%s: struct not equal to itself\n", golden_cases[i].hex); } else { if (gen_o_equal(a, b)) printf("0x%s: struct equal to 0x%s\n", golden_cases[i].hex, golden_cases[j].hex); } } } printf("TEST marshal length...\n"); for (int i = 0; i < n; ++i) { golden g = 
golden_cases[i]; size_t got = gen_o_marshal_len(&g.o); size_t want = strlen(g.hex) / 2; if (got != want) printf("0x%s: got marshal length %zu, want %zu\n", g.hex, got, want); // size maximum for (colfer_size_max = 0; colfer_size_max < want; ++colfer_size_max) { got = gen_o_marshal_len(&g.o); if (got || errno != EFBIG) printf("0x%s: got marshal length %zu and errno %d with Colfer size maximum %zu\n", g.hex, got, errno, colfer_size_max); errno = 0; } colfer_size_max = 16 * 1024 * 1024; } void* buf = malloc(colfer_size_max); void* hex = malloc(colfer_size_max * 2 + 1); printf("TEST marshalling...\n"); for (int i = 0; i < n; ++i) { golden g = golden_cases[i]; size_t wrote = gen_o_marshal(&g.o, buf); hexstr(hex, buf, wrote); if (strcmp(hex, g.hex)) { printf("0x%s: got marshal data 0x%s\n", g.hex, hex); continue; } gen_o got = {0}; size_t read = gen_o_unmarshal(&got, buf, wrote); if (errno != 0) { perror("unmarshal error"); errno = 0; } if (read != wrote || !gen_o_equal(&got, &g.o)) { printf("0x%s: unmarshal read %zu bytes:\n\tgot: ", g.hex, read); gen_o_dump(got); printf("\n\twant: "); gen_o_dump(g.o); putchar('\n'); } } printf("TEST unmarshal limits...\n"); for (int i = 0; i < n; ++i) { golden g = golden_cases[i]; size_t len = gen_o_marshal(&g.o, buf); if (!len) { printf("0x%s: skip due marshal fail\n", g.hex); continue; } // buffer length: for (size_t lim = 0; lim < len; lim++) { gen_o o = {0}; size_t read = gen_o_unmarshal(&o, buf, lim); if (read || errno != EWOULDBLOCK) printf("0x%s[0:%zu]: unmarshal read %zu and errno %d\n", g.hex, lim, read, errno); errno = 0; } // size maximum: for (colfer_size_max = 0; colfer_size_max < len; ++colfer_size_max) { gen_o o = {0}; size_t read = gen_o_unmarshal(&o, buf, len); if (read || errno != EFBIG) printf("0x%s: unmarshal read %zu with errno %d for size maximum %zu\n", g.hex, read, errno, colfer_size_max); errno = 0; } colfer_size_max = 16 * 1024 * 1024; } free(buf); free(hex); }
3,287
343
<reponame>nzeh/syzygy<gh_stars>100-1000 // Copyright 2012 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // This file declares the trace::service::Session class, which manages // the trace file and buffers for a given client of the call trace service. #ifndef SYZYGY_TRACE_SERVICE_SESSION_H_ #define SYZYGY_TRACE_SERVICE_SESSION_H_ #include <list> #include <map> #include "base/files/file_path.h" #include "base/memory/ref_counted.h" #include "base/process/process.h" #include "base/synchronization/condition_variable.h" #include "base/synchronization/lock.h" #include "base/win/scoped_handle.h" #include "syzygy/trace/service/buffer_consumer.h" #include "syzygy/trace/service/buffer_pool.h" #include "syzygy/trace/service/process_info.h" namespace trace { namespace service { // Forward declaration. class Service; // Holds all of the data associated with a given client session. // Note that this class it not internally thread safe. It is expected // that the CallTraceService will ensure that access to a given instance // of this class is synchronized. class Session : public base::RefCountedThreadSafe<Session> { public: typedef base::ProcessId ProcessId; explicit Session(Service* call_trace_service); public: // Initialize this session object. bool Init(ProcessId client_process_id); // Close the session. The causes the session to flush all of its outstanding // buffers to the write queue. 
bool Close(); // Get the next available buffer for use by a client. The session retains // ownership of the buffer object, it MUST not be deleted by the caller. This // may cause new buffers to be allocated if there are no free buffers // available. // @param buffer will be populated with a pointer to the buffer to be provided // to the client. // @returns true on success, false otherwise. bool GetNextBuffer(Buffer** buffer); // Gets a buffer with a size at least as big as that requested. If the size // is consistent with the common buffer pool, this will be satisfied from // there. Otherwise, it will result in a specific allocation. The buffer // should be returned/recycled in the normal way. Buffers requested in this // method are not specifically subject to throttling and thus should only be // called for large and long lifespan uses. // @param minimum_size the minimum size of the buffer. // @param buffer will be populated with a pointer to the buffer to be provided // to the client. // @returns true on success, false otherwise. bool GetBuffer(size_t minimum_size, Buffer** out_buffer); // Returns a full buffer back to the session. After being returned here the // session will ensure the buffer gets written to disk before being returned // to service. // @param buffer the full buffer to return. // @returns true on success, false otherwise. bool ReturnBuffer(Buffer* buffer); // Returns a buffer to the pool of available buffers to be handed out to // clients. This is to be called by the write queue thread after the buffer // has been written to disk. // @param buffer the full buffer to recycle. // @returns true on success, false otherwise. bool RecycleBuffer(Buffer* buffer); // Locates the local record of the given call trace buffer. The session // retains ownership of the buffer object, it MUST not be deleted by the // caller. bool FindBuffer(::CallTraceBuffer* call_trace_buffer, Buffer** client_buffer); // Returns the process id of the client process. 
ProcessId client_process_id() const { return client_.process_id; } // Returns the process information about this session's client. const ProcessInfo& client_info() const { return client_; } // Get the buffer consumer for this session. BufferConsumer* buffer_consumer() { return buffer_consumer_.get(); } // Set the buffer consumer for this session. void set_buffer_consumer(BufferConsumer* consumer) { DCHECK(consumer != NULL); DCHECK(buffer_consumer_.get() == NULL); buffer_consumer_ = consumer; } protected: friend class base::RefCountedThreadSafe<Session>; virtual ~Session(); // @name Testing seams. These are basically events which will be called, // providing places for unittests to set some hooks. // @{ virtual void OnWaitingForBufferToBeRecycled() { } virtual void OnDestroySingletonBuffer(Buffer* buffer) { } // Initialize process information for @p process_id. // @param process_id the process we want to capture information for. // @param client the record where we store the captured info. // @returns true on success. // @note does detailed logging on failure. virtual bool InitializeProcessInfo(ProcessId process_id, ProcessInfo* client); // Copy a shared memory segment handle to the client process. // @param client_process_handle a valid handle to the client process. // @param local_handle the locally valid handle that's to be duplicated. // @param client_copy on success returns the copied handle. // @returns true on success. // @note does detailed logging on failure. virtual bool CopyBufferHandleToClient(HANDLE client_process_handle, HANDLE local_handle, HANDLE* client_copy); // @} typedef Buffer::BufferState BufferState; typedef std::list<BufferPool*> SharedMemoryBufferCollection; // Allocates num_buffers shared client buffers, each of size // buffer_size and adds them to the free list. // @param num_nuffers the number of buffers to allocate. // @param buffer_size the size of each buffer to be allocated. // @param pool a pointer to the pool of allocated buffers. 
// @returns true on success, false otherwise. // @pre Under lock_. bool AllocateBufferPool( size_t num_buffers, size_t buffer_size, BufferPool** out_pool); // Allocates num_buffers shared client buffers, each of size // buffer_size and adds them to the free list. // @param num_nuffers the number of buffers to allocate. // @param buffer_size the size of each buffer to be allocated. // @returns true on success, false otherwise. // @pre Under lock_. // @note this is virtual to provide a testing seam. virtual bool AllocateBuffers(size_t num_buffers, size_t buffer_size); // Allocates a buffer for immediate use, not releasing it to the common buffer // pool and signaling its availability. // @param minimum_size the minimum size of the buffer. // @param out_buffer will be set to point to the newly allocated buffer. // @pre Under lock_. // @pre minimum_size must be bigger than the common buffer allocation size. bool AllocateBufferForImmediateUse(size_t minimum_size, Buffer** out_buffer); // A private implementation of GetNextBuffer, but which assumes the lock has // already been acquired. // @param buffer will be populated with a pointer to the buffer to be provided // to the client. // @returns true on success, false otherwise. // @pre Under lock_. bool GetNextBufferUnlocked(Buffer** buffer); // Destroys the given buffer, and its containing pool. The buffer must be the // only buffer in its pool, and must be in the pending write state. This is // meant for destroying singleton buffers that have been allocated with // custom sizes. We don't want to return them to the general pool. // @param buffer the buffer whose pool is to be destroyed. // @returns true on success, false otherwise. // @pre buffer is in the 'pending write' state. It should already have been // written but not yet transitioned. // @pre buffer is a singleton. That is, is part of a pool that contains only // a single buffer. 
bool DestroySingletonBuffer(Buffer* buffer); // Transitions the buffer to the given state. This only updates the buffer's // internal state and buffer_state_counts_, but not buffers_available_. // DCHECKs on any attempted invalid state changes. // @param new_state the new state to be applied to the buffer. // @param buffer the buffer to have its state changed. // @pre Under lock_. void ChangeBufferState(BufferState new_state, Buffer* buffer); // Gets (creating if needed) a buffer and populates it with a // TRACE_PROCESS_ENDED event. This is called by Close(), which is called // when the process owning this session disconnects (at its death). // @param buffer receives a pointer to the buffer that is used. // @returns true on success, false otherwise. // @pre Under lock_. bool CreateProcessEndedEvent(Buffer** buffer); // Returns true if the buffer book-keeping is self-consistent. // @pre Under lock_. bool BufferBookkeepingIsConsistent() const; // The call trace service this session lives in. We do not own this // object. Service* const call_trace_service_; // The process information for the client to which the session belongs. ProcessInfo client_; // All shared memory buffers allocated for this session. SharedMemoryBufferCollection shared_memory_buffers_; // Under lock_. // This is the set of buffers that we currently own. typedef std::map<Buffer::ID, Buffer*> BufferMap; BufferMap buffers_; // Under lock_. // State summary. size_t buffer_state_counts_[Buffer::kBufferStateMax]; // Under lock_. // The consumer responsible for processing this sessions buffers. The // lifetime of this object is managed by the call trace service. scoped_refptr<BufferConsumer> buffer_consumer_; // Buffers available to give to the clients. typedef std::deque<Buffer*> BufferQueue; BufferQueue buffers_available_; // Under lock_. // Tracks whether this session is in the process of shutting down. bool is_closing_; // Under lock_. 
// This is used to count the number of GetNextBuffer requests that are // currently applying back-pressure. There can only be as many of them as // there are buffers to be recycled until we fall below the back-pressure cap. size_t buffer_requests_waiting_for_recycle_; // Under lock_. // This condition variable is used to indicate that a buffer is available. base::ConditionVariable buffer_is_available_; // Under lock_. // This is currently only used to allocate unique IDs to buffers allocated // after the session closes. // TODO(rogerm): extend this to all buffers. size_t buffer_id_; // Under lock_. // This lock protects any access to the internals related to buffers and their // state. base::Lock lock_; // Tracks whether or not invalid input errors have already been logged. // When an error of this type occurs, there will typically be numerous // follow-on occurrences that we don't want to log. bool input_error_already_logged_; // Under lock_. private: DISALLOW_COPY_AND_ASSIGN(Session); }; } // namespace service } // namespace trace #endif // SYZYGY_TRACE_SERVICE_SESSION_H_
3,198
2,118
<reponame>gidabite/libcds // Copyright (c) 2006-2018 <NAME> // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE or copy at http://www.boost.org/LICENSE_1_0.txt) #ifndef CDSLIB_USER_SETUP_THREADING_MODEL_H #define CDSLIB_USER_SETUP_THREADING_MODEL_H /** CDS threading model CDS_THREADING_AUTODETECT - auto-detect appropriate threading model (default) CDS_THREADING_MSVC - use MS Visual C++ declspec( thread ) declaration to mantain thread-specific data CDS_THREADING_WIN_TLS - use Windows TLS API to mantain thread-specific data CDS_THREADING_GCC - use GCC __thread keyword to mantain thread-specific data CDS_THREADING_PTHREAD - use cds::Threading::Manager implementation based on pthread thread-specific data functions pthread_getspecific/pthread_setspecific CDS_THREADING_USER_DEFINED - use user-defined threading model */ #define CDS_THREADING_AUTODETECT #endif // #ifndef CDSLIB_USER_SETUP_THREADING_MODEL_H
358
16,461
package abi41_0_0.expo.modules.structuredheaders; import android.util.Base64; import java.nio.CharBuffer; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Objects; /** * Implementation of the "Structured Field Values" Parser. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#text-parse">Section * 4.2 of draft-ietf-httpbis-header-structure-19</a> */ public class Parser { private final CharBuffer input; private final List<Integer> startPositions; /** * Creates {@link Parser} for the given input. * * @param input * single field line * @throws ParseException * for non-ASCII characters */ public Parser(String input) { this(Collections.singletonList(Objects.requireNonNull(input, "input must not be null"))); } /** * Creates {@link Parser} for the given input. * * @param input * field lines * @throws ParseException * for non-ASCII characters */ public Parser(String... input) { this(Arrays.asList(input)); } /** * Creates {@link Parser} for the given input. * * @param fieldLines * field lines * @throws ParseException * for non-ASCII characters or empty input */ public Parser(Iterable<String> fieldLines) { StringBuilder sb = null; String str = null; List<Integer> startPositions = Collections.emptyList(); for (String s : Objects.requireNonNull(fieldLines, "fieldLines must not be null")) { Objects.requireNonNull("field line must not be null", s); if (str == null) { str = checkASCII(s); } else { if (sb == null) { sb = new StringBuilder(); sb.append(str); } if (startPositions.size() == 0) { startPositions = new ArrayList<>(); } startPositions.add(sb.length()); sb.append(",").append(checkASCII(s)); } } if (str == null && sb == null) { throw new ParseException("Empty input", "", 0); } this.input = CharBuffer.wrap(sb != null ? 
sb : str); this.startPositions = startPositions; } private static String checkASCII(String value) { for (int i = 0; i < value.length(); i++) { char c = value.charAt(i); if (c < 0x00 || c > 0x7f) { throw new ParseException(String.format("Invalid character in field line at position %d: '%c' (0x%04x) (input: %s)", i, c, (int) c, value), value, i); } } return value; } private NumberItem<? extends Object> internalParseBareIntegerOrDecimal() { boolean isDecimal = false; int sign = 1; StringBuilder inputNumber = new StringBuilder(20); if (checkNextChar('-')) { sign = -1; advance(); } if (!checkNextChar("0123456789")) { throw complaint("Illegal start for Integer or Decimal: '" + input + "'"); } boolean done = false; while (hasRemaining() && !done) { char c = peek(); if (Utils.isDigit(c)) { inputNumber.append(c); advance(); } else if (!isDecimal && c == '.') { if (inputNumber.length() > 12) { throw complaint("Illegal position for decimal point in Decimal after '" + inputNumber + "'"); } inputNumber.append(c); isDecimal = true; advance(); } else { done = true; } if (inputNumber.length() > (isDecimal ? 16 : 15)) { backout(); throw complaint((isDecimal ? "Decimal" : "Integer") + " too long: " + inputNumber.length() + " characters"); } } if (!isDecimal) { long l = Long.parseLong(inputNumber.toString()); return IntegerItem.valueOf(sign * l); } else { int dotPos = inputNumber.indexOf("."); int fracLen = inputNumber.length() - dotPos - 1; if (fracLen < 1) { backout(); throw complaint("Decimal must not end in '.'"); } else if (fracLen == 1) { inputNumber.append("00"); } else if (fracLen == 2) { inputNumber.append("0"); } else if (fracLen > 3) { backout(); throw complaint("Maximum number of fractional digits is 3, found: " + fracLen + ", in: " + inputNumber); } inputNumber.deleteCharAt(dotPos); long l = Long.parseLong(inputNumber.toString()); return DecimalItem.valueOf(sign * l); } } private NumberItem<? extends Object> internalParseIntegerOrDecimal() { NumberItem<? 
extends Object> result = internalParseBareIntegerOrDecimal(); Parameters params = internalParseParameters(); return result.withParams(params); } private StringItem internalParseBareString() { if (getOrEOD() != '"') { throw complaint("String must start with double quote: '" + input + "'"); } StringBuilder outputString = new StringBuilder(length()); while (hasRemaining()) { if (startPositions.contains(position())) { throw complaint("String crosses field line boundary at position " + position()); } char c = get(); if (c == '\\') { c = getOrEOD(); if (c == EOD) { throw complaint("Incomplete escape sequence at position " + position()); } else if (c != '"' && c != '\\') { backout(); throw complaint("Invalid escape sequence character '" + c + "' at position " + position()); } outputString.append(c); } else { if (c == '"') { return StringItem.valueOf(outputString.toString()); } else if (c < 0x20 || c >= 0x7f) { throw complaint("Invalid character in String at position " + position()); } else { outputString.append(c); } } } throw complaint("Closing DQUOTE missing"); } private StringItem internalParseString() { StringItem result = internalParseBareString(); Parameters params = internalParseParameters(); return result.withParams(params); } private TokenItem internalParseBareToken() { char c = getOrEOD(); if (c != '*' && !Utils.isAlpha(c)) { throw complaint("Token must start with ALPHA or *: '" + input + "'"); } StringBuilder outputString = new StringBuilder(length()); outputString.append(c); boolean done = false; while (hasRemaining() && !done) { c = peek(); if (c <= ' ' || c >= 0x7f || "\"(),;<=>?@[\\]{}".indexOf(c) >= 0) { done = true; } else { advance(); outputString.append(c); } } return TokenItem.valueOf(outputString.toString()); } private TokenItem internalParseToken() { TokenItem result = internalParseBareToken(); Parameters params = internalParseParameters(); return result.withParams(params); } private static boolean isBase64Char(char c) { return Utils.isAlpha(c) || 
Utils.isDigit(c) || c == '+' || c == '/' || c == '='; } private ByteSequenceItem internalParseBareByteSequence() { if (getOrEOD() != ':') { throw complaint("Byte Sequence must start with colon: " + input); } StringBuilder outputString = new StringBuilder(length()); boolean done = false; while (hasRemaining() && !done) { char c = get(); if (c == ':') { done = true; } else { if (!isBase64Char(c)) { throw complaint("Invalid Byte Sequence Character '" + c + "' at position " + position()); } outputString.append(c); } } if (!done) { throw complaint("Byte Sequence must end with COLON: '" + outputString + "'"); } try { return ByteSequenceItem.valueOf(Base64.decode(outputString.toString(), Base64.DEFAULT)); } catch (IllegalArgumentException ex) { throw complaint(ex.getMessage(), ex); } } private ByteSequenceItem internalParseByteSequence() { ByteSequenceItem result = internalParseBareByteSequence(); Parameters params = internalParseParameters(); return result.withParams(params); } private BooleanItem internalParseBareBoolean() { char c = getOrEOD(); if (c == EOD) { throw complaint("Missing data in Boolean"); } else if (c != '?') { backout(); throw complaint(String.format("Boolean must start with question mark, got '%c'", c)); } c = getOrEOD(); if (c == EOD) { throw complaint("Missing data in Boolean"); } else if (c != '0' && c != '1') { backout(); throw complaint(String.format("Expected '0' or '1' in Boolean, found '%c'", c)); } return BooleanItem.valueOf(c == '1'); } private BooleanItem internalParseBoolean() { BooleanItem result = internalParseBareBoolean(); Parameters params = internalParseParameters(); return result.withParams(params); } private String internalParseKey() { char c = getOrEOD(); if (c == EOD) { throw complaint("Missing data in Key"); } else if (c != '*' && !Utils.isLcAlpha(c)) { backout(); throw complaint("Key must start with LCALPHA or '*': " + format(c)); } StringBuilder result = new StringBuilder(); result.append(c); boolean done = false; while 
(hasRemaining() && !done) { c = peek(); if (Utils.isLcAlpha(c) || Utils.isDigit(c) || c == '_' || c == '-' || c == '.' || c == '*') { result.append(c); advance(); } else { done = true; } } return result.toString(); } private Parameters internalParseParameters() { LinkedHashMap<String, Object> result = new LinkedHashMap<>(); boolean done = false; while (hasRemaining() && !done) { char c = peek(); if (c != ';') { done = true; } else { advance(); removeLeadingSP(); String name = internalParseKey(); Item<? extends Object> value = BooleanItem.valueOf(true); if (peek() == '=') { advance(); value = internalParseBareItem(); } result.put(name, value); } } return Parameters.valueOf(result); } private Item<? extends Object> internalParseBareItem() { if (!hasRemaining()) { throw complaint("Empty string found when parsing Bare Item"); } char c = peek(); if (Utils.isDigit(c) || c == '-') { return internalParseBareIntegerOrDecimal(); } else if (c == '"') { return internalParseBareString(); } else if (c == '?') { return internalParseBareBoolean(); } else if (c == '*' || Utils.isAlpha(c)) { return internalParseBareToken(); } else if (c == ':') { return internalParseBareByteSequence(); } else { throw complaint("Unexpected start character in Bare Item: " + format(c)); } } private Item<? extends Object> internalParseItem() { Item<? extends Object> result = internalParseBareItem(); Parameters params = internalParseParameters(); return result.withParams(params); } private ListElement<? extends Object> internalParseItemOrInnerList() { return peek() == '(' ? internalParseInnerList() : internalParseItem(); } private List<ListElement<? extends Object>> internalParseOuterList() { List<ListElement<? 
extends Object>> result = new ArrayList<>(); while (hasRemaining()) { result.add(internalParseItemOrInnerList()); removeLeadingOWS(); if (!hasRemaining()) { return result; } char c = get(); if (c != ',') { backout(); throw complaint("Expected COMMA in List, got: " + format(c)); } removeLeadingOWS(); if (!hasRemaining()) { throw complaint("Found trailing COMMA in List"); } } // Won't get here return result; } private List<Item<? extends Object>> internalParseBareInnerList() { char c = getOrEOD(); if (c != '(') { throw complaint("Inner List must start with '(': " + input); } List<Item<? extends Object>> result = new ArrayList<>(); boolean done = false; while (hasRemaining() && !done) { removeLeadingSP(); c = peek(); if (c == ')') { advance(); done = true; } else { Item<? extends Object> item = internalParseItem(); result.add(item); c = peek(); if (c == EOD) { throw complaint("Missing data in Inner List"); } else if (c != ' ' && c != ')') { throw complaint("Expected SP or ')' in Inner List, got: " + format(c)); } } } if (!done) { throw complaint("Inner List must end with ')': " + input); } return result; } private InnerList internalParseInnerList() { List<Item<? extends Object>> result = internalParseBareInnerList(); Parameters params = internalParseParameters(); return InnerList.valueOf(result).withParams(params); } private Dictionary internalParseDictionary() { LinkedHashMap<String, ListElement<? extends Object>> result = new LinkedHashMap<>(); boolean done = false; while (hasRemaining() && !done) { ListElement<? 
extends Object> member; String name = internalParseKey(); if (peek() == '=') { advance(); member = internalParseItemOrInnerList(); } else { member = BooleanItem.valueOf(true).withParams(internalParseParameters()); } result.put(name, member); removeLeadingOWS(); if (hasRemaining()) { char c = get(); if (c != ',') { backout(); throw complaint("Expected COMMA in Dictionary, found: " + format(c)); } removeLeadingOWS(); if (!hasRemaining()) { throw complaint("Found trailing COMMA in Dictionary"); } } else { done = true; } } return Dictionary.valueOf(result); } // protected methods unit testing protected static IntegerItem parseInteger(String input) { Parser p = new Parser(input); Item<? extends Object> result = p.internalParseIntegerOrDecimal(); if (!(result instanceof IntegerItem)) { throw p.complaint("String parsed as Integer '" + input + "' is a Decimal"); } else { p.assertEmpty("Extra characters in string parsed as Integer"); return (IntegerItem) result; } } protected static DecimalItem parseDecimal(String input) { Parser p = new Parser(input); Item<? extends Object> result = p.internalParseIntegerOrDecimal(); if (!(result instanceof DecimalItem)) { throw p.complaint("String parsed as Decimal '" + input + "' is an Integer"); } else { p.assertEmpty("Extra characters in string parsed as Decimal"); return (DecimalItem) result; } } // public instance methods /** * Implementation of "Parsing a List" * * @return result of parse as {@link OuterList}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-list">Section * 4.2.1 of draft-ietf-httpbis-header-structure-19</a> */ public OuterList parseList() { removeLeadingSP(); List<ListElement<? extends Object>> result = internalParseOuterList(); removeLeadingSP(); assertEmpty("Extra characters in string parsed as List"); return OuterList.valueOf(result); } /** * Implementation of "Parsing a Dictionary" * * @return result of parse as {@link Dictionary}. 
* * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-dictionary">Section * 4.2.2 of draft-ietf-httpbis-header-structure-19</a> */ public Dictionary parseDictionary() { removeLeadingSP(); Dictionary result = internalParseDictionary(); removeLeadingSP(); assertEmpty("Extra characters in string parsed as Dictionary"); return result; } /** * Implementation of "Parsing an Item" * * @return result of parse as {@link Item}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-item">Section * 4.2.3 of draft-ietf-httpbis-header-structure-19</a> */ public Item<? extends Object> parseItem() { removeLeadingSP(); Item<? extends Object> result = internalParseItem(); removeLeadingSP(); assertEmpty("Extra characters in string parsed as Item"); return result; } // static public methods /** * Implementation of "Parsing a List" (assuming no extra characters left in * input string) * * @param input * {@link String} to parse. * @return result of parse as {@link OuterList}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-list">Section * 4.2.1 of draft-ietf-httpbis-header-structure-19</a> */ public static OuterList parseList(String input) { Parser p = new Parser(input); List<ListElement<? extends Object>> result = p.internalParseOuterList(); p.assertEmpty("Extra characters in string parsed as List"); return OuterList.valueOf(result); } /** * Implementation of "Parsing an Item Or Inner List" (assuming no extra * characters left in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link Item}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-item-or-list">Section * 4.2.1.1 of draft-ietf-httpbis-header-structure-19</a> */ public static Parametrizable<? 
extends Object> parseItemOrInnerList(String input) { Parser p = new Parser(input); ListElement<? extends Object> result = p.internalParseItemOrInnerList(); p.assertEmpty("Extra characters in string parsed as Item or Inner List"); return result; } /** * Implementation of "Parsing an Inner List" (assuming no extra characters * left in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link InnerList}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-item-or-list">Section * 4.2.1.2 of draft-ietf-httpbis-header-structure-19</a> */ public static InnerList parseInnerList(String input) { Parser p = new Parser(input); InnerList result = p.internalParseInnerList(); p.assertEmpty("Extra characters in string parsed as Inner List"); return result; } /** * Implementation of "Parsing a Dictionary" (assuming no extra characters * left in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link Dictionary}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-dictionary">Section * 4.2.2 of draft-ietf-httpbis-header-structure-19</a> */ public static Dictionary parseDictionary(String input) { Parser p = new Parser(input); Dictionary result = p.internalParseDictionary(); p.assertEmpty("Extra characters in string parsed as Dictionary"); return result; } /** * Implementation of "Parsing an Item" (assuming no extra characters left in * input string) * * @param input * {@link String} to parse. * @return result of parse as {@link Item}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-bare-item">Section * 4.2.3 of draft-ietf-httpbis-header-structure-19</a> */ public static Item<? extends Object> parseItem(String input) { Parser p = new Parser(input); Item<? 
extends Object> result = p.parseItem(); p.assertEmpty("Extra characters in string parsed as Item"); return result; } /** * Implementation of "Parsing a Bare Item" (assuming no extra characters * left in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link Item}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-bare-item">Section * 4.2.3.1 of draft-ietf-httpbis-header-structure-19</a> */ public static Item<? extends Object> parseBareItem(String input) { Parser p = new Parser(input); Item<? extends Object> result = p.internalParseBareItem(); p.assertEmpty("Extra characters in string parsed as Bare Item"); return result; } /** * Implementation of "Parsing Parameters" (assuming no extra characters left * in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link Parameters}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-param">Section * 4.2.3.2 of draft-ietf-httpbis-header-structure-19</a> */ public static Parameters parseParameters(String input) { Parser p = new Parser(input); Parameters result = p.internalParseParameters(); p.assertEmpty("Extra characters in string parsed as Parameters"); return result; } /** * Implementation of "Parsing a Key" (assuming no extra characters left in * input string) * * @param input * {@link String} to parse. * @return result of parse as {@link String}. 
* * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-key">Section * 4.2.3.3 of draft-ietf-httpbis-header-structure-19</a> */ public static String parseKey(String input) { Parser p = new Parser(input); String result = p.internalParseKey(); p.assertEmpty("Extra characters in string parsed as Key"); return result; } /** * Implementation of "Parsing an Integer or Decimal" (assuming no extra * characters left in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link NumberItem}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-number">Section * 4.2.4 of draft-ietf-httpbis-header-structure-19</a> */ public static NumberItem<? extends Object> parseIntegerOrDecimal(String input) { Parser p = new Parser(input); NumberItem<? extends Object> result = p.internalParseIntegerOrDecimal(); p.assertEmpty("Extra characters in string parsed as Integer or Decimal"); return result; } /** * Implementation of "Parsing a String" (assuming no extra characters left * in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link StringItem}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-string">Section * 4.2.5 of draft-ietf-httpbis-header-structure-19</a> */ public static StringItem parseString(String input) { Parser p = new Parser(input); StringItem result = p.internalParseString(); p.assertEmpty("Extra characters in string parsed as String"); return result; } /** * Implementation of "Parsing a Token" (assuming no extra characters left in * input string) * * @param input * {@link String} to parse. * @return result of parse as {@link TokenItem}. 
* * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-token">Section * 4.2.6 of draft-ietf-httpbis-header-structure-19</a> */ public static TokenItem parseToken(String input) { Parser p = new Parser(input); TokenItem result = p.internalParseToken(); p.assertEmpty("Extra characters in string parsed as Token"); return result; } /** * Implementation of "Parsing a Byte Sequence" (assuming no extra characters * left in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link ByteSequenceItem}. * * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-binary">Section * 4.2.7 of draft-ietf-httpbis-header-structure-19</a> */ public static ByteSequenceItem parseByteSequence(String input) { Parser p = new Parser(input); ByteSequenceItem result = p.internalParseByteSequence(); p.assertEmpty("Extra characters in string parsed as Byte Sequence"); return result; } /** * Implementation of "Parsing a Boolean" (assuming no extra characters left * in input string) * * @param input * {@link String} to parse. * @return result of parse as {@link BooleanItem}. 
* * @see <a href= * "https://greenbytes.de/tech/webdav/draft-ietf-httpbis-header-structure-19.html#parse-boolean">Section * 4.2.8 of draft-ietf-httpbis-header-structure-19</a> */ public static BooleanItem parseBoolean(String input) { Parser p = new Parser(input); BooleanItem result = p.internalParseBoolean(); p.assertEmpty("Extra characters at position %d in string parsed as Boolean: '%s'"); return result; } // utility methods on CharBuffer private static char EOD = (char) -1; private void assertEmpty(String message) { if (hasRemaining()) { throw complaint(String.format(message, position(), input)); } } private void advance() { input.position(1 + input.position()); } private void backout() { input.position(-1 + input.position()); } private boolean checkNextChar(char c) { return hasRemaining() && input.charAt(0) == c; } private boolean checkNextChar(String valid) { return hasRemaining() && valid.indexOf(input.charAt(0)) >= 0; } private char get() { return input.get(); } private char getOrEOD() { return hasRemaining() ? get() : EOD; } private boolean hasRemaining() { return input.hasRemaining(); } private int length() { return input.length(); } private char peek() { return hasRemaining() ? input.charAt(0) : EOD; } private int position() { return input.position(); } private void removeLeadingSP() { while (checkNextChar(' ')) { advance(); } } private void removeLeadingOWS() { while (checkNextChar(" \t")) { advance(); } } private ParseException complaint(String message) { return new ParseException(message, input); } private ParseException complaint(String message, Throwable cause) { return new ParseException(message, input, cause); } private static String format(char c) { String s; if (c == 9) { s = "HTAB"; } else { s = "'" + c + "'"; } return String.format("%s (\\u%04x)", s, (int) c); } }
13,357
1,652
package com.ctrip.xpipe.redis.core; public class PerfTest { }
25
488
<filename>tests/RunTests/FortranTests/LANL_POP/netcdf-4.1.1/libncdap3/ncdap.h<gh_stars>100-1000 /********************************************************************* * Copyright 1993, UCAR/Unidata * See netcdf/COPYRIGHT file for copying and redistribution conditions. * $Header: /upc/share/CVS/netcdf-3/libncdap3/ncdap.h,v 1.45 2010/04/02 18:25:38 dmh Exp $ *********************************************************************/ #ifndef NCDAP_H #define NCDAP_H 1 #include "oc.h" #include "dapurl.h" #include "ncbytes.h" #include "nclist.h" #include "nchashmap.h" #include "dapdebug.h" #include "daputil.h" #undef OCCOMPILEBYDEFAULT #ifndef BOOL #define BOOL int #endif #ifndef TRUE #define TRUE 1 #define FALSE 0 #endif #define PSEUDOFILE #define DEFAULTSTRINGLENGTH 64 /* The sequence limit default is zero because most servers do not implement projections on sequences. */ #define DEFAULTSEQLIMIT 0 #ifndef USE_NETCDF4 #define NC_UBYTE 7 /* unsigned 1 byte int */ #define NC_USHORT 8 /* unsigned 2-byte int */ #define NC_UINT 9 /* unsigned 4-byte int */ #define NC_INT64 10 /* signed 8-byte int */ #define NC_UINT64 11 /* unsigned 8-byte int */ #define NC_STRING 12 /* string */ #endif /* Use an extended version of the netCDF-4 type system */ #define NC_URL 50 #define NC_SET 51 /* Merge relevant translations of OC types */ #define NC_Dataset 52 #define NC_Sequence 53 #define NC_Structure 54 #define NC_Grid 55 #define NC_Dimension 56 #define NC_Primitive 57 /* Collect single bit flags that affect the operation of the system. 
*/ typedef unsigned int NCFLAGS; # define SETFLAG(drno,flag) ((drno)->controls.flags |= (flag)) # define CLRFLAG(drno,flag) ((drno)->controls.flags &= ~(flag)) # define FLAGSET(drno,flag) (((drno)->controls.flags & (flag)) != 0) /* Base translations */ #define NCF_NC3 (0x01) /* DAP->netcdf-3 */ #define NCF_NC4 (0x02) /* DAP->netcdf-4 */ /* OR'd with the translation model */ #define NCF_NCDAP (0x04) /* libnc-dap mimic */ #define NCF_COORD (0x08) /* libnc-dap mimic + add coordinate variables */ #define NCF_VLEN (0x10) /* map sequences to vlen+struct */ /* Cache control flags */ #define NCF_CACHE (0x20) /* Cache enabled/disabled */ /* Misc control flags */ #define NCF_NOUNLIM (0x40) /* suppress bad sequences (vs convert to unlimited) */ #define NCF_UPGRADE (0x80) /* Do proper type upgrades */ #define NCF_UNCONSTRAINABLE (0x100) /* Not a constrainable URL */ #define NCF_SHOWFETCH (0x200) /* show fetch calls */ /* Currently, defalt is on */ #define DFALTCACHEFLAG (0) typedef struct NCCONTROLS { NCFLAGS flags; } NCCONTROLS; struct NCTMODEL { int translation; char* model; unsigned int flags; }; /* sigh, do the forwards */ struct NCprojection; struct NCselection; struct Getvara; struct NCcachenode; struct NCcache; struct NCslice; struct NCsegment; typedef struct NCconstraint { NClist* projections; NClist* selections; } NCconstraint; /* Detail information about each cache item */ typedef struct NCcachenode { int prefetch; /* is this the prefetch cache entry? 
*/ size_t xdrsize; NCconstraint constraint; /* as used to create this node */ NClist* vars; /* vars potentially covered by this cache node */ struct CDFnode* datadds; OCobject ocroot; OCdata content; } NCcachenode; /* All cache info */ typedef struct NCcache { size_t cachelimit; /* max total size for all cached entries */ size_t cachesize; /* current size */ size_t cachecount; /* max # nodes in cache */ NCcachenode* prefetch; NClist* nodes; /* cache nodes other than prefetch */ } NCcache; /* The DAP packet info*/ typedef struct NCDAP { OCconnection conn; char* urltext; /* as given to nc3d_open*/ DAPURL url; /* as given to nc3d_open and parsed*/ OCobject ocdasroot; NCconstraint constraint; /* merge of dap and nc constraints */ NCconstraint dapconstraint; /* from url */ #ifdef NCCONSTRAINTS NCconstraint netcdfconstraint; /* netcdf constraints */ #endif } NCDAP; typedef struct NCCDF { struct CDFnode* ddsroot; /* unconstrained dds */ /* Collected sets of useful nodes (in unconstrainted tree space) */ NClist* varnodes; /* nodes which can represent netcdf variables */ NClist* seqnodes; /* sequence nodes; */ NClist* gridnodes; /* grid nodes */ #ifdef IGNORE struct Getvara* vara; /* current vara() arguments */ #endif unsigned int defaultstringlength; unsigned int defaultsequencelimit; /* global sequence limit;0=>no limit */ NCcache cache; size_t fetchlimit; size_t smallsizelimit; /* what constitutes a small object? */ size_t totalestimatedsize; const char* separator; /* constant; do not free */ /* unlimited dimension */ struct CDFnode* unlimited; char* recorddim; /* From DODS_EXTRA */ /* libncdap4 only */ NClist* usertypes; /* nodes which will represent netcdf types */ } NCCDF; typedef struct NCDRNO { void* controller; /* cross link to controlling structure (e.g. 
NC*) */ NCCDF cdf; NCDAP dap; /* Control flags and parameters */ NCCONTROLS controls; char* nciofile; /* used to fake out ncio */ int nciofd; } NCDRNO; /* Create our own node tree to mimic ocnode trees*/ /* Each root CDFnode contains info about the whole tree */ typedef struct CDFtree { OCobject ocroot; OCdxd occlass; NClist* nodes; /* all nodes in tree*/ struct CDFnode* root; /* cross link */ NCDRNO* owner; /* Classification flags */ int regridded; /* Was this tree passed thru regrid3? */ } CDFtree; /* Track the kinds of dimensions */ typedef int CDFdimflags; #define CDFDIMNORMAL 0x0 #define CDFDIMSEQ 0x1 #define CDFDIMSTRING 0x2 #define CDFDIMCLONE 0x4 #define CDFDIMUNLIM 0x8 #define CDFDIMANON 0x10 #define CDFDIMRECORD 0x20 #define DIMFLAG(d,flag) ((d)->dim.dimflags & (flag)) #define DIMFLAGSET(d,flag) ((d)->dim.dimflags |= (flag)) #define DIMFLAGCLR(d,flag) ((d)->dim.dimflags &= ~(flag)) typedef struct CDFdim { CDFdimflags dimflags; struct CDFnode* basedim; /* for duplicate dimensions*/ struct CDFnode* array; /* parent array node */ #ifdef IGNORE unsigned int arrayindex; #endif size_t declsize; /* from constrained DDS*/ size_t declsize0; /* from unconstrained DDS*/ } CDFdim; typedef struct CDFarray { NClist* dimensions; /* inherited+originals */ NClist* dimensions0; /* Complete set of dimensions for this var */ struct CDFnode* stringdim; /* Track sequence containment information */ struct CDFnode* seqdim; /* note: unlike string dim; seqdim is also stored in dimensions vector */ struct CDFnode* sequence; /* containing usable sequence, if any */ struct CDFnode* basevar; /* for duplicate grid variables*/ } CDFarray; typedef struct NCattribute { char* name; nc_type etype; /* dap type of the attribute */ NClist* values; /* strings come from the oc values */ } NCattribute; /* Extend as additional DODS attribute values are defined */ typedef struct NCDODS { size_t maxstrlen; char* dimname; } NCDODS; typedef struct NCalignment { unsigned long size; /* size of single 
instance of this type*/ unsigned long alignment; /* alignment of this field */ unsigned long offset; /* offset of this field in parent */ } NCalignment; typedef struct NCtypesize { BOOL aligned; /* have instance and field been defined? */ NCalignment instance; /* Alignment, etc for instance data */ NCalignment field; /* Alignment, etc WRT to parent */ } NCtypesize; /* Closely mimics struct OCnode*/ typedef struct CDFnode { nc_type nctype; /* redundant but convenient*/ nc_type etype; /* redundant but convenient*/ char* name; /* oc base name; redundant but convenient*/ OCobject dds; /* mirror node*/ struct CDFnode* container; struct CDFnode* root; CDFtree* tree; /* pointer so we can pass it around */ CDFdim dim; CDFarray array; NClist* subnodes; /*NClist<OCobject>*/ NClist* attributes; /*NClist<NCattribute*>*/ NCDODS dodsspecial; /*these are the special attributes like maxStrlen */ char* ncfullname; /* with parent name prefixing*/ char* ncbasename; /* without parent name prefixing, but legitimate */ nc_type externaltype; /* the type as represented to nc_inq*/ int ncid; /* relevant NC id for this object*/ unsigned long maxstringlength; unsigned long sequencelimit; /* 0=>unlimited */ BOOL usesequence; /* If this sequence is usable */ BOOL elided; /* 1 => node does not partipate in naming*/ BOOL visible; /* 1 => node is present in constrained tree; independent of elided flag */ BOOL zerodim; /* 1 => node has a zero dimension */ /* These two flags track the effects on grids of constraints */ BOOL virtual; /* Is this node added ? 
*/ BOOL projected; /* Is this a node referenced by projection */ struct CDFnode* attachment; /* DDS<->DATADDS cross link*/ struct CDFnode* template; /* temporary field for regridding */ /* Fields for use by libncdap4 */ NCtypesize typesize; int typeid; /* when treating field as type */ int basetypeid; /* when typeid is vlen */ char* typename; char* vlenname; /* for sequence types */ int singleton; /* for singleton sequences */ unsigned long estimatedsize; /* > 0 Only for var nodes */ } CDFnode; /* It is important to track error status as coming from nc or oc*/ typedef int NCerror; /* OCerror is already defined*/ /**************************************************/ extern struct NCTMODEL nctmodels[]; /**************************************************/ /* Add an extra function whose sole purpose is to allow configure(.ac) to test for the presence of thiscode. */ extern int nc__opendap(void); /* From: ncdap3.c*/ extern NCerror nc3d_open(const char* path, int mode, int* ncidp); extern int nc3d_close(int ncid); extern NCerror freeNCDRNO3(NCDRNO* state); extern void nc3dinitialize(void); extern NCerror fetchtemplatemetadata3(NCDRNO* drno); extern NCerror fetchconstrainedmetadata3(NCDRNO* drno); extern NCerror regrid3(CDFnode* ddsroot, CDFnode* template, NClist*); extern NCerror imprint3(CDFnode* dstroot, CDFnode* srcroot); extern void unimprint3(CDFnode* root); extern NCerror imprintself3(CDFnode* root); extern void setvisible(CDFnode* root, int visible); /* From: dapcvt.c*/ extern NCerror dapconvert3(nc_type, nc_type, char*, char*, size_t); extern int dapcvtattrval3(nc_type, void*, NClist*); /* error.c*/ extern NCerror ocerrtoncerr(OCerror); /* From: common34.c */ extern NCerror fixgrid34(NCDRNO* drno, CDFnode* grid); extern NCerror computecdfinfo34(NCDRNO*, NClist*); extern char* cdfname34(char* basename); extern NCerror augmentddstree34(NCDRNO*, NClist*); extern NCerror clonecdfdims34(NCDRNO*); extern NCerror computecdfdimnames34(NCDRNO*); extern NCerror 
buildcdftree34(NCDRNO*, OCobject, OCdxd, CDFnode**); extern CDFnode* makecdfnode34(NCDRNO*, char* nm, OCtype, /*optional*/ OCobject ocnode, CDFnode* container); extern void freecdfroot34(CDFnode*); extern NCerror findnodedds34(NCDRNO* drno, CDFnode* ddssrc); extern NCerror makegetvar34(struct NCDRNO*, struct CDFnode*, void*, nc_type, struct Getvara**); extern NCerror applyclientparams34(NCDRNO* drno); extern NCerror attach34(CDFnode* xroot, CDFnode* ddstarget); extern NCerror attachall34(CDFnode* xroot, CDFnode* ddsroot); extern NCerror attachsubset34(CDFnode*, CDFnode*); extern void unattach34(CDFnode*); extern int nodematch34(CDFnode* node1, CDFnode* node2); extern int simplenodematch34(CDFnode* node1, CDFnode* node2); extern CDFnode* findxnode34(CDFnode* target, CDFnode* xroot); extern int constrainable34(DAPURL*); extern NCconstraint clonencconstraint34(NCconstraint*); extern char* makeconstraintstring34(NCconstraint*); extern void freencprojections(NClist* plist); extern void freencprojection1(struct NCprojection* p); extern void freencselections(NClist* slist); extern size_t estimatedataddssize34(CDFnode* datadds); extern NClist* CEparse(char* input); /* From constraints3.c */ extern void makewholesegment3(struct NCsegment*,struct CDFnode*); extern void makewholeslice3(struct NCslice* slice, struct CDFnode* dim); /* Give PSEUDOFILE a value */ #ifdef PSEUDOFILE #undef PSEUDOFILE #define PSEUDOFILE "/tmp/pseudofileXXXXXX" #endif /* Replacement for strdup (in libsrc) */ #ifdef HAVE_STRDUP #define nulldup(s) ((s)==NULL?NULL:strdup(s)) #else extern char* nulldup(const char*); #endif #define nulllen(s) (s==NULL?0:strlen(s)) #define nullstring(s) (s==NULL?"(null)":s) #endif /*NCDAP_H*/
5,079
416
<filename>extension-admob/1,6,4/project/include/UIKit.framework/UIKeyConstants.h<gh_stars>100-1000 #if (defined(USE_UIKIT_PUBLIC_HEADERS) && USE_UIKIT_PUBLIC_HEADERS) || !__has_include(<UIKitCore/UIKeyConstants.h>) // // UIKeyConstants.h // UIKit // // Copyright © 2020 Apple Inc. All rights reserved. // #import <UIKit/UIKitDefines.h> #pragma once /** * This file defines a table of standardized key codes, called "HID usages" used by USB keyboards to identify individual keys. * * A general note on Usages and languages: Due to the variation of keyboards from language to language, it is not feasible * to specify exact key mappings for every language. Where this list is not specific for a key function in a language, the * closest equivalent key position should be used, so that a keyboard may be modified for a different language by simply printing * different keycaps. One example is the Y key on a North American keyboard. In Germany this is typically Z. Rather than changing * the keyboard firmware to put the Z Usage into that place in the descriptor list, the vendor uses the Y Usage on both the North * American and German keyboards. This continues to be the existing practice in the industry, in order to minimize the number of * changes to the electronics to accommodate other languages. * * The following constants are from the USB 'HID Usage Tables' specification, revision 1.1rc3. 
*/ API_AVAILABLE(ios(13.4), tvos(13.4)) API_UNAVAILABLE(watchos) typedef NS_ENUM(CFIndex, UIKeyboardHIDUsage) { UIKeyboardHIDUsageKeyboardErrorRollOver = 0x01, /* ErrorRollOver */ UIKeyboardHIDUsageKeyboardPOSTFail = 0x02, /* POSTFail */ UIKeyboardHIDUsageKeyboardErrorUndefined = 0x03, /* ErrorUndefined */ UIKeyboardHIDUsageKeyboardA = 0x04, /* a or A */ UIKeyboardHIDUsageKeyboardB = 0x05, /* b or B */ UIKeyboardHIDUsageKeyboardC = 0x06, /* c or C */ UIKeyboardHIDUsageKeyboardD = 0x07, /* d or D */ UIKeyboardHIDUsageKeyboardE = 0x08, /* e or E */ UIKeyboardHIDUsageKeyboardF = 0x09, /* f or F */ UIKeyboardHIDUsageKeyboardG = 0x0A, /* g or G */ UIKeyboardHIDUsageKeyboardH = 0x0B, /* h or H */ UIKeyboardHIDUsageKeyboardI = 0x0C, /* i or I */ UIKeyboardHIDUsageKeyboardJ = 0x0D, /* j or J */ UIKeyboardHIDUsageKeyboardK = 0x0E, /* k or K */ UIKeyboardHIDUsageKeyboardL = 0x0F, /* l or L */ UIKeyboardHIDUsageKeyboardM = 0x10, /* m or M */ UIKeyboardHIDUsageKeyboardN = 0x11, /* n or N */ UIKeyboardHIDUsageKeyboardO = 0x12, /* o or O */ UIKeyboardHIDUsageKeyboardP = 0x13, /* p or P */ UIKeyboardHIDUsageKeyboardQ = 0x14, /* q or Q */ UIKeyboardHIDUsageKeyboardR = 0x15, /* r or R */ UIKeyboardHIDUsageKeyboardS = 0x16, /* s or S */ UIKeyboardHIDUsageKeyboardT = 0x17, /* t or T */ UIKeyboardHIDUsageKeyboardU = 0x18, /* u or U */ UIKeyboardHIDUsageKeyboardV = 0x19, /* v or V */ UIKeyboardHIDUsageKeyboardW = 0x1A, /* w or W */ UIKeyboardHIDUsageKeyboardX = 0x1B, /* x or X */ UIKeyboardHIDUsageKeyboardY = 0x1C, /* y or Y */ UIKeyboardHIDUsageKeyboardZ = 0x1D, /* z or Z */ UIKeyboardHIDUsageKeyboard1 = 0x1E, /* 1 or ! 
*/ UIKeyboardHIDUsageKeyboard2 = 0x1F, /* 2 or @ */ UIKeyboardHIDUsageKeyboard3 = 0x20, /* 3 or # */ UIKeyboardHIDUsageKeyboard4 = 0x21, /* 4 or $ */ UIKeyboardHIDUsageKeyboard5 = 0x22, /* 5 or % */ UIKeyboardHIDUsageKeyboard6 = 0x23, /* 6 or ^ */ UIKeyboardHIDUsageKeyboard7 = 0x24, /* 7 or & */ UIKeyboardHIDUsageKeyboard8 = 0x25, /* 8 or * */ UIKeyboardHIDUsageKeyboard9 = 0x26, /* 9 or ( */ UIKeyboardHIDUsageKeyboard0 = 0x27, /* 0 or ) */ UIKeyboardHIDUsageKeyboardReturnOrEnter = 0x28, /* Return (Enter) */ UIKeyboardHIDUsageKeyboardEscape = 0x29, /* Escape */ UIKeyboardHIDUsageKeyboardDeleteOrBackspace = 0x2A, /* Delete (Backspace) */ UIKeyboardHIDUsageKeyboardTab = 0x2B, /* Tab */ UIKeyboardHIDUsageKeyboardSpacebar = 0x2C, /* Spacebar */ UIKeyboardHIDUsageKeyboardHyphen = 0x2D, /* - or _ */ UIKeyboardHIDUsageKeyboardEqualSign = 0x2E, /* = or + */ UIKeyboardHIDUsageKeyboardOpenBracket = 0x2F, /* [ or { */ UIKeyboardHIDUsageKeyboardCloseBracket = 0x30, /* ] or } */ UIKeyboardHIDUsageKeyboardBackslash = 0x31, /* \ or | */ UIKeyboardHIDUsageKeyboardNonUSPound = 0x32, /* Non-US # or _ */ /* Typical language mappings: US: \| Belg: μ`£ FrCa: <}> Dan:’* Dutch: <> Fren:*μ Ger: #’ Ital: ù§ LatAm: }`] Nor:,* Span: }Ç Swed: ,* Swiss: $£ UK: #~. */ UIKeyboardHIDUsageKeyboardSemicolon = 0x33, /* ; or : */ UIKeyboardHIDUsageKeyboardQuote = 0x34, /* ' or " */ UIKeyboardHIDUsageKeyboardGraveAccentAndTilde = 0x35, /* Grave Accent and Tilde */ UIKeyboardHIDUsageKeyboardComma = 0x36, /* , or < */ UIKeyboardHIDUsageKeyboardPeriod = 0x37, /* . or > */ UIKeyboardHIDUsageKeyboardSlash = 0x38, /* / or ? 
*/ UIKeyboardHIDUsageKeyboardCapsLock = 0x39, /* Caps Lock */ /* Function keys */ UIKeyboardHIDUsageKeyboardF1 = 0x3A, /* F1 */ UIKeyboardHIDUsageKeyboardF2 = 0x3B, /* F2 */ UIKeyboardHIDUsageKeyboardF3 = 0x3C, /* F3 */ UIKeyboardHIDUsageKeyboardF4 = 0x3D, /* F4 */ UIKeyboardHIDUsageKeyboardF5 = 0x3E, /* F5 */ UIKeyboardHIDUsageKeyboardF6 = 0x3F, /* F6 */ UIKeyboardHIDUsageKeyboardF7 = 0x40, /* F7 */ UIKeyboardHIDUsageKeyboardF8 = 0x41, /* F8 */ UIKeyboardHIDUsageKeyboardF9 = 0x42, /* F9 */ UIKeyboardHIDUsageKeyboardF10 = 0x43, /* F10 */ UIKeyboardHIDUsageKeyboardF11 = 0x44, /* F11 */ UIKeyboardHIDUsageKeyboardF12 = 0x45, /* F12 */ UIKeyboardHIDUsageKeyboardPrintScreen = 0x46, /* Print Screen */ UIKeyboardHIDUsageKeyboardScrollLock = 0x47, /* Scroll Lock */ UIKeyboardHIDUsageKeyboardPause = 0x48, /* Pause */ UIKeyboardHIDUsageKeyboardInsert = 0x49, /* Insert */ UIKeyboardHIDUsageKeyboardHome = 0x4A, /* Home */ UIKeyboardHIDUsageKeyboardPageUp = 0x4B, /* Page Up */ UIKeyboardHIDUsageKeyboardDeleteForward = 0x4C, /* Delete Forward */ UIKeyboardHIDUsageKeyboardEnd = 0x4D, /* End */ UIKeyboardHIDUsageKeyboardPageDown = 0x4E, /* Page Down */ UIKeyboardHIDUsageKeyboardRightArrow = 0x4F, /* Right Arrow */ UIKeyboardHIDUsageKeyboardLeftArrow = 0x50, /* Left Arrow */ UIKeyboardHIDUsageKeyboardDownArrow = 0x51, /* Down Arrow */ UIKeyboardHIDUsageKeyboardUpArrow = 0x52, /* Up Arrow */ /* Keypad (numpad) keys */ UIKeyboardHIDUsageKeypadNumLock = 0x53, /* Keypad NumLock or Clear */ UIKeyboardHIDUsageKeypadSlash = 0x54, /* Keypad / */ UIKeyboardHIDUsageKeypadAsterisk = 0x55, /* Keypad * */ UIKeyboardHIDUsageKeypadHyphen = 0x56, /* Keypad - */ UIKeyboardHIDUsageKeypadPlus = 0x57, /* Keypad + */ UIKeyboardHIDUsageKeypadEnter = 0x58, /* Keypad Enter */ UIKeyboardHIDUsageKeypad1 = 0x59, /* Keypad 1 or End */ UIKeyboardHIDUsageKeypad2 = 0x5A, /* Keypad 2 or Down Arrow */ UIKeyboardHIDUsageKeypad3 = 0x5B, /* Keypad 3 or Page Down */ UIKeyboardHIDUsageKeypad4 = 0x5C, /* Keypad 4 or 
Left Arrow */ UIKeyboardHIDUsageKeypad5 = 0x5D, /* Keypad 5 */ UIKeyboardHIDUsageKeypad6 = 0x5E, /* Keypad 6 or Right Arrow */ UIKeyboardHIDUsageKeypad7 = 0x5F, /* Keypad 7 or Home */ UIKeyboardHIDUsageKeypad8 = 0x60, /* Keypad 8 or Up Arrow */ UIKeyboardHIDUsageKeypad9 = 0x61, /* Keypad 9 or Page Up */ UIKeyboardHIDUsageKeypad0 = 0x62, /* Keypad 0 or Insert */ UIKeyboardHIDUsageKeypadPeriod = 0x63, /* Keypad . or Delete */ UIKeyboardHIDUsageKeyboardNonUSBackslash = 0x64, /* Non-US \ or | */ /* On Apple ISO keyboards, this is the section symbol (§/±) */ /* Typical language mappings: Belg:<\> FrCa:«°» Dan:<\> Dutch:]|[ Fren:<> Ger:<|> Ital:<> LatAm:<> Nor:<> Span:<> Swed:<|> Swiss:<\> UK:\| Brazil: \|. */ UIKeyboardHIDUsageKeyboardApplication = 0x65, /* Application */ UIKeyboardHIDUsageKeyboardPower = 0x66, /* Power */ UIKeyboardHIDUsageKeypadEqualSign = 0x67, /* Keypad = */ /* Additional keys */ UIKeyboardHIDUsageKeyboardF13 = 0x68, /* F13 */ UIKeyboardHIDUsageKeyboardF14 = 0x69, /* F14 */ UIKeyboardHIDUsageKeyboardF15 = 0x6A, /* F15 */ UIKeyboardHIDUsageKeyboardF16 = 0x6B, /* F16 */ UIKeyboardHIDUsageKeyboardF17 = 0x6C, /* F17 */ UIKeyboardHIDUsageKeyboardF18 = 0x6D, /* F18 */ UIKeyboardHIDUsageKeyboardF19 = 0x6E, /* F19 */ UIKeyboardHIDUsageKeyboardF20 = 0x6F, /* F20 */ UIKeyboardHIDUsageKeyboardF21 = 0x70, /* F21 */ UIKeyboardHIDUsageKeyboardF22 = 0x71, /* F22 */ UIKeyboardHIDUsageKeyboardF23 = 0x72, /* F23 */ UIKeyboardHIDUsageKeyboardF24 = 0x73, /* F24 */ UIKeyboardHIDUsageKeyboardExecute = 0x74, /* Execute */ UIKeyboardHIDUsageKeyboardHelp = 0x75, /* Help */ UIKeyboardHIDUsageKeyboardMenu = 0x76, /* Menu */ UIKeyboardHIDUsageKeyboardSelect = 0x77, /* Select */ UIKeyboardHIDUsageKeyboardStop = 0x78, /* Stop */ UIKeyboardHIDUsageKeyboardAgain = 0x79, /* Again */ UIKeyboardHIDUsageKeyboardUndo = 0x7A, /* Undo */ UIKeyboardHIDUsageKeyboardCut = 0x7B, /* Cut */ UIKeyboardHIDUsageKeyboardCopy = 0x7C, /* Copy */ UIKeyboardHIDUsageKeyboardPaste = 0x7D, /* Paste */ 
UIKeyboardHIDUsageKeyboardFind = 0x7E, /* Find */ UIKeyboardHIDUsageKeyboardMute = 0x7F, /* Mute */ UIKeyboardHIDUsageKeyboardVolumeUp = 0x80, /* Volume Up */ UIKeyboardHIDUsageKeyboardVolumeDown = 0x81, /* Volume Down */ UIKeyboardHIDUsageKeyboardLockingCapsLock = 0x82, /* Locking Caps Lock */ UIKeyboardHIDUsageKeyboardLockingNumLock = 0x83, /* Locking Num Lock */ /* Implemented as a locking key; sent as a toggle button. Available for legacy support; however, most systems should use the non-locking version of this key. */ UIKeyboardHIDUsageKeyboardLockingScrollLock = 0x84, /* Locking Scroll Lock */ UIKeyboardHIDUsageKeypadComma = 0x85, /* Keypad Comma */ UIKeyboardHIDUsageKeypadEqualSignAS400 = 0x86, /* Keypad Equal Sign for AS/400 */ /* See the footnotes in the USB specification for what keys these are commonly mapped to. * https://www.usb.org/sites/default/files/documents/hut1_12v2.pdf */ UIKeyboardHIDUsageKeyboardInternational1 = 0x87, /* International1 */ UIKeyboardHIDUsageKeyboardInternational2 = 0x88, /* International2 */ UIKeyboardHIDUsageKeyboardInternational3 = 0x89, /* International3 */ UIKeyboardHIDUsageKeyboardInternational4 = 0x8A, /* International4 */ UIKeyboardHIDUsageKeyboardInternational5 = 0x8B, /* International5 */ UIKeyboardHIDUsageKeyboardInternational6 = 0x8C, /* International6 */ UIKeyboardHIDUsageKeyboardInternational7 = 0x8D, /* International7 */ UIKeyboardHIDUsageKeyboardInternational8 = 0x8E, /* International8 */ UIKeyboardHIDUsageKeyboardInternational9 = 0x8F, /* International9 */ /* LANG1: On Apple keyboard for Japanese, this is the kana switch (かな) key */ /* On Korean keyboards, this is the Hangul/English toggle key. */ UIKeyboardHIDUsageKeyboardLANG1 = 0x90, /* LANG1 */ /* LANG2: On Apple keyboards for Japanese, this is the alphanumeric (英数) key */ /* On Korean keyboards, this is the Hanja conversion key. 
*/ UIKeyboardHIDUsageKeyboardLANG2 = 0x91, /* LANG2 */ /* LANG3: Defines the Katakana key for Japanese USB word-processing keyboards. */ UIKeyboardHIDUsageKeyboardLANG3 = 0x92, /* LANG3 */ /* LANG4: Defines the Hiragana key for Japanese USB word-processing keyboards. */ UIKeyboardHIDUsageKeyboardLANG4 = 0x93, /* LANG4 */ /* LANG5: Defines the Zenkaku/Hankaku key for Japanese USB word-processing keyboards. */ UIKeyboardHIDUsageKeyboardLANG5 = 0x94, /* LANG5 */ /* LANG6-9: Reserved for language-specific functions, such as Front End Processors and Input Method Editors. */ UIKeyboardHIDUsageKeyboardLANG6 = 0x95, /* LANG6 */ UIKeyboardHIDUsageKeyboardLANG7 = 0x96, /* LANG7 */ UIKeyboardHIDUsageKeyboardLANG8 = 0x97, /* LANG8 */ UIKeyboardHIDUsageKeyboardLANG9 = 0x98, /* LANG9 */ UIKeyboardHIDUsageKeyboardAlternateErase = 0x99, /* AlternateErase */ UIKeyboardHIDUsageKeyboardSysReqOrAttention = 0x9A, /* SysReq/Attention */ UIKeyboardHIDUsageKeyboardCancel = 0x9B, /* Cancel */ UIKeyboardHIDUsageKeyboardClear = 0x9C, /* Clear */ UIKeyboardHIDUsageKeyboardPrior = 0x9D, /* Prior */ UIKeyboardHIDUsageKeyboardReturn = 0x9E, /* Return */ UIKeyboardHIDUsageKeyboardSeparator = 0x9F, /* Separator */ UIKeyboardHIDUsageKeyboardOut = 0xA0, /* Out */ UIKeyboardHIDUsageKeyboardOper = 0xA1, /* Oper */ UIKeyboardHIDUsageKeyboardClearOrAgain = 0xA2, /* Clear/Again */ UIKeyboardHIDUsageKeyboardCrSelOrProps = 0xA3, /* CrSel/Props */ UIKeyboardHIDUsageKeyboardExSel = 0xA4, /* ExSel */ /* 0xA5-0xDF: Reserved */ UIKeyboardHIDUsageKeyboardLeftControl = 0xE0, /* Left Control */ UIKeyboardHIDUsageKeyboardLeftShift = 0xE1, /* Left Shift */ UIKeyboardHIDUsageKeyboardLeftAlt = 0xE2, /* Left Alt */ UIKeyboardHIDUsageKeyboardLeftGUI = 0xE3, /* Left GUI */ UIKeyboardHIDUsageKeyboardRightControl = 0xE4, /* Right Control */ UIKeyboardHIDUsageKeyboardRightShift = 0xE5, /* Right Shift */ UIKeyboardHIDUsageKeyboardRightAlt = 0xE6, /* Right Alt */ UIKeyboardHIDUsageKeyboardRightGUI = 0xE7, /* Right GUI */ /* 
0xE8-0xFFFF: Reserved */ UIKeyboardHIDUsageKeyboard_Reserved = 0xFFFF, /* Helpful aliases */ UIKeyboardHIDUsageKeyboardHangul = UIKeyboardHIDUsageKeyboardLANG1, UIKeyboardHIDUsageKeyboardHanja = UIKeyboardHIDUsageKeyboardLANG2, UIKeyboardHIDUsageKeyboardKanaSwitch = UIKeyboardHIDUsageKeyboardLANG1, UIKeyboardHIDUsageKeyboardAlphanumericSwitch = UIKeyboardHIDUsageKeyboardLANG2, UIKeyboardHIDUsageKeyboardKatakana = UIKeyboardHIDUsageKeyboardLANG3, UIKeyboardHIDUsageKeyboardHiragana = UIKeyboardHIDUsageKeyboardLANG4, UIKeyboardHIDUsageKeyboardZenkakuHankakuKanji = UIKeyboardHIDUsageKeyboardLANG5, }; #else #import <UIKitCore/UIKeyConstants.h> #endif
7,861
365
# -*- coding: utf-8 -*-

from benedict.core import clean as _clean
from benedict.core import clone as _clone
from benedict.core import dump as _dump
from benedict.core import filter as _filter
from benedict.core import find as _find
from benedict.core import flatten as _flatten
from benedict.core import groupby as _groupby
from benedict.core import invert as _invert
from benedict.core import items_sorted_by_keys as _items_sorted_by_keys
from benedict.core import items_sorted_by_values as _items_sorted_by_values
from benedict.core import keypaths as _keypaths
from benedict.core import match as _match
from benedict.core import merge as _merge
from benedict.core import move as _move
from benedict.core import nest as _nest
from benedict.core import remove as _remove
from benedict.core import rename as _rename
from benedict.core import search as _search
from benedict.core import standardize as _standardize
from benedict.core import subset as _subset
from benedict.core import swap as _swap
from benedict.core import traverse as _traverse
from benedict.core import unflatten as _unflatten
from benedict.core import unique as _unique
from benedict.dicts.io import IODict
from benedict.dicts.keylist import KeylistDict
from benedict.dicts.keypath import KeypathDict
from benedict.dicts.parse import ParseDict


class benedict(KeypathDict, IODict, ParseDict):

    def __init__(self, *args, **kwargs):
        """
        Constructs a new instance.
        """
        if len(args) == 1 and isinstance(args[0], benedict):
            obj = args[0]
            # Inherit the source instance keypath separator unless overridden.
            kwargs.setdefault('keypath_separator', obj.keypath_separator)
            super(benedict, self).__init__(obj.dict(), **kwargs)
            return
        super(benedict, self).__init__(*args, **kwargs)

    def __deepcopy__(self, memo):
        obj = benedict(keypath_separator=self._keypath_separator)
        for key, value in self.items():
            obj[key] = _clone(value, memo=memo)
        return obj

    def __getitem__(self, key):
        return self._cast(super(benedict, self).__getitem__(key))

    def _cast(self, value):
        """
        Cast a dict instance to a benedict instance
        keeping the pointer to the original dict.
        """
        if isinstance(value, dict) and not isinstance(value, benedict):
            return benedict(
                value, keypath_separator=self._keypath_separator, check_keys=False
            )
        return value

    def clean(self, strings=True, collections=True):
        """
        Clean the current dict instance removing all empty values: None, '', {}, [], ().
        If strings or collections (dict, list, set, tuple) flags are False,
        related empty values will not be deleted.
        """
        _clean(self, strings=strings, collections=collections)

    def clone(self):
        """
        Creates and return a clone of the current dict instance (deep copy).
        """
        return self._cast(_clone(self))

    def copy(self):
        """
        Creates and return a copy of the current instance (shallow copy).
        """
        return self._cast(super(benedict, self).copy())

    def deepcopy(self):
        """
        Alias of 'clone' method.
        """
        return self.clone()

    def deepupdate(self, other, *args):
        """
        Alias of 'merge' method.
        """
        self.merge(other, *args)

    def dump(self, data=None):
        """
        Return a readable string representation of any dict/list.
        This method can be used both as static method or instance method.
        """
        # Use an explicit None check: a falsy-but-valid payload such as
        # {} or [] must be dumped itself, not silently replaced by self
        # (which is what the previous `data or self` expression did).
        return _dump(self if data is None else data)

    def filter(self, predicate):
        """
        Return a new filtered dict using the given predicate function.
        Predicate function receives key, value arguments and should return a bool value.
        """
        return _filter(self, predicate)

    def find(self, keys, default=None):
        """
        Return the first match searching for the given keys.
        If no result found, default value is returned.
        """
        return _find(self, keys, default)

    def flatten(self, separator='_'):
        """
        Return a new flattened dict using the given separator
        to join nested dict keys to flatten keypaths.
        """
        return _flatten(self, separator)

    def get(self, key, default=None):
        return self._cast(super(benedict, self).get(key, default))

    def get_dict(self, key, default=None):
        return self._cast(super(benedict, self).get_dict(key, default))

    def get_list_item(self, key, index=0, default=None, separator=','):
        return self._cast(
            super(benedict, self).get_list_item(key, index, default, separator)
        )

    def groupby(self, key, by_key):
        """
        Group a list of dicts at key by the value of the given by_key and return a new dict.
        """
        return self._cast(_groupby(self[key], by_key))

    def invert(self, flat=False):
        """
        Return a new inverted dict, where values become keys and keys become values.
        Since multiple keys could have the same value, each value will be a list of keys.
        If flat is True each value will be a single value
        (use this only if values are unique).
        """
        return _invert(self, flat)

    def items_sorted_by_keys(self, reverse=False):
        """
        Return items (key/value list) sorted by keys.
        If reverse is True, the list will be reversed.
        """
        return _items_sorted_by_keys(self, reverse=reverse)

    def items_sorted_by_values(self, reverse=False):
        """
        Return items (key/value list) sorted by values.
        If reverse is True, the list will be reversed.
        """
        return _items_sorted_by_values(self, reverse=reverse)

    def keypaths(self, indexes=False):
        """
        Return a list of all keypaths in the dict.
        If indexes is True, the output will include list values indexes.
        """
        return _keypaths(self, separator=self._keypath_separator, indexes=indexes)

    def match(self, pattern, indexes=True):
        """
        Return a list of all values whose keypath matches the given pattern
        (a regex or string).
        If pattern is string, wildcard can be used
        (eg. [*] can be used to match all list indexes).
        If indexes is True, the pattern will be matched also against list values.
        """
        return _match(self, pattern, separator=self._keypath_separator, indexes=indexes)

    def merge(self, other, *args, **kwargs):
        """
        Merge one or more dict objects into current instance (deepupdate).
        Sub-dictionaries will be merged together.
        If overwrite is False, existing values will not be overwritten.
        If concat is True, list values will be concatenated together.
        """
        _merge(self, other, *args, **kwargs)

    def move(self, key_src, key_dest):
        """
        Move a dict instance value item from 'key_src' to 'key_dest'.
        If key_dest exists, its value will be overwritten.
        """
        _move(self, key_src, key_dest)

    def nest(
        self, key, id_key='id', parent_id_key='parent_id', children_key='children'
    ):
        """
        Nest a list of dicts at the given key and return a new nested list
        using the specified keys to establish the correct items hierarchy.
        """
        return _nest(self[key], id_key, parent_id_key, children_key)

    def pop(self, key, *args):
        return self._cast(super(benedict, self).pop(key, *args))

    def remove(self, keys, *args):
        """
        Remove multiple keys from the current dict instance.
        It is possible to pass a single key or more keys (as list or *args).
        """
        _remove(self, keys, *args)

    def setdefault(self, key, default=None):
        return self._cast(super(benedict, self).setdefault(key, default))

    def rename(self, key, key_new):
        """
        Rename a dict item key from 'key' to 'key_new'.
        If key_new exists, a KeyError will be raised.
        """
        _rename(self, key, key_new)

    def search(
        self, query, in_keys=True, in_values=True, exact=False, case_sensitive=False
    ):
        """
        Search and return a list of items (dict, key, value, ) matching the given query.
        """
        return _search(self, query, in_keys, in_values, exact, case_sensitive)

    def standardize(self):
        """
        Standardize all dict keys (e.g. 'Location Latitude' -> 'location_latitude').
        """
        _standardize(self)

    def subset(self, keys, *args):
        """
        Return a new dict subset for the given keys.
        It is possible to pass a single key or multiple keys (as list or *args).
        """
        return _subset(self, keys, *args)

    def swap(self, key1, key2):
        """
        Swap items values at the given keys.
        """
        _swap(self, key1, key2)

    def traverse(self, callback):
        """
        Traverse the current dict instance (including nested dicts),
        and pass each item (dict, key, value) to the callback function.
        """
        _traverse(self, callback)

    def unflatten(self, separator='_'):
        """
        Return a new unflattened dict using the given separator
        to split dict keys to nested keypaths.
        """
        return _unflatten(self, separator)

    def unique(self):
        """
        Remove duplicated values from the current dict instance.
        """
        _unique(self)


# fix benedict json dumps support - #57 #59 #61
from json import encoder

encoder.c_make_encoder = None

# fix benedict yaml representer - #43
from yaml import SafeDumper
from yaml.representer import SafeRepresenter

SafeDumper.yaml_representers[benedict] = SafeRepresenter.represent_dict
3,882
2,073
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.store.kahadb.scheduler.legacy;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.activemq.store.kahadb.disk.index.BTreeIndex;
import org.apache.activemq.store.kahadb.disk.journal.Journal;
import org.apache.activemq.store.kahadb.disk.journal.Location;
import org.apache.activemq.store.kahadb.disk.page.Page;
import org.apache.activemq.store.kahadb.disk.page.PageFile;
import org.apache.activemq.store.kahadb.disk.page.Transaction;
import org.apache.activemq.store.kahadb.disk.util.IntegerMarshaller;
import org.apache.activemq.store.kahadb.disk.util.StringMarshaller;
import org.apache.activemq.store.kahadb.disk.util.VariableMarshaller;
import org.apache.activemq.util.ByteSequence;
import org.apache.activemq.util.IOHelper;
import org.apache.activemq.util.LockFile;
import org.apache.activemq.util.ServiceStopper;
import org.apache.activemq.util.ServiceSupport;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Read-only view of a legacy JobSchedulerStore implementation.
 *
 * On start this service opens the legacy journal and page file, loads the
 * persisted MetaData page (page 0) and reconstructs every named
 * LegacyJobSchedulerImpl so callers can read scheduled jobs out of the old
 * store format.
 */
final class LegacyJobSchedulerStoreImpl extends ServiceSupport {

    static final Logger LOG = LoggerFactory.getLogger(LegacyJobSchedulerStoreImpl.class);

    // Retry interval (ms) used while waiting for the store lock to be released.
    private static final int DATABASE_LOCKED_WAIT_DELAY = 10 * 1000;

    private File directory;
    private PageFile pageFile;
    private Journal journal;
    private LockFile lockFile;
    // Tracks the on-disk journal size; registered with the Journal as its accumulator.
    private final AtomicLong journalSize = new AtomicLong(0);
    private boolean failIfDatabaseIsLocked;
    private int journalMaxFileLength = Journal.DEFAULT_MAX_FILE_LENGTH;
    private int journalMaxWriteBatchSize = Journal.DEFAULT_MAX_WRITE_BATCH_SIZE;
    private boolean enableIndexWriteAsync = false;
    private MetaData metaData = new MetaData(this);
    private final MetaDataMarshaller metaDataMarshaller = new MetaDataMarshaller(this);
    // Named schedulers reconstructed from the store on start.
    private final Map<String, LegacyJobSchedulerImpl> schedulers = new HashMap<String, LegacyJobSchedulerImpl>();

    /**
     * Root index record persisted on page 0 of the page file: maps scheduler
     * names to schedulers and journal data-file ids to reference counts.
     */
    protected class MetaData {
        protected MetaData(LegacyJobSchedulerStoreImpl store) {
            this.store = store;
        }

        private final LegacyJobSchedulerStoreImpl store;
        Page<MetaData> page;
        BTreeIndex<Integer, Integer> journalRC;
        BTreeIndex<String, LegacyJobSchedulerImpl> storedSchedulers;

        // Allocates fresh pages for both indexes (first start of an empty store).
        void createIndexes(Transaction tx) throws IOException {
            this.storedSchedulers = new BTreeIndex<String, LegacyJobSchedulerImpl>(pageFile, tx.allocate().getPageId());
            this.journalRC = new BTreeIndex<Integer, Integer>(pageFile, tx.allocate().getPageId());
        }

        // Attaches marshallers and loads both indexes from their pages.
        void load(Transaction tx) throws IOException {
            this.storedSchedulers.setKeyMarshaller(StringMarshaller.INSTANCE);
            this.storedSchedulers.setValueMarshaller(new JobSchedulerMarshaller(this.store));
            this.storedSchedulers.load(tx);
            this.journalRC.setKeyMarshaller(IntegerMarshaller.INSTANCE);
            this.journalRC.setValueMarshaller(IntegerMarshaller.INSTANCE);
            this.journalRC.load(tx);
        }

        // Loads each persisted scheduler and registers it in the given map.
        void loadScheduler(Transaction tx, Map<String, LegacyJobSchedulerImpl> schedulers) throws IOException {
            for (Iterator<Entry<String, LegacyJobSchedulerImpl>> i = this.storedSchedulers.iterator(tx); i.hasNext();) {
                Entry<String, LegacyJobSchedulerImpl> entry = i.next();
                entry.getValue().load(tx);
                schedulers.put(entry.getKey(), entry.getValue());
            }
        }

        // Deserializes the two index root page ids (load(tx) must still be called).
        public void read(DataInput is) throws IOException {
            this.storedSchedulers = new BTreeIndex<String, LegacyJobSchedulerImpl>(pageFile, is.readLong());
            this.storedSchedulers.setKeyMarshaller(StringMarshaller.INSTANCE);
            this.storedSchedulers.setValueMarshaller(new JobSchedulerMarshaller(this.store));
            this.journalRC = new BTreeIndex<Integer, Integer>(pageFile, is.readLong());
            this.journalRC.setKeyMarshaller(IntegerMarshaller.INSTANCE);
            this.journalRC.setValueMarshaller(IntegerMarshaller.INSTANCE);
        }

        // Serializes only the index root page ids; index contents live in their own pages.
        public void write(DataOutput os) throws IOException {
            os.writeLong(this.storedSchedulers.getPageId());
            os.writeLong(this.journalRC.getPageId());
        }
    }

    /** (De)serializes the MetaData record stored on page 0. */
    class MetaDataMarshaller extends VariableMarshaller<MetaData> {
        private final LegacyJobSchedulerStoreImpl store;

        MetaDataMarshaller(LegacyJobSchedulerStoreImpl store) {
            this.store = store;
        }

        @Override
        public MetaData readPayload(DataInput dataIn) throws IOException {
            MetaData rc = new MetaData(this.store);
            rc.read(dataIn);
            return rc;
        }

        @Override
        public void writePayload(MetaData object, DataOutput dataOut) throws IOException {
            object.write(dataOut);
        }
    }

    /** (De)serializes a list of job locations as a count-prefixed sequence. */
    class ValueMarshaller extends VariableMarshaller<List<LegacyJobLocation>> {
        @Override
        public List<LegacyJobLocation> readPayload(DataInput dataIn) throws IOException {
            List<LegacyJobLocation> result = new ArrayList<LegacyJobLocation>();
            int size = dataIn.readInt();
            for (int i = 0; i < size; i++) {
                LegacyJobLocation jobLocation = new LegacyJobLocation();
                jobLocation.readExternal(dataIn);
                result.add(jobLocation);
            }
            return result;
        }

        @Override
        public void writePayload(List<LegacyJobLocation> value, DataOutput dataOut) throws IOException {
            dataOut.writeInt(value.size());
            for (LegacyJobLocation jobLocation : value) {
                jobLocation.writeExternal(dataOut);
            }
        }
    }

    /** (De)serializes a single named scheduler entry of the storedSchedulers index. */
    class JobSchedulerMarshaller extends VariableMarshaller<LegacyJobSchedulerImpl> {
        private final LegacyJobSchedulerStoreImpl store;

        JobSchedulerMarshaller(LegacyJobSchedulerStoreImpl store) {
            this.store = store;
        }

        @Override
        public LegacyJobSchedulerImpl readPayload(DataInput dataIn) throws IOException {
            LegacyJobSchedulerImpl result = new LegacyJobSchedulerImpl(this.store);
            result.read(dataIn);
            return result;
        }

        @Override
        public void writePayload(LegacyJobSchedulerImpl js, DataOutput dataOut) throws IOException {
            js.write(dataOut);
        }
    }

    public File getDirectory() {
        return directory;
    }

    public void setDirectory(File directory) {
        this.directory = directory;
    }

    /** Total on-disk footprint: journal size plus page file size; 0 when stopped. */
    public long size() {
        if (!isStarted()) {
            return 0;
        }
        try {
            return journalSize.get() + pageFile.getDiskSize();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Returns the named Job Scheduler if it exists, otherwise throws an exception.
     *
     * @param name
     *        The name of the scheduler that is to be returned.
     *
     * @return the named scheduler if it exists.
     *
     * @throws Exception if the named scheduler does not exist in this store.
     */
    public LegacyJobSchedulerImpl getJobScheduler(final String name) throws Exception {
        LegacyJobSchedulerImpl result = this.schedulers.get(name);
        if (result == null) {
            throw new NoSuchElementException("No such Job Scheduler in this store: " + name);
        }
        return result;
    }

    /**
     * Returns the names of all the schedulers that exist in this scheduler store.
     *
     * @return a set of names of all scheduler instances in this store.
     *
     * @throws Exception if an error occurs while collecting the scheduler names.
     */
    public Set<String> getJobSchedulerNames() throws Exception {
        Set<String> names = Collections.emptySet();

        if (!schedulers.isEmpty()) {
            return this.schedulers.keySet();
        }

        return names;
    }

    @Override
    protected void doStart() throws Exception {
        if (this.directory == null) {
            // NOTE(review): File.pathSeparator is the PATH-list separator (':'/';'),
            // not the directory separator — on unix this yields a directory literally
            // named "data:delayedDB". Likely File.separator was intended; left as-is
            // because this legacy reader must match wherever the old store wrote.
            this.directory = new File(IOHelper.getDefaultDataDirectory() + File.pathSeparator + "delayedDB");
        }
        IOHelper.mkdirs(this.directory);
        lock();
        this.journal = new Journal();
        this.journal.setDirectory(directory);
        this.journal.setMaxFileLength(getJournalMaxFileLength());
        this.journal.setWriteBatchSize(getJournalMaxWriteBatchSize());
        this.journal.setSizeAccumulator(this.journalSize);
        this.journal.start();
        this.pageFile = new PageFile(directory, "scheduleDB");
        this.pageFile.setWriteBatchSize(1);
        this.pageFile.load();

        this.pageFile.tx().execute(new Transaction.Closure<IOException>() {
            @Override
            public void execute(Transaction tx) throws IOException {
                if (pageFile.getPageCount() == 0) {
                    // Brand new store: MetaData must land on page 0 so it can be
                    // found again on the next start.
                    Page<MetaData> page = tx.allocate();
                    assert page.getPageId() == 0;
                    page.set(metaData);
                    metaData.page = page;
                    metaData.createIndexes(tx);
                    tx.store(metaData.page, metaDataMarshaller, true);
                } else {
                    // Existing store: recover MetaData from page 0.
                    Page<MetaData> page = tx.load(0, metaDataMarshaller);
                    metaData = page.get();
                    metaData.page = page;
                }
                metaData.load(tx);
                metaData.loadScheduler(tx, schedulers);
                for (LegacyJobSchedulerImpl js : schedulers.values()) {
                    try {
                        js.start();
                    } catch (Exception e) {
                        LegacyJobSchedulerStoreImpl.LOG.error("Failed to load " + js.getName(), e);
                    }
                }
            }
        });

        this.pageFile.flush();
        LOG.info(this + " started");
    }

    @Override
    protected void doStop(ServiceStopper stopper) throws Exception {
        for (LegacyJobSchedulerImpl js : this.schedulers.values()) {
            js.stop();
        }
        if (this.pageFile != null) {
            this.pageFile.unload();
        }
        if (this.journal != null) {
            journal.close();
        }
        if (this.lockFile != null) {
            this.lockFile.unlock();
        }
        this.lockFile = null;
        LOG.info(this + " stopped");
    }

    // Reads the raw payload bytes stored at the given journal location.
    ByteSequence getPayload(Location location) throws IllegalStateException, IOException {
        ByteSequence result = null;
        result = this.journal.read(location);
        return result;
    }

    Location write(ByteSequence payload, boolean sync) throws IllegalStateException, IOException {
        return this.journal.write(payload, sync);
    }

    // Acquires the store lock file; when failIfDatabaseIsLocked is false this
    // retries forever, sleeping DATABASE_LOCKED_WAIT_DELAY between attempts
    // (InterruptedException during the sleep is deliberately swallowed).
    private void lock() throws IOException {
        if (lockFile == null) {
            File lockFileName = new File(directory, "lock");
            lockFile = new LockFile(lockFileName, true);
            if (failIfDatabaseIsLocked) {
                lockFile.lock();
            } else {
                while (true) {
                    try {
                        lockFile.lock();
                        break;
                    } catch (IOException e) {
                        LOG.info("Database " + lockFileName + " is locked... waiting " + (DATABASE_LOCKED_WAIT_DELAY / 1000)
                            + " seconds for the database to be unlocked. Reason: " + e);
                        try {
                            Thread.sleep(DATABASE_LOCKED_WAIT_DELAY);
                        } catch (InterruptedException e1) {
                        }
                    }
                }
            }
        }
    }

    PageFile getPageFile() {
        // NOTE(review): isLoaded() returns a value that is discarded here —
        // presumably intended as a sanity check; confirm whether an assertion
        // or an exception on an unloaded page file was meant.
        this.pageFile.isLoaded();
        return this.pageFile;
    }

    public boolean isFailIfDatabaseIsLocked() {
        return failIfDatabaseIsLocked;
    }

    public void setFailIfDatabaseIsLocked(boolean failIfDatabaseIsLocked) {
        this.failIfDatabaseIsLocked = failIfDatabaseIsLocked;
    }

    public int getJournalMaxFileLength() {
        return journalMaxFileLength;
    }

    public void setJournalMaxFileLength(int journalMaxFileLength) {
        this.journalMaxFileLength = journalMaxFileLength;
    }

    public int getJournalMaxWriteBatchSize() {
        return journalMaxWriteBatchSize;
    }

    public void setJournalMaxWriteBatchSize(int journalMaxWriteBatchSize) {
        this.journalMaxWriteBatchSize = journalMaxWriteBatchSize;
    }

    public boolean isEnableIndexWriteAsync() {
        return enableIndexWriteAsync;
    }

    public void setEnableIndexWriteAsync(boolean enableIndexWriteAsync) {
        this.enableIndexWriteAsync = enableIndexWriteAsync;
    }

    @Override
    public String toString() {
        return "LegacyJobSchedulerStore:" + this.directory;
    }
}
5,894
76,518
{"name":"Shortcut Guide","properties":{"overlay_opacity":{"value":32},"press_time":{"value":1150},"theme":{"value":"system"}},"version":"1.0"}
41
468
{
  "name": "freemanlab",
  "author": "freemanlab",
  "license": "CC0",
  "vector": "http://hexb.in/vector/freemanlab.svg",
  "raster": "http://hexb.in/hexagons/freemanlab.png",
  "filename": "meta/freemanlab.json"
}
100
311
package test.published.dependencies;

/**
 * Minimal entry point whose only job is to confirm that this module's
 * published dependencies resolve and the program runs.
 */
public class App {

    public static void main(String[] args) {
        final String message = "Test published dependencies!";
        System.out.println(message);
    }
}
47
1,127
<reponame>wood-ghost/openvino<gh_stars>1000+
// Copyright (C) 2022 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

#include <vector>

#include "single_layer_tests/einsum.hpp"

using namespace ngraph::helpers;
using namespace LayerTestsDefinitions;

namespace {

// Numeric precisions the Einsum single-layer smoke test is instantiated for.
const std::vector<InferenceEngine::Precision> precisions = {
    InferenceEngine::Precision::FP32,
    InferenceEngine::Precision::FP16
};

// Test cases: each entry pairs an einsum equation string with the shapes of
// its input tensors; the trailing comment names the operation being covered.
const std::vector<EinsumEquationWithInput> equationsWithInput = {
    { "ij->ji", {{{1, 2}}} }, // transpose 2d
    { "ijk->kij", { {1, 2, 3} } }, // transpose 3d
    { "ij->i", { {2, 3} } }, // reduce
    { "ab,cd->abcd", { { 1, 2}, {3, 4} } }, // no reduction
    { "ab,bc->ac", { {2, 3}, {3, 2} } }, // matrix multiplication
    { "ab,bcd,bc->ca", { {2, 4}, {4, 3, 1}, {4, 3} } }, // multiple multiplications
    { "kii->ki", { {1, 3, 3} } }, // diagonal
    { "abbac,bad->ad", { {2, 3, 3, 2, 4}, {3, 2, 1} } }, // diagonal and multiplication with repeated labels
    { "a...->...a", { {2, 2, 3} } }, // transpose with ellipsis
    { "a...->...", { {2, 2, 3} } }, // reduce with ellipsis
    { "ab...,...->ab...", { {2, 2, 3}, {1} } }, // multiply by scalar
    { "a...j,j...->a...", { {1, 1, 4, 3}, {3, 4, 2, 1} } } // complex multiplication
};

// Cartesian product of precision x test case, targeted at the GPU device.
const auto params = ::testing::Combine(
    ::testing::ValuesIn(precisions),
    ::testing::ValuesIn(equationsWithInput),
    ::testing::Values(CommonTestUtils::DEVICE_GPU));

INSTANTIATE_TEST_SUITE_P(smoke_Einsum, EinsumLayerTest, params, EinsumLayerTest::getTestCaseName);

}  // namespace
662
672
/* * Copyright (c) 2010 Apple Inc. All rights reserved. * * @APPLE_LLVM_LICENSE_HEADER@ */ /* * orbars.c * testObjects * * Created by <NAME> on 9/17/08. * Copyright 2008 __MyCompanyName__. All rights reserved. */ // rdar://6276695 error: before ‘|’ token // TEST_CONFIG RUN=0 /* TEST_BUILD_OUTPUT .*orbars.c:29:\d+: error: expected expression END */ #include <stdio.h> #include "test.h" int main() { int i __unused = 10; void (^b)(void) __unused = ^(void){ | i | printf("hello world, %d\n", i); }; fail("should not compile"); }
229
399
/*
* Copyright(c) 2019 Netflix, Inc.
*
* This source code is subject to the terms of the BSD 2 Clause License and
* the Alliance for Open Media Patent License 1.0. If the BSD 2 Clause License
* was not distributed with this source code in the LICENSE file, you can
* obtain it at https://www.aomedia.org/license/software-license. If the Alliance for Open
* Media Patent License 1.0 was not distributed with this source code in the
* PATENTS file, you can obtain it at https://www.aomedia.org/license/patent-license.
*/

#include <stdlib.h>
#include <stddef.h>
#include <string.h>
#include <assert.h>

#include "EbFileUtils.h"

const char *ivf_signature = "DKIF";

// An unsigned LEB128 value occupies at most 8 encoded bytes.
static const size_t k_maximum_leb_128_size = 8;
// Low 7 bits of each LEB128 byte carry the payload. Binary: 01111111
static const uint8_t k_leb_128byte_mask = 0x7f;  // Binary: 01111111

// Reads a 16-bit little-endian value from an unaligned buffer.
static unsigned int mem_get_le16(const void *vmem) {
    unsigned int   val;
    const uint8_t *mem = (const uint8_t *)vmem;

    val = mem[1] << 8;
    val |= mem[0];
    return val;
}

// Reads a 32-bit little-endian value from an unaligned buffer.
static unsigned int mem_get_le32(const void *vmem) {
    unsigned int   val;
    const uint8_t *mem = (const uint8_t *)vmem;

    val = ((unsigned int)mem[3]) << 24;
    val |= mem[2] << 16;
    val |= mem[1] << 8;
    val |= mem[0];
    return val;
}

// Replaces an implausible container framerate with a 30/1 default.
static void fix_framerate(int *num, int *den) {
    if (*den <= 0 || *den >= 1000000000 || *num <= 0 || *num >= 1000) {
        // framerate seems to be invalid, just default to 30fps.
        *num = 30;
        *den = 1;
    }
}

// Cursor over a byte buffer that is consumed one bit at a time (MSB first).
typedef struct ReadBitBuffer {
    const uint8_t *bit_buffer;
    const uint8_t *bit_buffer_end;
    uint32_t       bit_offset;
} ReadBitBuffer;

// Decodes an unsigned LEB128 integer from 'buffer' (at most 'available'
// bytes). On success returns 0, stores the decoded value in '*value' and,
// when 'length' is non-NULL, the number of bytes consumed. Returns -1 on
// invalid input or when the value exceeds 32 bits.
int uleb_decode(const uint8_t *buffer, size_t available, uint64_t *value, size_t *length) {
    if (buffer && value) {
        *value = 0;
        for (size_t i = 0; i < k_maximum_leb_128_size && i < available; ++i) {
            const uint8_t decoded_byte = *(buffer + i) & k_leb_128byte_mask;
            *value |= ((uint64_t)decoded_byte) << (i * 7);
            if ((*(buffer + i) >> 7) == 0) {
                if (length) {
                    *length = i + 1;
                }
                // Fail on values larger than 32-bits to ensure consistent
                // behavior on 32 and 64 bit targets: value is typically
                // used to determine buffer allocation size.
                if (*value > UINT32_MAX)
                    return -1;
                return 0;
            }
        }
    }
    // If we get here, either the buffer/value pointers were invalid,
    // or we ran over the available space
    return -1;
}

// Reads unsigned LEB128 integer and returns 0 upon successful read and decode.
// Stores raw bytes in 'value_buffer', length of the number in 'value_length',
// and decoded value in 'value'.
static int obudec_read_leb128(FILE *f, uint8_t *value_buffer, size_t *value_length, uint64_t *value) {
    if (!f || !value_buffer || !value_length || !value)
        return -1;
    size_t len;
    for (len = 0; len < OBU_MAX_LENGTH_FIELD_SIZE; ++len) {
        const size_t num_read = fread(&value_buffer[len], 1, 1, f);
        if (num_read == 0) {
            if (len == 0 && feof(f)) {
                // Clean EOF before any byte was read: report zero length.
                *value_length = 0;
                return 0;
            }
            // Ran out of data before completing read of value.
            return -1;
        }
        // High bit clear marks the final byte of the LEB128 encoding.
        if ((value_buffer[len] >> 7) == 0) {
            ++len;
            *value_length = len;
            break;
        }
    }
    return uleb_decode(value_buffer, len, value, NULL);
}

// Consumes and returns the next bit (MSB first); returns 0 past end of buffer.
int rb_read_bit(ReadBitBuffer *rb) {
    const uint32_t off = rb->bit_offset;
    const uint32_t p   = off >> 3;
    const int      q   = 7 - (int)(off & 0x7);
    if (rb->bit_buffer + p < rb->bit_buffer_end) {
        const int bit  = (rb->bit_buffer[p] >> q) & 1;
        rb->bit_offset = off + 1;
        return bit;
    } else
        return 0;
}

// Reads 'bits' bits (MSB first) and returns them as an unsigned literal.
int rb_read_literal(ReadBitBuffer *rb, int bits) {
    assert(bits <= 31);
    int value = 0, bit;
    for (bit = bits - 1; bit >= 0; bit--) value |= rb_read_bit(rb) << bit;
    return value;
}

// Returns 1 when OBU type is valid, and 0 otherwise.
static int valid_obu_type(int obu_type) {
    int valid_type = 0;
    switch (obu_type) {
    case OBU_SEQUENCE_HEADER:
    case OBU_TEMPORAL_DELIMITER:
    case OBU_FRAME_HEADER:
    case OBU_TILE_GROUP:
    case OBU_METADATA:
    case OBU_FRAME:
    case OBU_REDUNDANT_FRAME_HEADER:
    case OBU_TILE_LIST:
    case OBU_PADDING: valid_type = 1; break;
    default: break;
    }
    return valid_type;
}

// Parses OBU header and stores values in 'header'.
static int read_obu_header(ReadBitBuffer *rb, uint32_t is_annexb, ObuHeader *header) {
    if (!rb || !header)
        return -1;

    const ptrdiff_t bit_buffer_byte_length = rb->bit_buffer_end - rb->bit_buffer;
    if (bit_buffer_byte_length < 1)
        return -1;

    header->size = 1;

    if (rb_read_bit(rb) != 0) {
        // Forbidden bit. Must not be set.
        return -1;
    }

    header->type = (OBU_TYPE)rb_read_literal(rb, 4);

    if (!valid_obu_type(header->type))
        return -1;

    header->has_extension  = rb_read_bit(rb);
    header->has_size_field = rb_read_bit(rb);

    if (!header->has_size_field && !is_annexb) {
        // section 5 obu streams must have obu_size field set.
        return -1;
    }

    if (rb_read_bit(rb) != 0) {
        // obu_reserved_1bit must be set to 0.
        return -1;
    }

    if (header->has_extension) {
        // Extension adds one byte: temporal id (3b), spatial id (2b), reserved (3b).
        if (bit_buffer_byte_length == 1)
            return -1;

        header->size += 1;
        header->temporal_layer_id = rb_read_literal(rb, 3);
        header->spatial_layer_id  = rb_read_literal(rb, 2);
        if (rb_read_literal(rb, 3) != 0) {
            // extension_header_reserved_3bits must be set to 0.
            return -1;
        }
    }

    return 0;
}

// Public wrapper: parses an OBU header from 'buffer' and reports in
// '*consumed' how many bytes the header occupied.
int svt_read_obu_header(uint8_t *buffer, size_t buffer_length, size_t *consumed, ObuHeader *header,
                        uint32_t is_annexb) {
    if (buffer_length < 1 || !consumed || !header)
        return -1;

    ReadBitBuffer rb           = {buffer, buffer + buffer_length, 0};
    int           parse_result = read_obu_header(&rb, is_annexb, header);
    if (parse_result == 0)
        *consumed = header->size;
    return parse_result;
}

// Reads OBU header from 'f'. The 'buffer_capacity' passed in must be large
// enough to store an OBU header with extension (2 bytes). Raw OBU data is
// written to 'obu_data', parsed OBU header values are written to 'obu_header',
// and total bytes read from file are written to 'bytes_read'. Returns 0 for
// success, and non-zero on failure. When end of file is reached, the return
// value is 0 and the 'bytes_read' value is set to 0.
static int obudec_read_obu_header(FILE *f, size_t buffer_capacity, uint32_t is_annexb, uint8_t *obu_data, ObuHeader *obu_header, size_t *bytes_read) { if (!f || buffer_capacity < (OBU_HEADER_SIZE + OBU_EXTENSION_SIZE) || !obu_data || !obu_header || !bytes_read) { return -1; } *bytes_read = fread(obu_data, 1, 1, f); if (feof(f) && *bytes_read == 0) { return 0; } else if (*bytes_read != 1) { fprintf(stderr, "obudec: Failure reading OBU header.\n"); return -1; } const int has_extension = (obu_data[0] >> 2) & 0x1; if (has_extension) { if (fread(&obu_data[1], 1, 1, f) != 1) { fprintf(stderr, "obudec: Failure reading OBU extension."); return -1; } ++*bytes_read; } size_t obu_bytes_parsed = 0; svt_read_obu_header(obu_data, *bytes_read, &obu_bytes_parsed, obu_header, is_annexb); return 0; } static int obudec_read_obu_header_and_size(FILE *f, size_t buffer_capacity, uint32_t is_annexb, uint8_t *buffer, size_t *bytes_read, size_t *payload_length, ObuHeader *obu_header) { const size_t k_minimum_buffer_size = OBU_MAX_HEADER_SIZE; if (!f || !buffer || !bytes_read || !payload_length || !obu_header || buffer_capacity < k_minimum_buffer_size) { return -1; } size_t leb128_length_obu = 0; size_t leb128_length_payload = 0; uint64_t obu_size = 0; if (is_annexb) { if (obudec_read_leb128(f, &buffer[0], &leb128_length_obu, &obu_size) != 0) { fprintf(stderr, "obudec: Failure reading OBU size length.\n"); return -1; } else if (leb128_length_obu == 0) { *payload_length = 0; return 0; } if (obu_size > UINT32_MAX) { fprintf(stderr, "obudec: OBU payload length too large.\n"); return -1; } } size_t header_size = 0; if (obudec_read_obu_header(f, buffer_capacity - leb128_length_obu, is_annexb, buffer + leb128_length_obu, obu_header, &header_size) != 0) { return -1; } else if (header_size == 0) { *payload_length = 0; return 0; } if (!obu_header->has_size_field) { assert(is_annexb); if (obu_size < header_size) { fprintf(stderr, "obudec: OBU size is too small.\n"); return -1; } *payload_length = 
(size_t)obu_size - header_size; } else { uint64_t u64_payload_length = 0; if (obudec_read_leb128(f, &buffer[leb128_length_obu + header_size], &leb128_length_payload, &u64_payload_length) != 0) { fprintf(stderr, "obudec: Failure reading OBU payload length.\n"); return -1; } if (u64_payload_length > UINT32_MAX) { fprintf(stderr, "obudec: OBU payload length too large.\n"); return -1; } *payload_length = (size_t)u64_payload_length; } *bytes_read = leb128_length_obu + header_size + leb128_length_payload; return 0; } // Reads OBU payload from 'f' and returns 0 for success when all payload bytes // are read from the file. Payload data is written to 'obu_data', and actual // bytes read added to 'bytes_read'. static int obudec_read_obu_payload(FILE *f, size_t payload_length, uint8_t *obu_data, size_t *bytes_read) { if (!f || payload_length == 0 || !obu_data || !bytes_read) return -1; if (fread(obu_data, 1, payload_length, f) != payload_length) { fprintf(stderr, "obudec: Failure reading OBU payload.\n"); return -1; } *bytes_read += payload_length; return 0; } int file_is_obu(CliInput *cli, ObuDecInputContext *obu_ctx) { if (!obu_ctx || !cli) return 0; uint8_t detect_buf[OBU_DETECTION_SIZE] = {0}; const uint32_t is_annexb = obu_ctx->is_annexb; FILE * f = cli->in_file; size_t payload_length = 0; ObuHeader obu_header; memset(&obu_header, 0, sizeof(obu_header)); size_t length_of_unit_size = 0; size_t annexb_header_length = 0; uint64_t unit_size = 0; if (is_annexb) { // read the size of first temporal unit if (obudec_read_leb128(f, &detect_buf[0], &length_of_unit_size, &unit_size) != 0) { fprintf(stderr, "obudec: Failure reading temporal unit header\n"); return 0; } // read the size of first frame unit if (obudec_read_leb128( f, &detect_buf[length_of_unit_size], &annexb_header_length, &unit_size) != 0) { fprintf(stderr, "obudec: Failure reading frame unit header\n"); return 0; } annexb_header_length += length_of_unit_size; } size_t bytes_read = 0; if 
(obudec_read_obu_header_and_size(f, OBU_DETECTION_SIZE - annexb_header_length, is_annexb, &detect_buf[annexb_header_length], &bytes_read, &payload_length, &obu_header) != 0) { fprintf(stderr, "obudec: Failure reading first OBU.\n"); rewind(f); return 0; } if (is_annexb) { bytes_read += annexb_header_length; } if (obu_header.type != OBU_TEMPORAL_DELIMITER && obu_header.type != OBU_SEQUENCE_HEADER) { return 0; } if (obu_header.has_size_field) { if (obu_header.type == OBU_TEMPORAL_DELIMITER && payload_length != 0) { fprintf(stderr, "obudec: Invalid OBU_TEMPORAL_DELIMITER payload length (non-zero)."); rewind(f); return 0; } } else if (!is_annexb) { fprintf(stderr, "obudec: OBU size fields required, cannot decode input.\n"); rewind(f); return 0; } // Appears that input is valid Section 5 AV1 stream. obu_ctx->buffer = (uint8_t *)malloc(OBU_BUFFER_SIZE); if (!obu_ctx->buffer) { fprintf(stderr, "Out of memory.\n"); rewind(f); return 0; } obu_ctx->buffer_capacity = OBU_BUFFER_SIZE; memcpy(obu_ctx->buffer, &detect_buf[0], bytes_read); obu_ctx->bytes_buffered = bytes_read; // If the first OBU is a SEQUENCE_HEADER, then it will have a payload. // We need to read this in so that our buffer only contains complete OBUs. 
if (payload_length > 0) { if (payload_length > (obu_ctx->buffer_capacity - bytes_read)) { fprintf(stderr, "obudec: First OBU's payload is too large\n"); rewind(f); return 0; } size_t payload_bytes = 0; const int status = obudec_read_obu_payload( f, payload_length, &obu_ctx->buffer[bytes_read], &payload_bytes); if (status < 0) { rewind(f); return 0; } obu_ctx->bytes_buffered += payload_bytes; } /* This is because to avoid to many conditions while reading frame by frame information in TU's */ if (is_annexb) { rewind(f); obu_ctx->bytes_buffered = 0; } return 1; } static int obudec_grow_buffer(size_t growth_amount, uint8_t **obu_buffer, size_t *obu_buffer_capacity) { if (!*obu_buffer || !obu_buffer_capacity || growth_amount == 0) { return -1; } const size_t capacity = *obu_buffer_capacity; if (SIZE_MAX - growth_amount < capacity) { fprintf(stderr, "obudec: cannot grow buffer, capacity will roll over.\n"); return -1; } const size_t new_capacity = capacity + growth_amount; uint8_t *new_buffer = (uint8_t *)realloc(*obu_buffer, new_capacity); if (!new_buffer) { fprintf(stderr, "obudec: Failed to allocate compressed data buffer.\n"); return -1; } *obu_buffer = new_buffer; *obu_buffer_capacity = new_capacity; return 0; } static int obudec_read_one_obu(FILE *f, uint8_t **obu_buffer, size_t obu_bytes_buffered, size_t *obu_buffer_capacity, size_t *obu_length, ObuHeader *obu_header, uint32_t is_annexb) { if (!f || !(*obu_buffer) || !obu_buffer_capacity || !obu_length || !obu_header) { return -1; } size_t bytes_read = 0; size_t obu_payload_length = 0; size_t available_buffer_capacity = *obu_buffer_capacity - obu_bytes_buffered; if (available_buffer_capacity < OBU_MAX_HEADER_SIZE) { if (obudec_grow_buffer(DECAPP_MAX(*obu_buffer_capacity, OBU_MAX_HEADER_SIZE), obu_buffer, obu_buffer_capacity) != 0) { *obu_length = bytes_read; return -1; } available_buffer_capacity += DECAPP_MAX(*obu_buffer_capacity, OBU_MAX_HEADER_SIZE); } const int status = obudec_read_obu_header_and_size(f, 
available_buffer_capacity, is_annexb, *obu_buffer + obu_bytes_buffered, &bytes_read, &obu_payload_length, obu_header); if (status < 0) return status; if (obu_payload_length > SIZE_MAX - bytes_read) return -1; if (obu_payload_length > 256 * 1024 * 1024) { fprintf(stderr, "obudec: Read invalid OBU size (%u)\n", (unsigned int)obu_payload_length); *obu_length = bytes_read + obu_payload_length; return -1; } if (bytes_read + obu_payload_length > available_buffer_capacity && obudec_grow_buffer(DECAPP_MAX(*obu_buffer_capacity, obu_payload_length), obu_buffer, obu_buffer_capacity) != 0) { *obu_length = bytes_read + obu_payload_length; return -1; } if (obu_payload_length > 0 && obudec_read_obu_payload( f, obu_payload_length, *obu_buffer + obu_bytes_buffered + bytes_read, &bytes_read) != 0) { return -1; } *obu_length = bytes_read; return 0; } int obudec_read_temporal_unit(DecInputContext *input, uint8_t **buffer, size_t *bytes_read, size_t *buffer_size) { CliInput * cli = input->cli_ctx; FILE * f = cli->in_file; ObuDecInputContext *obu_ctx = input->obu_ctx; if (!f) return 0; *buffer_size = 0; *bytes_read = 0; if (feof(f)) { return 0; } size_t txb_size = 0, fr_size = 0; size_t obu_size = 0; size_t length_of_temporal_unit_size = 0; size_t length_of_frame_unit_size = 0; if (obu_ctx->is_annexb) { uint64_t size = 0; uint8_t frheader[OBU_MAX_LENGTH_FIELD_SIZE] = {0}; assert(obu_ctx->bytes_buffered == 0); if (!obu_ctx->rem_txb_size) { if (obudec_read_leb128(f, &frheader[0], &length_of_temporal_unit_size, &size) != 0) { fprintf(stderr, "obudec: Failure reading temporal unit header\n"); return 0; } if (size == 0 && feof(f)) { return 0; } /*Stores only tu size ie excluding tu header*/ obu_ctx->rem_txb_size = size; } if (size > UINT32_MAX || size + length_of_temporal_unit_size > UINT32_MAX) { fprintf(stderr, "obudec: TU too large.\n"); return 0; } if (obudec_read_leb128(f, &frheader[0], &length_of_frame_unit_size, &size) != 0) { fprintf(stderr, "obudec: Failure reading frame header\n"); 
return 0; } if (size == 0 || feof(f)) { return 0; } fr_size = (size_t)size; txb_size = fr_size; } else { while (1) { ObuHeader obu_header; memset(&obu_header, 0, sizeof(obu_header)); if (obudec_read_one_obu(f, &obu_ctx->buffer, obu_ctx->bytes_buffered, &obu_ctx->buffer_capacity, &obu_size, &obu_header, 0) != 0) { fprintf(stderr, "obudec: read_one_obu failed in TU loop\n"); return 0; } if (obu_header.type == OBU_TEMPORAL_DELIMITER || obu_size == 0) { txb_size = obu_ctx->bytes_buffered; break; } else { obu_ctx->bytes_buffered += obu_size; } } } uint8_t *new_buffer = (uint8_t *)realloc(*buffer, txb_size); if (!new_buffer) { free(*buffer); fprintf(stderr, "obudec: Out of memory.\n"); return 0; } *buffer = new_buffer; *bytes_read = txb_size; *buffer_size = txb_size; if (!obu_ctx->is_annexb) { memcpy(*buffer, obu_ctx->buffer, txb_size); // At this point, (obu_ctx->buffer + obu_ctx->bytes_buffered + obu_size) // points to the end of the buffer. memmove(obu_ctx->buffer, obu_ctx->buffer + obu_ctx->bytes_buffered, obu_size); obu_ctx->bytes_buffered = obu_size; } else { if (!feof(f)) { if (fread(*buffer, 1, fr_size, f) != fr_size) { fprintf(stderr, "obudec: Failed to read full temporal unit\n"); return 0; } obu_ctx->rem_txb_size -= (fr_size + length_of_frame_unit_size); } } return 1; } int file_is_ivf(CliInput *cli) { char raw_hdr[32]; int is_ivf = 0; if (fread(raw_hdr, 1, 32, cli->in_file) == 32) { if (memcmp(ivf_signature, raw_hdr, 4) == 0) { is_ivf = 1; if (mem_get_le16(raw_hdr + 4) != 0) { fprintf(stderr, "Error: Unrecognized IVF version! 
This file may not" " decode properly."); } cli->fourcc = mem_get_le32(raw_hdr + 8); cli->width = mem_get_le16(raw_hdr + 12); cli->height = mem_get_le16(raw_hdr + 14); cli->framerate.numerator = mem_get_le32(raw_hdr + 16); cli->framerate.denominator = mem_get_le32(raw_hdr + 20); fix_framerate(&cli->framerate.numerator, &cli->framerate.denominator); } } if (!is_ivf) { rewind(cli->in_file); cli->detect.buf_read = 0; } else cli->detect.position = 4; return is_ivf; } int read_ivf_frame(FILE *infile, uint8_t **buffer, size_t *bytes_read, size_t *buffer_size, int64_t *pts) { char raw_header[IVF_FRAME_HDR_SZ] = {0}; size_t frame_size = 0; if (fread(raw_header, IVF_FRAME_HDR_SZ, 1, infile) != 1) { if (!feof(infile)) fprintf(stderr, "Failed to read frame size. \n"); } else { frame_size = mem_get_le32(raw_header); if (frame_size > 256 * 1024 * 1024) { fprintf(stderr, "Read invalid frame size (%u) \n", (unsigned int)frame_size); frame_size = 0; } if (frame_size > *buffer_size) { uint8_t *new_buffer = (uint8_t *)realloc(*buffer, 2 * frame_size); if (new_buffer) { *buffer = new_buffer; *buffer_size = 2 * frame_size; } else { fprintf(stderr, "Failed to allocate compressed data buffer. \n"); frame_size = 0; } } if (pts) { *pts = mem_get_le32(&raw_header[4]); *pts += ((int64_t)mem_get_le32(&raw_header[8]) << 32); } } if (!feof(infile)) { if (fread(*buffer, 1, frame_size, infile) != frame_size) { fprintf(stderr, "Failed to read full frame. \n"); return 0; } *bytes_read = frame_size; return 1; } return 0; }
12,320
348
<filename>docs/data/leg-t2/015/01502198.json {"nom":"Sainte-Marie","circ":"2ème circonscription","dpt":"Cantal","inscrits":103,"abs":40,"votants":63,"blancs":6,"nuls":2,"exp":55,"res":[{"nuance":"LR","nom":"<NAME>","voix":43},{"nuance":"REM","nom":"Mme <NAME>","voix":12}]}
113
392
<reponame>Da-Krause/settlers-remake /******************************************************************************* * Copyright (c) 2015 - 2017 * * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. *******************************************************************************/ package jsettlers.logic.constants; import java.util.Random; /** * Extended version of java.util.Random with some more functions. * * @author <NAME> * */ public class ExtendedRandom extends Random { private static final long serialVersionUID = -2814532519838158362L; public ExtendedRandom(long seed) { super(seed); } /** * Returns a random number in the interval [min, max]. * * @param min * Minimum value (inclusive) * @param max * Maximum value (inclusive) * @return */ public int nextInt(int min, int max) { return min + nextInt(max - min + 1); } }
514
775
{ "#acknowledgments": { "current": { "number": "", "spec": "CSS Scrollbars", "text": "Acknowledgments", "url": "https://drafts.csswg.org/css-scrollbars-1/#acknowledgments" }, "snapshot": { "number": "", "spec": "CSS Scrollbars", "text": "Acknowledgments", "url": "https://www.w3.org/TR/css-scrollbars-1/#acknowledgments" } }, "#changes": { "current": { "number": "", "spec": "CSS Scrollbars", "text": "Changes", "url": "https://drafts.csswg.org/css-scrollbars-1/#changes" }, "snapshot": { "number": "", "spec": "CSS Scrollbars", "text": "Changes", "url": "https://www.w3.org/TR/css-scrollbars-1/#changes" } }, "#intro": { "current": { "number": "1", "spec": "CSS Scrollbars", "text": "Introduction", "url": "https://drafts.csswg.org/css-scrollbars-1/#intro" }, "snapshot": { "number": "1", "spec": "CSS Scrollbars", "text": "Introduction", "url": "https://www.w3.org/TR/css-scrollbars-1/#intro" } }, "#out-of-scope": { "current": { "number": "1.1.1", "spec": "CSS Scrollbars", "text": "Out Of Scope", "url": "https://drafts.csswg.org/css-scrollbars-1/#out-of-scope" } }, "#scope": { "current": { "number": "1.1", "spec": "CSS Scrollbars", "text": "Scope", "url": "https://drafts.csswg.org/css-scrollbars-1/#scope" }, "snapshot": { "number": "1.1", "spec": "CSS Scrollbars", "text": "Scope", "url": "https://www.w3.org/TR/css-scrollbars-1/#scope" } }, "#scrollbar-color": { "current": { "number": "2", "spec": "CSS Scrollbars", "text": "Scrollbar Colors: the scrollbar-color property", "url": "https://drafts.csswg.org/css-scrollbars-1/#scrollbar-color" }, "snapshot": { "number": "2", "spec": "CSS Scrollbars", "text": "Scrollbar Colors: the scrollbar-color property", "url": "https://www.w3.org/TR/css-scrollbars-1/#scrollbar-color" } }, "#scrollbar-width": { "current": { "number": "3", "spec": "CSS Scrollbars", "text": "Scrollbar Thickness: the scrollbar-width property", "url": "https://drafts.csswg.org/css-scrollbars-1/#scrollbar-width" }, "snapshot": { "number": "3", "spec": "CSS 
Scrollbars", "text": "Scrollbar Thickness: the scrollbar-width property", "url": "https://www.w3.org/TR/css-scrollbars-1/#scrollbar-width" } }, "#security-privacy-considerations": { "current": { "number": "", "spec": "CSS Scrollbars", "text": "Considerations for Security and Privacy", "url": "https://drafts.csswg.org/css-scrollbars-1/#security-privacy-considerations" }, "snapshot": { "number": "", "spec": "CSS Scrollbars", "text": "Considerations for Security and Privacy", "url": "https://www.w3.org/TR/css-scrollbars-1/#security-privacy-considerations" } }, "#values": { "current": { "number": "1.2", "spec": "CSS Scrollbars", "text": "Value Definitions", "url": "https://drafts.csswg.org/css-scrollbars-1/#values" }, "snapshot": { "number": "1.2", "spec": "CSS Scrollbars", "text": "Values", "url": "https://www.w3.org/TR/css-scrollbars-1/#values" } } }
1,584
2,212
import os import pytest from detect_secrets.core import plugins from detect_secrets.core.upgrades import v1_0 def test_custom_plugins_does_not_pollute_settings(): old_baseline = { 'version': '0.14.3', 'custom_plugin_paths': [ 'testing/plugins.py', ], 'plugins_used': [], 'results': [], } with pytest.raises(TypeError): plugins.initialize.from_plugin_classname('HippoDetector') new_baseline = {**old_baseline} v1_0.upgrade(new_baseline) assert 'custom_plugin_paths' not in new_baseline assert new_baseline['plugins_used'] == [ { 'name': 'HippoDetector', 'path': f'file://{os.path.abspath("testing/plugins.py")}', }, ] with pytest.raises(TypeError): plugins.initialize.from_plugin_classname('HippoDetector')
388
3,269
# Time: O(n) # Space: O(n) class Solution(object): def findTilt(self, root): """ :type root: TreeNode :rtype: int """ def postOrderTraverse(root, tilt): if not root: return 0, tilt left, tilt = postOrderTraverse(root.left, tilt) right, tilt = postOrderTraverse(root.right, tilt) tilt += abs(left-right) return left+right+root.val, tilt return postOrderTraverse(root, 0)[1]
252
524
<gh_stars>100-1000 #pragma once #include "controls/ClickWidget.h" #include "controls/SimpleListWidget.h" #include "controls/TextUnit.h" #include "animation/animation.h" namespace Utils { class OpacityEffect; } namespace Ui { class InputWidget; class AttachFileMenuItem : public SimpleListItem { Q_OBJECT Q_SIGNALS: void selectChanged(QPrivateSignal) const; public: AttachFileMenuItem(QWidget* _parent, const QString& _icon, const QString& _caption, const QColor& _iconBgColor); void setSelected(bool _value) override; bool isSelected() const override; protected: void paintEvent(QPaintEvent*) override; private: QColor iconBgColor_; QPixmap icon_; TextRendering::TextUnitPtr caption_; bool isSelected_ = false; }; class AttachPopupBackground : public QWidget { Q_OBJECT public: AttachPopupBackground(QWidget* _parent); protected: void paintEvent(QPaintEvent* _event) override; }; class AttachFilePopup : public ClickableWidget { Q_OBJECT Q_SIGNALS: void photoVideoClicked(QPrivateSignal) const; void fileClicked(QPrivateSignal) const; void cameraClicked(QPrivateSignal) const; void geoClicked(QPrivateSignal) const; void contactClicked(QPrivateSignal) const; void pttClicked(QPrivateSignal) const; public: static AttachFilePopup& instance(); static bool isOpen(); enum class ShowMode { Normal, Persistent }; static void showPopup(const ShowMode _mode = ShowMode::Normal); static void hidePopup(); void showAnimated(); void hideAnimated(); void selectFirstItem(); void setPersistent(const bool _persistent); void updateSizeAndPos(); bool focusNextPrevChild(bool) override { return false; } bool eventFilter(QObject* _obj, QEvent* _event) override; protected: void mouseMoveEvent(QMouseEvent* _e) override; void leaveEvent(QEvent*) override; void keyPressEvent(QKeyEvent* _event) override; void showEvent(QShowEvent*) override; void hideEvent(QHideEvent*) override; private: explicit AttachFilePopup(QWidget* _parent, InputWidget* _input); void onItemClicked(const int _idx); void 
onBackgroundClicked(); void onHideTimer(); void hideWithDelay(); bool isMouseInArea(const QPoint& _pos) const; QRect getPlusButtonRect() const; QPolygon getMouseAreaPoly() const; private: SimpleListWidget* listWidget_ = nullptr; AttachPopupBackground* widget_ = nullptr; InputWidget* input_ = nullptr; Utils::OpacityEffect* opacityEffect_ = nullptr; enum class AnimState { None, Showing, Hiding }; AnimState animState_ = AnimState::None; anim::Animation opacityAnimation_; enum class MenuItemId { photoVideo, file, camera, contact, ptt, geo, }; std::vector<std::pair<int, MenuItemId>> items_; QRect buttonRect_; QPolygon mouseAreaPoly_; bool persistent_; QTimer hideTimer_; }; }
1,507
10,125
<filename>OpenCorePkg/Library/OcAppleImg4Lib/libDER/DER_Digest.h /* Copyright (c) 2005-2008,2010 Apple Inc. All Rights Reserved. */ /* * DER_Digest.h - DER encode a DigestInfo * * Created Nov. 9 2005 by dmitch */ #ifndef _DER_DIGEST_H_ #define _DER_DIGEST_H_ #ifdef __cplusplus extern "C" { #endif #include "libDER.h" /* * Create an encoded DigestInfo based on the specified SHA1 digest. * The incoming digest must be 20 bytes long. * * Result is placed in caller's buffer, which must be at least of * length DER_SHA1_DIGEST_INFO_LEN bytes. * * The *resultLen parameter is the available size in the result * buffer on input, and the actual length of the encoded DigestInfo * on output. */ #define DER_SHA1_DIGEST_LEN 20 #define DER_SHA1_DIGEST_INFO_LEN 35 DERReturn DEREncodeSHA1DigestInfo( const DERByte *sha1Digest, DERSize sha1DigestLen, DERByte *result, /* encoded result RETURNED here */ DERSize *resultLen); /* IN/OUT */ #define DER_SHA256_DIGEST_LEN 32 #define DER_SHA256_DIGEST_INFO_LEN 51 DERReturn DEREncodeSHA256DigestInfo( const DERByte *sha256Digest, DERSize sha256DigestLen, DERByte *result, /* encoded result RETURNED here */ DERSize *resultLen); /* IN/OUT */ /* * Likewise, create an encoded DIgestInfo for specified MD5 or MD2 digest. */ #define DER_MD_DIGEST_LEN 16 #define DER_MD_DIGEST_INFO_LEN 34 typedef enum { WD_MD2 = 1, WD_MD5 = 2 } WhichDigest; DERReturn DEREncodeMDDigestInfo( WhichDigest whichDigest, const DERByte *mdDigest, DERSize mdDigestLen, DERByte *result, /* encoded result RETURNED here */ DERSize *resultLen); /* IN/OUT */ /* max sizes you'll need in the general cases */ #define DER_MAX_DIGEST_LEN DER_SHA256_DIGEST_LEN #define DER_MAX_ENCODED_INFO_LEN DER_SHA256_DIGEST_INFO_LEN #ifdef __cplusplus } #endif #endif /* _DER_DIGEST_H_ */
765
575
<filename>chrome/browser/dom_distiller/tab_utils_browsertest.cc<gh_stars>100-1000
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <string.h>

#include <memory>

#include "base/command_line.h"
#include "base/run_loop.h"
#include "base/strings/utf_string_conversions.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/scoped_feature_list.h"
#include "base/time/time.h"
#include "base/timer/timer.h"
#include "build/build_config.h"
#include "chrome/browser/dom_distiller/dom_distiller_service_factory.h"
#include "chrome/browser/dom_distiller/tab_utils.h"
#include "chrome/browser/dom_distiller/test_distillation_observers.h"
#include "chrome/browser/ssl/security_state_tab_helper.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/browser/ui/tabs/tab_strip_model.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/ui_test_utils.h"
#include "components/back_forward_cache/back_forward_cache_disable.h"
#include "components/dom_distiller/content/browser/distillable_page_utils.h"
#include "components/dom_distiller/content/browser/distiller_javascript_utils.h"
#include "components/dom_distiller/content/browser/test_distillability_observer.h"
#include "components/dom_distiller/core/dom_distiller_features.h"
#include "components/dom_distiller/core/dom_distiller_service.h"
#include "components/dom_distiller/core/dom_distiller_switches.h"
#include "components/dom_distiller/core/task_tracker.h"
#include "components/dom_distiller/core/url_constants.h"
#include "components/dom_distiller/core/url_utils.h"
#include "components/favicon/content/content_favicon_driver.h"
#include "components/favicon/core/favicon_driver_observer.h"
#include "components/security_state/core/security_state.h"
#include "content/public/browser/navigation_entry.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/render_view_host.h"
#include "content/public/browser/ssl_status.h"
#include "content/public/browser/web_contents.h"
#include "content/public/browser/web_contents_observer.h"
#include "content/public/common/content_switches.h"
#include "content/public/common/isolated_world_ids.h"
#include "content/public/test/back_forward_cache_util.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/browser_test_utils.h"
#include "content/public/test/test_utils.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
#include "net/test/embedded_test_server/request_handler_util.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/blink/public/common/features.h"
#include "third_party/blink/public/common/web_preferences/web_preferences.h"
#include "ui/gfx/image/image_unittest_util.h"

namespace dom_distiller {
namespace {

// Test fixture page served by the embedded HTTPS server, plus the titles we
// expect to observe before and after distillation.
const char* kSimpleArticlePath = "/dom_distiller/simple_article.html";
const char* kOriginalArticleTitle = "Test Page Title";
const char* kExpectedArticleHeading = "Test Page Title";
#if defined(OS_ANDROID)
const char* kExpectedDocumentTitle = "Test Page Title";
#else  // Desktop. This test is in chrome/ and is not run on iOS.
const char* kExpectedDocumentTitle = "Test Page Title - Reader Mode";
#endif  // defined(OS_ANDROID)

// UMA histogram names checked by the timing tests below.
const char* kDistillablePageHistogram =
    "DomDistiller.Time.ActivelyViewingArticleBeforeDistilling";
const char* kDistilledPageHistogram =
    "DomDistiller.Time.ActivelyViewingReaderModePage";

// Creates an unattached WebContents in the same BrowserContext as
// |source_web_contents|; the caller takes ownership (e.g. hands it to the
// tab strip).
std::unique_ptr<content::WebContents> NewContentsWithSameParamsAs(
    content::WebContents* source_web_contents) {
  content::WebContents::CreateParams create_params(
      source_web_contents->GetBrowserContext());
  auto new_web_contents = content::WebContents::Create(create_params);
  DCHECK(new_web_contents);
  return new_web_contents;
}

// FaviconUpdateWaiter waits for favicons to be changed after navigation.
// TODO(1064318): Combine with FaviconUpdateWaiter in
// chrome/browser/chrome_service_worker_browsertest.cc.
class FaviconUpdateWaiter : public favicon::FaviconDriverObserver {
 public:
  explicit FaviconUpdateWaiter(content::WebContents* web_contents) {
    scoped_observer_.Add(
        favicon::ContentFaviconDriver::FromWebContents(web_contents));
  }
  ~FaviconUpdateWaiter() override = default;

  // Blocks until OnFaviconUpdated has fired at least once; returns
  // immediately if an update was already observed.
  void Wait() {
    if (updated_)
      return;

    base::RunLoop run_loop;
    quit_closure_ = run_loop.QuitClosure();
    run_loop.Run();
  }

  void StopObserving() { scoped_observer_.RemoveAll(); }

 private:
  void OnFaviconUpdated(favicon::FaviconDriver* favicon_driver,
                        NotificationIconType notification_icon_type,
                        const GURL& icon_url,
                        bool icon_url_changed,
                        const gfx::Image& image) override {
    updated_ = true;
    if (!quit_closure_.is_null())
      std::move(quit_closure_).Run();
  }

  bool updated_ = false;
  ScopedObserver<favicon::FaviconDriver, favicon::FaviconDriverObserver>
      scoped_observer_{this};
  base::OnceClosure quit_closure_;

  DISALLOW_COPY_AND_ASSIGN(FaviconUpdateWaiter);
};

// Base fixture: enables the DOM Distiller feature/switch and serves the test
// article over HTTPS.
class DomDistillerTabUtilsBrowserTest : public InProcessBrowserTest {
 public:
  void SetUpOnMainThread() override {
    if (!DistillerJavaScriptWorldIdIsSet()) {
      SetDistillerJavaScriptWorldId(content::ISOLATED_WORLD_ID_CONTENT_END);
    }
    ASSERT_TRUE(https_server_->Start());
    article_url_ = https_server_->GetURL(kSimpleArticlePath);
  }

  void SetUpCommandLine(base::CommandLine* command_line) override {
    command_line->AppendSwitch(switches::kEnableDomDistiller);
  }

 protected:
  DomDistillerTabUtilsBrowserTest() {
    feature_list_.InitAndEnableFeature(dom_distiller::kReaderMode);
  }

  void SetUpInProcessBrowserTestFixture() override {
    https_server_ = std::make_unique<net::EmbeddedTestServer>(
        net::EmbeddedTestServer::TYPE_HTTPS);
    https_server_->ServeFilesFromSourceDirectory(GetChromeTestDataDir());
  }

  const GURL& article_url() const { return article_url_; }

  // Reads document.title from the page via script injection.
  std::string GetDocumentTitle(content::WebContents* web_contents) const {
    return content::ExecuteScriptAndGetValue(web_contents->GetMainFrame(),
                                             "document.title")
        .GetString();
  }

  // Reads the distilled article heading (the #title-holder element).
  std::string GetArticleHeading(content::WebContents* web_contents) const {
    return content::ExecuteScriptAndGetValue(
               web_contents->GetMainFrame(),
               "document.getElementById('title-holder').textContent")
        .GetString();
  }

  std::unique_ptr<net::EmbeddedTestServer> https_server_;

 private:
  base::test::ScopedFeatureList feature_list_;
  GURL article_url_;
};

// Distilling the current page should swap in a new WebContents showing the
// chrome-distiller: URL with the expected title and heading.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest,
                       DistillCurrentPageSwapsWebContents) {
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();

  TestDistillabilityObserver distillability_observer(initial_web_contents);
  DistillabilityResult expected_result;
  expected_result.is_distillable = true;
  expected_result.is_last = false;
  expected_result.is_mobile_friendly = false;

  // This blocks until the navigation has completely finished.
  ui_test_utils::NavigateToURL(browser(), article_url());
  // This blocks until the page is found to be distillable.
  distillability_observer.WaitForResult(expected_result);

  DistillCurrentPageAndView(initial_web_contents);

  // Retrieve new web contents and wait for it to finish loading.
  content::WebContents* after_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  ASSERT_NE(after_web_contents, nullptr);
  DistilledPageObserver(after_web_contents).WaitUntilFinishedLoading();

  // Verify the new URL is showing distilled content in a new WebContents.
  EXPECT_NE(initial_web_contents, after_web_contents);
  EXPECT_TRUE(
      after_web_contents->GetLastCommittedURL().SchemeIs(kDomDistillerScheme));
  EXPECT_EQ(kExpectedDocumentTitle, GetDocumentTitle(after_web_contents));
  EXPECT_EQ(kExpectedArticleHeading, GetArticleHeading(after_web_contents));
}

// TODO(1061928): Make this test more robust by using a TestMockTimeTaskRunner
// and a test TickClock. This would require having UMAHelper be an object
// so that it can hold a TickClock reference.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest, UMATimesAreLogged) {
  base::HistogramTester histogram_tester;
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();

  TestDistillabilityObserver distillability_observer(initial_web_contents);
  DistillabilityResult expected_result;
  expected_result.is_distillable = true;
  expected_result.is_last = false;
  expected_result.is_mobile_friendly = false;

  // This blocks until the navigation has completely finished.
  ui_test_utils::NavigateToURL(browser(), article_url());
  // This blocks until the page is found to be distillable.
  distillability_observer.WaitForResult(expected_result);

  // No UMA logged for distillable or distilled yet.
  histogram_tester.ExpectTotalCount(kDistillablePageHistogram, 0);
  histogram_tester.ExpectTotalCount(kDistilledPageHistogram, 0);

  DistillCurrentPageAndView(initial_web_contents);

  // UMA should now exist for the distillable page because we distilled it.
  histogram_tester.ExpectTotalCount(kDistillablePageHistogram, 1);

  // Distilled page UMA isn't logged until we leave that page.
  histogram_tester.ExpectTotalCount(kDistilledPageHistogram, 0);

  // Go back to the article, check UMA exists for distilled page now.
  ui_test_utils::NavigateToURL(browser(), article_url());
  histogram_tester.ExpectTotalCount(kDistilledPageHistogram, 1);
  // However, there should not be a second distillable histogram.
  histogram_tester.ExpectTotalCount(kDistillablePageHistogram, 1);
}

// Distilling into an explicit destination WebContents must leave the source
// tab on the original article while the destination shows distilled content.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest,
                       DistillAndViewCreatesNewWebContentsAndPreservesOld) {
  content::WebContents* source_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();

  // This blocks until the navigation has completely finished.
  ui_test_utils::NavigateToURL(browser(), article_url());

  // Create destination WebContents and add it to the tab strip.
  browser()->tab_strip_model()->AppendWebContents(
      NewContentsWithSameParamsAs(source_web_contents),
      /* foreground = */ true);
  content::WebContents* destination_web_contents =
      browser()->tab_strip_model()->GetWebContentsAt(1);

  DistillAndView(source_web_contents, destination_web_contents);
  DistilledPageObserver(destination_web_contents).WaitUntilFinishedLoading();

  // Verify that the source WebContents is showing the original article.
  EXPECT_EQ(article_url(), source_web_contents->GetLastCommittedURL());
  EXPECT_EQ(kOriginalArticleTitle, GetDocumentTitle(source_web_contents));

  // Verify the destination WebContents is showing distilled content.
  EXPECT_TRUE(destination_web_contents->GetLastCommittedURL().SchemeIs(
      kDomDistillerScheme));
  EXPECT_EQ(kExpectedDocumentTitle, GetDocumentTitle(destination_web_contents));
  EXPECT_EQ(kExpectedArticleHeading,
            GetArticleHeading(destination_web_contents));

  content::WebContentsDestroyedWatcher destroyed_watcher(
      destination_web_contents);
  browser()->tab_strip_model()->CloseWebContentsAt(1, 0);
  destroyed_watcher.Wait();
}

// Round trip: distill, then return to the original page from the distilled
// view; the destination should end up back on the original URL.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest, ToggleOriginalPage) {
  content::WebContents* source_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();

  // This blocks until the navigation has completely finished.
  ui_test_utils::NavigateToURL(browser(), article_url());

  // Create and navigate to the distilled page.
  browser()->tab_strip_model()->AppendWebContents(
      NewContentsWithSameParamsAs(source_web_contents),
      /* foreground = */ true);
  content::WebContents* destination_web_contents =
      browser()->tab_strip_model()->GetWebContentsAt(1);

  DistillAndView(source_web_contents, destination_web_contents);
  DistilledPageObserver(destination_web_contents).WaitUntilFinishedLoading();
  ASSERT_TRUE(url_utils::IsDistilledPage(
      destination_web_contents->GetLastCommittedURL()));

  // Now return to the original page.
  ReturnToOriginalPage(destination_web_contents);
  OriginalPageNavigationObserver(destination_web_contents)
      .WaitUntilFinishedLoading();
  EXPECT_EQ(source_web_contents->GetLastCommittedURL(),
            destination_web_contents->GetLastCommittedURL());
}

// Distillation in flight must disable back-forward cache for the frame, with
// the SelfDeletingRequestDelegate reason recorded.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest,
                       DomDistillDisableForBackForwardCache) {
  content::BackForwardCacheDisabledTester tester;

  GURL url1(article_url());
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  content::RenderFrameHost* main_frame =
      browser()->tab_strip_model()->GetActiveWebContents()->GetMainFrame();
  int process_id = main_frame->GetProcess()->GetID();
  int frame_routing_id = main_frame->GetRoutingID();
  GURL url2(https_server_->GetURL("/title1.html"));

  TestDistillabilityObserver distillability_observer(initial_web_contents);
  DistillabilityResult expected_result;
  expected_result.is_distillable = true;
  expected_result.is_last = false;
  expected_result.is_mobile_friendly = false;

  // Navigate to the page
  ui_test_utils::NavigateToURL(browser(), url1);
  distillability_observer.WaitForResult(expected_result);

  DistillCurrentPageAndView(initial_web_contents);

  // Navigate away while starting distillation. This should block bfcache.
  ui_test_utils::NavigateToURL(browser(), url2);

  EXPECT_TRUE(tester.IsDisabledForFrameWithReason(
      process_id, frame_routing_id,
      back_forward_cache::DisabledReason(
          back_forward_cache::DisabledReasonId::
              kDomDistiller_SelfDeletingRequestDelegate)));
}

// The distilled (chrome-distiller:) page should report security level NONE
// even though the original article was served over HTTPS.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest, SecurityStateIsNone) {
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();

  TestDistillabilityObserver distillability_observer(initial_web_contents);
  DistillabilityResult expected_result;
  expected_result.is_distillable = true;
  expected_result.is_last = false;
  expected_result.is_mobile_friendly = false;

  ui_test_utils::NavigateToURL(browser(), article_url());
  distillability_observer.WaitForResult(expected_result);

  // Check security state is not NONE.
  SecurityStateTabHelper* helper =
      SecurityStateTabHelper::FromWebContents(initial_web_contents);
  ASSERT_NE(security_state::NONE, helper->GetSecurityLevel());

  DistillCurrentPageAndView(initial_web_contents);
  content::WebContents* after_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  DistilledPageObserver(after_web_contents).WaitUntilFinishedLoading();

  // Now security state should be NONE.
  helper = SecurityStateTabHelper::FromWebContents(after_web_contents);
  ASSERT_EQ(security_state::NONE, helper->GetSecurityLevel());
}

// The distilled page should keep showing the favicon of the original article.
IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTest,
                       FaviconFromOriginalPage) {
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  TestDistillabilityObserver distillability_observer(initial_web_contents);
  DistillabilityResult expected_result;
  expected_result.is_distillable = true;
  expected_result.is_last = false;
  expected_result.is_mobile_friendly = false;
  FaviconUpdateWaiter waiter(initial_web_contents);

  ui_test_utils::NavigateToURL(browser(), article_url());
  // Ensure the favicon is loaded and the distillability result has also
  // loaded before proceeding with the test.
  waiter.Wait();
  distillability_observer.WaitForResult(expected_result);

  gfx::Image article_favicon = browser()->GetCurrentPageIcon();
  // Remove the FaviconUpdateWaiter because we are done with
  // initial_web_contents.
  waiter.StopObserving();

  DistillCurrentPageAndView(initial_web_contents);
  content::WebContents* after_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  ASSERT_NE(after_web_contents, nullptr);
  DistilledPageObserver(after_web_contents).WaitUntilFinishedLoading();

  gfx::Image distilled_favicon = browser()->GetCurrentPageIcon();
  EXPECT_TRUE(gfx::test::AreImagesEqual(article_favicon, distilled_favicon));
}

#if !defined(OS_ANDROID)
// Polls the distilled page until the "ok" image has loaded at its expected
// natural width and the "bad" (blocked) image reports the expected width
// (0 when it was not loaded).
class DistilledPageImageLoadWaiter {
 public:
  explicit DistilledPageImageLoadWaiter(content::WebContents* contents,
                                        int ok_elem,
                                        int ok_width,
                                        int bad_elem,
                                        int bad_width)
      : contents_(contents),
        ok_elem_(ok_elem),
        ok_width_(ok_width),
        bad_elem_(bad_elem),
        bad_width_(bad_width) {}
  ~DistilledPageImageLoadWaiter() = default;

  DistilledPageImageLoadWaiter(const DistilledPageImageLoadWaiter&) = delete;
  DistilledPageImageLoadWaiter& operator=(const DistilledPageImageLoadWaiter&) =
      delete;

  void Wait() {
    base::RepeatingTimer check_timer;
    check_timer.Start(FROM_HERE, base::TimeDelta::FromMilliseconds(10), this,
                      &DistilledPageImageLoadWaiter::OnTimer);
    runner_.Run();
  }

 private:
  void OnTimer() {
    bool loaded = false;
    // Use ExecuteScriptAndExtractInt to avoid Content SecurityPolicy errors.
    // Use naturalWidth because the distiller sets the width and height
    // attributes on the img.
    // Get the good and bad imags and check they are loaded and their size.
    // If they aren't loaded or the size is wrong, stay in the loop until the
    // load completes.
    ASSERT_TRUE(content::ExecuteScriptAndExtractBool(
        contents_,
        content::JsReplace("var ok = document.getElementById('main-content')"
                           " .getElementsByTagName('img')[$1];"
                           "var bad = document.getElementById('main-content')"
                           " .getElementsByTagName('img')[$2];"
                           "window.domAutomationController.send("
                           " ok.complete && ok.naturalWidth == $3 &&"
                           " bad.complete && bad.naturalWidth == $4)",
                           ok_elem_, bad_elem_, ok_width_, bad_width_),
        &loaded));
    if (loaded)
      runner_.Quit();
  }

  content::WebContents* contents_;
  int ok_elem_;
  int ok_width_;
  int bad_elem_;
  int bad_width_;
  base::RunLoop runner_;
};

// Fixture with two HTTPS servers (one valid, one with an expired cert) used
// to verify that distilled pages refuse insecure subresources.
class DomDistillerTabUtilsBrowserTestInsecureContent
    : public InProcessBrowserTest {
 public:
  void SetUpOnMainThread() override {
    if (!DistillerJavaScriptWorldIdIsSet()) {
      SetDistillerJavaScriptWorldId(content::ISOLATED_WORLD_ID_CONTENT_END);
    }
    ASSERT_TRUE(https_server_->Start());
    ASSERT_TRUE(https_server_expired_->Start());
  }

  void SetUpCommandLine(base::CommandLine* command_line) override {
    command_line->AppendSwitch(switches::kEnableDomDistiller);
    command_line->AppendSwitch(switches::kAllowInsecureLocalhost);
  }

  // Asserts that the image element |id| has the given naturalWidth (0 means
  // the image did not load).
  void CheckImageWidthById(content::WebContents* contents,
                           std::string id,
                           int expected_width) {
    EXPECT_EQ(expected_width,
              content::EvalJs(contents, "document.getElementById('" + id +
                                            "').naturalWidth"));
  }

 protected:
  DomDistillerTabUtilsBrowserTestInsecureContent() {
    feature_list_.InitWithFeatures({dom_distiller::kReaderMode},
                                   {blink::features::kMixedContentAutoupgrade});
  }

  void SetUpInProcessBrowserTestFixture() override {
    https_server_ = std::make_unique<net::EmbeddedTestServer>(
        net::EmbeddedTestServer::TYPE_HTTPS);
    https_server_->ServeFilesFromSourceDirectory(GetChromeTestDataDir());
    https_server_expired_ = std::make_unique<net::EmbeddedTestServer>(
        net::EmbeddedTestServer::TYPE_HTTPS);
    https_server_expired_->SetSSLConfig(net::EmbeddedTestServer::CERT_EXPIRED);
    https_server_expired_->ServeFilesFromSourceDirectory(
        GetChromeTestDataDir());
  }

  std::unique_ptr<net::EmbeddedTestServer> https_server_;
  std::unique_ptr<net::EmbeddedTestServer> https_server_expired_;

 private:
  base::test::ScopedFeatureList feature_list_;
};

IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTestInsecureContent,
                       DoesNotLoadMixedContent) {
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  ui_test_utils::NavigateToURL(
      browser(),
      https_server_->GetURL("/dom_distiller/simple_article_mixed_image.html"));
  // Security state should be downgraded.
  SecurityStateTabHelper* helper =
      SecurityStateTabHelper::FromWebContents(initial_web_contents);
  EXPECT_EQ(security_state::WARNING, helper->GetSecurityLevel());
  EXPECT_TRUE(initial_web_contents->GetController()
                  .GetVisibleEntry()
                  ->GetSSL()
                  .content_status &
              content::SSLStatus::DISPLAYED_INSECURE_CONTENT);
  // The first image should not have loaded.
  CheckImageWidthById(initial_web_contents, "bad_image", 0);
  CheckImageWidthById(initial_web_contents, "ok_image", 276);

  // Create destination WebContents and add it to the tab strip.
  browser()->tab_strip_model()->AppendWebContents(
      NewContentsWithSameParamsAs(initial_web_contents),
      /* foreground = */ true);
  content::WebContents* destination_web_contents =
      browser()->tab_strip_model()->GetWebContentsAt(1);

  // Original page has a http image, but the page was loaded over https. It
  // isn't technically distillable because it isn't SECURE, but we will distill
  // it anyway to ensure the mixed resource is not loaded in the distilled page.
  DistillAndView(initial_web_contents, destination_web_contents);
  DistilledPageObserver(destination_web_contents).WaitUntilFinishedLoading();
  // The DistilledPageObserver looks for the title change after the JS runs,
  // but we also need to wait for the images to load since we are going to
  // be inspecting their size.
  DistilledPageImageLoadWaiter image_waiter(
      destination_web_contents, /* ok image */ 1, /* ok_elem's width */ 276,
      /* bad image */ 0, /* bad image's width */ 0);
  image_waiter.Wait();

  // The distilled page should not try to load insecure content.
  helper = SecurityStateTabHelper::FromWebContents(destination_web_contents);
  EXPECT_EQ(security_state::NONE, helper->GetSecurityLevel());
  EXPECT_FALSE(destination_web_contents->GetController()
                   .GetVisibleEntry()
                   ->GetSSL()
                   .content_status &
               content::SSLStatus::DISPLAYED_INSECURE_CONTENT);
}

IN_PROC_BROWSER_TEST_F(DomDistillerTabUtilsBrowserTestInsecureContent,
                       DoesNotLoadContentWithBadCert) {
  content::WebContents* initial_web_contents =
      browser()->tab_strip_model()->GetActiveWebContents();
  base::StringPairs replacement_text;
  // Create a page with an image that is loaded over a HTTPS server with invalid
  // certificate.
  replacement_text.push_back(
      make_pair("REPLACE_WITH_HOST_AND_PORT",
                https_server_expired_->host_port_pair().ToString()));
  std::string path = net::test_server::GetFilePathWithReplacements(
      "/dom_distiller/simple_article_bad_cert_image.html", replacement_text);
  ui_test_utils::NavigateToURL(browser(), https_server_->GetURL(path));
  // Should have loaded the image with the cert errors.
  SecurityStateTabHelper* helper =
      SecurityStateTabHelper::FromWebContents(initial_web_contents);
  EXPECT_TRUE(
      helper->GetVisibleSecurityState()->displayed_content_with_cert_errors);
  // Check both the good and the bad images loaded.
  CheckImageWidthById(initial_web_contents, "bad_image", 276);
  CheckImageWidthById(initial_web_contents, "ok_image", 276);

  // Create destination WebContents and add it to the tab strip.
  browser()->tab_strip_model()->AppendWebContents(
      NewContentsWithSameParamsAs(initial_web_contents),
      /* foreground = */ true);
  content::WebContents* destination_web_contents =
      browser()->tab_strip_model()->GetWebContentsAt(1);

  // Original page has broken cert image. It isn't technically distillable
  // because it isn't SECURE, but we will distill it anyway to ensure those
  // resources are not loaded in the distilled page.
  DistillAndView(initial_web_contents, destination_web_contents);
  DistilledPageObserver(destination_web_contents).WaitUntilFinishedLoading();
  DistilledPageImageLoadWaiter image_waiter(
      destination_web_contents, /* ok image */ 1, /* ok_elem's width */ 276,
      /* bad image */ 0, /* bad image's width */ 0);
  image_waiter.Wait();

  // Check security of the distilled page. It should not try to load the
  // image with the invalid cert.
  helper = SecurityStateTabHelper::FromWebContents(destination_web_contents);
  EXPECT_EQ(security_state::NONE, helper->GetSecurityLevel());
  EXPECT_FALSE(
      helper->GetVisibleSecurityState()->displayed_content_with_cert_errors);
}
#endif  // !defined(OS_ANDROID)

}  // namespace
}  // namespace dom_distiller
9,074
348
{"nom":"Villotran","circ":"2ème circonscription","dpt":"Oise","inscrits":246,"abs":122,"votants":124,"blancs":6,"nuls":2,"exp":116,"res":[{"nuance":"FN","nom":"<NAME>","voix":65},{"nuance":"REM","nom":"<NAME>","voix":51}]}
88
1,958
<filename>src/test/java/com/shzlw/poli/rest/JdbcDataSourceWsTest.java
package com.shzlw.poli.rest;

import com.fasterxml.jackson.core.type.TypeReference;
import com.shzlw.poli.dao.JdbcDataSourceDao;
import com.shzlw.poli.model.JdbcDataSource;
import com.shzlw.poli.util.Constants;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.web.servlet.AutoConfigureMockMvc;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.MediaType;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.jdbc.Sql;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;

import java.util.List;

import static org.springframework.test.web.servlet.request.MockMvcRequestBuilders.*;
import static org.springframework.test.web.servlet.result.MockMvcResultMatchers.status;

/**
 * End-to-end CRUD test for the JDBC data source REST endpoint, driven through
 * MockMvc against a fresh SQLite schema (recreated before each test method).
 * Exercises create, read (single and list), update (with and without password),
 * and delete, and verifies that the password is never echoed back by the API.
 */
@RunWith(SpringRunner.class)
@SpringBootTest
@AutoConfigureMockMvc
@Transactional
@TestPropertySource(locations="classpath:application-test.properties")
@Sql(scripts = "classpath:schema-sqlite.sql", executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
public class JdbcDataSourceWsTest extends AbstractWsTest {

    // Used to read the stored entity directly, bypassing the REST layer,
    // when asserting on fields the API hides (the password).
    @Autowired
    JdbcDataSourceDao jdbcDataSourceDao;

    @Test
    public void test() throws Exception {
        // ********** Create **********
        JdbcDataSource j1 = new JdbcDataSource();
        j1.setName("j1");
        j1.setConnectionUrl("c1");
        j1.setDriverClassName("d1");
        j1.setUsername("u1");
        j1.setPassword("p1");
        j1.setPing("p1");
        String body = mapper.writeValueAsString(j1);
        mvcResult = mvc.perform(
                post(JDBCDATASOURCES_BASE_URL)
                        .contentType(MediaType.APPLICATION_JSON)
                        .requestAttr(Constants.HTTP_REQUEST_ATTR_USER, adminUser)
                        .content(body)
                )
                .andExpect(status().isCreated())
                .andReturn();
        // The create endpoint returns the new row id as the response body.
        long id = Long.parseLong(mvcResult.getResponse().getContentAsString());

        // Verify one
        responeText = findJdbcDataSource(id);
        JdbcDataSource saved = mapper.readValue(responeText, JdbcDataSource.class);
        assertJdbcDataSource(j1, saved);

        // Verify the list
        mvcResult = mvc.perform(
                get(JDBCDATASOURCES_BASE_URL)
                        .requestAttr(Constants.HTTP_REQUEST_ATTR_USER, adminUser)
                )
                .andReturn();
        responeText = mvcResult.getResponse().getContentAsString();
        List<JdbcDataSource> jdbcDataSources = mapper.readValue(responeText, new TypeReference<List<JdbcDataSource>>() {});
        Assert.assertEquals(1, jdbcDataSources.size());
        saved = jdbcDataSources.get(0);
        assertJdbcDataSource(j1, saved);

        // ********** Update information only **********
        j1.setId(id);
        j1.setName("j2");
        j1.setConnectionUrl("c2");
        j1.setDriverClassName("d2");
        j1.setUsername("u2");
        j1.setPing("p2");
        body = mapper.writeValueAsString(j1);
        mvcResult = mvc.perform(
                put(JDBCDATASOURCES_BASE_URL)
                        .contentType(MediaType.APPLICATION_JSON)
                        .requestAttr(Constants.HTTP_REQUEST_ATTR_USER, adminUser)
                        .content(body)
                )
                .andExpect(status().isOk())
                .andReturn();
        responeText = findJdbcDataSource(id);
        saved = mapper.readValue(responeText, JdbcDataSource.class);
        assertJdbcDataSource(j1, saved);

        // ********** Update password **********
        j1.setId(id);
        j1.setPassword("p3");
        body = mapper.writeValueAsString(j1);
        mvcResult = mvc.perform(
                put(JDBCDATASOURCES_BASE_URL)
                        .contentType(MediaType.APPLICATION_JSON)
                        .requestAttr(Constants.HTTP_REQUEST_ATTR_USER, adminUser)
                        .content(body)
                )
                .andExpect(status().isOk())
                .andReturn();
        // The API never returns the password, so check it through the DAO.
        saved = jdbcDataSourceDao.findById(id);
        Assert.assertEquals(saved.getPassword(), j1.getPassword());

        // ********** Delete **********
        mvcResult = mvc.perform(
                delete(JDBCDATASOURCES_BASE_URL + "/" + id)
                        .requestAttr(Constants.HTTP_REQUEST_ATTR_USER, adminUser)
                )
                .andExpect(status().isNoContent())
                .andReturn();

        // Verify
        responeText = findJdbcDataSource(id);
        Assert.assertTrue(StringUtils.isEmpty(responeText));
    }

    /**
     * Fetches a single data source by id through the REST API and returns the
     * raw response body (empty when the row does not exist).
     */
    private String findJdbcDataSource(long id) throws Exception {
        mvcResult = mvc.perform(
                get(JDBCDATASOURCES_BASE_URL + "/" + id)
                        .requestAttr(Constants.HTTP_REQUEST_ATTR_USER, adminUser)
                )
                .andReturn();
        return mvcResult.getResponse().getContentAsString();
    }

    /**
     * Asserts field-by-field equality of the API-visible attributes and that
     * the password was stripped from the API response.
     */
    private void assertJdbcDataSource(JdbcDataSource expected, JdbcDataSource target) {
        Assert.assertEquals(expected.getName(), target.getName());
        Assert.assertEquals(expected.getConnectionUrl(), target.getConnectionUrl());
        Assert.assertEquals(expected.getDriverClassName(), target.getDriverClassName());
        Assert.assertEquals(expected.getUsername(), target.getUsername());
        Assert.assertEquals(expected.getPing(), target.getPing());
        Assert.assertNull(target.getPassword());
    }
}
2,642
23,901
# coding=utf-8 # Copyright 2021 The Google Research Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Lint as: python2, python3 """ParameterContainer is a class for managing hyper-parameters. Key capabilities: 1. CONVERTS A PYTHON DICTIONARY TO A CLASS, WHERE THE DICTIONARY KEYS ARE CONVERTED TO ATTRIBUTES. For example: params_dict = {'key1': 'value1', 'nested': {'key2': 'val2', 'key3': 'val3'}} pc = ParameterContainer(params_dict) assert pc.key1 == 'value1' assert pc.nested.key2 == 'val2' assert pc.nested.key3 == 'val3' 2. ALLOWS EXTENSION AND OVERRIDING. For example: extension_dict = {'nested': {'key2': 'new_val2'}, 'key4': 'val4', 'nested2': {'key5': 'val5'}} pc.override(extension_dict) assert pc.key1 == 'value1' assert pc.nested.key2 == 'new_val2' assert pc.nested.key3 == 'val3' assert pc.key4 == 'val4' assert pc.nested2.key5 == 'val5' 3. ALLOWS OVERRIDING VALUES WITH USER-SPECIFIED PARAMETERS. For example: user_params = {nested2': {'key5': 'user_val5'}, 'key4': 'user_val4'} pc.override(extension_dict, is_custom=True) assert pc.key1 == 'value1' assert pc.nested.key2 == 'new_val2' assert pc.nested.key3 == 'val3' assert pc.key4 == 'user_val4' assert pc.nested2.key5 == 'user_val5' The differences between is_custom=True and the default is_custom=False are: a. Once a parameter was set to be custom, its value becomes immutable. b. On can later query pc.is_custom(parameter_name) to know if it was set by the user or not. 
The idea is that user-specified parameters are set in stone. For example, we may decide to choose the weight decay of convolutions depending on whether we are using batch-norm or not. But if the user explicitly specified the weight decay, we will always use the latter. 4. LOADS THE PARAMETERS FROM JSON format. The override method can receive a filepath to a json file or a literal json contents instead of a dictionary. 5. HAS A LOCK METHOD. While we do want to allow adjustments in the parameters at the early stages (e. g. the batch-norm-dependent weight decay mentioned above), we would like the parameters to be read-only. The lock() method will make the ParameterContainer read-only. That is, an exception will be thrown when attempting to modify values (of course you can hack around it, but you shouldn't). Before lock() has been called, values can be modified directly: pc.nested.key3 = 'another_val3' assert pc.nested.key3 == 'another_val3' After calling lock(), the lines above will throw an exception. WHY NOT USE PROTOCOL BUFFERS? Protocol buffers have four main disadvantages compared to ParameterContainer: - They require at least one separate file, probably two. The intended usage pattern of ParameterContainer is quickly defining a new network architecture and training setup, which will mostly reuse existing hyper-parameters, but may override a few values and add a few new specific keys. With Protocol Buffers, extension would require an additional proto file, and overriding would require an additional pbtxt file, because proto extensions cannot override default values of their parent. We would have to reference the pbtxt file somewhere, parse it and override the parameter values, which is not elegant and error prone. - When merging protocol buffers, repeated fields are concatenated rather than replaced. This is usually not what we want - if a parameter is a list of values, and the user overrides it, they most likely want to replace the list by their own list. 
- When using protocol buffer extensions, passing a literal string in a user
  flag is somewhat cluttered. For example:
  [my_namespace.MyExtensionName.ext] { my_parameter_name: "value" }

- Protocol buffers do not distinguish between overriding by new defaults (say,
  I have a network architecture where I know the weight decay should be
  different than the common value), and overriding by user values. In both
  cases, querying has_ will return true.

WHY NOT USE HPARAMS?
HParams is a wrapper over protocol buffers, often used with TensorFlow, which
also implements a parser for user-specified values. It also resolves the
merging issue of repeating fields, which are replaced instead of being
concatenated. However:
- It is still a protocol buffer, so it requires extra files for each new
  network architecture / training setup.
- Extensions are not supported, nor is nesting, which means that all parameters
  live in the one long list, and cannot be grouped into meaningful groups.

WHY JSON?
- Writing our own format / parser sounds like a bad idea.
- Json can be parsed from c++ too, unlike other python-specific serializations.
- Json is the preferred (and default) format.

ParameterContainer works well with polymorphism. If a base class contains a
ParameterContainer object, subclasses may call its override() method to add or
override parameters, but they cannot remove parameters. Therefore one can be
sure that if the base class' ParameterContainer has some key, all subclasses
will have it too.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import ast
import copy
import json
import six

# Datatypes that should be overridden without further nesting.
PRIMITIVES = (int, float, bool, six.string_types)


def parse_object(object_or_json_or_file, allow_dict=False, allow_list=False):
  """Parses an allowed object from several alternative representations.

  Parsing is attempted in the order specified in the argument definition
  below, until it succeeds. The parsing order matters because ", ', {, } are
  all valid filename characters, and because a single quoted string is a valid
  json.

  Args:
    object_or_json_or_file: Any of the following:
      * A dictionary, or
      * a list, or
      * a string with a json serialization of a dictionary, or
      * a string with a python dictionary literal, or
      * a string with a path to a file with a json serialization of a
        dictionary
    allow_dict: Whether to allow the parsed object to be a dict.
    allow_list: Whether to allow the parsed object to be a list.

  Returns:
    A dictionary or list, which is object_or_json_or_file itself if the latter
    dictionary or list, or the parse result otherwise.

  Raises:
    ValueError: If the json could not be parsed as an allowed object.
    OSError: If the file did not open.
  """
  if not allow_dict and not allow_list:
    raise ValueError('At least one of allow_dict or allow_list must be True.')
  # Empty/None input parses to an empty dict by convention.
  if not object_or_json_or_file:
    return {}
  if isinstance(object_or_json_or_file, six.string_types):
    # first, attempt to parse the string as a JSON dict
    try:
      object_or_json_or_file = json.loads(object_or_json_or_file)
    except ValueError as literal_json_parsing_error:
      # then try to parse as a python dict literal
      #
      # Note that parsing as JSON above is still required, since null literal
      # (without quotes) is not valid python but is valid json (it is mapped to
      # None by json.loads).
      try:
        object_or_json_or_file = ast.literal_eval(object_or_json_or_file)
        # Looking for a literal dict, no need for isinstance.
        if type(object_or_json_or_file) == dict:  # pylint: disable=unidiomatic-typecheck
          if not allow_dict:
            raise ValueError(
                'object_or_json_or_file parsed as a dictionary, but allow_dict=False.'
            )
          return object_or_json_or_file
        # Looking for a literal list, no need for isinstance.
        elif type(object_or_json_or_file) == list:  # pylint: disable=unidiomatic-typecheck
          if not allow_list:
            raise ValueError(
                'object_or_json_or_file parsed as a list, but allow_list=False.'
            )
          return object_or_json_or_file
      # These are the only exceptions ever raised by literal_eval.
      except SyntaxError as e:
        python_parsing_error = e
      except ValueError as e:
        python_parsing_error = e
      else:
        python_parsing_error = None

      try:
        # then try to use as a path to a JSON file
        f = open(object_or_json_or_file)
        object_or_json_or_file = json.load(f)
        f.close()
      except ValueError as json_file_parsing_error:
        raise ValueError('Unable to parse the content of the json file %s. '
                         'Parsing error: %s.' %
                         (object_or_json_or_file,
                          str(json_file_parsing_error)))
      except OSError as file_error:
        # None of the three interpretations worked; build an error message
        # that reports all three failures to help the user diagnose.
        max_file_error_len = 50
        if len(str(file_error)) > max_file_error_len:
          file_error_str = str(file_error)[:max_file_error_len] + '...'
        else:
          file_error_str = file_error.message
        message = ('Unable to parse override parameters either as a literal '
                   'JSON or as a python dictionary literal or as the name of '
                   'a file that exists.\n\n'
                   'GFile error: %s\n\n'
                   'JSON parsing error: %s\n\n'
                   'Python dict parsing error: %s\n\n'
                   'Override parameters:\n%s.\n' %
                   (file_error_str, str(literal_json_parsing_error),
                    str(python_parsing_error), object_or_json_or_file))
        if '{' in object_or_json_or_file or '}' in object_or_json_or_file:
          message += _debug_message(object_or_json_or_file)
        raise ValueError(message)

  # At this point we hold a parsed (or originally non-string) object; enforce
  # the allowed container types.
  if isinstance(object_or_json_or_file, dict):
    if not allow_dict:
      raise ValueError(
          'object_or_json_or_file parsed as a dictionary, but allow_dict=False.'
      )
  elif isinstance(object_or_json_or_file, list):
    if not allow_list:
      raise ValueError(
          'object_or_json_or_file parsed as a list, but allow_list=False.')
  else:
    raise ValueError(
        'object_or_json_or_file did not parsed to a supported type. Found: %s'
        % object_or_json_or_file)
  return object_or_json_or_file


def parse_dict(dict_or_json_or_file):
  # Convenience wrapper: parse with only dictionaries allowed.
  return parse_object(dict_or_json_or_file, allow_dict=True)


def _get_key_and_indices(maybe_key_with_indices):
  """Extracts key and indices from key in format 'key_name[index0][index1]'."""
  patterns = maybe_key_with_indices.split('[')
  if len(patterns) == 1:
    # No '[' present: a plain key with no indices.
    return (maybe_key_with_indices, None)
  # For each index ensure that the brackets are closed and extract number
  indices = []
  for split_pattern in patterns[1:]:
    # Remove surrounding whitespace.
    split_pattern = split_pattern.strip()
    if split_pattern[-1] != ']':
      raise ValueError(
          'ParameterName {} has bad format. Supported format: key_name, '
          'key_name[index0], key_name[index0][index1], ...'.format(
              maybe_key_with_indices))
    try:
      indices.append(int(split_pattern[:-1]))
    except ValueError:
      raise ValueError(
          'Only integer indexing allowed for ParameterName. '
          'Faulty specification: {}'.format(maybe_key_with_indices))
  return patterns[0], indices


class ParameterContainer(object):
  """Helper class that provides a class-like interface to a dictionary.

  It can be initialized with a dictionary of custom parameters, which can be
  passed either as a dict, a JSON string, or a path to a JSON file.

  ParameterContainer supports nesting of dictionaries. For example, the
  dictionary
  {
    'key1': 'value1',
    'nested': {
      'key2': 'val2',
      'key3': 'val3'
    }
  }
  will be converted to a ParameterContainer pc that has the attributes pc.key1,
  pc.nested.key2, and pc.nested.key3. pc.nested is a ParameterContainer itself.
  """

  # Internal bookkeeping attributes that must bypass the dynamic-attribute
  # machinery.
  SPECIAL_ATTRS = ['_lock', '_custom_params']

  # Allow integration with static python type checking. See
  # https://opensource.google/projects/pytype. Since ParameterContainer
  # dynamically sets its attributes, pytype needs guidance to know that pc.key1
  # is valid.
  HAS_DYNAMIC_ATTRIBUTES = True

  def __init__(self, default_params=None, custom_params=None):
    if default_params is None:
      default_params = {}
    self._custom_params = set()
    self._lock = False
    if not custom_params:
      custom_params = {}
    # Defaults first, then custom values (marked immutable) on top.
    self.override(default_params)
    self.override(custom_params, True)

  @classmethod
  def from_defaults_and_overrides(cls, defaults, overrides, **override_kwargs):
    """Creates a new object from defaults and overrides.

    Args:
      defaults: A dictionary or another type of object that
        ParameterContainer's consructor accepts, containing the default
        arguments.
      overrides: A dictionary or another type of object that
        ParameterContainer's consructor accepts, containing overrides for the
        default arguments.
      **override_kwargs: Keyword arguments that are passed to the `override`
        method when `overrides` override `defaults`.

    Returns:
      A ParameterContainer object constructed from `defaults` and overridden
      by `overrides`.
    """
    params = cls(defaults)
    params.override(overrides, **override_kwargs)
    return params

  def override(self,
               params,
               is_custom=False,
               is_strict=False,
               strictness_depth=None,
               allow_custom_override=False):
    """Extends fields and overrides values of the ParameterContainer.

    Args:
      params: A dictionary, where the keys are strings and the values are
        objects that are json-serializable or dictionaries where the keys are
        strings and the values are objects that are json-serializable or other
        dictionaries... (and so on), OR:
        A string, a json-serialization of that dictionary, OR:
        A string, a filepath to a json-serialization of that dictionary, OR:
        A ParameterContainer. In the latter case, the 'custom' properties of
        the keys in 'params' will propagate to self.
      is_custom: A boolean, any overridden / extended parameter will be marked
        as 'custom', and will become immutable.
      is_strict: If true, an exception will be thrown if a key in params_dict
        (or in its nested sub-dictionaries) does not already exist in the
        ParameterContainer.
In other words, if is_strict is True, only overriding is allowed, but extension is forbidden. strictness_depth: An integer or None. If is_strict is true, strictness_depth states for how many levels of depth the strictness will hold. For example, if strictness_depth is 1, strictness is imposed only the first level keys (as opposed to nested keys). None means that strictness will be imposed all the way through, at all levels. allow_custom_override: if true, no exception are thrown if custom params are being overridden. The ParameterContainer remembers, for any parameter, if the last update was with is_custom True or False. Raises: ValueError: If one of the keys in the dictionary is not a string, or if param_dict was not successfully parsed from the literal json or json file. AttributeError: If one of the keys in 'params_dict' cannot be updated, because of one of the following reasons: 1. is_strict=True and the key does not exist in the ParameterContainer. 2. The key has already been updated with a custom value, which made it immutable. 3. The ParameterContainer (or the relevant nested ParameterContainer is locked. 
""" if strictness_depth is not None: if not is_strict: raise ValueError('strictness_depth can only be specified when is_strict' ' is true') if strictness_depth <= 0: raise ValueError( 'strictness_depth must be positive, not %d' % strictness_depth) if isinstance(params, ParameterContainer): params_dict = params.as_dict() if is_custom: self._override( params_dict, is_custom=True, is_strict=is_strict, strictness_depth=strictness_depth, allow_custom_override=allow_custom_override) else: self._override( params_dict, is_custom=False, is_strict=is_strict, strictness_depth=strictness_depth, allow_custom_override=allow_custom_override) self._override( params.as_dict(custom_only=True), is_custom=True, allow_custom_override=allow_custom_override) else: self._override( params, is_custom=is_custom, is_strict=is_strict, strictness_depth=strictness_depth, allow_custom_override=allow_custom_override) def _override_list_element(self, indices, nested_list, params_dict, is_custom, is_strict, strictness_depth, allow_custom_override): """Recursively overrides the item at list index indices[0].""" if indices: if not isinstance(nested_list, list): raise AttributeError( 'Nested List mismatch: Can only override list with list.') nested_list[indices[0]] = self._override_list_element( indices[1:], nested_list[indices[0]], params_dict, is_custom, is_strict, strictness_depth, allow_custom_override) elif isinstance(nested_list, PRIMITIVES): if not isinstance(params_dict, PRIMITIVES): raise AttributeError( 'Nested List mismatch: Can only override primitive with primitive.') nested_list = params_dict else: if isinstance(nested_list, dict): nested_list = ParameterContainer(nested_list) # Using _override to bypass strictness_depth > 0 test in override. 
# pylint: disable=protected-access nested_list._override(params_dict, is_custom, is_strict, strictness_depth, allow_custom_override) # pylint: enable=protected-access return nested_list def _override(self, params_dict, is_custom=False, is_strict=None, strictness_depth=None, allow_custom_override=False): """Extends fields and overrides values of the ParameterContainer.""" self._raise_if_locked() params_dict = parse_dict(params_dict) if strictness_depth == 0: is_strict = False if strictness_depth is not None and strictness_depth > 0: strictness_depth -= 1 for k, v in six.iteritems(params_dict): if not isinstance(k, six.string_types): raise ValueError('The keys in the dictionary must be strings, \'%s\' ' 'encountered (type: %s).' % (str(k), type(k))) k, indices = _get_key_and_indices(k) if indices is not None and not isinstance(self.__dict__[k], list): raise ValueError('Only parameters with list values can use ' 'indices (key=%s, index0=%d)' % (k, indices[0])) # If v is a dict k needs to be overridden recursively. if isinstance(v, dict): if hasattr(self, k) and isinstance(self.__dict__[k], ParameterContainer): # Using _override to bypass strictness_depth > 0 test in override. # pylint: disable=protected-access self.__dict__[k]._override(v, is_custom, is_strict, strictness_depth, allow_custom_override) # pylint: enable=protected-access else: # self.__dict__[k] is not a ParameterContainer. if hasattr(self, k): if isinstance(self.__dict__[k], list) and indices is not None: # If k's value is a list and indices are specified, recursively # override the value at indices[0]. self.__dict__[k] = self._override_list_element( indices, self.__dict__[k], v, is_custom, is_strict, strictness_depth, allow_custom_override) else: # No known handling strategy for the paramater configuration. raise TypeError( 'Parameter {} cannot be overridden with {}'.format(k, v)) elif is_strict: # Only existing keys can be overridden if is_strict. 
raise AttributeError('Parameter not recognized: %s' % k) else: # Unknown parameter, create empty container at k and override it. self.__dict__[k] = ParameterContainer() # Using _override to bypass strictness_depth > 0 test in override. # pylint: disable=protected-access self.__dict__[k]._override(v, is_custom, is_strict, strictness_depth, allow_custom_override) # pylint: enable=protected-access else: if not allow_custom_override: self._raise_if_custom(k) if is_strict or k in self.__dict__: self.__setattr__(k, v, allow_custom_override, indices=indices) else: if isinstance(v, StringEnum): self.__dict__[k] = copy.deepcopy(v) else: self.__dict__[k] = v if is_custom: self._custom_params.add(k) def _raise_if_custom(self, key): if hasattr(self, '_custom_params') and key in self._custom_params: raise AttributeError( 'Parameter %s is immutable because it has a custom value.' % key) def _raise_if_locked(self): if hasattr(self, '_lock') and self._lock: raise AttributeError('Cannot set parameter: the ParameterContainer ' 'is locked.') def _set_nested_attribute(self, nested_list, indices, value): if indices: if not isinstance(nested_list, list): raise AttributeError( 'Nested List mismatch: Can only override list with list.') nested_list[indices[0]] = self._set_nested_attribute( nested_list[indices[0]], indices[1:], value) return nested_list return value def __setattr__(self, key, value, allow_custom_override=False, indices=None): self._raise_if_locked() if not allow_custom_override: self._raise_if_custom(key) if key not in self.__dict__ and key not in ParameterContainer.SPECIAL_ATTRS: raise AttributeError('Parameter not recognized: %s' % key) if key in self.__dict__ and isinstance(self.__dict__[key], StringEnum): self.__dict__[key].assign(value) else: if indices is not None: self.__dict__[key] = self._set_nested_attribute(self.__dict__[key], indices, value) else: self.__dict__[key] = value def get(self, key, default_value=None): v = self.__dict__.get(key, default_value) if 
isinstance(v, StringEnum): return v.value else: return v def is_custom(self, key): return key in self._custom_params def _nested_list_to_dict(self, nested_list): """Returns a (possibly nested) list of all parameters in the list. Args: nested_list: The list to convert. Returns: A copy of nested_list where each ParamContainer is converted to a dict and each StringEnum converted to its value. """ output_list = [] for element in nested_list: if isinstance(element, ParameterContainer): output_list.append(element.as_dict()) elif isinstance(element, StringEnum): output_list.append(element.value) elif isinstance(element, list): output_list.append(self._nested_list_to_dict(element)) else: output_list.append(element) return output_list def as_dict(self, custom_only=False): """Returns a (possibly nested) dictionary with all / custom parameters. Args: custom_only: A boolean, if true, only the custom parameters will be retured. Returns: A (possibly nested) dictionary with all / custom parameters. """ params = {} for k, v in six.iteritems(self.__dict__): if (k in ParameterContainer.SPECIAL_ATTRS or k not in self._custom_params and custom_only): continue if isinstance(v, ParameterContainer): params[k] = v.as_dict(custom_only) elif isinstance(v, StringEnum): if k in self._custom_params or v.value is not None: params[k] = v.value elif isinstance(v, list): params[k] = self._nested_list_to_dict(v) else: params[k] = v return params def __repr__(self): # Will output for example ParameterContainer{'a': 1}. return 'ParameterContainer' + self.as_dict().__repr__() def __str__(self): return self.__repr__() def lock(self): """The ParameterContainer can be set to read-only via .lock(). By definition, this cannot be reverted. """ for v in self.__dict__.values(): if isinstance(v, ParameterContainer): v.lock() self._lock = True def get_params_of_indicated_type(params): """Gets the parameters of the type indicated in params.type itself. 
For example, if params is params = ParameterContainer({ 'type': 'MOMENTUM', 'ADAM': { 'beta1': 0.9, 'beta2': 0.999, }, 'MOMENTUM': { 'momentum': 0.9, }, }) then get_params_of_indicated_type(params) is ParameterContainer({'momentum': 0.9}). Args: params: A ParameterContainer of the structure illustrated above. Returns: A ParameterContainer which is: - params.get('params.type') if params.type is present - ParameterContainer() if params.type is not present - None if params.type is None. Raises: ValueError: If params does not have match the pattern illustrated above. """ try: type_ = params.type except AttributeError: raise ValueError("`params` must have a 'type' attribute.") if type_ is None: return None params_of_type = params.get(type_, ParameterContainer()) return params_of_type def import_params(defaults, overrides, strictness_depth=None): """Overrides one dictionary's value with the other's. The choice of the name import_params is because a typical usage pattern is importing the parameters dictionary from some other file and possibly overriding some of them. Args: defaults: A dictionary overrides: A dictionary strictness_depth: An integer or None. Returns: A dictionary. For ant key that is in `override_params` its value will override the respective value in `base_params`. This includes nested dictionaries, the same way as it is done with ParameterContainer.override (see above). Raises: ValueError: If `overrides` (or a nested dictionary within) has a key that `defaults` does not have """ base = ParameterContainer(defaults) try: base.override(overrides, is_strict=True, strictness_depth=strictness_depth) except AttributeError as e: raise ValueError(str(e)) return base.as_dict() def extend_params(params, more_params): """Extends dictionary with new values. Args: params: A dictionary more_params: A dictionary Returns: A dictionary which combines keys from both dictionaries. Raises: ValueError: if dicts have the same key. 
""" for yak in more_params: if yak in params: raise ValueError('Key "%s" is already in dict' % yak) params.update(more_params) return params class StringEnum(object): """Class for defining and storing enum value inside ParameterContainer. Example: params = ParameterContainer({ 'v': utils.StringEnum( [ # Comment about a particular value goes here. 'a', # Another option to consider. 'b', # Also a valid value. 'c' ], default_value='b') }) ... params.v # equals to 'b' params.v.value_a # equals to 'a' params.v = 'a' # OK params.override({'v': 'a'} # OK params.v = 'xxx' # raises ValueError params.override({'v': 'xxx'}) # raiese ValueError params.v == 'a' # OK params.v == 'xxx' # raises ValueError """ def __init__(self, values, default_value): self._values = values self.assign(default_value) self._setup_values() def assign(self, value): """Assigns value with checking correctness.""" if not isinstance(value, six.string_types): raise ValueError('Can\'t assign to a non-string') if value not in self._values: raise ValueError('Expected one of ["%s"], found: "%s"' % ('","'.join(self._values), value)) self._value = value @property def value(self): """Returns current value of the enum.""" return self._value def _setup_values(self): """Setups values constants.""" for v in self._values: setattr(self, 'value_' + v, v) def __eq__(self, other): """Compares enum value against a string.""" if not isinstance(other, six.string_types): raise ValueError('Can\'t compare with a non-string') if other not in self._values: raise ValueError('Expected one of ["%s"], found: "%s"' % ('","'.join(self._values), other)) return self._value == other def __ne__(self, other): """Compares enum value against a string.""" return not self.__eq__(other) def __hash__(self): """Hash to be used for the enum.""" return self._value.__hash__() def __str__(self): return str(self.value) def __repr__(self): return '\'%s\' of StringEnum([\'%s\'])' % ( self.value, '\', \''.join(self._values)) def 
_debug_message(object_or_json_or_file): """Create a debug message for badly formatted json params.""" debug_info = 'Debug Info:\n' counts = { c: object_or_json_or_file.count(c) for c in ['\'', '"', '{', '}', ':', ','] } # Expect to see same number of { and }. if counts['{'] != counts['}']: debug_info += ('Expected counts of \'{\' and \'}\' differ %d != %d\n' % (counts['{'], counts['}'])) # Usually there is a : for each two delimiters. if (counts[':'] * 2 != counts['\''] and counts[':'] * 2 != counts['"'] and counts[':'] * 2 != counts['\''] + counts['"']): debug_info += ( 'Expected counts of ":" (%d) to be half of string delimiters: ' '\' (%d) and \" (%d)\n' % (counts[':'], counts['\''], counts['"'])) # ' and " should come in pairs. for c in ['\'', '"']: if counts[c] % 2: debug_info += ('Expected even number of delimiter %s but notice %d' % (c, counts[c])) debug_info += 'Char Count\n' for c, count in counts.items(): debug_info += '\n%s ==> %d' % (c, count) return debug_info
12,127
375
{ "id": 345, "logo_path": null, "description": null, "name": "<NAME>", "parent_company": null, "homepage": null, "headquarters": null }
60
5,411
<filename>code/client/launcher/UserLibrary.h<gh_stars>1000+ /* * This file is part of the CitizenFX project - http://citizen.re/ * * See LICENSE and MENTIONS in the root of the source tree for information * regarding licensing. */ #pragma once class UserLibrary { private: std::vector<uint8_t> m_libraryBuffer; public: UserLibrary(const wchar_t* fileName); uint32_t GetExportCode(const char* name) const; const uint8_t* GetOffsetPointer(uint32_t offset) const; };
184
3,513
<reponame>puzzle/nochmal #include <nokogiri.h> VALUE cNokogiriXmlEntityReference; /* * call-seq: * new(document, content) * * Create a new EntityReference element on the +document+ with +name+ */ static VALUE new (int argc, VALUE *argv, VALUE klass) { xmlDocPtr xml_doc; xmlNodePtr node; VALUE document; VALUE name; VALUE rest; VALUE rb_node; rb_scan_args(argc, argv, "2*", &document, &name, &rest); Data_Get_Struct(document, xmlDoc, xml_doc); node = xmlNewReference( xml_doc, (const xmlChar *)StringValueCStr(name) ); noko_xml_document_pin_node(node); rb_node = noko_xml_node_wrap(klass, node); rb_obj_call_init(rb_node, argc, argv); if (rb_block_given_p()) { rb_yield(rb_node); } return rb_node; } void noko_init_xml_entity_reference() { assert(cNokogiriXmlNode); /* * EntityReference represents an EntityReference node in an xml document. */ cNokogiriXmlEntityReference = rb_define_class_under(mNokogiriXml, "EntityReference", cNokogiriXmlNode); rb_define_singleton_method(cNokogiriXmlEntityReference, "new", new, -1); }
461
399
<reponame>adoug/3D-Graphics-Rendering-Cookbook #include "shared/scene/Scene.h" #include "shared/Utils.h" #include <algorithm> #include <numeric> void saveStringList(FILE* f, const std::vector<std::string>& lines); void loadStringList(FILE* f, std::vector<std::string>& lines); int addNode(Scene& scene, int parent, int level) { int node = (int)scene.hierarchy_.size(); { // TODO: resize aux arrays (local/global etc.) scene.localTransform_.push_back(glm::mat4(1.0f)); scene.globalTransform_.push_back(glm::mat4(1.0f)); } scene.hierarchy_.push_back({ .parent_ = parent, .lastSibling_ = -1 }); if (parent > -1) { // find first item (sibling) int s = scene.hierarchy_[parent].firstChild_; if (s == -1) { scene.hierarchy_[parent].firstChild_ = node; scene.hierarchy_[node].lastSibling_ = node; } else { int dest = scene.hierarchy_[s].lastSibling_; if (dest <= -1) { // no cached lastSibling, iterate nextSibling indices for (dest = s; scene.hierarchy_[dest].nextSibling_ != -1; dest = scene.hierarchy_[dest].nextSibling_); } scene.hierarchy_[dest].nextSibling_ = node; scene.hierarchy_[s].lastSibling_ = node; } } scene.hierarchy_[node].level_ = level; scene.hierarchy_[node].nextSibling_ = -1; scene.hierarchy_[node].firstChild_ = -1; return node; } void markAsChanged(Scene& scene, int node) { int level = scene.hierarchy_[node].level_; scene.changedAtThisFrame_[level].push_back(node); // TODO: use non-recursive iteration with aux stack for (int s = scene.hierarchy_[node].firstChild_; s != - 1 ; s = scene.hierarchy_[s].nextSibling_) markAsChanged(scene, s); } int findNodeByName(const Scene& scene, const std::string& name) { // Extremely simple linear search without any hierarchy reference // To support DFS/BFS searches separate traversal routines are needed for (size_t i = 0 ; i < scene.localTransform_.size() ; i++) if (scene.nameForNode_.contains(i)) { int strID = scene.nameForNode_.at(i); if (strID > -1) if (scene.names_[strID] == name) return (int)i; } return -1; } int 
getNodeLevel(const Scene& scene, int n) { int level = -1; for (int p = 0 ; p != -1 ; p = scene.hierarchy_[p].parent_, level++); return level; } bool mat4IsIdentity(const glm::mat4& m); void fprintfMat4(FILE* f, const glm::mat4& m); // CPU version of global transform update [] void recalculateGlobalTransforms(Scene& scene) { if (!scene.changedAtThisFrame_[0].empty()) { int c = scene.changedAtThisFrame_[0][0]; scene.globalTransform_[c] = scene.localTransform_[c]; scene.changedAtThisFrame_[0].clear(); } for (int i = 1 ; i < MAX_NODE_LEVEL && (!scene.changedAtThisFrame_[i].empty()); i++ ) { for (const int& c: scene.changedAtThisFrame_[i]) { int p = scene.hierarchy_[c].parent_; scene.globalTransform_[c] = scene.globalTransform_[p] * scene.localTransform_[c]; } scene.changedAtThisFrame_[i].clear(); } } void loadMap(FILE* f, std::unordered_map<uint32_t, uint32_t>& map) { std::vector<uint32_t> ms; uint32_t sz = 0; fread(&sz, 1, sizeof(sz), f); ms.resize(sz); fread(ms.data(), sizeof(int), sz, f); for (size_t i = 0; i < (sz / 2) ; i++) map[ms[i * 2 + 0]] = ms[i * 2 + 1]; } void loadScene(const char* fileName, Scene& scene) { FILE* f = fopen(fileName, "rb"); if (!f) { printf("Cannot open scene file '%s'. 
Please run SceneConverter from Chapter7 and/or MergeMeshes from Chapter 9", fileName); return; } uint32_t sz = 0; fread(&sz, sizeof(sz), 1, f); scene.hierarchy_.resize(sz); scene.globalTransform_.resize(sz); scene.localTransform_.resize(sz); // TODO: check > -1 // TODO: recalculate changedAtThisLevel() - find max depth of a node [or save scene.maxLevel] fread(scene.localTransform_.data(), sizeof(glm::mat4), sz, f); fread(scene.globalTransform_.data(), sizeof(glm::mat4), sz, f); fread(scene.hierarchy_.data(), sizeof(Hierarchy), sz, f); // Mesh for node [index to some list of buffers] loadMap(f, scene.materialForNode_); loadMap(f, scene.meshes_); if (!feof(f)) { loadMap(f, scene.nameForNode_); loadStringList(f, scene.names_); loadStringList(f, scene.materialNames_); } fclose(f); } void saveMap(FILE* f, const std::unordered_map<uint32_t, uint32_t>& map) { std::vector<uint32_t> ms; ms.reserve(map.size() * 2); for (const auto& m : map) { ms.push_back(m.first); ms.push_back(m.second); } const uint32_t sz = static_cast<uint32_t>(ms.size()); fwrite(&sz, sizeof(sz), 1, f); fwrite(ms.data(), sizeof(int), ms.size(), f); } void saveScene(const char* fileName, const Scene& scene) { FILE* f = fopen(fileName, "wb"); const uint32_t sz = (uint32_t)scene.hierarchy_.size(); fwrite(&sz, sizeof(sz), 1, f); fwrite(scene.localTransform_.data(), sizeof(glm::mat4), sz, f); fwrite(scene.globalTransform_.data(), sizeof(glm::mat4), sz, f); fwrite(scene.hierarchy_.data(), sizeof(Hierarchy), sz, f); // Mesh for node [index to some list of buffers] saveMap(f, scene.materialForNode_); saveMap(f, scene.meshes_); if (!scene.names_.empty() && !scene.nameForNode_.empty()) { saveMap(f, scene.nameForNode_); saveStringList(f, scene.names_); saveStringList(f, scene.materialNames_); } fclose(f); } bool mat4IsIdentity(const glm::mat4& m) { return (m[0][0] == 1 && m[0][1] == 0 && m[0][2] == 0 && m[0][3] == 0 && m[1][0] == 0 && m[1][1] == 1 && m[1][2] == 0 && m[1][3] == 0 && m[2][0] == 0 && m[2][1] == 0 && 
m[2][2] == 1 && m[2][3] == 0 && m[3][0] == 0 && m[3][1] == 0 && m[3][2] == 0 && m[3][3] == 1); } void fprintfMat4(FILE* f, const glm::mat4& m) { if (mat4IsIdentity(m)) { fprintf(f, "Identity\n"); } else { fprintf(f, "\n"); for (int i = 0 ; i < 4 ; i++) { for (int j = 0 ; j < 4 ; j++) fprintf(f, "%f ;", m[i][j]); fprintf(f, "\n"); } } } void dumpTransforms(const char* fileName, const Scene& scene) { FILE* f = fopen(fileName, "a+"); for (size_t i = 0 ; i < scene.localTransform_.size() ; i++) { fprintf(f, "Node[%d].localTransform: ", (int)i); fprintfMat4(f, scene.localTransform_[i]); fprintf(f, "Node[%d].globalTransform: ", (int)i); fprintfMat4(f, scene.globalTransform_[i]); fprintf(f, "Node[%d].globalDet = %f; localDet = %f\n", (int)i, glm::determinant(scene.globalTransform_[i]), glm::determinant(scene.localTransform_[i])); } fclose(f); } void printChangedNodes(const Scene& scene) { for (int i = 0 ; i < MAX_NODE_LEVEL && (!scene.changedAtThisFrame_[i].empty()); i++ ) { printf("Changed at level(%d):\n", i); for (const int& c: scene.changedAtThisFrame_[i]) { int p = scene.hierarchy_[c].parent_; //scene.globalTransform_[c] = scene.globalTransform_[p] * scene.localTransform_[c]; printf(" Node %d. 
Parent = %d; LocalTransform: ", c, p); fprintfMat4(stdout, scene.localTransform_[i]); if (p > -1) { printf(" ParentGlobalTransform: "); fprintfMat4(stdout, scene.globalTransform_[p]); } } } } // Shift all hierarchy components in the nodes void shiftNodes(Scene& scene, int startOffset, int nodeCount, int shiftAmount) { auto shiftNode = [shiftAmount](Hierarchy& node) { if (node.parent_ > -1) node.parent_ += shiftAmount; if (node.firstChild_ > -1) node.firstChild_ += shiftAmount; if (node.nextSibling_ > -1) node.nextSibling_ += shiftAmount; if (node.lastSibling_ > -1) node.lastSibling_ += shiftAmount; // node->level_ does not have to be shifted }; // If there are too many nodes, we can use std::execution::par with std::transform // std::transform(scene.hierarchy_.begin() + startOffset, scene.hierarchy_.begin() + nodeCount, scene.hierarchy_.begin() + startOffset, shiftNode); // for (auto i = scene.hierarchy_.begin() + startOffset ; i != scene.hierarchy_.begin() + nodeCount ; i++) // shiftNode(*i); for (int i = 0 ; i < nodeCount ; i++) shiftNode(scene.hierarchy_[i + startOffset]); } using ItemMap = std::unordered_map<uint32_t, uint32_t>; // Add the items from otherMap shifting indices and values along the way void mergeMaps(ItemMap& m, const ItemMap& otherMap, int indexOffset, int itemOffset) { for (const auto& i: otherMap) m[i.first + indexOffset] = i.second + itemOffset; } /** There are different use cases for scene merging. The simplest one is the direct "gluing" of multiple scenes into one [all the material lists and mesh lists are merged and indices in all scene nodes are shifted appropriately] The second one is creating a "grid" of objects (or scenes) with the same material and mesh sets. 
For the second use case we need two flags: 'mergeMeshes' and 'mergeMaterials' to avoid shifting mesh indices */ void mergeScenes(Scene& scene, const std::vector<Scene*>& scenes, const std::vector<glm::mat4>& rootTransforms, const std::vector<uint32_t>& meshCounts, bool mergeMeshes, bool mergeMaterials) { // Create new root node scene.hierarchy_ = { { .parent_ = -1, .firstChild_ = 1, .nextSibling_ = -1, .lastSibling_ = -1, .level_ = 0 } }; scene.nameForNode_[0] = 0; scene.names_ = { "NewRoot" }; scene.localTransform_.push_back(glm::mat4(1.f)); scene.globalTransform_.push_back(glm::mat4(1.f)); if (scenes.empty()) return; int offs = 1; int meshOffs = 0; int nameOffs = (int)scene.names_.size(); int materialOfs = 0; auto meshCount = meshCounts.begin(); if (!mergeMaterials) scene.materialNames_ = scenes[0]->materialNames_; // FIXME: too much logic (for all the components in a scene, though mesh data and materials go separately - there are dedicated data lists) for (const Scene* s: scenes) { mergeVectors(scene.localTransform_, s->localTransform_); mergeVectors(scene.globalTransform_, s->globalTransform_); mergeVectors(scene.hierarchy_, s->hierarchy_); mergeVectors(scene.names_, s->names_); if (mergeMaterials) mergeVectors(scene.materialNames_, s->materialNames_); int nodeCount = (int)s->hierarchy_.size(); shiftNodes(scene, offs, nodeCount, offs); mergeMaps(scene.meshes_, s->meshes_, offs, mergeMeshes ? meshOffs : 0); mergeMaps(scene.materialForNode_, s->materialForNode_, offs, mergeMaterials ? 
materialOfs : 0); mergeMaps(scene.nameForNode_, s->nameForNode_, offs, nameOffs); offs += nodeCount; materialOfs += (int)s->materialNames_.size(); nameOffs += (int)s->names_.size(); if (mergeMeshes) { meshOffs += *meshCount; meshCount++; } } // fixing 'nextSibling' fields in the old roots (zero-index in all the scenes) offs = 1; int idx = 0; for (const Scene* s: scenes) { int nodeCount = (int)s->hierarchy_.size(); bool isLast = (idx == scenes.size() - 1); // calculate new next sibling for the old scene roots int next = isLast ? -1 : offs + nodeCount; scene.hierarchy_[offs].nextSibling_ = next; // attach to new root scene.hierarchy_[offs].parent_ = 0; // transform old root nodes, if the transforms are given if (!rootTransforms.empty()) scene.localTransform_[offs] = rootTransforms[idx] * scene.localTransform_[offs]; offs += nodeCount; idx++; } // now shift levels of all nodes below the root for (auto i = scene.hierarchy_.begin() + 1 ; i != scene.hierarchy_.end() ; i++) i->level_++; } void dumpSceneToDot(const char* fileName, const Scene& scene, int* visited) { FILE* f = fopen(fileName, "w"); fprintf(f, "digraph G\n{\n"); for (size_t i = 0; i < scene.globalTransform_.size(); i++) { std::string name = ""; std::string extra = ""; if (scene.nameForNode_.contains(i)) { int strID = scene.nameForNode_.at(i); name = scene.names_[strID]; } if (visited) { if (visited[i]) extra = ", color = red"; } fprintf(f, "n%d [label=\"%s\" %s]\n", (int)i, name.c_str(), extra.c_str()); } for (size_t i = 0; i < scene.hierarchy_.size(); i++) { int p = scene.hierarchy_[i].parent_; if (p > -1) fprintf(f, "\t n%d -> n%d\n", p, (int)i); } fprintf(f, "}\n"); fclose(f); } /** A rather long algorithm (and the auxiliary routines) to delete a number of scene nodes from the hierarchy */ /* */ // Add an index to a sorted index array static void addUniqueIdx(std::vector<uint32_t>& v, uint32_t index) { if (!std::binary_search(v.begin(), v.end(), index)) v.push_back(index); } // Recurse down from a node 
and collect all nodes which are already marked for deletion static void collectNodesToDelete(const Scene& scene, int node, std::vector<uint32_t>& nodes) { for (int n = scene.hierarchy_[node].firstChild_; n != - 1 ; n = scene.hierarchy_[n].nextSibling_) { addUniqueIdx(nodes, n); collectNodesToDelete(scene, n, nodes); } } int findLastNonDeletedItem(const Scene& scene, const std::vector<int>& newIndices, int node) { // we have to be more subtle: // if the (newIndices[firstChild_] == -1), we should follow the link and extract the last non-removed item // .. if (node == -1) return -1; return (newIndices[node] == -1) ? findLastNonDeletedItem(scene, newIndices, scene.hierarchy_[node].nextSibling_) : newIndices[node]; } void shiftMapIndices(std::unordered_map<uint32_t, uint32_t>& items, const std::vector<int>& newIndices) { std::unordered_map<uint32_t, uint32_t> newItems; for (const auto& m: items) { int newIndex = newIndices[m.first]; if (newIndex != -1) newItems[newIndex] = m.second; } items = newItems; } // Approximately an O ( N * Log(N) * Log(M)) algorithm (N = scene.size, M = nodesToDelete.size) to delete a collection of nodes from scene graph void deleteSceneNodes(Scene& scene, const std::vector<uint32_t>& nodesToDelete) { // 0) Add all the nodes down below in the hierarchy auto indicesToDelete = nodesToDelete; for (auto i: indicesToDelete) collectNodesToDelete(scene, i, indicesToDelete); // aux array with node indices to keep track of the moved ones [moved = [](node) { return (node != nodes[node]); ] std::vector<int> nodes(scene.hierarchy_.size()); std::iota(nodes.begin(), nodes.end(), 0); // 1.a) Move all the indicesToDelete to the end of 'nodes' array (and cut them off, a variation of swap'n'pop for multiple elements) auto oldSize = nodes.size(); eraseSelected(nodes, indicesToDelete); // 1.b) Make a newIndices[oldIndex] mapping table std::vector<int> newIndices(oldSize, -1); for(int i = 0 ; i < nodes.size() ; i++) newIndices[nodes[i]] = i; // 2) Replace all 
non-null parent/firstChild/nextSibling pointers in all the nodes by new positions auto nodeMover = [&scene, &newIndices](Hierarchy& h) { return Hierarchy { .parent_ = (h.parent_ != -1) ? newIndices[h.parent_] : -1, .firstChild_ = findLastNonDeletedItem(scene, newIndices, h.firstChild_), .nextSibling_ = findLastNonDeletedItem(scene, newIndices, h.nextSibling_), .lastSibling_ = findLastNonDeletedItem(scene, newIndices, h.lastSibling_) }; }; std::transform(scene.hierarchy_.begin(), scene.hierarchy_.end(), scene.hierarchy_.begin(), nodeMover); // 3) Finally throw away the hierarchy items eraseSelected(scene.hierarchy_, indicesToDelete); // 4) As in mergeScenes() routine we also have to adjust all the "components" (i.e., meshes, materials, names and transformations) // 4a) Transformations are stored in arrays, so we just erase the items as we did with the scene.hierarchy_ eraseSelected(scene.localTransform_, indicesToDelete); eraseSelected(scene.globalTransform_, indicesToDelete); // 4b) All the maps should change the key values with the newIndices[] array shiftMapIndices(scene.meshes_, newIndices); shiftMapIndices(scene.materialForNode_, newIndices); shiftMapIndices(scene.nameForNode_, newIndices); // 5) scene node names list is not modified, but in principle it can be (remove all non-used items and adjust the nameForNode_ map) // 6) Material names list is not modified also, but if some materials fell out of use }
6,124
473
import re import bisect import ctypes def get_hex(byte_str): return re.sub('([0-9A-Fa-f]{2})', '0x\g<0>,', byte_str) def pretty_bytes(byte_str): byte_lines = re.sub('(0x[0-9A-Fa-f]{2}, ?){16}', '\g<0>\n', byte_str); return re.sub('(0x[0-9A-Fa-f]{2}.*?\n)', ' \g<0>', byte_lines); def xor_bytes(byte_str): KEY = 'CS10FUN!' i = 0 keylen = len(KEY) byte_array = byte_str.split(', ') packed_array = '' for byte in byte_array: if not byte: continue packed_array += '0x' + hex(int(byte, 16) ^ ord(KEY[i % keylen]))[2:].zfill(2) + ', ' i += 1 return packed_array def get_function_text(function_name, filename=None): filename = 'gdb.txt' if not filename else filename f = open(filename) lines = f.read().splitlines() f.close() fn_lines = [] found_fn = False for line in lines: if not found_fn: if line.find(function_name) != -1: found_fn = True else: if line.find('End of assembler dump') != -1: break else: fn_lines.append(line) #print '\n'.join(fn_lines) return fn_lines def parse_gdb_function(function_name, filename=None): gdb_re = re.compile('.*?<\+(\d+)>: *(([0-9A-Fa-f]{2} )+) *(.*)\r?\n?') byte_code_c = 'unsigned char %s[] = {\n' % (function_name + '_bytes') byte_code = '' lines = get_function_text(function_name, filename) for line in lines: line = line.replace('\t', ' ') matches = gdb_re.match(line) if matches: #byte_code_c += get_hex(matches.group(2)) byte_code += get_hex(matches.group(2)) byte_code = xor_bytes(byte_code) byte_code_c += byte_code[:-2] + '\n};\n\n' offset_list = [] return pretty_bytes(byte_code_c) of = open('../src/packed.h', 'w') of.write('#ifndef PACKED_H_\n') of.write('#define PACKED_H_\n\n') of.write(parse_gdb_function('insertion_sort')) of.write(parse_gdb_function('selection_sort')) of.write(parse_gdb_function('heap_propagate')) of.write(parse_gdb_function('heapify')) of.write(parse_gdb_function('heap_sort')) of.write(parse_gdb_function('merge_helper')) of.write('#ifndef PATCHED_1\n') of.write(parse_gdb_function('merge_sort')) of.write('#else\n') 
of.write(parse_gdb_function('merge_sort', 'gdb2.txt')) of.write('#endif\n\n') of.write('#endif /* PACKED_H_ */\n') of.close()
1,173
470
{ "author": { "name": "<NAME>", "email": "<EMAIL>", "url": "https://www.stratha.us" }, "contributors": [ "<NAME> <<EMAIL>>", "<NAME> <<EMAIL>>", "<NAME> (https://twitter.com/#!/lunafiko)" ], "name": "graphdracula", "description": "JavaScript Graph Layouting and Drawing", "version": "1.2.1", "homepage": "https://www.graphdracula.net", "scripts": { "start": "npm run build:dev", "build": "npm run build:node; npm run build:prod", "build:node": "babel src -d lib", "build:dev": "watchify src -v --standalone Dracula --ignore raphael -t babelify --debug -o dist/dracula.dev.js", "build:prod": "browserify src --standalone Dracula --ignore raphael -t babelify --debug | uglifyjs > dist/dracula.min.js", "pretest": "npm run check", "prebuild": "npm test", "test": "mocha --require babel-register src/**/*.spec.js src/*.spec.js", "test-watch": "mocha --watch --require babel-register src/**/*.spec.js src/*.spec.js", "check": "eslint src/", "fix": "eslint --fix src/", "snyk-protect": "snyk protect", "postinstall": "opencollective-postinstall", "prepare": "npm run snyk-protect" }, "license": "MIT", "files": [ "index.js", "lib/*", "dist/*", "README.md" ], "main": "lib/index.js", "bugs": { "email": "<EMAIL>", "url": "https://github.com/strathausen/dracula/issues" }, "keywords": [ "bellman-ford", "dijkstra", "dracula", "graph", "math", "raphael", "snap", "static", "svg", "vml" ], "repository": { "type": "git", "url": "git://github.com/strathausen/dracula.git" }, "dependencies": { "lodash": "^4.17.20", "opencollective": "^1.0.3", "opencollective-postinstall": "^2.0.2", "snyk": "^1.425.4", "uuid": "^3.3.2" }, "devDependencies": { "@babel/cli": "^7.12.1", "babel-preset-env": "^1.7.0", "babelify": "^7.3.0", "browserify": "^16.5.2", "eslint": "^4.18.2", "eslint-config-airbnb-base": "^11.0.0", "eslint-plugin-babel": "^4.0.0", "eslint-plugin-import": "^2.22.1", "mocha": "^8.2.0", "randomcolor": "^0.4.4", "raphael": "^2.2.8", "seedrandom": "^2.4.4", "sinon": "^1.17.5", "snapsvg": "^0.4.0", 
"uglify-js": "^3.11.4", "watchify": "^3.11.1" }, "peerDependencies": { "raphael": "2.x.x" }, "optionalDependencies": {}, "engines": { "node": "*" }, "collective": { "type": "opencollective", "url": "https://opencollective.com/dracula" }, "snyk": true }
1,239
7,113
/* * Copyright (C) 2010-2101 Alibaba Group Holding Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.otter.manager.biz.config.record.dal.ibatis; import java.util.List; import java.util.Map; import org.springframework.orm.ibatis.support.SqlMapClientDaoSupport; import com.alibaba.otter.shared.common.utils.Assert; import com.alibaba.otter.manager.biz.config.record.dal.LogRecordDAO; import com.alibaba.otter.manager.biz.config.record.dal.dataobject.LogRecordDO; /** * 类IbatisLogRecordDAO.java的实现描述:TODO 类实现描述 * * @author simon 2012-6-15 下午1:52:15 */ public class IbatisLogRecordDAO extends SqlMapClientDaoSupport implements LogRecordDAO { public LogRecordDO insert(LogRecordDO entityObj) { Assert.assertNotNull(entityObj); getSqlMapClientTemplate().insert("insertLogRecord", entityObj); return entityObj; } public void delete(Long identity) { Assert.assertNotNull(identity); getSqlMapClientTemplate().delete("deleteLogRecordById", identity); } public void update(LogRecordDO entityObj) { Assert.assertNotNull(entityObj); getSqlMapClientTemplate().update("updateLogRecord", entityObj); } public List<LogRecordDO> listAll() { List<LogRecordDO> logRecordDos = getSqlMapClientTemplate().queryForList("listLogRecords"); return logRecordDos; } public List<LogRecordDO> listByCondition(Map condition) { List<LogRecordDO> logRecordDos = getSqlMapClientTemplate().queryForList("listLogRecordsWithCondition", condition); return logRecordDos; } public List<LogRecordDO> 
listByMultiId(Long... identities) { // TODO Auto-generated method stub return null; } public LogRecordDO findById(Long identity) { Assert.assertNotNull(identity); return (LogRecordDO) getSqlMapClientTemplate().queryForObject("findLogRecordById", identity); } public int getCount() { Integer count = (Integer) getSqlMapClientTemplate().queryForObject("getLogRecordCount"); return count.intValue(); } public int getCount(Map condition) { Integer count = (Integer) getSqlMapClientTemplate().queryForObject("getLogRecordCountWithPIdAndSearchKey", condition); return count.intValue(); } public boolean checkUnique(LogRecordDO entityObj) { // TODO Auto-generated method stub return false; } public List<LogRecordDO> listByPipelineId(Long pipelineId) { List<LogRecordDO> logRecordDos = getSqlMapClientTemplate().queryForList("listLogRecordsByPipelineId", pipelineId); return logRecordDos; } public List<LogRecordDO> listByPipelineIdWithoutContent(Long pipelineId) { List<LogRecordDO> logRecordDos = getSqlMapClientTemplate().queryForList("listLogRecordsByPipelineIdWithoutContent", pipelineId); return logRecordDos; } }
1,529
2,023
<reponame>tdiprima/code #!/usr/bin/env python import os import urllib2 import urllib import zipfile import sys import subprocess import shutil import pipes ## browser friendly pages ## http://commondatastorage.googleapis.com/chromium-browser-continuous/index.html ## http://commondatastorage.googleapis.com/chromium-browser-continuous/index.html?path=Win/ ## Chrome Canary: https://tools.google.com/dlpage/chromesxs?platform=win MBFACTOR = float(1<<20) # divisor to convert bytes to megabytes # URL_PREFIX_CONTINUOUS = 'http://commondatastorage.googleapis.com/chromium-browser-continuous/' # URL_PREFIX_SNAPSHOTS = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots/' # URL_PREFIX_OFFICIAL = 'http://commondatastorage.googleapis.com/chromium-browser-official/' AVAILABLE_CHANELLS = {'continuous':'http://commondatastorage.googleapis.com/chromium-browser-continuous/', 'snapshot':'http://commondatastorage.googleapis.com/chromium-browser-snapshots/', 'official':'http://commondatastorage.googleapis.com/chromium-browser-official/'} os_suffixes = dict( linux2=['Linux', 'linux'], linux64=['Linux_x64', 'linux64'], mac=['Mac', 'mac'], win32=['Win', 'win32'] ) def usage(): os_list = "|".join([platform_[1] for platform_ in os_suffixes.values()]) print '''Usage %s [%s] [%s] Default channell is: continuous ''' % (os.path.basename(sys.argv[0]), os_list, '|'.join(AVAILABLE_CHANELLS.keys())) if len(sys.argv) > 1 and sys.argv[1].lower() in ['-h', '--help', '/?']: usage() sys.exit(-1) platform = len(sys.argv) > 1 and sys.argv[1] or sys.platform if platform == 'darwin': platform = 'mac' channell = None if len(sys.argv) > 2: channell = sys.argv[2] if channell not in AVAILABLE_CHANELLS.keys(): print '\n***ERROR***:No such channell %s' % channell usage() sys.exit(-1) if not channell: channell = 'continuous' tmp = os.environ.get('TMP') or os.environ.get('TEMP') or '/tmp' if sys.platform == 'win32': apps_folder = r'd:\Program Files' if sys.platform == 'darwin': apps_folder = 
os.path.join(os.environ.get('HOME'), 'Applications') else: apps_folder = os.path.join(os.environ.get('HOME'), 'bin') def _reporthook2(count, blockSize, total_size, url=None): current_size = int(count*blockSize) sys.stdout.write(" %.2f MB of %.2f MB \r" % (current_size/(MBFACTOR), total_size/MBFACTOR)) def download_binary_file(url, local_fname): print ''' Downloading %s to %s...\n''' % (url, local_fname), if sys.stdout.isatty(): urllib.urlretrieve(url, local_fname,lambda nb, bs, fs, url=url: _reporthook2(nb,bs,fs,url)) sys.stdout.write('\n') else: urllib.urlretrieve(url, dst) def extract_zip_file(zipFilePath, extractDir): if not os.path.exists(extractDir): os.mkdir(extractDir) print '''Extracting %s to %s...''' % (zipFilePath, extractDir), zfile = zipfile.ZipFile(zipFilePath) uncompress_size = sum((file.file_size for file in zfile.infolist())) extracted_size = 0 print '\n' for _file in zfile.infolist(): extracted_size += _file.file_size sys.stdout.write(" %s%%\t\t%s\n" % (extracted_size * 100/uncompress_size, _file.filename)) zfile.extract(_file, path=extractDir) # ORIG zip.extractall(path=extractDir) print 'Ok' def do_osx_install(srcdir, targetdir): if os.path.exists(targetdir): print 'Target dir %s already exists! Removing...' 
shutil.rmtree(targetdir) install_script = os.popen('find '+ srcdir +' -iname install.sh').read().strip() print 'DBG install_script:', install_script os.popen('chmod +x "%s"' % install_script) cmd_install = '%s %s %s' % (pipes.quote(install_script), srcdir, targetdir) print 'DBG cmd: "%s"' % cmd_install cmd_chmod_chromium = 'find %s -name Chromium -exec chmod +x {} \;' % (targetdir) cmd_chmod_chromium_helper = 'find %s -name Chromium\ Helper -exec chmod +x {} \;' % (targetdir) for cmd in [cmd_install, cmd_chmod_chromium, cmd_chmod_chromium_helper]: proc = subprocess.Popen(cmd, shell=True) proc.wait() if proc.returncode: print "returncode " + str(proc.returncode) print ''' platform:\t%s Unpacking to dir:\t%s ''' % (platform, apps_folder) print 'Checking for latest version...', try: os_suffix = os_suffixes[platform][0] except KeyError: print '\n\nERROR:\tFirst arg should be platform name, one of: %s' % ', '.join(os_suffixes.keys()) sys.exit(-1) # use proxy if set as an ENV var proxy = os.environ.get('HTTP_PROXY') if proxy: protocol, host = proxy.split('://') print 'Using proxy', proxy http_proxy_handler = urllib2.ProxyHandler({protocol:host}) opener = urllib2.build_opener(http_proxy_handler) urllib2.install_opener(opener) else: opener = urllib2.build_opener() opener.addheaders = [('User-agent', 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24')] print 'Probing last version...', url_last_change = AVAILABLE_CHANELLS[channell] + os_suffix + '/LAST_CHANGE' print url_last_change version = opener.open(url_last_change).read().strip() print 'Current version:', version local_fname = os.path.join(tmp, 'chrome-%s_%s.zip' % (platform, version)) need_downloading = True if os.path.exists(local_fname): print '\nFile %s already exists!\nRemove it? 
(Y/n)' % local_fname ans = raw_input() if ans.lower() != 'n': print 'Deleting existing file %s' % local_fname, os.remove(local_fname) print 'Ok' else: need_downloading = False url = AVAILABLE_CHANELLS[channell] + os_suffix + '/' + version + '/chrome-' + os_suffixes[platform][1] + '.zip' print 'Downloading from:', url if (sys.version_info.minor < 6): need_downloading and os.system('wget -c %s -O %s' % (url, local_fname)) print 'Unzippping...' cmd = 'unzip -o %s -d "%s"' % (os.path.abspath(local_fname), apps_folder) print cmd os.system(cmd) else: need_downloading and download_binary_file(url, local_fname) extract_zip_file(local_fname, apps_folder) if (sys.platform=='darwin'): chrome_app_dir = os.path.join(apps_folder, 'chrome-mac') target_dir = os.path.join(apps_folder, 'Chromium') do_osx_install(chrome_app_dir, target_dir) # raw_input('removing unpacked chrome folder ' + chrome_app_dir) shutil.rmtree(chrome_app_dir) print 'Done!'
2,597
1,874
<reponame>zjzh/nova # Copyright 2012, Piston Cloud Computing, Inc. # Copyright 2012, OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import netaddr from oslo_log import log as logging from nova.scheduler import filters from nova.scheduler.filters import utils LOG = logging.getLogger(__name__) class DifferentHostFilter(filters.BaseHostFilter): """Schedule the instance on a different host from a set of instances.""" # The hosts the instances are running on doesn't change within a request run_filter_once_per_request = True RUN_ON_REBUILD = False def host_passes(self, host_state, spec_obj): affinity_uuids = spec_obj.get_scheduler_hint('different_host') if affinity_uuids: overlap = utils.instance_uuids_overlap(host_state, affinity_uuids) return not overlap # With no different_host key return True class SameHostFilter(filters.BaseHostFilter): """Schedule the instance on the same host as another instance in a set of instances. 
""" # The hosts the instances are running on doesn't change within a request run_filter_once_per_request = True RUN_ON_REBUILD = False def host_passes(self, host_state, spec_obj): affinity_uuids = spec_obj.get_scheduler_hint('same_host') if affinity_uuids: overlap = utils.instance_uuids_overlap(host_state, affinity_uuids) return overlap # With no same_host key return True class SimpleCIDRAffinityFilter(filters.BaseHostFilter): """Schedule the instance on a host with a particular cidr""" # The address of a host doesn't change within a request run_filter_once_per_request = True RUN_ON_REBUILD = False def host_passes(self, host_state, spec_obj): affinity_cidr = spec_obj.get_scheduler_hint('cidr', '/24') affinity_host_addr = spec_obj.get_scheduler_hint('build_near_host_ip') host_ip = host_state.host_ip if affinity_host_addr: affinity_net = netaddr.IPNetwork(str.join('', (affinity_host_addr, affinity_cidr))) return netaddr.IPAddress(host_ip) in affinity_net # We don't have an affinity host address. return True class _GroupAntiAffinityFilter(filters.BaseHostFilter): """Schedule the instance on a different host from a set of group hosts. """ RUN_ON_REBUILD = False def host_passes(self, host_state, spec_obj): # Only invoke the filter if 'anti-affinity' is configured instance_group = spec_obj.instance_group policy = instance_group.policy if instance_group else None if self.policy_name != policy: return True # NOTE(hanrong): Move operations like resize can check the same source # compute node where the instance is. That case, AntiAffinityFilter # must not return the source as a non-possible destination. 
if spec_obj.instance_uuid in host_state.instances.keys(): return True # The list of instances UUIDs on the given host instances = set(host_state.instances.keys()) # The list of instances UUIDs which are members of this group members = set(spec_obj.instance_group.members) # The set of instances on the host that are also members of this group servers_on_host = instances.intersection(members) rules = instance_group.rules if rules and 'max_server_per_host' in rules: max_server_per_host = rules['max_server_per_host'] else: max_server_per_host = 1 # Very old request specs don't have a full InstanceGroup with the UUID group_uuid = (instance_group.uuid if instance_group and 'uuid' in instance_group else 'n/a') LOG.debug("Group anti-affinity: check if the number of servers from " "group %(group_uuid)s on host %(host)s is less than " "%(max_server)s.", {'group_uuid': group_uuid, 'host': host_state.host, 'max_server': max_server_per_host}) # NOTE(yikun): If the number of servers from same group on this host # is less than the max_server_per_host, this filter will accept the # given host. In the default case(max_server_per_host=1), this filter # will accept the given host if there are 0 servers from the group # already on this host. return len(servers_on_host) < max_server_per_host class ServerGroupAntiAffinityFilter(_GroupAntiAffinityFilter): def __init__(self): self.policy_name = 'anti-affinity' super(ServerGroupAntiAffinityFilter, self).__init__() class _GroupAffinityFilter(filters.BaseHostFilter): """Schedule the instance on to host from a set of group hosts. 
""" RUN_ON_REBUILD = False def host_passes(self, host_state, spec_obj): # Only invoke the filter if 'affinity' is configured policies = (spec_obj.instance_group.policies if spec_obj.instance_group else []) if self.policy_name not in policies: return True group_hosts = (spec_obj.instance_group.hosts if spec_obj.instance_group else []) LOG.debug("Group affinity: check if %(host)s in " "%(configured)s", {'host': host_state.host, 'configured': group_hosts}) if group_hosts: return host_state.host in group_hosts # No groups configured return True class ServerGroupAffinityFilter(_GroupAffinityFilter): def __init__(self): self.policy_name = 'affinity' super(ServerGroupAffinityFilter, self).__init__()
2,511
769
<filename>cube-usb-cdc/main.c #include "main.h" USBD_HandleTypeDef USBD_Device; uint8_t CDC_BUF[128]; int main(void) { // Initialize the UART UARTx_Init(USART2,USART_TX,1382400); printf("--- STM32L151RDT6 ---\r\n"); // Initialize the CDC Application USBD_Init(&USBD_Device,&USBD_CDC_Descriptor,0); // Add Supported Class USBD_RegisterClass(&USBD_Device,&USBD_CDC); // Add CDC Interface Class USBD_CDC_RegisterInterface(&USBD_Device,&USBD_CDC_fops); // Start Device Process USBD_Start(&USBD_Device); // Stuff the buffer CDC_BUF[0] = 'H'; CDC_BUF[1] = 'E'; CDC_BUF[2] = 'L'; CDC_BUF[3] = 'L'; CDC_BUF[4] = 'O'; CDC_BUF[5] = ' '; CDC_BUF[6] = 'C'; CDC_BUF[7] = 'D'; CDC_BUF[8] = 'C'; CDC_BUF[9] = '\r'; CDC_BUF[10] = '\n'; uint32_t i; while(1) { CDC_Itf_Transmit(CDC_BUF,11); for (i = 0x008FFFFF; i--; ); } }
427
852
import FWCore.ParameterSet.Config as cms # DQM file saver module dqmSaver = cms.EDAnalyzer("DQMFileSaverPB", # Name of the producer. producer = cms.untracked.string('DQM'), # Directory in which to save the files. path = cms.untracked.string('./'), # Tag, used in the filename as the third term. tag = cms.untracked.string('UNKNOWN'), # Control reference saving (default / skip / qtests / all) referenceHandling = cms.untracked.string('all'), # Control which references are saved for qtests (default: STATUS_OK) referenceRequireStatus = cms.untracked.int32(100), # If set, EvFDaqDirector is emulated and not used fakeFilterUnitMode = cms.untracked.bool(False), # Label of the stream streamLabel = cms.untracked.string("streamDQMHistograms"), runNumber = cms.untracked.int32(111), )
307
2,151
<filename>content/public/test/scoped_overscroll_modes.h // Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CONTENT_PUBLIC_TEST_SCOPED_OVERSCROLL_MODES_H_ #define CONTENT_PUBLIC_TEST_SCOPED_OVERSCROLL_MODES_H_ #include "base/macros.h" #include "content/public/browser/overscroll_configuration.h" namespace content { // Helper class to set the overscroll history navigation mode temporarily in // tests. class ScopedHistoryNavigationMode { public: explicit ScopedHistoryNavigationMode( OverscrollConfig::HistoryNavigationMode mode); ~ScopedHistoryNavigationMode(); private: DISALLOW_COPY_AND_ASSIGN(ScopedHistoryNavigationMode); }; // Helper class to set the pull-to-refresh mode temporarily in tests. class ScopedPullToRefreshMode { public: explicit ScopedPullToRefreshMode(OverscrollConfig::PullToRefreshMode mode); ~ScopedPullToRefreshMode(); private: DISALLOW_COPY_AND_ASSIGN(ScopedPullToRefreshMode); }; } // namespace content #endif // CONTENT_PUBLIC_TEST_SCOPED_OVERSCROLL_MODES_H_
385
4,111
<gh_stars>1000+ { "lang": "español", "dateFormat": "DD/MM/YYYY HH:mm", "details": "Detalles", "download": "Descargar", "empty": "vacío", "files": "Archivos", "filter": "Filtrar", "folders": "Directorios", "grid": "Cuadrícula", "icons": "Íconos", "language": "Idioma", "lastModified": "Última modificación", "name": "Nombre", "noMatch": "Sin coincidencias", "parentDirectory": "Directorio superior", "search": "buscar", "size": "Tamaño", "tree": "Arbol", "view": "Vista" }
259
349
<filename>sublimenavigationviewlibrary/src/main/java/com/appeaser/sublimenavigationviewlibrary/SublimeSwitchItemView.java /* * Copyright 2015 <NAME> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.appeaser.sublimenavigationviewlibrary; import android.content.Context; import android.content.res.ColorStateList; import android.graphics.drawable.Drawable; import android.os.Build; import android.support.v4.content.ContextCompat; import android.support.v4.graphics.drawable.DrawableCompat; import android.support.v7.widget.SwitchCompat; import android.util.AttributeSet; import android.view.LayoutInflater; /** * View implementation for Switch menu item. * * Created by Vikram. 
*/ public class SublimeSwitchItemView extends SublimeBaseItemView { private SwitchCompat mSwitch; public SublimeSwitchItemView(Context context) { this(context, null); } public SublimeSwitchItemView(Context context, AttributeSet attrs) { this(context, attrs, 0); } public SublimeSwitchItemView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); LayoutInflater.from(context).inflate(R.layout.sublime_menu_switch_item_content, this, true); initializeViews(); } @Override protected void initializeViews() { super.initializeViews(); mSwitch = (SwitchCompat) findViewById(R.id.switch_ctrl); } @Override public void initialize(SublimeBaseMenuItem itemData, SublimeThemer themer) { setCheckableItemTintList(themer.getCheckableItemTintList()); super.initialize(itemData, themer); } @Override public void setItemTextColor(ColorStateList textColor) { super.setItemTextColor(textColor); mSwitch.setTextColor(textColor); } @Override public void setEnabled(boolean enabled) { super.setEnabled(enabled); mSwitch.setEnabled(enabled); } @Override public void setItemChecked(boolean checked) { super.setItemChecked(checked); mSwitch.setChecked(checked); } public void setCheckableItemTintList(ColorStateList checkableItemTintList) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) { // Covers android M (23) Drawable dTrack = getResources().getDrawable(R.drawable.snv_switch_track_material, getContext().getTheme()); Drawable dThumb = getResources().getDrawable(R.drawable.snv_switch_thumb_material_anim, getContext().getTheme()); if (dTrack != null && dThumb != null) { DrawableCompat.setTintList(dTrack, checkableItemTintList); DrawableCompat.setTintList(dThumb, checkableItemTintList); mSwitch.setTrackDrawable(dTrack); mSwitch.setThumbDrawable(dThumb); } } else { Drawable dTrack = ContextCompat.getDrawable(getContext(), R.drawable.snv_switch_track); Drawable dThumb = ContextCompat.getDrawable(getContext(), R.drawable.switch_thumb_pre_lollipop); if (dTrack 
!= null && dThumb != null) { dTrack = DrawableCompat.wrap(dTrack); DrawableCompat.setTintList(dTrack, checkableItemTintList); dTrack.setAlpha(85 /* 0.3f */); dThumb = DrawableCompat.wrap(dThumb); DrawableCompat.setTintList(dThumb, checkableItemTintList); mSwitch.setTrackDrawable(dTrack); mSwitch.setThumbDrawable(dThumb); } } } }
1,570
2,151
/* * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include "modules/rtp_rtcp/source/rtcp_packet/app.h" #include "test/gmock.h" #include "test/gtest.h" #include "test/rtcp_packet_parser.h" namespace webrtc { namespace { using ::testing::ElementsAreArray; using ::testing::make_tuple; using ::webrtc::rtcp::App; constexpr uint32_t kName = ((uint32_t)'n' << 24) | ((uint32_t)'a' << 16) | ((uint32_t)'m' << 8) | (uint32_t)'e'; constexpr uint8_t kSubtype = 0x1e; constexpr uint32_t kSenderSsrc = 0x12345678; constexpr uint8_t kData[] = {'t', 'e', 's', 't', 'd', 'a', 't', 'a'}; constexpr uint8_t kVersionBits = 2 << 6; constexpr uint8_t kPaddingBit = 1 << 5; // clang-format off constexpr uint8_t kPacketWithoutData[] = { kVersionBits | kSubtype, App::kPacketType, 0x00, 0x02, 0x12, 0x34, 0x56, 0x78, 'n', 'a', 'm', 'e'}; constexpr uint8_t kPacketWithData[] = { kVersionBits | kSubtype, App::kPacketType, 0x00, 0x04, 0x12, 0x34, 0x56, 0x78, 'n', 'a', 'm', 'e', 't', 'e', 's', 't', 'd', 'a', 't', 'a'}; constexpr uint8_t kTooSmallPacket[] = { kVersionBits | kSubtype, App::kPacketType, 0x00, 0x01, 0x12, 0x34, 0x56, 0x78}; constexpr uint8_t kPaddingSize = 1; constexpr uint8_t kPacketWithUnalignedPayload[] = { kVersionBits | kPaddingBit | kSubtype, App::kPacketType, 0x00, 0x03, 0x12, 0x34, 0x56, 0x78, 'n', 'a', 'm', 'e', 'd', 'a', 't', kPaddingSize}; // clang-format on } // namespace TEST(RtcpPacketAppTest, CreateWithoutData) { App app; app.SetSsrc(kSenderSsrc); app.SetSubType(kSubtype); app.SetName(kName); rtc::Buffer raw = app.Build(); EXPECT_THAT(make_tuple(raw.data(), raw.size()), ElementsAreArray(kPacketWithoutData)); } TEST(RtcpPacketAppTest, 
ParseWithoutData) { App parsed; EXPECT_TRUE(test::ParseSinglePacket(kPacketWithoutData, &parsed)); EXPECT_EQ(kSenderSsrc, parsed.ssrc()); EXPECT_EQ(kSubtype, parsed.sub_type()); EXPECT_EQ(kName, parsed.name()); EXPECT_EQ(0u, parsed.data_size()); } TEST(RtcpPacketAppTest, CreateWithData) { App app; app.SetSsrc(kSenderSsrc); app.SetSubType(kSubtype); app.SetName(kName); app.SetData(kData, sizeof(kData)); rtc::Buffer raw = app.Build(); EXPECT_THAT(make_tuple(raw.data(), raw.size()), ElementsAreArray(kPacketWithData)); } TEST(RtcpPacketAppTest, ParseWithData) { App parsed; EXPECT_TRUE(test::ParseSinglePacket(kPacketWithData, &parsed)); EXPECT_EQ(kSenderSsrc, parsed.ssrc()); EXPECT_EQ(kSubtype, parsed.sub_type()); EXPECT_EQ(kName, parsed.name()); EXPECT_THAT(make_tuple(parsed.data(), parsed.data_size()), ElementsAreArray(kData)); } TEST(RtcpPacketAppTest, ParseFailsOnTooSmallPacket) { App parsed; EXPECT_FALSE(test::ParseSinglePacket(kTooSmallPacket, &parsed)); } TEST(RtcpPacketAppTest, ParseFailsOnUnalignedPayload) { App parsed; EXPECT_FALSE(test::ParseSinglePacket(kPacketWithUnalignedPayload, &parsed)); } } // namespace webrtc
1,469
2,023
<filename>recipes/Python/576757_Shelllike_datprocessing_using_Popen_pipes/recipe-576757.py from io import BytesIO from subprocess import Popen, PIPE from os import pipe, fdopen from threading import Thread class Pipeable( object ): def __init__( self ): self.output = None def _output( self, input = None ): return self.output def __or__( self, right ): if not isinstance( right, Pipeable ): return NotImplemented self.output = right._output( self._output() ) return self class Shell( Pipeable ): def __init__( self, cmd ): Pipeable.__init__( self ) self.cmd = cmd def _output( self, input = None ): return Popen( self.cmd, stdin = input, stdout = PIPE ).stdout class ThreadedFilter( Pipeable ): def __init__( self, filter ): self.filter = filter _pipe = pipe() self.pipe = fdopen( _pipe[ 1 ], 'w' ) self.output = fdopen( _pipe[ 0 ], 'r' ) def _output( self, input = None ): def _target(): _out = self.pipe for line in input: _out.write( self.filter( line ) ) Thread( target = _target ).start() return self.output class CachedFilter( Pipeable ): def __init__( self, filter ): self.filter = filter def _output( self, input = None ): output = BytesIO() for line in input: output.write( self.filter( line ) ) output.seek( 0 ) return output class Output( Pipeable ): def __init__( self, output ): self.output = output class Print( object ): def __ror__( self, left ): print left.output.read() class Write( object ): def __init__( self, path ): self.path = path def __ror__( self, left ): f = open( self.path, 'w' ) while True: buf = left.output.read( 1024 ) if not buf: break f.write( buf ) f.close() if __name__ == '__main__': Output( open( "/etc/passwd" ) ) | Shell( "rev" ) | ThreadedFilter( lambda str : str[::-1] ) | CachedFilter( lambda x : x ) | Print()
692
598
<reponame>cmendes0101/contrib-drivers /* * Copyright 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.things.contrib.driver.button; import android.app.Activity; import android.os.Bundle; import android.util.Log; import android.view.KeyEvent; import com.google.android.things.contrib.driver.button.Button.LogicState; import com.google.android.things.pio.Gpio; import org.mockito.Mockito; import java.io.IOException; import java.util.concurrent.BlockingQueue; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.TimeUnit; public class ButtonTestActivity extends Activity { private static final String TAG = "ButtonTestActivity"; public static final int KEYCODE = KeyEvent.KEYCODE_ENTER; private Gpio mGpio; private Button mButton; private ButtonInputDriver mInputDriver; private BlockingQueue<KeyEvent> mKeyDownEvents; private BlockingQueue<KeyEvent> mKeyUpEvents; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mGpio = Mockito.mock(Gpio.class); try { mButton = new Button(mGpio, LogicState.PRESSED_WHEN_HIGH); } catch (IOException e) { throw new RuntimeException("Failed to initialize Button", e); } mInputDriver = new ButtonInputDriver(mButton, KEYCODE); mInputDriver.register(); mKeyDownEvents = new LinkedBlockingQueue<>(); mKeyUpEvents = new LinkedBlockingQueue<>(); } @Override protected void onDestroy() { super.onDestroy(); try { mInputDriver.close(); } catch (IOException e) 
{ Log.e(TAG, "Error closing Button", e); } } void sendMockButtonEvent(boolean press) { mButton.performButtonEvent(press); } KeyEvent getNextKeyDownEvent() throws InterruptedException { return mKeyDownEvents.poll(1L, TimeUnit.SECONDS); } KeyEvent getNextKeyUpEvent() throws InterruptedException { return mKeyUpEvents.poll(1L, TimeUnit.SECONDS); } @Override public boolean onKeyDown(int keyCode, KeyEvent event) { mKeyDownEvents.offer(event); return super.onKeyDown(keyCode, event); } @Override public boolean onKeyUp(int keyCode, KeyEvent event) { mKeyUpEvents.offer(event); return super.onKeyUp(keyCode, event); } }
1,070
8,747
<filename>components/esp_wifi/include/esp_wifi_default.h // Copyright 2019 Espressif Systems (Shanghai) PTE LTD // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #ifndef _ESP_WIFI_DEFAULT_H #define _ESP_WIFI_DEFAULT_H #ifdef __cplusplus extern "C" { #endif /** * @brief Attaches wifi station interface to supplied netif * * @param esp_netif instance to attach the wifi station to * * @return * - ESP_OK on success * - ESP_FAIL if attach failed */ esp_err_t esp_netif_attach_wifi_station(esp_netif_t *esp_netif); /** * @brief Attaches wifi soft AP interface to supplied netif * * @param esp_netif instance to attach the wifi AP to * * @return * - ESP_OK on success * - ESP_FAIL if attach failed */ esp_err_t esp_netif_attach_wifi_ap(esp_netif_t *esp_netif); /** * @brief Sets default wifi event handlers for STA interface * * @return * - ESP_OK on success, error returned from esp_event_handler_register if failed */ esp_err_t esp_wifi_set_default_wifi_sta_handlers(void); /** * @brief Sets default wifi event handlers for AP interface * * @return * - ESP_OK on success, error returned from esp_event_handler_register if failed */ esp_err_t esp_wifi_set_default_wifi_ap_handlers(void); /** * @brief Clears default wifi event handlers for supplied network interface * * @param esp_netif instance of corresponding if object * * @return * - ESP_OK on success, error returned from esp_event_handler_register if failed */ esp_err_t esp_wifi_clear_default_wifi_driver_and_handlers(void *esp_netif); /** * 
@brief Creates default WIFI AP. In case of any init error this API aborts. * * @note The API creates esp_netif object with default WiFi access point config, * attaches the netif to wifi and registers default wifi handlers. * * @return pointer to esp-netif instance */ esp_netif_t* esp_netif_create_default_wifi_ap(void); /** * @brief Creates default WIFI STA. In case of any init error this API aborts. * * @note The API creates esp_netif object with default WiFi station config, * attaches the netif to wifi and registers default wifi handlers. * * @return pointer to esp-netif instance */ esp_netif_t* esp_netif_create_default_wifi_sta(void); /** * @brief Destroys default WIFI netif created with esp_netif_create_default_wifi_...() API. * * @param[in] esp_netif object to detach from WiFi and destroy * * @note This API unregisters wifi handlers and detaches the created object from the wifi. * (this function is a no-operation if esp_netif is NULL) */ void esp_netif_destroy_default_wifi(void *esp_netif); /** * @brief Creates esp_netif WiFi object based on the custom configuration. * * @attention This API DOES NOT register default handlers! * * @param[in] wifi_if type of wifi interface * @param[in] esp_netif_config inherent esp-netif configuration pointer * * @return pointer to esp-netif instance */ esp_netif_t* esp_netif_create_wifi(wifi_interface_t wifi_if, esp_netif_inherent_config_t *esp_netif_config); /** * @brief Creates default STA and AP network interfaces for esp-mesh. * * Both netifs are almost identical to the default station and softAP, but with * DHCP client and server disabled. Please note that the DHCP client is typically * enabled only if the device is promoted to a root node. * * Returns created interfaces which could be ignored setting parameters to NULL * if an application code does not need to save the interface instances * for further processing. 
* * @param[out] p_netif_sta pointer where the resultant STA interface is saved (if non NULL) * @param[out] p_netif_ap pointer where the resultant AP interface is saved (if non NULL) * * @return ESP_OK on success */ esp_err_t esp_netif_create_default_wifi_mesh_netifs(esp_netif_t **p_netif_sta, esp_netif_t **p_netif_ap); #ifdef __cplusplus } #endif #endif //_ESP_WIFI_DEFAULT_H
1,367
3,269
<filename>C++/unique-number-of-occurrences.cpp // Time: O(n) // Space: O(n) class Solution { public: bool uniqueOccurrences(vector<int>& arr) { unordered_map<int, int> count; for (const auto& a : arr) { ++count[a]; } unordered_set<int> lookup; for (const auto& [k, v] : count) { if (lookup.count(v)) { return false; } lookup.emplace(v); } return true; } };
263
953
int main() { // error: unary minus requires arithmetic type operand -""; // error: unary plus requires arithmetic type operand +""; // error: bit-complement requires integral type operand ~""; }
62
1,561
import com.cossacklabs.themis.*; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.Arrays; import java.util.Base64; public class main { static Charset charset = StandardCharsets.UTF_8; public static void main(final String[] args) throws NullArgumentException, SecureMessageWrapException, IOException, SecureSessionException, SecureCellException, InvalidArgumentException { encryptDataForStoring(); encryptDataForMessaging(); // tests with Themis Interactive simulator // setup Themis IS first: // https://themis.cossacklabs.com/interactive-simulator/setup/ // SMessageClient.runSMessageWithThemisInteractiveSimulator(); // SSessionClient.runSSessionWithThemisInteractiveSimulator(); } static void encryptDataForStoring() throws SecureCellException, NullArgumentException, InvalidArgumentException { byte[] message = "data to encrypt".getBytes(charset); byte[] optionalContext = "some context".getBytes(charset); byte[] password = "<PASSWORD>".getBytes(charset); System.out.println("Running SecureCell example"); SecureCell.Seal sc = SecureCell.SealWithPassphrase(password); byte[] encryptedData = sc.encrypt(message, optionalContext); String encryptedDataString = Base64.getEncoder().encodeToString(encryptedData); System.out.println("Encrypted encoded data = \n" + encryptedDataString); byte[] decodedEncryptedString = Base64.getDecoder().decode(encryptedDataString); byte[] unprotected = sc.decrypt(decodedEncryptedString, optionalContext); String decryptedString = new String(unprotected); System.out.println("Decrypted data = "+ decryptedString); } static void encryptDataForMessaging() throws UnsupportedEncodingException, NullArgumentException, SecureMessageWrapException { // keys can be generated using KeypairGenerator String clientPrivateKey = "<KEY>"; String serverPublicKey = "<KEY>"; String message = "message to send"; System.out.println("Running SecureMessage 
example"); PrivateKey privateKey = new PrivateKey(Base64.getDecoder().decode(clientPrivateKey.getBytes(charset.name()))); PublicKey publicKey = new PublicKey(Base64.getDecoder().decode(serverPublicKey.getBytes(charset.name()))); System.out.println("privateKey1 = " + Arrays.toString(privateKey.toByteArray())); System.out.println("publicKey1 = " + Arrays.toString(publicKey.toByteArray())); final SecureMessage sm = new SecureMessage(privateKey, publicKey); byte[] wrappedMessage = sm.wrap(message.getBytes(charset)); String encodedMessage = Base64.getEncoder().encodeToString(wrappedMessage); System.out.println("EncodedMessage = " + encodedMessage); byte[] wrappedMessageFromB64 = Base64.getDecoder().decode(encodedMessage); String decodedMessage = new String(sm.unwrap(wrappedMessageFromB64), charset); System.out.println("DecodedMessage = " + decodedMessage); } }
929
348
{"nom":"<NAME>","dpt":"Haute-Savoie","inscrits":165,"abs":21,"votants":144,"blancs":20,"nuls":7,"exp":117,"res":[{"panneau":"1","voix":66},{"panneau":"2","voix":51}]}
71
10,351
"""allow nulls for metric values Revision ID: 181f10493468 Revises: 90e64c465722 Create Date: 2019-07-10 22:40:18.787993 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = "181f10493468" down_revision = "90e64c465722" branch_labels = None depends_on = None def upgrade(): with op.batch_alter_table("metrics") as batch_op: batch_op.alter_column("value", type_=sa.types.Float(precision=53), nullable=False) batch_op.add_column( sa.Column( "is_nan", sa.Boolean(create_constraint=False), nullable=False, server_default="0" ) ) batch_op.drop_constraint(constraint_name="metric_pk", type_="primary") batch_op.create_primary_key( constraint_name="metric_pk", columns=["key", "timestamp", "step", "run_uuid", "value", "is_nan"], ) def downgrade(): pass
413
892
{ "schema_version": "1.2.0", "id": "GHSA-fq4g-p58q-8f6g", "modified": "2022-05-01T06:43:25Z", "published": "2022-05-01T06:43:25Z", "aliases": [ "CVE-2006-0887" ], "details": "Eval injection vulnerability in sessions.inc in PHP Base Library (PHPLib) before 7.4a, when index.php3 from the PHPLib distribution is available on the server, allows remote attackers to execute arbitrary PHP code by including a base64-encoded representation of the code in a cookie. NOTE: this description was significantly updated on 20060605 to reflect new details after an initial vague advisory.", "severity": [ ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2006-0887" }, { "type": "WEB", "url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/24873" }, { "type": "WEB", "url": "http://secunia.com/advisories/16902" }, { "type": "WEB", "url": "http://securitytracker.com/id?1016123" }, { "type": "WEB", "url": "http://sourceforge.net/project/shownotes.php?group_id=31885&release_id=396091" }, { "type": "WEB", "url": "http://www.gulftech.org/?node=research&article_id=00107-03052006" }, { "type": "WEB", "url": "http://www.osvdb.org/23466" }, { "type": "WEB", "url": "http://www.securityfocus.com/bid/16801" }, { "type": "WEB", "url": "http://www.vupen.com/english/advisories/2006/0720" } ], "database_specific": { "cwe_ids": [ "CWE-94" ], "severity": "HIGH", "github_reviewed": false } }
756
374
<reponame>bamwu/cocotron #import <Foundation/Foundation.h> @interface DemoContext : NSObject @end @interface DemoContext (subclass) - (size_t)pixelsWide; - (size_t)pixelsHigh; - (size_t)bitsPerComponent; - (size_t)bitsPerPixel; - (size_t)bytesPerRow; - (CGBitmapInfo)bitmapInfo; - (void *)bytes; - (void)setStrokeColor:(float)r:(float)g:(float)b:(float)a; - (void)setFillColor:(float)r:(float)g:(float)b:(float)a; - (void)setBlendMode:(CGBlendMode)mode; - (void)setShadowBlur:(float)value; - (void)setShadowOffsetX:(float)value; - (void)setShadowOffsetY:(float)value; - (void)setShadowColor:(float)r:(float)g:(float)b:(float)a; - (void)setPathDrawingMode:(CGPathDrawingMode)mode; - (void)setLineWidth:(float)width; - (void)setDashPhase:(float)phase; - (void)setDashLength:(float)value; - (void)setFlatness:(float)value; - (void)setScaleX:(float)value; - (void)setScaleY:(float)value; - (void)setRotation:(float)value; - (void)setShouldAntialias:(BOOL)value; - (void)setInterpolationQuality:(CGInterpolationQuality)value; - (void)setImageData:(NSData *)data; - (void)setPDFData:(NSData *)data; - (void)drawClassic; - (void)drawBitmapImageRep; - (void)drawPDF; @end
468
507
<filename>test/lib/testMayaUsdInteractiveLayerEditorCommands.py #!/usr/bin/env mayapy # # Copyright 2021 Autodesk # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import os import platform import unittest import testUtils from maya import cmds import maya.mel as mel import fixturesUtils class MayaUsdInteractiveLayerEditorCommandsTestCase(unittest.TestCase): """Test interactive commands that need the UI of the layereditor.""" @classmethod def setUpClass(cls): inputPath = fixturesUtils.setUpClass(__file__, initializeStandalone=False, loadPlugin=False) cls._baselineDir = os.path.join(inputPath,'VP2RenderDelegatePrimPathTest', 'baseline') cls._testDir = os.path.abspath('.') cmds.loadPlugin('mayaUsdPlugin') mel.eval('mayaUsdLayerEditorWindow mayaUsdLayerEditor') def samefile(self, path1, path2): if platform.system() == 'Windows': return os.path.normcase(os.path.normpath(path1)) == os.path.normcase(os.path.normpath(path2)) else: return os.path.samefile(path1, path2) def testCreateStageFromFileWithInvalidUsd(self): # We cannot directly call the 'mayaUsdCreateStageFromFile' # as it opens a file dialog to choose the scene. So instead # we can call what it does once the file is choose. # Note: on ballFilePath we replace \ with / to stop the \ as # being interpreted. 
ballFilePath = os.path.normpath(testUtils.getTestScene('ballset', 'StandaloneScene', 'invalid_layer.usda')).replace('\\', '/') mel.eval('source \"mayaUsd_createStageFromFile.mel\"') shapeNode = mel.eval('mayaUsd_createStageFromFilePath(\"'+ballFilePath+'\")') mayaSel = cmds.ls(sl=True) self.assertEqual(1, len(mayaSel)) nt = cmds.nodeType(shapeNode) self.assertEqual('mayaUsdProxyShape', nt) # Verify that the shape node has the correct file path. filePathAttr = cmds.getAttr(shapeNode+'.filePath') self.assertTrue(self.samefile(filePathAttr, ballFilePath)) # Verify that the shape node is connected to time. self.assertTrue(cmds.isConnected('time1.outTime', shapeNode+'.time')) if __name__ == '__main__': fixturesUtils.runTests(globals())
1,024
455
<reponame>madhubandubey9/StarEngine<filename>StarEngine/jni/Actions/TimedScaleAction.h<gh_stars>100-1000 #pragma once #include "TimedAction.h" namespace star { class TimedScaleAction : public TimedAction { public: TimedScaleAction( float32 seconds, float32 begin, float32 end, const std::function<void()> & callback = nullptr ); TimedScaleAction( const tstring & name, float32 seconds, float32 begin, float32 end, const std::function<void()> & callback = nullptr ); virtual ~TimedScaleAction(void); virtual void Restart(); protected: virtual void Update(const Context& context); float32 m_CurrentSeconds, m_BeginValue, m_EndValue; private: TimedScaleAction & operator=(const TimedScaleAction&); TimedScaleAction & operator=(TimedScaleAction&&); TimedScaleAction(const TimedScaleAction&); TimedScaleAction(TimedScaleAction&&); }; }
363
1,333
<filename>whale/src/android/art/art_jni_trampoline.h #ifndef WHALE_ANDROID_ART_JNI_TRAMPOLINE_H_ #define WHALE_ANDROID_ART_JNI_TRAMPOLINE_H_ #include "android/art/art_hook_param.h" namespace whale { namespace art { void BuildJniClosure(ArtHookParam *param); } // namespace art } // namespace whale #endif // WHALE_ANDROID_ART_JNI_TRAMPOLINE_H_
150
320
/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package com.jme3.gde.materialdefinition.navigator; import com.jme3.gde.materialdefinition.MatDefDataObject; import java.util.Collection; import javax.swing.JComponent; import javax.swing.JPanel; import org.netbeans.spi.navigator.NavigatorPanel; import org.openide.explorer.ExplorerManager; import org.openide.explorer.ExplorerUtils; import org.openide.explorer.view.BeanTreeView; import org.openide.nodes.Node; import org.openide.util.Lookup; import org.openide.util.LookupEvent; import org.openide.util.LookupListener; /** * * @author Nehon */ @NavigatorPanel.Registration(mimeType = "text/jme-materialdefinition", displayName = "Material Def") @SuppressWarnings({"unchecked", "rawtypes"}) public class MatDefNavigatorPanel extends JPanel implements NavigatorPanel, ExplorerManager.Provider { /** * template for finding data in given context. */ private static final Lookup.Template<MatDefDataObject> MY_DATA = new Lookup.Template<MatDefDataObject>(MatDefDataObject.class); /** * current context to work on */ private Lookup.Result<MatDefDataObject> curContext; private Lookup lookup; /** * listener to context changes */ private LookupListener contextL; private final ExplorerManager mgr = new ExplorerManager(); /** * Creates new form MatDefNavigatorPanel */ public MatDefNavigatorPanel() { initComponents(); lookup = ExplorerUtils.createLookup(mgr, getActionMap()); } public String getDisplayHint() { return "Material definition outline view"; } @Override public String getDisplayName() { return "Bla"; } public JComponent getComponent() { return this; } public void panelActivated(Lookup context) { // lookup context and listen to result to get notified about context changes curContext = context.lookup(MY_DATA); //lookup = context; curContext.addLookupListener(getContextListener()); // get actual data and recompute content Collection<? 
extends MatDefDataObject> data = curContext.allInstances(); setNewContent(data); // ExplorerUtils.activateActions(mgr, true); } public void panelDeactivated() { Collection<? extends MatDefDataObject> data = curContext.allInstances(); if (!data.isEmpty()) { MatDefDataObject obj = (MatDefDataObject) data.iterator().next(); obj.getLookupContents().remove(this); } curContext.removeLookupListener(getContextListener()); curContext = null; mgr.setRootContext(Node.EMPTY); //ExplorerUtils.activateActions(mgr, false); } public Lookup getLookup() { // go with default activated Node strategy return lookup; } /** * *********** non - public part *********** */ private void setNewContent(Collection<? extends MatDefDataObject> newData) { if (!newData.isEmpty()) { MatDefDataObject data = (MatDefDataObject) newData.iterator().next(); data.getLookupContents().add(this); if (data.isLoaded()) { updateData(data); } else { mgr.setRootContext(Node.EMPTY); } } } /** * Accessor for listener to context */ private LookupListener getContextListener() { if (contextL == null) { contextL = new ContextListener(); } return contextL; } public ExplorerManager getExplorerManager() { return mgr; } public void updateData(MatDefDataObject data) { if (data != null) { data.getEditableFile().buildOverview(mgr); } else { mgr.setRootContext(Node.EMPTY); } } /** * Listens to changes of context and triggers proper action */ private class ContextListener implements LookupListener { public void resultChanged(LookupEvent ev) { Collection<? extends MatDefDataObject> data = (Collection<? extends MatDefDataObject>)((Lookup.Result<?> ) ev.getSource()).allInstances(); setNewContent(data); } } // end of ContextListener /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ @SuppressWarnings("unchecked") // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { jScrollPane1 = new BeanTreeView(); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 400, Short.MAX_VALUE) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addComponent(jScrollPane1, javax.swing.GroupLayout.DEFAULT_SIZE, 300, Short.MAX_VALUE) ); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private javax.swing.JScrollPane jScrollPane1; // End of variables declaration//GEN-END:variables }
2,132
17,702
<gh_stars>1000+ # ============================================================================== # Copyright (c) Microsoft. All rights reserved. # Licensed under the MIT license. See LICENSE.md file in the project root # for full license information. # ============================================================================== ''' Standard attention model. ''' from __future__ import division import cntk as C from cntk.ops.functions import Function from cntk.default_options import default_options, get_default_override, default_override_or from cntk.initializer import glorot_uniform from ..layers import Dense, Label from ..blocks import Stabilizer, _inject_name # helpers from ..sequence import PastValueWindow from warnings import warn #from .. import * # AttentionModel block def AttentionModel(attention_dim, attention_span=None, attention_axis=None, init=default_override_or(glorot_uniform()), go_backwards=default_override_or(False), enable_self_stabilization=default_override_or(True), name=''): ''' AttentionModel(attention_dim, attention_span=None, attention_axis=None, init=glorot_uniform(), go_backwards=False, enable_self_stabilization=True, name='') Layer factory function to create a function object that implements an attention model as described in Bahdanau, et al., "Neural machine translation by jointly learning to align and translate." 
''' init = get_default_override(AttentionModel, init=init) go_backwards = get_default_override(AttentionModel, go_backwards=go_backwards) enable_self_stabilization = get_default_override(AttentionModel, enable_self_stabilization=enable_self_stabilization) compatible_attention_mode = True if attention_span is None: if attention_axis is not None: raise ValueError('attention_span cannot be None when attention_axis is not None') compatible_attention_mode = False elif attention_span <= 0: raise ValueError('attention_span must be a positive value') elif attention_axis is None: raise ValueError('attention_axis cannot be None when attention_span is not None') # model parameters with default_options(bias=False): # all the projections have no bias attn_proj_enc = Stabilizer(enable_self_stabilization=enable_self_stabilization) >> Dense(attention_dim, init=init, input_rank=1) # projects input hidden state, keeping span axes intact attn_proj_dec = Stabilizer(enable_self_stabilization=enable_self_stabilization) >> Dense(attention_dim, init=init, input_rank=1) # projects decoder hidden state, but keeping span and beam-search axes intact attn_proj_tanh = Stabilizer(enable_self_stabilization=enable_self_stabilization) >> Dense(1 , init=init, input_rank=1) # projects tanh output, keeping span and beam-search axes intact attn_final_stab = Stabilizer(enable_self_stabilization=enable_self_stabilization) if compatible_attention_mode: warn('Specifying non-default values for attention_span and attention_axis has been deprecated since version 2.2. 
' 'These arguments will be removed in the future.', DeprecationWarning, stacklevel=2) # old attention function @Function def old_attention(h_enc, h_dec): history_axis = h_dec # we use history_axis wherever we pass this only for the sake of passing its axis # TODO: pull this apart so that we can compute the encoder window only once and apply it to multiple decoders # --- encoder state window (h_enc, h_enc_valid) = PastValueWindow(attention_span, axis=attention_axis, go_backwards=go_backwards)(h_enc).outputs h_enc_proj = attn_proj_enc(h_enc) # window must be broadcast to every decoder time step h_enc_proj = C.sequence.broadcast_as(h_enc_proj, history_axis) h_enc_valid = C.sequence.broadcast_as(h_enc_valid, history_axis) # --- decoder state # project decoder hidden state h_dec_proj = attn_proj_dec(h_dec) tanh_out = C.tanh(h_dec_proj + h_enc_proj) # (attention_span, attention_dim) u = attn_proj_tanh(tanh_out) # (attention_span, 1) u_masked = u + (h_enc_valid - 1) * 50 # logzero-out the unused elements for the softmax denominator TODO: use a less arbitrary number than 50 attention_weights = C.softmax(u_masked, axis=attention_axis) #, name='attention_weights') attention_weights = Label('attention_weights')(attention_weights) # now take weighted sum over the encoder state vectors h_att = C.reduce_sum(C.element_times(C.sequence.broadcast_as(h_enc, history_axis), attention_weights), axis=attention_axis) h_att = attn_final_stab(h_att) return h_att return _inject_name(old_attention, name) else: # new attention function @Function def new_attention(encoder_hidden_state, decoder_hidden_state): # encode_hidden_state: [#, e] [h] # decoder_hidden_state: [#, d] [H] unpacked_encoder_hidden_state, valid_mask = C.sequence.unpack(encoder_hidden_state, padding_value=0).outputs # unpacked_encoder_hidden_state: [#] [*=e, h] # valid_mask: [#] [*=e] projected_encoder_hidden_state = C.sequence.broadcast_as(attn_proj_enc(unpacked_encoder_hidden_state), decoder_hidden_state) # 
projected_encoder_hidden_state: [#, d] [*=e, attention_dim] broadcast_valid_mask = C.sequence.broadcast_as(C.reshape(valid_mask, (1,), 1), decoder_hidden_state) # broadcast_valid_mask: [#, d] [*=e] projected_decoder_hidden_state = attn_proj_dec(decoder_hidden_state) # projected_decoder_hidden_state: [#, d] [attention_dim] tanh_output = C.tanh(projected_decoder_hidden_state + projected_encoder_hidden_state) # tanh_output: [#, d] [*=e, attention_dim] attention_logits = attn_proj_tanh(tanh_output) # attention_logits = [#, d] [*=e, 1] minus_inf = C.constant(-1e+30) masked_attention_logits = C.element_select(broadcast_valid_mask, attention_logits, minus_inf) # masked_attention_logits = [#, d] [*=e] attention_weights = C.softmax(masked_attention_logits, axis=0) attention_weights = Label('attention_weights')(attention_weights) # attention_weights = [#, d] [*=e] attended_encoder_hidden_state = C.reduce_sum(attention_weights * C.sequence.broadcast_as(unpacked_encoder_hidden_state, attention_weights), axis=0) # attended_encoder_hidden_state = [#, d] [1, h] output = attn_final_stab(C.reshape(attended_encoder_hidden_state, (), 0, 1)) # output = [#, d], [h] return output return _inject_name(new_attention, name)
2,795
796
<reponame>liudonghua123/9-Patch-Resizer<gh_stars>100-1000 /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Copyright 2013-2015 Redwarp */ package net.redwarp.tool.resizer.views; import net.iharder.dnd.FileDrop; import net.redwarp.tool.resizer.misc.Configuration; import net.redwarp.tool.resizer.misc.Localization; import net.redwarp.tool.resizer.misc.NameValidator; import net.redwarp.tool.resizer.table.Operation; import net.redwarp.tool.resizer.table.ResultTable; import net.redwarp.tool.resizer.worker.ImageScaler; import net.redwarp.tool.resizer.worker.ScreenDensity; import java.awt.*; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.io.File; import java.io.FilenameFilter; import java.util.ArrayList; import java.util.List; import java.util.Locale; import java.util.Vector; import javax.swing.*; import javax.swing.border.EmptyBorder; @SuppressWarnings("serial") public class MainWindow extends JFrame { private ImageIcon blueArrow, redArrow; private ImageIcon blueArrowSmall, redArrowSmall; private JButton xhdpiButton; private ResultTable resultTable; private JLabel instructionLabel; private JMenuItem mntmClear; private final Action action = new SwingAction(); private JComboBox<ScreenDensity> inputDensityChoice; // private JFileChooser fileChooser; private FileDialog fileDialog; public MainWindow() { this.setSize(new Dimension(550, 400)); 
this.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); this.setTitle(Localization.get("app_name")); List<Image> icons = new ArrayList<Image>(); icons.add(Toolkit.getDefaultToolkit().getImage( MainWindow.class.getResource("/img/icon_512.png"))); icons.add(Toolkit.getDefaultToolkit().getImage( MainWindow.class.getResource("/img/icon_256.png"))); icons.add(Toolkit.getDefaultToolkit().getImage( MainWindow.class.getResource("/img/icon_128.png"))); icons.add(Toolkit.getDefaultToolkit().getImage( MainWindow.class.getResource("/img/icon_64.png"))); icons.add(Toolkit.getDefaultToolkit().getImage( MainWindow.class.getResource("/img/icon_32.png"))); icons.add(Toolkit.getDefaultToolkit().getImage( MainWindow.class.getResource("/img/icon_16.png"))); this.setIconImages(icons); this.blueArrow = new ImageIcon( MainWindow.class.getResource("/img/blue_big.png")); this.redArrow = new ImageIcon( MainWindow.class.getResource("/img/red_big.png")); this.blueArrowSmall = new ImageIcon( MainWindow.class.getResource("/img/blue_small.png")); this.redArrowSmall = new ImageIcon( MainWindow.class.getResource("/img/red_small.png")); this.getContentPane().setLayout(new CardLayout(0, 0)); fileDialog = new FileDialog(this); fileDialog.setFilenameFilter(new FilenameFilter() { @Override public boolean accept(File dir, String name) { return NameValidator.isFilenameValid(name); } }); fileDialog.setMultipleMode(true); fileDialog.setTitle(Localization.get("image_types")); this.getContentPane().add(createInputPanel(), "input"); this.getContentPane().add(createOutputPanel(), "output"); this.setMenuBar(); } private JPanel createOutputPanel() { JPanel outputPanel = new JPanel(); outputPanel.setLayout(new BorderLayout(0, 0)); JTextArea textArea = new JTextArea(); textArea.setLineWrap(true); textArea.setEditable(false); this.resultTable = new ResultTable(); JScrollPane scrollPane = new JScrollPane(this.resultTable); scrollPane 
.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER); scrollPane .setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS); outputPanel.add(scrollPane, BorderLayout.CENTER); FileDrop.Listener<Container> dropListener = new FileDrop.Listener<Container>() { @Override public void filesDropped(Container source, File[] files) { createScaleJobs(files); } @Override public void dragEnter(Container source) { MainWindow.this.xhdpiButton.setSelected(true); MainWindow.this.instructionLabel .setIcon(MainWindow.this.redArrowSmall); } @Override public void dragExit(Container source) { MainWindow.this.xhdpiButton.setSelected(false); MainWindow.this.instructionLabel .setIcon(MainWindow.this.blueArrowSmall); } }; new FileDrop<Container>(this.getContentPane(), null, dropListener); new FileDrop<Container>(outputPanel, null, dropListener); this.instructionLabel = new JLabel(""); this.instructionLabel.setIcon(this.blueArrowSmall); this.instructionLabel.setBorder(new EmptyBorder(4, 4, 4, 4)); this.instructionLabel.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { displayImagePicker(); } }); outputPanel.add(this.instructionLabel, BorderLayout.SOUTH); new FileDrop<Container>(textArea, null, dropListener); return outputPanel; } private void displayImagePicker() { fileDialog.setVisible(true); File[] files = fileDialog.getFiles(); if (files != null) { createScaleJobs(files); } } private JPanel createInputPanel() { JPanel inputPanel = new JPanel(); inputPanel.setPreferredSize(new Dimension(10, 140)); this.xhdpiButton = new JButton(String.format(Locale.getDefault(), Localization.get("xhdpi"), Configuration.getSettings().getDefaultInputDensity().getName())); this.xhdpiButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent arg0) { } }); inputPanel.setLayout(new BorderLayout(0, 0)); this.xhdpiButton.setBorderPainted(false); this.xhdpiButton.setFocusPainted(false); 
this.xhdpiButton.setVerticalTextPosition(SwingConstants.BOTTOM); this.xhdpiButton.setHorizontalTextPosition(SwingConstants.CENTER); this.xhdpiButton.setHorizontalAlignment(SwingConstants.CENTER); this.xhdpiButton.setIcon(this.blueArrow); this.xhdpiButton.setSelectedIcon(this.redArrow); this.xhdpiButton.setBorder(null); this.xhdpiButton.setContentAreaFilled(false); inputPanel.add(this.xhdpiButton, BorderLayout.CENTER); this.xhdpiButton.addMouseListener( new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { displayImagePicker(); } }); JPanel optionPanel = new JPanel(); optionPanel.setLayout(new BoxLayout(optionPanel, BoxLayout.PAGE_AXIS)); optionPanel.add(Box.createVerticalGlue()); JLabel inputLabel = new JLabel(Localization.get("input_density")); inputLabel.setAlignmentX(Component.LEFT_ALIGNMENT); optionPanel.add(inputLabel); inputDensityChoice = new JComboBox<ScreenDensity>( new Vector<ScreenDensity>(Configuration.getSettings().getSupportedScreenDensity())); inputDensityChoice.setSelectedItem(Configuration.getSettings().getDefaultInputDensity()); inputDensityChoice.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent actionEvent) { JComboBox box = (JComboBox) actionEvent.getSource(); ScreenDensity selectedDensity = (ScreenDensity) box.getSelectedItem(); Configuration.getSettings().setDefaultInputDensity(selectedDensity); xhdpiButton.setText(String.format(Locale.getDefault(), Localization.get("xhdpi"), selectedDensity.getName())); } }); inputDensityChoice.setAlignmentX(Component.LEFT_ALIGNMENT); inputDensityChoice.setAlignmentY(Component.BOTTOM_ALIGNMENT); inputDensityChoice.setPreferredSize(new Dimension(1, 10)); optionPanel.add(inputDensityChoice); optionPanel.add(Box.createVerticalGlue()); JLabel outputLabel = new JLabel(Localization.get("output_density")); optionPanel.add(outputLabel); for (final ScreenDensity density : Configuration.getSettings().getSupportedScreenDensity()) { final JCheckBox box = new 
JCheckBox(density.getName()); box.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent actionEvent) { density.setActive(box.isSelected()); } }); box.setSelected(density.isActive()); box.setAlignmentX(Component.LEFT_ALIGNMENT); optionPanel.add(box); } optionPanel.add(Box.createVerticalGlue()); final JCheckBox keepDensity = new JCheckBox(Localization.get("keep_same_density_file")); keepDensity.setToolTipText(Localization.get("keep_same_density_file_tooltip")); keepDensity.setSelected(Configuration.getSettings().shouldKeepSameDensityFile()); keepDensity.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { Configuration.getSettings().setShouldKeepSameDensityFile(keepDensity.isSelected()); } }); optionPanel.add(keepDensity); optionPanel.add(Box.createVerticalGlue()); final JButton saveButton = new JButton(Localization.get("save")); saveButton.addActionListener(new ActionListener() { @Override public void actionPerformed(ActionEvent e) { saveButton.setEnabled(false); Configuration.getSettings().save(new Runnable() { @Override public void run() { saveButton.setEnabled(true); } }); } }); saveButton.setToolTipText(Localization.get("save_tooltip")); optionPanel.add(saveButton); optionPanel.add(Box.createVerticalGlue()); optionPanel.setBorder(BorderFactory .createCompoundBorder(BorderFactory.createTitledBorder("Options"), BorderFactory .createEmptyBorder(10, 10, 10, 10))); optionPanel.setPreferredSize(new Dimension(200, -1)); inputPanel.add(optionPanel, BorderLayout.LINE_START); return inputPanel; } private void createScaleJobs(File[] files) { for (File input : files) { String filename = input.getName(); if (NameValidator.isFilenameValid(filename)) { MainWindow.this.mntmClear.setEnabled(true); CardLayout layout = (CardLayout) MainWindow.this .getContentPane().getLayout(); ScreenDensity selectedDensity = (ScreenDensity) inputDensityChoice.getSelectedItem(); 
instructionLabel.setText(String.format(Locale.getDefault(), Localization.get("xhdpi"), selectedDensity.getName())); layout.show(MainWindow.this.getContentPane(), "output"); Operation operation = new Operation(input); MainWindow.this.resultTable.addOperation(operation); ImageScaler scaler = new ImageScaler(operation, Configuration.getSettings() .getDefaultInputDensity()) { @Override protected void process( List<Operation> chunks) { for (Operation operation : chunks) { MainWindow.this.resultTable .notifyChange(operation); } } }; scaler.post(); } } } private void setMenuBar() { JMenuBar menuBar = new JMenuBar(); this.setJMenuBar(menuBar); JMenu mnEdit = new JMenu(Localization.get("menu_edit")); menuBar.add(mnEdit); this.mntmClear = new JMenuItem(Localization.get("menu_item_clear")); this.mntmClear.setAction(this.action); this.mntmClear.setEnabled(false); mnEdit.add(this.mntmClear); JMenu mnHelp = new JMenu(Localization.get("menu_help")); menuBar.add(mnHelp); JMenuItem mntmAbout = new JMenuItem(); mntmAbout.setAction(new AboutAction()); mnHelp.add(mntmAbout); } private class AboutAction extends AbstractAction { public AboutAction() { this.putValue(NAME, Localization.get("menu_item_about")); } @Override public void actionPerformed(ActionEvent e) { new AboutDialog(MainWindow.this).setVisible(true); } } private class SwingAction extends AbstractAction { public SwingAction() { this.putValue(NAME, Localization.get("menu_item_clear")); this.putValue(SHORT_DESCRIPTION, Localization.get("menu_item_clear_desc")); } @Override public void actionPerformed(ActionEvent e) { MainWindow.this.resultTable.clear(); if (MainWindow.this.resultTable.getModel().getRowCount() == 0) { MainWindow.this.mntmClear.setEnabled(false); CardLayout layout = (CardLayout) MainWindow.this .getContentPane().getLayout(); layout.show(MainWindow.this.getContentPane(), "input"); } } } }
5,238
314
//
//     Generated by class-dump 3.5 (64 bit).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by <NAME>.
//
// NOTE(review): removed stray scrape metadata ("<filename>...<gh_stars>100-1000")
// that had been fused onto the first line; it is not valid Objective-C and broke
// compilation of this header.

#import "CDStructures.h"
#import "DVTInvalidation-Protocol.h"

@class DVTSourceExpression, DVTStackBacktrace, IDEIndexSymbol, IDERefactoring, NSMutableDictionary, NSOperationQueue, NSString;

// Reverse-engineered interface for Xcode's refactoring session object.
// This is a class-dump artifact: member semantics are inferred from names only.
@interface IDERefactoringSession : NSObject <DVTInvalidation>
{
    IDERefactoring *_refactoring;
    id _transformation;
    DVTSourceExpression *_sourceExpression;
    IDEIndexSymbol *_selectedSymbol;
    id _delegate;
    NSOperationQueue *_queue;
    NSMutableDictionary *_cachedBuildSettingsForCompilationUnits;
}

+ (void)initialize;
@property(retain) IDEIndexSymbol *selectedSymbol; // @synthesize selectedSymbol=_selectedSymbol;
@property(readonly) id transformation; // @synthesize transformation=_transformation;
@property(retain) id delegate; // @synthesize delegate=_delegate;
- (id)snapshotDescription;
- (void)cancel;
- (void)previewChanges:(id)arg1;
- (void)start;
- (void)invalidateOnQueue;
- (void)primitiveInvalidate;
- (id)initWithRefactoring:(id)arg1 transformation:(id)arg2 sourceExpression:(id)arg3;

// Remaining properties
@property(retain) DVTStackBacktrace *creationBacktrace;
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) DVTStackBacktrace *invalidationBacktrace;
@property(readonly) Class superclass;
@property(readonly, nonatomic, getter=isValid) BOOL valid;

@end
565
310
import itertools

from absl.testing import absltest
from absl.testing import parameterized

from datathon_etl_pipelines.generic_imaging.images_to_tfrecord import ConvertToTFExample

# Parameterized cases: (test_case_name, image_format, input_join, expected_list).
# Each input_join pairs a path with the images/labels produced by the upstream
# CoGroupByKey; expected_list is the flattened tf.train.Example content.
tf_example_conversion_cases = [
    ('one_to_one_jpg', 'jpg',
     (u'path_value',
      {'images': [b'bytestring'],
       'labels': [{u'a': 1, u'b': 3.0, u'c': u'd'}]}),
     [{u'path': b'path_value', u'jpg_bytes': b'bytestring',
       u'a': 1, u'b': 3.0, u'c': b'd'}]),
    ('one_to_one_png', 'png',
     (u'path_value',
      {'images': [b'bytestring'],
       'labels': [{u'a': 1, u'b': 3.0, u'c': u'd'}]}),
     [{u'path': b'path_value', u'png_bytes': b'bytestring',
       u'a': 1, u'b': 3.0, u'c': b'd'}]),
    # An empty labels list must yield no output (inner-join semantics).
    ('inner_join_property', 'png',
     (u'path_value', {'images': [b'bytestring'], 'labels': []}),
     []),
    # Multiple labels fan out into one example per label (cartesian product).
    ('cartesian_product_property', 'jpg',
     (u'path_value',
      {'images': [b'bytestring'],
       'labels': [{u'a': 1, u'b': 3.0, u'c': u'd'},
                  {u'x': -3, u'y': 4}]}),
     [{u'path': b'path_value', u'jpg_bytes': b'bytestring',
       u'a': 1, u'b': 3.0, u'c': b'd'},
      {u'path': b'path_value', u'jpg_bytes': b'bytestring',
       u'x': -3, u'y': 4}]),
]


class ConvertToTFExampleTest(parameterized.TestCase):
  """Tests for the ConvertToTFExample DoFn."""

  def tfexample_to_dict(self, tfexample):
    """Flattens a tf.train.Example into {feature_name: single_value}.

    Asserts that each feature carries exactly one value across its
    int64/float/bytes lists.
    """
    flattened = {}
    for feature_name, feature in tfexample.features.feature.items():
      candidates = list(
          itertools.chain(feature.int64_list.value, feature.float_list.value,
                          feature.bytes_list.value))
      self.assertLen(candidates, 1)
      flattened[feature_name] = candidates[0]
    return flattened

  @parameterized.named_parameters(*tf_example_conversion_cases)
  def test_conversion(self, image_format, input_join, expected_list):
    dofn = ConvertToTFExample(image_format)
    actual = [self.tfexample_to_dict(ex) for ex in dofn.process(input_join)]
    self.assertSameElements(actual, expected_list)


if __name__ == '__main__':
  absltest.main()
2,359
14,668
// Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.android_webview; import androidx.annotation.NonNull; import org.chromium.base.annotations.CalledByNative; import org.chromium.base.annotations.JNINamespace; import org.chromium.base.annotations.NativeMethods; import org.chromium.base.task.PostTask; import org.chromium.content_public.browser.UiThreadTaskTraits; /** * Used for Js Java interaction, to receive postMessage back to the injected JavaScript object. * When the native counterpart of this object is gone, we still don't know if this is ready for gc * since developer could hold a reference to it. So just cut the connection between native and Java. */ @JNINamespace("android_webview") public class JsReplyProxy extends AwSupportLibIsomorphic { private long mNativeJsReplyProxy; private JsReplyProxy(long nativeJsReplyProxy) { mNativeJsReplyProxy = nativeJsReplyProxy; } /** * Post message to the injected JavaScript object. Note that it will drop message if the * injected object is gone. * * @param message a non-null String message post to the JavaScript object. */ public void postMessage(@NonNull final String message) { if (mNativeJsReplyProxy == 0) return; PostTask.runOrPostTask(UiThreadTaskTraits.USER_VISIBLE, () -> JsReplyProxyJni.get().postMessage(mNativeJsReplyProxy, message)); } @CalledByNative private static JsReplyProxy create(long nativeJsReplyProxy) { return new JsReplyProxy(nativeJsReplyProxy); } @CalledByNative private void onDestroy() { mNativeJsReplyProxy = 0; } @NativeMethods interface Natives { void postMessage(long nativeJsReplyProxy, String message); } }
616
480
/*
 * Copyright [2013-2021], Alibaba Group Holding Limited
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.alibaba.polardbx.common.utils.encrypt.aes;

import org.bouncycastle.crypto.BlockCipher;
import org.bouncycastle.crypto.CipherParameters;
import org.bouncycastle.crypto.DataLengthException;
import org.bouncycastle.crypto.StreamBlockCipher;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.crypto.params.ParametersWithIV;

import static com.alibaba.polardbx.common.utils.encrypt.aes.AesConst.BLOCK_SIZE;
import static com.alibaba.polardbx.common.utils.encrypt.aes.AesConst.IV_LENGTH;

/**
 * CFB-1 (1-bit cipher feedback) mode wrapper around a block cipher.
 *
 * Data is processed one bit at a time: for every input bit the current IV is
 * encrypted, its most-significant output bit is XOR-ed with the data bit, and
 * the IV register is shifted left by one bit with the feedback bit appended
 * (see {@link #doEncryptOrDecrypt}).
 */
public class Cfb1BlockCipher extends StreamBlockCipher {

    // Feedback register; mutated in place on every processed bit.
    private final byte[] IV;
    // Project-local key schedule wrapper; presumably precomputes AES round keys
    // via init() — confirm against EncryptionKey's implementation.
    private EncryptionKey encryptionKey;
    // CFB-1 exposes a 1-byte "block" to callers of processBlock().
    private final int blockSize = 1;
    private final BlockCipher cipher;
    private boolean encrypting;

    public Cfb1BlockCipher(BlockCipher cipher) {
        super(cipher);
        this.cipher = cipher;
        this.IV = new byte[IV_LENGTH];
    }

    /**
     * Initializes for encryption or decryption.
     *
     * @param encrypting true for encryption, false for decryption
     * @param params expected to be a {@link ParametersWithIV} carrying an IV of at
     *        least {@code IV_LENGTH} bytes and optionally a {@link KeyParameter}
     * @throws IllegalArgumentException if the supplied IV is too short
     */
    @Override
    public void init(boolean encrypting, CipherParameters params) throws IllegalArgumentException {
        this.encrypting = encrypting;
        if (params instanceof ParametersWithIV) {
            ParametersWithIV ivParam = (ParametersWithIV) params;
            byte[] iv = ivParam.getIV();
            if (iv.length < IV_LENGTH) {
                throw new IllegalArgumentException("IV length shorter than " + IV_LENGTH);
            } else {
                System.arraycopy(iv, 0, IV, 0, IV.length);
            }
            reset();
            // A nested KeyParameter is optional; when absent the previously
            // installed key schedule (if any) stays in effect.
            if (ivParam.getParameters() != null) {
                byte[] key = ((KeyParameter) ivParam.getParameters()).getKey();
                this.encryptionKey = new EncryptionKey(key);
                this.encryptionKey.init();
            }
        } else {
            reset();
        }
    }

    /**
     * Processes {@code len} bytes bit by bit (MSB first within each byte).
     *
     * Each input bit is isolated into the high bit of a one-byte buffer, pushed
     * through the CFB-1 round, and the resulting high bit is written back into
     * the corresponding output bit position.
     *
     * @return the number of bytes processed (always {@code len})
     */
    @Override
    public int processBytes(byte[] in, int inOff, int len, byte[] out, int outOff) throws DataLengthException {
        int totalBits = len * 8;
        byte[] c = new byte[1];
        byte[] tmp = new byte[1];
        for (int n = 0; n < totalBits; n++) {
            // Mask selecting bit n within its byte, MSB first.
            byte posBit = (byte) (1 << (7 - n % 8));
            if ((in[inOff + n / 8] & posBit) != 0x00) {
                c[0] = (byte) 0x80;
            } else {
                c[0] = (byte) 0x00;
            }
            doEncryptOrDecrypt(c, tmp, IV);
            // Clear the target bit, then OR in the processed bit (tmp's MSB shifted
            // into position n % 8).
            out[outOff + n / 8] = (byte) ((out[outOff + n / 8] & (byte) ~posBit) | ((tmp[0] & 0x80) >>> (n % 8)));
        }
        return len;
    }

    /**
     * One CFB-1 round over a single bit held in the high bit of {@code in[0]}.
     *
     * Steps visible in the code:
     *  1. snapshot the current IV into tmpVec,
     *  2. encrypt the IV in place ({@link #updateIV}),
     *  3. XOR the data bit with the encrypted IV's first byte
     *     (feedback bit = ciphertext bit when encrypting, input bit when decrypting),
     *  4. shift the 17-byte window left by one bit (rem == 1) to form the new IV.
     */
    private void doEncryptOrDecrypt(final byte[] in, byte[] out, byte[] iv) {
        int n, rem = 1, num = 0;
        byte[] tmpVec = new byte[2 * BLOCK_SIZE + 1];
        System.arraycopy(iv, 0, tmpVec, 0, IV_LENGTH);
        updateIV(iv, iv);
        if (encrypting) {
            for (n = 0; n < 1; ++n) {
                // Ciphertext bit is fed back into the shift register.
                tmpVec[IV_LENGTH + n] = (byte) (in[n] ^ iv[n]);
                out[n] = tmpVec[IV_LENGTH + n];
            }
        } else {
            for (n = 0; n < 1; ++n) {
                // Ciphertext (the input) is fed back; plaintext is the output.
                tmpVec[16 + n] = in[n];
                out[n] = (byte) (in[n] ^ iv[n]);
            }
        }
        // New IV = old IV shifted left one bit with the feedback bit appended.
        for (n = 0; n < 16; ++n) {
            iv[n] = (byte) ((tmpVec[n + num] & 0xFF) << rem | (tmpVec[n + num + 1] & 0xFF) >>> (8 - rem));
        }
    }

    // Encrypts a single block with the installed key schedule (in may alias out).
    private void updateIV(byte[] in, byte[] out) {
        AesUtil.encryptSingleBlock(in, 0, out, 0, encryptionKey);
    }

    @Override
    public String getAlgorithmName() {
        return cipher.getAlgorithmName() + "/CFB1";
    }

    @Override
    public int getBlockSize() {
        return blockSize;
    }

    // Convenience: processes exactly one byte through processBytes().
    @Override
    public int processBlock(byte[] in, int inOff, byte[] out, int outOff)
        throws DataLengthException, IllegalStateException {
        processBytes(in, inOff, blockSize, out, outOff);
        return blockSize;
    }

    // NOTE(review): resets only the underlying cipher, not the IV register;
    // confirm whether the IV is intended to be re-seeded solely via init().
    @Override
    public void reset() {
        cipher.reset();
    }

    // StreamBlockCipher's byte-at-a-time path is deliberately unsupported.
    @Override
    protected byte calculateByte(byte b) {
        throw new UnsupportedOperationException("Don't support streaming calculate.");
    }
}
2,051
640
#include "includes.h"

/* Line-status bits that must both be set before the UART is fully idle. */
#define BOTH_EMPTY (UART_LS_TEMT | UART_LS_THRE)

/* Busy-wait until both the transmit shift register and holding register drain. */
#define WAIT_FOR_XMITR \
        do { \
                lsr = REG8(UART_BASE + UART_LS_REG); \
        } while ((lsr & BOTH_EMPTY) != BOTH_EMPTY)

/* Busy-wait until the transmit holding register can accept another byte. */
#define WAIT_FOR_THRE \
        do { \
                lsr = REG8(UART_BASE + UART_LS_REG); \
        } while ((lsr & UART_LS_THRE) != UART_LS_THRE)

#define TASK_STK_SIZE 256

OS_STK TaskStartStk[TASK_STK_SIZE];

/* GB2312-encoded demo text, printed two bytes (one character) at a time by
 * TaskStart. 103 bytes total — note the final 0x0D is an unpaired byte. */
char Info[103]={0xC9,0xCF,0xB5,0xDB,0xCB,0xB5,0xD2,0xAA,0xD3,0xD0,0xB9,0xE2,0xA3,0xAC,0xD3,0xDA,0xCA,0xC7,0xBE,0xCD,0xD3,0xD0,0xC1,0xCB,0xB9,0xE2,0x0D,0x0A,0xC9,0xCF,0xB5,0xDB,0xCB,0xB5,0xD2,0xAA,0xD3,0xD0,0xCC,0xEC,0xBF,0xD5,0xA3,0xAC,0xD3,0xDA,0xCA,0xC7,0xBE,0xCD,0xD3,0xD0,0xC1,0xCB,0xCC,0xEC,0xBF,0xD5,0x0D,0x0A,0xC9,0xCF,0xB5,0xDB,0xCB,0xB5,0xD2,0xAA,0xD3,0xD0,0xC2,0xBD,0xB5,0xD8,0xBA,0xCD,0xBA,0xA3,0xD1,0xF3,0xA3,0xAC,0xD3,0xDA,0xCA,0xC7,0xBE,0xCD,0xD3,0xD0,0xC1,0xCB,0xC2,0xBD,0xB5,0xD8,0xBA,0xCD,0xBA,0xA3,0xD1,0xF3,0x0D};

/* Programs the UART divisor latch for UART_BAUD_RATE, disables UART
 * interrupts and selects 8N1 framing. */
void uart_init(void)
{
    INT32U divisor;

    /* Set baud rate */
    divisor = (INT32U) IN_CLK/(16 * UART_BAUD_RATE);
    REG8(UART_BASE + UART_LC_REG) = 0x80;
    REG8(UART_BASE + UART_DLB1_REG) = divisor & 0x000000ff;
    REG8(UART_BASE + UART_DLB2_REG) = (divisor >> 8) & 0x000000ff;
    REG8(UART_BASE + UART_LC_REG) = 0x00;

    /* Disable all interrupts */
    REG8(UART_BASE + UART_IE_REG) = 0x00;

    /* Set 8 bit char, 1 stop bit, no parity */
    REG8(UART_BASE + UART_LC_REG) = UART_LC_WLEN8 | (UART_LC_ONE_STOP | UART_LC_NO_PARITY);

    uart_print_str("UART initialize done ! \n");
    return;
}

/* Blocking write of one byte; expands '\n' to "\n\r" and waits for the
 * transmitter to drain completely before returning. */
void uart_putc(char c)
{
    unsigned char lsr;

    WAIT_FOR_THRE;
    REG8(UART_BASE + UART_TH_REG) = c;
    if(c == '\n') {
        WAIT_FOR_THRE;
        REG8(UART_BASE + UART_TH_REG) = '\r';
    }
    WAIT_FOR_XMITR;
}

/* Prints a NUL-terminated string with interrupts disabled so output from
 * concurrent tasks cannot interleave. OS_ENTER/EXIT_CRITICAL are uC/OS-II
 * macros that use the local cpu_sr (method-3 critical sections) and are
 * self-terminating — no trailing semicolon, as in the original. */
void uart_print_str(char* str)
{
    INT32U i=0;
    OS_CPU_SR cpu_sr;

    OS_ENTER_CRITICAL()
    while(str[i]!=0) {
        uart_putc(str[i]);
        i++;
    }
    OS_EXIT_CRITICAL()
}

/* Configures all GPIO pins as outputs, masks GPIO interrupts and writes an
 * initial test pattern. */
void gpio_init()
{
    REG32(GPIO_BASE + GPIO_OE_REG) = 0xffffffff;
    REG32(GPIO_BASE + GPIO_INTE_REG) = 0x00000000;
    gpio_out(0x0f0f0f0f);
    uart_print_str("GPIO initialize done ! \n");
    return;
}

void gpio_out(INT32U number)
{
    REG32(GPIO_BASE + GPIO_OUT_REG) = number;
}

INT32U gpio_in()
{
    INT32U temp = 0;
    temp = REG32(GPIO_BASE + GPIO_IN_REG);
    return temp;
}

/*******************************************
 Set the CP0 count/compare registers and enable the timer interrupt
 (original comment was GB2312 mojibake; translated).
********************************************/
void OSInitTick(void)
{
    INT32U compare = (INT32U)(IN_CLK / OS_TICKS_PER_SEC);

    asm volatile("mtc0 %0,$9" : :"r"(0x0));        /* count = 0 */
    asm volatile("mtc0 %0,$11" : :"r"(compare));   /* compare = ticks per OS tick */
    asm volatile("mtc0 %0,$12" : :"r"(0x10000401)); /* status: enable timer IRQ */
    //uart_print_str("OSInitTick Done!!!\n");
    return;
}

/* First task: streams Info[] over the UART two bytes per tick and mirrors the
 * running counter on the GPIO port.
 *
 * BUGFIX: the original guard `if (count <= 102)` printed Info[count + 1] when
 * count == 102, reading Info[103] — one byte past the 103-byte array. The
 * bounds are now checked per byte, so the final unpaired byte is still printed
 * without the out-of-bounds read. */
void TaskStart(void *pdata)
{
    INT32U count = 0;

    pdata = pdata;  /* Prevent compiler warning */
    OSInitTick();   /* don't put this function in main() */

    for (;;) {
        if (count < sizeof(Info)) {
            uart_putc(Info[count]);
            if (count + 1 < sizeof(Info)) {
                uart_putc(Info[count + 1]);
            }
        }
        gpio_out(count);
        count = count + 2;
        OSTimeDly(10);  /* Wait 100ms */
    }
}

/* Entry point: bring up the kernel and peripherals, create the first task and
 * hand control to the uC/OS-II scheduler (OSStart never returns). */
void main()
{
    OSInit();
    uart_init();
    gpio_init();
    OSTaskCreate(TaskStart, (void *)0, &TaskStartStk[TASK_STK_SIZE - 1], 0);
    OSStart();
}
2,302
378
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.tomee.server.composer;

import java.io.File;
import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.FileVisitor;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.ArrayList;
import java.util.List;

/**
 * Collects files and processes that must be torn down when the JVM exits.
 * A shutdown hook (registered in the constructor) destroys every tracked
 * process, waits for it to terminate, then recursively deletes every tracked
 * file. {@link #clean()} may also be invoked explicitly.
 */
public class CleanOnExit {

    private final List<File> files = new ArrayList<>();
    private final List<Process> processes = new ArrayList<>();

    public CleanOnExit() {
        Runtime.getRuntime().addShutdownHook(new Thread(this::clean));
    }

    /**
     * Registers {@code file} for recursive deletion at cleanup time.
     *
     * @return the same file, for fluent use
     */
    public File clean(final File file) {
        this.files.add(file);
        return file;
    }

    /**
     * Registers {@code process} to be destroyed and awaited at cleanup time.
     *
     * @return the same process, for fluent use
     */
    public Process clean(final Process process) {
        this.processes.add(process);
        return process;
    }

    /**
     * Destroys all tracked processes, waits for each to exit, then deletes all
     * tracked files. If the calling thread is interrupted while waiting, the
     * interrupt status is restored and waiting stops; file deletion still runs.
     */
    public void clean() {
        processes.forEach(Process::destroy);
        for (final Process process : processes) {
            try {
                process.waitFor();
            } catch (final InterruptedException e) {
                // BUGFIX: the original called Thread.interrupted(), which *clears*
                // and discards the interrupt flag. Restore it so callers can see
                // the interruption, and stop blocking on further processes.
                Thread.currentThread().interrupt();
                break;
            }
        }
        files.forEach(this::delete);
    }

    // Best-effort recursive delete; failures are logged, never thrown.
    private void delete(final File file) {
        try {
            java.nio.file.Files.walkFileTree(file.toPath(), new RecursiveDelete());
        } catch (final IOException e) {
            e.printStackTrace();
        }
    }

    /** Depth-first visitor that deletes files on the way down and directories on the way up. */
    private static class RecursiveDelete implements FileVisitor<Path> {
        @Override
        public FileVisitResult preVisitDirectory(final Path dir, final BasicFileAttributes attrs) throws IOException {
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
            java.nio.file.Files.deleteIfExists(file);
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult visitFileFailed(final Path file, final IOException exc) throws IOException {
            // Best-effort: an unreadable entry should not abort the whole walk.
            return FileVisitResult.CONTINUE;
        }

        @Override
        public FileVisitResult postVisitDirectory(final Path dir, final IOException exc) throws IOException {
            // Directory is now empty (children were deleted first), so remove it.
            java.nio.file.Files.deleteIfExists(dir);
            return FileVisitResult.CONTINUE;
        }
    }
}
1,111
348
{"nom":"Rombach-le-Franc","circ":"2ème circonscription","dpt":"Haut-Rhin","inscrits":652,"abs":415,"votants":237,"blancs":16,"nuls":1,"exp":220,"res":[{"nuance":"REM","nom":"<NAME>","voix":114},{"nuance":"LR","nom":"<NAME>","voix":106}]}
95
595
// Umbrella header for the Pods_Colorful_Example target — presumably generated
// by CocoaPods (TODO confirm); hand edits are likely to be overwritten.
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#else
// Non-Objective-C translation units: provide FOUNDATION_EXPORT ourselves.
#ifndef FOUNDATION_EXPORT
#if defined(__cplusplus)
#define FOUNDATION_EXPORT extern "C"
#else
#define FOUNDATION_EXPORT extern
#endif
#endif
#endif

// Framework version metadata symbols.
FOUNDATION_EXPORT double Pods_Colorful_ExampleVersionNumber;
FOUNDATION_EXPORT const unsigned char Pods_Colorful_ExampleVersionString[];
121
343
// Copyright 2012 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Implementation of EntryThunkTransform: rewrites code-to-code references in a
// PE image so that each call/jump target is reached through a small "thunk"
// that first invokes an instrumentation hook imported from the agent DLL.

#include "syzygy/instrument/transforms/entry_thunk_transform.h"

#include "base/logging.h"
#include "base/strings/stringprintf.h"
#include "syzygy/block_graph/block_builder.h"
#include "syzygy/block_graph/block_util.h"
#include "syzygy/common/defs.h"
#include "syzygy/pe/pe_utils.h"
#include "syzygy/pe/transforms/pe_add_imports_transform.h"

namespace instrument {
namespace transforms {

using block_graph::BasicBlock;
using block_graph::BasicCodeBlock;
using block_graph::BasicBlockAssembler;
using block_graph::BasicBlockReference;
using block_graph::BasicBlockSubGraph;
using block_graph::BlockBuilder;
using block_graph::BlockGraph;
using block_graph::Displacement;
using block_graph::Operand;
using block_graph::TransformPolicyInterface;
using pe::transforms::PEAddImportsTransform;

typedef pe::transforms::ImportedModule ImportedModule;

const char EntryThunkTransform::kTransformName[] = "EntryThunkTransform";

// Names of the hook functions imported from the instrumentation DLL.
const char EntryThunkTransform::kEntryHookName[] = "_indirect_penter";
const char EntryThunkTransform::kDllMainEntryHookName[] =
    "_indirect_penter_dllmain";
const char EntryThunkTransform::kExeMainEntryHookName[] =
    "_indirect_penter_exemain";

const char EntryThunkTransform::kDefaultInstrumentDll[] =
    "call_trace_client.dll";

EntryThunkTransform::EntryThunkTransform()
    : thunk_section_(NULL),
      instrument_unsafe_references_(true),
      src_ranges_for_thunks_(false),
      only_instrument_module_entry_(false),
      instrument_dll_name_(kDefaultInstrumentDll) {
}

// The optional thunk parameters must be either absent or exactly 32 bits wide,
// since they are emitted as a single push immediate.
bool EntryThunkTransform::SetEntryThunkParameter(
    const ImmediateType& immediate) {
  if (immediate.size() != assm::kSizeNone &&
      immediate.size() != assm::kSize32Bit) {
    return false;
  }
  entry_thunk_parameter_ = immediate;
  return true;
}

bool EntryThunkTransform::SetFunctionThunkParameter(
    const ImmediateType& immediate) {
  if (immediate.size() != assm::kSizeNone &&
      immediate.size() != assm::kSize32Bit) {
    return false;
  }
  function_thunk_parameter_ = immediate;
  return true;
}

bool EntryThunkTransform::EntryThunkIsParameterized() const {
  return entry_thunk_parameter_.size() != assm::kSizeNone;
}

bool EntryThunkTransform::FunctionThunkIsParameterized() const {
  return function_thunk_parameter_.size() != assm::kSizeNone;
}

// Resolves the image's entry points, imports only the hook symbols that are
// actually needed, and creates the section that will hold the thunks.
bool EntryThunkTransform::PreBlockGraphIteration(
    const TransformPolicyInterface* policy,
    BlockGraph* block_graph,
    BlockGraph::Block* header_block) {
  DCHECK_NE(reinterpret_cast<TransformPolicyInterface*>(NULL), policy);
  DCHECK_NE(reinterpret_cast<BlockGraph*>(NULL), block_graph);
  DCHECK_NE(reinterpret_cast<BlockGraph::Block*>(NULL), header_block);
  DCHECK_EQ(BlockGraph::PE_IMAGE, block_graph->image_format());
  DCHECK_EQ(reinterpret_cast<BlockGraph::Section*>(NULL), thunk_section_);

  if (!GetEntryPoints(header_block))
    return false;

  ImportedModule import_module(instrument_dll_name_);

  // We import the minimal set of symbols necessary, depending on the types of
  // entry points we find in the module. We maintain a list of symbol indices/
  // reference pointers, which will be traversed after the import to populate
  // the references.
  typedef std::pair<size_t, BlockGraph::Reference*> ImportHook;
  std::vector<ImportHook> import_hooks;

  // If there are any DllMain-like entry points (TLS initializers or DllMain
  // itself) then we need the DllMain entry hook.
  if (dllmain_entrypoints_.size() > 0) {
    import_hooks.push_back(std::make_pair(
        import_module.AddSymbol(kDllMainEntryHookName,
                                ImportedModule::kAlwaysImport),
        &hook_dllmain_ref_));
  }

  // If this was an EXE then we need the EXE entry hook.
  if (exe_entry_point_.first != NULL) {
    import_hooks.push_back(std::make_pair(
        import_module.AddSymbol(kExeMainEntryHookName,
                                ImportedModule::kAlwaysImport),
        &hook_exe_entry_ref_));
  }

  // If we're not only instrumenting module entry then we need the function
  // entry hook.
  if (!only_instrument_module_entry_) {
    import_hooks.push_back(std::make_pair(
        import_module.AddSymbol(kEntryHookName,
                                ImportedModule::kAlwaysImport),
        &hook_ref_));
  }

  // Nothing to do if we don't need any import hooks.
  if (import_hooks.empty())
    return true;

  // Run the transform.
  PEAddImportsTransform add_imports_transform;
  add_imports_transform.AddModule(&import_module);
  if (!add_imports_transform.TransformBlockGraph(
          policy, block_graph, header_block)) {
    LOG(ERROR) << "Unable to add imports for instrumentation DLL.";
    return false;
  }

  // Get references to each of the imported symbols.
  for (size_t i = 0; i < import_hooks.size(); ++i) {
    if (!import_module.GetSymbolReference(import_hooks[i].first,
                                          import_hooks[i].second)) {
      LOG(ERROR) << "Unable to get reference to import.";
      return false;
    }
  }

  // Find or create the section we put our thunks in.
  thunk_section_ = block_graph->FindOrAddSection(common::kThunkSectionName,
                                                 pe::kCodeCharacteristics);
  DCHECK(thunk_section_ != NULL);

  return true;
}

// Per-block callback: only code blocks are candidates for thunking.
bool EntryThunkTransform::OnBlock(const TransformPolicyInterface* policy,
                                  BlockGraph* block_graph,
                                  BlockGraph::Block* block) {
  DCHECK(block != NULL);

  if (block->type() != BlockGraph::CODE_BLOCK)
    return true;

  return InstrumentCodeBlock(block_graph, block);
}

bool EntryThunkTransform::InstrumentCodeBlock(
    BlockGraph* block_graph, BlockGraph::Block* block) {
  DCHECK(block_graph != NULL);
  DCHECK(block != NULL);

  // Typedef for the thunk block map. The key is the offset within the callee
  // block and the value is the thunk block that forwards to the callee at that
  // offset.
  ThunkBlockMap thunk_block_map;

  // Iterate through all the block's referrers, creating thunks as we go.
  // We copy the referrer set for simplicity, as it's potentially mutated
  // in the loop.
  BlockGraph::Block::ReferrerSet referrers = block->referrers();
  BlockGraph::Block::ReferrerSet::const_iterator referrer_it(referrers.begin());
  for (; referrer_it != referrers.end(); ++referrer_it) {
    const BlockGraph::Block::Referrer& referrer = *referrer_it;
    if (!InstrumentCodeBlockReferrer(
            referrer, block_graph, block, &thunk_block_map)) {
      return false;
    }
  }

  return true;
}

// Rewrites one incoming reference to |block| to go through a thunk, creating
// the thunk on first use of each target offset (cached in |thunk_block_map|).
bool EntryThunkTransform::InstrumentCodeBlockReferrer(
    const BlockGraph::Block::Referrer& referrer,
    BlockGraph* block_graph,
    BlockGraph::Block* block,
    ThunkBlockMap* thunk_block_map) {
  DCHECK(block_graph != NULL);
  DCHECK(block != NULL);
  DCHECK(thunk_block_map != NULL);

  // Get the reference.
  BlockGraph::Reference ref;
  if (!referrer.first->GetReference(referrer.second, &ref)) {
    LOG(ERROR) << "Unable to get reference from referrer.";
    return false;
  }

  // Skip self-references, except long references to the start of the block.
  // TODO(siggi): This needs refining, as it may currently miss important
  //     cases. Notably if a block contains more than one function, and the
  //     functions are mutually recursive, we'll only record the original
  //     entry to the block, but will miss the internal recursion.
  //     As-is, this does work for the common case where a block contains
  //     one self-recursive function, however.
  if (referrer.first == block) {
    // Skip short references.
    if (ref.size() < sizeof(core::AbsoluteAddress))
      return true;

    // Skip interior references. The rationale for this is because these
    // references will tend to be switch tables, and we don't need the
    // overhead of instrumenting and recording all switch statement executions
    // for now.
    if (ref.offset() != 0)
      return true;
  }

  // See whether this is one of the DLL entrypoints.
  pe::EntryPoint entry(ref.referenced(), ref.offset());
  pe::EntryPointSet::const_iterator entry_it(dllmain_entrypoints_.find(
      entry));
  bool is_dllmain_entry = entry_it != dllmain_entrypoints_.end();

  // Determine if this is an EXE entry point.
  bool is_exe_entry = entry == exe_entry_point_;

  // It can't be both an EXE and a DLL entry.
  DCHECK(!is_dllmain_entry || !is_exe_entry);

  // If we're only instrumenting entry points and this isn't one, then skip it.
  if (only_instrument_module_entry_ && !is_dllmain_entry && !is_exe_entry)
    return true;

  if (!instrument_unsafe_references_ &&
      block_graph::IsUnsafeReference(referrer.first, ref)) {
    LOG(INFO) << "Skipping reference between unsafe block pair '"
              << referrer.first->name() << "' and '"
              << block->name() << "'";
    return true;
  }

  // Determine which hook function to use.
  BlockGraph::Reference* hook_ref = &hook_ref_;
  if (is_dllmain_entry)
    hook_ref = &hook_dllmain_ref_;
  else if (is_exe_entry)
    hook_ref = &hook_exe_entry_ref_;
  DCHECK(hook_ref->referenced() != NULL);

  // Determine which parameter to use, if any.
  const ImmediateType* param = NULL;
  if ((is_dllmain_entry || is_exe_entry) && EntryThunkIsParameterized()) {
    param = &entry_thunk_parameter_;
  } else if (FunctionThunkIsParameterized()) {
    param = &function_thunk_parameter_;
  }

  // Look for the reference in the thunk block map, and only create a new one
  // if it does not already exist.
  BlockGraph::Block* thunk_block = NULL;
  ThunkBlockMap::const_iterator thunk_it = thunk_block_map->find(ref.offset());
  if (thunk_it == thunk_block_map->end()) {
    thunk_block = CreateOneThunk(block_graph, ref, *hook_ref, param);
    if (thunk_block == NULL) {
      LOG(ERROR) << "Unable to create thunk block.";
      return false;
    }
    (*thunk_block_map)[ref.offset()] = thunk_block;
  } else {
    thunk_block = thunk_it->second;
  }
  DCHECK(thunk_block != NULL);

  // Update the referrer to point to the thunk.
  BlockGraph::Reference new_ref(ref.type(),
                                ref.size(),
                                thunk_block,
                                0, 0);
  referrer.first->SetReference(referrer.second, new_ref);

  return true;
}

// Assembles a new thunk block: [push param,] push destination, jmp [hook].
BlockGraph::Block* EntryThunkTransform::CreateOneThunk(
    BlockGraph* block_graph,
    const BlockGraph::Reference& destination,
    const BlockGraph::Reference& hook,
    const ImmediateType* parameter) {
  std::string name;
  if (destination.offset() == 0) {
    name = base::StringPrintf("%s%s",
                              destination.referenced()->name().c_str(),
                              common::kThunkSuffix);
  } else {
    name = base::StringPrintf("%s%s+%d",
                              destination.referenced()->name().c_str(),
                              common::kThunkSuffix,
                              destination.offset());
  }

  // Set up a basic block subgraph containing a single block description, with
  // that block description containing a single empty basic block, and get an
  // assembler writing into that basic block.
  // TODO(chrisha): Make this reusable somehow. Creating a code block via an
  //     assembler is likely to be pretty common.
  BasicBlockSubGraph bbsg;
  BasicBlockSubGraph::BlockDescription* block_desc = bbsg.AddBlockDescription(
      name, NULL, BlockGraph::CODE_BLOCK, thunk_section_->id(), 1, 0);
  BasicCodeBlock* bb = bbsg.AddBasicCodeBlock(name);
  block_desc->basic_block_order.push_back(bb);
  BasicBlockAssembler assm(bb->instructions().begin(), &bb->instructions());

  // Set up our thunk:
  // 1. push parameter
  // 2. push func_addr
  // 3. jmp hook_addr
  if (parameter != NULL)
    assm.push(*parameter);
  assm.push(Immediate(destination.referenced(), destination.offset()));
  assm.jmp(Operand(Displacement(hook.referenced(), hook.offset())));

  // Condense the whole mess into a block.
  BlockBuilder block_builder(block_graph);
  if (!block_builder.Merge(&bbsg)) {
    LOG(ERROR) << "Failed to build thunk block.";
    return NULL;
  }

  // Exactly one new block should have been created.
  DCHECK_EQ(1u, block_builder.new_blocks().size());
  BlockGraph::Block* thunk = block_builder.new_blocks().front();

  if (src_ranges_for_thunks_) {
    // Give the thunk a source range synonymous with the destination.
    // That way the debugger will resolve calls and jumps to the thunk to the
    // destination function's name, which makes the assembly much easier to
    // read. The downside to this is that the symbols are now no longer unique,
    // and searching for a function by name may turn up either the function or
    // the thunk.
    const BlockGraph::Block::SourceRanges& source_ranges =
        destination.referenced()->source_ranges();
    const BlockGraph::Block::SourceRanges::RangePair* source =
        source_ranges.FindRangePair(destination.offset(), thunk->size());
    if (source != NULL) {
      // Calculate the offset into the range.
      size_t offs = destination.offset() - source->first.start();
      BlockGraph::Block::DataRange data(0, thunk->size());
      BlockGraph::Block::SourceRange src(source->second.start() + offs,
                                         thunk->size());
      bool pushed = thunk->source_ranges().Push(data, src);
      DCHECK(pushed);
    }
  }

  return thunk;
}

// Populates dllmain_entrypoints_ (TLS initializers + DllMain) and, for EXEs,
// exe_entry_point_ from the image headers.
bool EntryThunkTransform::GetEntryPoints(BlockGraph::Block* header_block) {
  // Get the TLS initializer entry-points. These have the same signature and
  // call patterns to DllMain.
  if (!pe::GetTlsInitializers(header_block, &dllmain_entrypoints_)) {
    LOG(ERROR) << "Failed to populate the TLS Initializer entry-points.";
    return false;
  }

  // Get the DLL entry-point.
  pe::EntryPoint dll_entry_point;
  if (!pe::GetDllEntryPoint(header_block, &dll_entry_point)) {
    LOG(ERROR) << "Failed to resolve the DLL entry-point.";
    return false;
  }

  // If the image is an EXE or is a DLL that does not specify an entry-point
  // (the entry-point is optional for DLLs) then the dll_entry_point will have
  // a NULL block pointer. Otherwise, add it to the entry-point set.
  if (dll_entry_point.first != NULL) {
    dllmain_entrypoints_.insert(dll_entry_point);
  } else {
    // Get the EXE entry point. We only need to bother looking if we didn't get
    // a DLL entry point, as we can't have both.
    if (!pe::GetExeEntryPoint(header_block, &exe_entry_point_)) {
      LOG(ERROR) << "Failed to resolve the EXE entry-point.";
      return false;
    }
  }

  return true;
}

}  // namespace transforms
}  // namespace instrument
5,532
852
<reponame>ckamtsikis/cmssw #include "RecoEcal/EgammaClusterAlgos/interface/Multi5x5BremRecoveryClusterAlgo.h" #include "RecoEcal/EgammaCoreTools/interface/BremRecoveryPhiRoadAlgo.h" #include "FWCore/MessageLogger/interface/MessageLogger.h" reco::SuperClusterCollection Multi5x5BremRecoveryClusterAlgo::makeSuperClusters( const reco::CaloClusterPtrVector& clustersCollection) { const float etaBorder = 1.479; superclusters_v.clear(); // create vectors of references to clusters of a specific origin... reco::CaloClusterPtrVector islandClustersBarrel_v; reco::CaloClusterPtrVector islandClustersEndCap_v; // ...and populate them: for (auto const& cluster_p : clustersCollection) { if (cluster_p->algo() == reco::CaloCluster::multi5x5) { if (fabs(cluster_p->position().eta()) < etaBorder) { islandClustersBarrel_v.push_back(cluster_p); } else { islandClustersEndCap_v.push_back(cluster_p); } } } // make the superclusters from the Barrel clusters - Island makeIslandSuperClusters(islandClustersBarrel_v, eb_rdeta_, eb_rdphi_); // make the superclusters from the EndCap clusters - Island makeIslandSuperClusters(islandClustersEndCap_v, ec_rdeta_, ec_rdphi_); return superclusters_v; } #include "DataFormats/Math/interface/Vector3D.h" void Multi5x5BremRecoveryClusterAlgo::makeIslandSuperClusters(reco::CaloClusterPtrVector& clusters_v, double etaRoad, double phiRoad) { if (clusters_v.empty()) return; const auto clustersSize = clusters_v.size(); assert(clustersSize > 0); bool usedSeed[clustersSize]; for (auto ic = 0U; ic < clustersSize; ++ic) usedSeed[ic] = false; float eta[clustersSize], phi[clustersSize], et[clustersSize]; for (auto ic = 0U; ic < clustersSize; ++ic) { eta[ic] = clusters_v[ic]->eta(); phi[ic] = clusters_v[ic]->phi(); et[ic] = clusters_v[ic]->energy() * sin(clusters_v[ic]->position().theta()); } for (auto is = 0U; is < clustersSize; ++is) { // check this seed was not already used if (usedSeed[is]) continue; auto const& currentSeed = clusters_v[is]; // Does our highest energy 
cluster have high enough energy? // changed this to continue from break (to be robust against the order of sorting of the seed clusters) if (et[is] < seedTransverseEnergyThreshold) continue; // if yes, make it a seed for a new SuperCluster: double energy = (currentSeed)->energy(); math::XYZVector position_( (currentSeed)->position().X(), (currentSeed)->position().Y(), (currentSeed)->position().Z()); position_ *= energy; usedSeed[is] = true; LogTrace("EcalClusters") << "*****************************"; LogTrace("EcalClusters") << "******NEW SUPERCLUSTER*******"; LogTrace("EcalClusters") << "Seed R = " << (currentSeed)->position().Rho(); reco::CaloClusterPtrVector constituentClusters; constituentClusters.push_back(currentSeed); auto ic = is + 1; while (ic < clustersSize) { auto const& currentCluster = clusters_v[ic]; // if dynamic phi road is enabled then compute the phi road for a cluster // of energy of existing clusters + the candidate cluster. if (dynamicPhiRoad_) phiRoad = phiRoadAlgo_->endcapPhiRoad(energy + (currentCluster)->energy()); auto dphi = [](float p1, float p2) { auto dp = std::abs(p1 - p2); if (dp > float(M_PI)) dp -= float(2 * M_PI); return std::abs(dp); }; auto match = [&](int i, int j) { return (dphi(phi[i], phi[j]) < phiRoad) & (std::abs(eta[i] - eta[j]) < etaRoad); }; // does the cluster match the phi road for this candidate supercluster if (!usedSeed[ic] && match(is, ic)) { // add basic cluster to supercluster constituents constituentClusters.push_back(currentCluster); energy += (currentCluster)->energy(); position_ += (currentCluster)->energy() * math::XYZVector((currentCluster)->position().X(), (currentCluster)->position().Y(), (currentCluster)->position().Z()); // remove cluster from vector of available clusters usedSeed[ic] = true; LogTrace("EcalClusters") << "Cluster R = " << (currentCluster)->position().Rho(); } ++ic; } position_ /= energy; LogTrace("EcalClusters") << "Final SuperCluster R = " << position_.Rho(); reco::SuperCluster 
newSuperCluster( energy, math::XYZPoint(position_.X(), position_.Y(), position_.Z()), currentSeed, constituentClusters); superclusters_v.push_back(newSuperCluster); LogTrace("EcalClusters") << "created a new supercluster of: "; LogTrace("EcalClusters") << "Energy = " << newSuperCluster.energy(); LogTrace("EcalClusters") << "Position in (R, phi, theta) = (" << newSuperCluster.position().Rho() << ", " << newSuperCluster.position().phi() << ", " << newSuperCluster.position().theta() << ")"; } clusters_v.clear(); }
2,165
540
<reponame>dlee992/sdc<gh_stars>100-1000 //***************************************************************************** // Copyright (c) 2019-2021, Intel Corporation All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // Redistributions in binary form must reproduce the above copyright notice, // this list of conditions and the following disclaimer in the documentation // and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, // THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; // OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, // WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR // OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, // EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
//***************************************************************************** #ifndef HSDC_STD_STRING_VIEW #define HSDC_STD_STRING_VIEW #include <iostream> #include <string> #include <string_view> #include <charconv> #include <cstring> #include "numba/core/runtime/nrt_external.h" extern "C" { void string_view_delete(void* p_str_view) { auto p_spec = (std::string_view*)p_str_view; delete p_spec; } void string_view_create(NRT_MemInfo** meminfo, void* nrt_table) { auto nrt = (NRT_api_functions*)nrt_table; auto p_str_view = new std::string_view; void* res = (void*)p_str_view; (*meminfo) = nrt->manage_memory(res, string_view_delete); } void string_view_create_with_data(NRT_MemInfo** meminfo, void* nrt_table, void* data_ptr, int64_t size) { auto nrt = (NRT_api_functions*)nrt_table; auto p_str_view = new std::string_view((char*)data_ptr, size); void* res = (void*)p_str_view; (*meminfo) = nrt->manage_memory(res, string_view_delete); } void string_view_print(void* p_str_view) { auto p_spec = (std::string_view*)p_str_view; std::cout << (*p_spec) << std::endl; } int64_t string_view_len(void* p_str_view) { auto p_spec = (std::string_view*)p_str_view; return p_spec->size(); } const char* string_view_get_data_ptr(void* p_str_view) { auto p_spec = (std::string_view*)p_str_view; return p_spec->data(); } void string_view_set_data(void* p_str_view, char* data, int64_t size) { auto p_spec = (std::string_view*)p_str_view; std::string_view tmp(data, size); p_spec->swap(tmp); } int8_t string_view_to_int(void* p_str_view, int64_t base, int64_t* p_res) { auto p_spec = (std::string_view*)p_str_view; char* p_data = (char*)(p_spec->data()); size_t str_len = p_spec->size(); if (!str_len) return 1; // std::from_chars doesn't recognize "0x" prefixes, so handle this ourselves if (!strncmp(p_data, "0x", 2) || !strncmp(p_data, "0X", 2)) { str_len -= 2; p_data += 2; base = 16; } int64_t res = 0; auto ret = std::from_chars(p_data, p_data + str_len, res, base); if (ret.ptr != p_data + str_len) { // 
TO-DO: propagate error code to python // std::cerr << "wrong data" << std::endl; return 1; } else { *p_res = res; return 0; } } int8_t string_view_to_float64(void* p_str_view, double* p_res) { auto p_spec = (std::string_view*)p_str_view; size_t str_len = p_spec->size(); if (!str_len) return 1; // std::from_chars overloads for floats are supported from GCC 11.1 only std::string null_term_str(*p_spec); auto data_ptr = null_term_str.c_str(); char* end_pos = nullptr; *p_res = std::strtod(data_ptr, &end_pos); if (data_ptr + str_len != end_pos) return 1; else return 0; } } // extern "C" #endif
1,994
3,102
<gh_stars>1000+ #import "someheader.h" typedef myint aint;
28
473
<filename>challenges/DFARS_Sample_Service/src/service.c #include "cgc_cgc_libc.h" #include "cgc_cgc_malloc.h" #include "cgc_cgc_types.h" typedef struct _program_state program_state; typedef int (*process_t)(program_state *, char *); #define BOOK_SECTION_SIZE 4*25 // #define BOOK_NAME_SIZE 4*148 // #define BOOK_DATA_SIZE 4*250 #define MAX_CHAR 0xFF #define TOLOWER(x) ((x >= 'A' && x <= 'Z') ? x | 0x20 : x) #define COMMAND_NAME_SIZE 12 #define INPUT_SIZE 50 #define DIRECTORY_SIZE BOOK_SECTION_SIZE #define DELIM '|' #define TRAVERSAL '+' typedef struct book { char *section; char *name; char *data; struct book *next; } __attribute__((__packed__)) book_t; typedef struct command { struct command *sibling; struct command *child; process_t method; unsigned char value; } command_t; struct _program_state { char current_dir[DIRECTORY_SIZE]; command_t command_list; char book_count; char input_buf[INPUT_SIZE]; book_t book_list; }; char *cgc_compress(char *string); int cgc_cmd_search(program_state *s, char *args); int cgc_cmd_ch_sec(program_state *s, char *args); int cgc_cmd_compress(program_state *s, char *args); int cgc_cmd_cur_sec(program_state *s, char *args); int cgc_cmd_get(program_state *s, char *args); int cgc_cmd_list(program_state *s, char *args); int cgc_cmd_put(program_state *s, char *args); int cgc_cmd_quit(program_state *s, char *args); int cgc_cmd_make_sec(program_state *s, char *args); int cgc_normalize_path(char *path); int cgc_process(program_state *s); void cgc_register_book(program_state *s, char *section, char *name, char *data); process_t cgc_get_command(program_state *s, char *name); void cgc_remove_newline(char *line); void cgc_register_book(program_state *s, char *section, char *name, char *data) { book_t *bk = &(s->book_list); if (!section) cgc_err(1, "invalid section"); if (!name) cgc_err(1, "invalid name"); if (!data) cgc_err(1, "invalid data"); while (bk != NULL) { if (0 == cgc_strlen(bk->section)) { bk->section = cgc_strdup(section); bk->name = 
cgc_strdup(name); bk->data = cgc_strdup(data); bk->next = (book_t *) cgc_calloc(1, sizeof(book_t)); if (!bk->next) cgc_err(1, "unable to allocate memory"); s->book_count++; break; } else bk = bk->next; } } /* This is the vulnerable function. * * This function's intent is to normalize "|foo|++|" into "|". The method is to * identify a traversal marker "++|", and seek backwards until the previous * marker "|" is identified, then replace from that location until the next * marker "|" is found. * * If the input does not begin with the marker "|", then the program will * continue the search into unspecified memory regions. If the marker is found * elsewhere, the program will cgc_write the input after the traversal marker "++|" * at the found location. * * By placing a a marker "|" in a known location, a specified input may be able * to leverage the marker "|" to modify memory in a fashion that can be leveraged * gain execution control. */ int cgc_normalize_path(char *path) { char *p; char *q; char *previous_delim = NULL; char *current_delim = NULL; int depth = 0; if (path == NULL) return -1; const cgc_size_t len = cgc_strlen(path); char *end_of_path = path + len; q = p = path; while (*p != 0) { if (*p == DELIM) { previous_delim = current_delim; current_delim = p; p++; continue; } if (*p == TRAVERSAL) { if (current_delim == p - 1 || p == path) { if (p[1] == TRAVERSAL && (p[2] == DELIM || p[2] == '\0')) { depth++; if (depth > 2) goto fail; if (previous_delim == NULL) goto done; cgc_strlcpy(previous_delim, p + 2, len); if (p[2] == '\0') goto done; current_delim = previous_delim; p = previous_delim; q = p - 1; while (*q != DELIM && q != path) { #ifdef PATCHED if (q <= path) goto done; #endif q--; } if (p[0] == DELIM) previous_delim = q; else previous_delim = NULL; } else if (p[1] == DELIM) { if (current_delim != NULL) { if (current_delim >= end_of_path) goto done; cgc_strlcpy(current_delim, (char *)(end_of_path - current_delim), len); continue; } else { if (p >= 
end_of_path) goto done; cgc_strlcpy(p, (char *)(end_of_path - p), len); continue; } } else if (p[1] != '\0') { if (current_delim != NULL) p = current_delim; *p = '\0'; goto done; } } } p++; } done: return 0; fail: return -1; } process_t cgc_get_command(program_state *s, char *name) { command_t *cmd = &(s->command_list); process_t method = NULL; while (name) { if (cmd->value == *name) { name++; if (cmd->method) { method = cmd->method; break; } if (cmd->child) cmd = cmd->child; else break; } else { if (cmd->sibling) cmd = cmd->sibling; else /* cannot continue */ break; } } return method; } int cgc_cmd_cur_sec(program_state *s, char *args) { (void) args; cgc_transmit_str(STDOUT, s->current_dir); cgc_transmit_str(STDOUT, "\n"); return 1; } int cgc_skip_search(cgc_size_t *skip_dict, char *needle, cgc_size_t needle_len, char *haystack, cgc_size_t haystack_len) { cgc_size_t scan; cgc_size_t last = needle_len - 1; while (haystack_len >= needle_len) { for (scan = last; needle[scan] == TOLOWER(haystack[scan]); scan--) { if (scan == 0) { return 1; } } haystack_len -= skip_dict[(unsigned char) haystack[last]]; haystack += skip_dict[(unsigned char) haystack[last]]; } return 0; } int cgc_cmd_search(program_state *s, char *args) { book_t *cur_book = &(s->book_list); cgc_size_t scan; cgc_size_t last; cgc_size_t skip_dict[MAX_CHAR + 1]; cgc_size_t scan_len; (void) args; scan_len = cgc_strlen(args); if (scan_len == 0) { return 1; } for (scan = 0; scan < scan_len; scan++) { args[scan] = TOLOWER(args[scan]); } for (scan = 0; scan <= MAX_CHAR; scan++) { skip_dict[scan] = scan_len; } last = scan_len - 1; for (scan = 0; scan < last; scan++) { skip_dict[(unsigned char) args[scan]] = last - scan; } while (cur_book != NULL) { if (cgc_skip_search((cgc_size_t *)&skip_dict, args, scan_len, cur_book->data, cgc_strlen(cur_book->data)) == 1) { cgc_printf("%s : %s\n", cur_book->section, cur_book->name); } cur_book = cur_book->next; } return 1; } int cgc_cmd_ch_sec(program_state *s, char *args) { 
int changed = 0; cgc_size_t sub_section_len; book_t *cur_book = &(s->book_list); sub_section_len = cgc_strlen(args); if (sub_section_len == 0) return 1; if (args[0] != DELIM) { int ret; char *tmpname; tmpname = cgc_strdup(args); assert(tmpname != NULL); ret = cgc_strlcpy(s->input_buf, s->current_dir, sizeof(s->input_buf)); assert(ret <= sizeof(s->input_buf)); ret = cgc_strlcat(s->input_buf, tmpname, sizeof(s->input_buf)); assert(ret <= sizeof(s->input_buf)); args = s->input_buf; cgc_free(tmpname); } if (cgc_normalize_path(args) != 0) return -1; sub_section_len = cgc_strlen(args); if (args[0] != DELIM) return 0; if (args[sub_section_len - 1] != DELIM) return 0; while (cur_book != NULL) { cgc_size_t section_len = cgc_strlen(cur_book->section); if (section_len >= sub_section_len) { if (0 == cgc_strcmp(cur_book->section, args)) { cgc_strlcpy(s->current_dir, args, sizeof(s->current_dir)); changed = 1; break; } } cur_book = cur_book->next; } if (changed == 0) { cur_book = &(s->book_list); while (cur_book != NULL) { cgc_size_t section_len = cgc_strlen(cur_book->section); if (section_len >= sub_section_len) { if (0 == cgc_strncmp(cur_book->section, args, sub_section_len)) { cgc_strlcpy(s->current_dir, args, sizeof(s->current_dir)); changed = 1; break; } } cur_book = cur_book->next; } } else { } return 1; } char *cgc_compress(char *string) { char curr; cgc_size_t length; char *encoded; char count = 0x30; int i = 0; if (string == NULL) return NULL; curr = *string; length = cgc_strlen(string); encoded = (char *)cgc_calloc(((length * 2) + 1), sizeof(char)); if (encoded == NULL) return NULL; while (*string) { if (curr == *(++string) && count < 0x39) count++; else { encoded[i++] = curr; encoded[i++] = count; count = 1; curr = *string; } } return encoded; } int cgc_cmd_compress(program_state *s, char *args) { book_t *cur_book = &(s->book_list); char *compressed = NULL; if (args == NULL) return -1; if (0 == cgc_strlen(args)) return -1; if (0 == cgc_strlen(s->current_dir)) return 
-1; while (cur_book != NULL) { if (0 == cgc_strcmp(cur_book->section, s->current_dir)) { if (0 == cgc_strcmp(cur_book->name, args)) { compressed = cgc_compress(cur_book->data); if (compressed == NULL) return -1; cgc_transmit_str(STDOUT, cur_book->name); cgc_transmit_str(STDOUT, "\n"); cgc_transmit_str(STDOUT, compressed); cgc_transmit_str(STDOUT, "\n\n"); cgc_free(compressed); return 1; } } cur_book = cur_book->next; } return 1; } int cgc_cmd_quit(program_state *s, char *args) { (void) s; (void) args; return 0; } int cgc_cmd_get(program_state *s, char *args) { book_t *cur_book = &(s->book_list); if (args == NULL) return -1; if (0 == cgc_strlen(args)) return -1; if (NULL == s->current_dir) return -1; if (0 == cgc_strlen(s->current_dir)) return -1; while (cur_book != NULL) { if (cgc_strlen(cur_book->section) == cgc_strlen(s->current_dir)) { if (0 == cgc_strcmp(cur_book->section, s->current_dir)) { if (0 == cgc_strcmp(cur_book->name, args)) { cgc_transmit_str(STDOUT, cur_book->name); cgc_transmit_str(STDOUT, "\n"); cgc_transmit_str(STDOUT, cur_book->data); cgc_transmit_str(STDOUT, "\n\n"); } } } cur_book = cur_book->next; } return 1; } int cgc_cmd_make_sec(program_state *s, char *args) { cgc_size_t out = 0; cgc_size_t len = 3; char *name; if (cgc_strlen(args) == 0) { return -1; } assert((SIZE_MAX - len) - cgc_strlen(args) > cgc_strlen(s->current_dir)); len += cgc_strlen(s->current_dir); len += cgc_strlen(args); name = cgc_calloc(1, len); assert(name != NULL); if (cgc_strlen(s->current_dir) > 0) { out = cgc_strlcat(name, s->current_dir, len); assert(out <= len); } else { out = cgc_strlcat(name, "|", len); assert(out <= len); } out = cgc_strlcat(name, args, len); assert(out <= len); out = cgc_strlcat(name, "|", len); assert(out <= len); cgc_register_book(s, name, "", ""); return 1; } int cgc_cmd_put(program_state *s, char *args) { char *name; char *data; if (cgc_strlen(s->current_dir) == 0) { return -1; } name = cgc_strtok(args, " "); if (name == NULL) { return -1; } 
data = cgc_strtok(NULL, "\x00"); if (data == NULL) { return -1; } cgc_register_book(s, s->current_dir, name, data); return 1; } int cgc_cmd_list(program_state *s, char *args) { (void) args; book_t *cur_book = &(s->book_list); cgc_transmit_str(STDOUT, "current section: "); cgc_transmit_str(STDOUT, s->current_dir); cgc_transmit_str(STDOUT, "\n"); while (cur_book != NULL) { if (0 == cgc_strlen(cur_book->section)) break; if (0 == cgc_strncmp(cur_book->section, s->current_dir, cgc_strlen(s->current_dir))) { if (cgc_strlen(cur_book->section) - cgc_strlen(s->current_dir) > 0) { cgc_transmit_all(STDOUT, cur_book->section + cgc_strlen(s->current_dir), cgc_strlen(cur_book->section) - cgc_strlen(s->current_dir)); cgc_transmit_str(STDOUT, ":"); } cgc_transmit_str(STDOUT, cur_book->name); cgc_transmit_str(STDOUT, "\n"); } cur_book = cur_book->next; } cgc_transmit_str(STDOUT, "\n"); return 1; } void cgc_remove_newline(char *line) { cgc_size_t new_line = cgc_strlen(line) - 1; if (line[new_line] == '\n') line[new_line] = '\0'; } int cgc_receive_delim(int fd, char *buf, const cgc_size_t size, char delim) { cgc_size_t rx = 0; cgc_size_t rx_now = 0; int ret; if (!buf) return 1; if (!size) return 2; while (rx < size) { ret = cgc_receive(fd, buf + rx, 1, &rx_now); if (rx_now == 0) { //should never return until at least something was received //so consider this an error too return 3; } if (ret != 0) { return 3; } if (buf[rx] == delim) { break; } rx += rx_now; } return 0; } int cgc_process(program_state *s) { int ret; // cgc_size_t size; char *name; char *args; process_t method; while (1) { cgc_memset(s->input_buf, 0, sizeof(s->input_buf)); cgc_transmit_str(STDOUT, "> "); if (0 != cgc_receive_delim(0, s->input_buf, sizeof(s->input_buf) - 1, '\n')) { return -1; } cgc_remove_newline(s->input_buf); if (cgc_strlen(s->input_buf) == 0) continue; name = cgc_strtok(s->input_buf, " "); args = cgc_strtok(NULL, "\x00"); if (cgc_strlen(name) >= COMMAND_NAME_SIZE) return -1; method = 
cgc_get_command(s, name); if (method == NULL) { cgc_transmit_str(STDOUT, "invalid command: "); cgc_transmit_str(STDOUT, name); cgc_transmit_str(STDOUT, "\n"); continue; } ret = (method)(s, args); if (ret == -1) { cgc_transmit_str(STDOUT, "command failed: "); cgc_transmit_str(STDOUT, name); cgc_transmit_str(STDOUT, "\n"); return -1; } if (ret == 0) { return 0; } } } int main(int cgc_argc, char *cgc_argv[]) { program_state s; cgc_memset(&s, 0, sizeof(s)); command_t node_0x40f0; command_t node_0x40d0; command_t node_0x40b0; command_t node_0x4090; command_t node_0x4070; command_t node_0x4050; command_t node_0x4030; command_t node_0x4010; command_t node_0x3ff0; command_t node_0x3fd0; command_t node_0x3fb0; command_t node_0x3f90; command_t node_0x3f70; command_t node_0x3f50; command_t node_0x3f30; command_t node_0x3f10; command_t node_0x3ef0; command_t node_0x3ed0; command_t node_0x3eb0; command_t node_0x3e90; command_t node_0x3e70; command_t node_0x3e50; command_t node_0x3e30; command_t node_0x3e10; command_t node_0x3df0; command_t node_0x3dd0; command_t node_0x3db0; command_t node_0x3d90; command_t node_0x3d70; command_t node_0x3d50; command_t node_0x3d30; command_t node_0x3d10; command_t node_0x3cf0; command_t node_0x3cd0; command_t node_0x3cb0; command_t node_0x3c90; command_t node_0x3c70; command_t node_0x3c50; command_t node_0x3c30; command_t node_0x3c10; command_t node_0x3bf0; command_t node_0x3bd0; command_t node_0x3bb0; command_t node_0x3b90; command_t node_0x3b70; command_t node_0x3b50; node_0x40f0.value = 99; /* c */ node_0x40f0.method = &cgc_cmd_ch_sec; node_0x40d0.value = 101; /* e */ node_0x40d0.child = &node_0x40f0; node_0x40b0.value = 115; /* s */ node_0x40b0.child = &node_0x40d0; node_0x4090.value = 95; /* _ */ node_0x4090.child = &node_0x40b0; node_0x4070.value = 104; /* h */ node_0x4070.child = &node_0x4090; node_0x4050.value = 115; /* s */ node_0x4050.method = &cgc_cmd_compress; node_0x4030.value = 115; /* s */ node_0x4030.child = &node_0x4050; 
node_0x4010.value = 101; /* e */ node_0x4010.child = &node_0x4030; node_0x3ff0.value = 114; /* r */ node_0x3ff0.child = &node_0x4010; node_0x3fd0.value = 112; /* p */ node_0x3fd0.child = &node_0x3ff0; node_0x3fb0.value = 109; /* m */ node_0x3fb0.child = &node_0x3fd0; node_0x3f90.value = 111; /* o */ node_0x3f90.sibling = &node_0x4070; node_0x3f90.child = &node_0x3fb0; node_0x3f70.value = 99; /* c */ node_0x3f70.method = &cgc_cmd_cur_sec; node_0x3f50.value = 101; /* e */ node_0x3f50.child = &node_0x3f70; node_0x3f30.value = 115; /* s */ node_0x3f30.child = &node_0x3f50; node_0x3f10.value = 95; /* _ */ node_0x3f10.child = &node_0x3f30; node_0x3ef0.value = 114; /* r */ node_0x3ef0.child = &node_0x3f10; node_0x3ed0.value = 117; /* u */ node_0x3ed0.sibling = &node_0x3f90; node_0x3ed0.child = &node_0x3ef0; node_0x3eb0.value = 99; /* c */ node_0x3eb0.child = &node_0x3ed0; node_0x3e90.value = 116; /* t */ node_0x3e90.method = &cgc_cmd_get; node_0x3e70.value = 101; /* e */ node_0x3e70.child = &node_0x3e90; node_0x3e50.value = 103; /* g */ node_0x3e50.sibling = &node_0x3eb0; node_0x3e50.child = &node_0x3e70; node_0x3e30.value = 116; /* t */ node_0x3e30.method = &cgc_cmd_list; node_0x3e10.value = 115; /* s */ node_0x3e10.child = &node_0x3e30; node_0x3df0.value = 105; /* i */ node_0x3df0.child = &node_0x3e10; node_0x3dd0.value = 108; /* l */ node_0x3dd0.sibling = &node_0x3e50; node_0x3dd0.child = &node_0x3df0; node_0x3db0.value = 99; /* c */ node_0x3db0.method = &cgc_cmd_make_sec; node_0x3d90.value = 101; /* e */ node_0x3d90.child = &node_0x3db0; node_0x3d70.value = 115; /* s */ node_0x3d70.child = &node_0x3d90; node_0x3d50.value = 95; /* _ */ node_0x3d50.child = &node_0x3d70; node_0x3d30.value = 101; /* e */ node_0x3d30.child = &node_0x3d50; node_0x3d10.value = 107; /* k */ node_0x3d10.child = &node_0x3d30; node_0x3cf0.value = 97; /* a */ node_0x3cf0.child = &node_0x3d10; node_0x3cd0.value = 109; /* m */ node_0x3cd0.sibling = &node_0x3dd0; node_0x3cd0.child = &node_0x3cf0; 
node_0x3cb0.value = 116; /* t */ node_0x3cb0.method = &cgc_cmd_put; node_0x3c90.value = 117; /* u */ node_0x3c90.child = &node_0x3cb0; node_0x3c70.value = 112; /* p */ node_0x3c70.sibling = &node_0x3cd0; node_0x3c70.child = &node_0x3c90; node_0x3c50.value = 116; /* t */ node_0x3c50.method = &cgc_cmd_quit; node_0x3c30.value = 105; /* i */ node_0x3c30.child = &node_0x3c50; node_0x3c10.value = 117; /* u */ node_0x3c10.child = &node_0x3c30; node_0x3bf0.value = 113; /* q */ node_0x3bf0.sibling = &node_0x3c70; node_0x3bf0.child = &node_0x3c10; node_0x3bd0.value = 104; /* h */ node_0x3bd0.method = &cgc_cmd_search; node_0x3bb0.value = 99; /* c */ node_0x3bb0.child = &node_0x3bd0; node_0x3b90.value = 114; /* r */ node_0x3b90.child = &node_0x3bb0; node_0x3b70.value = 97; /* a */ node_0x3b70.child = &node_0x3b90; node_0x3b50.value = 101; /* e */ node_0x3b50.child = &node_0x3b70; s.command_list.value = 115; /* s */ s.command_list.sibling = &node_0x3bf0; s.command_list.child = &node_0x3b50; cgc_register_book(&s, "|", "", ""); cgc_cmd_ch_sec(&s, "|"); #include "cgc_far-include.h" return cgc_process(&s); }
11,399
9,657
# coding=utf-8 import numpy as np import torch.nn.functional as F from datautil.util import random_pairs_of_minibatches from alg.algs.ERM import ERM class Mixup(ERM): def __init__(self, args): super(Mixup, self).__init__(args) self.args = args def update(self, minibatches, opt, sch): objective = 0 for (xi, yi, di), (xj, yj, dj) in random_pairs_of_minibatches(self.args, minibatches): lam = np.random.beta(self.args.mixupalpha, self.args.mixupalpha) x = (lam * xi + (1 - lam) * xj).cuda().float() predictions = self.predict(x) objective += lam * F.cross_entropy(predictions, yi.cuda().long()) objective += (1 - lam) * \ F.cross_entropy(predictions, yj.cuda().long()) objective /= len(minibatches) opt.zero_grad() objective.backward() opt.step() if sch: sch.step() return {'class': objective.item()}
464
2,151
<gh_stars>1000+ // Copyright 2017 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/ui/app_list/chrome_app_list_model_updater.h" #include <unordered_map> #include <utility> #include "base/strings/utf_string_conversions.h" #include "chrome/browser/ui/app_list/app_list_client_impl.h" #include "chrome/browser/ui/app_list/chrome_app_list_item.h" #include "chrome/browser/ui/app_list/search/chrome_search_result.h" #include "chrome/browser/ui/ash/ash_util.h" #include "extensions/common/constants.h" #include "ui/base/models/menu_model.h" ChromeAppListModelUpdater::ChromeAppListModelUpdater(Profile* profile) : profile_(profile), weak_ptr_factory_(this) {} ChromeAppListModelUpdater::~ChromeAppListModelUpdater() {} void ChromeAppListModelUpdater::SetActive(bool active) { const bool was_active = !!app_list_controller_; if (was_active == active) return; app_list_controller_ = active ? AppListClientImpl::GetInstance()->GetAppListController() : nullptr; if (!app_list_controller_) return; // Activating this model updater should sync the cached model to Ash. std::vector<ash::mojom::AppListItemMetadataPtr> items_to_sync; for (auto const& item : items_) items_to_sync.push_back(item.second->CloneMetadata()); app_list_controller_->SetModelData(std::move(items_to_sync), search_engine_is_google_); } void ChromeAppListModelUpdater::AddItem( std::unique_ptr<ChromeAppListItem> app_item) { ash::mojom::AppListItemMetadataPtr item_data = app_item->CloneMetadata(); // Add to Chrome first leave all updates to observer methods. 
AddChromeItem(std::move(app_item)); if (app_list_controller_) app_list_controller_->AddItem(std::move(item_data)); } void ChromeAppListModelUpdater::AddItemToFolder( std::unique_ptr<ChromeAppListItem> app_item, const std::string& folder_id) { ash::mojom::AppListItemMetadataPtr item_data = app_item->CloneMetadata(); // Add to Chrome first leave all updates to observer methods. ChromeAppListItem* item_added = AddChromeItem(std::move(app_item)); item_added->SetChromeFolderId(folder_id); if (app_list_controller_) { app_list_controller_->AddItemToFolder(std::move(item_data), folder_id); // Set the item's default icon if it has one. if (!item_added->icon().isNull()) app_list_controller_->SetItemIcon(item_added->id(), item_added->icon()); } } void ChromeAppListModelUpdater::RemoveItem(const std::string& id) { if (app_list_controller_) app_list_controller_->RemoveItem(id); else RemoveChromeItem(id); } void ChromeAppListModelUpdater::RemoveUninstalledItem(const std::string& id) { if (app_list_controller_) app_list_controller_->RemoveUninstalledItem(id); else RemoveChromeItem(id); } void ChromeAppListModelUpdater::MoveItemToFolder(const std::string& id, const std::string& folder_id) { if (app_list_controller_) app_list_controller_->MoveItemToFolder(id, folder_id); else MoveChromeItemToFolder(id, folder_id); } void ChromeAppListModelUpdater::SetStatus(ash::AppListModelStatus status) { if (!app_list_controller_) return; app_list_controller_->SetStatus(status); } void ChromeAppListModelUpdater::SetState(ash::AppListState state) { if (!app_list_controller_) return; app_list_controller_->SetState(state); } void ChromeAppListModelUpdater::HighlightItemInstalledFromUI( const std::string& id) { if (!app_list_controller_) return; app_list_controller_->HighlightItemInstalledFromUI(id); } void ChromeAppListModelUpdater::SetSearchEngineIsGoogle(bool is_google) { search_engine_is_google_ = is_google; if (app_list_controller_) app_list_controller_->SetSearchEngineIsGoogle(is_google); } 
void ChromeAppListModelUpdater::SetSearchTabletAndClamshellAccessibleName( const base::string16& tablet_accessible_name, const base::string16& clamshell_accessible_name) { if (!app_list_controller_) return; app_list_controller_->SetSearchTabletAndClamshellAccessibleName( tablet_accessible_name, clamshell_accessible_name); } void ChromeAppListModelUpdater::SetSearchHintText( const base::string16& hint_text) { if (!app_list_controller_) return; app_list_controller_->SetSearchHintText(hint_text); } void ChromeAppListModelUpdater::UpdateSearchBox(const base::string16& text, bool initiated_by_user) { if (!app_list_controller_) return; app_list_controller_->UpdateSearchBox(text, initiated_by_user); } void ChromeAppListModelUpdater::PublishSearchResults( const std::vector<ChromeSearchResult*>& results) { for (auto* const result : results) result->set_model_updater(this); if (!app_list_controller_) return; std::vector<ash::mojom::SearchResultMetadataPtr> result_data; for (auto* result : results) result_data.push_back(result->CloneMetadata()); app_list_controller_->PublishSearchResults(std::move(result_data)); } void ChromeAppListModelUpdater::ActivateChromeItem(const std::string& id, int event_flags) { ChromeAppListItem* item = FindItem(id); if (!item) return; DCHECK(!item->is_folder()); item->Activate(event_flags); } //////////////////////////////////////////////////////////////////////////////// // Methods for updating Chrome items that never talk to ash. 
ChromeAppListItem* ChromeAppListModelUpdater::AddChromeItem( std::unique_ptr<ChromeAppListItem> app_item) { ChromeAppListItem* item = app_item.get(); items_[app_item->id()] = std::move(app_item); return item; } void ChromeAppListModelUpdater::RemoveChromeItem(const std::string& id) { items_.erase(id); } void ChromeAppListModelUpdater::MoveChromeItemToFolder( const std::string& id, const std::string& folder_id) { ChromeAppListItem* item = FindItem(id); if (!item) return; item->SetChromeFolderId(folder_id); } //////////////////////////////////////////////////////////////////////////////// // Methods only used by ChromeAppListItem that talk to ash directly. void ChromeAppListModelUpdater::SetItemIcon(const std::string& id, const gfx::ImageSkia& icon) { if (!app_list_controller_) return; app_list_controller_->SetItemIcon(id, icon); } void ChromeAppListModelUpdater::SetItemName(const std::string& id, const std::string& name) { if (!app_list_controller_) return; ChromeAppListItem* item = FindItem(id); if (!item) return; ash::mojom::AppListItemMetadataPtr data = item->CloneMetadata(); data->name = name; app_list_controller_->SetItemMetadata(id, std::move(data)); } void ChromeAppListModelUpdater::SetItemNameAndShortName( const std::string& id, const std::string& name, const std::string& short_name) { if (!app_list_controller_) return; ChromeAppListItem* item = FindItem(id); if (!item) return; ash::mojom::AppListItemMetadataPtr data = item->CloneMetadata(); data->name = name; data->short_name = short_name; app_list_controller_->SetItemMetadata(id, std::move(data)); } void ChromeAppListModelUpdater::SetItemPosition( const std::string& id, const syncer::StringOrdinal& new_position) { if (!app_list_controller_) return; ChromeAppListItem* item = FindItem(id); if (!item) return; ash::mojom::AppListItemMetadataPtr data = item->CloneMetadata(); data->position = new_position; app_list_controller_->SetItemMetadata(id, std::move(data)); } void 
ChromeAppListModelUpdater::SetItemFolderId(const std::string& id, const std::string& folder_id) { if (!app_list_controller_) return; ChromeAppListItem* item = FindItem(id); if (!item) return; ash::mojom::AppListItemMetadataPtr data = item->CloneMetadata(); data->folder_id = folder_id; app_list_controller_->SetItemMetadata(id, std::move(data)); } void ChromeAppListModelUpdater::SetItemIsInstalling(const std::string& id, bool is_installing) { if (!app_list_controller_) return; app_list_controller_->SetItemIsInstalling(id, is_installing); } void ChromeAppListModelUpdater::SetItemPercentDownloaded( const std::string& id, int32_t percent_downloaded) { if (!app_list_controller_) return; app_list_controller_->SetItemPercentDownloaded(id, percent_downloaded); } //////////////////////////////////////////////////////////////////////////////// // Methods only used by ChromeSearchResult that talk to ash directly. void ChromeAppListModelUpdater::SetSearchResultMetadata( const std::string& id, ash::mojom::SearchResultMetadataPtr metadata) { if (!app_list_controller_) return; app_list_controller_->SetSearchResultMetadata(std::move(metadata)); } void ChromeAppListModelUpdater::SetSearchResultIsInstalling( const std::string& id, bool is_installing) { if (!app_list_controller_) return; app_list_controller_->SetSearchResultIsInstalling(id, is_installing); } void ChromeAppListModelUpdater::SetSearchResultPercentDownloaded( const std::string& id, int percent_downloaded) { if (!app_list_controller_) return; app_list_controller_->SetSearchResultPercentDownloaded(id, percent_downloaded); } void ChromeAppListModelUpdater::NotifySearchResultItemInstalled( const std::string& id) { if (!app_list_controller_) return; app_list_controller_->NotifySearchResultItemInstalled(id); } //////////////////////////////////////////////////////////////////////////////// // Methods for item querying ChromeAppListItem* ChromeAppListModelUpdater::FindItem(const std::string& id) { return items_.count(id) ? 
items_[id].get() : nullptr; } size_t ChromeAppListModelUpdater::ItemCount() { return items_.size(); } ChromeAppListItem* ChromeAppListModelUpdater::ItemAtForTest(size_t index) { DCHECK_LT(index, items_.size()); DCHECK_LE(0u, index); auto it = items_.begin(); for (size_t i = 0; i < index; ++i) ++it; return it->second.get(); } ChromeAppListItem* ChromeAppListModelUpdater::FindFolderItem( const std::string& folder_id) { ChromeAppListItem* item = FindItem(folder_id); return (item && item->is_folder()) ? item : nullptr; } bool ChromeAppListModelUpdater::FindItemIndexForTest(const std::string& id, size_t* index) { *index = 0; for (auto it = items_.begin(); it != items_.end(); ++it) { if (it->second->id() == id) return true; ++(*index); } return false; } bool ChromeAppListModelUpdater::SearchEngineIsGoogle() { return search_engine_is_google_; } void ChromeAppListModelUpdater::GetIdToAppListIndexMap( GetIdToAppListIndexMapCallback callback) { if (!app_list_controller_) return; app_list_controller_->GetIdToAppListIndexMap(base::BindOnce( [](GetIdToAppListIndexMapCallback callback, const base::flat_map<std::string, uint16_t>& indexes) { std::move(callback).Run(indexes); }, std::move(callback))); } size_t ChromeAppListModelUpdater::BadgedItemCount() { size_t count = 0u; for (auto& key_val : items_) { if (key_val.second->IsBadged()) ++count; } return count; } void ChromeAppListModelUpdater::GetContextMenuModel( const std::string& id, GetMenuModelCallback callback) { ChromeAppListItem* item = FindItem(id); // TODO(stevenjb/jennyz): Implement this for folder items. // TODO(newcomer): Add histograms for folder items. 
if (!item || item->is_folder()) { std::move(callback).Run(nullptr); return; } item->GetContextMenuModel(std::move(callback)); } void ChromeAppListModelUpdater::ContextMenuItemSelected(const std::string& id, int command_id, int event_flags) { ChromeAppListItem* chrome_item = FindItem(id); if (chrome_item) chrome_item->ContextMenuItemSelected(command_id, event_flags); } //////////////////////////////////////////////////////////////////////////////// // Methods for AppListSyncableService void ChromeAppListModelUpdater::ResolveOemFolderPosition( const syncer::StringOrdinal& preferred_oem_position, ResolveOemFolderPositionCallback callback) { if (!app_list_controller_) return; app_list_controller_->ResolveOemFolderPosition( preferred_oem_position, base::BindOnce( [](base::WeakPtr<ChromeAppListModelUpdater> self, ResolveOemFolderPositionCallback callback, ash::mojom::AppListItemMetadataPtr folder_data) { if (!self) return; ChromeAppListItem* chrome_oem_folder = nullptr; if (folder_data) { chrome_oem_folder = self->FindFolderItem(ash::kOemFolderId); chrome_oem_folder->SetMetadata(std::move(folder_data)); } std::move(callback).Run(chrome_oem_folder); }, weak_ptr_factory_.GetWeakPtr(), std::move(callback))); } void ChromeAppListModelUpdater::AddItemToOemFolder( std::unique_ptr<ChromeAppListItem> item, app_list::AppListSyncableService::SyncItem* oem_sync_item, const std::string& oem_folder_name, const syncer::StringOrdinal& preferred_oem_position) { syncer::StringOrdinal position_to_try = preferred_oem_position; // If we find a valid postion in the sync item, then we'll try it. 
if (oem_sync_item && oem_sync_item->item_ordinal.IsValid()) position_to_try = oem_sync_item->item_ordinal; if (app_list_controller_) { app_list_controller_->FindOrCreateOemFolder( oem_folder_name, position_to_try, base::BindOnce( [](base::WeakPtr<ChromeAppListModelUpdater> self, std::unique_ptr<ChromeAppListItem> item, ash::mojom::AppListItemMetadataPtr /* oem_folder */) { if (!self) return; self->AddItemToFolder(std::move(item), ash::kOemFolderId); }, weak_ptr_factory_.GetWeakPtr(), std::move(item))); } else { ChromeAppListItem* item_added = AddChromeItem(std::move(item)); item_added->SetChromeFolderId(ash::kOemFolderId); // If we don't have an OEM folder in Chrome, create one first. ChromeAppListItem* oem_folder = FindFolderItem(ash::kOemFolderId); if (!oem_folder) { std::unique_ptr<ChromeAppListItem> new_oem_folder = std::make_unique<ChromeAppListItem>(profile_, ash::kOemFolderId, this); oem_folder = AddChromeItem(std::move(new_oem_folder)); oem_folder->SetChromeIsFolder(true); } oem_folder->SetChromeName(oem_folder_name); oem_folder->SetChromePosition(position_to_try); } } void ChromeAppListModelUpdater::UpdateAppItemFromSyncItem( app_list::AppListSyncableService::SyncItem* sync_item, bool update_name, bool update_folder) { // In chrome & ash: ChromeAppListItem* chrome_item = FindItem(sync_item->item_id); if (!chrome_item) return; VLOG(2) << this << " UpdateAppItemFromSyncItem: " << sync_item->ToString(); if (sync_item->item_ordinal.IsValid() && (!chrome_item->position().IsValid() || !chrome_item->position().Equals(sync_item->item_ordinal))) { // This updates the position in both chrome and ash: chrome_item->SetPosition(sync_item->item_ordinal); } // Only update the item name if it is a Folder or the name is empty. 
if (update_name && sync_item->item_name != chrome_item->name() && (chrome_item->is_folder() || chrome_item->name().empty())) { // This updates the name in both chrome and ash: chrome_item->SetName(sync_item->item_name); } if (update_folder && chrome_item->folder_id() != sync_item->parent_id) { VLOG(2) << " Moving Item To Folder: " << sync_item->parent_id; // This updates the folder in both chrome and ash: MoveItemToFolder(chrome_item->id(), sync_item->parent_id); } } void ChromeAppListModelUpdater::SetDelegate( AppListModelUpdaterDelegate* delegate) { delegate_ = delegate; } //////////////////////////////////////////////////////////////////////////////// // Methods called from Ash: void ChromeAppListModelUpdater::OnFolderCreated( ash::mojom::AppListItemMetadataPtr item) { DCHECK(item->is_folder); ChromeAppListItem* chrome_item = FindItem(item->id); // If the item already exists, we should have set its information properly. if (chrome_item) return; // Otherwise, we detect an item is created in Ash which is not added into our // Chrome list yet. This only happens when a folder is created. std::unique_ptr<ChromeAppListItem> new_item = std::make_unique<ChromeAppListItem>(profile_, item->id, this); chrome_item = AddChromeItem(std::move(new_item)); chrome_item->SetMetadata(std::move(item)); if (delegate_) delegate_->OnAppListItemAdded(chrome_item); } void ChromeAppListModelUpdater::OnFolderDeleted( ash::mojom::AppListItemMetadataPtr item) { DCHECK(item->is_folder); items_.erase(item->id); // We don't need to notify |delegate_| here. Currently |delegate_| sends this // event to AppListSyncableService only, and AppListSyncableService doesn't // do anything when a folder is deleted. For more details, refer to // AppListSyncableService::ModelUpdaterDelegate::OnAppListItemWillBeDeleted. 
} void ChromeAppListModelUpdater::OnItemUpdated( ash::mojom::AppListItemMetadataPtr item) { ChromeAppListItem* chrome_item = FindItem(item->id); DCHECK(chrome_item); chrome_item->SetMetadata(std::move(item)); if (delegate_) delegate_->OnAppListItemUpdated(chrome_item); }
6,844
476
<reponame>tejasvinu/hetu-core /* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.prestosql.plugin.ml; import com.google.common.util.concurrent.SimpleTimeLimiter; import com.google.common.util.concurrent.TimeLimiter; import libsvm.svm; import libsvm.svm_model; import libsvm.svm_node; import libsvm.svm_parameter; import libsvm.svm_problem; import java.io.File; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.file.Files; import java.util.List; import java.util.SortedMap; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; import static com.google.common.base.Throwables.throwIfUnchecked; import static io.airlift.concurrent.Threads.threadsNamed; import static java.util.Objects.requireNonNull; import static java.util.concurrent.Executors.newCachedThreadPool; public abstract class AbstractSvmModel implements Model { protected svm_model model; protected svm_parameter params; protected AbstractSvmModel(svm_parameter params) { this.params = requireNonNull(params, "params is null"); } protected AbstractSvmModel(svm_model model) { this.model = requireNonNull(model, "model is null"); } @Override public byte[] getSerializedData() { File file = null; try { // libsvm doesn't have a method to serialize the model into a buffer, so write it out to a file and then read it back in file = File.createTempFile("svm", null); 
svm.svm_save_model(file.getCanonicalPath(), model); return Files.readAllBytes(file.toPath()); } catch (IOException e) { throw new UncheckedIOException(e); } finally { if (file != null) { //noinspection ResultOfMethodCallIgnored file.delete(); } } } @Override public void train(Dataset dataset) { params.svm_type = getLibsvmType(); svm_problem problem = toSvmProblem(dataset); ExecutorService service = newCachedThreadPool(threadsNamed("libsvm-trainer-" + System.identityHashCode(this) + "-%s")); try { TimeLimiter limiter = SimpleTimeLimiter.create(service); //TODO: this time limit should be configurable model = limiter.callWithTimeout(getTrainingFunction(problem, params), 1, TimeUnit.HOURS); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw new RuntimeException(e); } catch (ExecutionException e) { Throwable cause = e.getCause(); if (cause != null) { throwIfUnchecked(cause); throw new RuntimeException(cause); } } catch (Exception e) { throwIfUnchecked(e); throw new RuntimeException(e); } finally { service.shutdownNow(); } } private static Callable<svm_model> getTrainingFunction(svm_problem problem, svm_parameter param) { return () -> svm.svm_train(problem, param); } protected abstract int getLibsvmType(); private static svm_problem toSvmProblem(Dataset dataset) { svm_problem problem = new svm_problem(); List<Double> labels = dataset.getLabels(); problem.l = labels.size(); problem.y = new double[labels.size()]; for (int i = 0; i < labels.size(); i++) { problem.y[i] = labels.get(i); } problem.x = new svm_node[labels.size()][]; for (int i = 0; i < dataset.getDatapoints().size(); i++) { problem.x[i] = toSvmNodes(dataset.getDatapoints().get(i)); } return problem; } protected static svm_node[] toSvmNodes(FeatureVector features) { svm_node[] nodes = new svm_node[features.size()]; int i = 0; // Features map is sorted, so we can just flatten it to a list for libsvm for (SortedMap.Entry<Integer, Double> feature : features.getFeatures().entrySet()) { 
nodes[i] = new svm_node(); nodes[i].index = feature.getKey(); nodes[i].value = feature.getValue(); i++; } return nodes; } }
2,015
892
{ "schema_version": "1.2.0", "id": "GHSA-942j-wgv7-6vhf", "modified": "2022-05-14T00:01:37Z", "published": "2022-05-05T00:00:18Z", "aliases": [ "CVE-2022-28556" ], "details": "Tenda AC15 US_AC15V1.0BR_V15.03.05.20_multi_TDE01.bin is vulnerable to Buffer Overflow. The stack overflow vulnerability lies in the /goform/setpptpservercfg interface of the web. The sent post data startip and endip are copied to the stack using the sanf function, resulting in stack overflow. Similarly, this vulnerability can be used together with CVE-2021-44971", "severity": [ { "type": "CVSS_V3", "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H" } ], "affected": [ ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-28556" }, { "type": "WEB", "url": "https://github.com/doudoudedi/TendaAC15_vul/blob/main/TendaAC15-vul.md" } ], "database_specific": { "cwe_ids": [ "CWE-770" ], "severity": "HIGH", "github_reviewed": false } }
501
365
# -*- coding: utf-8 -*- from benedict import benedict import unittest class github_issue_0038_test_case(unittest.TestCase): """ https://github.com/fabiocaccamo/python-benedict/issues/38 To run this specific test: - Run python -m unittest tests.github.test_issue_0038 """ @staticmethod def get_dict_generator(): for k, v in enumerate('abcd'): yield k, v def test_init_with_generator(self): b = benedict(self.get_dict_generator()) self.assertEqual(b, {0: 'a', 1: 'b', 2: 'c', 3: 'd'}) self.assertEqual(b.to_json(), '{"0": "a", "1": "b", "2": "c", "3": "d"}') # recast benedict to dict and back to benedict b = benedict(self.get_dict_generator()) d = dict(b) b = benedict(d) self.assertEqual(b, {0: 'a', 1: 'b', 2: 'c', 3: 'd'}) self.assertEqual(b.to_json(), '{"0": "a", "1": "b", "2": "c", "3": "d"}')
455
3,934
# This sample tests that type errors within a finally clause are # property detected. def func1() -> None: file = None try: raise ValueError() except Exception: return None finally: # This should generate an error. file.name
101
316
{ "name": "@chart-parts/examples-web", "private": true, "version": "0.1.3", "repository": { "type": "git", "url": "https://github.com/microsoft/chart-parts.git" }, "scripts": { "//serve": "webpack-dev-server --mode development --open --host 0.0.0.0", "storybook": "start-storybook -p 6006", "start": "npm run storybook", "build-storybook": "build-storybook" }, "dependencies": { "@babel/core": "^7.15.5", "@chart-parts/examples": "workspace:packages/docs-examples", "@chart-parts/orchestrator": "workspace:packages/processing-orchestrator", "@chart-parts/react": "workspace:packages/api-react", "@chart-parts/react-svg-renderer": "workspace:packages/renderer-react-svg", "@chart-parts/scales": "workspace:packages/util-scales", "@chart-parts/testdata": "workspace:packages/docs-testdata", "@chart-parts/transform": "workspace:packages/util-transform", "@storybook/addon-actions": "^6.3.8", "@storybook/addon-links": "^6.3.8", "@storybook/addons": "^6.3.8", "@storybook/react": "^6.3.8", "babel-loader": "^8.2.2", "d3-array": "^2.12.1", "d3-scale": "^3.3.0", "react": "^17.0.2", "react-dom": "^17.0.2", "styled-components": "^5.3.1" }, "devDependencies": { "@essex/scripts": "^18.2.0", "@types/d3-array": "^2.12.3", "@types/d3-scale": "^2.2.6", "@types/react": "^17.0.24", "@types/react-dom": "^17.0.9", "@types/storybook__react": "^5.2.1", "@types/styled-components": "^5.1.14", "core-js": "^3.18.0", "html-webpack-plugin": "^5.3.2", "npm-run-all": "^4.1.5", "react-docgen-typescript-webpack-plugin": "^1.1.0", "regenerator-runtime": "^0.13.9", "ts-loader": "^9.2.6", "typescript": "^4.4.3", "webpack": "^5.53.0", "webpack-cli": "^4.8.0", "webpack-dev-server": "^4.2.1" } }
863
746
package org.protege.editor.owl.model.selection.axioms; import org.semanticweb.owlapi.model.OWLAxiom; import org.semanticweb.owlapi.model.OWLOntology; import java.util.HashSet; import java.util.Set; /** * User: nickdrummond * Date: May 21, 2008 */ public class AllAxiomsStrategy extends AbstractAxiomSelectionStrategy { public String getName() { return "All axioms in the specified ontologies"; } public Set<OWLAxiom> getAxioms(Set<OWLOntology> ontologies) { Set<OWLAxiom> axioms = new HashSet<>(); for (OWLOntology ont : ontologies){ axioms.addAll(ont.getAxioms()); } return axioms; } }
276
1,847
// Copyright (c) 2020 The Orbit Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef ORBIT_GL_STATUS_LISTENER_H_ #define ORBIT_GL_STATUS_LISTENER_H_ /** * This interface is used to communicate status of a background * task to the UI. Since there could be multiple background tasks * working at once the caller should keep track of status ids * and use them to update or clear status messages. * * The implementation of this class is not supposed to be thread-safe * it assumes methods are invoked on the main thread. * * Example usage: * * uint64_t status_id = listener->AddStatus(""); */ class StatusListener { public: virtual ~StatusListener() = default; [[nodiscard]] virtual uint64_t AddStatus(std::string message) = 0; virtual void ClearStatus(uint64_t status_id) = 0; virtual void UpdateStatus(uint64_t status_id, std::string message) = 0; }; #endif // ORBIT_GL_STATUS_LISTENER_H_
296
1,016
<gh_stars>1000+ package com.thinkbiganalytics.kylo.catalog.spark; /*- * #%L * Kylo Catalog Core * %% * Copyright (C) 2017 - 2018 ThinkBig Analytics, a Teradata Company * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import com.google.common.annotations.VisibleForTesting; import com.thinkbiganalytics.kylo.catalog.api.KyloCatalogConstants; import com.thinkbiganalytics.kylo.catalog.api.KyloCatalogDataSetAccess; import com.thinkbiganalytics.kylo.catalog.rest.model.DataSetTemplate; import com.thinkbiganalytics.kylo.catalog.spi.DataSetOptions; import org.apache.hadoop.conf.Configuration; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; /** * A skeletal implementation of a {@link KyloCatalogDataSetAccess} that stores values in a {@link DataSetOptions} object. * * @param <R> builder type */ @SuppressWarnings("unchecked") public abstract class AbstractDataSetOptionsAccess<R extends KyloCatalogDataSetAccess> extends AbstractKyloCatalogDataSetAccess<R> { /** * Hadoop configuration */ @Nonnull protected final Configuration hadoopConfiguration; /** * Read options */ @Nonnull protected final DataSetOptions options; /** * Loads resources for accessing data sets */ @Nonnull protected final DataSourceResourceLoader resourceLoader; /** * Constructs an {@code AbstractKyloCatalogDataSetAccess}. 
*/ public AbstractDataSetOptionsAccess(@Nonnull final DataSetOptions options, @Nonnull final Configuration hadoopConfiguration, @Nonnull final DataSourceResourceLoader resourceLoader) { this.options = options; this.hadoopConfiguration = hadoopConfiguration; this.resourceLoader = resourceLoader; } @Nonnull @Override public final R addFile(@Nullable final String path) { if (path != null) { resourceLoader.addFile(path); } return (R) this; } @Nonnull @Override public final R addJar(@Nullable final String path) { if (path != null && resourceLoader.addJar(path)) { options.addJar(path); } return (R) this; } @Nonnull @Override public final R addJars(@Nullable final List<String> paths) { if (paths != null && resourceLoader.addJars(paths)) { options.addJars(paths); } return (R) this; } @Nonnull public R dataSet(@Nonnull final DataSetTemplate dataSet) { if (dataSet.getFiles() != null) { addFiles(dataSet.getFiles()); } if (dataSet.getFormat() != null) { format(dataSet.getFormat()); } if (dataSet.getJars() != null) { addJars(dataSet.getJars()); } if (dataSet.getOptions() != null) { options(dataSet.getOptions()); } if (dataSet.getPaths() != null) { options.setPaths(dataSet.getPaths()); } return (R) this; } @Nonnull @Override public final R format(@Nonnull final String source) { options.setFormat(source); return (R) this; } @Nonnull @Override public R option(@Nonnull final String key, @Nullable final String value) { if (key.startsWith(KyloCatalogConstants.HADOOP_CONF_PREFIX)) { hadoopConfiguration.set(key.substring(KyloCatalogConstants.HADOOP_CONF_PREFIX.length()), value); } options.setOption(key, value); return (R) this; } /** * Gets the data set options. */ @Nonnull @VisibleForTesting DataSetOptions getOptions() { return options; } }
1,617
855
// Copyright 2017 Google Inc. All Rights Reserved. // // Use of this source code is governed by an MIT-style // license that can be found in the LICENSE file or at // https://opensource.org/licenses/MIT. #ifndef PIK_OPSIN_IMAGE_H_ #define PIK_OPSIN_IMAGE_H_ // Converts to XYB color space. #include <stdint.h> #include <cstdlib> #include <vector> #include "pik/codec.h" #include "pik/compiler_specific.h" #include "pik/opsin_params.h" namespace pik { // r, g, b are linear. static PIK_INLINE void OpsinAbsorbance(const float r, const float g, const float b, float out[3]) { const float* mix = &kOpsinAbsorbanceMatrix[0]; const float* bias = &kOpsinAbsorbanceBias[0]; out[0] = mix[0] * r + mix[1] * g + mix[2] * b + bias[0]; out[1] = mix[3] * r + mix[4] * g + mix[5] * b + bias[1]; out[2] = mix[6] * r + mix[7] * g + mix[8] * b + bias[2]; } void LinearToXyb(const float r, const float g, const float b, float* PIK_RESTRICT valx, float* PIK_RESTRICT valy, float* PIK_RESTRICT valz); // Returns the opsin XYB for the part of the image bounded by rect. Image3F OpsinDynamicsImage(const CodecInOut* in, const Rect& rect); // DEPRECATED, used by opsin_image_wrapper. Image3F OpsinDynamicsImage(const Image3B& srgb); } // namespace pik #endif // PIK_OPSIN_IMAGE_H_
577
369
// Copyright (c) 2017-2021, Mudita Sp. z.o.o. All rights reserved. // For licensing, see https://github.com/mudita/MuditaOS/LICENSE.md #pragma once #include "AlarmOptionsItem.hpp" #include <application-alarm-clock/data/AlarmsData.hpp> #include <apps-common/ApplicationCommon.hpp> #include <apps-common/AudioOperations.hpp> #include <apps-common/audio/SoundsPlayer.hpp> #include <tags_fetcher/TagsFetcher.hpp> namespace gui { class AlarmMusicOptionsItem : public AlarmOptionsItem { private: std::function<void(const UTF8 &text)> navBarTemporaryMode = nullptr; std::function<void()> navBarRestoreFromTemporaryMode = nullptr; /// pointer to audio operations which allows to make audio preview std::vector<tags::fetcher::Tags> alarmSoundList; std::vector<tags::fetcher::Tags> getMusicFilesList(); public: explicit AlarmMusicOptionsItem(app::ApplicationCommon *app, const std::string &description, std::shared_ptr<SoundsPlayer> player, std::function<void(const UTF8 &text)> navBarTemporaryMode = nullptr, std::function<void()> navBarRestoreFromTemporaryMode = nullptr); private: [[nodiscard]] std::string getTitle(const std::string &filePath); [[nodiscard]] std::string getFilePath(const std::string &title); }; } /* namespace gui */
643
607
/* * This file is a part of the open source stm32plus library. * Copyright (c) 2011,2012,2013,2014 <NAME> <www.andybrown.me.uk> * Please see website for licensing terms. */ #pragma once namespace stm32plus { /** * Subclass of the I2CMasterPollingFeature that specialises it for the case * where both the address (which in this case is probably an EEPROM index) is * two bytes wide. The MSB of the address is the first to be transmitted, * the LSB the second. */ class I2CTwoByteMasterPollingFeature : public I2CMasterPollingFeature { public: I2CTwoByteMasterPollingFeature(I2C& i2c); bool writeByte(uint16_t address,uint8_t input) const; bool readByte(uint16_t address,uint8_t& output) const; bool prepareRead(uint16_t address) const; bool readBytes(uint16_t address,uint8_t *output,uint32_t count) const; bool prepareWrite(uint16_t address) const; bool writeBytes(uint16_t address,const uint8_t *input,uint32_t count) const; }; /** * Constructor * @param i2c The base peripheral class */ inline I2CTwoByteMasterPollingFeature::I2CTwoByteMasterPollingFeature(I2C& i2c) : I2CMasterPollingFeature(i2c) { // force the address width to two bytes i2c.setAddressSize(2); // default timeout is 5 seconds _timeout=5000; } /** * Prepare for reading * @param address The register index * @return true if it worked */ inline bool I2CTwoByteMasterPollingFeature::prepareRead(uint16_t address) const { uint8_t bytes[2]; bytes[0]=address>>8; bytes[1]=address; return I2CMasterPollingFeature::prepareRead(bytes); } /** * Read multiple bytes * @param address The register index * @param output Where to store them * @param count how many to read * @return true if it worked */ inline bool I2CTwoByteMasterPollingFeature::readBytes(uint16_t address,uint8_t *output,uint32_t count) const { uint8_t bytes[2]; bytes[0]=address>>8; bytes[1]=address; return I2CMasterPollingFeature::readBytes(bytes,output,count); } /** * Prepare for writing * @param address The register index * @return true if it worked */ inline bool 
I2CTwoByteMasterPollingFeature::prepareWrite(uint16_t address) const { uint8_t bytes[2]; bytes[0]=address>>8; bytes[1]=address; return I2CMasterPollingFeature::prepareWrite(bytes); } /** * Write multiple bytes * @param address The register index * @param input The bytes to write * @param count how many to write * @return true if it worked */ inline bool I2CTwoByteMasterPollingFeature::writeBytes(uint16_t address,const uint8_t *input,uint32_t count) const { uint8_t bytes[2]; bytes[0]=address>>8; bytes[1]=address; return I2CMasterPollingFeature::writeBytes(bytes,input,count); } /** * Read a byte from the slave * @param address The register number * @param output Where to put the byte * @return false if there's an error, true otherwise */ inline bool I2CTwoByteMasterPollingFeature::readByte(uint16_t address,uint8_t& output) const { uint8_t bytes[2]; bytes[0]=address>>8; bytes[1]=address; return I2CMasterPollingFeature::readBytes(bytes,&output,1); } /** * Write a single byte * @param address The register address * @param input The byte to write * @return true if it works, false if not */ inline bool I2CTwoByteMasterPollingFeature::writeByte(uint16_t address,uint8_t input) const { uint8_t bytes[2]; bytes[0]=address>>8; bytes[1]=address; return I2CMasterPollingFeature::writeBytes(bytes,&input,1); } }
1,315
559
// NOTE(review): "<filename>…" below is a dataset-extraction artifact, not C++;
// it was present in the original chunk and is kept verbatim.
<filename>05-AmbientOcclusion/Passes/AmbientOcclusionPass.cpp
/**********************************************************************************************************************
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
# following conditions are met:
# * Redistributions of code must retain the copyright notice, this list of conditions and the following disclaimer.
# * Neither the name of NVIDIA CORPORATION nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************************************************************/

#include "AmbientOcclusionPass.h"

// Some global vars, used to simplify changing shader location & entry points
namespace {
	// Where is our shader located?
	const char* kFileRayTrace = "Tutorial05\\aoTracing.rt.hlsl";

	// What function names are used for the shader entry points for various shaders?
	const char* kEntryPointRayGen = "AoRayGen";
	const char* kEntryPointMiss0 = "AoMiss";
	const char* kEntryAoAnyHit = "AoAnyHit";
};

// One-time setup: request the G-buffer inputs/output this pass reads and
// writes, point at a default scene, and build the DXR ray-tracing program.
// Returns true unconditionally (no failure path is reported here).
bool AmbientOcclusionPass::initialize(RenderContext* pRenderContext, ResourceManager::SharedPtr pResManager)
{
	// Keep a copy of our resource manager; request needed buffer resources
	mpResManager = pResManager;
	mpResManager->requestTextureResources({ "WorldPosition", "WorldNormal", ResourceManager::kOutputChannel });

	// Set the default scene to load
	mpResManager->setDefaultSceneName("Data/pink_room/pink_room.fscene");

	// Create wrapper around a ray tracing pass. Specify our ray generation shader and ray-specific shaders
	mpRays = RayLaunch::create(kFileRayTrace, kEntryPointRayGen);
	mpRays->addMissShader(kFileRayTrace, kEntryPointMiss0);
	// No closest hit shader needed, pass in a null shader
	mpRays->addHitShader (kFileRayTrace, "", kEntryAoAnyHit);

	// Now that we've passed all our shaders in, compile. If we have a scene, let it know.
	mpRays->compileRayProgram();
	if (mpScene) mpRays->setScene(mpScene);
	return true;
}

// Called when a new scene is loaded: stash it (as an RtScene), hand it to the
// ray tracer, and derive a scene-scale-relative default AO radius.
void AmbientOcclusionPass::initScene(RenderContext* pRenderContext, Scene::SharedPtr pScene)
{
	// Stash a copy of the scene. To use our DXR wrappers, we currently require a Falcor::RtScene
	// (as Falcor::Scene currently works only for rasterization). This is a distinction that will
	// go away as DirectX Raytracing becomes available widely. RtScene is derived from Scene, and
	// all scenes loaded in this set of tutorial apps are RtScenes, so we just do a cast here.
	mpScene = std::dynamic_pointer_cast<RtScene>(pScene);

	// Pass our scene to our ray tracer (if initialized)
	if (mpRays) mpRays->setScene(mpScene);
	if (!mpScene) return;   // dynamic_pointer_cast yields null for non-RtScene inputs

	// Set a default AO radius when we load a new scene.
	// 5% of the scene's bounding radius, clamped to at least 0.1 world units.
	mAORadius = glm::max(0.1f, mpScene->getRadius() * 0.05f);
}

// Per-frame GUI: expose the AO radius and ray count; flag other passes to
// refresh when either changes.
void AmbientOcclusionPass::renderGui(Gui* pGui)
{
	// Add a GUI option to allow the user to dynamically change the AO radius
	int dirty = 0;
	// Step size scales with the current radius (1% per tick)
	dirty |= (int)pGui->addFloatVar("AO radius", mAORadius, 1e-4f, 1e38f, mAORadius * 0.01f);
	dirty |= (int)pGui->addIntVar("Num AO Rays", mNumRaysPerPixel, 1, 64);

	// If changed, let other passes know we changed rendering parameters
	if (dirty) setRefreshFlag();
}

// Per-frame work: bind constants and G-buffer textures, then launch one AO
// ray-generation invocation per screen pixel.
void AmbientOcclusionPass::execute(RenderContext* pRenderContext)
{
	// Get our output buffer; clear it to black.
	Texture::SharedPtr pDstTex = mpResManager->getClearedTexture(ResourceManager::kOutputChannel, vec4(0.0f, 0.0f, 0.0f, 0.0f));

	// Do we have all the resources we need to render? If not, return
	if (!pDstTex || !mpRays || !mpRays->readyToRender()) return;

	// Set our ray tracing shader variables for our ray generation shader
	auto rayGenVars = mpRays->getRayGenVars();
	rayGenVars["RayGenCB"]["gFrameCount"] = mFrameCount++;   // varies per frame to decorrelate random samples
	rayGenVars["RayGenCB"]["gAORadius"]   = mAORadius;
	rayGenVars["RayGenCB"]["gMinT"]       = mpResManager->getMinTDist();  // From the UI dropdown
	rayGenVars["RayGenCB"]["gNumRays"]    = uint32_t(mNumRaysPerPixel);
	rayGenVars["gPos"]    = mpResManager->getTexture("WorldPosition");
	rayGenVars["gNorm"]   = mpResManager->getTexture("WorldNormal");
	rayGenVars["gOutput"] = pDstTex;

	// Shoot our AO rays
	mpRays->execute( pRenderContext, mpResManager->getScreenSize() );
}
1,643
4,081
// NOTE(review): "<filename>…" below is a dataset-extraction artifact, not Java;
// it was present in the original chunk and is kept verbatim.
<filename>core/common/src/test/java/alluxio/conf/AlluxioPropertiesTest.java
/*
 * The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
 * (the "License"). You may not use this work except in compliance with the License, which is
 * available at www.apache.org/licenses/LICENSE-2.0
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
 * either express or implied, as more fully set forth in the License.
 *
 * See the NOTICE file distributed with this work for information regarding copyright ownership.
 */

package alluxio.conf;

import static java.util.stream.Collectors.toSet;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;

import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;

/**
 * Unit tests for the {@link AlluxioProperties} class.
 */
public class AlluxioPropertiesTest {

  // Fresh property store under test; re-created per-instance by JUnit.
  private AlluxioProperties mProperties = new AlluxioProperties();
  // A key registered with a default value of "value".
  private PropertyKey mKeyWithValue;
  // A key registered with no default value (resolves to null until set).
  private PropertyKey mKeyWithoutValue;

  /** Registers the two test keys before each test. */
  @Before
  public void before() {
    mKeyWithValue = new PropertyKey.Builder("key.with.value").setDefaultValue("value").build();
    mKeyWithoutValue = new PropertyKey.Builder("key.without.value").build();
  }

  /** Unregisters the test keys so other tests see a clean global registry. */
  @After
  public void after() {
    PropertyKey.unregister(mKeyWithValue);
    PropertyKey.unregister(mKeyWithoutValue);
  }

  /** get() falls back to the key's default, and reflects runtime puts. */
  @Test
  public void get() {
    assertEquals("value", mProperties.get(mKeyWithValue));
    assertEquals(null, mProperties.get(mKeyWithoutValue));
    mProperties.put(mKeyWithoutValue, "newValue1", Source.RUNTIME);
    assertEquals("newValue1", mProperties.get(mKeyWithoutValue));
  }

  /** clear() removes runtime values but keys with defaults revert to them. */
  @Test
  public void clear() {
    mProperties.put(mKeyWithValue, "ignored1", Source.RUNTIME);
    mProperties.put(mKeyWithoutValue, "ignored2", Source.RUNTIME);
    mProperties.clear();
    assertEquals(null, mProperties.get(mKeyWithoutValue));
    assertEquals("value", mProperties.get(mKeyWithValue));
  }

  /**
   * put() respects source precedence: a lower-priority source does not
   * overwrite, same priority overwrites, higher priority overwrites.
   */
  @Test
  public void put() {
    mProperties.put(mKeyWithValue, "value1", Source.SYSTEM_PROPERTY);
    mProperties.put(mKeyWithoutValue, "value2", Source.SYSTEM_PROPERTY);
    assertEquals("value1", mProperties.get(mKeyWithValue));
    assertEquals("value2", mProperties.get(mKeyWithoutValue));
    mProperties.put(mKeyWithValue, "valueLowerPriority", Source.siteProperty(""));
    assertEquals("value1", mProperties.get(mKeyWithValue));
    mProperties.put(mKeyWithValue, "valueSamePriority", Source.SYSTEM_PROPERTY);
    assertEquals("valueSamePriority", mProperties.get(mKeyWithValue));
    mProperties.put(mKeyWithValue, "valueHigherPriority", Source.RUNTIME);
    assertEquals("valueHigherPriority", mProperties.get(mKeyWithValue));
  }

  /** remove() restores the default value and DEFAULT source. */
  @Test
  public void remove() {
    mProperties.remove(mKeyWithValue);
    assertEquals(mKeyWithValue.getDefaultValue(), mProperties.get(mKeyWithValue));
    assertEquals(Source.DEFAULT, mProperties.getSource(mKeyWithValue));
  }

  /**
   * isSet() is true for keys with a default or an explicit value; note a
   * removed key with a default still counts as set.
   */
  @Test
  public void isSet() {
    assertTrue(mProperties.isSet(mKeyWithValue));
    assertFalse(mProperties.isSet(mKeyWithoutValue));
    mProperties.remove(mKeyWithValue);
    mProperties.put(mKeyWithoutValue, "value", Source.RUNTIME);
    assertTrue(mProperties.isSet(mKeyWithValue));
    assertTrue(mProperties.isSet(mKeyWithoutValue));
  }

  /** entrySet() covers every registered default key plus explicit puts. */
  @Test
  public void entrySet() {
    Set<Map.Entry<? extends PropertyKey, String>> expected = PropertyKey.defaultKeys().stream()
        .map(key -> Maps.immutableEntry(key, key.getDefaultValue())).collect(toSet());
    assertThat(mProperties.entrySet(), is(expected));
    mProperties.put(mKeyWithValue, "value", Source.RUNTIME);
    expected.add(Maps.immutableEntry(mKeyWithValue, "value"));
    assertThat(mProperties.entrySet(), is(expected));
  }

  /** keySet() covers every registered default key plus newly put keys. */
  @Test
  public void keySet() {
    Set<PropertyKey> expected = new HashSet<>(PropertyKey.defaultKeys());
    assertThat(mProperties.keySet(), is(expected));
    PropertyKey newKey = new PropertyKey.Builder("keySetNew").build();
    mProperties.put(newKey, "value", Source.RUNTIME);
    expected.add(newKey);
    assertThat(mProperties.keySet(), is(expected));
  }

  /** forEach() visits exactly the keys that keySet() would report. */
  @Test
  public void forEach() {
    Set<PropertyKey> expected = new HashSet<>(PropertyKey.defaultKeys());
    Set<PropertyKey> actual = Sets.newHashSet();
    mProperties.forEach((key, value) -> actual.add(key));
    assertThat(actual, is(expected));
    PropertyKey newKey = new PropertyKey.Builder("forEachNew").build();
    mProperties.put(newKey, "value", Source.RUNTIME);
    Set<PropertyKey> actual2 = Sets.newHashSet();
    mProperties.forEach((key, value) -> actual2.add(key));
    expected.add(newKey);
    assertThat(actual2, is(expected));
  }

  /** getSource() reports the source of a put, and DEFAULT otherwise. */
  @Test
  public void setGetSource() {
    mProperties.put(mKeyWithValue, "valueIgnored", Source.RUNTIME);
    assertEquals(Source.RUNTIME, mProperties.getSource(mKeyWithValue));
    assertEquals(Source.DEFAULT, mProperties.getSource(mKeyWithoutValue));
  }

  /** merge() imports a java.util.Properties, tagging entries with the source. */
  @Test
  public void merge() {
    PropertyKey newKey = new PropertyKey.Builder("mergeNew").setDefaultValue("value3").build();
    Properties sysProp = new Properties();
    sysProp.put(mKeyWithValue, "value1");
    sysProp.put(mKeyWithoutValue, "value2");
    mProperties.merge(sysProp, Source.SYSTEM_PROPERTY);
    assertEquals(Source.SYSTEM_PROPERTY, mProperties.getSource(mKeyWithValue));
    assertEquals(Source.SYSTEM_PROPERTY, mProperties.getSource(mKeyWithoutValue));
    assertEquals(Source.DEFAULT, mProperties.getSource(newKey));
    assertEquals("value1", mProperties.get(mKeyWithValue));
    assertEquals("value2", mProperties.get(mKeyWithoutValue));
    assertEquals("value3", mProperties.get(newKey));
  }

  /**
   * hash() changes whenever the effective configuration changes, and returns
   * to the previous value when a change is undone.
   */
  @Test
  public void hash() {
    String hash0 = mProperties.hash();
    mProperties.set(mKeyWithValue, "new value");
    String hash1 = mProperties.hash();
    Assert.assertNotEquals(hash0, hash1);
    mProperties.remove(mKeyWithValue);
    String hash2 = mProperties.hash();
    Assert.assertEquals(hash0, hash2);            // removal restores the original hash
    mProperties.set(mKeyWithValue, "new value");
    String hash3 = mProperties.hash();
    Assert.assertEquals(hash1, hash3);            // same value => same hash
    mProperties.set(mKeyWithValue, "updated new value");
    String hash4 = mProperties.hash();
    Assert.assertNotEquals(hash0, hash4);
    Assert.assertNotEquals(hash1, hash4);
    Assert.assertNotEquals(hash2, hash4);
    Assert.assertNotEquals(hash3, hash4);
    mProperties.set(mKeyWithoutValue, "value");
    String hash5 = mProperties.hash();
    Assert.assertNotEquals(hash0, hash5);
    Assert.assertNotEquals(hash1, hash5);
    Assert.assertNotEquals(hash2, hash5);
    Assert.assertNotEquals(hash3, hash5);
    Assert.assertNotEquals(hash4, hash5);
  }
}
2,485