text
stringlengths
2
99.9k
meta
dict
var convert = require('./convert'), func = convert('takeWhile', require('../takeWhile')); func.placeholder = require('./placeholder'); module.exports = func;
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> <!-- NewPage --> <html lang="en"> <head> <!-- Generated by javadoc (9.0.1) on Tue Oct 23 23:20:02 IST 2018 --> <title>org.apache.kafka.common (kafka 2.0.0 API)</title> <meta http-equiv="Content-Type" content="text/html; charset=utf-8"> <meta name="date" content="2018-10-23"> <link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> <link rel="stylesheet" type="text/css" href="../../../../jquery/jquery-ui.css" title="Style"> <script type="text/javascript" src="../../../../script.js"></script> <script type="text/javascript" src="../../../../jquery/jszip/dist/jszip.min.js"></script> <script type="text/javascript" src="../../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script> <!--[if IE]> <script type="text/javascript" src="../../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script> <![endif]--> <script type="text/javascript" src="../../../../jquery/jquery-1.10.2.js"></script> <script type="text/javascript" src="../../../../jquery/jquery-ui.js"></script> </head> <body> <h1 class="bar"><a href="../../../../org/apache/kafka/common/package-summary.html" target="classFrame">org.apache.kafka.common</a></h1> <div class="indexContainer"> <h2 title="Interfaces">Interfaces</h2> <ul title="Interfaces"> <li><a href="ClusterResourceListener.html" title="interface in org.apache.kafka.common" target="classFrame"><span class="interfaceName">ClusterResourceListener</span></a></li> <li><a href="Configurable.html" title="interface in org.apache.kafka.common" target="classFrame"><span class="interfaceName">Configurable</span></a></li> <li><a href="KafkaFuture.BaseFunction.html" title="interface in org.apache.kafka.common" target="classFrame"><span class="interfaceName">KafkaFuture.BaseFunction</span></a></li> <li><a href="KafkaFuture.BiConsumer.html" title="interface in org.apache.kafka.common" target="classFrame"><span class="interfaceName">KafkaFuture.BiConsumer</span></a></li> <li><a href="Metric.html" title="interface in org.apache.kafka.common" target="classFrame"><span class="interfaceName">Metric</span></a></li> <li><a href="Reconfigurable.html" title="interface in org.apache.kafka.common" target="classFrame"><span class="interfaceName">Reconfigurable</span></a></li> </ul> <h2 title="Classes">Classes</h2> <ul title="Classes"> <li><a href="Cluster.html" title="class in org.apache.kafka.common" target="classFrame">Cluster</a></li> <li><a href="ClusterResource.html" title="class in org.apache.kafka.common" target="classFrame">ClusterResource</a></li> <li><a href="KafkaFuture.html" title="class in org.apache.kafka.common" target="classFrame">KafkaFuture</a></li> <li><a href="KafkaFuture.Function.html" title="class in org.apache.kafka.common" target="classFrame">KafkaFuture.Function</a></li> <li><a href="MetricName.html" title="class in org.apache.kafka.common" target="classFrame">MetricName</a></li> <li><a href="MetricNameTemplate.html" title="class in org.apache.kafka.common" target="classFrame">MetricNameTemplate</a></li> <li><a href="Node.html" title="class in org.apache.kafka.common" target="classFrame">Node</a></li> <li><a href="PartitionInfo.html" title="class in org.apache.kafka.common" target="classFrame">PartitionInfo</a></li> <li><a href="TopicPartition.html" title="class in org.apache.kafka.common" target="classFrame">TopicPartition</a></li> <li><a href="TopicPartitionInfo.html" title="class in org.apache.kafka.common" target="classFrame">TopicPartitionInfo</a></li> <li><a href="TopicPartitionReplica.html" title="class in org.apache.kafka.common" target="classFrame">TopicPartitionReplica</a></li> </ul> <h2 title="Enums">Enums</h2> <ul title="Enums"> <li><a href="ConsumerGroupState.html" title="enum in org.apache.kafka.common" target="classFrame">ConsumerGroupState</a></li> </ul> <h2 title="Exceptions">Exceptions</h2> <ul title="Exceptions"> <li><a href="KafkaException.html" title="class in org.apache.kafka.common" target="classFrame">KafkaException</a></li> </ul> </div> </body> </html>
{ "pile_set_name": "Github" }
/************************************************************************ * Copyright 2008, Strathclyde Planning Group, * Department of Computer and Information Sciences, * University of Strathclyde, Glasgow, UK * http://planning.cis.strath.ac.uk/ * * Maria Fox, Richard Howey and Derek Long - VAL * Stephen Cresswell - PDDL Parser * * This file is part of VAL, the PDDL validator. * * VAL is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 2 of the License, or * (at your option) any later version. * * VAL is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with VAL. If not, see <http://www.gnu.org/licenses/>. * ************************************************************************/ #include "graphconstruct.h" #include "ptree.h" #include "FuncAnalysis.h" #include <fstream> #include "State.h" #include "InstPropLinker.h" #include "Evaluator.h" #include "Validator.h" using namespace VAL1_2; namespace Inst { class PlanGraph::BVEvaluator : public VAL::VisitController { private: // The BVEvaluator is going to own this bval. BoundedValue * bval; PlanGraph & pg; VAL::FastEnvironment * fenv; bool continuous; public: BVEvaluator(PlanGraph & p,VAL::FastEnvironment * fe) : bval(0), pg(p), fenv(fe), continuous(false) {}; ~BVEvaluator() { delete bval; }; bool isContinuous() const {return continuous;}; BoundedValue * getBV() { BoundedValue * bv = bval; bval = 0; return bv; }; virtual void visit_plus_expression(plus_expression * pe) { pe->getRHS()->visit(this); BoundedValue * br = bval; bval = 0; pe->getLHS()->visit(this); br = (*bval += br); if(br != bval) { delete bval; bval = br; }; }; virtual void visit_minus_expression(minus_expression * me) { me->getRHS()->visit(this); BoundedValue * br = bval; bval = 0; me->getLHS()->visit(this); br = (*bval -= br); if(br != bval) { delete bval; bval = br; }; }; virtual void visit_mul_expression(mul_expression * pe) { pe->getRHS()->visit(this); BoundedValue * br = bval; bval = 0; pe->getLHS()->visit(this); if(continuous) { bval = br; } else { br = (*bval *= br); if(br != bval) { delete bval; bval = br; }; }; }; virtual void visit_div_expression(div_expression * pe) { pe->getRHS()->visit(this); BoundedValue * br = bval; bval = 0; pe->getLHS()->visit(this); br = (*bval /= br); if(br != bval) { delete bval; bval = br; }; }; virtual void visit_uminus_expression(uminus_expression * um) { um->getExpr()->visit(this); bval->negate(); }; virtual void visit_int_expression(int_expression * ie) { bval = new PointValue(ie->double_value()); }; virtual void visit_float_expression(float_expression * fe) { bval = new PointValue(fe->double_value()); }; virtual void visit_special_val_expr(special_val_expr *) {continuous = true;}; virtual void visit_func_term(func_term * ft) { PNE pne(ft,fenv); FluentEntry * fe = pg.fluents.find(instantiatedOp::getPNE(&pne)); bval = fe?fe->getBV()->copy():new Undefined(); }; }; class SpikeEvaluator : public VisitController { private: Spike<PropEntry> & spes; Spike<FluentEntry> & sfes; FastEnvironment* f; bool evaluation; PlanGraph & pg; pred_symbol * equality; public: SpikeEvaluator(PlanGraph & p,Spike<PropEntry> & s1,Spike<FluentEntry> & s2, FastEnvironment * fe): spes(s1), sfes(s2), f(fe), evaluation(true), pg(p), equality(current_analysis->pred_tab.symbol_probe("=")) { } virtual void visit_simple_goal(simple_goal * s){ if(EPS(s->getProp()->head)->getParent() == this->equality){ evaluation = ((*f)[s->getProp()->args->front()] == (*f)[s->getProp()->args->back()]); if(s->getPolarity() == E_NEG) { evaluation = !evaluation; }; return; } else { Literal e(s->getProp(),f); Literal* lptr = instantiatedOp::getLiteral(& e); PropEntry* eid = spes.find(lptr); if(eid){ if(s->getPolarity() == E_NEG){ evaluation = eid->gotDeleters(); } else { evaluation = eid->gotAchievers(); } } else { if(s->getPolarity() == E_NEG){ evaluation = true; } else { evaluation = false; } }; }; }; bool getEvaluation() const {return evaluation;}; virtual void visit_qfied_goal(qfied_goal * qg){ cout << "Not currently handling quantified goals\n"; } virtual void visit_conj_goal(conj_goal * c){ for(goal_list::const_iterator i = c->getGoals()->begin(); i != c->getGoals()->end();++i) { (*i)->visit(this); if(!evaluation){ return; } } }; virtual void visit_disj_goal(disj_goal * c){ cout << "Not dealing with disjunctive goals\n"; }; virtual void visit_timed_goal(timed_goal * t){ cout << "Not currently handling timed goals\n"; } virtual void visit_imply_goal(imply_goal * ig){ cout << "Not dealing with implications\n"; }; virtual void visit_neg_goal(neg_goal * ng){ ng->getGoal()->visit(this); evaluation = !evaluation; } virtual void visit_comparison(comparison * c){ // Evaluate the parts and combine according to // rearrangement then do the comparison with a // bounds check. PlanGraph::BVEvaluator bve(pg,f); c->getLHS()->visit(&bve); BoundedValue * bvl = bve.getBV(); c->getRHS()->visit(&bve); BoundedValue * bvr = bve.getBV(); BoundedValue * bvres = (*bvl -= bvr); switch(c->getOp()) { case E_GREATER: evaluation = !bvres->gotUB() || bvres->getUB() > 0; break; case E_GREATEQ: evaluation = !bvres->gotUB() || bvres->getUB() >= 0; break; case E_LESS: evaluation = !bvres->gotLB() || bvres->getLB() < 0; break; case E_LESSEQ: evaluation = !bvres->gotLB() || bvres->getLB() <= 0; break; case E_EQUALS: evaluation = (!bvres->gotLB() || bvres->getLB() <= 0) && (!bvres->gotUB() || bvres->getUB() >= 0); break; default: break; }; } virtual void visit_action(action * op){ op->precondition->visit(this); }; virtual void visit_event(event * e){ e->precondition->visit(this); }; virtual void visit_process(process * p){ p->precondition->visit(this); }; virtual void visit_durative_action(durative_action * da) { cout << "Not dealing with duratives\n"; }; }; class SpikeSupporter : public VisitController { private: Spike<PropEntry> & spes; Spike<FluentEntry> & sfes; FastEnvironment* f; ActEntry * ae; GraphFactory * myFac; bool context; pred_symbol * equality; public: SpikeSupporter(Spike<PropEntry> & s1,Spike<FluentEntry> & s2, FastEnvironment * fe,ActEntry * a,GraphFactory * mf): spes(s1), sfes(s2), f(fe), ae(a), myFac(mf), context(true), equality(current_analysis->pred_tab.symbol_probe("=")) { } virtual void visit_simple_goal(simple_goal * s){ if(EPS(s->getProp()->head)->getParent() != this->equality){ Literal e(s->getProp(),f); Literal* lptr = instantiatedOp::getLiteral(& e); PropEntry* eid = spes.findInAll(lptr); if(eid){ if(context && s->getPolarity() == E_NEG || !context && s->getPolarity()==E_POS){ ae->addSupportedByNeg(eid); cout << "Support by neg: " << *ae << " with " << *eid << "\n"; } else { ae->addSupportedBy(eid); } } else { eid=myFac->makePropEntry(lptr); // make the entry for eid ae->addSupportedByNeg(eid); spes.insertAbsentee(eid); }; }; }; virtual void visit_conj_goal(conj_goal * c){ for(goal_list::const_iterator i = c->getGoals()->begin(); i != c->getGoals()->end();++i) { (*i)->visit(this); } }; virtual void visit_comparison(comparison * c){ //cout << "Er....what?\n"; } virtual void visit_neg_goal(neg_goal * ng) { context = !context; ng->getGoal()->visit(this); }; virtual void visit_action(action * op){ op->precondition->visit(this); }; virtual void visit_event(event * e){ e->precondition->visit(this); }; virtual void visit_process(process * p){ p->precondition->visit(this); }; virtual void visit_durative_action(durative_action * da) { cout << "Not dealing with duratives\n"; }; }; void FluentEntry::write(ostream & o) const { thefluent->write(o); o << "["; for(vector<Constraint *>::const_iterator i = constrs.begin();i != constrs.end();++i) { (*i)->write(o); o << " "; }; o << "]\nBounded Range: " << *bval << "\n"; }; BoundedValue * BoundedInterval::operator+=(const BoundedValue * bv) { if(!finitelbnd || !bv->gotLB()) { finitelbnd = false; } else { lbnd += bv->getLB(); }; if(!finiteubnd || !bv->gotUB()) { finiteubnd = false; } else { ubnd += bv->getUB(); }; return this; }; BoundedValue * BoundedInterval::operator-=(const BoundedValue * bv) { if(!finitelbnd || !bv->gotUB()) { finitelbnd = false; } else { lbnd -= bv->getUB(); }; if(!finiteubnd || !bv->gotLB()) { finiteubnd = false; } else { ubnd -= bv->getLB(); }; return this; }; BoundedValue * BoundedInterval::operator*=(const BoundedValue * bv) { if(!finitelbnd || !bv->gotLB()) { finitelbnd = false; } else { lbnd *= bv->getLB(); }; if(!finiteubnd || !bv->gotUB()) { finiteubnd = false; } else { ubnd *= bv->getUB(); }; return this; }; BoundedValue * BoundedInterval::operator/=(const BoundedValue * bv) { /* if(!finitelbnd || !bv->gotLB()) { finitelbnd = false; } else { lbnd += bv->getLB(); }; if(!finiteubnd || !bv->gotUB()) { finiteubnd = false; } else { ubnd += bv->getUB(); }; return this; */ // This case must be handled properly... cout << "WARNING: Division not managed properly, yet!\n"; finitelbnd = finiteubnd = false; return this; }; BoundedValue * PointValue::operator+=(const BoundedValue * bv) { BoundedInterval * bi = new BoundedInterval(val,val); *bi += bv; return bi; }; BoundedValue * PointValue::operator-=(const BoundedValue * bv) { BoundedInterval * bi = new BoundedInterval(val,val); *bi -= bv; return bi; }; BoundedValue * PointValue::operator*=(const BoundedValue * bv) { BoundedInterval * bi = new BoundedInterval(val,val); *bi *= bv; return bi; }; BoundedValue * PointValue::operator/=(const BoundedValue * bv) { BoundedInterval * bi = new BoundedInterval(val,val); *bi /= bv; return bi; }; BoundedValue * PlanGraph::update(BoundedValue * bv,const VAL::expression * exp,const VAL::assign_op op,VAL::FastEnvironment * fe) { BVEvaluator bve(*this,fe); exp->visit(&bve); BoundedValue * b = bve.getBV(); cout << "Evaluated to " << *b << "\n"; switch(op) { case E_ASSIGN: bv = b->copy(); break; case E_INCREASE: if(bve.isContinuous()) { if(!b->gotLB() || b->getLB() < 0) { bv = bv->infLower(); }; if(!b->gotUB() || b->getUB() > 0) { bv = bv->infUpper(); }; } else { bv = (*bv += b); }; break; case E_DECREASE: if(bve.isContinuous()) { if(!b->gotLB() || b->getLB() < 0) { bv = bv->infUpper(); }; if(!b->gotUB() || b->getUB() > 0) { bv = bv->infLower(); }; } else { bv = (*bv -= b); }; break; case E_SCALE_UP: bv = (*bv *= b); break; case E_SCALE_DOWN: bv = (*bv /= b); break; default: break; }; delete b; return bv; }; void Constraint::write(ostream & o) const { o << *bval; }; void InitialValue::write(ostream & o) const { o << "Initially " << *bval; }; void UpdateValue::write(ostream & o) const { o << "Updated by " << *(updater->getIO()) << " at " << (updater->getWhen()) //<< " with effect: " << *exp << " to " << *bval; }; void FluentEntry::addUpdatedBy(ActEntry * ae,const VAL::expression * expr,const VAL::assign_op op,PlanGraph * pg) { cout << "Performing BV calc on " << *bval << "\n"; BoundedValue * vv = bval->copy(); BoundedValue * v = pg->update(vv,expr,op,ae->getIO()->getEnv()); cout << "Got " << *v << "\n"; if(vv != v) { delete vv; }; Constraint * c = new UpdateValue(ae,expr,op,v); constrs.push_back(c); if(!tmpaccum) { tmpaccum = bval->copy(); }; cout << "tmpaccum is " << *tmpaccum << "\n"; BoundedValue * nv = tmpaccum->accum(v); if(nv != tmpaccum) { delete tmpaccum; }; tmpaccum = nv; }; BoundedValue * PointValue::accum(const BoundedValue * bv) { if(bv->contains(val)) { return bv->copy(); } else { BoundedValue * b = new BoundedInterval(val,val); b->accum(bv); return b; }; }; PlanGraph::PlanGraph(GraphFactory * f) : myFac(f), inactive(instantiatedOp::opsBegin(),instantiatedOp::opsEnd()) { // Set up the initial state in the proposition spike... for(pc_list<simple_effect*>::const_iterator i = current_analysis->the_problem->initial_state->add_effects.begin(); i != current_analysis->the_problem->initial_state->add_effects.end();++i) { Literal lit((*i)->prop,0); Literal * lit1 = instantiatedOp::getLiteral(&lit); PropEntry * p = myFac->makePropEntry(lit1); props.addEntry(p); }; props.finishedLevel(); for(pc_list<assignment*>::const_iterator i = current_analysis->the_problem->initial_state->assign_effects.begin(); i != current_analysis->the_problem->initial_state->assign_effects.end();++i) { PNE pne((*i)->getFTerm(),0); PNE * pne1 = instantiatedOp::getPNE(&pne); FluentEntry * fl = myFac->makeFluentEntry(pne1); fluents.addEntry(fl); fl->addInitial((EFT(pne1->getHead())->getInitial(pne1->begin(),pne1->end())).second); }; fluents.finishedLevel(); //copy(instantiatedOp::opsBegin(),instantiatedOp::opsEnd(),front_inserter(inactive)); }; Constraint::~Constraint() { delete bval; }; void FluentEntry::transferValue() { if(!tmpaccum) return; delete bval; bval = tmpaccum; tmpaccum = 0; }; struct IteratingActionChecker : public VisitController { bool iterating; IteratingActionChecker() : iterating(false) {}; virtual void visit_forall_effect(forall_effect * fa) { cout << "Not handling for all effects yet (IteratingActionChecker)!\n"; }; virtual void visit_cond_effect(cond_effect *) { cout << "Not handling conditional effects yet (IteratingActionChecker)!\n"; }; // virtual void visit_timed_effect(timed_effect *) {}; virtual void visit_effect_lists(effect_lists * effs) { for(VAL::pc_list<assignment *>::iterator i = effs->assign_effects.begin();i != effs->assign_effects.end();++i) { (*i)->visit(this); }; }; virtual void visit_assignment(assignment * a) { switch(a->getOp()) { case E_INCREASE: case E_DECREASE: case E_SCALE_UP: case E_SCALE_DOWN: iterating = true; default: break; }; }; }; void DurationHolder::readDurations(const string & nm) { std::ifstream dursFile(nm.c_str()); string a; string ax; string s; dursFile >> a; ax = a; vector<int> args; while(!dursFile.eof()) { dursFile >> s; if(s == "=") { relevantArgs[a] = args; args.clear(); double d; dursFile >> d; dursFor[ax] = new DurationConstraint(new PointValue(d)); dursFile >> a; ax = a; } else { int arg; dursFile >> arg; args.push_back(arg); ax += " "; ax += s; }; }; }; DurationHolder ActEntry::dursFor; void DurationConstraint::write(ostream & o) const { o << "Duration for "; if(start) o << *(start->getIO()) << " "; if(inv) o << *(inv->getIO()) << " "; if(end) o << *(end->getIO()) << " "; o << "is " << *bval; }; DurationConstraint * DurationHolder::lookUp(const string & nm,instantiatedOp * io) { vector<int> args = relevantArgs[nm]; string s = nm; for(vector<int>::iterator i = args.begin();i != args.end();++i) { s += " "; s += io->getArg(*i)->getName(); }; return dursFor[s]; }; ActEntry::ActEntry(instantiatedOp * io) : theact(io), iterating(false), atype(ATOMIC), dur(0) { IteratingActionChecker iac; io->forOp()->effects->visit(&iac); iterating = iac.iterating; string s = io->forOp()->name->getName(); if(s.length() < 6) return; string tl = s.substr(s.length()-4,4); if(tl == "-inv") { cout << "Found an invariant action " << *io << "\n"; atype = INV; tl = s.substr(0,s.length()-4); dur = dursFor.lookUp(tl,io); dur->setInv(this); } else if(tl == "-end") { cout << "Found an end action " << *io << "\n"; atype = END; tl = s.substr(0,s.length()-4); dur = dursFor.lookUp(tl,io); dur->setEnd(this); } else if(s.length() > 6 && s.substr(s.length()-6,6) == "-start") { cout << "Found a start action " << *io << "\n"; atype = START; tl = s.substr(0,s.length()-6); dur = dursFor.lookUp(tl,io); dur->setStart(this); }; }; bool PlanGraph::extendPlanGraph() { for(vector<ActEntry *>::iterator i = iteratingActs.begin();i != iteratingActs.end();++i) { iterateEntry(*i); }; bool levelOut = true; for(InstOps::iterator i = inactive.begin();i!= inactive.end();){ cout << "Considering: " << **i << "\n"; if(activated((*i))){ ActEntry* io = acts.addEntry(myFac->makeActEntry((*i))); cout << "Activated: " << (*(*i)) << "\n"; activateEntry(io); InstOps::iterator j = i; ++i; inactive.erase(j); levelOut = false; } else ++i; } // Determine which actions are now activated and add them to spike. // // Then add their postconditions to the proposition spike, ensuring we only add new ones. acts.finishedLevel(); props.finishedLevel(); fluents.finishedLevel(); for(Spike<FluentEntry>::SpikeIterator i = fluents.begin();i != fluents.end();++i) { (*i)->transferValue(); }; return levelOut; }; void PlanGraph::extendToGoals() { VAL::FastEnvironment bs(0); while(true) { extendPlanGraph(); SpikeEvaluator spiv(*this,props,fluents,&bs); current_analysis->the_problem->the_goal->visit(&spiv); if(spiv.getEvaluation()) break; }; }; void PlanGraph::iterateEntry(ActEntry * io) { for(instantiatedOp::PNEEffectsIterator e = io->getIO()->PNEEffectsBegin();e!=io->getIO()->PNEEffectsEnd();++e){ FluentEntry* eid = fluents.find((*e)); cout << "Fluent effect updated: " << (*(*e)) << "\n"; if(!eid){ eid = fluents.addEntry(myFac->makeFluentEntry((*e))); }; eid->addUpdatedBy(io,e.getUpdate(),e.getOp(),this); io->addUpdates(eid); } }; void PlanGraph::activateEntry(ActEntry * io){ for(instantiatedOp::PropEffectsIterator e = io->getIO()->addEffectsBegin();e!=io->getIO()->addEffectsEnd();++e){ PropEntry* eid = props.find((*e)); if(!eid){ eid=props.addEntry(myFac->makePropEntry((*e))); cout << "Prop effect added: " << (*(*e)) << "\n"; }; eid->addAchievedBy(io); io->addAchieves(eid); } for(instantiatedOp::PropEffectsIterator e = io->getIO()->delEffectsBegin();e!=io->getIO()->delEffectsEnd();++e){ PropEntry* eid = props.find((*e)); if(!eid){ eid=props.addEntry(myFac->makePropEntry((*e))); cout << "Prop effect deleted: " << (*(*e)) << "\n"; } eid->addDeletedBy(io); io->addDeletes(eid); } iterateEntry(io); SpikeSupporter spipp(props,fluents,io->getIO()->getEnv(),io,myFac); io->getIO()->forOp()->visit(&spipp); if(io->isIterating()) { iteratingActs.push_back(io); }; }; // Method to check whether an action is to be activated at a given level. bool PlanGraph::activated(instantiatedOp* io){ SpikeEvaluator spiv(*this,props,fluents,io->getEnv()); io->forOp()->visit(&spiv); return spiv.getEvaluation(); } void ActEntry::write(ostream & o) const { o << *theact; if(atype != ATOMIC && dur) { o << " " << *dur; }; }; void PlanGraph::write(ostream & o) const { o << "Propositions:\n"; props.write(o); o << "Actions:\n"; acts.write(o); o << "Fluents:\n"; fluents.write(o); }; int PropEntry::counter = 0; bool ActEntry::isActivated(const vector<bool> & actives) const { for(vector<PropEntry *>::const_iterator i = supports.begin();i != supports.end();++i) { cout << "Checking +" << **i << " = " << actives[(*i)->getID()] << "\n"; if(!actives[(*i)->getID()]) return false; }; for(vector<PropEntry *>::const_iterator i = negSupports.begin();i != negSupports.end();++i) { cout << "Checking -" << **i << " = " << actives[(*i)->getID()] << "\n"; if(actives[(*i)->getID()]) return false; }; return true; }; bool ActEntry::isActivated(Validator * v,const State * s) const { Evaluator ev(v,s,theact); theact->forOp()->visit(&ev); return ev(); }; bool ActEntry::isRelevant(Validator * v,const State * s) const { Evaluator ev(v,s,theact,true); theact->forOp()->visit(&ev); return ev(); }; vector<ActEntry *> PlanGraph::applicableActions(Validator * v,const State * s) { int lastActiveLayer = 0; for(State::const_iterator i = s->begin();i != s->end();++i) { Literal * lit = toLiteral(*i); PropEntry * pe = props.findInAll(lit); lastActiveLayer = std::max(lastActiveLayer,pe->getWhen()); }; vector<ActEntry *> actives; for(Spike<ActEntry>::SpikeIterator i = acts.begin();i != acts.toLevel(lastActiveLayer);++i) { cout << "Considering " << **i << "\n"; if((*i)->isActivated(v,s)) { actives.push_back(*i); }; }; return actives; /* This version was used to translate a State into a vector of bools * that could be used for reference against preconditions. * The problem is that it doesn't handle metric expressions, so we * have switched to evaluation in the state. * vector<bool> activations(props.size(),false); int lastActiveLayer = 0; for(State::const_iterator i = s->begin();i != s->end();++i) { Literal * lit = toLiteral(*i); PropEntry * pe = props.findInAll(lit); activations[pe->getID()] = true; cout << "Set " << *pe << " active\n"; lastActiveLayer = std::max(lastActiveLayer,pe->getWhen()); }; vector<ActEntry *> actives; for(Spike<ActEntry>::SpikeIterator i = acts.begin();i != acts.toLevel(lastActiveLayer);++i) { cout << "Considering " << **i << "\n"; if((*i)->isActivated(activations)) { actives.push_back(*i); }; }; return actives; */ }; vector<ActEntry *> PlanGraph::relevantActions(Validator * v,const State * s) { int lastActiveLayer = 0; for(State::const_iterator i = s->begin();i != s->end();++i) { Literal * lit = toLiteral(*i); PropEntry * pe = props.findInAll(lit); lastActiveLayer = std::max(lastActiveLayer,pe->getWhen()); }; vector<ActEntry *> actives; for(Spike<ActEntry>::SpikeIterator i = acts.begin();i != acts.toLevel(lastActiveLayer);++i) { // cout << "Considering " << **i << "\n"; if((*i)->isRelevant(v,s)) { actives.push_back(*i); }; }; return actives; }; };
{ "pile_set_name": "Github" }
package leafnodes import ( "encoding/json" "github.com/onsi/ginkgo/internal/failer" "github.com/onsi/ginkgo/types" "io/ioutil" "net/http" "time" ) type synchronizedAfterSuiteNode struct { runnerA *runner runnerB *runner outcome types.SpecState failure types.SpecFailure runTime time.Duration } func NewSynchronizedAfterSuiteNode(bodyA interface{}, bodyB interface{}, codeLocation types.CodeLocation, timeout time.Duration, failer *failer.Failer) SuiteNode { return &synchronizedAfterSuiteNode{ runnerA: newRunner(bodyA, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0), runnerB: newRunner(bodyB, codeLocation, timeout, failer, types.SpecComponentTypeAfterSuite, 0), } } func (node *synchronizedAfterSuiteNode) Run(parallelNode int, parallelTotal int, syncHost string) bool { node.outcome, node.failure = node.runnerA.run() if parallelNode == 1 { if parallelTotal > 1 { node.waitUntilOtherNodesAreDone(syncHost) } outcome, failure := node.runnerB.run() if node.outcome == types.SpecStatePassed { node.outcome, node.failure = outcome, failure } } return node.outcome == types.SpecStatePassed } func (node *synchronizedAfterSuiteNode) Passed() bool { return node.outcome == types.SpecStatePassed } func (node *synchronizedAfterSuiteNode) Summary() *types.SetupSummary { return &types.SetupSummary{ ComponentType: node.runnerA.nodeType, CodeLocation: node.runnerA.codeLocation, State: node.outcome, RunTime: node.runTime, Failure: node.failure, } } func (node *synchronizedAfterSuiteNode) waitUntilOtherNodesAreDone(syncHost string) { for { if node.canRun(syncHost) { return } time.Sleep(50 * time.Millisecond) } } func (node *synchronizedAfterSuiteNode) canRun(syncHost string) bool { resp, err := http.Get(syncHost + "/RemoteAfterSuiteData") if err != nil || resp.StatusCode != http.StatusOK { return false } body, err := ioutil.ReadAll(resp.Body) if err != nil { return false } resp.Body.Close() afterSuiteData := types.RemoteAfterSuiteData{} err = json.Unmarshal(body, &afterSuiteData) if err != nil { return false } return afterSuiteData.CanRun }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <!-- Copyright (C) 2015 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <animated-vector xmlns:android="http://schemas.android.com/apk/res/android" android:drawable="@drawable/ic_tick"> <target android:name="@string/tick" android:animation="@animator/tick_to_cross" /> <target android:name="@string/groupTickCross" android:animation="@animator/rotate_tick_to_cross" /> </animated-vector>
{ "pile_set_name": "Github" }
'use strict'; var fs = require('fs') var ini = require('ini') var path = require('path') var stripJsonComments = require('strip-json-comments') var parse = exports.parse = function (content) { //if it ends in .json or starts with { then it must be json. //must be done this way, because ini accepts everything. //can't just try and parse it and let it throw if it's not ini. //everything is ini. even json with a syntax error. if(/^\s*{/.test(content)) return JSON.parse(stripJsonComments(content)) return ini.parse(content) } var file = exports.file = function () { var args = [].slice.call(arguments).filter(function (arg) { return arg != null }) //path.join breaks if it's a not a string, so just skip this. for(var i in args) if('string' !== typeof args[i]) return var file = path.join.apply(null, args) var content try { return fs.readFileSync(file,'utf-8') } catch (err) { return } } var json = exports.json = function () { var content = file.apply(null, arguments) return content ? parse(content) : null } var env = exports.env = function (prefix, env) { env = env || process.env var obj = {} var l = prefix.length for(var k in env) { if(k.toLowerCase().indexOf(prefix.toLowerCase()) === 0) { var keypath = k.substring(l).split('__') // Trim empty strings from keypath array var _emptyStringIndex while ((_emptyStringIndex=keypath.indexOf('')) > -1) { keypath.splice(_emptyStringIndex, 1) } var cursor = obj keypath.forEach(function _buildSubObj(_subkey,i){ // (check for _subkey first so we ignore empty strings) // (check for cursor to avoid assignment to primitive objects) if (!_subkey || typeof cursor !== 'object') return // If this is the last key, just stuff the value in there // Assigns actual value from env variable to final key // (unless it's just an empty string- in that case use the last valid key) if (i === keypath.length-1) cursor[_subkey] = env[k] // Build sub-object if nothing already exists at the keypath if (cursor[_subkey] === undefined) cursor[_subkey] = {} // Increment cursor used to track the object at the current depth cursor = cursor[_subkey] }) } } return obj } var find = exports.find = function () { var rel = path.join.apply(null, [].slice.call(arguments)) function find(start, rel) { var file = path.join(start, rel) try { fs.statSync(file) return file } catch (err) { if(path.dirname(start) !== start) // root return find(path.dirname(start), rel) } } return find(process.cwd(), rel) }
{ "pile_set_name": "Github" }
using System; using System.Collections.Generic; namespace Exceptionless.Plugins { public class ContextData : Dictionary<string, object> { public ContextData() : base(StringComparer.OrdinalIgnoreCase) { } public ContextData(IDictionary<string, object> dictionary) : base(dictionary, StringComparer.OrdinalIgnoreCase) { } public void SetException(Exception ex) { this[KnownKeys.Exception] = ex; } public bool HasException() { return ContainsKey(KnownKeys.Exception); } public Exception GetException() { if (!HasException()) return null; return this[KnownKeys.Exception] as Exception; } /// <summary> /// Marks the event as being a unhandled error occurrence. /// </summary> public void MarkAsUnhandledError() { this[KnownKeys.IsUnhandledError] = true; } /// <summary> /// Returns true if the event was an unhandled error. /// </summary> public bool IsUnhandledError { get { if (!ContainsKey(KnownKeys.IsUnhandledError)) return false; if (!(this[KnownKeys.IsUnhandledError] is bool)) return false; return (bool)this[KnownKeys.IsUnhandledError]; } } /// <summary> /// Sets the submission method that created the event (E.G., UnobservedTaskException) /// </summary> public void SetSubmissionMethod(string method) { this[KnownKeys.SubmissionMethod] = method; } public string GetSubmissionMethod() { if (!ContainsKey(KnownKeys.SubmissionMethod)) return null; return this[KnownKeys.SubmissionMethod] as string; } public static class KnownKeys { public const string IsUnhandledError = "@@_IsUnhandledError"; public const string SubmissionMethod = "@@_SubmissionMethod"; public const string Exception = "@@_Exception"; } } }
{ "pile_set_name": "Github" }
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2020 the original author or authors. */ package org.assertj.core.test; import java.util.LinkedHashMap; import java.util.TreeMap; import org.assertj.core.data.MapEntry; /** * @author Alex Ruiz */ public final class Maps { @SafeVarargs public static <K, V> LinkedHashMap<K, V> mapOf(MapEntry<K, V>... entries) { LinkedHashMap<K, V> map = new LinkedHashMap<>(); for (MapEntry<K, V> entry : entries) { map.put(entry.key, entry.value); } return map; } @SafeVarargs public static <K extends Comparable<? super K>, V> TreeMap<K, V> treeMapOf(MapEntry<K, V>... entries) { TreeMap<K, V> map = new TreeMap<>(); for (MapEntry<K, V> entry : entries) { map.put(entry.key, entry.value); } return map; } private Maps() {} }
{ "pile_set_name": "Github" }
require 'capybara/rails' require 'capybara/poltergeist' Capybara.javascript_driver = :poltergeist
{ "pile_set_name": "Github" }
// license:BSD-3-Clause // copyright-holders:Olivier Galibert // handler_entry_read_unmapped/handler_entry_write_unmapped // Logs an unmapped access template<int Width, int AddrShift, endianness_t Endian> class handler_entry_read_unmapped : public handler_entry_read<Width, AddrShift, Endian> { public: using uX = typename emu::detail::handler_entry_size<Width>::uX; using inh = handler_entry_read<Width, AddrShift, Endian>; handler_entry_read_unmapped(address_space *space) : handler_entry_read<Width, AddrShift, Endian>(space, 0) {} ~handler_entry_read_unmapped() = default; uX read(offs_t offset, uX mem_mask) const override; std::string name() const override; }; template<int Width, int AddrShift, endianness_t Endian> class handler_entry_write_unmapped : public handler_entry_write<Width, AddrShift, Endian> { public: using uX = typename emu::detail::handler_entry_size<Width>::uX; using inh = handler_entry_write<Width, AddrShift, Endian>; handler_entry_write_unmapped(address_space *space) : handler_entry_write<Width, AddrShift, Endian>(space, 0) {} ~handler_entry_write_unmapped() = default; void write(offs_t offset, uX data, uX mem_mask) const override; std::string name() const override; }; // handler_entry_read_nop/handler_entry_write_nop // Drops an unmapped access silently template<int Width, int AddrShift, endianness_t Endian> class handler_entry_read_nop : public handler_entry_read<Width, AddrShift, Endian> { public: using uX = typename emu::detail::handler_entry_size<Width>::uX; using inh = handler_entry_read<Width, AddrShift, Endian>; handler_entry_read_nop(address_space *space) : handler_entry_read<Width, AddrShift, Endian>(space, 0) {} ~handler_entry_read_nop() = default; uX read(offs_t offset, uX mem_mask) const override; std::string name() const override; }; template<int Width, int AddrShift, endianness_t Endian> class handler_entry_write_nop : public handler_entry_write<Width, AddrShift, Endian> { public: using uX = typename emu::detail::handler_entry_size<Width>::uX; using inh = handler_entry_write<Width, AddrShift, Endian>; handler_entry_write_nop(address_space *space) : handler_entry_write<Width, AddrShift, Endian>(space, 0) {} ~handler_entry_write_nop() = default; void write(offs_t offset, uX data, uX mem_mask) const override; std::string name() const override; };
{ "pile_set_name": "Github" }
/***************************************************************************** $Id$ File: kb.cpp Date: 24Aug07 Copyright (C) 2006-07 by Francis Cianfrocca. All Rights Reserved. Gmail: blackhedd This program is free software; you can redistribute it and/or modify it under the terms of either: 1) the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version; or 2) Ruby's License. See the file COPYING for complete licensing information. *****************************************************************************/ #include "project.h" /************************************** KeyboardDescriptor::KeyboardDescriptor **************************************/ KeyboardDescriptor::KeyboardDescriptor (EventMachine_t *parent_em): EventableDescriptor (0, parent_em), bReadAttemptedAfterClose (false) { #ifdef HAVE_EPOLL EpollEvent.events = EPOLLIN; #endif #ifdef HAVE_KQUEUE MyEventMachine->ArmKqueueReader (this); #endif } /*************************************** KeyboardDescriptor::~KeyboardDescriptor ***************************************/ KeyboardDescriptor::~KeyboardDescriptor() { } /************************* KeyboardDescriptor::Write *************************/ void KeyboardDescriptor::Write() { // Why are we here? throw std::runtime_error ("bad code path in keyboard handler"); } /***************************** KeyboardDescriptor::Heartbeat *****************************/ void KeyboardDescriptor::Heartbeat() { // no-op } /************************ KeyboardDescriptor::Read ************************/ void KeyboardDescriptor::Read() { char c; read (GetSocket(), &c, 1); _GenericInboundDispatch(&c, 1); }
{ "pile_set_name": "Github" }
/* Class = "NSTextFieldCell"; placeholderString = "/Users/Shared/munki_repo"; ObjectID = "25F-Lj-I0v"; */ "25F-Lj-I0v.placeholderString" = "/Users/Shared/munki_repo"; /* Class = "NSButtonCell"; title = "Enable MunkiSetDefaultCatalog preprocessor"; ObjectID = "B41-Jj-tN4"; */ "B41-Jj-tN4.title" = "Habilitar preprocesador MunkiSetDefaultCatalog "; /* Class = "NSButtonCell"; title = "Open In Finder"; ObjectID = "dZQ-y3-qfS"; */ "dZQ-y3-qfS.title" = "Abrir en Finder"; /* Class = "NSButtonCell"; title = "Choose..."; ObjectID = "srz-Tz-wvA"; */ "srz-Tz-wvA.title" = "Seleccione..."; /* Class = "NSTextFieldCell"; title = "Munki Repo:"; ObjectID = "xlY-TY-p8w"; */ "xlY-TY-p8w.title" = "Repositorio Munki:"; /* Class = "NSButtonCell"; title = "Check for Munki development releases"; ObjectID = "zrd-q4-IIc"; */ "zrd-q4-IIc.title" = "Buscar versiones Munki en desarrollo";
{ "pile_set_name": "Github" }
// Copyright(c) 2017 POLYGONTEK // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http ://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "Precompiled.h" #include "Asset/Asset.h" #include "Asset/AssetImporter.h" #include "File/FileSystem.h" BE_NAMESPACE_BEGIN const SignalDef AssetImporter::SIG_ApplyChanged("AssetImporter::ApplyChanged"); ABSTRACT_DECLARATION("AssetImporter", AssetImporter, Object) BEGIN_EVENTS(AssetImporter) END_EVENTS void AssetImporter::RegisterProperties() { } AssetImporter::AssetImporter() { asset = nullptr; } AssetImporter::~AssetImporter() { } Str AssetImporter::ToString() const { return asset->ToString(); } void AssetImporter::RevertChanged() { Str metaFileName = Asset::GetMetaFileNameFromAssetPath(asset->GetAssetFilename()); Json::Value metaDataValue; bool validRootNode = false; char *text; if (fileSystem.LoadFile(metaFileName, false, (void **)&text) > 0) { Json::Reader jsonReader; validRootNode = jsonReader.parse(text, metaDataValue); } if (validRootNode) { Json::Value importerValue = metaDataValue["importer"]; Deserialize(importerValue); } } void AssetImporter::ApplyChanged() { Import(); asset->Reload(); asset->WriteMetaDataFile(); EmitSignal(&SIG_ApplyChanged); } BE_NAMESPACE_END
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>CFBundleDevelopmentRegion</key> <string>en</string> <key>CFBundleExecutable</key> <string>${EXECUTABLE_NAME}</string> <key>CFBundleIdentifier</key> <string>${PRODUCT_BUNDLE_IDENTIFIER}</string> <key>CFBundleInfoDictionaryVersion</key> <string>6.0</string> <key>CFBundleName</key> <string>${PRODUCT_NAME}</string> <key>CFBundlePackageType</key> <string>FMWK</string> <key>CFBundleShortVersionString</key> <string>1.0.0</string> <key>CFBundleSignature</key> <string>????</string> <key>CFBundleVersion</key> <string>${CURRENT_PROJECT_VERSION}</string> <key>NSPrincipalClass</key> <string></string> </dict> </plist>
{ "pile_set_name": "Github" }
@model Animal @{ ViewData["Title"] = "Details"; } <h2>Details</h2> <div> <h4>Animal</h4> <hr /> <dl class="dl-horizontal"> <dt> @Html.DisplayNameFor(model => model.Name) </dt> <dd> @Html.DisplayFor(model => model.Name) </dd> <dt> @Html.DisplayNameFor(model => model.Category) </dt> <dd> @Html.DisplayFor(model => model.Category) </dd> <dt> @Html.DisplayNameFor(model => model.UniqueInformation) </dt> <dd> @Html.DisplayFor(model => model.UniqueInformation) </dd> </dl> <div style="padding:10px;"> @if (Model.ImageName != "") { <img src="~/images/@Model.ImageName" alt="Sample Image" height="300" /> } </div> </div> <div> <a asp-action="Index">Back to List</a> </div>
{ "pile_set_name": "Github" }
var = 5 a = "my string {}".format(var)
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="2.0" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:x="urn:schemas-microsoft-com:office:excel" xmlns:ss="urn:schemas-microsoft-com:office:spreadsheet" xmlns:html="http://www.w3.org/TR/REC-html40" xmlns:msg="org.pentaho.messages.Messages" xmlns:str_util="http://www.w3.org/2001/10/str-util.xsl" exclude-result-prefixes="o x ss html msg str_util"> <xsl:output method="html" encoding="UTF-8" /> <xsl:template name="breadcrumbs"> <xsl:param name="names"/> <xsl:param name="path"/> <xsl:param name="level" select="1"/> <xsl:variable name="name"> <xsl:value-of select="substring-before($names,'/')"/> </xsl:variable> <xsl:variable name="solution-name"> <xsl:choose> <xsl:when test="$level=1"> <xsl:text></xsl:text> </xsl:when> <xsl:otherwise> <xsl:value-of select="$solution"/> </xsl:otherwise> </xsl:choose> </xsl:variable> <xsl:variable name="thispath"> <xsl:choose> <xsl:when test="$level=1"> <xsl:text></xsl:text> </xsl:when> <xsl:when test="$level=2"> <xsl:text></xsl:text> </xsl:when> <xsl:otherwise> <xsl:call-template name="breakPath"> <xsl:with-param name="path" select="$path"/> <xsl:with-param name="level" select="number($level)-2"/> </xsl:call-template> </xsl:otherwise> </xsl:choose> </xsl:variable> <td> <div class="icon_folder_sm"> <a> <xsl:attribute name="href">Navigate?solution=<xsl:value-of select="$solution-name"/>&amp;path=<xsl:value-of select="$thispath"/></xsl:attribute> <xsl:call-template name="removeIndex"> <xsl:with-param name="title" select="$name"/> </xsl:call-template> </a> </div> </td> <xsl:variable name="tmpNames" select="substring-after($names,'/')"/> <xsl:variable name="tmpPath"> <xsl:choose> <xsl:when test="$level=1"> <xsl:value-of select="substring-after($path,'/')"/> </xsl:when> <xsl:when test="$level=2"> <xsl:value-of select="substring-after($path,'/')"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="$path"/> </xsl:otherwise> </xsl:choose> </xsl:variable> <xsl:if test="$tmpNames!=''"> <xsl:call-template name="breadcrumbs"> <xsl:with-param name="names" select="$tmpNames"/> <xsl:with-param name="path" select="$tmpPath"/> <xsl:with-param name="level" select="$level+1"/> </xsl:call-template> </xsl:if> </xsl:template> <xsl:template name="breakPath"> <xsl:param name="path"/> <xsl:param name="level"/> <xsl:param name="idx" select="1"/> <xsl:value-of select="substring-before($path,'/')"/> <xsl:if test="$level &gt; $idx"> <xsl:text>/</xsl:text> <xsl:call-template name="breakPath"> <xsl:with-param name="path" select="substring-after($path,'/')"/> <xsl:with-param name="level" select="$level"/> <xsl:with-param name="idx" select="$idx+1"/> </xsl:call-template> </xsl:if> </xsl:template> <xsl:template name="removeIndex"> <xsl:param name="title"/> <xsl:choose> <xsl:when test="substring($title,2,1)='.' and number(substring($title,1,1))&lt;10"> <xsl:value-of select="substring($title,3)" disable-output-escaping="yes"/> </xsl:when> <xsl:when test="substring($title,3,1)='.' and number(substring($title,1,2))&lt;100"> <xsl:value-of select="substring($title,4)" disable-output-escaping="yes"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="$title" disable-output-escaping="yes"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template name="setupFly"> <div id="flydiv" style="position:absolute;top:-1000px;left:-1000px;height:203px;width:502px;z-index:100" > <xsl:attribute name="onmouseover"> <xsl:text>flyStay()</xsl:text> </xsl:attribute> <xsl:attribute name="onmouseout"> <xsl:text>hideFly()</xsl:text> </xsl:attribute> <table border="0" cellpadding='0' cellspacing='0' style="vertical-align:bottom"> <tr> <td width="5" height="9"><img border="0" src="/pentaho-style/images/fly-top-left.png"/></td> <td colspan="2" style="background-image: url(/pentaho-style/images/fly-top.png);background-repeat: repeat-x;"></td> <td><img border="0" src="/pentaho-style/images/fly-top-right.png"/></td> </tr> <tr class="flyContent"> <td valign="top" style="background-image: url(/pentaho-style/images/fly-left.png);background-repeat: repeat-y; height: 171px;" colspan="2"> <a href="javascript:void" onclick="changeFlyTab( 1 ); return false;"> <img id="img-t1" src="/pentaho-style/images/btn_info_active.png" alt="Info" border="0" /> </a> <br /> <a href="javascript:void" onclick="changeFlyTab( 2 ); return false;"> <img id="img-t2" src="/pentaho-style/images/btn_actions.png" border="0" /> </a> </td> <td> <table border="0" cellpadding='0' cellspacing='0' style="vertical-align:bottom"> <tr> <td valign="top"> <div style="height:150px;width:420px;overflow:auto;padding-left:3px"> <table id="flyTable" width="100%" cellpadding='0' cellspacing='0' > <tr> <td> <div id="flyTab1" style="display:block;padding:0px;margin:0px"> <table cellpadding='0' cellspacing='0' border='0' width='400'> <tr> <td valign="top"> <div id="flyTitle1" class="flyTitle">Title1</div> <div id="flyDesc1" class="flyDesc"> Desc1 <p>xx</p> </div> </td> <td style="padding:5px;width:140px"><img id="flyimg" border="0" src="/pentaho-style/images/spacer.gif" width="140" height="140" style="display:block"/></td> </tr> </table> </div> <div id="flyTab2" style="display:none"> <div id="flyTitle2" class="flyTitle">Actions</div> <div id="flyDesc2" class="flyDesc"> </div> </div> </td> </tr> </table> </div> </td> </tr> </table> </td> <td style="background-image: url(/pentaho-style/images/fly-right.png);background-repeat: repeat-y;"></td> </tr> <tr> <td><img border="0" src="/pentaho-style/images/fly-bot-left.png"/></td> <td colspan="2"><img src="/pentaho-style/images/fly-bot.png" height="16" width="100%"/></td> <td><img border="0" src="/pentaho-style/images/fly-bot-right.png"/></td> </tr> </table> </div> </xsl:template> </xsl:stylesheet>
{ "pile_set_name": "Github" }
<html><head><title>S_CustMenu12</title> <LINK REL="stylesheet" TYPE="text/css" HREF="../../sakura.css"> <meta http-equiv="Content-type" content="text/html; charset=UTF-8"> </head> <small> Sakura-Editor Macro Reference </small> <h2>S_CustMenu12</h2> <dl> <dt>機能</dt> <dd>カスタムメニュー12</dd> <dt>構文</dt> <dd><i>void S_CustMenu12 ( )</i></dd> </dl> <hr> </body></html>
{ "pile_set_name": "Github" }
//! moment.js locale configuration //! locale : afrikaans (af) //! author : Werner Mollentze : https://github.com/wernerm import moment from '../moment'; export default moment.defineLocale('af', { months : 'Januarie_Februarie_Maart_April_Mei_Junie_Julie_Augustus_September_Oktober_November_Desember'.split('_'), monthsShort : 'Jan_Feb_Mar_Apr_Mei_Jun_Jul_Aug_Sep_Okt_Nov_Des'.split('_'), weekdays : 'Sondag_Maandag_Dinsdag_Woensdag_Donderdag_Vrydag_Saterdag'.split('_'), weekdaysShort : 'Son_Maa_Din_Woe_Don_Vry_Sat'.split('_'), weekdaysMin : 'So_Ma_Di_Wo_Do_Vr_Sa'.split('_'), meridiemParse: /vm|nm/i, isPM : function (input) { return /^nm$/i.test(input); }, meridiem : function (hours, minutes, isLower) { if (hours < 12) { return isLower ? 'vm' : 'VM'; } else { return isLower ? 'nm' : 'NM'; } }, longDateFormat : { LT : 'HH:mm', LTS : 'HH:mm:ss', L : 'DD/MM/YYYY', LL : 'D MMMM YYYY', LLL : 'D MMMM YYYY HH:mm', LLLL : 'dddd, D MMMM YYYY HH:mm' }, calendar : { sameDay : '[Vandag om] LT', nextDay : '[Môre om] LT', nextWeek : 'dddd [om] LT', lastDay : '[Gister om] LT', lastWeek : '[Laas] dddd [om] LT', sameElse : 'L' }, relativeTime : { future : 'oor %s', past : '%s gelede', s : '\'n paar sekondes', m : '\'n minuut', mm : '%d minute', h : '\'n uur', hh : '%d ure', d : '\'n dag', dd : '%d dae', M : '\'n maand', MM : '%d maande', y : '\'n jaar', yy : '%d jaar' }, ordinalParse: /\d{1,2}(ste|de)/, ordinal : function (number) { return number + ((number === 1 || number === 8 || number >= 20) ? 'ste' : 'de'); // Thanks to Joris Röling : https://github.com/jjupiter }, week : { dow : 1, // Maandag is die eerste dag van die week. doy : 4 // Die week wat die 4de Januarie bevat is die eerste week van die jaar. } });
{ "pile_set_name": "Github" }
package me.yluo.ruisiapp.widget.htmlview.spann; import android.text.TextPaint; import android.text.style.ClickableSpan; import android.view.View; import me.yluo.ruisiapp.widget.htmlview.HtmlTag; import me.yluo.ruisiapp.widget.htmlview.HtmlView; import me.yluo.ruisiapp.widget.htmlview.callback.SpanClickListener; public class Link extends ClickableSpan { private final String url; private final SpanClickListener listener; public Link(String url, SpanClickListener listener) { this.url = url; this.listener = listener; } @Override public void onClick(View widget) { if (listener != null && url != null && !url.isEmpty()) { listener.onSpanClick(HtmlTag.A, url); } } @Override public void updateDrawState(TextPaint ds) { ds.setColor(HtmlView.URL_COLOR); ds.setUnderlineText(false); } }
{ "pile_set_name": "Github" }
require "rexml/dtd/elementdecl" require "rexml/dtd/entitydecl" require "rexml/comment" require "rexml/dtd/notationdecl" require "rexml/dtd/attlistdecl" require "rexml/parent" module REXML module DTD class Parser def Parser.parse( input ) case input when String parse_helper input when File parse_helper input.read end end # Takes a String and parses it out def Parser.parse_helper( input ) contents = Parent.new while input.size > 0 case input when ElementDecl.PATTERN_RE match = $& source = $' contents << ElementDecl.new( match ) when AttlistDecl.PATTERN_RE matchdata = $~ source = $' contents << AttlistDecl.new( matchdata ) when EntityDecl.PATTERN_RE matchdata = $~ source = $' contents << EntityDecl.new( matchdata ) when Comment.PATTERN_RE matchdata = $~ source = $' contents << Comment.new( matchdata ) when NotationDecl.PATTERN_RE matchdata = $~ source = $' contents << NotationDecl.new( matchdata ) end end contents end end end end
{ "pile_set_name": "Github" }
/* * u8g2_esp32_hal.h * * Created on: Feb 12, 2017 * Author: kolban */ #ifndef U8G2_ESP32_HAL_H_ #define U8G2_ESP32_HAL_H_ #include "u8g2.h" #include "driver/gpio.h" #include "driver/spi_master.h" #include "driver/i2c.h" #define U8G2_ESP32_HAL_UNDEFINED (-1) #define I2C_MASTER_NUM I2C_NUM_1 /*!< I2C port number for master dev */ #define I2C_MASTER_TX_BUF_DISABLE 0 /*!< I2C master do not need buffer */ #define I2C_MASTER_RX_BUF_DISABLE 0 /*!< I2C master do not need buffer */ #define I2C_MASTER_FREQ_HZ 50000 /*!< I2C master clock frequency */ #define ACK_CHECK_EN 0x1 /*!< I2C master will check ack from slave*/ #define ACK_CHECK_DIS 0x0 /*!< I2C master will not check ack from slave */ typedef struct { gpio_num_t clk; gpio_num_t mosi; gpio_num_t sda; // data for I²C gpio_num_t scl; // clock for I²C gpio_num_t cs; gpio_num_t reset; gpio_num_t dc; } u8g2_esp32_hal_t ; #define U8G2_ESP32_HAL_DEFAULT {U8G2_ESP32_HAL_UNDEFINED, U8G2_ESP32_HAL_UNDEFINED, U8G2_ESP32_HAL_UNDEFINED, U8G2_ESP32_HAL_UNDEFINED, U8G2_ESP32_HAL_UNDEFINED, U8G2_ESP32_HAL_UNDEFINED, U8G2_ESP32_HAL_UNDEFINED } void u8g2_esp32_hal_init(u8g2_esp32_hal_t u8g2_esp32_hal_param); uint8_t u8g2_esp32_msg_comms_cb(u8x8_t *u8x8, uint8_t msg, uint8_t arg_int, void *arg_ptr); uint8_t u8g2_esp32_msg_gpio_and_delay_cb(u8x8_t *u8x8, uint8_t msg, uint8_t arg_int, void *arg_ptr); uint8_t u8g2_esp32_msg_i2c_cb(u8x8_t *u8x8, uint8_t msg, uint8_t arg_int, void *arg_ptr); uint8_t u8g2_esp32_msg_i2c_and_delay_cb(u8x8_t *u8x8, uint8_t msg, uint8_t arg_int, void *arg_ptr); #endif /* U8G2_ESP32_HAL_H_ */
{ "pile_set_name": "Github" }
table_create Values TABLE_NO_KEY [[0,0.0,0.0],true] column_create Values value COLUMN_SCALAR Int16 [[0,0.0,0.0],true] load --table Values [ {"value":-1} ] [[0,0.0,0.0],1] select Values --filter true --output_columns '_id,_score' --scorer '_score = max(value)' [[0,0.0,0.0],[[[1],[["_id","UInt32"],["_score","Int32"]],[1,-1]]]]
{ "pile_set_name": "Github" }
var baseToString = require('./_baseToString'), castSlice = require('./_castSlice'), charsEndIndex = require('./_charsEndIndex'), stringToArray = require('./_stringToArray'), toString = require('./toString'); /** Used to match leading and trailing whitespace. */ var reTrimEnd = /\s+$/; /** * Removes trailing whitespace or specified characters from `string`. * * @static * @memberOf _ * @since 4.0.0 * @category String * @param {string} [string=''] The string to trim. * @param {string} [chars=whitespace] The characters to trim. * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`. * @returns {string} Returns the trimmed string. * @example * * _.trimEnd(' abc '); * // => ' abc' * * _.trimEnd('-_-abc-_-', '_-'); * // => '-_-abc' */ function trimEnd(string, chars, guard) { string = toString(string); if (string && (guard || chars === undefined)) { return string.replace(reTrimEnd, ''); } if (!string || !(chars = baseToString(chars))) { return string; } var strSymbols = stringToArray(string), end = charsEndIndex(strSymbols, stringToArray(chars)) + 1; return castSlice(strSymbols, 0, end).join(''); } module.exports = trimEnd;
{ "pile_set_name": "Github" }
// The MIT License // // Copyright (c) 2020 Temporal Technologies Inc. All rights reserved. // // Copyright (c) 2020 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package task import ( "errors" "sync/atomic" "testing" "time" "github.com/golang/mock/gomock" "github.com/stretchr/testify/require" "github.com/stretchr/testify/suite" "github.com/uber-go/tally" "go.temporal.io/server/common" "go.temporal.io/server/common/backoff" "go.temporal.io/server/common/log/loggerimpl" "go.temporal.io/server/common/metrics" ) type ( parallelTaskProcessorSuite struct { *require.Assertions suite.Suite controller *gomock.Controller processor *parallelTaskProcessorImpl } ) var ( errRetryable = errors.New("retryable error") errNonRetryable = errors.New("non-retryable error") ) func TestParallelTaskProcessorSuite(t *testing.T) { s := new(parallelTaskProcessorSuite) suite.Run(t, s) } func (s *parallelTaskProcessorSuite) SetupTest() { s.Assertions = require.New(s.T()) s.controller = gomock.NewController(s.T()) s.processor = NewParallelTaskProcessor( loggerimpl.NewDevelopmentForTest(s.Suite), metrics.NewClient(tally.NoopScope, metrics.Common), &ParallelTaskProcessorOptions{ QueueSize: 0, WorkerCount: 1, RetryPolicy: backoff.NewExponentialRetryPolicy(time.Millisecond), }, ).(*parallelTaskProcessorImpl) } func (s *parallelTaskProcessorSuite) TearDownTest() { s.controller.Finish() } func (s *parallelTaskProcessorSuite) TestSubmit_Success() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().Execute().Return(nil).MaxTimes(1) mockTask.EXPECT().Ack().MaxTimes(1) s.processor.Start() err := s.processor.Submit(mockTask) s.NoError(err) s.processor.Stop() } func (s *parallelTaskProcessorSuite) TestSubmit_Fail() { mockTask := NewMockTask(s.controller) s.processor.Start() s.processor.Stop() err := s.processor.Submit(mockTask) s.Equal(ErrTaskProcessorClosed, err) } func (s *parallelTaskProcessorSuite) TestTaskWorker() { numTasks := 5 done := make(chan struct{}) s.processor.workerWG.Add(1) go func() { for i := 0; i != numTasks; i++ { mockTask := NewMockTask(s.controller) mockTask.EXPECT().Execute().Return(nil).Times(1) mockTask.EXPECT().Ack().Times(1) err := s.processor.Submit(mockTask) s.NoError(err) } close(s.processor.shutdownCh) close(done) }() s.processor.taskWorker() <-done } func (s *parallelTaskProcessorSuite) TestExecuteTask_RetryableError() { mockTask := NewMockTask(s.controller) gomock.InOrder( mockTask.EXPECT().Execute().Return(errRetryable), mockTask.EXPECT().HandleErr(errRetryable).Return(errRetryable), mockTask.EXPECT().RetryErr(errRetryable).Return(true), mockTask.EXPECT().Execute().Return(errRetryable), mockTask.EXPECT().HandleErr(errRetryable).Return(errRetryable), mockTask.EXPECT().RetryErr(errRetryable).Return(true), mockTask.EXPECT().Execute().Return(nil), mockTask.EXPECT().Ack(), ) s.processor.executeTask(mockTask) } func (s *parallelTaskProcessorSuite) TestExecuteTask_NonRetryableError() { mockTask := NewMockTask(s.controller) gomock.InOrder( mockTask.EXPECT().Execute().Return(errNonRetryable), mockTask.EXPECT().HandleErr(errNonRetryable).Return(errNonRetryable), mockTask.EXPECT().RetryErr(errNonRetryable).Return(false).AnyTimes(), mockTask.EXPECT().Nack(), ) s.processor.executeTask(mockTask) } func (s *parallelTaskProcessorSuite) TestExecuteTask_ProcessorStopped() { mockTask := NewMockTask(s.controller) mockTask.EXPECT().Execute().Return(errRetryable).AnyTimes() mockTask.EXPECT().HandleErr(errRetryable).Return(errRetryable).AnyTimes() mockTask.EXPECT().RetryErr(errRetryable).Return(true).AnyTimes() done := make(chan struct{}) go func() { s.processor.executeTask(mockTask) close(done) }() time.Sleep(100 * time.Millisecond) atomic.StoreInt32(&s.processor.status, common.DaemonStatusStopped) <-done }
{ "pile_set_name": "Github" }
input_desc = [ "v128:base", "u32:data", "u32:dummy", ] output_desc = [ "u32:v0", "u32:v1", "u32:v2", "u32:v3", "u32:v4", "u32:v5", "u32:v6", "u32:v7", "u32:v8", "u32:v9", "u32:v10", "u32:v11", "u32:v12", "u32:v13", "u32:v14", "u32:v15", ] rsp_code = """ li a0,$0 li a1,$800 lqv v0[e0],$00(a0) lw t0,$10(a0) mfc2 t0,v0[e0] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e1] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e2] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e3] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e4] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e5] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e6] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e7] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e8] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e9] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e10] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e11] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e12] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e13] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e14] sw t0,$00(a1) addi a1,$4 lw t0,$10(a0) mfc2 t0,v0[e15] sw t0,$00(a1) addi a1,$4 break """ [[test]] name = "basic" input = [ 0x1122_3344, 0x5566_7788, 0x99AA_BBCC, 0xDDEE_FFAA, # base 0x1234_5678, # data 0, ]
{ "pile_set_name": "Github" }
#Copyright 2011, Google Inc. #All rights reserved. # #Redistribution and use in source and binary forms, with or without #modification, are permitted provided that the following conditions are #met: # # * Redistributions of source code must retain the above copyright #notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above #copyright notice, this list of conditions and the following disclaimer #in the documentation and/or other materials provided with the #distribution. # * Neither the name of Google Inc. nor the names of its #contributors may be used to endorse or promote products derived from #this software without specific prior written permission. # #THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR #A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT #OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, #SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT #LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, #DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY #THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT #(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE #OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. .class public LzzzInstanceFields; .super Ljava/lang/Object; .method public constructor <init>()V .registers 1 invoke-direct {v0}, Ljava/lang/Object;-><init>()V return-void .end method .field public field99999:I .field public field99999Wide:J .field public field99999Object:Ljava/lang/Object; .field public field99999Boolean:Z .field public field99999Byte:B .field public field99999Char:C .field public field99999Short:S
{ "pile_set_name": "Github" }
<vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="24dp" android:height="24dp" android:viewportWidth="24" android:viewportHeight="24" android:tint="?attr/colorControlNormal"> <path android:fillColor="@android:color/white" android:pathData="M10,9h4L14,6h3l-5,-5 -5,5h3v3zM9,10L6,10L6,7l-5,5 5,5v-3h3v-4zM23,12l-5,-5v3h-3v4h3v3l5,-5zM14,15h-4v3L7,18l5,5 5,-5h-3v-3z"/> </vector>
{ "pile_set_name": "Github" }
# groupBy #### signature: `groupBy(keySelector: Function, elementSelector: Function): Observable` ## Group into observables based on provided value. [![Ultimate RxJS](https://drive.google.com/uc?export=view&id=1qq2-q-eVe-F_-d0eSvTyqaGRjpfLDdJz 'Ultimate RxJS')](https://ultimatecourses.com/courses/rxjs?ref=4) ### Examples ##### Example 1: Group by property ( [StackBlitz](https://stackblitz.com/edit/typescript-dozkcg?file=index.ts&devtoolsheight=100) | [jsBin](http://jsbin.com/buworowuye/edit?js,console) | [jsFiddle](https://jsfiddle.net/btroncone/utncxxvf/) ) ```js // RxJS v6+ import { from } from 'rxjs'; import { groupBy, mergeMap, toArray } from 'rxjs/operators'; const people = [ { name: 'Sue', age: 25 }, { name: 'Joe', age: 30 }, { name: 'Frank', age: 25 }, { name: 'Sarah', age: 35 } ]; //emit each person const source = from(people); //group by age const example = source.pipe( groupBy(person => person.age), // return each item in group as array mergeMap(group => group.pipe(toArray())) ); /* output: [{age: 25, name: "Sue"},{age: 25, name: "Frank"}] [{age: 30, name: "Joe"}] [{age: 35, name: "Sarah"}] */ const subscribe = example.subscribe(val => console.log(val)); ``` ##### Example 2: Group by into key - values ( [StackBlitz](https://stackblitz.com/edit/rxjs-groupby-key-vals?file=index.ts&devtoolsheight=100) ) ```js // RxJS v6+ import { from, of, zip } from 'rxjs'; import { groupBy, mergeMap, toArray } from 'rxjs/operators'; const people = [ { name: 'Sue', age: 25 }, { name: 'Joe', age: 30 }, { name: 'Frank', age: 25 }, { name: 'Sarah', age: 35 } ]; from(people) .pipe( groupBy( person => person.age, p => p.name ), mergeMap(group => zip(of(group.key), group.pipe(toArray()))) ) .subscribe(console.log); /* output: [25, ["Sue", "Frank"]] [30, ["Joe"]] [35, ["Sarah"]] */ ``` ### Additional Resources - [groupBy](https://rxjs.dev/api/operators/groupBy) 📰 - Official docs - [Group higher order observables with RxJS groupBy](https://egghead.io/lessons/rxjs-group-higher-order-observables-with-rxjs-groupby?course=use-higher-order-observables-in-rxjs-effectively) 🎥 💵 - André Staltz - [Use groupBy in real RxJS applications](https://egghead.io/lessons/rxjs-use-groupby-in-real-rxjs-applications?course=use-higher-order-observables-in-rxjs-effectively) 🎥 💵 - André Staltz --- > 📁 Source Code: > [https://github.com/ReactiveX/rxjs/blob/master/src/internal/operators/groupBy.ts](https://github.com/ReactiveX/rxjs/blob/master/src/internal/operators/groupBy.ts)
{ "pile_set_name": "Github" }
Freescale Amadeus Plus M5253EVBE board ====================================== Hayden Fraser([email protected]) Created 06/05/2007 =========================================== 1. SWITCH SETTINGS ================== 1.1 N/A 2. MEMORY MAP UNDER U-BOOT AND LINUX KERNEL =========================================== 2.1. For the initial bringup, we adopted a consistent memory scheme between u-boot and linux kernel, you can customize it based on your system requirements: SDR: 0x00000000-0x00ffffff SRAM0: 0x20010000-0x20017fff SRAM1: 0x20000000-0x2000ffff MBAR1: 0x10000000-0x4fffffff MBAR2: 0x80000000-0xCfffffff Flash: 0xffe00000-0xffffffff 3. DEFINITIONS AND COMPILATION ============================== 3.1 Explanation on NEW definitions in include/configs/M5253EVBE.h CONFIG_MCF52x2 Processor family CONFIG_MCF5253 MCF5253 specific CONFIG_M5253EVBE Amadeus Plus board specific CONFIG_SYS_CLK Define Amadeus Plus CPU Clock CONFIG_SYS_MBAR MBAR base address CONFIG_SYS_MBAR2 MBAR2 base address 3.2 Compilation export CROSS_COMPILE=/usr/local/freescale-coldfire-4.1-elf/bin/m68k-elf- cd u-boot-1-2-x make distclean make M5253EVBE_config make 4. SCREEN DUMP ============== 4.1 U-Boot 1.2.0 (Jun 18 2007 - 18:20:00) CPU: Freescale Coldfire MCF5253 at 62 MHz Board: Freescale MCF5253 EVBE DRAM: 16 MB FLASH: 2 MB In: serial Out: serial Err: serial => flinfo Bank # 1: CFI conformant FLASH (16 x 16) Size: 2 MB in 35 Sectors AMD Standard command set, Manufacturer ID: 0x01, Device ID: 0x49 Erase timeout: 16384 ms, write timeout: 1 ms Sector Start Addresses: FFE00000 RO FFE04000 RO FFE06000 RO FFE08000 RO FFE10000 RO FFE20000 FFE30000 FFE40000 FFE50000 FFE60000 FFE70000 FFE80000 FFE90000 FFEA0000 FFEB0000 FFEC0000 FFED0000 FFEE0000 FFEF0000 FFF00000 FFF10000 FFF20000 FFF30000 FFF40000 FFF50000 FFF60000 FFF70000 FFF80000 FFF90000 FFFA0000 FFFB0000 FFFC0000 FFFD0000 FFFE0000 FFFF0000 => bdinfo boot_params = 0x00F62F90 memstart = 0x00000000 memsize = 0x01000000 flashstart = 0xFFE00000 flashsize = 0x00200000 flashoffset = 0x00000000 baudrate = 19200 bps => printenv bootdelay=5 baudrate=19200 stdin=serial stdout=serial stderr=serial Environment size: 134/8188 bytes => saveenv Saving Environment to Flash... Un-Protected 1 sectors Erasing Flash... . done Erased 1 sectors Writing to Flash... done Protected 1 sectors => 5. COMPILER ----------- To create U-Boot the CodeSourcery's version of the GNU Toolchain for the ColdFire architecture compiler set (freescale-coldfire-4.1-elf) from www.codesourcery.com was used. You can download it from:http://www.codesourcery.com/gnu_toolchains/coldfire/download.html compiler that you used - for example, codesourcery_elf requires -MQ in rules.mk, old M68K 2.95.3 just -M codesourcery_elf requires -MQ in rules.mk, old M68K 2.95.3 just -M
{ "pile_set_name": "Github" }
ncolors= 101 # r g b 0 0 128 0 0 133 0 0 138 0 0 143 0 0 148 0 0 153 0 0 158 0 0 164 0 0 169 0 0 174 0 0 179 0 0 184 0 0 189 0 0 194 0 0 199 0 0 204 0 0 209 0 0 214 0 0 219 0 0 225 0 0 230 0 0 235 0 0 240 0 0 245 0 0 250 0 0 255 10 10 255 20 20 255 31 31 255 41 41 255 51 51 255 61 61 255 71 71 255 82 82 255 92 92 255 102 102 255 112 112 255 122 122 255 133 133 255 143 143 255 153 153 255 163 163 255 173 173 255 184 184 255 194 194 255 204 204 255 214 214 255 224 224 255 235 235 255 245 245 255 255 255 255 255 245 245 255 235 235 255 224 224 255 214 214 255 204 204 255 194 194 255 184 184 255 173 173 255 163 163 255 153 153 255 143 143 255 133 133 255 122 122 255 112 112 255 102 102 255 92 92 255 82 82 255 71 71 255 61 61 255 51 51 255 41 41 255 31 31 255 20 20 255 10 10 255 0 0 250 0 0 245 0 0 240 0 0 235 0 0 230 0 0 225 0 0 219 0 0 214 0 0 209 0 0 204 0 0 199 0 0 194 0 0 189 0 0 184 0 0 179 0 0 174 0 0 169 0 0 164 0 0 158 0 0 153 0 0 148 0 0 143 0 0 138 0 0 133 0 0 128 0 0
{ "pile_set_name": "Github" }
/** * @file Source/timer_component.cpp. * * @brief Implements the timer component class */ #include <timer_component.h> namespace Rubeus { namespace UtilityComponents { constexpr auto ONESECONDINMILLISECONDS = 1000000000; std::chrono::steady_clock RTimer::m_Clock; RTimer::RTimer(int numberOfTimePoints) { m_TimePoints = new std::chrono::time_point<std::chrono::steady_clock>[numberOfTimePoints]; m_TimePoints[0] = m_Clock.now(); } RTimer::~RTimer() { delete[] m_TimePoints; } long long int RTimer::getRelativeTime(int sinceThisSlotIndex, int tillThisSlotIndex) { return (m_TimePoints[tillThisSlotIndex] - m_TimePoints[sinceThisSlotIndex]).count(); } void RTimer::addTimePoint(int slotIndex) { m_TimePoints[slotIndex] = m_Clock.now(); } void RTimer::setFrameCounter() { m_Frames = 0; addTimePoint(0); } void RTimer::evaluateFrames() { addTimePoint(1); if ((getRelativeTime(0, 1)) >= ONESECONDINMILLISECONDS) { LOG(std::to_string(++m_Frames) + " fps"); addTimePoint(0); m_Frames = 0; } m_Frames++; } } }
{ "pile_set_name": "Github" }
// // detail/array_fwd.hpp // ~~~~~~~~~~~~~~~~~~~~ // // Copyright (c) 2003-2016 Christopher M. Kohlhoff (chris at kohlhoff dot com) // // Distributed under the Boost Software License, Version 1.0. (See accompanying // file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // #ifndef BOOST_ASIO_DETAIL_ARRAY_FWD_HPP #define BOOST_ASIO_DETAIL_ARRAY_FWD_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1200) # pragma once #endif // defined(_MSC_VER) && (_MSC_VER >= 1200) #include <boost/asio/detail/config.hpp> namespace boost { template<class T, std::size_t N> class array; } // namespace boost // Standard library components can't be forward declared, so we'll have to // include the array header. Fortunately, it's fairly lightweight and doesn't // add significantly to the compile time. #if defined(BOOST_ASIO_HAS_STD_ARRAY) # include <array> #endif // defined(BOOST_ASIO_HAS_STD_ARRAY) #endif // BOOST_ASIO_DETAIL_ARRAY_FWD_HPP
{ "pile_set_name": "Github" }
/** * @author mrdoob / http://mrdoob.com/ * @author Mugen87 / https://github.com/Mugen87 */ THREE.BabylonLoader = function ( manager ) { this.manager = ( manager !== undefined ) ? manager : THREE.DefaultLoadingManager; }; THREE.BabylonLoader.prototype = { constructor: THREE.BabylonLoader, load: function ( url, onLoad, onProgress, onError ) { var scope = this; var loader = new THREE.FileLoader( scope.manager ); loader.load( url, function ( text ) { onLoad( scope.parse( JSON.parse( text ) ) ); }, onProgress, onError ); }, parse: function ( json ) { function parseMaterials( json ) { var materials = {}; for ( var i = 0, l = json.materials.length; i < l; i ++ ) { var data = json.materials[ i ]; var material = new THREE.MeshPhongMaterial(); material.name = data.name; material.color.fromArray( data.diffuse ); material.emissive.fromArray( data.emissive ); material.specular.fromArray( data.specular ); material.shininess = data.specularPower; material.opacity = data.alpha; materials[ data.id ] = material; } if ( json.multiMaterials ) { for ( var i = 0, l = json.multiMaterials.length; i < l; i ++ ) { var data = json.multiMaterials[ i ]; console.warn( 'THREE.BabylonLoader: Multi materials not yet supported.' ); materials[ data.id ] = new THREE.MeshPhongMaterial(); } } return materials; } function parseGeometry( json ) { var geometry = new THREE.BufferGeometry(); var indices = json.indices; var positions = json.positions; var normals = json.normals; var uvs = json.uvs; // indices geometry.setIndex( indices ); // positions for ( var j = 2, jl = positions.length; j < jl; j += 3 ) { positions[ j ] = - positions[ j ]; } geometry.addAttribute( 'position', new THREE.Float32BufferAttribute( positions, 3 ) ); // normals if ( normals ) { for ( var j = 2, jl = normals.length; j < jl; j += 3 ) { normals[ j ] = - normals[ j ]; } geometry.addAttribute( 'normal', new THREE.Float32BufferAttribute( normals, 3 ) ); } // uvs if ( uvs ) { geometry.addAttribute( 'uv', new THREE.Float32BufferAttribute( uvs, 2 ) ); } // offsets var subMeshes = json.subMeshes; if ( subMeshes ) { for ( var j = 0, jl = subMeshes.length; j < jl; j ++ ) { var subMesh = subMeshes[ j ]; geometry.addGroup( subMesh.indexStart, subMesh.indexCount ); } } return geometry; } function parseObjects( json, materials ) { var objects = {}; var scene = new THREE.Scene(); var cameras = json.cameras; for ( var i = 0, l = cameras.length; i < l; i ++ ) { var data = cameras[ i ]; var camera = new THREE.PerspectiveCamera( ( data.fov / Math.PI ) * 180, 1.33, data.minZ, data.maxZ ); camera.name = data.name; camera.position.fromArray( data.position ); if ( data.rotation ) camera.rotation.fromArray( data.rotation ); objects[ data.id ] = camera; } var lights = json.lights; for ( var i = 0, l = lights.length; i < l; i ++ ) { var data = lights[ i ]; var light; switch ( data.type ) { case 0: light = new THREE.PointLight(); break; case 1: light = new THREE.DirectionalLight(); break; case 2: light = new THREE.SpotLight(); break; case 3: light = new THREE.HemisphereLight(); break; } light.name = data.name; if ( data.position ) light.position.set( data.position[ 0 ], data.position[ 1 ], - data.position[ 2 ] ); light.color.fromArray( data.diffuse ); if ( data.groundColor ) light.groundColor.fromArray( data.groundColor ); if ( data.intensity ) light.intensity = data.intensity; objects[ data.id ] = light; scene.add( light ); } var meshes = json.meshes; for ( var i = 0, l = meshes.length; i < l; i ++ ) { var data = meshes[ i ]; var object; if ( data.indices ) { var geometry = parseGeometry( data ); object = new THREE.Mesh( geometry, materials[ data.materialId ] ); } else { object = new THREE.Group(); } object.name = data.name; object.position.set( data.position[ 0 ], data.position[ 1 ], - data.position[ 2 ] ); object.rotation.fromArray( data.rotation ); if ( data.rotationQuaternion ) object.quaternion.fromArray( data.rotationQuaternion ); object.scale.fromArray( data.scaling ); // object.visible = data.isVisible; if ( data.parentId ) { objects[ data.parentId ].add( object ); } else { scene.add( object ); } objects[ data.id ] = object; } return scene; } var materials = parseMaterials( json ); var scene = parseObjects( json, materials ); return scene; } };
{ "pile_set_name": "Github" }
# Device Location Demo ## What You Will Need * [Amazon Developer Account](http://developer.amazon.com/alexa) * (Optional) [Amazon Web Services Account](http://aws.amazon.com/) * The sample code on [GitHub](https://github.com/alexa/alexa-cookbook/tree/master/feature-demos/skill-demo-device-location/). * (Optional) [Maven build tool](https://maven.apache.org/) ## Setting Up the Demo ### Node.js version (default) This folder contains the interaction model and skill code. It is structured to make it easy to deploy if you have the ASK CLI already setup. If you would like to use the Developer Portal, you can follow the steps outlined in the [Hello World](https://github.com/alexa/skill-sample-nodejs-hello-world) example, substituting the [Model](./models/en-US.json) and the [skill code](./lambda/node/index.js) when called for. ### Python version > Using the Python version requires an AWS Account. These instructions also assume you have the ASK CLI installed and properly initialized (performed using the **ask init** command). To use the Python version: 1. Rename the **config** file in the **.ask** folder. `ren .ask/config config.node` 1. Rename the **config.py** file in the **.ask** folder. `ren .ask/config.py config` 1. Modify the **skill.json** file. Change the **sourceDir** value to `custom\py`. 1. Deploy the skill using `ask deploy`. ### Java version > Using the Java version requires an AWS Account. It also assumes you have Maven installed and working correctly. To use the Java version: 1. Create the skill in a manner similar to the process described in the [Java Fact Sample Skill](https://github.com/alexa/skill-sample-java-fact#setup), with the following adjustments. * Use `512 MB` for the Lambda function's **Memory** setting. * Use `com.amazon.ask.demo.DeviceLocation.DeviceLocationDemoStreamHandler` as the **Handler**. 1. Build the jar file with dependencies: ``` cd custom/java mvn assembly:assembly -DdescriptorId=jar-with-dependencies package ``` 1. Upload the jar file which includes the dependencies. > Remember to configure the Alexa Skills Kit trigger on the Lambda function. ## Running the Demo To start the demo say "alexa open device location demo". Because accessing the device location requires customer consent, once your skill is deployed and enabled, you will need to visit the Alexa app (or http://alexa.amzon.com) to enable the permission. Making a request to the skill will trigger the permissions card, making it easy for you to locate the correct place to enable the required permissions.
{ "pile_set_name": "Github" }
// WimRegister.cpp #include "StdAfx.h" #include "../../Common/RegisterArc.h" #include "WimHandler.h" static IInArchive *CreateArc() { return new NArchive::NWim::CHandler; } static CArcInfo g_ArcInfo = { L"Wim", L"wim swm", 0, 0xE6, { 'M', 'S', 'W', 'I', 'M', 0, 0, 0 }, 8, false, CreateArc, 0 }; REGISTER_ARC(Wim)
{ "pile_set_name": "Github" }
package gregtech.common.tileentities.machines.multi; import gregtech.api.GregTech_API; import gregtech.api.interfaces.metatileentity.IMetaTileEntity; import gregtech.api.interfaces.tileentity.IGregTechTileEntity; import net.minecraft.block.Block; public class GT_MetaTileEntity_LargeBoiler_TungstenSteel extends GT_MetaTileEntity_LargeBoiler { public GT_MetaTileEntity_LargeBoiler_TungstenSteel(int aID, String aName, String aNameRegional) { super(aID, aName, aNameRegional); } public GT_MetaTileEntity_LargeBoiler_TungstenSteel(String aName) { super(aName); } public IMetaTileEntity newMetaEntity(IGregTechTileEntity aTileEntity) { return new GT_MetaTileEntity_LargeBoiler_TungstenSteel(this.mName); } public String getCasingMaterial(){ return "TungstenSteel"; } @Override public String getCasingBlockType() { return "Machine Casings"; } public Block getCasingBlock() { return GregTech_API.sBlockCasings4; } public byte getCasingMeta() { return 0; } public byte getCasingTextureIndex() { return 48; } public Block getPipeBlock() { return GregTech_API.sBlockCasings2; } public byte getPipeMeta() { return 15; } public Block getFireboxBlock() { return GregTech_API.sBlockCasings3; } public byte getFireboxMeta() { return 15; } public byte getFireboxTextureIndex() { return 47; } public int getEUt() { return 1000; } public int getEfficiencyIncrease() { return 4; } @Override int runtimeBoost(int mTime) { return mTime * 120 / 100; } }
{ "pile_set_name": "Github" }
/* * This file was automatically generated by EvoSuite * Thu Sep 20 12:41:03 GMT 2018 */ package uk.ac.sanger.artemis.components; import org.junit.Test; import static org.junit.Assert.*; import static org.evosuite.shaded.org.mockito.Mockito.*; import static org.evosuite.runtime.EvoAssertions.*; import java.awt.HeadlessException; import org.evosuite.runtime.EvoRunner; import org.evosuite.runtime.EvoRunnerParameters; import org.evosuite.runtime.ViolatedAssumptionAnswer; import org.junit.runner.RunWith; import uk.ac.sanger.artemis.components.ActMain; import uk.ac.sanger.artemis.components.ComparatorDialog; @RunWith(EvoRunner.class) @EvoRunnerParameters(mockJVMNonDeterminism = true, useVFS = true, useVNET = true, resetStaticState = true, useJEE = true) public class ComparatorDialog_ESTest extends ComparatorDialog_ESTest_scaffolding { @Test(timeout = 4000) public void test0() throws Throwable { ActMain actMain0 = mock(ActMain.class, new ViolatedAssumptionAnswer()); ComparatorDialog comparatorDialog0 = null; try { comparatorDialog0 = new ComparatorDialog(actMain0); fail("Expecting exception: HeadlessException"); } catch(HeadlessException e) { // // no message in exception (getMessage() returned null) // verifyException("java.awt.GraphicsEnvironment", e); } } }
{ "pile_set_name": "Github" }
<?php /** * This file is part of the Carbon package. * * (c) Brian Nesbitt <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ return array_replace_recursive(require __DIR__.'/en.php', [ 'first_day_of_week' => 1, ]);
{ "pile_set_name": "Github" }
/* * Copyright (C) 2006, 2007, 2008, 2010 Apple Inc. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #include "config.h" #include "ImageDocument.h" #include "CachedImage.h" #include "DocumentLoader.h" #include "EventListener.h" #include "EventNames.h" #include "Frame.h" #include "FrameLoaderClient.h" #include "FrameView.h" #include "HTMLHtmlElement.h" #include "HTMLImageElement.h" #include "HTMLNames.h" #include "LocalizedStrings.h" #include "MouseEvent.h" #include "NotImplemented.h" #include "Page.h" #include "RawDataDocumentParser.h" #include "Settings.h" using std::min; namespace WebCore { using namespace HTMLNames; class ImageEventListener : public EventListener { public: static PassRefPtr<ImageEventListener> create(ImageDocument* document) { return adoptRef(new ImageEventListener(document)); } static const ImageEventListener* cast(const EventListener* listener) { return listener->type() == ImageEventListenerType ? static_cast<const ImageEventListener*>(listener) : 0; } virtual bool operator==(const EventListener& other); private: ImageEventListener(ImageDocument* document) : EventListener(ImageEventListenerType) , m_doc(document) { } virtual void handleEvent(ScriptExecutionContext*, Event*); ImageDocument* m_doc; }; class ImageDocumentParser : public RawDataDocumentParser { public: static PassRefPtr<ImageDocumentParser> create(ImageDocument* document) { return adoptRef(new ImageDocumentParser(document)); } ImageDocument* document() const { return static_cast<ImageDocument*>(RawDataDocumentParser::document()); } private: ImageDocumentParser(ImageDocument* document) : RawDataDocumentParser(document) { } virtual void appendBytes(DocumentWriter*, const char*, int, bool); virtual void finish(); }; class ImageDocumentElement : public HTMLImageElement { public: static PassRefPtr<ImageDocumentElement> create(ImageDocument*); private: ImageDocumentElement(ImageDocument* document) : HTMLImageElement(imgTag, document) , m_imageDocument(document) { } virtual ~ImageDocumentElement(); virtual void willMoveToNewOwnerDocument(); ImageDocument* m_imageDocument; }; inline PassRefPtr<ImageDocumentElement> ImageDocumentElement::create(ImageDocument* document) { return adoptRef(new ImageDocumentElement(document)); } // -------- static float pageZoomFactor(const Document* document) { Frame* frame = document->frame(); return frame ? frame->pageZoomFactor() : 1; } void ImageDocumentParser::appendBytes(DocumentWriter*, const char*, int, bool) { Frame* frame = document()->frame(); Settings* settings = frame->settings(); if (!frame->loader()->client()->allowImages(!settings || settings->areImagesEnabled())) return; CachedImage* cachedImage = document()->cachedImage(); cachedImage->data(frame->loader()->documentLoader()->mainResourceData(), false); document()->imageUpdated(); } void ImageDocumentParser::finish() { if (!isStopped() && document()->imageElement()) { CachedImage* cachedImage = document()->cachedImage(); RefPtr<SharedBuffer> data = document()->frame()->loader()->documentLoader()->mainResourceData(); // If this is a multipart image, make a copy of the current part, since the resource data // will be overwritten by the next part. if (document()->frame()->loader()->documentLoader()->isLoadingMultipartContent()) data = data->copy(); cachedImage->data(data.release(), true); cachedImage->finish(); cachedImage->setResponse(document()->frame()->loader()->documentLoader()->response()); // Report the natural image size in the page title, regardless of zoom // level. IntSize size = cachedImage->imageSize(1.0f); if (size.width()) { // Compute the title, we use the decoded filename of the resource, falling // back on the (decoded) hostname if there is no path. String fileName = decodeURLEscapeSequences(document()->url().lastPathComponent()); if (fileName.isEmpty()) fileName = document()->url().host(); document()->setTitle(imageTitle(fileName, size)); } document()->imageUpdated(); } document()->finishedParsing(); } // -------- ImageDocument::ImageDocument(Frame* frame, const KURL& url) : HTMLDocument(frame, url) , m_imageElement(0) , m_imageSizeIsKnown(false) , m_didShrinkImage(false) , m_shouldShrinkImage(shouldShrinkToFit()) { setCompatibilityMode(QuirksMode); lockCompatibilityMode(); } PassRefPtr<DocumentParser> ImageDocument::createParser() { return ImageDocumentParser::create(this); } void ImageDocument::createDocumentStructure() { ExceptionCode ec; RefPtr<Element> rootElement = Document::createElement(htmlTag, false); appendChild(rootElement, ec); #if ENABLE(OFFLINE_WEB_APPLICATIONS) static_cast<HTMLHtmlElement*>(rootElement.get())->insertedByParser(); #endif if (frame() && frame()->loader()) frame()->loader()->dispatchDocumentElementAvailable(); RefPtr<Element> body = Document::createElement(bodyTag, false); body->setAttribute(styleAttr, "margin: 0px;"); rootElement->appendChild(body, ec); RefPtr<ImageDocumentElement> imageElement = ImageDocumentElement::create(this); imageElement->setAttribute(styleAttr, "-webkit-user-select: none"); imageElement->setLoadManually(true); imageElement->setSrc(url().string()); body->appendChild(imageElement, ec); if (shouldShrinkToFit()) { // Add event listeners RefPtr<EventListener> listener = ImageEventListener::create(this); if (DOMWindow* domWindow = this->domWindow()) domWindow->addEventListener("resize", listener, false); imageElement->addEventListener("click", listener.release(), false); } m_imageElement = imageElement.get(); } float ImageDocument::scale() const { if (!m_imageElement) return 1.0f; FrameView* view = frame()->view(); if (!view) return 1; IntSize imageSize = m_imageElement->cachedImage()->imageSize(pageZoomFactor(this)); IntSize windowSize = IntSize(view->width(), view->height()); float widthScale = (float)windowSize.width() / imageSize.width(); float heightScale = (float)windowSize.height() / imageSize.height(); return min(widthScale, heightScale); } void ImageDocument::resizeImageToFit() { if (!m_imageElement) return; IntSize imageSize = m_imageElement->cachedImage()->imageSize(pageZoomFactor(this)); float scale = this->scale(); m_imageElement->setWidth(static_cast<int>(imageSize.width() * scale)); m_imageElement->setHeight(static_cast<int>(imageSize.height() * scale)); ExceptionCode ec; m_imageElement->style()->setProperty("cursor", "-webkit-zoom-in", ec); } void ImageDocument::imageClicked(int x, int y) { if (!m_imageSizeIsKnown || imageFitsInWindow()) return; m_shouldShrinkImage = !m_shouldShrinkImage; if (m_shouldShrinkImage) windowSizeChanged(); else { restoreImageSize(); updateLayout(); float scale = this->scale(); int scrollX = static_cast<int>(x / scale - (float)frame()->view()->width() / 2); int scrollY = static_cast<int>(y / scale - (float)frame()->view()->height() / 2); frame()->view()->setScrollPosition(IntPoint(scrollX, scrollY)); } } void ImageDocument::imageUpdated() { ASSERT(m_imageElement); if (m_imageSizeIsKnown) return; if (m_imageElement->cachedImage()->imageSize(pageZoomFactor(this)).isEmpty()) return; m_imageSizeIsKnown = true; if (shouldShrinkToFit()) { // Force resizing of the image windowSizeChanged(); } } void ImageDocument::restoreImageSize() { if (!m_imageElement || !m_imageSizeIsKnown) return; m_imageElement->setWidth(m_imageElement->cachedImage()->imageSize(pageZoomFactor(this)).width()); m_imageElement->setHeight(m_imageElement->cachedImage()->imageSize(pageZoomFactor(this)).height()); ExceptionCode ec; if (imageFitsInWindow()) m_imageElement->style()->removeProperty("cursor", ec); else m_imageElement->style()->setProperty("cursor", "-webkit-zoom-out", ec); m_didShrinkImage = false; } bool ImageDocument::imageFitsInWindow() const { if (!m_imageElement) return true; FrameView* view = frame()->view(); if (!view) return true; IntSize imageSize = m_imageElement->cachedImage()->imageSize(pageZoomFactor(this)); IntSize windowSize = IntSize(view->width(), view->height()); return imageSize.width() <= windowSize.width() && imageSize.height() <= windowSize.height(); } void ImageDocument::windowSizeChanged() { if (!m_imageElement || !m_imageSizeIsKnown) return; bool fitsInWindow = imageFitsInWindow(); // If the image has been explicitly zoomed in, restore the cursor if the image fits // and set it to a zoom out cursor if the image doesn't fit if (!m_shouldShrinkImage) { ExceptionCode ec; if (fitsInWindow) m_imageElement->style()->removeProperty("cursor", ec); else m_imageElement->style()->setProperty("cursor", "-webkit-zoom-out", ec); return; } if (m_didShrinkImage) { // If the window has been resized so that the image fits, restore the image size // otherwise update the restored image size. if (fitsInWindow) restoreImageSize(); else resizeImageToFit(); } else { // If the image isn't resized but needs to be, then resize it. if (!fitsInWindow) { resizeImageToFit(); m_didShrinkImage = true; } } } CachedImage* ImageDocument::cachedImage() { if (!m_imageElement) createDocumentStructure(); return m_imageElement->cachedImage(); } bool ImageDocument::shouldShrinkToFit() const { return frame()->page()->settings()->shrinksStandaloneImagesToFit() && frame()->page()->mainFrame() == frame(); } // -------- void ImageEventListener::handleEvent(ScriptExecutionContext*, Event* event) { if (event->type() == eventNames().resizeEvent) m_doc->windowSizeChanged(); else if (event->type() == eventNames().clickEvent && event->isMouseEvent()) { MouseEvent* mouseEvent = static_cast<MouseEvent*>(event); m_doc->imageClicked(mouseEvent->x(), mouseEvent->y()); } } bool ImageEventListener::operator==(const EventListener& listener) { if (const ImageEventListener* imageEventListener = ImageEventListener::cast(&listener)) return m_doc == imageEventListener->m_doc; return false; } // -------- ImageDocumentElement::~ImageDocumentElement() { if (m_imageDocument) m_imageDocument->disconnectImageElement(); } void ImageDocumentElement::willMoveToNewOwnerDocument() { if (m_imageDocument) { m_imageDocument->disconnectImageElement(); m_imageDocument = 0; } HTMLImageElement::willMoveToNewOwnerDocument(); } }
{ "pile_set_name": "Github" }
var baseValues = require('../internal/baseValues'), keys = require('./keys'); /** * Creates an array of the own enumerable property values of `object`. * * **Note:** Non-object values are coerced to objects. * * @static * @memberOf _ * @category Object * @param {Object} object The object to query. * @returns {Array} Returns the array of property values. * @example * * function Foo() { * this.a = 1; * this.b = 2; * } * * Foo.prototype.c = 3; * * _.values(new Foo); * // => [1, 2] (iteration order is not guaranteed) * * _.values('hi'); * // => ['h', 'i'] */ function values(object) { return baseValues(object, keys(object)); } module.exports = values;
{ "pile_set_name": "Github" }
StartChar: uni062D.medi_MemHaaMemInit Encoding: 67255 -1 2003 Width: 195 Flags: HW AnchorPoint: "TashkilAbove" 122 801 basechar 0 AnchorPoint: "TashkilBelow" 155 -327 basechar 0 LayerCount: 3 Fore Refer: 126 -1 N 1 0 0 1 0 0 3 EndChar
{ "pile_set_name": "Github" }
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magento.com for more information. * * @category Mage * @package Mage_ConfigurableSwatches * @copyright Copyright (c) 2006-2020 Magento, Inc. (http://www.magento.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ class Mage_ConfigurableSwatches_Model_Resource_Catalog_Product_Type_Configurable extends Mage_Catalog_Model_Resource_Product_Type_Configurable { /** * Retrieve Required children ids * Grouped by parent id. * * @param mixed $parentId may be array of integers or scalar integer. * @param bool $required * @return array * @see Mage_Catalog_Model_Resource_Product_Type_Configurable::getChildrenIds() */ public function getChildrenIds($parentId, $required = true) { if (is_array($parentId)) { $childrenIds = array(); if (!empty($parentId)) { $select = $this->_getReadAdapter()->select() ->from(array('l' => $this->getMainTable()), array('product_id', 'parent_id')) ->join( array('e' => $this->getTable('catalog/product')), 'e.entity_id = l.product_id AND e.required_options = 0', array() ) ->where('parent_id IN (?)', $parentId); foreach ($this->_getReadAdapter()->fetchAll($select) as $row) { $childrenIds[$row['parent_id']][$row['product_id']] = $row['product_id']; } } return $childrenIds; } else { return parent::getChildrenIds($parentId, $required); } } }
{ "pile_set_name": "Github" }
999 dxfrw 0.5.10 0 SECTION 2 HEADER 9 $ACADVER 1 AC1021 9 $HANDSEED 5 20000 9 $DWGCODEPAGE 3 ANSI_1252 9 $INSBASE 10 0 20 0 30 0 9 $EXTMIN 10 0 20 0 30 0 9 $EXTMAX 10 0 20 0 30 0 9 $LIMMIN 10 0 20 0 9 $LIMMAX 10 420 20 297 9 $ORTHOMODE 70 0 9 $LTSCALE 40 1 9 $TEXTSTYLE 7 STANDARD 9 $CLAYER 8 0 9 $DIMASZ 40 2.5 9 $DIMLFAC 40 1 9 $DIMSCALE 40 1 9 $DIMEXO 40 0.625 9 $DIMEXE 40 1.25 9 $DIMTXT 40 2.5 9 $DIMTSZ 40 0 9 $DIMAUNIT 70 0 9 $DIMADEC 70 0 9 $DIMLUNIT 70 2 9 $DIMSTYLE 2 STANDARD 9 $DIMGAP 40 0.625 9 $DIMTIH 70 0 9 $LUNITS 70 2 9 $LUPREC 70 4 9 $AUNITS 70 0 9 $AUPREC 70 2 9 $SPLINESEGS 70 8 9 $GRIDMODE 70 1 9 $SNAPSTYLE 70 0 9 $PINSBASE 10 3.59203924525e-289 20 1.77864316487e-313 30 6.79038653148e-313 9 $PLIMMIN 10 0 20 0 9 $PLIMMAX 10 0 20 0 9 $INSUNITS 70 0 9 $PSVPSCALE 40 1 0 ENDSEC 0 SECTION 2 CLASSES 0 ENDSEC 0 SECTION 2 TABLES 0 TABLE 2 VPORT 5 8 330 0 100 AcDbSymbolTable 70 1 0 VPORT 5 31 330 2 100 AcDbSymbolTableRecord 100 AcDbViewportTableRecord 2 *ACTIVE 70 0 10 0 20 0 11 1 21 1 12 153.75 22 71.125 13 0 23 0 14 10 24 10 15 10 25 10 16 0 26 0 36 1 17 0 27 0 37 0 40 154.75 41 2.06785137318 42 50 43 0 44 0 50 0 51 0 71 0 72 100 73 1 74 3 75 0 76 1 77 0 78 0 281 0 65 1 110 0 120 0 130 0 111 1 121 0 131 0 112 0 122 1 132 0 79 0 146 0 348 10020 60 7 61 5 292 1 282 1 141 0 142 0 63 250 421 3358443 0 ENDTAB 0 TABLE 2 LTYPE 5 5 330 0 100 AcDbSymbolTable 70 4 0 LTYPE 5 14 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 ByBlock 70 0 3 72 65 73 0 40 0 0 LTYPE 5 15 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 ByLayer 70 0 3 72 65 73 0 40 0 0 LTYPE 5 16 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 Continuous 70 0 3 Solid line 72 65 73 0 40 0 0 LTYPE 5 32 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DOT 70 0 3 Dot . . . . . . . . . . . . . . . . . . . . . . 72 65 73 2 40 6.35 49 0 74 0 49 -6.35 74 0 0 LTYPE 5 33 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DOT2 70 0 3 Dot (.5x) ..................................... 72 65 73 2 40 3.175 49 0 74 0 49 -3.175 74 0 0 LTYPE 5 34 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DOTX2 70 0 3 Dot (2x) . . . . . . . . . . . . . 72 65 73 2 40 12.7 49 0 74 0 49 -12.7 74 0 0 LTYPE 5 35 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DASHED 70 0 3 Dot . . . . . . . . . . . . . . . . . . . . . . 72 65 73 2 40 19.05 49 12.7 74 0 49 -6.35 74 0 0 LTYPE 5 36 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DASHED2 70 0 3 Dashed (.5x) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 72 65 73 2 40 9.525 49 6.35 74 0 49 -3.175 74 0 0 LTYPE 5 37 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DASHEDX2 70 0 3 Dashed (2x) ____ ____ ____ ____ ____ ___ 72 65 73 2 40 38.1 49 25.4 74 0 49 -12.7 74 0 0 LTYPE 5 38 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DASHDOT 70 0 3 Dash dot __ . __ . __ . __ . __ . __ . __ . __ 72 65 73 4 40 25.4 49 12.7 74 0 49 -6.35 74 0 49 0 74 0 49 -6.35 74 0 0 LTYPE 5 39 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DASHDOT2 70 0 3 Dash dot (.5x) _._._._._._._._._._._._._._._. 72 65 73 4 40 12.7 49 6.35 74 0 49 -3.175 74 0 49 0 74 0 49 -3.175 74 0 0 LTYPE 5 3A 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DASHDOTX2 70 0 3 Dash dot (2x) ____ . ____ . ____ . ___ 72 65 73 4 40 50.8 49 25.4 74 0 49 -12.7 74 0 49 0 74 0 49 -12.7 74 0 0 LTYPE 5 3B 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DIVIDE 70 0 3 Divide ____ . . ____ . . ____ . . ____ . . ____ 72 65 73 6 40 31.75 49 12.7 74 0 49 -6.35 74 0 49 0 74 0 49 -6.35 74 0 49 0 74 0 49 -6.35 74 0 0 LTYPE 5 3C 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DIVIDE2 70 0 3 Divide (.5x) __..__..__..__..__..__..__..__.._ 72 65 73 6 40 15.875 49 6.35 74 0 49 -3.175 74 0 49 0 74 0 49 -3.175 74 0 49 0 74 0 49 -3.175 74 0 0 LTYPE 5 3D 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 DIVIDEX2 70 0 3 Divide (2x) ________ . . ________ . . _ 72 65 73 6 40 63.5 49 25.4 74 0 49 -12.7 74 0 49 0 74 0 49 -12.7 74 0 49 0 74 0 49 -12.7 74 0 0 LTYPE 5 3E 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 BORDER 70 0 3 Border __ __ . __ __ . __ __ . __ __ . __ __ . 72 65 73 6 40 44.45 49 12.7 74 0 49 -6.35 74 0 49 12.7 74 0 49 -6.35 74 0 49 0 74 0 49 -6.35 74 0 0 LTYPE 5 3F 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 BORDER2 70 0 3 Border (.5x) __.__.__.__.__.__.__.__.__.__.__. 72 65 73 6 40 22.225 49 6.35 74 0 49 -3.175 74 0 49 6.35 74 0 49 -3.175 74 0 49 0 74 0 49 -3.175 74 0 0 LTYPE 5 40 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 BORDERX2 70 0 3 Border (2x) ____ ____ . ____ ____ . ___ 72 65 73 6 40 88.9 49 25.4 74 0 49 -12.7 74 0 49 25.4 74 0 49 -12.7 74 0 49 0 74 0 49 -12.7 74 0 0 LTYPE 5 41 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 CENTER 70 0 3 Center ____ _ ____ _ ____ _ ____ _ ____ _ ____ 72 65 73 4 40 50.8 49 31.75 74 0 49 -6.35 74 0 49 6.35 74 0 49 -6.35 74 0 0 LTYPE 5 42 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 CENTER2 70 0 3 Center (.5x) ___ _ ___ _ ___ _ ___ _ ___ _ ___ 72 65 73 4 40 28.575 49 19.05 74 0 49 -3.175 74 0 49 3.175 74 0 49 -3.175 74 0 0 LTYPE 5 43 330 5 100 AcDbSymbolTableRecord 100 AcDbLinetypeTableRecord 2 CENTERX2 70 0 3 Center (2x) ________ __ ________ __ _____ 72 65 73 4 40 101.6 49 63.5 74 0 49 -12.7 74 0 49 12.7 74 0 49 -12.7 74 0 0 ENDTAB 0 TABLE 2 LAYER 5 2 330 0 100 AcDbSymbolTable 70 1 0 LAYER 5 10 330 2 100 AcDbSymbolTableRecord 100 AcDbLayerTableRecord 2 0 70 0 62 7 6 CONTINUOUS 370 0 390 F 0 ENDTAB 0 TABLE 2 STYLE 5 3 330 0 100 AcDbSymbolTable 70 3 0 STYLE 5 44 330 2 100 AcDbSymbolTableRecord 100 AcDbTextStyleTableRecord 2 Standard 70 0 40 0 41 1 50 0 71 0 42 1 3 txt 4 0 ENDTAB 0 TABLE 2 VIEW 5 6 330 0 100 AcDbSymbolTable 70 0 0 ENDTAB 0 TABLE 2 UCS 5 7 330 0 100 AcDbSymbolTable 70 0 0 ENDTAB 0 TABLE 2 APPID 5 9 330 0 100 AcDbSymbolTable 70 1 0 APPID 5 12 330 9 100 AcDbSymbolTableRecord 100 AcDbRegAppTableRecord 2 ACAD 70 0 0 ENDTAB 0 TABLE 2 DIMSTYLE 5 A 330 0 100 AcDbSymbolTable 70 1 100 AcDbDimStyleTable 71 1 0 DIMSTYLE 105 45 330 A 100 AcDbSymbolTableRecord 100 AcDbDimStyleTableRecord 2 Standard 70 0 40 1 41 2.5 42 0.625 43 0.38 44 1.25 45 0 46 0 47 0 48 0 140 2.5 141 0.09 142 0 143 25.4 144 1 145 0 146 1 147 0.625 148 0 71 0 72 0 73 0 74 1 75 0 76 0 77 0 78 0 79 0 170 0 171 2 172 0 173 0 174 0 175 0 176 0 177 0 178 0 179 0 271 4 272 4 273 2 274 2 275 0 276 0 277 2 278 46 279 0 280 0 281 0 282 0 283 1 284 0 285 0 286 0 288 0 289 3 340 Standard 341 371 -2 372 -2 0 ENDTAB 0 TABLE 2 BLOCK_RECORD 5 1 330 0 100 AcDbSymbolTable 70 2 0 BLOCK_RECORD 5 1F 330 1 100 AcDbSymbolTableRecord 100 AcDbBlockTableRecord 2 *Model_Space 70 0 280 1 281 0 0 BLOCK_RECORD 5 1E 330 1 100 AcDbSymbolTableRecord 100 AcDbBlockTableRecord 2 *Paper_Space 70 0 280 1 281 0 0 ENDTAB 0 ENDSEC 0 SECTION 2 BLOCKS 0 BLOCK 5 20 330 1F 100 AcDbEntity 8 0 100 AcDbBlockBegin 2 *Model_Space 70 0 10 0 20 0 30 0 3 *Model_Space 1 0 ENDBLK 5 21 330 1F 100 AcDbEntity 8 0 100 AcDbBlockEnd 0 BLOCK 5 1C 330 1B 100 AcDbEntity 8 0 100 AcDbBlockBegin 2 *Paper_Space 70 0 10 0 20 0 30 0 3 *Paper_Space 1 0 ENDBLK 5 1D 330 1F 100 AcDbEntity 8 0 100 AcDbBlockEnd 0 ENDSEC 0 SECTION 2 ENTITIES 0 LINE 5 46 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 11.818182 20 5.909091 11 15.409091 21 5.909091 0 LINE 5 47 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 0 20 5.818182 11 -3.590909 21 5.818182 0 LINE 5 48 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 0 20 0 11 11.818182 21 11.818182 0 ELLIPSE 5 49 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbEllipse 10 5.906147 20 1.118182 30 0 11 -4.40872847693e-016 21 -2.4 31 0 40 0.882149 41 2.35619449029 42 3.92699081709 0 ELLIPSE 5 4A 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbEllipse 10 8.90026 20 4.512295 30 0 11 -4.40872847693e-016 21 -2.4 31 0 40 0.882149 41 5.49778714388 42 0.785398163501 0 LINE 5 4B 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 5.609091 20 9.518182 11 1.609091 21 9.518182 0 LINE 5 4C 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 3.409091 20 1.718182 11 6.353577 21 0.018182 0 LINE 5 4D 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 3.409091 20 -1.681818 11 6.353577 21 0.018182 0 LINE 5 4E 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 0 20 11.818182 11 11.818182 21 11.818182 0 LINE 5 4F 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 11.818182 20 11.818182 11 11.818182 21 0 0 LINE 5 50 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 11.818182 20 0 11 0 21 0 0 LINE 5 51 100 AcDbEntity 8 0 6 CONTINUOUS 62 7 370 18 100 AcDbLine 10 0 20 0 11 0 21 11.818182 0 ENDSEC 0 SECTION 2 OBJECTS 0 DICTIONARY 5 C 330 0 100 AcDbDictionary 281 1 3 ACAD_GROUP 350 D 0 DICTIONARY 5 D 330 C 100 AcDbDictionary 281 1 0 ENDSEC 0 EOF
{ "pile_set_name": "Github" }
--- title: "707 - HttpSendStop" ms.date: "03/30/2017" ms.assetid: 5c8a607a-be7a-4e36-a885-67746f03cae6 --- # 707 - HttpSendStop ## Properties ||| |-|-| |ID|707| |Keywords|HTTP| |Level|Verbose| |Channel|Microsoft-Windows-Application Server-Applications/Debug| ## Description This event is emitted when Http Send Request is stopped. ## Message Http Send Request Stop. ## Details
{ "pile_set_name": "Github" }
/***************************************************************************** Copyright (c) 2014, Intel Corp. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Intel Corporation nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ***************************************************************************** * Contents: Native high-level C interface to LAPACK function ctrexc * Author: Intel Corporation * Generated November 2015 *****************************************************************************/ #include "lapacke_utils.h" lapack_int LAPACKE_ctrexc( int matrix_layout, char compq, lapack_int n, lapack_complex_float* t, lapack_int ldt, lapack_complex_float* q, lapack_int ldq, lapack_int ifst, lapack_int ilst ) { if( matrix_layout != LAPACK_COL_MAJOR && matrix_layout != LAPACK_ROW_MAJOR ) { LAPACKE_xerbla( "LAPACKE_ctrexc", -1 ); return -1; } #ifndef LAPACK_DISABLE_NAN_CHECK if( LAPACKE_get_nancheck() ) { /* Optionally check input matrices for NaNs */ if( LAPACKE_lsame( compq, 'v' ) ) { if( LAPACKE_cge_nancheck( matrix_layout, n, n, q, ldq ) ) { return -6; } } if( LAPACKE_cge_nancheck( matrix_layout, n, n, t, ldt ) ) { return -4; } } #endif return LAPACKE_ctrexc_work( matrix_layout, compq, n, t, ldt, q, ldq, ifst, ilst ); }
{ "pile_set_name": "Github" }
// // Copyright(C) 1993-1996 Id Software, Inc. // Copyright(C) 2005-2014 Simon Howard // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // DESCRIPTION: // Main program, simply calls D_DoomMain high level loop. // #include "config.h" #include <stdio.h> #include <stdlib.h> #include "SDL.h" #include "doomtype.h" #include "i_system.h" #include "m_argv.h" // // D_DoomMain() // Not a globally visible function, just included for source reference, // calls all startup code, parses command line options. // void D_DoomMain (void); int main(int argc, char **argv) { // save arguments myargc = argc; myargv = argv; //! // Print the program version and exit. // if (M_ParmExists("-version") || M_ParmExists("--version")) { puts(PACKAGE_STRING); exit(0); } #if defined(_WIN32) // compose a proper command line from loose file paths passed as arguments // to allow for loading WADs and DEHACKED patches by drag-and-drop M_AddLooseFiles(); #endif M_FindResponseFile(); #ifdef SDL_HINT_NO_SIGNAL_HANDLERS SDL_SetHint(SDL_HINT_NO_SIGNAL_HANDLERS, "1"); #endif // start doom D_DoomMain (); return 0; }
{ "pile_set_name": "Github" }
#EXTM3U #EXT-X-VERSION:3 #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=300000,AVERAGE-BANDWIDTH=300000,CODECS="avc1.42c015,mp4a.40.2",FRAME-RATE=25.000 chunklist-b300000.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=600000,AVERAGE-BANDWIDTH=600000,CODECS="avc1.42c015,mp4a.40.2",FRAME-RATE=25.000 chunklist-b600000.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=850000,AVERAGE-BANDWIDTH=850000,CODECS="avc1.42c015,mp4a.40.2",FRAME-RATE=25.000 chunklist-b850000.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=1000000,AVERAGE-BANDWIDTH=1000000,CODECS="avc1.42c015,mp4a.40.2",FRAME-RATE=25.000 chunklist-b1000000.m3u8 #EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=1500000,AVERAGE-BANDWIDTH=1500000,CODECS="avc1.42c015,mp4a.40.2",FRAME-RATE=25.000 chunklist-b1500000.m3u8
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1"> <title>this.$router 和 $route 的区别</title> <meta name="description" content=""> <meta name="keywords" content=""> <script type="text/javascript" src='js/vue.min.js'></script> <script type="text/javascript" src='js/vue-router.js'></script> </head> <body> <div id='router'> <h1>Hello App !</h1> <ul> <router-link tag='li' to='/home' >Go to Home</router-link><br/> <router-link tag='li' to='/new'> $route是当前的路由信息 {{$route}}</router-link> </ul> <router-view></router-view> </div> <script type="text/javascript"> //1.创建组件 const Home={ template:`<span>我是主页</span>` }; const News={ template:`<span>我是新闻</span>` }; //2.配置路由 const routersname=[ { path:'/home',name:'我是主页',component:Home }, { path:'/new',name:'我是新闻',component:News }, //重定向.相当于404, { path:'*',redirect:'/home' } ]; //3.生成路由实例 const router=new VueRouter({ routes:routersname //此时 routers 和 routername 并不相等,所以不能在采用 es6 里面对象的简写方法 }); new Vue({ el:'#router', router, mounted(){ console.log('this.$router是路由的实例:'); console.log(this.$router); } }) </script> </body> </html>
{ "pile_set_name": "Github" }
#include "LanguageServerConfig.h" #include <algorithm> #include <NodeJSLocator.h> LanguageServerConfig::LanguageServerConfig() : clConfigItem("LSPConfig") { NodeJSLocator locator; locator.Locate(); } LanguageServerConfig::~LanguageServerConfig() {} LanguageServerConfig& LanguageServerConfig::Load() { clConfig conf("LanguageServer.conf"); conf.ReadItem(this); return *this; } LanguageServerConfig& LanguageServerConfig::Save() { clConfig conf("LanguageServer.conf"); conf.WriteItem(this); return *this; } LanguageServerConfig& LanguageServerConfig::Get() { static LanguageServerConfig config; return config; } void LanguageServerConfig::FromJSON(const JSONItem& json) { m_servers.clear(); m_flags = json.namedObject("flags").toSize_t(m_flags); if(json.hasNamedObject("servers")) { JSONItem servers = json.namedObject("servers"); size_t count = servers.arraySize(); for(size_t i = 0; i < count; ++i) { JSONItem server = servers.arrayItem(i); LanguageServerEntry entry; entry.FromJSON(server); m_servers.insert({ entry.GetName(), entry }); } } } JSONItem LanguageServerConfig::ToJSON() const { JSONItem json = JSONItem::createObject(GetName()); json.addProperty("flags", m_flags); JSONItem servers = JSONItem::createArray("servers"); std::for_each(m_servers.begin(), m_servers.end(), [&](const LanguageServerEntry::Map_t::value_type& vt) { servers.append(vt.second.ToJSON()); }); json.append(servers); return json; } void LanguageServerConfig::AddServer(const LanguageServerEntry& server) { RemoveServer(server.GetName()); m_servers.insert({ server.GetName(), server }); } void LanguageServerConfig::RemoveServer(const wxString& name) { if(m_servers.count(name)) { m_servers.erase(name); } } const LanguageServerEntry& LanguageServerConfig::GetServer(const wxString& name) const { static LanguageServerEntry NullEntry; if(m_servers.count(name) == 0) { return NullEntry; } return m_servers.find(name)->second; } LanguageServerEntry& LanguageServerConfig::GetServer(const wxString& name) { static LanguageServerEntry NullEntry; if(m_servers.count(name) == 0) { return NullEntry; } return m_servers[name]; }
{ "pile_set_name": "Github" }
--- name: "Netty" metadata: categories: - "web" status: "stable" unlisted: true
{ "pile_set_name": "Github" }
/*============================================================================= Copyright (c) 2001-2007 Joel de Guzman Copyright (c) 2007 Dan Marsden Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) ==============================================================================*/ #if !defined(BOOST_FUSION_FOR_EACH_20070527_0943) #define BOOST_FUSION_FOR_EACH_20070527_0943 #include <boost/fusion/algorithm/iteration/detail/for_each.hpp> #include <boost/fusion/algorithm/iteration/detail/segmented_for_each.hpp> #include <boost/fusion/support/is_segmented.hpp> #include <boost/fusion/support/is_sequence.hpp> #include <boost/utility/enable_if.hpp> namespace boost { namespace fusion { namespace result_of { template <typename Sequence, typename F> struct for_each { typedef void type; }; } template <typename Sequence, typename F> inline typename enable_if< traits::is_sequence<Sequence> , void >::type for_each(Sequence& seq, F const& f) { detail::for_each(seq, f, typename traits::is_segmented<Sequence>::type()); } template <typename Sequence, typename F> inline typename enable_if< traits::is_sequence<Sequence> , void >::type for_each(Sequence const& seq, F const& f) { detail::for_each(seq, f, typename traits::is_segmented<Sequence>::type()); } }} #endif
{ "pile_set_name": "Github" }
// Copyright (c) 2010 Nuovation System Designs, LLC // Grant Erickson <[email protected]> // // Reworked somewhat by Marshall Clow; August 2010 // // Distributed under the Boost Software License, Version 1.0. (See // accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // See http://www.boost.org/ for latest version. // #ifndef BOOST_ALGORITHM_ORDERED_HPP #define BOOST_ALGORITHM_ORDERED_HPP #include <algorithm> #include <functional> #include <iterator> #include <boost/range/begin.hpp> #include <boost/range/end.hpp> #include <boost/utility/enable_if.hpp> #include <boost/type_traits/is_same.hpp> #include <boost/mpl/identity.hpp> namespace boost { namespace algorithm { /// \fn is_sorted_until ( ForwardIterator first, ForwardIterator last, Pred p ) /// \return the point in the sequence [first, last) where the elements are unordered /// (according to the comparison predicate 'p'). /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// \param p A binary predicate that returns true if two elements are ordered. /// template <typename ForwardIterator, typename Pred> ForwardIterator is_sorted_until ( ForwardIterator first, ForwardIterator last, Pred p ) { if ( first == last ) return last; // the empty sequence is ordered ForwardIterator next = first; while ( ++next != last ) { if ( p ( *next, *first )) return next; first = next; } return last; } /// \fn is_sorted_until ( ForwardIterator first, ForwardIterator last ) /// \return the point in the sequence [first, last) where the elements are unordered /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// template <typename ForwardIterator> ForwardIterator is_sorted_until ( ForwardIterator first, ForwardIterator last ) { typedef typename std::iterator_traits<ForwardIterator>::value_type value_type; return boost::algorithm::is_sorted_until ( first, last, std::less<value_type>()); } /// \fn is_sorted ( ForwardIterator first, ForwardIterator last, Pred p ) /// \return whether or not the entire sequence is sorted /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// \param p A binary predicate that returns true if two elements are ordered. /// template <typename ForwardIterator, typename Pred> bool is_sorted ( ForwardIterator first, ForwardIterator last, Pred p ) { return boost::algorithm::is_sorted_until (first, last, p) == last; } /// \fn is_sorted ( ForwardIterator first, ForwardIterator last ) /// \return whether or not the entire sequence is sorted /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// template <typename ForwardIterator> bool is_sorted ( ForwardIterator first, ForwardIterator last ) { return boost::algorithm::is_sorted_until (first, last) == last; } /// /// -- Range based versions of the C++11 functions /// /// \fn is_sorted_until ( const R &range, Pred p ) /// \return the point in the range R where the elements are unordered /// (according to the comparison predicate 'p'). /// /// \param range The range to be tested. /// \param p A binary predicate that returns true if two elements are ordered. /// template <typename R, typename Pred> typename boost::lazy_disable_if_c< boost::is_same<R, Pred>::value, typename boost::range_iterator<const R> >::type is_sorted_until ( const R &range, Pred p ) { return boost::algorithm::is_sorted_until ( boost::begin ( range ), boost::end ( range ), p ); } /// \fn is_sorted_until ( const R &range ) /// \return the point in the range R where the elements are unordered /// /// \param range The range to be tested. /// template <typename R> typename boost::range_iterator<const R>::type is_sorted_until ( const R &range ) { return boost::algorithm::is_sorted_until ( boost::begin ( range ), boost::end ( range )); } /// \fn is_sorted ( const R &range, Pred p ) /// \return whether or not the entire range R is sorted /// (according to the comparison predicate 'p'). /// /// \param range The range to be tested. /// \param p A binary predicate that returns true if two elements are ordered. /// template <typename R, typename Pred> typename boost::lazy_disable_if_c< boost::is_same<R, Pred>::value, boost::mpl::identity<bool> >::type is_sorted ( const R &range, Pred p ) { return boost::algorithm::is_sorted ( boost::begin ( range ), boost::end ( range ), p ); } /// \fn is_sorted ( const R &range ) /// \return whether or not the entire range R is sorted /// /// \param range The range to be tested. /// template <typename R> bool is_sorted ( const R &range ) { return boost::algorithm::is_sorted ( boost::begin ( range ), boost::end ( range )); } /// /// -- Range based versions of the C++11 functions /// /// \fn is_increasing ( ForwardIterator first, ForwardIterator last ) /// \return true if the entire sequence is increasing; i.e, each item is greater than or /// equal to the previous one. /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// /// \note This function will return true for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_strictly_increasing instead. template <typename ForwardIterator> bool is_increasing ( ForwardIterator first, ForwardIterator last ) { typedef typename std::iterator_traits<ForwardIterator>::value_type value_type; return boost::algorithm::is_sorted (first, last, std::less<value_type>()); } /// \fn is_increasing ( const R &range ) /// \return true if the entire sequence is increasing; i.e, each item is greater than or /// equal to the previous one. /// /// \param range The range to be tested. /// /// \note This function will return true for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_strictly_increasing instead. template <typename R> bool is_increasing ( const R &range ) { return is_increasing ( boost::begin ( range ), boost::end ( range )); } /// \fn is_decreasing ( ForwardIterator first, ForwardIterator last ) /// \return true if the entire sequence is decreasing; i.e, each item is less than /// or equal to the previous one. /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// /// \note This function will return true for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_strictly_decreasing instead. template <typename ForwardIterator> bool is_decreasing ( ForwardIterator first, ForwardIterator last ) { typedef typename std::iterator_traits<ForwardIterator>::value_type value_type; return boost::algorithm::is_sorted (first, last, std::greater<value_type>()); } /// \fn is_decreasing ( const R &range ) /// \return true if the entire sequence is decreasing; i.e, each item is less than /// or equal to the previous one. /// /// \param range The range to be tested. /// /// \note This function will return true for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_strictly_decreasing instead. template <typename R> bool is_decreasing ( const R &range ) { return is_decreasing ( boost::begin ( range ), boost::end ( range )); } /// \fn is_strictly_increasing ( ForwardIterator first, ForwardIterator last ) /// \return true if the entire sequence is strictly increasing; i.e, each item is greater /// than the previous one /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// /// \note This function will return false for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_increasing instead. template <typename ForwardIterator> bool is_strictly_increasing ( ForwardIterator first, ForwardIterator last ) { typedef typename std::iterator_traits<ForwardIterator>::value_type value_type; return boost::algorithm::is_sorted (first, last, std::less_equal<value_type>()); } /// \fn is_strictly_increasing ( const R &range ) /// \return true if the entire sequence is strictly increasing; i.e, each item is greater /// than the previous one /// /// \param range The range to be tested. /// /// \note This function will return false for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_increasing instead. template <typename R> bool is_strictly_increasing ( const R &range ) { return is_strictly_increasing ( boost::begin ( range ), boost::end ( range )); } /// \fn is_strictly_decreasing ( ForwardIterator first, ForwardIterator last ) /// \return true if the entire sequence is strictly decreasing; i.e, each item is less than /// the previous one /// /// \param first The start of the sequence to be tested. /// \param last One past the end of the sequence /// /// \note This function will return false for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_decreasing instead. template <typename ForwardIterator> bool is_strictly_decreasing ( ForwardIterator first, ForwardIterator last ) { typedef typename std::iterator_traits<ForwardIterator>::value_type value_type; return boost::algorithm::is_sorted (first, last, std::greater_equal<value_type>()); } /// \fn is_strictly_decreasing ( const R &range ) /// \return true if the entire sequence is strictly decreasing; i.e, each item is less than /// the previous one /// /// \param range The range to be tested. /// /// \note This function will return false for sequences that contain items that compare /// equal. If that is not what you intended, you should use is_decreasing instead. template <typename R> bool is_strictly_decreasing ( const R &range ) { return is_strictly_decreasing ( boost::begin ( range ), boost::end ( range )); } }} // namespace boost #endif // BOOST_ALGORITHM_ORDERED_HPP
{ "pile_set_name": "Github" }
+++ Title = "Davy Trinugraha" Twitter = "" image = "davy-trinugraha.png" type = "speaker" linktitle = "davy-trinugraha" +++ Davy is a Customer Engineer in Google Cloud, his main job is to help customers to adopt Google Cloud solutions. He has more than 15 years experience in cloud architecture, networking & security technology. Before joined Google, he was working for other technology companies such as Rubrik, Microsoft (part of Azure Specialist team), Trend Micro and VMware. His main interest now is in Container, Serverless, and DevOps.
{ "pile_set_name": "Github" }
#X-Generator: crowdin.com bugtracker.name=Hata Ay\u0131klay\u0131c\u0131 bugtracker.desc=ZAP i\u00e7indeki hatalar\u0131n takibi i\u00e7in kullan\u0131c\u0131lara hatay\u0131 belirlemelerinde yard\u0131mc\u0131 olan bir ZAP uzant\u0131s\u0131 bugtracker.msg.raised=Hata ba\u015far\u0131l\u0131 bir \u015fekilde iletildi\! bugtracker.popup.issue.manual=Sorun \u0130let\: Manuel Mod bugtracker.popup.issue.semi=Hata Takip\u00e7isi i\u00e7in sorun ilet bugtracker.topmenu.tools.msg=Hata takip\u00e7isi\: \u00d6rnek mesaj bugtracker.trackers.list=Bir hata takip\u00e7isi se\u00e7 bugtracker.trackers.github.tab=Github bugtracker.trackers.bugzilla.tab=Bugzilla bugtracker.trackers.jira.tab=JIRA bugtracker.trackers.atlassan.tab=Atlassian bugtracker.trackers.github.issue.repo=Depo Linki (y\u00f6netici/depo) bugtracker.trackers.github.issue.title=Ba\u015fl\u0131k bugtracker.trackers.github.issue.body=V\u00fccut bugtracker.trackers.github.issue.labels=Etiketler bugtracker.trackers.github.issue.assignee.manual=Atanan bugtracker.trackers.github.issue.assignee.list=\u0130\u015fbirli\u011fi yap\u0131lanlardan atanacak olan\u0131 se\u00e7 bugtracker.trackers.github.issue.username=Kullan\u0131c\u0131 Ad\u0131 bugtracker.trackers.github.issue.password=\u015eifre bugtracker.trackers.github.issue.config=Kaydedilmi\u015f bir konfig\u00fcrasyon se\u00e7in bugtracker.trackers.github.issue.msg.auth=Ba\u015far\u0131s\u0131z Yetkilendirme\: Kimlik bilgilerinizi kontrol edin bugtracker.trackers.github.issue.msg.repo=Depo/Kaynak bulunamad\u0131 bugtracker.trackers.github.issue.msg.param=B\u00fct\u00fcn parametreleri kontrol edin bugtracker.trackers.github.issue.msg.missing=Baz\u0131 alanlar eksik bugtracker.trackers.github.issue.msg.raised=Sorun bildirildi\! bugtracker.trackers.bugzilla.label=Bugzilla bugtracker.trackers.bugzilla.msg=Destek yolda bugtracker.dialog.manual.title=Hata Takip\u00e7isi\: Manuel Mod bugtracker.dialog.semi.title=Hata Takip\u00e7isi\: Yar\u0131 otomatik mod bugtracker.trackers.bugzilla.issue.url=Bugzilla Linki bugtracker.trackers.bugzilla.issue.product=\u00dcr\u00fcn ad\u0131 bugtracker.trackers.bugzilla.issue.component=Bile\u015fen ad\u0131 bugtracker.trackers.bugzilla.issue.version=S\u00fcr\u00fcm bugtracker.trackers.bugzilla.issue.os=\u0130\u015fletim Sistemi bugtracker.trackers.bugzilla.issue.platform=Platform bugtracker.trackers.bugzilla.issue.description=A\u00e7\u0131klama bugtracker.trackers.bugzilla.issue.summary=\u00d6zet bugtracker.trackers.bugzilla.issue.username=Kullan\u0131c\u0131 Ad\u0131 bugtracker.trackers.bugzilla.issue.password=\u015eifre bugtracker.trackers.bugzilla.issue.config=Kaydedilmi\u015f bir konfig\u00fcrasyon se\u00e7in bugtracker.trackers.bugzilla.issue.msg.raised=Hata iletildi\! bugtracker.dialog.add.title=Sorun kural\u0131 ekle bugtracker.dialog.add.button.confirm=Onaylamak bugtracker.dialog.add.field.label.alert=Uyar\u0131 bugtracker.dialog.add.field.label.newlevel=Risk Seviyesi\: bugtracker.dialog.add.field.label.enabled=Aktif\: bugtracker.dialog.add.field.label.url=URL\: bugtracker.dialog.add.field.label.regex=Kurall\u0131 \u0130fade URL'i\: bugtracker.dialog.add.field.label.tracker=Bir hata takip\u00e7isi se\u00e7 bugtracker.dialog.add.field.label.config=Konfig\u00fcrasyon se\u00e7in bugtracker.dialog.modify.title=Sorun kural\u0131 g\u00fcncelle bugtracker.dialog.modify.button.confirm=Onaylamak bugtracker.dialog.remove.button.cancel=\u0130ptal bugtracker.dialog.remove.button.confirm=Kald\u0131r bugtracker.dialog.remove.checkbox.label=Bu mesaj\u0131 bir daha g\u00f6sterme bugtracker.dialog.remove.text=Se\u00e7ili Sorun Kural\u0131n\u0131 kald\u0131rmak istedi\u011finizden emin misiniz? bugtracker.dialog.remove.title=Sorun Kural\u0131n\u0131 kald\u0131r bugtracker.error.nofile=Dosya bulunamad\u0131\: {0} bugtracker.panel.description=Taray\u0131c\u0131lar taraf\u0131ndan bulunan uyar\u0131lar\u0131n risk oran\u0131n\u0131 de\u011fi\u015ftir bugtracker.panel.title=Otomatik Hata Takip\u00e7isi bugtracker.panel.newalert.fp=Yan\u0131lma Oran\u0131 bugtracker.panel.newalert.info=Bilgi bugtracker.panel.newalert.low=D\u00fc\u015f\u00fck bugtracker.panel.newalert.medium=Orta bugtracker.panel.newalert.high=Y\u00fcksek bugtracker.table.header.enabled=Aktif bugtracker.table.header.alertid=Uyar\u0131 bugtracker.table.header.url=URL bugtracker.table.header.newalert=Risk Seviyesi bugtracker.table.header.tracker=Hata Ay\u0131klay\u0131c\u0131 bugtracker.table.header.config=Konfig\u00fcrasyon bugtracker.trackers.github.table.header.username=Kullan\u0131c\u0131 Ad\u0131/E-Posta bugtracker.trackers.github.table.header.password=\u015eifre bugtracker.trackers.github.table.header.repoUrl=Depo Linki bugtracker.trackers.github.label.configs=<html><body><p>Bu yap\u0131land\u0131rmalar "Siteler aras\u0131 istek sahtecili\u011fi" kar\u015f\u0131t\u0131 konfig\u00fcrasyonlar gibi davran\u0131r.</p> <p>\u015eu anda yaln\u0131zca FORM parametre konfig\u00fcrasyonlar\u0131 destekleniyor</p> <p>B\u00fct\u00fcn konfig\u00fcrasyon adlar\u0131nda b\u00fcy\u00fck/k\u00fc\u00e7\u00fck harf farketmez.</p> <p>Konfig\u00fcrasyon ismi ekler ya da mevcut olanlardan birini de\u011fi\u015ftirirseniz, de\u011fi\u015fikliklerin yans\u0131mas\u0131 i\u00e7in bu yap\u0131land\u0131rmalar\u0131 i\u00e7eren sayfalar\u0131 tekrar ziyaret etmeniz gerekir.</p></body></html> bugtracker.trackers.github.title=Siteler Aras\u0131 \u0130stek Sahtecili\u011fi \u00f6nleyici jetonlar bugtracker.trackers.github.dialog.config.add.button.cancel=\u0130ptal bugtracker.trackers.github.dialog.config.add.button.confirm=Ekle bugtracker.trackers.github.dialog.config.add.title=Github Kullan\u0131c\u0131 Konfig\u00fcrasyonu Ekle bugtracker.trackers.github.dialog.config.field.label.enabled=Aktif\: bugtracker.trackers.github.dialog.config.field.label.name=Kullan\u0131c\u0131 Ad\u0131/E-Posta\: bugtracker.trackers.github.dialog.config.field.label.password=\u015eifre\: bugtracker.trackers.github.dialog.config.field.label.repoUrl=Depo Linki\: bugtracker.trackers.github.dialog.config.modify.button.confirm=De\u011fi\u015fiklik Yap bugtracker.trackers.github.dialog.config.modify.title=Github Kullan\u0131c\u0131 Konfig\u00fcrasyonu G\u00fcncelle bugtracker.trackers.github.dialog.config.remove.button.cancel=\u0130ptal bugtracker.trackers.github.dialog.config.remove.button.confirm=Sil bugtracker.trackers.github.dialog.config.remove.checkbox.label=Bu mesaj\u0131 bir daha g\u00f6sterme bugtracker.trackers.github.dialog.config.remove.text=Se\u00e7ili uygulamay\u0131 kald\u0131rmak istedi\u011finizden emin misiniz? bugtracker.trackers.github.dialog.config.remove.title=Github Kullan\u0131c\u0131 Konfig\u00fcrasyonunu Sil bugtracker.trackers.github.dialog.config.warning.name.repeated.text=Bu kullan\u0131c\u0131 ad\u0131yla bir Github konfig\u00fcrasyonu mevcut. bugtracker.trackers.github.dialog.config.warning.name.repeated.title=Birden fazla ayn\u0131 isimle Github konfig\u00fcrasyonu var bugtracker.trackers.bugzilla.table.header.username=Kullan\u0131c\u0131 Ad\u0131/E-Posta bugtracker.trackers.bugzilla.table.header.password=\u015eifre bugtracker.trackers.bugzilla.table.header.bugzillaUrl=Bugzilla Linki bugtracker.trackers.bugzilla.label.configs=<html><body><p>Bu yap\u0131land\u0131rmalar "Siteler aras\u0131 istek sahtecili\u011fi" kar\u015f\u0131t\u0131 konfig\u00fcrasyonlar gibi davran\u0131r.</p> <p>\u015eu anda yaln\u0131zca FORM parametre konfig\u00fcrasyonlar\u0131 destekleniyor</p> <p>B\u00fct\u00fcn konfig\u00fcrasyon adlar\u0131nda b\u00fcy\u00fck/k\u00fc\u00e7\u00fck harf farketmez.</p> <p>Konfig\u00fcrasyon ismi ekler ya da mevcut olanlardan birini de\u011fi\u015ftirirseniz, de\u011fi\u015fikliklerin yans\u0131mas\u0131 i\u00e7in bu yap\u0131land\u0131rmalar\u0131 i\u00e7eren sayfalar\u0131 tekrar ziyaret etmeniz gerekir.</p></body></html> bugtracker.trackers.bugzilla.title=Siteler Aras\u0131 \u0130stek Sahtecili\u011fi \u00f6nleyici jetonlar bugtracker.trackers.bugzilla.dialog.config.add.button.cancel=\u0130ptal bugtracker.trackers.bugzilla.dialog.config.add.button.confirm=Ekle bugtracker.trackers.bugzilla.dialog.config.add.title=Bugzilla Kullan\u0131c\u0131 Konfig\u00fcrasyonu Ekle bugtracker.trackers.bugzilla.dialog.config.field.label.enabled=Aktif\: bugtracker.trackers.bugzilla.dialog.config.field.label.name=Kullan\u0131c\u0131 Ad\u0131/E-Posta\: bugtracker.trackers.bugzilla.dialog.config.field.label.password=\u015eifre\: bugtracker.trackers.bugzilla.dialog.config.field.label.bugzillaUrl=Bugzilla Linki\: bugtracker.trackers.bugzilla.dialog.config.modify.button.confirm=De\u011fi\u015fiklik Yap bugtracker.trackers.bugzilla.dialog.config.modify.title=Github Kullan\u0131c\u0131 Konfig\u00fcrasyonu G\u00fcncelle bugtracker.trackers.bugzilla.dialog.config.remove.button.cancel=\u0130ptal bugtracker.trackers.bugzilla.dialog.config.remove.button.confirm=Sil bugtracker.trackers.bugzilla.dialog.config.remove.checkbox.label=Bu mesaj\u0131 bir daha g\u00f6sterme bugtracker.trackers.bugzilla.dialog.config.remove.text=Se\u00e7ili uygulamay\u0131 kald\u0131rmak istedi\u011finizden emin misiniz? bugtracker.trackers.bugzilla.dialog.config.remove.title=Bugzilla Kullan\u0131c\u0131 Konfig\u00fcrasyonu Sil bugtracker.trackers.bugzilla.dialog.config.warning.name.repeated.text=Bu kullan\u0131c\u0131 ad\u0131yla bir Bugzilla konfig\u00fcrasyonu mevcut. bugtracker.trackers.bugzilla.dialog.config.warning.name.repeated.title=Birden fazla ayn\u0131 isimle Bugzilla konfig\u00fcrasyonu var bugtracker.msg.alert=*SORUDAK\u0130 UYARI*\n bugtracker.msg.url=*URL*\n bugtracker.msg.desc=*A\u00c7IKLAMA*\n bugtracker.msg.otherinfo=*D\u0130\u011eER B\u0130LG\u0130LER*\n bugtracker.msg.solution=*\u00c7\u00d6Z\u00dcM*\n bugtracker.msg.reference=*REFERANS*\n bugtracker.msg.parameter=*PARAMETRE*\n bugtracker.msg.attack=*SALDIRI*\n bugtracker.msg.evidence=*KANIT*\n bugtracker.msg.risk=R\u0130SK\: bugtracker.msg.conf=\u0130T\u0130MAT\: bugtracker.msg.cwe=Bilinen Zay\u0131fl\u0131k Listesi\: bugtracker.msg.wasc=Web Uygulama G\u00fcvenli\u011fi Birli\u011fi\:
{ "pile_set_name": "Github" }
# Translation of Odoo Server. # This file contains the translation of the following modules: # * rating # # Translators: # Martin Trigaux, 2019 # Alina Lisnenko <[email protected]>, 2019 # msgid "" msgstr "" "Project-Id-Version: Odoo Server saas~12.5\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2019-08-26 08:17+0000\n" "PO-Revision-Date: 2019-08-26 09:13+0000\n" "Last-Translator: Alina Lisnenko <[email protected]>, 2019\n" "Language-Team: Ukrainian (https://www.transifex.com/odoo/teams/41243/uk/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: \n" "Language: uk\n" "Plural-Forms: nplurals=4; plural=(n % 1 == 0 && n % 10 == 1 && n % 100 != 11 ? 0 : n % 1 == 0 && n % 10 >= 2 && n % 10 <= 4 && (n % 100 < 12 || n % 100 > 14) ? 1 : n % 1 == 0 && (n % 10 ==0 || (n % 10 >=5 && n % 10 <=9) || (n % 100 >=11 && n % 100 <=14 )) ? 2: 3);\n" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit msgid "" "<br/>\n" " on our services on \"" msgstr "" "<br/>\n" " на наших сервісах \"" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_view msgid "" "<i class=\"fa fa-check-circle fa-5x text-success\" role=\"img\" aria-" "label=\"Thank you!\" title=\"Thank you!\"/>" msgstr "" "<i class=\"fa fa-check-circle fa-5x text-success\" role=\"img\" aria-" "label=\"Thank you!\" title=\"Thank you!\"/>" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_form msgid "<span class=\"o_stat_text\">Resource</span>" msgstr "<span class=\"o_stat_text\">Ресурс</span>" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__access_token msgid "Access token to set the rating of the value" msgstr "Токен доступу для встановлення рейтингу значення" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__message_id msgid "" "Associated message when posting a review. Mainly used in website addons." msgstr "" "Пов'язане повідомлення під час опублікування відгуку. В основному " "використовується в аддонах веб-сайтів." #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__partner_id msgid "Author of the rating" msgstr "Автор рейтингу" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__feedback msgid "Comment" msgstr "Коментар" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__create_uid msgid "Created by" msgstr "Створив" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__create_date msgid "Created on" msgstr "Створено на" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__partner_id #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Customer" msgstr "Клієнт" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Date" msgstr "Дата" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__display_name #: model:ir.model.fields,field_description:rating.field_rating_parent_mixin__display_name #: model:ir.model.fields,field_description:rating.field_rating_rating__display_name msgid "Display Name" msgstr "Назва для відображення" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__res_id msgid "Document" msgstr "Документ" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__res_model msgid "Document Model" msgstr "Модель документу" #. module: rating #: model:ir.model,name:rating.model_mail_thread msgid "Email Thread" msgstr "Тема електронної пошти" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__consumed msgid "Enabled if the rating has been filled." msgstr "Увімкнено, якщо рейтинг був заповнений." #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__consumed msgid "Filled Rating" msgstr "Заповнений рейтинг" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_view msgid "Go to our website" msgstr "Перейдіть на наш веб-сайт" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Group By" msgstr "Групувати за" #. module: rating #: model:ir.model.fields.selection,name:rating.selection__rating_rating__rating_text__highly_dissatisfied #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Highly dissatisfied" msgstr "Високо незадоволений" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__id #: model:ir.model.fields,field_description:rating.field_rating_parent_mixin__id #: model:ir.model.fields,field_description:rating.field_rating_rating__id msgid "ID" msgstr "ID" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__res_id msgid "Identifier of the rated object" msgstr "Ідентифікатор оцінювання об'єкта" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__rating_image msgid "Image" msgstr "Зображення" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin____last_update #: model:ir.model.fields,field_description:rating.field_rating_parent_mixin____last_update #: model:ir.model.fields,field_description:rating.field_rating_rating____last_update msgid "Last Modified on" msgstr "Останні зміни на" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__write_uid msgid "Last Updated by" msgstr "Востаннє оновив" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__write_date msgid "Last Updated on" msgstr "Останнє оновлення" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__message_id msgid "Linked message" msgstr "Об'єднане повідомлення" #. module: rating #: model:ir.model,name:rating.model_mail_message msgid "Message" msgstr "Повідомлення" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__res_model_id msgid "Model of the followed resource" msgstr "Модель наступного ресурсу" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "My Ratings" msgstr "Мої оцінки" #. module: rating #: model:ir.model.fields.selection,name:rating.selection__rating_rating__rating_text__no_rating msgid "No Rating yet" msgstr "Ще немає жодного оцінювання" #. module: rating #: model_terms:ir.actions.act_window,help:rating.action_view_rating msgid "No rating yet" msgstr "Ще немає оцінювання" #. module: rating #: model:ir.model.fields.selection,name:rating.selection__rating_rating__rating_text__not_satisfied #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Not satisfied" msgstr "Незадоволений" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__rated_partner_id msgid "Owner of the rated resource" msgstr "Власник ресурсу оцінювання" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__parent_res_id msgid "Parent Document" msgstr "Батьківський документ" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__parent_res_model msgid "Parent Document Model" msgstr "Модель батьківського документу" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__parent_res_name msgid "Parent Document Name" msgstr "Назва батьківського документу" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__parent_res_model_id msgid "Parent Related Document Model" msgstr "Модель, пов'язана з батьківським документом" #. module: rating #: model:ir.model.fields,help:rating.field_rating_parent_mixin__rating_percentage_satisfaction msgid "Percentage of happy ratings" msgstr "Відсоток щасливих оцінювань" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Rated User" msgstr "Оцінений користувач" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__rated_partner_id msgid "Rated person" msgstr "Номінальна особа" #. module: rating #: model:ir.actions.act_window,name:rating.action_view_rating #: model:ir.model,name:rating.model_rating_rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__rating_ids #: model:ir.model.fields,field_description:rating.field_rating_rating__rating_text #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_form #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_tree msgid "Rating" msgstr "Оцінка" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__rating_avg #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_graph #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_pivot msgid "Rating Average" msgstr "Середнє оцінювання" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__rating_last_feedback msgid "Rating Last Feedback" msgstr "Останній зворотній зв'язок оцінки" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__rating_last_image msgid "Rating Last Image" msgstr "Останнє зображення оцінювання" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__rating_last_value msgid "Rating Last Value" msgstr "Останнє значення оцінювання" #. module: rating #: model:ir.model,name:rating.model_rating_mixin msgid "Rating Mixin" msgstr "Збірне оцінювання" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__rating msgid "Rating Number" msgstr "Номер оцінювання" #. module: rating #: model:ir.model,name:rating.model_rating_parent_mixin msgid "Rating Parent Mixin" msgstr "Збір батьківського рейтингу" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_parent_mixin__rating_percentage_satisfaction msgid "Rating Satisfaction" msgstr "Оцінювання вдоволеності" #. module: rating #: model:ir.model.fields,field_description:rating.field_mail_mail__rating_value #: model:ir.model.fields,field_description:rating.field_mail_message__rating_value msgid "Rating Value" msgstr "Значення оцінювання" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_mixin__rating_count msgid "Rating count" msgstr "Підрахунок оцінювання" #. module: rating #: model:ir.model.constraint,message:rating.constraint_rating_rating_rating_range msgid "Rating should be between 0 to 10" msgstr "Оцінювання повинно бути від 0 до 10" #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__rating msgid "Rating value: 0=Unhappy, 10=Happy" msgstr "Значення оцінювання: 0=Нещасливий, 10=Щасливий" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_parent_mixin__rating_ids #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Ratings" msgstr "Оцінювання" #. module: rating #: model:ir.model.fields,help:rating.field_rating_mixin__rating_last_feedback #: model:ir.model.fields,help:rating.field_rating_rating__feedback msgid "Reason of the rating" msgstr "Причина оцінювання" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__res_model_id msgid "Related Document Model" msgstr "Пов'язана модель документа" #. module: rating #: model:ir.model.fields,field_description:rating.field_mail_mail__rating_ids #: model:ir.model.fields,field_description:rating.field_mail_message__rating_ids msgid "Related ratings" msgstr "Пов'язані оцінювання" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Resource" msgstr "Кадр" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__res_name msgid "Resource name" msgstr "Назва ресурсу" #. module: rating #: model:ir.model.fields.selection,name:rating.selection__rating_rating__rating_text__satisfied #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_search msgid "Satisfied" msgstr "Задоволений" #. module: rating #: model:ir.model.fields,field_description:rating.field_rating_rating__access_token msgid "Security Token" msgstr "Токен безпеки" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit msgid "Send Feedback" msgstr "Зворотній зв'язок" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_form #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_tree msgid "Submitted on" msgstr "Відправлено на" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit msgid "Thanks! We appreciate your feedback." msgstr "Дякуємо! Ми цінуємо ваш відгук." #. module: rating #: model:ir.model.fields,help:rating.field_rating_rating__res_name msgid "The name of the rated resource." msgstr "Назва ресурсу оцінювання." #. module: rating #: model_terms:ir.actions.act_window,help:rating.action_view_rating msgid "There is no rating for this object at the moment." msgstr "На даний момент немає жодного оцінювання цього об'єкта." #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_view msgid "We appreciate your feedback!" msgstr "Ми цінуємо ваш відгук." #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit msgid "Would be great if you can provide more information:" msgstr "Було би чудово, якщо ви би надали більше інформації:" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit msgid "Your rating has been submitted." msgstr "Ваше оцінювання було подане." #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_kanban msgid "by" msgstr "від" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_kanban msgid "for" msgstr "для" #. module: rating #: code:addons/rating/controllers/main.py:0 #, python-format msgid "highly dissatisfied" msgstr "дуже незадоволений" #. module: rating #: code:addons/rating/controllers/main.py:0 #, python-format msgid "not satisfied" msgstr "незадоволений" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_rating_view_kanban msgid "on" msgstr "на" #. module: rating #: code:addons/rating/controllers/main.py:0 #, python-format msgid "satisfied" msgstr "задоволений" #. module: rating #: model_terms:ir.ui.view,arch_db:rating.rating_external_page_submit msgid "you are" msgstr "ви є"
{ "pile_set_name": "Github" }
<html> <body> Home Page </body> </html>
{ "pile_set_name": "Github" }
#!/bin/sh # This script is based on a git-svn tree. BOOST=$1 mkdir -p src/include/firebird/impl bcp --boost=$BOOST --namespace=FirebirdImpl preprocessor/seq src/include/firebird/impl find src/include/firebird/impl/boost -type f -exec sed -i 's/BOOST_/FB_BOOST_/g' {} \; find src/include/firebird/impl/boost -type f -exec sed -i 's/<boost\//<firebird\/impl\/boost\//g' {} \; g++ -ggdb -Isrc/include/gen -Isrc/include -E src/include/firebird/Message.h | sed -n -e 's/.*"\(.*impl.*\)".*/\1/p' | sort -u > gen/boost for line in `cat gen/boost`; do git add $line; done git add src/include/firebird/impl/boost/preprocessor/control git add src/include/firebird/impl/boost/preprocessor/detail git add src/include/firebird/impl/boost/preprocessor/repetition/detail rm gen/boost echo Now run this: echo git commit src/include/firebird/impl/boost echo rm -rf src/include/firebird/impl/boost echo git checkout -- src/include/firebird
{ "pile_set_name": "Github" }
#!/bin/bash ## $ReOpenLDAP$ ## Copyright 1998-2018 ReOpenLDAP AUTHORS: please see AUTHORS file. ## All rights reserved. ## ## This file is part of ReOpenLDAP. ## ## Redistribution and use in source and binary forms, with or without ## modification, are permitted only as authorized by the OpenLDAP ## Public License. ## ## A copy of this license is available in the file LICENSE in the ## top-level directory of the distribution or, alternatively, at ## <http://www.OpenLDAP.org/license.html>. echo "running defines.sh" . ${TOP_SRCDIR}/tests/scripts/defines.sh if test ${AC_conf[syncprov]} = no; then echo "Syncrepl provider overlay not available, test skipped" exit 0 fi mkdir -p $TESTDIR $DBDIR1A $DBDIR1B $DBDIR1C $DBDIR2A $DBDIR2B echo "Running slapadd to build glued slapd databases..." config_filter $BACKEND ${AC_conf[monitor]} < $GLUECONF > $CONF1 $SLAPADD -f $CONF1 -l $LDIFORDERED > $SLAPADDLOG1 2>&1 RC=$? if test $RC != 0 ; then echo "slapadd failed ($RC)!" exit $RC fi rm -rf $DBDIR1A/* $DBDIR1B/* cp -pr $DBDIR1C $DBDIR2C echo "Starting slapd 1 on TCP/IP port $PORT1..." config_filter $BACKEND ${AC_conf[monitor]} < $GLUESYNCCONF1 > $CONF1 $SLAPD -f $CONF1 -h $URI1 $TIMING > $LOG1 2>&1 & PID=$! if test $WAIT != 0 ; then echo PID $PID read foo fi KILLPIDS="$PID" check_running 1 first echo "Starting slapd 2 on TCP/IP port $PORT2..." config_filter $BACKEND ${AC_conf[monitor]} < $GLUESYNCCONF2 > $CONF2 $SLAPD -f $CONF2 -h $URI2 $TIMING > $LOG2 2>&1 & PID=$! if test $WAIT != 0 ; then echo PID $PID read foo fi KILLPIDS="$KILLPIDS $PID" check_running 2 second SUBTREE1="ou=Information Technology Division,ou=People,dc=example,dc=com" SUBTREE2="ou=Groups,dc=example,dc=com" echo "Using ldapadd to populate subtree=\"${SUBTREE1}\" on port $PORT1..." $LDAPADD -D "cn=Manager 1,$BASEDN" -w $PASSWD -h $LOCALHOST -p $PORT1 \ -f $LDIFORDERED -c \ > /dev/null 2>&1 RC=$? case $RC in 0) echo "ldapadd should have failed ($RC)!" killservers exit 2 ;; 10|68) # Fine if we get alreadyExists or referrals ;; *) echo "ldapadd failed ($RC)!" killservers exit $RC ;; esac echo "Using ldapadd to populate subtree=\"${SUBTREE2}\" on port $PORT2..." $LDAPADD -D "cn=Manager 2,$BASEDN" -w $PASSWD -h $LOCALHOST -p $PORT2 \ -f $LDIFORDERED -c \ > /dev/null 2>&1 RC=$? case $RC in 0) echo "ldapadd should have failed ($RC)!" killservers exit 2 ;; 10|68) # Fine if we get alreadyExists or referrals ;; *) echo "ldapadd failed ($RC)!" killservers exit $RC ;; esac #echo "Waiting $SLEEP1 seconds for shadow subtrees to sync..." wait_syncrepl $PORT1 $PORT2 sub echo "Filtering original ldif used to create database..." $LDIFFILTER < $GLUESYNCOUT > $LDIFFLT for P in $PORT1 $PORT2 ; do echo "Using ldapsearch to read all the entries from port $P..." $LDAPSEARCH -b "$BASEDN" -h $LOCALHOST -p $P \ -S "" '(objectclass=*)' > "${SEARCHOUT}.${P}" 2>&1 RC=$? if test $RC != 0 ; then echo "ldapsearch failed ($RC)!" killservers exit $RC fi echo "Filtering ldapsearch results..." $LDIFFILTER < "${SEARCHOUT}.${P}" > $SEARCHFLT echo "Comparing filter output..." $CMP $SEARCHFLT $LDIFFLT > $CMPOUT if test $? != 0 ; then echo "comparison failed - database was not created correctly" killservers exit 1 fi done echo "Testing ldapdelete propagation..." $LDAPDELETE -D "cn=Manager 1,$BASEDN" -w $PASSWD -H $URI1 "$BABSDN" \ > $TESTOUT 2>&1 RC=$? if test $RC != 0 ; then echo "ldapdelete failed ($RC)!" killservers exit $RC fi # This usually propagates immediately wait_syncrepl $PORT1 $PORT2 sub $LDAPSEARCH -H $URI2 -b "$BABSDN" > $TESTOUT 2>&1 RC=$? if test $RC = 0 && test $BACKEND != null ; then echo "ldapsearch should have failed ($RC)!" killservers exit 2 fi killservers echo ">>>>> Test succeeded" exit 0
{ "pile_set_name": "Github" }
// // Array+Safe.swift // BmoViewPager // // Created by LEE ZHE YU on 2017/7/2. // Copyright © 2017年 CocoaPods. All rights reserved. // extension Collection { /// Returns the element at the specified index iff it is within bounds, otherwise nil. subscript (safe index: Index) -> Iterator.Element? { return indices.contains(index) ? self[index] : nil } }
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: df9bfb5c080dac24cbbc6c9f595ed413 MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData:
{ "pile_set_name": "Github" }
#### [DefaultEcs](./index.md 'index') ### [DefaultEcs.Serialization](./DefaultEcs-Serialization.md 'DefaultEcs.Serialization').[ISerializer](./DefaultEcs-Serialization-ISerializer.md 'DefaultEcs.Serialization.ISerializer') ## ISerializer.Deserialize(System.IO.Stream) Method Deserializes a [World](./DefaultEcs-World.md 'DefaultEcs.World') instance from the given [System.IO.Stream](https://docs.microsoft.com/en-us/dotnet/api/System.IO.Stream 'System.IO.Stream'). ```csharp DefaultEcs.World Deserialize(System.IO.Stream stream); ``` #### Parameters <a name='DefaultEcs-Serialization-ISerializer-Deserialize(System-IO-Stream)-stream'></a> `stream` [System.IO.Stream](https://docs.microsoft.com/en-us/dotnet/api/System.IO.Stream 'System.IO.Stream') The [System.IO.Stream](https://docs.microsoft.com/en-us/dotnet/api/System.IO.Stream 'System.IO.Stream') from which the data will be loaded. #### Returns [World](./DefaultEcs-World.md 'DefaultEcs.World') The [World](./DefaultEcs-World.md 'DefaultEcs.World') instance loaded.
{ "pile_set_name": "Github" }
/** @file Main file for Dh shell Driver1 function. (C) Copyright 2014-2015 Hewlett-Packard Development Company, L.P.<BR> Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR> (C) Copyright 2017 Hewlett Packard Enterprise Development LP<BR> This program and the accompanying materials are licensed and made available under the terms and conditions of the BSD License which accompanies this distribution. The full text of the license may be found at http://opensource.org/licenses/bsd-license.php THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. **/ #include "UefiShellDriver1CommandsLib.h" STATIC CONST SHELL_PARAM_ITEM ParamList[] = { {L"-p", TypeValue}, {L"-d", TypeFlag}, {L"-v", TypeFlag}, {L"-verbose", TypeFlag}, {L"-sfo", TypeFlag}, {L"-l", TypeValue}, {NULL, TypeMax} }; STATIC CONST EFI_GUID *UefiDriverModelProtocolsGuidArray[] = { &gEfiDriverBindingProtocolGuid, &gEfiPlatformDriverOverrideProtocolGuid, &gEfiBusSpecificDriverOverrideProtocolGuid, &gEfiDriverDiagnosticsProtocolGuid, &gEfiDriverDiagnostics2ProtocolGuid, &gEfiComponentNameProtocolGuid, &gEfiComponentName2ProtocolGuid, &gEfiPlatformToDriverConfigurationProtocolGuid, &gEfiDriverSupportedEfiVersionProtocolGuid, &gEfiDriverFamilyOverrideProtocolGuid, &gEfiDriverHealthProtocolGuid, &gEfiLoadedImageProtocolGuid, NULL }; UINTN mGuidDataLen[] = {8, 4, 4, 4, 12}; /** Function to determine if the string can convert to a GUID. The string must be restricted as "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" format. @param[in] String The string to test. @retval TRUE The string can convert to a GUID. @retval FALSE The string can't convert to a GUID. **/ BOOLEAN IsValidGuidString( IN CONST CHAR16 *String ) { CONST CHAR16 *Walker; CONST CHAR16 *PrevWalker; UINTN Index; if (String == NULL) { return FALSE; } Walker = String; PrevWalker = String; Index = 0; while (Walker != NULL && *Walker != CHAR_NULL) { if ( (*Walker >= '0' && *Walker <= '9') || (*Walker >= 'a' && *Walker <= 'f') || (*Walker >= 'A' && *Walker <= 'F') ) { Walker++; } else { if (*Walker == L'-' && (((UINTN)Walker - (UINTN)PrevWalker) / sizeof (CHAR16)) == mGuidDataLen[Index]) { Walker++; PrevWalker = Walker; Index++; } else { return FALSE; } } } if ((((UINTN)Walker - (UINTN)PrevWalker) / sizeof (CHAR16)) == mGuidDataLen[Index]) { return TRUE; } else { return FALSE; } } /** Convert a hex-character to decimal value. This internal function only deal with Unicode character which maps to a valid hexadecimal ASII character, i.e. L'0' to L'9', L'a' to L'f' or L'A' to L'F'. For other Unicode character, the value returned does not make sense. @param[in] Char The character to convert. @retval The numerical value converted. **/ UINTN HexCharToDecimal( IN CHAR16 Char ) { if (Char >= '0' && Char <= '9') { return Char - L'0'; } else if (Char >= 'a' && Char <= 'f') { return Char - L'a' + 10; } else { return Char - L'A' + 10; } } /** Function try to convert a string to GUID format. @param[in] String The string will be converted. @param[out] Guid Save the result convert from string. @retval EFI_SUCCESS The string was successfully converted to a GUID. @retval EFI_UNSUPPORTED The input string is not in registry format. **/ EFI_STATUS ConvertStrToGuid( IN CONST CHAR16 *String, OUT GUID *Guid ) { CONST CHAR16 *Walker; UINT8 TempValue; UINTN Index; if (String == NULL || !IsValidGuidString (String)) { return EFI_UNSUPPORTED; } Index = 0; Walker = String; Guid->Data1 = (UINT32)StrHexToUint64 (Walker); Walker += 9; Guid->Data2 = (UINT16)StrHexToUint64 (Walker); Walker += 5; Guid->Data3 = (UINT16)StrHexToUint64 (Walker); Walker += 5; while (Walker != NULL && *Walker != CHAR_NULL) { if (*Walker == L'-') { Walker++; } else { TempValue = (UINT8)HexCharToDecimal (*Walker); TempValue = (UINT8)LShiftU64 (TempValue, 4); Walker++; TempValue += (UINT8)HexCharToDecimal (*Walker); Walker++; Guid->Data4[Index] = TempValue; Index++; } } return EFI_SUCCESS; } /** Get the name of a driver by it's handle. If a name is found the memory must be callee freed. @param[in] TheHandle The driver's handle. @param[in] Language The language to use. @param[in] NameFound Upon a successful return the name found. @retval EFI_SUCCESS The name was found. **/ EFI_STATUS GetDriverName ( IN EFI_HANDLE TheHandle, IN CONST CHAR8 *Language, IN CHAR16 **NameFound ) { CHAR8 *Lang; EFI_STATUS Status; EFI_COMPONENT_NAME2_PROTOCOL *CompName2; CHAR16 *NameToReturn; // // Go through those handles until we get one that passes for GetComponentName // Status = gBS->OpenProtocol( TheHandle, &gEfiComponentName2ProtocolGuid, (VOID**)&CompName2, gImageHandle, NULL, EFI_OPEN_PROTOCOL_GET_PROTOCOL); if (EFI_ERROR(Status)) { Status = gBS->OpenProtocol( TheHandle, &gEfiComponentNameProtocolGuid, (VOID**)&CompName2, gImageHandle, NULL, EFI_OPEN_PROTOCOL_GET_PROTOCOL); } if (EFI_ERROR(Status)) { return (EFI_NOT_FOUND); } Lang = GetBestLanguageForDriver (CompName2->SupportedLanguages, Language, FALSE); Status = CompName2->GetDriverName(CompName2, Lang, &NameToReturn); FreePool(Lang); if (!EFI_ERROR(Status) && NameToReturn != NULL) { *NameFound = NULL; StrnCatGrow(NameFound, NULL, NameToReturn, 0); } return (Status); } /** Discover if a protocol guid is one of the UEFI Driver Model Protocols. @param[in] Guid The guid to test. @retval TRUE The guid does represent a driver model protocol. @retval FALSE The guid does not represent a driver model protocol. **/ BOOLEAN IsDriverProt ( IN CONST EFI_GUID *Guid ) { CONST EFI_GUID **GuidWalker; BOOLEAN GuidFound; GuidFound = FALSE; for (GuidWalker = UefiDriverModelProtocolsGuidArray ; GuidWalker != NULL && *GuidWalker != NULL ; GuidWalker++ ){ if (CompareGuid(*GuidWalker, Guid)) { GuidFound = TRUE; break; } } return (GuidFound); } /** Get information for a handle. @param[in] TheHandle The handles to show info on. @param[in] Language Language string per UEFI specification. @param[in] Separator Separator string between information blocks. @param[in] Verbose TRUE for extra info, FALSE otherwise. @param[in] ExtraInfo TRUE for extra info, FALSE otherwise. @retval SHELL_SUCCESS The operation was successful. @retval SHELL_INVALID_PARAMETER ProtocolName was NULL or invalid. **/ CHAR16* GetProtocolInfoString( IN CONST EFI_HANDLE TheHandle, IN CONST CHAR8 *Language, IN CONST CHAR16 *Separator, IN CONST BOOLEAN Verbose, IN CONST BOOLEAN ExtraInfo ) { EFI_GUID **ProtocolGuidArray; UINTN ArrayCount; UINTN ProtocolIndex; EFI_STATUS Status; CHAR16 *RetVal; UINTN Size; CHAR16 *Temp; CHAR16 GuidStr[40]; VOID *Instance; CHAR16 InstanceStr[17]; ProtocolGuidArray = NULL; RetVal = NULL; Size = 0; Status = gBS->ProtocolsPerHandle ( TheHandle, &ProtocolGuidArray, &ArrayCount ); if (!EFI_ERROR (Status)) { for (ProtocolIndex = 0; ProtocolIndex < ArrayCount; ProtocolIndex++) { Temp = GetStringNameFromGuid(ProtocolGuidArray[ProtocolIndex], Language); ASSERT((RetVal == NULL && Size == 0) || (RetVal != NULL)); if (Size != 0) { StrnCatGrow(&RetVal, &Size, Separator, 0); } StrnCatGrow(&RetVal, &Size, L"%H", 0); if (Temp == NULL) { UnicodeSPrint (GuidStr, sizeof (GuidStr), L"%g", ProtocolGuidArray[ProtocolIndex]); StrnCatGrow (&RetVal, &Size, GuidStr, 0); } else { StrnCatGrow(&RetVal, &Size, Temp, 0); FreePool(Temp); } StrnCatGrow(&RetVal, &Size, L"%N", 0); if(Verbose) { Status = gBS->HandleProtocol (TheHandle, ProtocolGuidArray[ProtocolIndex], &Instance); if (!EFI_ERROR (Status)) { StrnCatGrow (&RetVal, &Size, L"(%H", 0); UnicodeSPrint (InstanceStr, sizeof (InstanceStr), L"%x", Instance); StrnCatGrow (&RetVal, &Size, InstanceStr, 0); StrnCatGrow (&RetVal, &Size, L"%N)", 0); } } if (ExtraInfo) { Temp = GetProtocolInformationDump(TheHandle, ProtocolGuidArray[ProtocolIndex], Verbose); if (Temp != NULL) { ASSERT((RetVal == NULL && Size == 0) || (RetVal != NULL)); if (!Verbose) { StrnCatGrow(&RetVal, &Size, L"(", 0); StrnCatGrow(&RetVal, &Size, Temp, 0); StrnCatGrow(&RetVal, &Size, L")", 0); } else { StrnCatGrow(&RetVal, &Size, Separator, 0); StrnCatGrow(&RetVal, &Size, Temp, 0); } FreePool(Temp); } } } } SHELL_FREE_NON_NULL(ProtocolGuidArray); if (RetVal == NULL) { return (NULL); } ASSERT((RetVal == NULL && Size == 0) || (RetVal != NULL)); StrnCatGrow(&RetVal, &Size, Separator, 0); return (RetVal); } /** Gets the name of the loaded image. @param[in] TheHandle The handle of the driver to get info on. @param[out] Name The pointer to the pointer. Valid upon a successful return. @retval EFI_SUCCESS The operation was successful. **/ EFI_STATUS GetDriverImageName ( IN EFI_HANDLE TheHandle, OUT CHAR16 **Name ) { // get loaded image and devicepathtotext on image->Filepath EFI_LOADED_IMAGE_PROTOCOL *LoadedImage; EFI_STATUS Status; EFI_DEVICE_PATH_PROTOCOL *DevicePath; if (TheHandle == NULL || Name == NULL) { return (EFI_INVALID_PARAMETER); } Status = gBS->OpenProtocol ( TheHandle, &gEfiLoadedImageProtocolGuid, (VOID **) &LoadedImage, gImageHandle, NULL, EFI_OPEN_PROTOCOL_GET_PROTOCOL ); if (EFI_ERROR(Status)) { return (Status); } DevicePath = LoadedImage->FilePath; *Name = ConvertDevicePathToText(DevicePath, TRUE, TRUE); return (EFI_SUCCESS); } /** Display driver model information for a given handle. @param[in] Handle The handle to display info on. @param[in] BestName Use the best name? @param[in] Language The language to output in. **/ EFI_STATUS DisplayDriverModelHandle ( IN EFI_HANDLE Handle, IN BOOLEAN BestName, IN CONST CHAR8 *Language OPTIONAL ) { EFI_STATUS Status; BOOLEAN ConfigurationStatus; BOOLEAN DiagnosticsStatus; UINTN DriverBindingHandleCount; EFI_HANDLE *DriverBindingHandleBuffer; UINTN ParentControllerHandleCount; EFI_HANDLE *ParentControllerHandleBuffer; UINTN ChildControllerHandleCount; EFI_HANDLE *ChildControllerHandleBuffer; CHAR16 *TempStringPointer; EFI_DEVICE_PATH_PROTOCOL *DevicePath; UINTN Index; CHAR16 *DriverName; EFI_DRIVER_BINDING_PROTOCOL *DriverBinding; UINTN NumberOfChildren; UINTN HandleIndex; UINTN ControllerHandleCount; EFI_HANDLE *ControllerHandleBuffer; UINTN ChildIndex; BOOLEAN Image; DriverName = NULL; // // See if Handle is a device handle and display its details. // DriverBindingHandleBuffer = NULL; Status = PARSE_HANDLE_DATABASE_UEFI_DRIVERS ( Handle, &DriverBindingHandleCount, &DriverBindingHandleBuffer ); ParentControllerHandleBuffer = NULL; Status = PARSE_HANDLE_DATABASE_PARENTS ( Handle, &ParentControllerHandleCount, &ParentControllerHandleBuffer ); ChildControllerHandleBuffer = NULL; Status = ParseHandleDatabaseForChildControllers ( Handle, &ChildControllerHandleCount, &ChildControllerHandleBuffer ); DiagnosticsStatus = FALSE; ConfigurationStatus = FALSE; if (!EFI_ERROR(gBS->OpenProtocol(Handle, &gEfiDriverConfigurationProtocolGuid, NULL, NULL, gImageHandle, EFI_OPEN_PROTOCOL_TEST_PROTOCOL))) { ConfigurationStatus = TRUE; } if (!EFI_ERROR(gBS->OpenProtocol(Handle, &gEfiDriverConfiguration2ProtocolGuid, NULL, NULL, gImageHandle, EFI_OPEN_PROTOCOL_TEST_PROTOCOL))) { ConfigurationStatus = TRUE; } if (!EFI_ERROR(gBS->OpenProtocol(Handle, &gEfiDriverDiagnosticsProtocolGuid, NULL, NULL, gImageHandle, EFI_OPEN_PROTOCOL_TEST_PROTOCOL))) { DiagnosticsStatus = TRUE; } if (!EFI_ERROR(gBS->OpenProtocol(Handle, &gEfiDriverDiagnostics2ProtocolGuid, NULL, NULL, gImageHandle, EFI_OPEN_PROTOCOL_TEST_PROTOCOL))) { DiagnosticsStatus = TRUE; } Status = EFI_SUCCESS; if (DriverBindingHandleCount > 0 || ParentControllerHandleCount > 0 || ChildControllerHandleCount > 0) { DevicePath = NULL; TempStringPointer = NULL; Status = gBS->HandleProtocol (Handle, &gEfiDevicePathProtocolGuid, (VOID**)&DevicePath); Status = gEfiShellProtocol->GetDeviceName(Handle, EFI_DEVICE_NAME_USE_COMPONENT_NAME|EFI_DEVICE_NAME_USE_DEVICE_PATH, (CHAR8*)Language, &TempStringPointer); ShellPrintHiiEx(-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER1), gShellDriver1HiiHandle, TempStringPointer!=NULL?TempStringPointer:L"<Unknown>"); SHELL_FREE_NON_NULL(TempStringPointer); TempStringPointer = ConvertDevicePathToText(DevicePath, TRUE, FALSE); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER2), gShellDriver1HiiHandle, TempStringPointer!=NULL?TempStringPointer:L"<None>", ParentControllerHandleCount == 0?L"ROOT":(ChildControllerHandleCount > 0)?L"BUS":L"DEVICE", ConfigurationStatus?L"YES":L"NO", DiagnosticsStatus?L"YES":L"NO" ); SHELL_FREE_NON_NULL(TempStringPointer); if (DriverBindingHandleCount == 0) { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER3), gShellDriver1HiiHandle, L"<None>" ); } else { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER3), gShellDriver1HiiHandle, L"" ); for (Index = 0; Index < DriverBindingHandleCount; Index++) { Image = FALSE; Status = GetDriverName ( DriverBindingHandleBuffer[Index], Language, &DriverName ); if (EFI_ERROR (Status)) { Status = GetDriverImageName ( DriverBindingHandleBuffer[Index], &DriverName ); if (EFI_ERROR (Status)) { DriverName = NULL; } } if (Image) { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER4A), gShellDriver1HiiHandle, ConvertHandleToHandleIndex (DriverBindingHandleBuffer[Index]), DriverName!=NULL?DriverName:L"<Unknown>" ); } else { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER4B), gShellDriver1HiiHandle, ConvertHandleToHandleIndex (DriverBindingHandleBuffer[Index]), DriverName!=NULL?DriverName:L"<Unknown>" ); } SHELL_FREE_NON_NULL(DriverName); } } if (ParentControllerHandleCount == 0) { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER5), gShellDriver1HiiHandle, L"<None>" ); } else { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER5), gShellDriver1HiiHandle, L"" ); for (Index = 0; Index < ParentControllerHandleCount; Index++) { Status = gEfiShellProtocol->GetDeviceName(ParentControllerHandleBuffer[Index], EFI_DEVICE_NAME_USE_COMPONENT_NAME|EFI_DEVICE_NAME_USE_DEVICE_PATH, (CHAR8*)Language, &TempStringPointer); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER5B), gShellDriver1HiiHandle, ConvertHandleToHandleIndex (ParentControllerHandleBuffer[Index]), TempStringPointer!=NULL?TempStringPointer:L"<Unknown>" ); SHELL_FREE_NON_NULL(TempStringPointer); } } if (ChildControllerHandleCount == 0) { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER6), gShellDriver1HiiHandle, L"<None>" ); } else { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER6), gShellDriver1HiiHandle, L"" ); for (Index = 0; Index < ChildControllerHandleCount; Index++) { Status = gEfiShellProtocol->GetDeviceName(ChildControllerHandleBuffer[Index], EFI_DEVICE_NAME_USE_COMPONENT_NAME|EFI_DEVICE_NAME_USE_DEVICE_PATH, (CHAR8*)Language, &TempStringPointer); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER6B), gShellDriver1HiiHandle, ConvertHandleToHandleIndex (ChildControllerHandleBuffer[Index]), TempStringPointer!=NULL?TempStringPointer:L"<Unknown>" ); SHELL_FREE_NON_NULL(TempStringPointer); } } } SHELL_FREE_NON_NULL(DriverBindingHandleBuffer); SHELL_FREE_NON_NULL(ParentControllerHandleBuffer); SHELL_FREE_NON_NULL(ChildControllerHandleBuffer); if (EFI_ERROR (Status)) { return Status; } // // See if Handle is a driver binding handle and display its details. // Status = gBS->OpenProtocol ( Handle, &gEfiDriverBindingProtocolGuid, (VOID **) &DriverBinding, NULL, NULL, EFI_OPEN_PROTOCOL_GET_PROTOCOL ); if (EFI_ERROR (Status)) { return EFI_SUCCESS; } NumberOfChildren = 0; ControllerHandleBuffer = NULL; Status = PARSE_HANDLE_DATABASE_DEVICES ( Handle, &ControllerHandleCount, &ControllerHandleBuffer ); if (ControllerHandleCount > 0) { for (HandleIndex = 0; HandleIndex < ControllerHandleCount; HandleIndex++) { Status = PARSE_HANDLE_DATABASE_MANAGED_CHILDREN ( Handle, ControllerHandleBuffer[HandleIndex], &ChildControllerHandleCount, NULL ); NumberOfChildren += ChildControllerHandleCount; } } Status = GetDriverName (Handle, Language, &DriverName); if (EFI_ERROR (Status)) { DriverName = NULL; } ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER7), gShellDriver1HiiHandle, ConvertHandleToHandleIndex(Handle), DriverName!=NULL?DriverName:L"<Unknown>" ); SHELL_FREE_NON_NULL(DriverName); Status = GetDriverImageName ( Handle, &DriverName ); if (EFI_ERROR (Status)) { DriverName = NULL; } ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER7B), gShellDriver1HiiHandle, DriverName!=NULL?DriverName:L"<Unknown>" ); SHELL_FREE_NON_NULL(DriverName); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER8), gShellDriver1HiiHandle, DriverBinding->Version, NumberOfChildren > 0?L"Bus":ControllerHandleCount > 0?L"Device":L"<Unknown>", ConfigurationStatus?L"YES":L"NO", DiagnosticsStatus?L"YES":L"NO" ); if (ControllerHandleCount == 0) { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER9), gShellDriver1HiiHandle, L"None" ); } else { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER9), gShellDriver1HiiHandle, L"" ); for (HandleIndex = 0; HandleIndex < ControllerHandleCount; HandleIndex++) { Status = gEfiShellProtocol->GetDeviceName(ControllerHandleBuffer[HandleIndex], EFI_DEVICE_NAME_USE_COMPONENT_NAME|EFI_DEVICE_NAME_USE_DEVICE_PATH, (CHAR8*)Language, &TempStringPointer); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER9B), gShellDriver1HiiHandle, ConvertHandleToHandleIndex(ControllerHandleBuffer[HandleIndex]), TempStringPointer!=NULL?TempStringPointer:L"<Unknown>" ); SHELL_FREE_NON_NULL(TempStringPointer); Status = PARSE_HANDLE_DATABASE_MANAGED_CHILDREN ( Handle, ControllerHandleBuffer[HandleIndex], &ChildControllerHandleCount, &ChildControllerHandleBuffer ); if (!EFI_ERROR (Status)) { for (ChildIndex = 0; ChildIndex < ChildControllerHandleCount; ChildIndex++) { Status = gEfiShellProtocol->GetDeviceName(ChildControllerHandleBuffer[ChildIndex], EFI_DEVICE_NAME_USE_COMPONENT_NAME|EFI_DEVICE_NAME_USE_DEVICE_PATH, (CHAR8*)Language, &TempStringPointer); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DRIVER6C), gShellDriver1HiiHandle, ConvertHandleToHandleIndex(ChildControllerHandleBuffer[ChildIndex]), TempStringPointer!=NULL?TempStringPointer:L"<Unknown>" ); SHELL_FREE_NON_NULL(TempStringPointer); } SHELL_FREE_NON_NULL (ChildControllerHandleBuffer); } } SHELL_FREE_NON_NULL (ControllerHandleBuffer); } return EFI_SUCCESS; } /** Display information for a handle. @param[in] TheHandle The handles to show info on. @param[in] Verbose TRUE for extra info, FALSE otherwise. @param[in] Sfo TRUE to output in standard format output (spec). @param[in] Language Language string per UEFI specification. @param[in] DriverInfo TRUE to show all info about the handle. @param[in] Multiple TRUE indicates more than will be output, FALSE for a single one. **/ VOID DoDhByHandle( IN CONST EFI_HANDLE TheHandle, IN CONST BOOLEAN Verbose, IN CONST BOOLEAN Sfo, IN CONST CHAR8 *Language, IN CONST BOOLEAN DriverInfo, IN CONST BOOLEAN Multiple ) { CHAR16 *ProtocolInfoString; ProtocolInfoString = NULL; if (!Sfo) { if (Multiple) { ProtocolInfoString = GetProtocolInfoString(TheHandle, Language, L" ", Verbose, TRUE); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT), gShellDriver1HiiHandle, ConvertHandleToHandleIndex(TheHandle), ProtocolInfoString==NULL?L"":ProtocolInfoString ); } else { ProtocolInfoString = GetProtocolInfoString(TheHandle, Language, Verbose ? L"\r\n" : L" ", Verbose, TRUE); if (Verbose) { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_SINGLE), gShellDriver1HiiHandle, ConvertHandleToHandleIndex(TheHandle), TheHandle, ProtocolInfoString==NULL?L"":ProtocolInfoString ); } else { ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_SINGLE_D), gShellDriver1HiiHandle, ConvertHandleToHandleIndex(TheHandle), ProtocolInfoString==NULL?L"":ProtocolInfoString ); } } if (DriverInfo) { DisplayDriverModelHandle ((EFI_HANDLE)TheHandle, TRUE, Language); } } else { ProtocolInfoString = GetProtocolInfoString(TheHandle, Language, L";", FALSE, FALSE); ShellPrintHiiEx( -1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_SFO), gShellDriver1HiiHandle, Multiple ?L"HandlesInfo":L"HandleInfo", L"DriverName", L"ControllerName", ConvertHandleToHandleIndex(TheHandle), L"DevPath", ProtocolInfoString==NULL?L"":ProtocolInfoString ); } if (ProtocolInfoString != NULL) { FreePool(ProtocolInfoString); } } /** Display information for all handles on a list. @param[in] HandleList The NULL-terminated list of handles. @param[in] Verbose TRUE for extra info, FALSE otherwise. @param[in] Sfo TRUE to output in standard format output (spec). @param[in] Language Language string per UEFI specification. @param[in] DriverInfo TRUE to show all info about the handle. @retval SHELL_SUCCESS The operation was successful. @retval SHELL_ABORTED The operation was aborted. **/ SHELL_STATUS DoDhForHandleList( IN CONST EFI_HANDLE *HandleList, IN CONST BOOLEAN Verbose, IN CONST BOOLEAN Sfo, IN CONST CHAR8 *Language, IN CONST BOOLEAN DriverInfo ) { CONST EFI_HANDLE *HandleWalker; SHELL_STATUS ShellStatus; ShellStatus = SHELL_SUCCESS; for (HandleWalker = HandleList; HandleWalker != NULL && *HandleWalker != NULL; HandleWalker++) { DoDhByHandle (*HandleWalker, Verbose, Sfo, Language, DriverInfo, TRUE); if (ShellGetExecutionBreakFlag ()) { ShellStatus = SHELL_ABORTED; break; } } return (ShellStatus); } /** Display information for a GUID of protocol. @param[in] Guid The pointer to the name of the protocol. @param[in] Verbose TRUE for extra info, FALSE otherwise. @param[in] Sfo TRUE to output in standard format output (spec). @param[in] Language Language string per UEFI specification. @param[in] DriverInfo TRUE to show all info about the handle. @retval SHELL_SUCCESS The operation was successful. @retval SHELL_NOT_FOUND The GUID was not found. @retval SHELL_INVALID_PARAMETER ProtocolName was NULL or invalid. **/ SHELL_STATUS DoDhByProtocolGuid( IN CONST GUID *Guid, IN CONST BOOLEAN Verbose, IN CONST BOOLEAN Sfo, IN CONST CHAR8 *Language, IN CONST BOOLEAN DriverInfo ) { CHAR16 *Name; SHELL_STATUS ShellStatus; EFI_HANDLE *HandleList; if (!Sfo) { if (Guid == NULL) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_ALL_HEADER), gShellDriver1HiiHandle); } else { Name = GetStringNameFromGuid (Guid, NULL); if (Name == NULL) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_GUID_HEADER), gShellDriver1HiiHandle, Guid); } else { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_NAME_HEADER), gShellDriver1HiiHandle, Name); } } } HandleList = GetHandleListByProtocol(Guid); ShellStatus = DoDhForHandleList(HandleList, Verbose, Sfo, Language, DriverInfo); SHELL_FREE_NON_NULL(HandleList); return ShellStatus; } /** Function to determine use which method to print information. If Protocol is NULL, The function will print all information. @param[in] Protocol The pointer to the name or GUID of protocol or NULL. @param[in] Verbose TRUE for extra info, FALSE otherwise. @param[in] Sfo TRUE to output in standard format output (spec). @param[in] Language Language string per UEFI specification. @param[in] DriverInfo TRUE to show all info about the handle. @retval SHELL_SUCCESS The operation was successful. @retval SHELL_NOT_FOUND The protocol was not found. @retval SHELL_INVALID_PARAMETER Protocol is invalid parameter. **/ SHELL_STATUS DoDhByProtocol ( IN CONST CHAR16 *Protocol, IN CONST BOOLEAN Verbose, IN CONST BOOLEAN Sfo, IN CONST CHAR8 *Language, IN CONST BOOLEAN DriverInfo ) { EFI_GUID Guid; EFI_GUID *GuidPtr; EFI_STATUS Status; if (Protocol == NULL) { return DoDhByProtocolGuid (NULL, Verbose, Sfo, Language, DriverInfo); } else { Status = ConvertStrToGuid (Protocol, &Guid); if (!EFI_ERROR (Status)) { GuidPtr = &Guid; } else { // // Protocol is a Name, convert it to GUID // Status = GetGuidFromStringName (Protocol, Language, &GuidPtr); if (EFI_ERROR(Status)) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_NO_NAME_FOUND), gShellDriver1HiiHandle, Protocol); return (SHELL_NOT_FOUND); } } return DoDhByProtocolGuid (GuidPtr, Verbose, Sfo, Language, DriverInfo); } } /** Function to display decode information by Protocol. The parameter Protocol is either a GUID or the name of protocol. If the parameter Protocol is NULL, the function will print all decode information. @param[in] Protocol The pointer to the name or GUID of protocol. @param[in] Language Language string per UEFI specification. @retval SHELL_SUCCESS The operation was successful. @retval SHELL_OUT_OT_RESOURCES A memory allocation failed. **/ SHELL_STATUS DoDecodeByProtocol( IN CONST CHAR16 *Protocol, IN CONST CHAR8 *Language ) { EFI_STATUS Status; EFI_GUID *Guids; EFI_GUID Guid; UINTN Counts; UINTN Index; CHAR16 *Name; if (Protocol == NULL) { Counts = 0; Status = GetAllMappingGuids (NULL, &Counts); if (Status == EFI_BUFFER_TOO_SMALL) { Guids = AllocatePool (Counts * sizeof(EFI_GUID)); if (Guids == NULL) { return SHELL_OUT_OF_RESOURCES; } Status = GetAllMappingGuids (Guids, &Counts); if (Status == EFI_SUCCESS) { for (Index = 0; Index < Counts; Index++) { Name = GetStringNameFromGuid (&Guids[Index], Language); if (Name != NULL) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DECODE), gShellDriver1HiiHandle, Name, &Guids[Index]); } else { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_NO_GUID_FOUND), gShellDriver1HiiHandle, &Guids[Index]); } SHELL_FREE_NON_NULL (Name); } } FreePool (Guids); } } else { if (ConvertStrToGuid (Protocol, &Guid) == EFI_SUCCESS) { Name = GetStringNameFromGuid (&Guid, Language); if (Name != NULL) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DECODE), gShellDriver1HiiHandle, Name, &Guid); } else { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_NO_GUID_FOUND), gShellDriver1HiiHandle, &Guid); } SHELL_FREE_NON_NULL(Name); } else { Status = GetGuidFromStringName (Protocol, Language, &Guids); if (Status == EFI_SUCCESS) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_OUTPUT_DECODE), gShellDriver1HiiHandle, Protocol, Guids); } else { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_DH_NO_NAME_FOUND), gShellDriver1HiiHandle, Protocol); } } } return SHELL_SUCCESS; } /** Function for 'dh' command. @param[in] ImageHandle Handle to the Image (NULL if Internal). @param[in] SystemTable Pointer to the System Table (NULL if Internal). **/ SHELL_STATUS EFIAPI ShellCommandRunDh ( IN EFI_HANDLE ImageHandle, IN EFI_SYSTEM_TABLE *SystemTable ) { EFI_STATUS Status; LIST_ENTRY *Package; CHAR16 *ProblemParam; SHELL_STATUS ShellStatus; CHAR8 *Language; CONST CHAR16 *Lang; CONST CHAR16 *RawValue; CONST CHAR16 *ProtocolVal; BOOLEAN SfoFlag; BOOLEAN DriverFlag; BOOLEAN VerboseFlag; UINT64 Intermediate; EFI_HANDLE Handle; ShellStatus = SHELL_SUCCESS; Status = EFI_SUCCESS; Language = NULL; // // initialize the shell lib (we must be in non-auto-init...) // Status = ShellInitialize(); ASSERT_EFI_ERROR(Status); Status = CommandInit(); ASSERT_EFI_ERROR(Status); // // parse the command line // Status = ShellCommandLineParse (ParamList, &Package, &ProblemParam, TRUE); if (EFI_ERROR(Status)) { if (Status == EFI_VOLUME_CORRUPTED && ProblemParam != NULL) { ShellPrintHiiEx(-1, -1, NULL, STRING_TOKEN (STR_GEN_PROBLEM), gShellDriver1HiiHandle, L"dh", ProblemParam); FreePool(ProblemParam); ShellStatus = SHELL_INVALID_PARAMETER; } else { ASSERT(FALSE); } } else { if (ShellCommandLineGetCount(Package) > 2) { ShellPrintHiiEx(-1, -1, NULL, STRING_TOKEN (STR_GEN_TOO_MANY), gShellDriver1HiiHandle, L"dh"); ShellCommandLineFreeVarList (Package); return (SHELL_INVALID_PARAMETER); } if (ShellCommandLineGetFlag(Package, L"-l")) { Lang = ShellCommandLineGetValue(Package, L"-l"); if (Lang != NULL) { Language = AllocateZeroPool(StrSize(Lang)); AsciiSPrint(Language, StrSize(Lang), "%S", Lang); } else { ASSERT(Language == NULL); ShellPrintHiiEx(-1, -1, NULL, STRING_TOKEN(STR_GEN_NO_VALUE), gShellDriver1HiiHandle, L"dh", L"-l"); ShellCommandLineFreeVarList(Package); return (SHELL_INVALID_PARAMETER); } } else { Language = AllocateZeroPool(10); AsciiSPrint(Language, 10, "en-us"); } SfoFlag = ShellCommandLineGetFlag (Package, L"-sfo"); DriverFlag = ShellCommandLineGetFlag (Package, L"-d"); VerboseFlag = (BOOLEAN)(ShellCommandLineGetFlag (Package, L"-v") || ShellCommandLineGetFlag (Package, L"-verbose")); RawValue = ShellCommandLineGetRawValue (Package, 1); ProtocolVal = ShellCommandLineGetValue (Package, L"-p"); if (RawValue == NULL) { if (ShellCommandLineGetFlag (Package, L"-p") && (ProtocolVal == NULL)) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_GEN_NO_VALUE), gShellDriver1HiiHandle, L"dh", L"-p"); ShellStatus = SHELL_INVALID_PARAMETER; } else { // // Print information by protocol, The ProtocolVal maybe is name or GUID or NULL. // ShellStatus = DoDhByProtocol (ProtocolVal, VerboseFlag, SfoFlag, Language, DriverFlag); } } else if ((RawValue != NULL) && (gUnicodeCollation->StriColl(gUnicodeCollation, L"decode", (CHAR16 *) RawValue) == 0)) { if (ShellCommandLineGetFlag (Package, L"-p") && (ProtocolVal == NULL)) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_GEN_NO_VALUE), gShellDriver1HiiHandle, L"dh", L"-p"); ShellStatus = SHELL_INVALID_PARAMETER; } else { // // Print decode informatino by protocol. // ShellStatus = DoDecodeByProtocol (ProtocolVal, Language); } } else { if (ShellCommandLineGetFlag (Package, L"-p")) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_GEN_TOO_MANY), gShellDriver1HiiHandle, L"dh"); ShellStatus = SHELL_INVALID_PARAMETER; } else { Status = ShellConvertStringToUint64 (RawValue, &Intermediate, TRUE, FALSE); if (EFI_ERROR(Status)) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_GEN_INV_HANDLE), gShellDriver1HiiHandle, L"dh", RawValue); ShellStatus = SHELL_INVALID_PARAMETER; } else { Handle = ConvertHandleIndexToHandle ((UINTN) Intermediate); if (Handle == NULL) { ShellPrintHiiEx (-1, -1, NULL, STRING_TOKEN (STR_GEN_INV_HANDLE), gShellDriver1HiiHandle, L"dh", RawValue); ShellStatus = SHELL_INVALID_PARAMETER; } else { // // Print information by handle. // DoDhByHandle (Handle, VerboseFlag, SfoFlag, Language, DriverFlag, FALSE); } } } } ShellCommandLineFreeVarList (Package); SHELL_FREE_NON_NULL(Language); } return (ShellStatus); }
{ "pile_set_name": "Github" }
#!/bin/sh # This is a generated file; do not edit or check into version control. export "FLUTTER_ROOT=/Users/rafaelcmm/SDKs/flutter" export "FLUTTER_APPLICATION_PATH=/Users/rafaelcmm/Workspace/flutter_clean_architecture/example" export "FLUTTER_TARGET=lib/main.dart" export "FLUTTER_BUILD_DIR=build" export "SYMROOT=${SOURCE_ROOT}/../build/ios" export "OTHER_LDFLAGS=$(inherited) -framework Flutter" export "FLUTTER_FRAMEWORK_DIR=/Users/rafaelcmm/SDKs/flutter/bin/cache/artifacts/engine/ios" export "FLUTTER_BUILD_NAME=1.0.0" export "FLUTTER_BUILD_NUMBER=1" export "DART_OBFUSCATION=false" export "TRACK_WIDGET_CREATION=false" export "TREE_SHAKE_ICONS=false" export "PACKAGE_CONFIG=.packages"
{ "pile_set_name": "Github" }
/* * Copyright 2012 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package org.jboss.netty.buffer; import static org.easymock.EasyMock.*; import static org.jboss.netty.buffer.ChannelBuffers.*; import static org.junit.Assert.*; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.nio.ByteBuffer; import java.nio.channels.GatheringByteChannel; import java.nio.channels.ScatteringByteChannel; import org.junit.Test; /** * Tests read-only channel buffers */ public class ReadOnlyChannelBufferTest { @Test(expected = NullPointerException.class) public void shouldNotAllowNullInConstructor() { new ReadOnlyChannelBuffer(null); } @Test public void testUnmodifiableBuffer() { assertTrue(unmodifiableBuffer(buffer(1)) instanceof ReadOnlyChannelBuffer); } @Test public void testUnwrap() { ChannelBuffer buf = buffer(1); assertSame(buf, ((WrappedChannelBuffer) ChannelBuffers.unmodifiableBuffer(buf)).unwrap()); } @Test public void shouldHaveSameByteOrder() { ChannelBuffer buf = buffer(LITTLE_ENDIAN, 1); assertSame(LITTLE_ENDIAN, ChannelBuffers.unmodifiableBuffer(buf).order()); } @Test public void shouldReturnReadOnlyDerivedBuffer() { ChannelBuffer buf = unmodifiableBuffer(buffer(1)); assertTrue(buf.duplicate() instanceof ReadOnlyChannelBuffer); assertTrue(buf.slice() instanceof ReadOnlyChannelBuffer); assertTrue(buf.slice(0, 1) instanceof ReadOnlyChannelBuffer); assertTrue(buf.duplicate() instanceof ReadOnlyChannelBuffer); } @Test public void shouldReturnWritableCopy() { ChannelBuffer buf = unmodifiableBuffer(buffer(1)); assertFalse(buf.copy() instanceof ReadOnlyChannelBuffer); } @Test public void shouldForwardReadCallsBlindly() throws Exception { ChannelBuffer buf = createStrictMock(ChannelBuffer.class); expect(buf.readerIndex()).andReturn(0).anyTimes(); expect(buf.writerIndex()).andReturn(0).anyTimes(); expect(buf.capacity()).andReturn(0).anyTimes(); expect(buf.getBytes(1, (GatheringByteChannel) null, 2)).andReturn(3); buf.getBytes(4, (OutputStream) null, 5); buf.getBytes(6, (byte[]) null, 7, 8); buf.getBytes(9, (ChannelBuffer) null, 10, 11); buf.getBytes(12, (ByteBuffer) null); expect(buf.getByte(13)).andReturn(Byte.valueOf((byte) 14)); expect(buf.getShort(15)).andReturn(Short.valueOf((short) 16)); expect(buf.getUnsignedMedium(17)).andReturn(18); expect(buf.getInt(19)).andReturn(20); expect(buf.getLong(21)).andReturn(22L); ByteBuffer bb = ByteBuffer.allocate(100); ByteBuffer[] bbs = { ByteBuffer.allocate(101), ByteBuffer.allocate(102) }; expect(buf.toByteBuffer(23, 24)).andReturn(bb); expect(buf.toByteBuffers(25, 26)).andReturn(bbs); expect(buf.capacity()).andReturn(27); replay(buf); ChannelBuffer roBuf = unmodifiableBuffer(buf); assertEquals(3, roBuf.getBytes(1, (GatheringByteChannel) null, 2)); roBuf.getBytes(4, (OutputStream) null, 5); roBuf.getBytes(6, (byte[]) null, 7, 8); roBuf.getBytes(9, (ChannelBuffer) null, 10, 11); roBuf.getBytes(12, (ByteBuffer) null); assertEquals((byte) 14, roBuf.getByte(13)); assertEquals((short) 16, roBuf.getShort(15)); assertEquals(18, roBuf.getUnsignedMedium(17)); assertEquals(20, roBuf.getInt(19)); assertEquals(22L, roBuf.getLong(21)); ByteBuffer roBB = roBuf.toByteBuffer(23, 24); assertEquals(100, roBB.capacity()); assertTrue(roBB.isReadOnly()); ByteBuffer[] roBBs = roBuf.toByteBuffers(25, 26); assertEquals(2, roBBs.length); assertEquals(101, roBBs[0].capacity()); assertTrue(roBBs[0].isReadOnly()); assertEquals(102, roBBs[1].capacity()); assertTrue(roBBs[1].isReadOnly()); assertEquals(27, roBuf.capacity()); verify(buf); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectDiscardReadBytes() { unmodifiableBuffer(EMPTY_BUFFER).discardReadBytes(); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetByte() { unmodifiableBuffer(EMPTY_BUFFER).setByte(0, (byte) 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetShort() { unmodifiableBuffer(EMPTY_BUFFER).setShort(0, (short) 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetMedium() { unmodifiableBuffer(EMPTY_BUFFER).setMedium(0, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetInt() { unmodifiableBuffer(EMPTY_BUFFER).setInt(0, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetLong() { unmodifiableBuffer(EMPTY_BUFFER).setLong(0, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetBytes1() throws IOException { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (InputStream) null, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetBytes2() throws IOException { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (ScatteringByteChannel) null, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetBytes3() { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (byte[]) null, 0, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetBytes4() { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (ChannelBuffer) null, 0, 0); } @Test(expected = UnsupportedOperationException.class) public void shouldRejectSetBytes5() { unmodifiableBuffer(EMPTY_BUFFER).setBytes(0, (ByteBuffer) null); } }
{ "pile_set_name": "Github" }
using System; namespace Grace.DependencyInjection { /// <summary> /// Similar to Owned with the difference being a new scope is created and used to resolve instance /// </summary> /// <typeparam name="T"></typeparam> public class Scoped<T> : IDisposable { private readonly IExportLocatorScope _scope; private readonly IInjectionContext _context; private readonly ActivationStrategyDelegate _activationDelegate; private readonly string _scopeName; private IExportLocatorScope _childScope; private T _instance; /// <summary> /// Default constructor /// </summary> /// <param name="scope"></param> /// <param name="context"></param> /// <param name="activationDelegate"></param> /// <param name="scopeName"></param> public Scoped(IExportLocatorScope scope, IInjectionContext context, ActivationStrategyDelegate activationDelegate, string scopeName = null) { _scope = scope; _context = context; _activationDelegate = activationDelegate; _scopeName = scopeName; } /// <summary> /// Instance /// </summary> public T Instance { get { if (_childScope == null) { _childScope = _scope.BeginLifetimeScope(_scopeName); _instance = (T)_activationDelegate(_childScope, _childScope, _context); } return _instance; } } /// <summary>Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.</summary> public void Dispose() { _childScope?.Dispose(); } } }
{ "pile_set_name": "Github" }
from __future__ import print_function import sys sys.path.insert(1,"../../../") from tests import pyunit_utils import random from h2o.estimators.isolation_forest import H2OIsolationForestEstimator from h2o.grid.grid_search import H2OGridSearch from h2o.frame import H2OFrame import numpy as np import pandas as pd def grid_synthetic_IF(): N = 1000 cont = 0.05 regular_data = np.random.normal(0, 0.5, (int(N * (1 - cont)), 2)) anomaly_data = np.column_stack((np.random.normal(-1.5, 1, int(N * cont)), np.random.normal(1.5, 1, int(N * cont)))) regular_pd = pd.DataFrame( {'x': regular_data[:, 0], 'y': regular_data[:, 1], 'label': np.zeros(regular_data.shape[0])}) anomaly_pd = pd.DataFrame( {'x': anomaly_data[:, 0], 'y': anomaly_data[:, 1], 'label': np.ones(anomaly_data.shape[0])}) dataset = H2OFrame(regular_pd.append(anomaly_pd).sample(frac=1)) train_with_label, test = dataset.split_frame([0.8]) train = train_with_label.drop(["label"]) test["label"] = test["label"].asfactor() grid_space = { 'max_depth': random.sample(list(range(1, 6)), random.randint(2, 3)) } print("Grid space: {0}".format(grid_space)) predictors = ["x", "y"] print("Constructing the grid of IF models...") if_grid = H2OGridSearch(H2OIsolationForestEstimator, hyper_params=grid_space) if_grid.train(x=predictors, training_frame=train, validation_frame=test, validation_response_column="label") print("Check correct type value....") model_type = if_grid[0].type assert model_type == 'unsupervised', "Type of model ({0}) is incorrect, expected value is 'unsupervised'.".format(model_type) print("Performing various checks of the constructed grid...") print("Check cardinality of grid, that is, the correct number of models have been created...") size_of_grid_space = 1 for v in list(grid_space.values()): size_of_grid_space = size_of_grid_space * len(v) actual_size = len(if_grid) print("Expected size of grid space: {0}".format(size_of_grid_space)) assert size_of_grid_space == actual_size, "Expected size of grid to be {0}, but got {1}" \ "".format(size_of_grid_space, actual_size) print(if_grid) if __name__ == "__main__": pyunit_utils.standalone_test(grid_synthetic_IF) else: grid_synthetic_IF()
{ "pile_set_name": "Github" }
import _ from 'lodash'; import createScrollHelper from '../../../helpers/dragScrollHelper'; /* @ngInject */ function customFilterList( authentication, dispatchers, networkActivityTracker, Filter, gettextCatalog, notification, filterModal, confirmModal, eventManager ) { return { replace: true, restrict: 'E', templateUrl: require('../../../templates/filter/customFilterList.tpl.html'), scope: {}, link(scope) { // Variables const { on, unsubscribe } = dispatchers(); scope.customFilters = null; const promise = Filter.query().then((filters) => { scope.$applyAsync(() => { scope.customFilters = filters; }); }); networkActivityTracker.track(promise); on('changeCustomFilterStatus', (event, { data: { id, status } }) => { const filter = _.find(scope.customFilters, { ID: id }); if (filter) { changeCustomFilterStatus(filter, status); } }); const { dragStart, dragMove, dragEnd } = createScrollHelper({ scrollableSelector: '#pm_settings .settings' }); // Drag and Drop configuration scope.filterDragControlListeners = { containment: '.pm_sort', accept(sourceItemHandleScope, destSortableScope) { return sourceItemHandleScope.itemScope.sortableScope.$id === destSortableScope.$id; }, dragStart(event) { dragStart(event); scope.itemMoved = true; }, dragMove, dragEnd() { dragEnd(); scope.itemMoved = false; }, orderChanged() { const FilterIDs = _.map(scope.customFilters, 'ID'); _.each(scope.customFilters, (filter, index) => { filter.Priority = index + 1; }); // Save priority order networkActivityTracker.track( Filter.order({ FilterIDs }).then(({ data = {} } = {}) => { notification.success(gettextCatalog.getString('Order saved', null, 'Info')); return data; }) ); } }; scope.$on('$destroy', unsubscribe); on('filter', (event, { type, data: { ID: filterId, Filter: filter } }) => { if (scope.itemMoved) { return; } if (type === 'delete') { const index = _.findIndex(scope.customFilters, { ID: filterId }); if (index !== -1) { scope.customFilters.splice(index, 1); } } if (type === 'create' || type === 'update') { const index = _.findIndex(scope.customFilters, { ID: filterId }); if (index === -1) { scope.customFilters.push(filter); } else { // We need to override everything so it loses the simple tag if the filter is not simple anymore scope.customFilters[index] = filter; } } }); scope.addCustomFilter = () => { const activeCustomFilters = _.filter(scope.customFilters, { Status: 1 }); if (!authentication.hasPaidMail() && activeCustomFilters.length === 1) { return notification.info( gettextCatalog.getString( 'Free ProtonMail accounts are limited to 1 custom filter. Please <a href="/dashboard">upgrade</a> to get unlimited filters.', null, 'Info' ) ); } filterModal.activate({ params: { mode: 'simple', close() { filterModal.deactivate(); } } }); }; scope.addSieveFilter = () => { const activeCustomFilters = _.filter(scope.customFilters, { Status: 1 }); if (!authentication.hasPaidMail() && activeCustomFilters.length === 1) { return notification.info( gettextCatalog.getString( 'Free ProtonMail accounts are limited to 1 custom filter. Please <a href="/dashboard">upgrade</a> to get unlimited filters.', null, 'Info' ) ); } filterModal.activate({ params: { mode: 'complex', close() { filterModal.deactivate(); } } }); }; scope.isSimple = (filter) => filter.Simple && Object.keys(filter.Simple).length; scope.editCustomFilter = function(filter, complex = false) { filterModal.activate({ params: { mode: !complex && scope.isSimple(filter) ? 'simple' : 'complex', filter, close: function close() { filterModal.deactivate(); } } }); }; scope.deleteCustomFilter = (filter) => { const title = gettextCatalog.getString('Delete Filter', null, 'Title'); const message = gettextCatalog.getString('Are you sure you want to delete this filter?', null, 'Info'); confirmModal.activate({ params: { title, message, confirm() { const promise = Filter.delete(filter) .then(eventManager.call) .then(() => { notification.success( gettextCatalog.getString('Custom filter deleted', null, 'Info') ); }); networkActivityTracker.track(promise); confirmModal.deactivate(); }, cancel() { confirmModal.deactivate(); } } }); }; const enableDisable = (fn, revertStatus) => (filter) => { const promise = fn(filter) .then(() => { notification.success(gettextCatalog.getString('Status updated', null, 'Info')); }) .catch((e) => { filter.Status = revertStatus; // Has to be a Boolean to work with the toggle directive throw e; }); return networkActivityTracker.track(promise); }; scope.enableCustomFilter = enableDisable(Filter.enable, false); scope.disableCustomFilter = enableDisable(Filter.disable, true); function changeCustomFilterStatus(filter, status) { if (status) { scope.enableCustomFilter(filter); return; } scope.disableCustomFilter(filter); } } }; } export default customFilterList;
{ "pile_set_name": "Github" }
<?xml version="1.0" standalone="no" ?> <!DOCTYPE pov SYSTEM "/usr/share/cgc-docs/replay.dtd"> <pov> <cbid>service</cbid> <replay> <write><data>UUUUUUUU</data></write> <read><delim>\x0a</delim><match><data>What is your name?\x0a</data></match></read> <write><data>User\x0a</data></write> <read><delim>\x0a</delim><match><data>Hi\x2c User\x0a</data></match></read> <read><delim>\x3e</delim><match><pcre>.*?0 B ></pcre></match></read> <write><data>12 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?1 B ></pcre></match></read> <write><data>8 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?2 B ></pcre></match></read> <write><data>14 14\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?3 B ></pcre></match></read> <write><data>4 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?4 B ></pcre></match></read> <write><data>9 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?5 B ></pcre></match></read> <write><data>13 1\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?6 B ></pcre></match></read> <write><data>8 11\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?7 B ></pcre></match></read> <write><data>15 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?8 B ></pcre></match></read> <write><data>11 0\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?9 B ></pcre></match></read> <write><data>9 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?10 B ></pcre></match></read> <write><data>5 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?11 B ></pcre></match></read> <write><data>8 10\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?12 B ></pcre></match></read> <write><data>15 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?13 B ></pcre></match></read> <write><data>15 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?14 B ></pcre></match></read> <write><data>6 4\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?15 B ></pcre></match></read> <write><data>2 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?16 B ></pcre></match></read> <write><data>7 4\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?17 B ></pcre></match></read> <write><data>4 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?18 B ></pcre></match></read> <write><data>16 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?19 B ></pcre></match></read> <write><data>12 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?20 B ></pcre></match></read> <write><data>17 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?21 B ></pcre></match></read> <write><data>14 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?22 B ></pcre></match></read> <write><data>1 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?22 B ></pcre></match></read> <write><data>5 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?23 B ></pcre></match></read> <write><data>18 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?24 B ></pcre></match></read> <write><data>1 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?25 B ></pcre></match></read> <write><data>5 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?26 B ></pcre></match></read> <write><data>17 7\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?27 B ></pcre></match></read> <write><data>6 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?28 B ></pcre></match></read> <write><data>1 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?29 B ></pcre></match></read> <write><data>3 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?30 B ></pcre></match></read> <write><data>9 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?30 B ></pcre></match></read> <write><data>4 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?31 B ></pcre></match></read> <write><data>8 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?32 B ></pcre></match></read> <write><data>7 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?32 B ></pcre></match></read> <write><data>18 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?32 B ></pcre></match></read> <write><data>7 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?33 B ></pcre></match></read> <write><data>15 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?33 B ></pcre></match></read> <write><data>0 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?34 B ></pcre></match></read> <write><data>18 7\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?35 B ></pcre></match></read> <write><data>11 9\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?36 B ></pcre></match></read> <write><data>5 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?36 B ></pcre></match></read> <write><data>9 5\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?36 B ></pcre></match></read> <write><data>9 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?37 B ></pcre></match></read> <write><data>15 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?38 B ></pcre></match></read> <write><data>5 14\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?39 B ></pcre></match></read> <write><data>16 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?40 B ></pcre></match></read> <write><data>19 1\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?40 B ></pcre></match></read> <write><data>9 1\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?40 B ></pcre></match></read> <write><data>3 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?41 B ></pcre></match></read> <write><data>2 9\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?42 B ></pcre></match></read> <write><data>15 11\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?43 B ></pcre></match></read> <write><data>18 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?44 B ></pcre></match></read> <write><data>17 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?45 B ></pcre></match></read> <write><data>2 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?45 B ></pcre></match></read> <write><data>17 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?46 B ></pcre></match></read> <write><data>7 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?47 B ></pcre></match></read> <write><data>0 1\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?48 B ></pcre></match></read> <write><data>4 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?49 B ></pcre></match></read> <write><data>9 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?49 B ></pcre></match></read> <write><data>10 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?50 B ></pcre></match></read> <write><data>9 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?50 B ></pcre></match></read> <write><data>10 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?51 B ></pcre></match></read> <write><data>0 9\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?52 B ></pcre></match></read> <write><data>7 4\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?52 B ></pcre></match></read> <write><data>9 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?52 B ></pcre></match></read> <write><data>11 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?53 B ></pcre></match></read> <write><data>17 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?53 B ></pcre></match></read> <write><data>13 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?54 B ></pcre></match></read> <write><data>5 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?55 B ></pcre></match></read> <write><data>11 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?56 B ></pcre></match></read> <write><data>9 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?56 B ></pcre></match></read> <write><data>0 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?56 B ></pcre></match></read> <write><data>8 4\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?56 B ></pcre></match></read> <write><data>12 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?57 B ></pcre></match></read> <write><data>4 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?58 B ></pcre></match></read> <write><data>0 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?58 B ></pcre></match></read> <write><data>9 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?58 B ></pcre></match></read> <write><data>9 4\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?58 B ></pcre></match></read> <write><data>13 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?59 B ></pcre></match></read> <write><data>5 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?60 B ></pcre></match></read> <write><data>4 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?60 B ></pcre></match></read> <write><data>2 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?61 B ></pcre></match></read> <write><data>1 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?61 B ></pcre></match></read> <write><data>15 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?61 B ></pcre></match></read> <write><data>8 14\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?62 B ></pcre></match></read> <write><data>0 5\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?63 B ></pcre></match></read> <write><data>15 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?64 B ></pcre></match></read> <write><data>19 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?64 B ></pcre></match></read> <write><data>14 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?65 B ></pcre></match></read> <write><data>4 14\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?66 B ></pcre></match></read> <write><data>8 0\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?66 B ></pcre></match></read> <write><data>16 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?67 B ></pcre></match></read> <write><data>13 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?68 B ></pcre></match></read> <write><data>14 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?69 B ></pcre></match></read> <write><data>4 1\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?70 B ></pcre></match></read> <write><data>8 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?71 B ></pcre></match></read> <write><data>4 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?72 B ></pcre></match></read> <write><data>11 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?72 B ></pcre></match></read> <write><data>6 13\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?73 B ></pcre></match></read> <write><data>18 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?73 B ></pcre></match></read> <write><data>17 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?74 B ></pcre></match></read> <write><data>17 2\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?74 B ></pcre></match></read> <write><data>6 0\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?74 B ></pcre></match></read> <write><data>14 12\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?74 B ></pcre></match></read> <write><data>14 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?74 B ></pcre></match></read> <write><data>11 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?75 B ></pcre></match></read> <write><data>18 9\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>15 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>3 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>9 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>4 14\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>13 19\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>17 18\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?76 B ></pcre></match></read> <write><data>13 11\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?77 B ></pcre></match></read> <write><data>11 0\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?77 B ></pcre></match></read> <write><data>pass\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?78 B ></pcre></match></read> <write><data>13 7\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?79 B ></pcre></match></read> <write><data>12 10\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?80 B ></pcre></match></read> <write><data>8 10\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?80 B ></pcre></match></read> <write><data>1 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?80 B ></pcre></match></read> <write><data>7 17\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?81 B ></pcre></match></read> <write><data>17 14\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?82 B ></pcre></match></read> <write><data>2 3\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?82 B ></pcre></match></read> <write><data>8 6\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?82 B ></pcre></match></read> <write><data>19 4\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?82 B ></pcre></match></read> <write><data>12 9\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?83 B ></pcre></match></read> <write><data>2 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?83 B ></pcre></match></read> <write><data>7 11\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?84 B ></pcre></match></read> <write><data>1 16\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?85 B ></pcre></match></read> <write><data>16 8\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?86 B ></pcre></match></read> <write><data>14 0\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?87 B ></pcre></match></read> <write><data>7 11\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?87 B ></pcre></match></read> <write><data>11 5\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?88 B ></pcre></match></read> <write><data>10 11\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?89 B ></pcre></match></read> <write><data>11 15\x0a</data></write> <read><delim>\x3e</delim><match><pcre>.*?90 B ></pcre></match></read> <write><data>6 14\x0a</data></write> <read><delim>User</delim><match><pcre>.*?Game Over Stones Exhausted You are a Winner, User</pcre></match></read> </replay> </pov>
{ "pile_set_name": "Github" }
{ "name": "thread", "doc_namespace": "xmlns.http://moyaproject.com", "doc_class": "tag", "references": [ "doc.index", "tags.index" ], "data": { "name": "thread", "lib": null, "namespace_slug": "moyaproject_dot_com", "defined": "/home/will/projects/moya/moya/tags/system.py", "doc": "\nRun enclosed block in a thread.\n\nWhen Moya encounters this tag it executes the enclosed code in a new [url \"https://en.wikipedia.org/wiki/Thread_(computing)\"]thread[/url]. The calling thread will jump to the end of the [tag]thread[/tag] tag.\n\nThe enclosed code may return a value (with [tag]return[/tag]), which can be retrieved by the calling thread via it's [c]dst[/c] parameter. If the calling thread references the return value before the thread has returned, it will block until the thread returns.\n\nThis tag is useful in a request when you have some long running action to do, and you don't want to delay returning a response. Here's an example:\n\n[code xml]\n<view libname=\"view.process\" template=\"slowprocess.html\">\n <thread>\n <call macro=\"slow\"/>\n </thread>\n</view>\n[/code]\n\nThis view will render a template and return immediately, while the macro is processing in the background. Note, that the slow macro will have no way of returning anything in the response, but could still send an email or store something in the database to communicate the result to the user.\n\n", "namespace": "http://moyaproject.com", "synopsis": "run a thread", "tag_name": "thread", "params": { "scope": { "default_display": "no", "name": "scope", "missing": true, "default": false, "doc": "Use the current scope?", "required": false, "type": "boolean", "metavar": null, "empty": true, "choices": null }, "join": { "default_display": "no", "name": "join", "missing": true, "default": false, "doc": "Join threads before end of request?", "required": false, "type": "boolean", "metavar": null, "empty": true, "choices": null }, "name": { "default_display": "", "name": "name", "missing": true, "default": null, "doc": "Name of thread", "required": false, "type": "text", "metavar": null, "empty": true, "choices": null }, "timeout": { "default_display": "", "name": "timeout", "missing": true, "default": null, "doc": "Maximum time to wait for thread to complete", "required": false, "type": "timespan", "metavar": null, "empty": true, "choices": null } }, "example": null, "inherited_params": { "dst": { "default_display": "", "name": "dst", "missing": true, "default": null, "doc": "Destination", "required": false, "type": "reference", "metavar": null, "empty": true, "choices": null }, "value": { "default_display": "None", "name": "value", "missing": true, "default": null, "doc": "Value", "required": false, "type": "expression", "metavar": null, "empty": true, "choices": null }, "if": { "default_display": "yes", "name": "if", "missing": true, "default": true, "doc": "Conditional expression", "required": false, "type": "expression", "metavar": null, "empty": true, "choices": null } } }, "id": "xmlns.http://moyaproject.com.thread" }
{ "pile_set_name": "Github" }
<?php namespace gameme\PHPGamification; use Exception; use gameme\PHPGamification; use gameme\PHPGamification\Model; use gameme\PHPGamification\Model\Event; use gameme\PHPGamification\Model\Badge; define('AT_INCLUDE_PATH', '../../../include/'); require (AT_INCLUDE_PATH.'vitals.inc.php'); $_GET["id"] = intval($_GET["id"]); if($_GET["id"]!= ''){ global $_base_path; $course_id = $_SESSION['course_id']; $this_path = preg_replace ('#/get.php#','',$_SERVER['DOCUMENT_ROOT'].$_base_path); $sql = "SELECT * FROM %sgm_events WHERE id=%d AND course_id=%d"; $default_event = queryDB($sql, array(TABLE_PREFIX, $_GET["id"], 0), TRUE); require_once($this_path.'mods/_standard/gameme/gamify.lib.php'); require_once($this_path.'mods/_standard/gameme/PHPGamification/PHPGamification.class.php'); $gamification = new PHPGamification(); $gamification->setDAO(new DAO(DB_HOST, DB_NAME, DB_USER, DB_PASSWORD)); $event = new Event(); if($default_event['id']){ $event->setId($default_event['id'], $_SESSION['course_id']); } if(isset($_SESSION['course_id'])){ $event->setCourseId($_SESSION['course_id']); } $event->setAlias($default_event['alias']); if($default_event['description']){ $event->setDescription($default_event['description']); } if($default_event['allow_repetitions']){ $event->setAllowRepetitions($default_event['allow_repetitions']); } if($default_event['reach_required_repetitions']){ $event->setReachRequiredRepetitions($default_event['reach_required_repetitions']); } if($default_event['max_points']){ $event->setMaxPointsGranted($default_event['max_points']); } if($default_event['each_points']){ $event->setEachPointsGranted($default_event['each_points']); } if($default_event['reach_points']){ $event->setReachPointsGranted($default_event['reach_points']); //$event->setReachPointsGranted($_POST['reach_points']); } if($default_event['id_each_badge']){ $event->copyEachBadgeGranted($default_event['id']); } if($default_event['id_reach_badge']){ $event->copyReachBadgeGranted($default_event['id']); } if($default_event['each_callback']){ $event->setEachCallback($default_event['each_callback']); } if($default_event['reach_callback']){ $event->setReachCallback($default_event['reach_callback']); } if($default_event['reach_message']){ $event->setReachMessage($default_event['reach_message']); } if( $gamification->addEvent($event, $course_id)){ $msg->addFeedback('GM_EVENT_COPIED'); header("Location: ".AT_BASE_HREF."mods/_standard/gameme/index_instructor.php"); exit; } else{ $msg->addError('GM_EVENT_COPY_FAILED'); header("Location: ".AT_BASE_HREF."mods/_standard/gameme/index_instructor.php"); exit; } } ?>
{ "pile_set_name": "Github" }
<?php namespace app\assets; use yii\web\AssetBundle; /** * CodeMirrorAsset groups assets for code editing areas */ class CodeMirrorAsset extends AssetBundle { public $sourcePath = '@bower/codemirror'; public $js = [ 'lib/codemirror.js', // langs to highlight in markdown blocks 'mode/shell/shell.js', 'mode/clike/clike.js', 'mode/css/css.js', 'mode/javascript/javascript.js', 'mode/php/php.js', 'mode/sass/sass.js', 'mode/sql/sql.js', 'mode/twig/twig.js', 'mode/xml/xml.js', 'mode/yaml/yaml.js', 'mode/htmlmixed/htmlmixed.js', // markdown and gfm 'mode/meta.js', 'mode/markdown/markdown.js', 'addon/mode/overlay.js', 'mode/gfm/gfm.js', 'addon/edit/continuelist.js', // code editing goods 'addon/fold/xml-fold.js', 'addon/edit/matchbrackets.js', 'addon/edit/closebrackets.js', 'addon/edit/closetag.js', // for controls 'addon/display/panel.js', ]; public $css = [ 'lib/codemirror.css', ]; }
{ "pile_set_name": "Github" }
<?php namespace BotMan\BotMan\Tests\Cache\RedisCache; use BotMan\BotMan\Cache\ArrayCache; use BotMan\BotMan\Cache\RedisCache; use PHPUnit\Framework\TestCase; use Redis; use RedisException; /** * @group integration * @group redis-insecure */ class InsecureTest extends TestCase { protected function setUp(): void { if (! extension_loaded('redis')) { $this->markTestSkipped('Redis extension required'); } } protected function tearDown(): void { $script = sprintf("for i, name in ipairs(redis.call('KEYS', '%s*')) do redis.call('DEL', name); end", RedisCache::KEY_PREFIX); $redis = new Redis(); $redis->connect($this->getRedisHost(), $this->getRedisPort()); $redis->eval($script); $redis->close(); } /** @test */ public function has() { $cache = new RedisCache($this->getRedisHost(), $this->getRedisPort()); $cache->put('foo', 'bar', 1); static::assertTrue($cache->has('foo')); } /** @test */ public function has_not() { $cache = new RedisCache($this->getRedisHost(), $this->getRedisPort()); static::assertFalse($cache->has('foo')); } /** @test */ public function get_existing_key() { $cache = new RedisCache($this->getRedisHost(), $this->getRedisPort()); $cache->put('foo', 'bar', 5); static::assertTrue($cache->has('foo')); static::assertEquals('bar', $cache->get('foo')); } /** @test */ public function get_non_existing_key() { $cache = new RedisCache($this->getRedisHost(), $this->getRedisPort()); static::assertNull($cache->get('foo')); } /** @test */ public function pull_existing_key() { $cache = new RedisCache($this->getRedisHost(), $this->getRedisPort()); $cache->put('foo', 'bar', 5); static::assertTrue($cache->has('foo')); static::assertEquals('bar', $cache->pull('foo')); static::assertFalse($cache->has('foo')); static::assertNull($cache->get('foo')); } /** @test */ public function pull_non_existing_key() { $cache = new RedisCache($this->getRedisHost(), $this->getRedisPort()); static::assertNull($cache->pull('foo')); } /** @test */ public function pull_non_existing_key_with_default_value() { $cache = new ArrayCache(); static::assertEquals('bar', $cache->pull('foo', 'bar')); } /** * Get redis host. * * @return string */ protected function getRedisHost() { return $_ENV['REDIS_HOST'] ?? '127.0.0.1'; } /** * Get redis port. * * @return int */ protected function getRedisPort() { return (int) ($_ENV['REDIS_PORT'] ?? 6379); } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <root> <!-- Microsoft ResX Schema Version 2.0 The primary goals of this format is to allow a simple XML format that is mostly human readable. The generation and parsing of the various data types are done through the TypeConverter classes associated with the data types. Example: ... ado.net/XML headers & schema ... <resheader name="resmimetype">text/microsoft-resx</resheader> <resheader name="version">2.0</resheader> <resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader> <resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader> <data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data> <data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data> <data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64"> <value>[base64 mime encoded serialized .NET Framework object]</value> </data> <data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64"> <value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value> <comment>This is a comment</comment> </data> There are any number of "resheader" rows that contain simple name/value pairs. Each data row contains a name, and value. The row also contains a type or mimetype. Type corresponds to a .NET class that support text/value conversion through the TypeConverter architecture. Classes that don't support this are serialized and stored with the mimetype set. The mimetype is used for serialized objects, and tells the ResXResourceReader how to depersist the object. This is currently not extensible. For a given mimetype the value must be set accordingly: Note - application/x-microsoft.net.object.binary.base64 is the format that the ResXResourceWriter will generate, however the reader can read any of the formats listed below. mimetype: application/x-microsoft.net.object.binary.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Binary.BinaryFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.soap.base64 value : The object must be serialized with : System.Runtime.Serialization.Formatters.Soap.SoapFormatter : and then encoded with base64 encoding. mimetype: application/x-microsoft.net.object.bytearray.base64 value : The object must be serialized into a byte array : using a System.ComponentModel.TypeConverter : and then encoded with base64 encoding. --> <xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata"> <xsd:import namespace="http://www.w3.org/XML/1998/namespace" /> <xsd:element name="root" msdata:IsDataSet="true"> <xsd:complexType> <xsd:choice maxOccurs="unbounded"> <xsd:element name="metadata"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" /> </xsd:sequence> <xsd:attribute name="name" use="required" type="xsd:string" /> <xsd:attribute name="type" type="xsd:string" /> <xsd:attribute name="mimetype" type="xsd:string" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> <xsd:element name="assembly"> <xsd:complexType> <xsd:attribute name="alias" type="xsd:string" /> <xsd:attribute name="name" type="xsd:string" /> </xsd:complexType> </xsd:element> <xsd:element name="data"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> <xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" /> <xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" /> <xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" /> <xsd:attribute ref="xml:space" /> </xsd:complexType> </xsd:element> <xsd:element name="resheader"> <xsd:complexType> <xsd:sequence> <xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" /> </xsd:sequence> <xsd:attribute name="name" type="xsd:string" use="required" /> </xsd:complexType> </xsd:element> </xsd:choice> </xsd:complexType> </xsd:element> </xsd:schema> <resheader name="resmimetype"> <value>text/microsoft-resx</value> </resheader> <resheader name="version"> <value>2.0</value> </resheader> <resheader name="reader"> <value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <resheader name="writer"> <value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value> </resheader> <data name="General_ArgumentCannotBeNullOrEmpty" xml:space="preserve"> <value>Value cannot be null or empty.</value> </data> </root>
{ "pile_set_name": "Github" }
/* * Copyright 2004-2008 Sun Microsystems, Inc. All Rights Reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER * * This code is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License version 2 * only, as published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * General Public License version 2 for more details (a copy is * included in the LICENSE file that accompanied this code). * * You should have received a copy of the GNU General Public License * version 2 along with this work; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA * * Please contact Sun Microsystems, Inc., 16 Network Circle, Menlo * Park, CA 94025 or visit www.sun.com if you need additional * information or have any questions. */ package com.sun.squawk.platform.posix.natives; import com.sun.cldc.jna.*; /** * * Import common functions variables and constants from libc. */ @Includes({"<errno.h>", "<fcntl.h>", "<sys/stat.h>"}) public interface LibC extends Library { LibC INSTANCE = (LibC) Native.loadLibrary("RTLD", LibC.class); int EPERM = IMPORT, /* Operation not permitted */ ENOENT = IMPORT, /* No such file or directory */ ESRCH = IMPORT, /* No such process */ EINTR = IMPORT, /* Interrupted system call */ EIO = IMPORT, /* Input/output error */ ENXIO = IMPORT, /* Device not configured */ E2BIG = IMPORT, /* Argument list too long */ ENOEXEC = IMPORT, /* Exec format error */ EBADF = IMPORT, /* Bad file descriptor */ ECHILD = IMPORT, /* No child processes */ EDEADLK = IMPORT, /* Resource deadlock avoided */ /* 11 was EAGAIN */ ENOMEM = IMPORT, /* Cannot allocate memory */ EACCES = IMPORT, /* Permission denied */ EFAULT = IMPORT, /* Bad address */ EBUSY = IMPORT, /* Device busy */ EEXIST = IMPORT, /* File exists */ EXDEV = IMPORT, /* Cross-device link */ ENODEV = IMPORT, /* Operation not supported by device */ ENOTDIR = IMPORT, /* Not a directory */ EISDIR = IMPORT, /* Is a directory */ EINVAL = IMPORT, /* Invalid argument */ ENFILE = IMPORT, /* Too many open files in system */ EMFILE = IMPORT, /* Too many open files */ ENOTTY = IMPORT, /* Inappropriate ioctl for device */ ETXTBSY = IMPORT, /* Text file busy */ EFBIG = IMPORT, /* File too large */ ENOSPC = IMPORT, /* No space left on device */ ESPIPE = IMPORT, /* Illegal seek */ EROFS = IMPORT, /* Read-only file system */ EMLINK = IMPORT, /* Too many links */ EPIPE = IMPORT, /* Broken pipe */ /* math software */ EDOM = IMPORT, /* Numerical argument out of domain */ ERANGE = IMPORT, /* Result too large */ /* non-blocking and interrupt i/o */ EAGAIN = IMPORT, /* Resource temporarily unavailable */ EWOULDBLOCK = IMPORT, /* Operation would block */ EINPROGRESS = IMPORT, /* Operation now in progress */ EALREADY = IMPORT, /* Operation already in progress */ /* ipc/network software -- argument errors */ ENOTSOCK = IMPORT, /* Socket operation on non-socket */ EDESTADDRREQ = IMPORT, /* Destination address required */ EMSGSIZE = IMPORT, /* Message too long */ EPROTOTYPE = IMPORT, /* Protocol wrong type for socket */ ENOPROTOOPT = IMPORT, /* Protocol not available */ EPROTONOSUPPORT = IMPORT, /* Protocol not supported */ ENOTSUP = IMPORT, /* Operation not supported */ EAFNOSUPPORT = IMPORT, /* Address family not supported by protocol family */ EADDRINUSE = IMPORT, /* Address already in use */ EADDRNOTAVAIL = IMPORT, /* Can't assign requested address */ /* ipc/network software -- operational errors */ ENETDOWN = IMPORT, /* Network is down */ ENETUNREACH = IMPORT, /* Network is unreachable */ ENETRESET = IMPORT, /* Network dropped connection on reset */ ECONNABORTED = IMPORT, /* Software caused connection abort */ ECONNRESET = IMPORT, /* Connection reset by peer */ ENOBUFS = IMPORT, /* No buffer space available */ EISCONN = IMPORT, /* Socket is already connected */ ENOTCONN = IMPORT, /* Socket is not connected */ ETIMEDOUT = IMPORT, /* Operation timed out */ ECONNREFUSED = IMPORT, /* Connection refused */ ELOOP = IMPORT, /* Too many levels of symbolic links */ ENAMETOOLONG = IMPORT, /* File name too long */ /* should be rearranged */ EHOSTUNREACH = IMPORT, /* No route to host */ ENOTEMPTY = IMPORT, /* Directory not empty */ /* quotas & mush */ EDQUOT = IMPORT, /* Disc quota exceeded */ ENOLCK = IMPORT, /* No locks available */ ENOSYS = IMPORT, /* Function not implemented */ EOVERFLOW = IMPORT, /* Value too large to be stored in data type */ ECANCELED = IMPORT, /* Operation canceled */ EIDRM = IMPORT, /* Identifier removed */ ENOMSG = IMPORT, /* No message of desired type */ EILSEQ = IMPORT, /* Illegal byte sequence */ EBADMSG = IMPORT, /* Bad message */ EMULTIHOP = IMPORT, /* Reserved */ ENODATA = IMPORT, /* No message available on STREAM */ ENOLINK = IMPORT, /* Reserved */ ENOSR = IMPORT, /* No STREAM resources */ ENOSTR = IMPORT, /* Not a STREAM */ EPROTO = IMPORT, /* Protocol error */ ETIME = IMPORT, /* STREAM ioctl timeout */ /* command values */ F_DUPFD = IMPORT, /* duplicate file descriptor */ F_GETFD = IMPORT, /* get file descriptor flags */ F_SETFD = IMPORT, /* set file descriptor flags */ F_GETFL = IMPORT, /* get file status flags */ F_SETFL = IMPORT, /* set file status flags */ /* * File status flags: these are used by open(2), fcntl(2). * They are also used (indirectly) in the kernel file structure f_flags, * which is a superset of the open/fcntl flags. Open flags and f_flags * are inter-convertible using OFLAGS(fflags) and FFLAGS(oflags). * Open/fcntl flags begin with O_; kernel-internal flags begin with F. */ /* open-only flags */ O_RDONLY = IMPORT, /* open for reading only */ O_WRONLY = IMPORT, /* open for writing only */ O_RDWR = IMPORT, /* open for reading and writing */ O_ACCMODE = IMPORT, /* mask for above modes */ O_NONBLOCK = IMPORT, /* no delay */ O_APPEND = IMPORT, /* set append mode */ O_SYNC = IMPORT, /* synchronous writes */ O_CREAT = IMPORT, /* create if nonexistant */ O_TRUNC = IMPORT, /* truncate to zero length */ O_EXCL = IMPORT, /* error if already exists */ /* [XSI] directory restrcted delete */ /* [XSI] directory */ S_IFBLK = IMPORT, /* [XSI] named pipe (fifo) */ S_IFCHR = IMPORT, /* [XSI] character special */ S_IFDIR = IMPORT, /* [XSI] type of file mask */ S_IFIFO = IMPORT, /* [XSI] regular */ S_IFLNK = IMPORT, /* * [XSI] The following are symbolic names for the values of type mode_t. They * are bitmap values. */ /* File type */ S_IFMT = IMPORT, /* [XSI] block special */ S_IFREG = IMPORT, /* [XSI] symbolic link */ S_IFSOCK = IMPORT, /* [XSI] RWX mask for group */ S_IRGRP = IMPORT, /* [XSI] RWX mask for other */ S_IROTH = IMPORT, /* [XSI] RWX mask for owner */ S_IRUSR = IMPORT, /* [XSI] X for owner */ /* Read, write, execute/search by group */ S_IRWXG = IMPORT, /* [XSI] X for group */ /* Read, write, execute/search by others */ S_IRWXO = IMPORT, /* [XSI] socket */ /* File mode */ /* Read, write, execute/search by owner */ S_IRWXU = IMPORT, /* [XSI] set user id on execution */ S_ISGID = IMPORT, /* [XSI] X for other */ S_ISUID = IMPORT, /* [XSI] set group id on execution */ S_ISVTX = IMPORT, /* [XSI] R for group */ S_IWGRP = IMPORT, /* [XSI] R for other */ S_IWOTH = IMPORT, /* [XSI] R for owner */ S_IWUSR = IMPORT, /* [XSI] W for group */ S_IXGRP = IMPORT, /* [XSI] W for other */ S_IXOTH = IMPORT, /* [XSI] W for owner */ S_IXUSR = IMPORT, /** set file offset to offset */ SEEK_SET = IMPORT, /** set file offset to current plus offset */ SEEK_CUR = IMPORT, /** set file offset to EOF plus offset */ SEEK_END = IMPORT ; // END OF DEFINES /** * Gets the value of the C variable "errno". * Only valid after certain system calls, and only if the system call failed in some way. * * @return typically a positive number */ @GlobalVar int errno(); /** * provides for control over descriptors. * * @param fd a descriptor to be operated on by cmd * @param cmd one of the cmd constants * @param arg * @return a value that depends on the cmd. */ int fcntl(int fd, int cmd, int arg); /** * open or create a file for reading or writing * * @param name String * @param oflag std libc open flags * @param mode the mode for any created file * @return If successful, returns a non-negative integer, termed a file descriptor. Returns * -1 on failure, and sets errno to indicate the error. */ int open(String name, int oflag, int mode); /** * delete a descriptor * * @param fd a descriptor to be operated on by cmd * @return Upon successful completion, a value of 0 is returned. Otherwise, a value of -1 is returned * and the global integer variable errno is set to indicate the error. */ int close(int fd); /** * Flush output on a descriptor * * @param fd a descriptor to be flushed * @return Upon successful completion, a value of 0 is returned. Otherwise, a value of -1 is returned * and the global integer variable errno is set to indicate the error. */ int fsync(int fd); /** * reposition read/write file offset * * @param fd file descriptor * @param offset the offset to seek to * @param whence the kind of offset (SEEK_SET, SEEK_CUR, or SEEK_END) * @return the resulting offset location as measured in * bytes from the beginning of the file. If error, -1 is returned and errno is set * to indicate the error. */ int lseek(int fd, long offset, int whence); /** * read input * * @param fd file descriptor * @param buf data buffer to read into * @param nbyte number of bytes to read * @return the number of bytes actually read is returned. Upon reading end-of-file, zero * is returned. If error, a -1 is returned and the global variable errno is set to indicate * the error */ int read(int fd, byte[] buf, int nbyte); /** * write output * * @param fd file descriptor * @param buf data buffer to write * @param nbyte number of bytes to read * @return the number of bytes which were written is returned. If error, * -1 is returned and the global variable errno is set to indicate the error. */ int write(int fd, byte[] buf, int nbyte); /** * C struct stat * // struct stat { * // dev_t st_dev; /* [XSI] ID of device containing file 4 0 * // ino_t st_ino; /* [XSI] File serial number 4 4 * // mode_t st_mode; /* [XSI] Mode of file (see below) 2 8 * // nlink_t st_nlink; /* [XSI] Number of hard links 2 10 * // uid_t st_uid; /* [XSI] User ID of the file 4 12 * // gid_t st_gid; /* [XSI] Group ID of the file 4 16 * // dev_t st_rdev; /* [XSI] Device ID 4 20 * // time_t st_atime; /* [XSI] Time of last access 4 24 * // long st_atimensec; /* nsec of last access 4 28 * // time_t st_mtime; /* [XSI] Last data modification time 4 32 * // long st_mtimensec; /* last data modification nsec 4 36 * // time_t st_ctime; /* [XSI] Time of last status change 4 40 * // long st_ctimensec; /* nsec of last status change 4 44 * // off_t st_size; /* [XSI] file size, in bytes 8 48 * // blkcnt_t st_blocks; /* [XSI] blocks allocated for file 8 * // blksize_t st_blksize; /* [XSI] optimal blocksize for I/O 4 * // __uint32_t st_flags; /* user defined flags for file 4 * // __uint32_t st_gen; /* file generation number 4 * // __int32_t st_lspare; /* RESERVED: DO NOT USE! 4 * // __int64_t st_qspare[2]; /* RESERVED: DO NOT USE! 16 * // }; */ public static class stat extends Structure { public final static int EPERM = IMPORT; /** mode_t */ public int st_mode; /** time_t Last data modification time */ public int st_mtime; /** file size, in bytes */ public long st_size; } /** * Get information on the open file with file descriptor "fd". * * @param fd file descriptor * @param stat Stat structure that will be filled with the current values * @return -1 is returned if an error occurs, otherwise zero is returned */ int fstat(int fd, stat stat); /** * Get information on the named "name". * * @param name String * @param stat Stat structure that will be filled with the current values * @return -1 is returned if an error occurs, otherwise zero is returned */ int stat(String name, stat stat); }
{ "pile_set_name": "Github" }
// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /*--- info: > If S contains any character that is not a radix-R digit, then let Z be the substring of S consisting of all characters before the first such character; otherwise, let Z be S es5id: 15.1.2.2_A6.1_T2 description: Complex test. Radix-R notation in [0..9, A-Z] ---*/ //CHECK# var R_digit = ["1", "2", "3", "4", "5", "6", "7", "8", "9", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"]; for (var i = 2; i <= 36; i++) { if (parseInt(R_digit[i - 2] + "$", i) !== i - 1) { $ERROR('#' + i + ': '); } }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <!-- Copyright (C) 2014 The Android Open Source Project Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <selector xmlns:android="http://schemas.android.com/apk/res/android"> <item android:state_accelerated="false" android:color="@color/background_material_light" /> <item android:color="@android:color/transparent" /> </selector> <!-- From: file:/usr/local/google/buildbot/repo_clients/https___googleplex-android.googlesource.com_a_platform_manifest.git/mnc-sdk-release/frameworks/support/v7/appcompat/res/color-v11/abc_background_cache_hint_selector_material_light.xml -->
{ "pile_set_name": "Github" }
/* * Copyright (c) 2010-2013 Evolveum and contributors * * This work is dual-licensed under the Apache License 2.0 * and European Union Public License. See LICENSE file for details. */ package com.evolveum.midpoint.util; /** * Very simple object to hold a single value. This comes handy when a final value is required but there is a * need to use immutable value (e.g. int, String). This comes very handy in Java anonymous instances (almost-closures). * * @author Radovan Semancik */ public class Holder<T> { private T value; public Holder() { super(); this.value = null; } public Holder(T value) { super(); this.value = value; } public T getValue() { return value; } public void setValue(T value) { this.value = value; } public boolean isEmpty() { return value == null; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + ((value == null) ? 0 : value.hashCode()); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } Holder other = (Holder) obj; if (value == null) { if (other.value != null) { return false; } } else if (!value.equals(other.value)) { return false; } return true; } @Override public String toString() { return "Holder(" + value + ")"; } }
{ "pile_set_name": "Github" }
/** * 颜色 */ @import "../var.scss"; //面板 .mt-tag{ display: inline-block; padding: 2px 5px; }
{ "pile_set_name": "Github" }
#define BORINGSSL_PREFIX CNIOBoringSSL #if defined(__aarch64__) && defined(__linux__) // This file is generated from a similarly-named Perl script in the BoringSSL // source tree. Do not edit by hand. #if !defined(__has_feature) #define __has_feature(x) 0 #endif #if __has_feature(memory_sanitizer) && !defined(OPENSSL_NO_ASM) #define OPENSSL_NO_ASM #endif #if !defined(OPENSSL_NO_ASM) #if defined(__aarch64__) #if defined(BORINGSSL_PREFIX) #include <CNIOBoringSSL_boringssl_prefix_symbols_asm.h> #endif // Copyright 2014-2016 The OpenSSL Project Authors. All Rights Reserved. // // Licensed under the OpenSSL license (the "License"). You may not use // this file except in compliance with the License. You can obtain a copy // in the file LICENSE in the source distribution or at // https://www.openssl.org/source/license.html // ==================================================================== // Written by Andy Polyakov <[email protected]> for the OpenSSL // project. The module is, however, dual licensed under OpenSSL and // CRYPTOGAMS licenses depending on where you obtain it. For further // details see http://www.openssl.org/~appro/cryptogams/. // // Permission to use under GPLv2 terms is granted. // ==================================================================== // // SHA256/512 for ARMv8. // // Performance in cycles per processed byte and improvement coefficient // over code generated with "default" compiler: // // SHA256-hw SHA256(*) SHA512 // Apple A7 1.97 10.5 (+33%) 6.73 (-1%(**)) // Cortex-A53 2.38 15.5 (+115%) 10.0 (+150%(***)) // Cortex-A57 2.31 11.6 (+86%) 7.51 (+260%(***)) // Denver 2.01 10.5 (+26%) 6.70 (+8%) // X-Gene 20.0 (+100%) 12.8 (+300%(***)) // Mongoose 2.36 13.0 (+50%) 8.36 (+33%) // // (*) Software SHA256 results are of lesser relevance, presented // mostly for informational purposes. // (**) The result is a trade-off: it's possible to improve it by // 10% (or by 1 cycle per round), but at the cost of 20% loss // on Cortex-A53 (or by 4 cycles per round). // (***) Super-impressive coefficients over gcc-generated code are // indication of some compiler "pathology", most notably code // generated with -mgeneral-regs-only is significanty faster // and the gap is only 40-90%. #ifndef __KERNEL__ # include <CNIOBoringSSL_arm_arch.h> #endif .text .globl sha512_block_data_order .hidden sha512_block_data_order .type sha512_block_data_order,%function .align 6 sha512_block_data_order: AARCH64_SIGN_LINK_REGISTER stp x29,x30,[sp,#-128]! add x29,sp,#0 stp x19,x20,[sp,#16] stp x21,x22,[sp,#32] stp x23,x24,[sp,#48] stp x25,x26,[sp,#64] stp x27,x28,[sp,#80] sub sp,sp,#4*8 ldp x20,x21,[x0] // load context ldp x22,x23,[x0,#2*8] ldp x24,x25,[x0,#4*8] add x2,x1,x2,lsl#7 // end of input ldp x26,x27,[x0,#6*8] adrp x30,.LK512 add x30,x30,:lo12:.LK512 stp x0,x2,[x29,#96] .Loop: ldp x3,x4,[x1],#2*8 ldr x19,[x30],#8 // *K++ eor x28,x21,x22 // magic seed str x1,[x29,#112] #ifndef __ARMEB__ rev x3,x3 // 0 #endif ror x16,x24,#14 add x27,x27,x19 // h+=K[i] eor x6,x24,x24,ror#23 and x17,x25,x24 bic x19,x26,x24 add x27,x27,x3 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x6,ror#18 // Sigma1(e) ror x6,x20,#28 add x27,x27,x17 // h+=Ch(e,f,g) eor x17,x20,x20,ror#5 add x27,x27,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x23,x23,x27 // d+=h eor x28,x28,x21 // Maj(a,b,c) eor x17,x6,x17,ror#34 // Sigma0(a) add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x27,x27,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x4,x4 // 1 #endif ldp x5,x6,[x1],#2*8 add x27,x27,x17 // h+=Sigma0(a) ror x16,x23,#14 add x26,x26,x28 // h+=K[i] eor x7,x23,x23,ror#23 and x17,x24,x23 bic x28,x25,x23 add x26,x26,x4 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x7,ror#18 // Sigma1(e) ror x7,x27,#28 add x26,x26,x17 // h+=Ch(e,f,g) eor x17,x27,x27,ror#5 add x26,x26,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x22,x22,x26 // d+=h eor x19,x19,x20 // Maj(a,b,c) eor x17,x7,x17,ror#34 // Sigma0(a) add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x26,x26,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x5,x5 // 2 #endif add x26,x26,x17 // h+=Sigma0(a) ror x16,x22,#14 add x25,x25,x19 // h+=K[i] eor x8,x22,x22,ror#23 and x17,x23,x22 bic x19,x24,x22 add x25,x25,x5 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x8,ror#18 // Sigma1(e) ror x8,x26,#28 add x25,x25,x17 // h+=Ch(e,f,g) eor x17,x26,x26,ror#5 add x25,x25,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x21,x21,x25 // d+=h eor x28,x28,x27 // Maj(a,b,c) eor x17,x8,x17,ror#34 // Sigma0(a) add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x25,x25,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x6,x6 // 3 #endif ldp x7,x8,[x1],#2*8 add x25,x25,x17 // h+=Sigma0(a) ror x16,x21,#14 add x24,x24,x28 // h+=K[i] eor x9,x21,x21,ror#23 and x17,x22,x21 bic x28,x23,x21 add x24,x24,x6 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x9,ror#18 // Sigma1(e) ror x9,x25,#28 add x24,x24,x17 // h+=Ch(e,f,g) eor x17,x25,x25,ror#5 add x24,x24,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x20,x20,x24 // d+=h eor x19,x19,x26 // Maj(a,b,c) eor x17,x9,x17,ror#34 // Sigma0(a) add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x24,x24,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x7,x7 // 4 #endif add x24,x24,x17 // h+=Sigma0(a) ror x16,x20,#14 add x23,x23,x19 // h+=K[i] eor x10,x20,x20,ror#23 and x17,x21,x20 bic x19,x22,x20 add x23,x23,x7 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x10,ror#18 // Sigma1(e) ror x10,x24,#28 add x23,x23,x17 // h+=Ch(e,f,g) eor x17,x24,x24,ror#5 add x23,x23,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x27,x27,x23 // d+=h eor x28,x28,x25 // Maj(a,b,c) eor x17,x10,x17,ror#34 // Sigma0(a) add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x23,x23,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x8,x8 // 5 #endif ldp x9,x10,[x1],#2*8 add x23,x23,x17 // h+=Sigma0(a) ror x16,x27,#14 add x22,x22,x28 // h+=K[i] eor x11,x27,x27,ror#23 and x17,x20,x27 bic x28,x21,x27 add x22,x22,x8 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x11,ror#18 // Sigma1(e) ror x11,x23,#28 add x22,x22,x17 // h+=Ch(e,f,g) eor x17,x23,x23,ror#5 add x22,x22,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x26,x26,x22 // d+=h eor x19,x19,x24 // Maj(a,b,c) eor x17,x11,x17,ror#34 // Sigma0(a) add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x22,x22,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x9,x9 // 6 #endif add x22,x22,x17 // h+=Sigma0(a) ror x16,x26,#14 add x21,x21,x19 // h+=K[i] eor x12,x26,x26,ror#23 and x17,x27,x26 bic x19,x20,x26 add x21,x21,x9 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x12,ror#18 // Sigma1(e) ror x12,x22,#28 add x21,x21,x17 // h+=Ch(e,f,g) eor x17,x22,x22,ror#5 add x21,x21,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x25,x25,x21 // d+=h eor x28,x28,x23 // Maj(a,b,c) eor x17,x12,x17,ror#34 // Sigma0(a) add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x21,x21,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x10,x10 // 7 #endif ldp x11,x12,[x1],#2*8 add x21,x21,x17 // h+=Sigma0(a) ror x16,x25,#14 add x20,x20,x28 // h+=K[i] eor x13,x25,x25,ror#23 and x17,x26,x25 bic x28,x27,x25 add x20,x20,x10 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x13,ror#18 // Sigma1(e) ror x13,x21,#28 add x20,x20,x17 // h+=Ch(e,f,g) eor x17,x21,x21,ror#5 add x20,x20,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x24,x24,x20 // d+=h eor x19,x19,x22 // Maj(a,b,c) eor x17,x13,x17,ror#34 // Sigma0(a) add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x20,x20,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x11,x11 // 8 #endif add x20,x20,x17 // h+=Sigma0(a) ror x16,x24,#14 add x27,x27,x19 // h+=K[i] eor x14,x24,x24,ror#23 and x17,x25,x24 bic x19,x26,x24 add x27,x27,x11 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x14,ror#18 // Sigma1(e) ror x14,x20,#28 add x27,x27,x17 // h+=Ch(e,f,g) eor x17,x20,x20,ror#5 add x27,x27,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x23,x23,x27 // d+=h eor x28,x28,x21 // Maj(a,b,c) eor x17,x14,x17,ror#34 // Sigma0(a) add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x27,x27,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x12,x12 // 9 #endif ldp x13,x14,[x1],#2*8 add x27,x27,x17 // h+=Sigma0(a) ror x16,x23,#14 add x26,x26,x28 // h+=K[i] eor x15,x23,x23,ror#23 and x17,x24,x23 bic x28,x25,x23 add x26,x26,x12 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x15,ror#18 // Sigma1(e) ror x15,x27,#28 add x26,x26,x17 // h+=Ch(e,f,g) eor x17,x27,x27,ror#5 add x26,x26,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x22,x22,x26 // d+=h eor x19,x19,x20 // Maj(a,b,c) eor x17,x15,x17,ror#34 // Sigma0(a) add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x26,x26,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x13,x13 // 10 #endif add x26,x26,x17 // h+=Sigma0(a) ror x16,x22,#14 add x25,x25,x19 // h+=K[i] eor x0,x22,x22,ror#23 and x17,x23,x22 bic x19,x24,x22 add x25,x25,x13 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x0,ror#18 // Sigma1(e) ror x0,x26,#28 add x25,x25,x17 // h+=Ch(e,f,g) eor x17,x26,x26,ror#5 add x25,x25,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x21,x21,x25 // d+=h eor x28,x28,x27 // Maj(a,b,c) eor x17,x0,x17,ror#34 // Sigma0(a) add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x25,x25,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x14,x14 // 11 #endif ldp x15,x0,[x1],#2*8 add x25,x25,x17 // h+=Sigma0(a) str x6,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] eor x6,x21,x21,ror#23 and x17,x22,x21 bic x28,x23,x21 add x24,x24,x14 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x6,ror#18 // Sigma1(e) ror x6,x25,#28 add x24,x24,x17 // h+=Ch(e,f,g) eor x17,x25,x25,ror#5 add x24,x24,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x20,x20,x24 // d+=h eor x19,x19,x26 // Maj(a,b,c) eor x17,x6,x17,ror#34 // Sigma0(a) add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x24,x24,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x15,x15 // 12 #endif add x24,x24,x17 // h+=Sigma0(a) str x7,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] eor x7,x20,x20,ror#23 and x17,x21,x20 bic x19,x22,x20 add x23,x23,x15 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x7,ror#18 // Sigma1(e) ror x7,x24,#28 add x23,x23,x17 // h+=Ch(e,f,g) eor x17,x24,x24,ror#5 add x23,x23,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x27,x27,x23 // d+=h eor x28,x28,x25 // Maj(a,b,c) eor x17,x7,x17,ror#34 // Sigma0(a) add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x23,x23,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x0,x0 // 13 #endif ldp x1,x2,[x1] add x23,x23,x17 // h+=Sigma0(a) str x8,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] eor x8,x27,x27,ror#23 and x17,x20,x27 bic x28,x21,x27 add x22,x22,x0 // h+=X[i] orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x8,ror#18 // Sigma1(e) ror x8,x23,#28 add x22,x22,x17 // h+=Ch(e,f,g) eor x17,x23,x23,ror#5 add x22,x22,x16 // h+=Sigma1(e) and x19,x19,x28 // (b^c)&=(a^b) add x26,x26,x22 // d+=h eor x19,x19,x24 // Maj(a,b,c) eor x17,x8,x17,ror#34 // Sigma0(a) add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round //add x22,x22,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x1,x1 // 14 #endif ldr x6,[sp,#24] add x22,x22,x17 // h+=Sigma0(a) str x9,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] eor x9,x26,x26,ror#23 and x17,x27,x26 bic x19,x20,x26 add x21,x21,x1 // h+=X[i] orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x9,ror#18 // Sigma1(e) ror x9,x22,#28 add x21,x21,x17 // h+=Ch(e,f,g) eor x17,x22,x22,ror#5 add x21,x21,x16 // h+=Sigma1(e) and x28,x28,x19 // (b^c)&=(a^b) add x25,x25,x21 // d+=h eor x28,x28,x23 // Maj(a,b,c) eor x17,x9,x17,ror#34 // Sigma0(a) add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round //add x21,x21,x17 // h+=Sigma0(a) #ifndef __ARMEB__ rev x2,x2 // 15 #endif ldr x7,[sp,#0] add x21,x21,x17 // h+=Sigma0(a) str x10,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x9,x4,#1 and x17,x26,x25 ror x8,x1,#19 bic x28,x27,x25 ror x10,x21,#28 add x20,x20,x2 // h+=X[i] eor x16,x16,x25,ror#18 eor x9,x9,x4,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x10,x10,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x8,x8,x1,ror#61 eor x9,x9,x4,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x10,x21,ror#39 // Sigma0(a) eor x8,x8,x1,lsr#6 // sigma1(X[i+14]) add x3,x3,x12 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x3,x3,x9 add x20,x20,x17 // h+=Sigma0(a) add x3,x3,x8 .Loop_16_xx: ldr x8,[sp,#8] str x11,[sp,#0] ror x16,x24,#14 add x27,x27,x19 // h+=K[i] ror x10,x5,#1 and x17,x25,x24 ror x9,x2,#19 bic x19,x26,x24 ror x11,x20,#28 add x27,x27,x3 // h+=X[i] eor x16,x16,x24,ror#18 eor x10,x10,x5,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x24,ror#41 // Sigma1(e) eor x11,x11,x20,ror#34 add x27,x27,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x9,x9,x2,ror#61 eor x10,x10,x5,lsr#7 // sigma0(X[i+1]) add x27,x27,x16 // h+=Sigma1(e) eor x28,x28,x21 // Maj(a,b,c) eor x17,x11,x20,ror#39 // Sigma0(a) eor x9,x9,x2,lsr#6 // sigma1(X[i+14]) add x4,x4,x13 add x23,x23,x27 // d+=h add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x4,x4,x10 add x27,x27,x17 // h+=Sigma0(a) add x4,x4,x9 ldr x9,[sp,#16] str x12,[sp,#8] ror x16,x23,#14 add x26,x26,x28 // h+=K[i] ror x11,x6,#1 and x17,x24,x23 ror x10,x3,#19 bic x28,x25,x23 ror x12,x27,#28 add x26,x26,x4 // h+=X[i] eor x16,x16,x23,ror#18 eor x11,x11,x6,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x23,ror#41 // Sigma1(e) eor x12,x12,x27,ror#34 add x26,x26,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x10,x10,x3,ror#61 eor x11,x11,x6,lsr#7 // sigma0(X[i+1]) add x26,x26,x16 // h+=Sigma1(e) eor x19,x19,x20 // Maj(a,b,c) eor x17,x12,x27,ror#39 // Sigma0(a) eor x10,x10,x3,lsr#6 // sigma1(X[i+14]) add x5,x5,x14 add x22,x22,x26 // d+=h add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x5,x5,x11 add x26,x26,x17 // h+=Sigma0(a) add x5,x5,x10 ldr x10,[sp,#24] str x13,[sp,#16] ror x16,x22,#14 add x25,x25,x19 // h+=K[i] ror x12,x7,#1 and x17,x23,x22 ror x11,x4,#19 bic x19,x24,x22 ror x13,x26,#28 add x25,x25,x5 // h+=X[i] eor x16,x16,x22,ror#18 eor x12,x12,x7,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x22,ror#41 // Sigma1(e) eor x13,x13,x26,ror#34 add x25,x25,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x11,x11,x4,ror#61 eor x12,x12,x7,lsr#7 // sigma0(X[i+1]) add x25,x25,x16 // h+=Sigma1(e) eor x28,x28,x27 // Maj(a,b,c) eor x17,x13,x26,ror#39 // Sigma0(a) eor x11,x11,x4,lsr#6 // sigma1(X[i+14]) add x6,x6,x15 add x21,x21,x25 // d+=h add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x6,x6,x12 add x25,x25,x17 // h+=Sigma0(a) add x6,x6,x11 ldr x11,[sp,#0] str x14,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] ror x13,x8,#1 and x17,x22,x21 ror x12,x5,#19 bic x28,x23,x21 ror x14,x25,#28 add x24,x24,x6 // h+=X[i] eor x16,x16,x21,ror#18 eor x13,x13,x8,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x21,ror#41 // Sigma1(e) eor x14,x14,x25,ror#34 add x24,x24,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x12,x12,x5,ror#61 eor x13,x13,x8,lsr#7 // sigma0(X[i+1]) add x24,x24,x16 // h+=Sigma1(e) eor x19,x19,x26 // Maj(a,b,c) eor x17,x14,x25,ror#39 // Sigma0(a) eor x12,x12,x5,lsr#6 // sigma1(X[i+14]) add x7,x7,x0 add x20,x20,x24 // d+=h add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x7,x7,x13 add x24,x24,x17 // h+=Sigma0(a) add x7,x7,x12 ldr x12,[sp,#8] str x15,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] ror x14,x9,#1 and x17,x21,x20 ror x13,x6,#19 bic x19,x22,x20 ror x15,x24,#28 add x23,x23,x7 // h+=X[i] eor x16,x16,x20,ror#18 eor x14,x14,x9,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x20,ror#41 // Sigma1(e) eor x15,x15,x24,ror#34 add x23,x23,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x13,x13,x6,ror#61 eor x14,x14,x9,lsr#7 // sigma0(X[i+1]) add x23,x23,x16 // h+=Sigma1(e) eor x28,x28,x25 // Maj(a,b,c) eor x17,x15,x24,ror#39 // Sigma0(a) eor x13,x13,x6,lsr#6 // sigma1(X[i+14]) add x8,x8,x1 add x27,x27,x23 // d+=h add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x8,x8,x14 add x23,x23,x17 // h+=Sigma0(a) add x8,x8,x13 ldr x13,[sp,#16] str x0,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] ror x15,x10,#1 and x17,x20,x27 ror x14,x7,#19 bic x28,x21,x27 ror x0,x23,#28 add x22,x22,x8 // h+=X[i] eor x16,x16,x27,ror#18 eor x15,x15,x10,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x27,ror#41 // Sigma1(e) eor x0,x0,x23,ror#34 add x22,x22,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x14,x14,x7,ror#61 eor x15,x15,x10,lsr#7 // sigma0(X[i+1]) add x22,x22,x16 // h+=Sigma1(e) eor x19,x19,x24 // Maj(a,b,c) eor x17,x0,x23,ror#39 // Sigma0(a) eor x14,x14,x7,lsr#6 // sigma1(X[i+14]) add x9,x9,x2 add x26,x26,x22 // d+=h add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x9,x9,x15 add x22,x22,x17 // h+=Sigma0(a) add x9,x9,x14 ldr x14,[sp,#24] str x1,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] ror x0,x11,#1 and x17,x27,x26 ror x15,x8,#19 bic x19,x20,x26 ror x1,x22,#28 add x21,x21,x9 // h+=X[i] eor x16,x16,x26,ror#18 eor x0,x0,x11,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x26,ror#41 // Sigma1(e) eor x1,x1,x22,ror#34 add x21,x21,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x15,x15,x8,ror#61 eor x0,x0,x11,lsr#7 // sigma0(X[i+1]) add x21,x21,x16 // h+=Sigma1(e) eor x28,x28,x23 // Maj(a,b,c) eor x17,x1,x22,ror#39 // Sigma0(a) eor x15,x15,x8,lsr#6 // sigma1(X[i+14]) add x10,x10,x3 add x25,x25,x21 // d+=h add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x10,x10,x0 add x21,x21,x17 // h+=Sigma0(a) add x10,x10,x15 ldr x15,[sp,#0] str x2,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x1,x12,#1 and x17,x26,x25 ror x0,x9,#19 bic x28,x27,x25 ror x2,x21,#28 add x20,x20,x10 // h+=X[i] eor x16,x16,x25,ror#18 eor x1,x1,x12,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x2,x2,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x0,x0,x9,ror#61 eor x1,x1,x12,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x2,x21,ror#39 // Sigma0(a) eor x0,x0,x9,lsr#6 // sigma1(X[i+14]) add x11,x11,x4 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x11,x11,x1 add x20,x20,x17 // h+=Sigma0(a) add x11,x11,x0 ldr x0,[sp,#8] str x3,[sp,#0] ror x16,x24,#14 add x27,x27,x19 // h+=K[i] ror x2,x13,#1 and x17,x25,x24 ror x1,x10,#19 bic x19,x26,x24 ror x3,x20,#28 add x27,x27,x11 // h+=X[i] eor x16,x16,x24,ror#18 eor x2,x2,x13,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x20,x21 // a^b, b^c in next round eor x16,x16,x24,ror#41 // Sigma1(e) eor x3,x3,x20,ror#34 add x27,x27,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x1,x1,x10,ror#61 eor x2,x2,x13,lsr#7 // sigma0(X[i+1]) add x27,x27,x16 // h+=Sigma1(e) eor x28,x28,x21 // Maj(a,b,c) eor x17,x3,x20,ror#39 // Sigma0(a) eor x1,x1,x10,lsr#6 // sigma1(X[i+14]) add x12,x12,x5 add x23,x23,x27 // d+=h add x27,x27,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x12,x12,x2 add x27,x27,x17 // h+=Sigma0(a) add x12,x12,x1 ldr x1,[sp,#16] str x4,[sp,#8] ror x16,x23,#14 add x26,x26,x28 // h+=K[i] ror x3,x14,#1 and x17,x24,x23 ror x2,x11,#19 bic x28,x25,x23 ror x4,x27,#28 add x26,x26,x12 // h+=X[i] eor x16,x16,x23,ror#18 eor x3,x3,x14,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x27,x20 // a^b, b^c in next round eor x16,x16,x23,ror#41 // Sigma1(e) eor x4,x4,x27,ror#34 add x26,x26,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x2,x2,x11,ror#61 eor x3,x3,x14,lsr#7 // sigma0(X[i+1]) add x26,x26,x16 // h+=Sigma1(e) eor x19,x19,x20 // Maj(a,b,c) eor x17,x4,x27,ror#39 // Sigma0(a) eor x2,x2,x11,lsr#6 // sigma1(X[i+14]) add x13,x13,x6 add x22,x22,x26 // d+=h add x26,x26,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x13,x13,x3 add x26,x26,x17 // h+=Sigma0(a) add x13,x13,x2 ldr x2,[sp,#24] str x5,[sp,#16] ror x16,x22,#14 add x25,x25,x19 // h+=K[i] ror x4,x15,#1 and x17,x23,x22 ror x3,x12,#19 bic x19,x24,x22 ror x5,x26,#28 add x25,x25,x13 // h+=X[i] eor x16,x16,x22,ror#18 eor x4,x4,x15,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x26,x27 // a^b, b^c in next round eor x16,x16,x22,ror#41 // Sigma1(e) eor x5,x5,x26,ror#34 add x25,x25,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x3,x3,x12,ror#61 eor x4,x4,x15,lsr#7 // sigma0(X[i+1]) add x25,x25,x16 // h+=Sigma1(e) eor x28,x28,x27 // Maj(a,b,c) eor x17,x5,x26,ror#39 // Sigma0(a) eor x3,x3,x12,lsr#6 // sigma1(X[i+14]) add x14,x14,x7 add x21,x21,x25 // d+=h add x25,x25,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x14,x14,x4 add x25,x25,x17 // h+=Sigma0(a) add x14,x14,x3 ldr x3,[sp,#0] str x6,[sp,#24] ror x16,x21,#14 add x24,x24,x28 // h+=K[i] ror x5,x0,#1 and x17,x22,x21 ror x4,x13,#19 bic x28,x23,x21 ror x6,x25,#28 add x24,x24,x14 // h+=X[i] eor x16,x16,x21,ror#18 eor x5,x5,x0,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x25,x26 // a^b, b^c in next round eor x16,x16,x21,ror#41 // Sigma1(e) eor x6,x6,x25,ror#34 add x24,x24,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x4,x4,x13,ror#61 eor x5,x5,x0,lsr#7 // sigma0(X[i+1]) add x24,x24,x16 // h+=Sigma1(e) eor x19,x19,x26 // Maj(a,b,c) eor x17,x6,x25,ror#39 // Sigma0(a) eor x4,x4,x13,lsr#6 // sigma1(X[i+14]) add x15,x15,x8 add x20,x20,x24 // d+=h add x24,x24,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x15,x15,x5 add x24,x24,x17 // h+=Sigma0(a) add x15,x15,x4 ldr x4,[sp,#8] str x7,[sp,#0] ror x16,x20,#14 add x23,x23,x19 // h+=K[i] ror x6,x1,#1 and x17,x21,x20 ror x5,x14,#19 bic x19,x22,x20 ror x7,x24,#28 add x23,x23,x15 // h+=X[i] eor x16,x16,x20,ror#18 eor x6,x6,x1,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x24,x25 // a^b, b^c in next round eor x16,x16,x20,ror#41 // Sigma1(e) eor x7,x7,x24,ror#34 add x23,x23,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x5,x5,x14,ror#61 eor x6,x6,x1,lsr#7 // sigma0(X[i+1]) add x23,x23,x16 // h+=Sigma1(e) eor x28,x28,x25 // Maj(a,b,c) eor x17,x7,x24,ror#39 // Sigma0(a) eor x5,x5,x14,lsr#6 // sigma1(X[i+14]) add x0,x0,x9 add x27,x27,x23 // d+=h add x23,x23,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x0,x0,x6 add x23,x23,x17 // h+=Sigma0(a) add x0,x0,x5 ldr x5,[sp,#16] str x8,[sp,#8] ror x16,x27,#14 add x22,x22,x28 // h+=K[i] ror x7,x2,#1 and x17,x20,x27 ror x6,x15,#19 bic x28,x21,x27 ror x8,x23,#28 add x22,x22,x0 // h+=X[i] eor x16,x16,x27,ror#18 eor x7,x7,x2,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x23,x24 // a^b, b^c in next round eor x16,x16,x27,ror#41 // Sigma1(e) eor x8,x8,x23,ror#34 add x22,x22,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x6,x6,x15,ror#61 eor x7,x7,x2,lsr#7 // sigma0(X[i+1]) add x22,x22,x16 // h+=Sigma1(e) eor x19,x19,x24 // Maj(a,b,c) eor x17,x8,x23,ror#39 // Sigma0(a) eor x6,x6,x15,lsr#6 // sigma1(X[i+14]) add x1,x1,x10 add x26,x26,x22 // d+=h add x22,x22,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x1,x1,x7 add x22,x22,x17 // h+=Sigma0(a) add x1,x1,x6 ldr x6,[sp,#24] str x9,[sp,#16] ror x16,x26,#14 add x21,x21,x19 // h+=K[i] ror x8,x3,#1 and x17,x27,x26 ror x7,x0,#19 bic x19,x20,x26 ror x9,x22,#28 add x21,x21,x1 // h+=X[i] eor x16,x16,x26,ror#18 eor x8,x8,x3,ror#8 orr x17,x17,x19 // Ch(e,f,g) eor x19,x22,x23 // a^b, b^c in next round eor x16,x16,x26,ror#41 // Sigma1(e) eor x9,x9,x22,ror#34 add x21,x21,x17 // h+=Ch(e,f,g) and x28,x28,x19 // (b^c)&=(a^b) eor x7,x7,x0,ror#61 eor x8,x8,x3,lsr#7 // sigma0(X[i+1]) add x21,x21,x16 // h+=Sigma1(e) eor x28,x28,x23 // Maj(a,b,c) eor x17,x9,x22,ror#39 // Sigma0(a) eor x7,x7,x0,lsr#6 // sigma1(X[i+14]) add x2,x2,x11 add x25,x25,x21 // d+=h add x21,x21,x28 // h+=Maj(a,b,c) ldr x28,[x30],#8 // *K++, x19 in next round add x2,x2,x8 add x21,x21,x17 // h+=Sigma0(a) add x2,x2,x7 ldr x7,[sp,#0] str x10,[sp,#24] ror x16,x25,#14 add x20,x20,x28 // h+=K[i] ror x9,x4,#1 and x17,x26,x25 ror x8,x1,#19 bic x28,x27,x25 ror x10,x21,#28 add x20,x20,x2 // h+=X[i] eor x16,x16,x25,ror#18 eor x9,x9,x4,ror#8 orr x17,x17,x28 // Ch(e,f,g) eor x28,x21,x22 // a^b, b^c in next round eor x16,x16,x25,ror#41 // Sigma1(e) eor x10,x10,x21,ror#34 add x20,x20,x17 // h+=Ch(e,f,g) and x19,x19,x28 // (b^c)&=(a^b) eor x8,x8,x1,ror#61 eor x9,x9,x4,lsr#7 // sigma0(X[i+1]) add x20,x20,x16 // h+=Sigma1(e) eor x19,x19,x22 // Maj(a,b,c) eor x17,x10,x21,ror#39 // Sigma0(a) eor x8,x8,x1,lsr#6 // sigma1(X[i+14]) add x3,x3,x12 add x24,x24,x20 // d+=h add x20,x20,x19 // h+=Maj(a,b,c) ldr x19,[x30],#8 // *K++, x28 in next round add x3,x3,x9 add x20,x20,x17 // h+=Sigma0(a) add x3,x3,x8 cbnz x19,.Loop_16_xx ldp x0,x2,[x29,#96] ldr x1,[x29,#112] sub x30,x30,#648 // rewind ldp x3,x4,[x0] ldp x5,x6,[x0,#2*8] add x1,x1,#14*8 // advance input pointer ldp x7,x8,[x0,#4*8] add x20,x20,x3 ldp x9,x10,[x0,#6*8] add x21,x21,x4 add x22,x22,x5 add x23,x23,x6 stp x20,x21,[x0] add x24,x24,x7 add x25,x25,x8 stp x22,x23,[x0,#2*8] add x26,x26,x9 add x27,x27,x10 cmp x1,x2 stp x24,x25,[x0,#4*8] stp x26,x27,[x0,#6*8] b.ne .Loop ldp x19,x20,[x29,#16] add sp,sp,#4*8 ldp x21,x22,[x29,#32] ldp x23,x24,[x29,#48] ldp x25,x26,[x29,#64] ldp x27,x28,[x29,#80] ldp x29,x30,[sp],#128 AARCH64_VALIDATE_LINK_REGISTER ret .size sha512_block_data_order,.-sha512_block_data_order .section .rodata .align 6 .type .LK512,%object .LK512: .quad 0x428a2f98d728ae22,0x7137449123ef65cd .quad 0xb5c0fbcfec4d3b2f,0xe9b5dba58189dbbc .quad 0x3956c25bf348b538,0x59f111f1b605d019 .quad 0x923f82a4af194f9b,0xab1c5ed5da6d8118 .quad 0xd807aa98a3030242,0x12835b0145706fbe .quad 0x243185be4ee4b28c,0x550c7dc3d5ffb4e2 .quad 0x72be5d74f27b896f,0x80deb1fe3b1696b1 .quad 0x9bdc06a725c71235,0xc19bf174cf692694 .quad 0xe49b69c19ef14ad2,0xefbe4786384f25e3 .quad 0x0fc19dc68b8cd5b5,0x240ca1cc77ac9c65 .quad 0x2de92c6f592b0275,0x4a7484aa6ea6e483 .quad 0x5cb0a9dcbd41fbd4,0x76f988da831153b5 .quad 0x983e5152ee66dfab,0xa831c66d2db43210 .quad 0xb00327c898fb213f,0xbf597fc7beef0ee4 .quad 0xc6e00bf33da88fc2,0xd5a79147930aa725 .quad 0x06ca6351e003826f,0x142929670a0e6e70 .quad 0x27b70a8546d22ffc,0x2e1b21385c26c926 .quad 0x4d2c6dfc5ac42aed,0x53380d139d95b3df .quad 0x650a73548baf63de,0x766a0abb3c77b2a8 .quad 0x81c2c92e47edaee6,0x92722c851482353b .quad 0xa2bfe8a14cf10364,0xa81a664bbc423001 .quad 0xc24b8b70d0f89791,0xc76c51a30654be30 .quad 0xd192e819d6ef5218,0xd69906245565a910 .quad 0xf40e35855771202a,0x106aa07032bbd1b8 .quad 0x19a4c116b8d2d0c8,0x1e376c085141ab53 .quad 0x2748774cdf8eeb99,0x34b0bcb5e19b48a8 .quad 0x391c0cb3c5c95a63,0x4ed8aa4ae3418acb .quad 0x5b9cca4f7763e373,0x682e6ff3d6b2b8a3 .quad 0x748f82ee5defb2fc,0x78a5636f43172f60 .quad 0x84c87814a1f0ab72,0x8cc702081a6439ec .quad 0x90befffa23631e28,0xa4506cebde82bde9 .quad 0xbef9a3f7b2c67915,0xc67178f2e372532b .quad 0xca273eceea26619c,0xd186b8c721c0c207 .quad 0xeada7dd6cde0eb1e,0xf57d4f7fee6ed178 .quad 0x06f067aa72176fba,0x0a637dc5a2c898a6 .quad 0x113f9804bef90dae,0x1b710b35131c471b .quad 0x28db77f523047d84,0x32caab7b40c72493 .quad 0x3c9ebe0a15c9bebc,0x431d67c49c100d4c .quad 0x4cc5d4becb3e42b6,0x597f299cfc657e2a .quad 0x5fcb6fab3ad6faec,0x6c44198c4a475817 .quad 0 // terminator .size .LK512,.-.LK512 .byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,65,82,77,118,56,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .align 2 #ifndef __KERNEL__ .comm OPENSSL_armcap_P,4,4 .hidden OPENSSL_armcap_P #endif #endif #endif // !OPENSSL_NO_ASM .section .note.GNU-stack,"",%progbits #endif // defined(__aarch64__) && defined(__linux__) #if defined(__linux__) && defined(__ELF__) .section .note.GNU-stack,"",%progbits #endif
{ "pile_set_name": "Github" }
package main import ( "bytes" "context" "encoding/json" "flag" "fmt" "io" "io/ioutil" "log" "os" "os/exec" "regexp" "strconv" "strings" "sync" "golang.org/x/oauth2" "github.com/google/go-github/github" ) var ( org = flag.String("org", "", "Name of the Organization to scan. Example: secretorg123") token = flag.String("token", "", "Github Personal Access Token. This is required.") outputFile = flag.String("output", "results.txt", "Output file to save the results.") user = flag.String("user", "", "Name of the Github user to scan. Example: secretuser1") repoURL = flag.String("repoURL", "", "HTTPS URL of the Github repo to scan. Example: https://github.com/anshumantestorg/repo1.git") gistURL = flag.String("gistURL", "", "HTTPS URL of the Github gist to scan. Example: https://gist.github.com/secretuser1/81963f276280d484767f9be895316afc") cloneForks = flag.Bool("cloneForks", false, "Option to clone org and user repos that are forks. Default is false") orgOnly = flag.Bool("orgOnly", false, "Option to skip cloning user repo's when scanning an org. Default is false") toolName = flag.String("toolName", "all", "Specify whether to run thog or repo-supervisor") teamName = flag.String("teamName", "", "Name of the Organization Team which has access to private repositories for scanning.") scanPrivateReposOnly = flag.Bool("scanPrivateReposOnly", false, "Option to scan private repositories only. Default is false") enterpriseURL = flag.String("enterpriseURL", "", "Base URL of the Github Enterprise") threads = flag.Int("threads", 10, "Amount of parallel threads") thogEntropy = flag.Bool("thogEntropy", false, "Option to include high entropy secrets when truffleHog is used") mergeOutput = flag.Bool("mergeOutput", false, "Merge the output files of all the tools used into one JSON file") blacklist = flag.String("blacklist", "", "Comma seperated values of Repos to Skip Scanning for") executionQueue chan bool ) type truffleHogOutput struct { Branch string `json:"branch"` Commit string `json:"commit"` CommitHash string `json:"commitHash"` Date string `json:"date"` Diff string `json:"diff"` Path string `json:"path"` PrintDiff string `json:"printDiff"` Reason string `json:"reason"` StringsFound []string `json:"stringsFound"` } type reposupervisorOutput struct { Result map[string][]string `json:"result"` } type repositoryScan struct { Repository string `json:"repository"` Results map[string][]string `json:"stringsFound"` } func enqueueJob(item func()) { executionQueue <- true go func() { item() <-executionQueue }() } // Info Function to show colored text func Info(format string, args ...interface{}) { fmt.Printf("\x1b[34;1m%s\x1b[0m\n", fmt.Sprintf(format, args...)) } func check(e error) { if e != nil { panic(e) } else if _, ok := e.(*github.RateLimitError); ok { log.Println("hit rate limit") } else if _, ok := e.(*github.AcceptedError); ok { log.Println("scheduled on GitHub side") } } func gitclone(cloneURL string, repoName string, wg *sync.WaitGroup) { defer wg.Done() cmd := exec.Command("/usr/bin/git", "clone", cloneURL, repoName) var out, stderr bytes.Buffer cmd.Stdout = &out cmd.Stderr = &stderr err := cmd.Run() if err != nil { fmt.Println(fmt.Sprint(err) + ": " + stderr.String()) // panic(err) } } func gitRepoURL(path string) (string, error) { out, err := exec.Command("/usr/bin/git", "-C", path, "config", "--get", "remote.origin.url").Output() if err != nil { return "", err } url := strings.TrimSuffix(string(out), "\n") return url, nil } // Moving cloning logic out of individual functions func executeclone(repo *github.Repository, directory string, wg *sync.WaitGroup) { urlToClone := "" switch *scanPrivateReposOnly { case false: urlToClone = *repo.CloneURL case true: urlToClone = *repo.SSHURL default: urlToClone = *repo.CloneURL } if *enterpriseURL != "" { urlToClone = *repo.SSHURL } var orgclone sync.WaitGroup if !*cloneForks && *repo.Fork { fmt.Println(*repo.Name + " is a fork and the cloneFork flag was set to false so moving on..") } else { // clone it orgclone.Add(1) fmt.Println(urlToClone) func(orgclone *sync.WaitGroup, urlToClone string, directory string) { enqueueJob(func() { gitclone(urlToClone, directory, orgclone) }) }(&orgclone, urlToClone, directory) } orgclone.Wait() wg.Done() } func cloneorgrepos(ctx context.Context, client *github.Client, org string) error { Info("Cloning the repositories of the organization: " + org) Info("If the token provided belongs to a user in this organization, this will also clone all public AND private repositories of this org, irrespecitve of the scanPrivateReposOnly flag being set..") var orgRepos []*github.Repository opt := &github.RepositoryListByOrgOptions{ ListOptions: github.ListOptions{PerPage: 10}, } for { repos, resp, err := client.Repositories.ListByOrg(ctx, org, opt) check(err) orgRepos = append(orgRepos, repos...) //adding to the repo array if resp.NextPage == 0 { break } opt.Page = resp.NextPage } var orgrepowg sync.WaitGroup //iterating through the repo array for _, repo := range orgRepos { if strings.Contains(*blacklist, *repo.Name) { fmt.Println("Repo " + *repo.Name + " is in the repo blacklist, moving on..") } else { orgrepowg.Add(1) go executeclone(repo, "/tmp/repos/org/"+org+"/"+*repo.Name, &orgrepowg) } } orgrepowg.Wait() fmt.Println("Done cloning org repos.") return nil } func cloneuserrepos(ctx context.Context, client *github.Client, user string) error { Info("Cloning " + user + "'s repositories") Info("If the scanPrivateReposOnly flag is set, this will only scan the private repositories of this user. If that flag is not set, only public repositories are scanned. ") var uname string var userRepos []*github.Repository var opt3 *github.RepositoryListOptions if *scanPrivateReposOnly { uname = "" opt3 = &github.RepositoryListOptions{ Visibility: "private", ListOptions: github.ListOptions{PerPage: 10}, } } else { uname = user opt3 = &github.RepositoryListOptions{ ListOptions: github.ListOptions{PerPage: 10}, } } for { uRepos, resp, err := client.Repositories.List(ctx, uname, opt3) check(err) userRepos = append(userRepos, uRepos...) //adding to the userRepos array if resp.NextPage == 0 { break } opt3.Page = resp.NextPage } var userrepowg sync.WaitGroup //iterating through the userRepos array for _, userRepo := range userRepos { userrepowg.Add(1) go executeclone(userRepo, "/tmp/repos/users/"+user+"/"+*userRepo.Name, &userrepowg) } userrepowg.Wait() fmt.Println("Done cloning user repos.") return nil } func cloneusergists(ctx context.Context, client *github.Client, user string) error { Info("Cloning " + user + "'s gists") Info("Irrespective of the scanPrivateReposOnly flag being set or not, this will scan all public AND secret gists of a user whose token is provided") var gisturl string var userGists []*github.Gist opt4 := &github.GistListOptions{ ListOptions: github.ListOptions{PerPage: 10}, } for { uGists, resp, err := client.Gists.List(ctx, user, opt4) check(err) userGists = append(userGists, uGists...) if resp.NextPage == 0 { break } opt4.Page = resp.NextPage } var usergistclone sync.WaitGroup //iterating through the userGists array for _, userGist := range userGists { usergistclone.Add(1) if *enterpriseURL != "" { d := strings.Split(*userGist.GitPullURL, "/")[2] f := strings.Split(*userGist.GitPullURL, "/")[4] gisturl = "git@" + d + ":gist/" + f } else { gisturl = *userGist.GitPullURL } fmt.Println(gisturl) //cloning the individual user gists func(gisturl string, userGist *github.Gist, user string, usergistclone *sync.WaitGroup) { enqueueJob(func() { gitclone(gisturl, "/tmp/repos/users/"+user+"/"+*userGist.ID, usergistclone) }) }(gisturl, userGist, user, &usergistclone) } usergistclone.Wait() return nil } func listallusers(ctx context.Context, client *github.Client, org string) ([]*github.User, error) { Info("Listing users of the organization and their repositories and gists") var allUsers []*github.User opt2 := &github.ListMembersOptions{ ListOptions: github.ListOptions{PerPage: 10}, } for { users, resp, err := client.Organizations.ListMembers(ctx, org, opt2) check(err) allUsers = append(allUsers, users...) //adding to the allUsers array if resp.NextPage == 0 { break } opt2.Page = resp.NextPage } return allUsers, nil } func runTrufflehog(filepath string, reponame string, orgoruser string) error { outputDir := "/tmp/results/" + orgoruser + "/" + reponame os.MkdirAll(outputDir, 0700) outputFile1 := outputDir + "/" + "truffleHog" // open the out file for writing outfile, fileErr := os.OpenFile(outputFile1, os.O_CREATE|os.O_RDWR, 0644) check(fileErr) defer outfile.Close() params := []string{filepath, "--rules=/root/truffleHog/rules.json", "--regex"} if *mergeOutput { params = append(params, "--json") } var cmd1 *exec.Cmd if *thogEntropy { params = append(params, "--entropy=True") } else { params = append(params, "--entropy=False") } cmd1 = exec.Command("trufflehog", params...) // direct stdout to the outfile cmd1.Stdout = outfile err1 := cmd1.Run() // truffleHog returns an exit code 1 if it finds anything if err1 != nil && err1.Error() != "exit status 1" { Info("truffleHog Scanning failed for: " + orgoruser + "_" + reponame + ". Please scan it manually.") fmt.Println(err1) } else { fmt.Println("Finished truffleHog Scanning for: " + orgoruser + "_" + reponame) } return nil } func runReposupervisor(filepath string, reponame string, orgoruser string) error { outputDir := "/tmp/results/" + orgoruser + "/" + reponame os.MkdirAll(outputDir, 0700) outputFile3 := outputDir + "/" + "repo-supervisor" cmd3 := exec.Command("/root/repo-supervisor/runreposupervisor.sh", filepath, outputFile3) var out3 bytes.Buffer cmd3.Stdout = &out3 err3 := cmd3.Run() if err3 != nil { Info("Repo Supervisor Scanning failed for: " + orgoruser + "_" + reponame + ". Please scan it manually.") fmt.Println(err3) } else { fmt.Println("Finished Repo Supervisor Scanning for: " + orgoruser + "_" + reponame) } return nil } func runGitTools(tool string, filepath string, wg *sync.WaitGroup, reponame string, orgoruser string) { defer wg.Done() switch tool { case "all": err := runTrufflehog(filepath, reponame, orgoruser) check(err) err = runReposupervisor(filepath, reponame, orgoruser) check(err) case "thog": err := runTrufflehog(filepath, reponame, orgoruser) check(err) case "repo-supervisor": err := runReposupervisor(filepath, reponame, orgoruser) check(err) } } func scanforeachuser(user string, wg *sync.WaitGroup) { defer wg.Done() var wguserrepogist sync.WaitGroup gituserrepos, _ := ioutil.ReadDir("/tmp/repos/users/" + user) for _, f := range gituserrepos { wguserrepogist.Add(1) func(user string, wg *sync.WaitGroup, wguserrepogist *sync.WaitGroup, f os.FileInfo) { enqueueJob(func() { runGitTools(*toolName, "/tmp/repos/users/"+user+"/"+f.Name()+"/", wguserrepogist, f.Name(), user) }) }(user, wg, &wguserrepogist, f) } wguserrepogist.Wait() } func toolsOutput(toolname string, of *os.File) error { linedelimiter := "----------------------------------------------------------------------------" + "----------------------------------------------------------------------------" + "----------------------------------------------------------------------------" + "----------------------------------------------------------------------------" _, err := of.WriteString("Tool: " + toolname + "\n") check(err) users, _ := ioutil.ReadDir("/tmp/results/") for _, user := range users { repos, _ := ioutil.ReadDir("/tmp/results/" + user.Name() + "/") for _, repo := range repos { file, err := os.Open("/tmp/results/" + user.Name() + "/" + repo.Name() + "/" + toolname) check(err) fi, err := file.Stat() check(err) if fi.Size() == 0 { continue } else if fi.Size() > 0 { orgoruserstr := user.Name() rnamestr := repo.Name() _, err1 := of.WriteString("OrgorUser: " + orgoruserstr + " RepoName: " + rnamestr + "\n") check(err1) if _, err2 := io.Copy(of, file); err2 != nil { return err2 } _, err3 := of.WriteString(linedelimiter + "\n") check(err3) of.Sync() } defer file.Close() } } return nil } func singletoolOutput(toolname string, of *os.File) error { users, _ := ioutil.ReadDir("/tmp/results/") for _, user := range users { repos, _ := ioutil.ReadDir("/tmp/results/" + user.Name() + "/") for _, repo := range repos { file, err := os.Open("/tmp/results/" + user.Name() + "/" + repo.Name() + "/" + toolname) check(err) fi, err := file.Stat() check(err) if fi.Size() == 0 { continue } else if fi.Size() > 0 { if _, err2 := io.Copy(of, file); err2 != nil { return err2 } of.Sync() } defer file.Close() } } return nil } func combineOutput(toolname string, outputfile string) error { // Read all files in /tmp/results/<tool-name>/ directories for all the tools // open a new file and save it in the output directory - outputFile // for each results file, write user/org and reponame, copy results from the file in the outputFile, end with some delimiter of, err := os.Create(outputfile) check(err) switch toolname { case "all": tools := []string{"truffleHog", "repo-supervisor"} for _, tool := range tools { err = toolsOutput(tool, of) check(err) } case "truffleHog": err = singletoolOutput("truffleHog", of) check(err) case "repo-supervisor": err = singletoolOutput("repo-supervisor", of) check(err) } defer func() { cerr := of.Close() if err == nil { err = cerr } }() return nil } func mergeOutputJSON(outputfile string) { var results []repositoryScan var basePaths []string if *repoURL != "" || *gistURL != "" { basePaths = []string{"/tmp/repos"} } else { basePaths = []string{"/tmp/repos/org", "/tmp/repos/users", "/tmp/repos/team"} } for _, basePath := range basePaths { users, _ := ioutil.ReadDir(basePath) for _, user := range users { repos, _ := ioutil.ReadDir("/tmp/results/" + user.Name() + "/") for _, repo := range repos { repoPath := basePath + "/" + user.Name() + "/" + repo.Name() + "/" repoResultsPath := "/tmp/results/" + user.Name() + "/" + repo.Name() + "/" reposupvPath := repoResultsPath + "repo-supervisor" thogPath := repoResultsPath + "truffleHog" reposupvExists := fileExists(reposupvPath) thogExists := fileExists(thogPath) repoURL, _ := gitRepoURL(repoPath) var mergedOut map[string][]string if reposupvExists && thogExists { reposupvOut, _ := loadReposupvOut(reposupvPath, repoPath) thogOut, _ := loadThogOutput(thogPath) mergedOut = mergeOutputs(reposupvOut, thogOut) } else if reposupvExists { mergedOut, _ = loadReposupvOut(reposupvPath, repoPath) } else if thogExists { mergedOut, _ = loadThogOutput(thogPath) } if len(mergedOut) > 0 { results = append(results, repositoryScan{Repository: repoURL, Results: mergedOut}) } } } } marshalledResults, err := json.Marshal(results) check(err) err = ioutil.WriteFile(outputfile, marshalledResults, 0644) check(err) } func appendIfMissing(slice []string, i string) []string { for _, ele := range slice { if ele == i { return slice } } return append(slice, i) } func loadThogOutput(outfile string) (map[string][]string, error) { results := make(map[string][]string) output, err := ioutil.ReadFile(outfile) if err != nil { return nil, err } // There was an issue concerning truffleHog's output not being valid JSON // https://github.com/dxa4481/truffleHog/issues/95 // but apparently it was closed without a fix. entries := strings.Split(string(output), "\n") for _, entry := range entries[:len(entries)-1] { var issue truffleHogOutput err := json.Unmarshal([]byte(entry), &issue) if err != nil { return nil, err } if _, found := results[issue.Path]; found { for _, str := range issue.StringsFound { results[issue.Path] = appendIfMissing(results[issue.Path], str) } } else { results[issue.Path] = issue.StringsFound } } return results, nil } func loadReposupvOut(outfile string, home string) (map[string][]string, error) { results := make(map[string][]string) output, err := ioutil.ReadFile(outfile) if err != nil { return nil, err } var rsupervisorOutput reposupervisorOutput json.Unmarshal(output, &rsupervisorOutput) for path, stringFound := range rsupervisorOutput.Result { relativePath := strings.TrimPrefix(path, home) // Make sure there aren't any leading slashes fileName := strings.TrimPrefix(relativePath, "/") results[fileName] = stringFound } return results, nil } func mergeOutputs(outputA map[string][]string, outputB map[string][]string) map[string][]string { for path, stringsFound := range outputA { if _, included := outputB[path]; included { outputB[path] = append(outputB[path], stringsFound...) } else { outputB[path] = stringsFound } } return outputB } // Moving directory scanning logic out of individual functions func scanDir(dir string, org string) error { var wg sync.WaitGroup allRepos, _ := ioutil.ReadDir(dir) for _, f := range allRepos { wg.Add(1) func(f os.FileInfo, wg *sync.WaitGroup, org string) { enqueueJob(func() { runGitTools(*toolName, dir+f.Name()+"/", wg, f.Name(), org) }) }(f, &wg, org) } wg.Wait() return nil } func scanorgrepos(org string) error { err := scanDir("/tmp/repos/org/"+org+"/", org) check(err) return nil } func stringInSlice(a string, list []*github.Repository) (bool, error) { for _, b := range list { if *b.SSHURL == a || *b.CloneURL == a { return true, nil } } return false, nil } func checkifsshkeyexists() error { fmt.Println("Checking to see if the SSH key exists or not..") fi, err := os.Stat("/root/.ssh/id_rsa") if err == nil && fi.Size() > 0 { fmt.Println("SSH key exists and file size > 0 so continuing..") } if err != nil { fmt.Println(err) os.Exit(2) } return nil } func checkflags(token string, org string, user string, repoURL string, gistURL string, teamName string, scanPrivateReposOnly bool, orgOnly bool, toolName string, enterpriseURL string, thogEntropy bool) error { if token == "" { fmt.Println("Need a Github personal access token. Please provide that using the -token flag") os.Exit(2) } else if org == "" && user == "" && repoURL == "" && gistURL == "" { fmt.Println("org, user, repoURL and gistURL can't all be empty. Please provide just one of these values") os.Exit(2) } else if org != "" && (user != "" || repoURL != "" || gistURL != "") { fmt.Println("Can't have org along with any of user, repoURL or gistURL. Please provide just one of these values") os.Exit(2) } else if user != "" && (org != "" || repoURL != "" || gistURL != "") { fmt.Println("Can't have user along with any of org, repoURL or gistURL. Please provide just one of these values") os.Exit(2) } else if repoURL != "" && (org != "" || user != "" || gistURL != "") { fmt.Println("Can't have repoURL along with any of org, user or gistURL. Please provide just one of these values") os.Exit(2) } else if gistURL != "" && (org != "" || repoURL != "" || user != "") { fmt.Println("Can't have gistURL along with any of org, user or repoURL. Please provide just one of these values") os.Exit(2) } else if thogEntropy && !(toolName == "all" || toolName == "thog") { fmt.Println("thogEntropy flag should be used only when thog is being run. So, either leave the toolName blank or the toolName should be thog") os.Exit(2) } else if enterpriseURL == "" && (repoURL != "" || gistURL != "") { var ed, url string if repoURL != "" { url = repoURL } else if gistURL != "" { url = gistURL } if strings.Split(strings.Split(url, ":")[0], "@")[0] == "git" { fmt.Println("SSH URL") ed = strings.Split(strings.Split(url, ":")[0], "@")[1] } else if strings.Split(url, "/")[0] == "https:" { fmt.Println("HTTPS URL") ed = strings.Split(url, "/")[2] } matched, err := regexp.MatchString("github.com", ed) check(err) if !matched { fmt.Println("By the domain provided in the repoURL/gistURL, it looks like you are trying to scan a Github Enterprise repo/gist. Therefore, you need to provide the enterpriseURL flag as well") os.Exit(2) } } else if teamName != "" && org == "" { fmt.Println("Can't have a teamName without an org! Please provide a value for org along with the team name") os.Exit(2) } else if orgOnly && org == "" { fmt.Println("orgOnly flag should be used with a valid org") os.Exit(2) } else if scanPrivateReposOnly && user == "" && repoURL == "" && org == "" { fmt.Println("scanPrivateReposOnly flag should be used along with either the user, org or the repoURL") os.Exit(2) } else if scanPrivateReposOnly && (user != "" || repoURL != "" || org != "") { fmt.Println("scanPrivateReposOnly flag is provided with either the user, the repoURL or the org") err := checkifsshkeyexists() check(err) //Authenticating to Github using the token ctx1 := context.Background() client1, err := authenticatetogit(ctx1, token) check(err) if user != "" || repoURL != "" { var userRepos []*github.Repository opt3 := &github.RepositoryListOptions{ Affiliation: "owner", ListOptions: github.ListOptions{PerPage: 10}, } for { uRepos, resp, err := client1.Repositories.List(ctx1, "", opt3) check(err) userRepos = append(userRepos, uRepos...) //adding to the userRepos array if resp.NextPage == 0 { break } opt3.Page = resp.NextPage } if user != "" { fmt.Println("scanPrivateReposOnly flag is provided along with the user") fmt.Println("Checking to see if the token provided belongs to the user or not..") if *userRepos[0].Owner.Login == user { fmt.Println("Token belongs to the user") } else { fmt.Println("Token does not belong to the user. Please provide the correct token for the user mentioned.") os.Exit(2) } } else if repoURL != "" { fmt.Println("scanPrivateReposOnly flag is provided along with the repoURL") fmt.Println("Checking to see if the repo provided belongs to the user or not..") val, err := stringInSlice(repoURL, userRepos) check(err) if val { fmt.Println("Repo belongs to the user provided") } else { fmt.Println("Repo does not belong to the user whose token is provided. Please provide a valid repoURL that belongs to the user whose token is provided.") os.Exit(2) } } } else if org != "" && teamName == "" { var orgRepos []*github.Repository opt3 := &github.RepositoryListByOrgOptions{ Type: "private", ListOptions: github.ListOptions{PerPage: 10}, } for { repos, resp, err := client1.Repositories.ListByOrg(ctx1, org, opt3) check(err) orgRepos = append(orgRepos, repos...) if resp.NextPage == 0 { break } opt3.Page = resp.NextPage } fmt.Println("scanPrivateReposOnly flag is provided along with the org") fmt.Println("Checking to see if the token provided belongs to a user in the org or not..") var i int if i >= 0 && i < len(orgRepos) { fmt.Println("Private Repos exist in this org and token belongs to a user in this org") } else { fmt.Println("Even though the token belongs to a user in this org, there are no Private repos in this org") os.Exit(2) } } } else if scanPrivateReposOnly && gistURL != "" { fmt.Println("scanPrivateReposOnly flag should NOT be provided with the gistURL since its a private repository or multiple private repositories that we are looking to scan. Please provide either a user, an org or a private repoURL") os.Exit(2) } else if !(toolName == "thog" || toolName == "repo-supervisor" || toolName == "all") { fmt.Println("Please enter either thog or repo-supervisor. Default is all.") os.Exit(2) } else if repoURL != "" && !scanPrivateReposOnly && enterpriseURL == "" { if strings.Split(repoURL, "@")[0] == "git" { fmt.Println("Since the repoURL is a SSH URL and no enterprise URL is provided, it is required to have the scanPrivateReposOnly flag and the SSH key mounted on a volume") os.Exit(2) } } else if enterpriseURL != "" { fmt.Println("Since enterpriseURL is provided, checking to see if the SSH key is also mounted or not") err := checkifsshkeyexists() check(err) } return nil } func makeDirectories() error { os.MkdirAll("/tmp/repos/org", 0700) os.MkdirAll("/tmp/repos/team", 0700) os.MkdirAll("/tmp/repos/users", 0700) return nil } func fileExists(file string) bool { if _, err := os.Stat(file); os.IsNotExist(err) { return false } return true } func findTeamByName(ctx context.Context, client *github.Client, org string, teamName string) (*github.Team, error) { listTeamsOpts := &github.ListOptions{ PerPage: 10, } Info("Listing teams...") for { teams, resp, err := client.Organizations.ListTeams(ctx, org, listTeamsOpts) check(err) //check the name here--try to avoid additional API calls if we've found the team for _, team := range teams { if *team.Name == teamName { return team, nil } } if resp.NextPage == 0 { break } listTeamsOpts.Page = resp.NextPage } return nil, nil } func cloneTeamRepos(ctx context.Context, client *github.Client, org string, teamName string) error { // var team *github.Team team, err := findTeamByName(ctx, client, org, teamName) if team != nil { Info("Cloning the repositories of the team: " + *team.Name + "(" + strconv.FormatInt(*team.ID, 10) + ")") var teamRepos []*github.Repository listTeamRepoOpts := &github.ListOptions{ PerPage: 10, } Info("Listing team repositories...") for { repos, resp, err := client.Organizations.ListTeamRepos(ctx, *team.ID, listTeamRepoOpts) check(err) teamRepos = append(teamRepos, repos...) //adding to the repo array if resp.NextPage == 0 { break } listTeamRepoOpts.Page = resp.NextPage } var teamrepowg sync.WaitGroup //iterating through the repo array for _, repo := range teamRepos { teamrepowg.Add(1) go executeclone(repo, "/tmp/repos/team/"+*repo.Name, &teamrepowg) } teamrepowg.Wait() } else { fmt.Println("Unable to find the team '" + teamName + "'; perhaps the user is not a member?\n") if err != nil { fmt.Println("Error was:") fmt.Println(err) } os.Exit(2) } return nil } func scanTeamRepos(org string) error { err := scanDir("/tmp/repos/team/", org) check(err) return nil } func authenticatetogit(ctx context.Context, token string) (*github.Client, error) { var client *github.Client var err error //Authenticating to Github using the token ts := oauth2.StaticTokenSource( &oauth2.Token{AccessToken: token}, ) tc := oauth2.NewClient(ctx, ts) if *enterpriseURL == "" { client = github.NewClient(tc) } else if *enterpriseURL != "" { client, err = github.NewEnterpriseClient(*enterpriseURL, *enterpriseURL, tc) if err != nil { fmt.Printf("NewEnterpriseClient returned unexpected error: %v", err) } } return client, nil } func main() { //Parsing the flags flag.Parse() executionQueue = make(chan bool, *threads) //Logic to check the program is ingesting proper flags err := checkflags(*token, *org, *user, *repoURL, *gistURL, *teamName, *scanPrivateReposOnly, *orgOnly, *toolName, *enterpriseURL, *thogEntropy) check(err) ctx := context.Background() //authN client, err := authenticatetogit(ctx, *token) check(err) //Creating some temp directories to store repos & results. These will be deleted in the end err = makeDirectories() check(err) //By now, we either have the org, user, repoURL or the gistURL. The program flow changes accordingly.. if *org != "" { //If org was supplied m := "Since org was provided, the tool will proceed to scan all the org repos, then all the user repos and user gists in a recursive manner" if *orgOnly { m = "Org was specified combined with orgOnly, the tool will proceed to scan only the org repos and nothing related to its users" } Info(m) //cloning all the repos of the org err := cloneorgrepos(ctx, client, *org) check(err) if *teamName != "" { //If team was supplied Info("Since team name was provided, the tool will clone all repos to which the team has access") //cloning all the repos of the team err := cloneTeamRepos(ctx, client, *org, *teamName) check(err) } //getting all the users of the org into the allUsers array allUsers, err := listallusers(ctx, client, *org) check(err) if !*orgOnly { //iterating through the allUsers array for _, user := range allUsers { //cloning all the repos of a user err1 := cloneuserrepos(ctx, client, *user.Login) check(err1) //cloning all the gists of a user err2 := cloneusergists(ctx, client, *user.Login) check(err2) } } Info("Scanning all org repositories now..This may take a while so please be patient\n") err = scanorgrepos(*org) check(err) Info("Finished scanning all org repositories\n") if *teamName != "" { //If team was supplied Info("Scanning all team repositories now...This may take a while so please be patient\n") err = scanTeamRepos(*org) check(err) Info("Finished scanning all team repositories\n") } if !*orgOnly { Info("Scanning all user repositories and gists now..This may take a while so please be patient\n") var wguser sync.WaitGroup for _, user := range allUsers { wguser.Add(1) go scanforeachuser(*user.Login, &wguser) } wguser.Wait() Info("Finished scanning all user repositories and gists\n") } } else if *user != "" { //If user was supplied Info("Since user was provided, the tool will proceed to scan all the user repos and user gists\n") err1 := cloneuserrepos(ctx, client, *user) check(err1) err2 := cloneusergists(ctx, client, *user) check(err2) Info("Scanning all user repositories and gists now..This may take a while so please be patient\n") var wguseronly sync.WaitGroup wguseronly.Add(1) go scanforeachuser(*user, &wguseronly) wguseronly.Wait() Info("Finished scanning all user repositories and gists\n") } else if *repoURL != "" || *gistURL != "" { //If either repoURL or gistURL was supplied var url, repoorgist, fpath, rn, lastString, orgoruserName string var splitArray []string var bpath = "/tmp/repos/" if *repoURL != "" { //repoURL if *enterpriseURL != "" && strings.Split(strings.Split(*repoURL, "/")[0], "@")[0] != "git" { url = "git@" + strings.Split(*repoURL, "/")[2] + ":" + strings.Split(*repoURL, "/")[3] + "/" + strings.Split(*repoURL, "/")[4] } else { url = *repoURL } repoorgist = "repo" } else { //gistURL if *enterpriseURL != "" && strings.Split(strings.Split(*gistURL, "/")[0], "@")[0] != "git" { url = "git@" + strings.Split(*gistURL, "/")[2] + ":" + strings.Split(*gistURL, "/")[3] + "/" + strings.Split(*gistURL, "/")[4] } else { url = *gistURL } repoorgist = "gist" } Info("The tool will proceed to clone and scan: " + url + " only\n") if *enterpriseURL == "" && strings.Split(strings.Split(*gistURL, "/")[0], "@")[0] == "git" { splitArray = strings.Split(url, ":") lastString = splitArray[len(splitArray)-1] } else { splitArray = strings.Split(url, "/") lastString = splitArray[len(splitArray)-1] } if !*scanPrivateReposOnly { if *enterpriseURL != "" { orgoruserName = strings.Split(splitArray[0], ":")[1] } else { if *enterpriseURL == "" && strings.Split(strings.Split(*gistURL, "/")[0], "@")[0] == "git" { orgoruserName = splitArray[1] } else { orgoruserName = splitArray[3] } } } else { orgoruserName = strings.Split(splitArray[0], ":")[1] } switch repoorgist { case "repo": rn = strings.Split(lastString, ".")[0] case "gist": rn = lastString } fpath = bpath + orgoruserName + "/" + rn //cloning Info("Starting to clone: " + url + "\n") var wgo sync.WaitGroup wgo.Add(1) func(url string, fpath string, wgo *sync.WaitGroup) { enqueueJob(func() { gitclone(url, fpath, wgo) }) }(url, fpath, &wgo) wgo.Wait() Info("Cloning of: " + url + " finished\n") //scanning Info("Starting to scan: " + url + "\n") var wgs sync.WaitGroup wgs.Add(1) func(rn string, fpath string, wgs *sync.WaitGroup, orgoruserName string) { enqueueJob(func() { runGitTools(*toolName, fpath+"/", wgs, rn, orgoruserName) }) }(rn, fpath, &wgs, orgoruserName) wgs.Wait() Info("Scanning of: " + url + " finished\n") } //Now, that all the scanning has finished, time to combine the output // There are two option here: if *mergeOutput { // The first is to merge everything in /tmp/results into one JSON file Info("Merging the output into one JSON file\n") mergeOutputJSON(*outputFile) } else { // The second is to just concat the outputs Info("Combining the output into one file\n") err = combineOutput(*toolName, *outputFile) check(err) } }
{ "pile_set_name": "Github" }
package credentials func defaultCredentialsStore() string { return "osxkeychain" }
{ "pile_set_name": "Github" }
#ifndef BOOST_MPL_AUX_MSVC_TYPE_HPP_INCLUDED #define BOOST_MPL_AUX_MSVC_TYPE_HPP_INCLUDED // Copyright Aleksey Gurtovoy 2001-2004 // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // See http://www.boost.org/libs/mpl for documentation. // $Id$ // $Date$ // $Revision$ #include <boost/mpl/aux_/config/eti.hpp> #include <boost/mpl/aux_/is_msvc_eti_arg.hpp> namespace boost { namespace mpl { namespace aux { #if defined(BOOST_MPL_CFG_MSVC_70_ETI_BUG) template< bool > struct msvc_type_impl { template< typename T > struct result_ { typedef typename T::type type; }; }; template<> struct msvc_type_impl<true> { template< typename T > struct result_ { typedef result_ type; }; }; template< typename T > struct msvc_type : msvc_type_impl< is_msvc_eti_arg<T>::value > ::template result_<T> { }; #else // BOOST_MPL_CFG_MSVC_70_ETI_BUG template< typename T > struct msvc_type { typedef typename T::type type; }; template<> struct msvc_type<int> { typedef int type; }; #endif }}} #endif // BOOST_MPL_AUX_MSVC_TYPE_HPP_INCLUDED
{ "pile_set_name": "Github" }
<?php /* * This file is part of PHPExifTool. * * (c) 2012 Romain Neutron <[email protected]> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace PHPExiftool\Driver\Tag\DICOM; use JMS\Serializer\Annotation\ExclusionPolicy; use PHPExiftool\Driver\AbstractTag; /** * @ExclusionPolicy("all") */ class ContourNumber extends AbstractTag { protected $Id = '3006,0048'; protected $Name = 'ContourNumber'; protected $FullName = 'DICOM::Main'; protected $GroupName = 'DICOM'; protected $g0 = 'DICOM'; protected $g1 = 'DICOM'; protected $g2 = 'Image'; protected $Type = '?'; protected $Writable = false; protected $Description = 'Contour Number'; }
{ "pile_set_name": "Github" }
// See LICENSE for license details. #ifndef _BBL_H #define _BBL_H #ifndef __ASSEMBLER__ #include <stdint.h> #include <stddef.h> void print_logo(); #endif // !__ASSEMBLER__ #endif
{ "pile_set_name": "Github" }
<?php /** * 用户关系 封禁用户 */ namespace RongCloud\Lib\User\Tag; use RongCloud\Lib\Request; use RongCloud\Lib\Utils; class Tag { /** * 用户模块 用户标签 * * @var string */ private $jsonPath = 'Lib/User/Tag/'; /** * 请求配置文件 * * @var string */ private $conf = ""; /** * 校验配置文件 * * @var string */ private $verify = ""; /** * User constructor. */ function __construct() { //初始化请求配置和校验文件路径 $this->conf = Utils::getJson($this->jsonPath.'api.json'); $this->verify = Utils::getJson($this->jsonPath.'../verify.json'); } /** * 添加用户标签 * * @param $User array * @param * $User = [ 'userId'=> 'ujadk90ha1',//用户id 'tags'=> ['标签1','标签2']//用户标签 ]; * @return array */ public function set(array $User=[]){ $conf = $this->conf['setTag']; $error = (new Utils())->check([ 'api'=> $conf, 'model'=> 'user', 'data'=> $User, 'verify'=> $this->verify['tag'] ]); if($error) return $error; $result = (new Request())->Request($conf['url'],$User,'json'); $result = (new Utils())->responseError($result, $conf['response']['fail']); return $result; } /** * 批量添加用户标签 * * @param $User array * @param * $User = [ 'userIds'=> ['ujadk90ha1','ujadk90ha1'],//用户id 列表 'tags'=> ['标签1','标签2']//用户标签 ]; * @return array */ public function batchset(array $User=[]){ $conf = $this->conf['batchSetTag']; $error = (new Utils())->check([ 'api'=> $conf, 'model'=> 'user', 'data'=> $User, 'verify'=> $this->verify['batchTag'] ]); if($error) return $error; $result = (new Request())->Request($conf['url'],$User,'json'); $result = (new Utils())->responseError($result, $conf['response']['fail']); return $result; } /** *获取用户标签 * @param $User array * @param * $User = [ 'userIds'=> ['ujadk90ha1','ujadk90ha1'],//用户id 列表 ]; * @return array * @return array */ public function get(array $User=[]){ $conf = $this->conf['getTag']; $error = (new Utils())->check([ 'api'=> $conf, 'model'=> 'user', 'data'=> $User, 'verify'=> $this->verify['getTag'] ]); if($error) return $error; $result = (new Request())->Request($conf['url'],$User); $result = (new Utils())->responseError($result, $conf['response']['fail']); return $result; } }
{ "pile_set_name": "Github" }
# syntax=docker/dockerfile:experimental FROM --platform=$TARGETPLATFORM debian RUN --mount=type=bind,from=khs1994/s6:2.1.0.0,source=/,target=/tmp/s6 \ set -x \ && tar -zxvf /tmp/s6/s6-overlay.tar.gz -C / \ && ln -s /init /s6-init RUN set -x ; uname -a ; /bin/s6-ls / ENTRYPOINT ["/s6-init"]
{ "pile_set_name": "Github" }
/** Copyright © 2018 Odzhan. All Rights Reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. The name of the author may not be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY AUTHORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #define R(v,n)(((v)>>(n))|((v)<<(32-(n)))) #define F(n)for(i=0;i<n;i++) typedef unsigned char B; typedef unsigned int W; // Multiplication over GF(2**8) W M(W x){ W t=x&0x80808080; return((x^t)*2)^((t>>7)*27); } // SubByte B S(B x) { B i,y,c; if(x) { for(c=i=0,y=1;--i;y=(!c&&y==x)?c=1:y,y^=M(y)); x=y;F(4)x^=y=(y<<1)|(y>>7); } return x^99; } void E(B *s) { W i,w,x[8],c=1,*k=(W*)&x[4]; // copy plain text + master key to x F(8)x[i]=((W*)s)[i]; for(;;){ // AddRoundKey, 1st part of ExpandRoundKey w=k[3];F(4)w=(w&-256)|S(w),w=R(w,8),((W*)s)[i]=x[i]^k[i]; // AddRoundConstant, perform 2nd part of ExpandRoundKey w=R(w,8)^c;F(4)w=k[i]^=w; // if round 11, stop; if(c==108)break; // update round constant c=M(c); // SubBytes and ShiftRows F(16)((B*)x)[(i%4)+(((i/4)-(i%4))%4)*4]=S(s[i]); // if not round 11, MixColumns if(c!=108) F(4)w=x[i],x[i]=R(w,8)^R(w,16)^R(w,24)^M(R(w,8)^w); } } #ifdef CTR // encrypt using Counter (CTR) mode void encrypt(W l, B*c, B*p, B*k){ W i,r; B t[32]; // copy master key to local buffer F(16)t[i+16]=k[i]; while(l) { // copy counter+nonce to local buffer F(16)t[i]=c[i]; // encrypt t E(t); // XOR plaintext with ciphertext r=l>16?16:l; F(r)p[i]^=t[i]; // update length + position l-=r;p+=r; // update counter for(i=16;i>0;i--) if(++c[i-1])break; } } #endif
{ "pile_set_name": "Github" }
net: "examples/mnist/mnist_autoencoder.prototxt" test_state: { stage: 'test-on-train' } test_iter: 500 test_state: { stage: 'test-on-test' } test_iter: 100 test_interval: 500 test_compute_loss: true base_lr: 1.0 lr_policy: "fixed" momentum: 0.95 delta: 1e-8 display: 100 max_iter: 65000 weight_decay: 0.0005 snapshot: 10000 snapshot_prefix: "examples/mnist/mnist_autoencoder_adadelta_train" # solver mode: CPU or GPU solver_mode: GPU type: "AdaDelta"
{ "pile_set_name": "Github" }
// things we might want: #pragma stack 1000000 #pragma code 100000 #include "this.ufh" Should we dump the shell comment convention? Would let us use cpp (if it accepts //). Should we dump assignment as a value? Probably not. Constant elements in the symbol table should be tagged to prevent assignment (does the grammar prevent this?). Subrange into a HLH isn't implemented. Tr.Series = 0 works as a vector - because of member access q = 0 makes q a 0 no matter what was there before.
{ "pile_set_name": "Github" }
/** * Created by azu on 2014/04/29. * LICENSE : MIT */ "use strict"; var growl = require('node-notifier'); var EventEmitter = require('events').EventEmitter; var download = require('download-cache'); var notificationEvent = new EventEmitter(); var __CLICK_EVENT = "GROWL__CLICK_EVENT"; function addClickCallback(callback) { notificationEvent.on(__CLICK_EVENT, function (event, options) { callback(options); }); } growl.on('click', function (notifierObject, options) { notificationEvent.emit(__CLICK_EVENT, notifierObject, options) }); function sendNotification(options, callback) { download(options.icon).then(function (filePath) { growl.notify({ appIcon: __dirname + "/icon.png", id: options.id, title: options.title, message: options.text, icon: filePath, html_url: options.url, sound: true, // Only Notification Center or Windows Toasters wait: true // wait with callback until user action is taken on notification }, function (err, response) { if (err) { return callback(err); } }); }).catch(function (error) { callback(error); }); } module.exports = { addClickCallback: addClickCallback, sendNotification: sendNotification };
{ "pile_set_name": "Github" }
package api // import "github.com/microscaling/imagelayers/api" import ( "encoding/json" "fmt" "io/ioutil" "log" "net/http" "sync" "time" "github.com/CenturyLinkLabs/docker-reg-client/registry" "github.com/gorilla/mux" "github.com/microscaling/imagelayers/server" "github.com/pmylund/go-cache" ) const ( cacheDuration = 15 * time.Minute cacheCleanupInterval = 5 * time.Minute ) type Status struct { Message string `json:"message"` Service string `json:"service"` } type Request struct { Repos []Repo `json:"repos"` } type Response struct { Repo *Repo `json:"repo"` Layers []*registry.ImageMetadata `json:"layers"` Status int `json:"status"` } type Repo struct { Name string `json:"name"` Tag string `json:"tag"` Size int64 `json:"size"` Count int `json:"count"` } type RegistryConnection interface { Status() (Status, error) GetTags(string) (registry.TagMap, error) Search(string) (*registry.SearchResults, error) GetImageLayers(name, tag string) ([]*registry.ImageMetadata, error) } type registryApi struct { connection RegistryConnection imageCache *cache.Cache } func newRegistryApi(conn RegistryConnection) *registryApi { return &registryApi{ connection: conn, imageCache: cache.New(cacheDuration, cacheCleanupInterval), } } func (reg *registryApi) Routes(context string, router *server.Router) { routes := server.RouteMap{ "GET": { "/status": reg.handleStatus, "/search": reg.handleSearch, "/images/{front}/tags": reg.handleTags, "/images/{front}/{tail}/tags": reg.handleTags, }, "POST": { "/analyze": reg.handleAnalysis, }, } router.AddCorsRoutes(context, routes) } func (reg *registryApi) handleTags(w http.ResponseWriter, r *http.Request) { image := mux.Vars(r)["front"] tail := mux.Vars(r)["tail"] if tail != "" { image = image + "/" + tail } res, err := reg.connection.GetTags(image) if err != nil { respondToError(w, err) return } respondWithJSON(w, res) } func (reg *registryApi) handleSearch(w http.ResponseWriter, r *http.Request) { value := r.FormValue("name") res, err := reg.connection.Search(value) if err != nil { respondToError(w, err) return } respondWithJSON(w, res) } func (reg *registryApi) handleStatus(w http.ResponseWriter, r *http.Request) { res, err := reg.connection.Status() if err != nil { respondToError(w, err) return } log.Printf("Status: %s", res.Service) respondWithJSON(w, res) } func (reg *registryApi) handleAnalysis(w http.ResponseWriter, r *http.Request) { var request Request body, err := ioutil.ReadAll(r.Body) if err != nil { respondToError(w, err) return } if err := json.Unmarshal(body, &request); err != nil { respondToError(w, err) return } res := reg.inspectImages(request.Repos) respondWithJSON(w, res) } func (reg *registryApi) inspectImages(images []Repo) []*Response { var wg sync.WaitGroup list := make([]*Response, len(images)) for i, image := range images { wg.Add(1) go func(idx int, img Repo) { defer wg.Done() var resp *Response key := fmt.Sprintf("%s:%s", img.Name, img.Tag) val, found := reg.imageCache.Get(key) if found { resp = val.(*Response) } else { resp = reg.loadMetaData(img) if resp.Status == http.StatusOK { reg.imageCache.Set(key, resp, cache.DefaultExpiration) } } list[idx] = resp }(i, image) } wg.Wait() return list } func (reg *registryApi) loadMetaData(repo Repo) *Response { resp := new(Response) resp.Repo = &repo layers, err := reg.connection.GetImageLayers(repo.Name, repo.Tag) if err == nil { resp.Status = http.StatusOK resp.Layers = layers resp.Repo.Count = len(resp.Layers) for _, layer := range resp.Layers { resp.Repo.Size += layer.Size } } else { switch e := err.(type) { case registry.RegistryError: resp.Status = e.Code default: resp.Status = http.StatusInternalServerError } log.Printf("Error: %s", err) } return resp } func respondToError(w http.ResponseWriter, err error) { w.WriteHeader(http.StatusInternalServerError) fmt.Fprint(w, err.Error()) } func respondWithJSON(w http.ResponseWriter, o interface{}) { if err := json.NewEncoder(w).Encode(o); err != nil { respondToError(w, err) return } w.Header().Set("Content-Type", "application/json") }
{ "pile_set_name": "Github" }
// Copyright 2007, Google Inc. // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Author: [email protected] (Zhanyong Wan) // Google Mock - a framework for writing C++ mock classes. // // This file tests the function mocker classes. #include "gmock/gmock-generated-function-mockers.h" #if GTEST_OS_WINDOWS // MSDN says the header file to be included for STDMETHOD is BaseTyps.h but // we are getting compiler errors if we use basetyps.h, hence including // objbase.h for definition of STDMETHOD. # include <objbase.h> #endif // GTEST_OS_WINDOWS #include <map> #include <string> #include "gmock/gmock.h" #include "gtest/gtest.h" // There is a bug in MSVC (fixed in VS 2008) that prevents creating a // mock for a function with const arguments, so we don't test such // cases for MSVC versions older than 2008. #if !GTEST_OS_WINDOWS || (_MSC_VER >= 1500) # define GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS #endif // !GTEST_OS_WINDOWS || (_MSC_VER >= 1500) namespace testing { namespace gmock_generated_function_mockers_test { using testing::internal::string; using testing::_; using testing::A; using testing::An; using testing::AnyNumber; using testing::Const; using testing::DoDefault; using testing::Eq; using testing::Lt; using testing::MockFunction; using testing::Ref; using testing::Return; using testing::ReturnRef; using testing::TypedEq; class FooInterface { public: virtual ~FooInterface() {} virtual void VoidReturning(int x) = 0; virtual int Nullary() = 0; virtual bool Unary(int x) = 0; virtual long Binary(short x, int y) = 0; // NOLINT virtual int Decimal(bool b, char c, short d, int e, long f, // NOLINT float g, double h, unsigned i, char* j, const string& k) = 0; virtual bool TakesNonConstReference(int& n) = 0; // NOLINT virtual string TakesConstReference(const int& n) = 0; #ifdef GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS virtual bool TakesConst(const int x) = 0; #endif // GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS virtual int OverloadedOnArgumentNumber() = 0; virtual int OverloadedOnArgumentNumber(int n) = 0; virtual int OverloadedOnArgumentType(int n) = 0; virtual char OverloadedOnArgumentType(char c) = 0; virtual int OverloadedOnConstness() = 0; virtual char OverloadedOnConstness() const = 0; virtual int TypeWithHole(int (*func)()) = 0; virtual int TypeWithComma(const std::map<int, string>& a_map) = 0; #if GTEST_OS_WINDOWS STDMETHOD_(int, CTNullary)() = 0; STDMETHOD_(bool, CTUnary)(int x) = 0; STDMETHOD_(int, CTDecimal)(bool b, char c, short d, int e, long f, // NOLINT float g, double h, unsigned i, char* j, const string& k) = 0; STDMETHOD_(char, CTConst)(int x) const = 0; #endif // GTEST_OS_WINDOWS }; class MockFoo : public FooInterface { public: MockFoo() {} // Makes sure that a mock function parameter can be named. MOCK_METHOD1(VoidReturning, void(int n)); // NOLINT MOCK_METHOD0(Nullary, int()); // NOLINT // Makes sure that a mock function parameter can be unnamed. MOCK_METHOD1(Unary, bool(int)); // NOLINT MOCK_METHOD2(Binary, long(short, int)); // NOLINT MOCK_METHOD10(Decimal, int(bool, char, short, int, long, float, // NOLINT double, unsigned, char*, const string& str)); MOCK_METHOD1(TakesNonConstReference, bool(int&)); // NOLINT MOCK_METHOD1(TakesConstReference, string(const int&)); #ifdef GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS MOCK_METHOD1(TakesConst, bool(const int)); // NOLINT #endif // GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS MOCK_METHOD0(OverloadedOnArgumentNumber, int()); // NOLINT MOCK_METHOD1(OverloadedOnArgumentNumber, int(int)); // NOLINT MOCK_METHOD1(OverloadedOnArgumentType, int(int)); // NOLINT MOCK_METHOD1(OverloadedOnArgumentType, char(char)); // NOLINT MOCK_METHOD0(OverloadedOnConstness, int()); // NOLINT MOCK_CONST_METHOD0(OverloadedOnConstness, char()); // NOLINT MOCK_METHOD1(TypeWithHole, int(int (*)())); // NOLINT MOCK_METHOD1(TypeWithComma, int(const std::map<int, string>&)); // NOLINT #if GTEST_OS_WINDOWS MOCK_METHOD0_WITH_CALLTYPE(STDMETHODCALLTYPE, CTNullary, int()); MOCK_METHOD1_WITH_CALLTYPE(STDMETHODCALLTYPE, CTUnary, bool(int)); MOCK_METHOD10_WITH_CALLTYPE(STDMETHODCALLTYPE, CTDecimal, int(bool b, char c, short d, int e, long f, float g, double h, unsigned i, char* j, const string& k)); MOCK_CONST_METHOD1_WITH_CALLTYPE(STDMETHODCALLTYPE, CTConst, char(int)); #endif // GTEST_OS_WINDOWS private: GTEST_DISALLOW_COPY_AND_ASSIGN_(MockFoo); }; class FunctionMockerTest : public testing::Test { protected: FunctionMockerTest() : foo_(&mock_foo_) {} FooInterface* const foo_; MockFoo mock_foo_; }; // Tests mocking a void-returning function. TEST_F(FunctionMockerTest, MocksVoidFunction) { EXPECT_CALL(mock_foo_, VoidReturning(Lt(100))); foo_->VoidReturning(0); } // Tests mocking a nullary function. TEST_F(FunctionMockerTest, MocksNullaryFunction) { EXPECT_CALL(mock_foo_, Nullary()) .WillOnce(DoDefault()) .WillOnce(Return(1)); EXPECT_EQ(0, foo_->Nullary()); EXPECT_EQ(1, foo_->Nullary()); } // Tests mocking a unary function. TEST_F(FunctionMockerTest, MocksUnaryFunction) { EXPECT_CALL(mock_foo_, Unary(Eq(2))) .Times(2) .WillOnce(Return(true)); EXPECT_TRUE(foo_->Unary(2)); EXPECT_FALSE(foo_->Unary(2)); } // Tests mocking a binary function. TEST_F(FunctionMockerTest, MocksBinaryFunction) { EXPECT_CALL(mock_foo_, Binary(2, _)) .WillOnce(Return(3)); EXPECT_EQ(3, foo_->Binary(2, 1)); } // Tests mocking a decimal function. TEST_F(FunctionMockerTest, MocksDecimalFunction) { EXPECT_CALL(mock_foo_, Decimal(true, 'a', 0, 0, 1L, A<float>(), Lt(100), 5U, NULL, "hi")) .WillOnce(Return(5)); EXPECT_EQ(5, foo_->Decimal(true, 'a', 0, 0, 1, 0, 0, 5, NULL, "hi")); } // Tests mocking a function that takes a non-const reference. TEST_F(FunctionMockerTest, MocksFunctionWithNonConstReferenceArgument) { int a = 0; EXPECT_CALL(mock_foo_, TakesNonConstReference(Ref(a))) .WillOnce(Return(true)); EXPECT_TRUE(foo_->TakesNonConstReference(a)); } // Tests mocking a function that takes a const reference. TEST_F(FunctionMockerTest, MocksFunctionWithConstReferenceArgument) { int a = 0; EXPECT_CALL(mock_foo_, TakesConstReference(Ref(a))) .WillOnce(Return("Hello")); EXPECT_EQ("Hello", foo_->TakesConstReference(a)); } #ifdef GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS // Tests mocking a function that takes a const variable. TEST_F(FunctionMockerTest, MocksFunctionWithConstArgument) { EXPECT_CALL(mock_foo_, TakesConst(Lt(10))) .WillOnce(DoDefault()); EXPECT_FALSE(foo_->TakesConst(5)); } #endif // GMOCK_ALLOWS_CONST_PARAM_FUNCTIONS // Tests mocking functions overloaded on the number of arguments. TEST_F(FunctionMockerTest, MocksFunctionsOverloadedOnArgumentNumber) { EXPECT_CALL(mock_foo_, OverloadedOnArgumentNumber()) .WillOnce(Return(1)); EXPECT_CALL(mock_foo_, OverloadedOnArgumentNumber(_)) .WillOnce(Return(2)); EXPECT_EQ(2, foo_->OverloadedOnArgumentNumber(1)); EXPECT_EQ(1, foo_->OverloadedOnArgumentNumber()); } // Tests mocking functions overloaded on the types of argument. TEST_F(FunctionMockerTest, MocksFunctionsOverloadedOnArgumentType) { EXPECT_CALL(mock_foo_, OverloadedOnArgumentType(An<int>())) .WillOnce(Return(1)); EXPECT_CALL(mock_foo_, OverloadedOnArgumentType(TypedEq<char>('a'))) .WillOnce(Return('b')); EXPECT_EQ(1, foo_->OverloadedOnArgumentType(0)); EXPECT_EQ('b', foo_->OverloadedOnArgumentType('a')); } // Tests mocking functions overloaded on the const-ness of this object. TEST_F(FunctionMockerTest, MocksFunctionsOverloadedOnConstnessOfThis) { EXPECT_CALL(mock_foo_, OverloadedOnConstness()); EXPECT_CALL(Const(mock_foo_), OverloadedOnConstness()) .WillOnce(Return('a')); EXPECT_EQ(0, foo_->OverloadedOnConstness()); EXPECT_EQ('a', Const(*foo_).OverloadedOnConstness()); } #if GTEST_OS_WINDOWS // Tests mocking a nullary function with calltype. TEST_F(FunctionMockerTest, MocksNullaryFunctionWithCallType) { EXPECT_CALL(mock_foo_, CTNullary()) .WillOnce(Return(-1)) .WillOnce(Return(0)); EXPECT_EQ(-1, foo_->CTNullary()); EXPECT_EQ(0, foo_->CTNullary()); } // Tests mocking a unary function with calltype. TEST_F(FunctionMockerTest, MocksUnaryFunctionWithCallType) { EXPECT_CALL(mock_foo_, CTUnary(Eq(2))) .Times(2) .WillOnce(Return(true)) .WillOnce(Return(false)); EXPECT_TRUE(foo_->CTUnary(2)); EXPECT_FALSE(foo_->CTUnary(2)); } // Tests mocking a decimal function with calltype. TEST_F(FunctionMockerTest, MocksDecimalFunctionWithCallType) { EXPECT_CALL(mock_foo_, CTDecimal(true, 'a', 0, 0, 1L, A<float>(), Lt(100), 5U, NULL, "hi")) .WillOnce(Return(10)); EXPECT_EQ(10, foo_->CTDecimal(true, 'a', 0, 0, 1, 0, 0, 5, NULL, "hi")); } // Tests mocking functions overloaded on the const-ness of this object. TEST_F(FunctionMockerTest, MocksFunctionsConstFunctionWithCallType) { EXPECT_CALL(Const(mock_foo_), CTConst(_)) .WillOnce(Return('a')); EXPECT_EQ('a', Const(*foo_).CTConst(0)); } #endif // GTEST_OS_WINDOWS class MockB { public: MockB() {} MOCK_METHOD0(DoB, void()); private: GTEST_DISALLOW_COPY_AND_ASSIGN_(MockB); }; // Tests that functions with no EXPECT_CALL() ruls can be called any // number of times. TEST(ExpectCallTest, UnmentionedFunctionCanBeCalledAnyNumberOfTimes) { { MockB b; } { MockB b; b.DoB(); } { MockB b; b.DoB(); b.DoB(); } } // Tests mocking template interfaces. template <typename T> class StackInterface { public: virtual ~StackInterface() {} // Template parameter appears in function parameter. virtual void Push(const T& value) = 0; virtual void Pop() = 0; virtual int GetSize() const = 0; // Template parameter appears in function return type. virtual const T& GetTop() const = 0; }; template <typename T> class MockStack : public StackInterface<T> { public: MockStack() {} MOCK_METHOD1_T(Push, void(const T& elem)); MOCK_METHOD0_T(Pop, void()); MOCK_CONST_METHOD0_T(GetSize, int()); // NOLINT MOCK_CONST_METHOD0_T(GetTop, const T&()); private: GTEST_DISALLOW_COPY_AND_ASSIGN_(MockStack); }; // Tests that template mock works. TEST(TemplateMockTest, Works) { MockStack<int> mock; EXPECT_CALL(mock, GetSize()) .WillOnce(Return(0)) .WillOnce(Return(1)) .WillOnce(Return(0)); EXPECT_CALL(mock, Push(_)); int n = 5; EXPECT_CALL(mock, GetTop()) .WillOnce(ReturnRef(n)); EXPECT_CALL(mock, Pop()) .Times(AnyNumber()); EXPECT_EQ(0, mock.GetSize()); mock.Push(5); EXPECT_EQ(1, mock.GetSize()); EXPECT_EQ(5, mock.GetTop()); mock.Pop(); EXPECT_EQ(0, mock.GetSize()); } #if GTEST_OS_WINDOWS // Tests mocking template interfaces with calltype. template <typename T> class StackInterfaceWithCallType { public: virtual ~StackInterfaceWithCallType() {} // Template parameter appears in function parameter. STDMETHOD_(void, Push)(const T& value) = 0; STDMETHOD_(void, Pop)() = 0; STDMETHOD_(int, GetSize)() const = 0; // Template parameter appears in function return type. STDMETHOD_(const T&, GetTop)() const = 0; }; template <typename T> class MockStackWithCallType : public StackInterfaceWithCallType<T> { public: MockStackWithCallType() {} MOCK_METHOD1_T_WITH_CALLTYPE(STDMETHODCALLTYPE, Push, void(const T& elem)); MOCK_METHOD0_T_WITH_CALLTYPE(STDMETHODCALLTYPE, Pop, void()); MOCK_CONST_METHOD0_T_WITH_CALLTYPE(STDMETHODCALLTYPE, GetSize, int()); MOCK_CONST_METHOD0_T_WITH_CALLTYPE(STDMETHODCALLTYPE, GetTop, const T&()); private: GTEST_DISALLOW_COPY_AND_ASSIGN_(MockStackWithCallType); }; // Tests that template mock with calltype works. TEST(TemplateMockTestWithCallType, Works) { MockStackWithCallType<int> mock; EXPECT_CALL(mock, GetSize()) .WillOnce(Return(0)) .WillOnce(Return(1)) .WillOnce(Return(0)); EXPECT_CALL(mock, Push(_)); int n = 5; EXPECT_CALL(mock, GetTop()) .WillOnce(ReturnRef(n)); EXPECT_CALL(mock, Pop()) .Times(AnyNumber()); EXPECT_EQ(0, mock.GetSize()); mock.Push(5); EXPECT_EQ(1, mock.GetSize()); EXPECT_EQ(5, mock.GetTop()); mock.Pop(); EXPECT_EQ(0, mock.GetSize()); } #endif // GTEST_OS_WINDOWS #define MY_MOCK_METHODS1_ \ MOCK_METHOD0(Overloaded, void()); \ MOCK_CONST_METHOD1(Overloaded, int(int n)); \ MOCK_METHOD2(Overloaded, bool(bool f, int n)) class MockOverloadedOnArgNumber { public: MockOverloadedOnArgNumber() {} MY_MOCK_METHODS1_; private: GTEST_DISALLOW_COPY_AND_ASSIGN_(MockOverloadedOnArgNumber); }; TEST(OverloadedMockMethodTest, CanOverloadOnArgNumberInMacroBody) { MockOverloadedOnArgNumber mock; EXPECT_CALL(mock, Overloaded()); EXPECT_CALL(mock, Overloaded(1)).WillOnce(Return(2)); EXPECT_CALL(mock, Overloaded(true, 1)).WillOnce(Return(true)); mock.Overloaded(); EXPECT_EQ(2, mock.Overloaded(1)); EXPECT_TRUE(mock.Overloaded(true, 1)); } #define MY_MOCK_METHODS2_ \ MOCK_CONST_METHOD1(Overloaded, int(int n)); \ MOCK_METHOD1(Overloaded, int(int n)); class MockOverloadedOnConstness { public: MockOverloadedOnConstness() {} MY_MOCK_METHODS2_; private: GTEST_DISALLOW_COPY_AND_ASSIGN_(MockOverloadedOnConstness); }; TEST(OverloadedMockMethodTest, CanOverloadOnConstnessInMacroBody) { MockOverloadedOnConstness mock; const MockOverloadedOnConstness* const_mock = &mock; EXPECT_CALL(mock, Overloaded(1)).WillOnce(Return(2)); EXPECT_CALL(*const_mock, Overloaded(1)).WillOnce(Return(3)); EXPECT_EQ(2, mock.Overloaded(1)); EXPECT_EQ(3, const_mock->Overloaded(1)); } TEST(MockFunctionTest, WorksForVoidNullary) { MockFunction<void()> foo; EXPECT_CALL(foo, Call()); foo.Call(); } TEST(MockFunctionTest, WorksForNonVoidNullary) { MockFunction<int()> foo; EXPECT_CALL(foo, Call()) .WillOnce(Return(1)) .WillOnce(Return(2)); EXPECT_EQ(1, foo.Call()); EXPECT_EQ(2, foo.Call()); } TEST(MockFunctionTest, WorksForVoidUnary) { MockFunction<void(int)> foo; EXPECT_CALL(foo, Call(1)); foo.Call(1); } TEST(MockFunctionTest, WorksForNonVoidBinary) { MockFunction<int(bool, int)> foo; EXPECT_CALL(foo, Call(false, 42)) .WillOnce(Return(1)) .WillOnce(Return(2)); EXPECT_CALL(foo, Call(true, Ge(100))) .WillOnce(Return(3)); EXPECT_EQ(1, foo.Call(false, 42)); EXPECT_EQ(2, foo.Call(false, 42)); EXPECT_EQ(3, foo.Call(true, 120)); } TEST(MockFunctionTest, WorksFor10Arguments) { MockFunction<int(bool a0, char a1, int a2, int a3, int a4, int a5, int a6, char a7, int a8, bool a9)> foo; EXPECT_CALL(foo, Call(_, 'a', _, _, _, _, _, _, _, _)) .WillOnce(Return(1)) .WillOnce(Return(2)); EXPECT_EQ(1, foo.Call(false, 'a', 0, 0, 0, 0, 0, 'b', 0, true)); EXPECT_EQ(2, foo.Call(true, 'a', 0, 0, 0, 0, 0, 'b', 1, false)); } } // namespace gmock_generated_function_mockers_test } // namespace testing
{ "pile_set_name": "Github" }
# Copyright 2020 Northern.tech AS # This file is part of CFEngine 3 - written and maintained by Northern.tech AS. # This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 3. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA # To the extent this program is licensed as part of the Enterprise # versions of Cfengine, the applicable Commercial Open Source License # (COSL) may apply to this file if you as a licensee so wish it. See # included file COSL.txt. body common control { bundlesequence => { "addpasswd" }; } bundle agent addpasswd { vars: # want to set these values by the names of their array keys "pwd[mark]" string => "mark:x:1000:100:Mark Burgess:/home/mark:/bin/bash"; "pwd[fred]" string => "fred:x:1001:100:Right Said:/home/fred:/bin/bash"; "pwd[jane]" string => "jane:x:1002:100:Jane Doe:/home/jane:/bin/bash"; files: "/tmp/passwd" create => "true", edit_line => append_users_starting("addpasswd.pwd"); } ############################################################ # Library stuff ############################################################ bundle edit_line append_users_starting(v) { vars: "index" slist => getindices("$(v)"); classes: "add_$(index)" not => userexists("$(index)"); insert_lines: "$($(v)[$(index)])" ifvarclass => "add_$(index)"; } ############################################################ bundle edit_line append_groups_starting(v) { vars: "index" slist => getindices("$(v)"); classes: "add_$(index)" not => groupexists("$(index)"); insert_lines: "$($(v)[$(index)])" ifvarclass => "add_$(index)"; }
{ "pile_set_name": "Github" }
# The Gamebot The gamebot is responsible for advancing the game. The gamebot is responsible for two tasks: 1) Determine which scripts need to be run during the current tick. 2) Process the data associated with the previous tick and update the scores of the teams.
{ "pile_set_name": "Github" }
IO.print(1 + 2) // expect: 3 IO.print(12.34 + 0.13) // expect: 12.47 IO.print(3 + 5 + 2) // expect: 10
{ "pile_set_name": "Github" }
<?php namespace Tests; use Illuminate\Support\Facades\Hash; use Illuminate\Contracts\Console\Kernel; trait CreatesApplication { /** * Creates the application. * * @return \Illuminate\Foundation\Application */ public function createApplication() { $app = require __DIR__.'/../bootstrap/app.php'; $app->make(Kernel::class)->bootstrap(); // Speed up encryption for tests Hash::setRounds(5); return $app; } }
{ "pile_set_name": "Github" }
// Copyright 2018 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package main // This program will crash. // We want the stack trace to include the C++ functions, // even though we compile with -g0. /* #cgo CXXFLAGS: -g0 -O0 extern int cxxFunction1(void); */ import "C" func init() { register("CrashTracebackNodebug", CrashTracebackNodebug) } func CrashTracebackNodebug() { C.cxxFunction1() }
{ "pile_set_name": "Github" }
/** * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER. * * Copyright (c) 2007 Sun Microsystems Inc. All Rights Reserved * * The contents of this file are subject to the terms * of the Common Development and Distribution License * (the License). You may not use this file except in * compliance with the License. * * You can obtain a copy of the License at * https://opensso.dev.java.net/public/CDDLv1.0.html or * opensso/legal/CDDLv1.0.txt * See the License for the specific language governing * permission and limitations under the License. * * When distributing Covered Code, include this CDDL * Header Notice in each file and include the License file * at opensso/legal/CDDLv1.0.txt. * If applicable, add the following below the CDDL Header, * with the fields enclosed by brackets [] replaced by * your own identifying information: * "Portions Copyrighted [year] [name of copyright owner]" * * $Id: PolicyNormalAddViewBean.java,v 1.2 2008/06/25 05:43:03 qcheng Exp $ * */ package com.sun.identity.console.policy; import com.iplanet.jato.model.ModelControlException; import com.iplanet.jato.view.event.RequestInvocationEvent; import com.sun.identity.console.base.model.AMAdminConstants; import com.sun.identity.console.base.model.AMConsoleException; import com.sun.identity.console.policy.model.CachedPolicy; import com.sun.identity.console.policy.model.PolicyModel; import com.sun.web.ui.model.CCPageTitleModel; import com.sun.web.ui.view.alert.CCAlert; public class PolicyNormalAddViewBean extends PolicyNormalViewBeanBase { public static final String DEFAULT_DISPLAY_URL = "/console/policy/PolicyNormalAdd.jsp"; /** * Creates a policy creation view bean. */ public PolicyNormalAddViewBean() { super("PolicyNormalAdd", DEFAULT_DISPLAY_URL); } protected void createPageTitleModel() { ptModel = new CCPageTitleModel( getClass().getClassLoader().getResourceAsStream( "com/sun/identity/console/twoBtnsPageTitle.xml")); ptModel.setValue("button1", "button.ok"); ptModel.setValue("button2", "button.cancel"); } /** * Handles cancel request. * * @param event Request invocation event */ public void handleButton2Request(RequestInvocationEvent event) { backTrail(); forwardToPolicyViewBean(); } /** * Handles create policy request. * * @param event Request invocation event */ public void handleButton1Request(RequestInvocationEvent event) throws ModelControlException { String currentRealm = (String)getPageSessionAttribute( AMAdminConstants.CURRENT_REALM); try { reconstructPolicy(); CachedPolicy cachedPolicy = getCachedPolicy(); PolicyModel model = (PolicyModel)getModel(); String name = cachedPolicy.getPolicy().getName(); if (name.equals(model.getLocalizedString("policy.create.name"))) { setInlineAlertMessage(CCAlert.TYPE_ERROR, "message.error", model.getLocalizedString("policy.name.change")); forwardTo(); } else { try { model.createPolicy(currentRealm, cachedPolicy.getPolicy()); backTrail(); forwardToPolicyViewBean(); } catch (AMConsoleException e) { setInlineAlertMessage(CCAlert.TYPE_ERROR, "message.error", e.getMessage()); forwardTo(); } } } catch (AMConsoleException e) { redirectToStartURL(); } } protected String getPropertyXMLFileName(boolean readonly) { return "com/sun/identity/console/propertyPMPolicyNormal.xml"; } protected String getBreadCrumbDisplayName() { return "breadcrumbs.addPolicy"; } protected boolean startPageTrail() { return false; } protected boolean isProfilePage() { return false; } }
{ "pile_set_name": "Github" }
#!/bin/bash -ex [ -n "$WORKSPACE" ] [ -n "$MOZ_OBJDIR" ] [ -n "$GECKO_DIR" ] HAZARD_SHELL_OBJDIR=$WORKSPACE/obj-haz-shell JS_SRCDIR=$GECKO_DIR/js/src ANALYSIS_SRCDIR=$JS_SRCDIR/devtools/rootAnalysis export CC="$TOOLTOOL_DIR/gcc/bin/gcc" export CXX="$TOOLTOOL_DIR/gcc/bin/g++" PYTHON=python2.7 if ! which $PYTHON; then PYTHON=python fi function check_commit_msg () { ( set +e; if [[ -n "$AUTOMATION" ]]; then hg --cwd "$GECKO_DIR" log -r. --template '{desc}\n' | grep -F -q -- "$1" else echo -- "$SCRIPT_FLAGS" | grep -F -q -- "$1" fi ) } if check_commit_msg "--dep"; then HAZ_DEP=1 fi function build_js_shell () { # Must unset MOZ_OBJDIR and MOZCONFIG here to prevent the build system from # inferring that the analysis output directory is the current objdir. We # need a separate objdir here to build the opt JS shell to use to run the # analysis. ( unset MOZ_OBJDIR unset MOZCONFIG ( cd $JS_SRCDIR; autoconf-2.13 ) if [[ -z "$HAZ_DEP" ]]; then [ -d $HAZARD_SHELL_OBJDIR ] && rm -rf $HAZARD_SHELL_OBJDIR fi mkdir -p $HAZARD_SHELL_OBJDIR || true cd $HAZARD_SHELL_OBJDIR $JS_SRCDIR/configure --enable-optimize --disable-debug --enable-ctypes --enable-nspr-build --without-intl-api --with-ccache make -j4 ) # Restore MOZ_OBJDIR and MOZCONFIG } function configure_analysis () { local analysis_dir analysis_dir="$1" if [[ -z "$HAZ_DEP" ]]; then [ -d "$analysis_dir" ] && rm -rf "$analysis_dir" fi mkdir -p "$analysis_dir" || true ( cd "$analysis_dir" cat > defaults.py <<EOF js = "$HAZARD_SHELL_OBJDIR/dist/bin/js" analysis_scriptdir = "$ANALYSIS_SRCDIR" objdir = "$MOZ_OBJDIR" source = "$GECKO_DIR" sixgill = "$TOOLTOOL_DIR/sixgill/usr/libexec/sixgill" sixgill_bin = "$TOOLTOOL_DIR/sixgill/usr/bin" EOF cat > run-analysis.sh <<EOF #!/bin/sh if [ \$# -eq 0 ]; then set gcTypes fi export ANALYSIS_SCRIPTDIR="$ANALYSIS_SRCDIR" exec "$ANALYSIS_SRCDIR/analyze.py" "\$@" EOF chmod +x run-analysis.sh ) } function run_analysis () { local analysis_dir analysis_dir="$1" local build_type build_type="$2" if [[ -z "$HAZ_DEP" ]]; then [ -d $MOZ_OBJDIR ] && rm -rf $MOZ_OBJDIR fi ( cd "$analysis_dir" $PYTHON "$ANALYSIS_SRCDIR/analyze.py" --buildcommand="$GECKO_DIR/testing/mozharness/scripts/spidermonkey/build.${build_type}" ) } function grab_artifacts () { local analysis_dir analysis_dir="$1" local artifacts artifacts="$2" ( cd "$analysis_dir" ls -lah # Do not error out if no files found shopt -s nullglob set +e for f in *.txt *.lst; do gzip -9 -c "$f" > "${artifacts}/$f.gz" done # Check whether the user requested .xdb file upload in the top commit comment if check_commit_msg "--upload-xdbs"; then HAZ_UPLOAD_XDBS=1 fi if [ -n "$HAZ_UPLOAD_XDBS" ]; then for f in *.xdb; do bzip2 -c "$f" > "${artifacts}/$f.bz2" done fi ) } function check_hazards () { ( set +e NUM_HAZARDS=$(grep -c 'Function.*has unrooted.*live across GC call' "$1"/rootingHazards.txt) NUM_UNSAFE=$(grep -c '^Function.*takes unsafe address of unrooted' "$1"/refs.txt) NUM_UNNECESSARY=$(grep -c '^Function.* has unnecessary root' "$1"/unnecessary.txt) set +x echo "TinderboxPrint: rooting hazards<br/>$NUM_HAZARDS" echo "TinderboxPrint: unsafe references to unrooted GC pointers<br/>$NUM_UNSAFE" echo "TinderboxPrint: unnecessary roots<br/>$NUM_UNNECESSARY" if [ $NUM_HAZARDS -gt 0 ]; then echo "TEST-UNEXPECTED-FAIL $NUM_HAZARDS hazards detected" >&2 echo "TinderboxPrint: documentation<br/><a href='https://wiki.mozilla.org/Javascript:Hazard_Builds'>static rooting hazard analysis failures</a>, visit \"Inspect Task\" link for hazard details" exit 1 fi ) }
{ "pile_set_name": "Github" }