max_stars_count (int64, 301–224k) | text (string, lengths 6–1.05M) | token_count (int64, 3–727k) |
---|---|---|
611 | /**
* Copyright (c) 2019 Kylart <<EMAIL>>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* Coming right from https://github.com/nodejs/node-addon-api/issues/269#issuecomment-455580129
*/
#ifdef _MSC_VER
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#include <windows.h>
#include <Shlwapi.h>
#include <delayimp.h>
#include <string.h>
#pragma comment(lib, "Shlwapi.lib")
static FARPROC WINAPI load_exe_hook(unsigned int event, DelayLoadInfo* info) {
if (event != dliNotePreLoadLibrary)
return NULL;
if (_stricmp(info->szDll, "iojs.exe") != 0 &&
_stricmp(info->szDll, "node.exe") != 0 &&
_stricmp(info->szDll, "node.dll") != 0)
return NULL;
// Get a handle to the current process executable.
HMODULE processModule = GetModuleHandle(NULL);
// Get the path to the executable.
TCHAR processPath[_MAX_PATH]; // NOLINT
GetModuleFileName(processModule, processPath, _MAX_PATH);
// Get the name of the current executable.
LPSTR processName = PathFindFileName(processPath);
// If the current process is node or iojs, then just return the process module.
if (_stricmp(processName, "node.exe") == 0 ||
_stricmp(processName, "iojs.exe") == 0) {
return (FARPROC)processModule;
}
// If it is another process, attempt to load 'node.dll' from the same directory.
PathRemoveFileSpec(processPath);
PathAppend(processPath, "node.dll");
HMODULE nodeDllModule = GetModuleHandle(processPath);
if (nodeDllModule != NULL) {
// This application has a node.dll in the same directory as the executable, use that.
return (FARPROC)nodeDllModule;
}
// Fall back to the current executable, which must statically link to node.lib.
return (FARPROC)processModule;
}
// See https://docs.microsoft.com/en-us/cpp/build/reference/notification-hooks
decltype(__pfnDliNotifyHook2) __pfnDliNotifyHook2 = load_exe_hook;
#endif
| 786 |
1,872 | # Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
##
## Test weakref
##
## * Since the IronPython GC heavily differs from the CPython GC (absence of reference counting),
## the CPython unit tests cannot fully be made to pass on IronPython without modification
##
## * Comments below in double quotes are from the Python standard library documentation.
##
## * Issues of the current implementation of _weakref.cs:
##
## - weakref finalization callbacks are run in the CLR finalizer thread.
## This is likely to cause data races in user code.
## - WeakRefTracker.cs code and internal state handling most likely is not
## implemented in a thread-safe way.
##
import gc
import weakref
from iptest import IronPythonTestCase, run_test
class C(object):
def __init__(self, value=0):
self.value = value
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return isinstance(other, C) and self.value == other.value
def __ne__(self, other):
return not self.__eq__(other)
class WeakrefTest(IronPythonTestCase):
def _create_weakrefs(self, o, count, cb = None):
# force creation of different instances for the same target
if not cb and count > 1:
cb = lambda r: None
if count==1:
return weakref.ref(o, cb)
elif count==2:
r1, r2 = weakref.ref(o, cb), weakref.ref(o, cb)
self.assertTrue(r1 is not r2)
return r1, r2
else:
raise Exception("not implemented")
def test_ref_callable(self):
# "if the referent is no longer alive, calling the reference object will cause None to
# be returned"
o = C("a")
r = self._create_weakrefs(o, 1)
# for reasons stated in _create_weakrefs(), we cannot test instance equality
self.assertTrue(r().value == "a")
del o
gc.collect()
self.assertTrue(r() is None)
def test_ref_hashable(self):
# "Weak references are hashable if the object is hashable. They will maintain their hash value
# even after the object was deleted. If hash() is called the first time only after the object
# was deleted, the call will raise TypeError."
o = C("a")
r1, r2 = self._create_weakrefs(o, 2)
self.assertTrue(hash(r1) == hash("a"))
del o
gc.collect()
self.assertTrue(r1() is None)
self.assertTrue(r2() is None)
self.assertTrue(hash(r1) == hash("a"))
self.assertRaises(TypeError, lambda: hash(r2))
def test_ref_equality(self):
# "If the referents are still alive, two references have the same equality relationship as
# their referents (regardless of the callback). If either referent has been deleted, the
# references are equal only if the reference objects are the same object."
o, o2 = C("a"), C("a")
r1, r2 = self._create_weakrefs(o, 2)
r3 = self._create_weakrefs(o2, 1)
self.assertTrue(r1 == r2)
self.assertTrue(r1 == r3)
del o, o2
gc.collect()
self.assertTrue(r1() is None)
self.assertTrue(r3() is None)
self.assertTrue(r1 != r2)
self.assertTrue(r1 != r3)
run_test(__name__)
| 1,365 |
335 | <gh_stars>100-1000
{
"word": "Indubitably",
"definitions": [
"that cannot be doubted; patently evident or certain; unquestionable."
],
"parts-of-speech": "Adjective"
} | 67 |
314 | package lemongrenade.core.database.mongo;
import com.mongodb.DBCollection;
import com.mongodb.DBCursor;
import com.mongodb.WriteResult;
import lemongrenade.core.models.LGdbValue;
import org.bson.types.ObjectId;
import org.json.JSONArray;
import org.json.JSONObject;
import org.mongodb.morphia.Datastore;
import org.mongodb.morphia.dao.BasicDAO;
import org.mongodb.morphia.query.Query;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
public class LGdbValueDAOImpl extends BasicDAO<LGdbValue, ObjectId>
implements LGdbValueDAO {
public LGdbValueDAOImpl(Class<LGdbValue> entityClass, Datastore ds) {super(entityClass, ds);}
public ArrayList<String> getAllJobIDsForDbValueKey(String dbValueKey) {
List<LGdbValue> lGdbValues = getAll();
ArrayList<String> jobIDs = new ArrayList<>();
for (LGdbValue lGdbValue : lGdbValues) {
if (lGdbValue.getDbValues().containsKey(dbValueKey)) {
jobIDs.add(lGdbValue.getJobId());
}
System.out.println(lGdbValue.toJson());
}
return jobIDs;
}
public ArrayList<LGdbValue> getAllJobsThatHaveDbValueKey(String dbValueKey) {
ArrayList<String> allJobIdsWithDbValueKey = getAllJobIDsForDbValueKey(dbValueKey);
ArrayList<LGdbValue> allJobs = new ArrayList<>();
for (String jobId: allJobIdsWithDbValueKey) {
allJobs.add(getDbValuesByJobIdandKey(jobId, dbValueKey));
}
return allJobs;
}
public JSONArray getAllJobsThatHaveDbValueKeyJSONArray(String dbValueKey) {
ArrayList<LGdbValue> lGdbValues = getAllJobsThatHaveDbValueKey(dbValueKey);
JSONArray allJobs = new JSONArray();
for (LGdbValue lGdbValue : lGdbValues) {
lGdbValue = getDbValuesByJobIdandKey(lGdbValue.getJobId(),dbValueKey);
allJobs.put(lGdbValue.toJson());
}
return allJobs;
}
public LGdbValue getDbValuesByJobIdandKey(String jobId, String key) {
Query<LGdbValue> query = createQuery().field("_id").equal(jobId);
LGdbValue mylGdbValue = query.get(), lGdbValue = null;
if (null == mylGdbValue) {
return mylGdbValue;
}
if (!mylGdbValue.containsKey(key.toLowerCase())){
return lGdbValue;
}
ArrayList<String> valuesByKey = mylGdbValue.getDbValues(key.toLowerCase());
lGdbValue = new LGdbValue(jobId,key.toLowerCase(),valuesByKey);
return lGdbValue;
}
public void removeAllDocuments () {
DBCollection lgDBValue = createQuery().getCollection();
DBCursor cursor = lgDBValue.find();
while (cursor.hasNext()) {
lgDBValue.remove(cursor.next());
}
}
public List<LGdbValue> getAll() {
return createQuery().asList();
}
public LGdbValue getDbValuesByJobId(String jobId) {
Query<LGdbValue> query = createQuery().field("_id").equal(jobId);
return query.get();
}
/**
* @param jobId String for job ID
* @param key String for key
* @param dbValue String for db value
*/
public void addDbValueToJob(String jobId, String key, String dbValue) {
MongoDBStore mongoDBStore = new MongoDBStore();
mongoDBStore.appendToDBValues("dbValues", jobId, key.toLowerCase(), dbValue);
}
/**
* @param jobId String for job ID
* @param key String for key
* @param dbValues String for db values
* TODO: Write a function in MongoDBStore to append multiple values in one write
*/
public void addDbValueToJob(String jobId, String key, JSONArray dbValues) {
MongoDBStore mongoDBStore = new MongoDBStore();
for (Object dbValue :dbValues) {
mongoDBStore.appendToDBValues("dbValues", jobId, key.toLowerCase(), dbValue);
}
}
/**
*
* @param jobId String for job ID
* @return WriteResult
*/
@Override public WriteResult delete(String jobId) {
LGdbValue LGdbValue = getDbValuesByJobId(jobId);
return super.delete(LGdbValue);
}
/**
* @param LGdbValue LGdbValue
*/
public void saveDbValues(LGdbValue LGdbValue) {
getDatastore().save(LGdbValue);
}
/**
* main
* @param args Standard main args, unused here.
*/
public static void main(String[] args) {
int z = -1;
String key = "VALUES";
LGdbValueDAOImpl dao;
MorphiaService ms;
ms = new MorphiaService();
dao = new LGdbValueDAOImpl(LGdbValue.class, ms.getDatastore());
long start = -1;
LGdbValue lookup = null;
long seconds = -1;
JSONObject lookupJSON = null;
dao.removeAllDocuments();
String jobId = UUID.randomUUID().toString();
HashMap<String,ArrayList<String>> testValueJO = new HashMap<>();
ArrayList<String> values = new ArrayList<>();
System.out.println("Test 1: Add values to DB and read them back from DB.");
z = 4;
for (int i = 0; i < 4; i++){
values.add(UUID.randomUUID().toString());
}
testValueJO.put(key.toLowerCase(), values);
LGdbValue testValue = new LGdbValue(jobId, testValueJO);
System.out.println("toJson = " + testValue.toJson());
dao.save(testValue);
System.out.println("Added new Query Job " + testValue.toString());
start = System.currentTimeMillis();
lookup = dao.getDbValuesByJobIdandKey(jobId, key);
seconds = (System.currentTimeMillis() - start) ;
System.out.println("Lookup "+lookup.toString());
lookupJSON = lookup.toJson();
System.out.println("toJson = " + lookupJSON.toString());
System.out.println("Query complete. Seconds :" + seconds);
System.out.println("Test 1 is complete.\n\n");
z = 4;
System.out.println("Test 2: Adding new VALUE ");
z = 4;
dao.addDbValueToJob(jobId, key, UUID.randomUUID().toString());
dao.addDbValueToJob(jobId,"cat", UUID.randomUUID().toString());
dao.addDbValueToJob(jobId,"dog", UUID.randomUUID().toString());
dao.addDbValueToJob(jobId,"cat", UUID.randomUUID().toString());
dao.addDbValueToJob(jobId,"dog", UUID.randomUUID().toString());
start = System.currentTimeMillis();
lookup = dao.getDbValuesByJobIdandKey(jobId, key);
seconds = (System.currentTimeMillis() - start) ;
System.out.println("Lookup "+lookup.toString());
lookupJSON = lookup.toJson();
System.out.println("toJson = "+lookupJSON.toString());
System.out.println("Query complete. Seconds :" + seconds);
System.out.println("Test 2 is complete.\n\n");
z = 4;
System.out.println("Test 3:test adding array of VALUES");
z = 4;
JSONArray testValues = new JSONArray();
for (int i = 0; i < 4; i++) {
testValues.put(UUID.randomUUID().toString());
}
jobId = UUID.randomUUID().toString();
dao.addDbValueToJob(jobId,key, testValues);
start = System.currentTimeMillis();
lookup = dao.getDbValuesByJobIdandKey(jobId,key);
seconds = (System.currentTimeMillis() - start) ;
System.out.println("Lookup "+lookup.toString());
lookupJSON = lookup.toJson();
System.out.println("toJson = "+lookupJSON.toString());
System.out.println("Query complete. Seconds :" + seconds);
System.out.println("Test 3 is complete.\n\n");
z = 4;
System.out.println("Test 4: test adding array of different types");
z = 4;
JSONArray blobs = new JSONArray();
JSONArray cats = new JSONArray();
JSONArray BIGDOGS = new JSONArray();
JSONArray elephants = new JSONArray();
for (int i = 0; i < 4; i++) {
blobs.put(UUID.randomUUID().toString());
cats.put(UUID.randomUUID().toString());
BIGDOGS.put(UUID.randomUUID().toString());
elephants.put(UUID.randomUUID().toString());
}
dao.addDbValueToJob(jobId,"blobs", blobs);
dao.addDbValueToJob(jobId,"cats", cats);
dao.addDbValueToJob(jobId,"BIGDOGS", BIGDOGS);
dao.addDbValueToJob(jobId,"elephants", elephants);
start = System.currentTimeMillis();
lookup = dao.getDbValuesByJobId(jobId);
seconds = (System.currentTimeMillis() - start) ;
System.out.println("Lookup "+lookup.toString());
lookupJSON = lookup.toJson();
System.out.println("toJson = "+lookupJSON.toString());
System.out.println("Query complete. Seconds :" + seconds);
System.out.println("Test 4 is complete.\n\n");
z = 4;
System.out.println("Test 5: test updating and retrieving one data value type.");
z = 4;
dao.addDbValueToJob(jobId, "BIGDOGS", "44444");
LGdbValue lookupDogs = dao.getDbValuesByJobIdandKey(jobId, "BIGDOGS");
System.out.println(lookupDogs.toJson().toString());
System.out.println("Test 5: test complete.");
z = 4;
System.out.println("Test 6: Test retrieving everything");
z = 4;
LGdbValue lookupAll = dao.getDbValuesByJobId(jobId);
System.out.println(lookupAll.toJson().toString());
System.out.println("Test 6: test complete.");
z = 4;
System.out.println("Test 7: get all jobs for a particular dbValue (value)");
//add a non-value JobId to test that it does not end up on our value list.
jobId = UUID.randomUUID().toString();
key = "DAWGPOUND";
HashMap<String,ArrayList<String>> testdbValueJO = new HashMap<>();
ArrayList<String> dbValues = new ArrayList<>();
for (int i = 0; i < 4; i++){
dbValues.add(UUID.randomUUID().toString());
}
testdbValueJO.put(key.toLowerCase(), dbValues);
LGdbValue testdbValue = new LGdbValue(jobId, testdbValueJO);
dao.save(testdbValue);
//This tests the function that returns all jobs that have dbValues
ArrayList<LGdbValue> i = dao.getAllJobsThatHaveDbValueKey("values");
for (LGdbValue lGdbValue : i) {
System.out.println("i == "+lGdbValue.getJobId());
}
System.out.println("Test 7: Done.");
System.out.println("Test 8: Get JSON that has JOBids/values ");
JSONArray joy = dao.getAllJobsThatHaveDbValueKeyJSONArray("values");
System.out.println(joy);
System.out.println("Test 8: Done.");
}
} | 4,658 |
852 | import FWCore.ParameterSet.Config as cms
import copy
from SimG4Core.Application.g4SimHits_cfi import *
# Detector simulation (Geant4-based)
trackingMaterialProducer = copy.deepcopy(g4SimHits)
trackingMaterialProducer.Generator.HepMCProductLabel = 'generatorSmeared'
#trackingMaterialProducer.Physics.type = 'SimG4Core/Physics/DummyPhysics'
#trackingMaterialProducer.Physics.DummyEMPhysics = True
#trackingMaterialProducer.Physics.CutsPerRegion = False
trackingMaterialProducer.UseMagneticField = False
trackingMaterialProducer.Watchers = cms.VPSet(cms.PSet(
TrackingMaterialProducer = cms.PSet(
PrimaryTracksOnly = cms.bool(True),
#The file to direct the HGCal volumes z position
txtOutFile = cms.untracked.string('VolumesZPosition.txt'),
#At the beginning of each track, the track will first hit an HGCal volume and the
#upper z boundary of that volume will be saved. The lower boundary of the first
#volume is therefore never saved. Here we give the lower boundary of the first volume.
#It can be found by first running not on the 'HGCal' volume below but
#on 'CALOECTSRear', which at the time of this writing contains
#HGCalService, HGCal and the thermal screen. You should run Fireworks to
#check whether these naming conventions and volumes are still valid in the future.
#Then check the VolumesZPosition.txt file to see where CEService ends and
#put that number in hgcalzfront below. Keep in mind to run on the desired volume here:
#https://github.com/cms-sw/cmssw/blob/master/SimTracker/TrackerMaterialAnalysis/plugins/TrackingMaterialProducer.cc#L95
#and to replace the volume name of the material first hit at the file creation line:
#https://github.com/cms-sw/cmssw/blob/master/SimTracker/TrackerMaterialAnalysis/plugins/TrackingMaterialProducer.cc#L159-L168
hgcalzfront = cms.double(3210.5),
SelectedVolumes = cms.vstring('HGCal')#CALOECTSRear HGCal
),
type = cms.string('TrackingMaterialProducer')
))
| 710 |
1,118 | <reponame>harri-codes/Focus<filename>vendor/rinvex/countries/resources/translations/ht.json
{"deu":{"common":"Haiti","official":"Republik Haiti"},"fin":{"common":"Haiti","official":"Haitin tasavalta"},"fra":{"common":"Haïti","official":"République d'Haïti"},"hrv":{"common":"Haiti","official":"Republika Haiti"},"ita":{"common":"Haiti","official":"Repubblica di Haiti"},"jpn":{"common":"ハイチ","official":"ハイチ共和国"},"nld":{"common":"Haïti","official":"Republiek Haïti"},"por":{"common":"Haiti","official":"República do Haiti"},"rus":{"common":"Гаити","official":"Республика Гаити"},"spa":{"common":"Haiti","official":"República de Haití"}}
| 238 |
634 |
/*
* Copyright 2000-2011 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.find.impl;
import org.jetbrains.annotations.NonNls;
public interface HelpID {
@NonNls
String FIND_PACKAGE_USAGES = "reference.dialogs.findUsages.package";
@NonNls
String FIND_CLASS_USAGES = "reference.dialogs.findUsages.class";
@NonNls
String FIND_METHOD_USAGES = "reference.dialogs.findUsages.method";
@NonNls
String FIND_OTHER_USAGES = "reference.dialogs.findUsages.other";
@NonNls
String FIND_THROW_USAGES = "reference.dialogs.findUsages.throwUsages";
@NonNls
String FIND_IN_PROJECT = "find.findInProject";
@NonNls
String REPLACE_IN_PROJECT = "find.findInProject";
@NonNls
String FIND_OPTIONS = "find.findOptions";
@NonNls
String REPLACE_OPTIONS = "find.replaceOptions";
@NonNls
String FIND_IN_PATH = "reference.dialogs.findinpath";
@NonNls
String REPLACE_IN_PATH = "reference.dialogs.findinpath";
@NonNls
String FIND_IN_EDITOR = "ixFindText";
@NonNls
String REPLACE_IN_EDITOR = "Replace_the_found_target";
}
| 541 |
852 | import FWCore.ParameterSet.Config as cms
from DQMOffline.JetMET.dataCertificationJetMET_cfi import *
dataCertificationJetMETSequence = cms.Sequence(qTesterJet + qTesterMET + dataCertificationJetMET)
dataCertificationJetMETSequenceHI = cms.Sequence(qTesterJet + qTesterMET + dataCertificationJetMETHI)
from Configuration.ProcessModifiers.pp_on_AA_cff import pp_on_AA
pp_on_AA.toReplaceWith( dataCertificationJetMETSequence, dataCertificationJetMETSequenceHI )
| 153 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-v67r-3f67-cvcj",
"modified": "2022-05-01T01:56:48Z",
"published": "2022-05-01T01:56:48Z",
"aliases": [
"CVE-2005-1255"
],
"details": "Multiple stack-based buffer overflows in the IMAP server in IMail 8.12 and 8.13 in Ipswitch Collaboration Suite (ICS), and other versions before IMail Server 8.2 Hotfix 2, allow remote attackers to execute arbitrary code via a LOGIN command with (1) a long username argument or (2) a long username argument that begins with a special character.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2005-1255"
},
{
"type": "WEB",
"url": "http://securitytracker.com/id?1014047"
},
{
"type": "WEB",
"url": "http://www.idefense.com/application/poi/display?id=243&type=vulnerabilities"
},
{
"type": "WEB",
"url": "http://www.ipswitch.com/support/imail/releases/imail_professional/im82hf2.html"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/bid/13727"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 539 |
2,607 | package psymbolic.valuesummary;
/**
* Represents the guarded value in a value summary where the value is of Type T
* @param <T> Type of the value in the guarded value
*/
public class GuardedValue<T> {
private final Guard guard;
private final T value;
public Guard getGuard() { return guard; }
public T getValue() { return value; }
public GuardedValue(T value, Guard guard) {
this.value = value;
this.guard = guard;
}
}
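// Illustrative usage (not part of the original file): a GuardedValue pairs a concrete
// value with the Guard (path condition) under which it holds in a value summary. The
// Guard instance below is assumed to come from the surrounding symbolic-execution code;
// only the constructor and getters defined above are used.
//
//     Guard pathCondition = ...;                                  // condition under which the value is valid
//     GuardedValue<Integer> gv = new GuardedValue<>(42, pathCondition);
//     Integer v = gv.getValue();                                  // 42
//     Guard g = gv.getGuard();                                    // pathCondition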
| 155 |
903 | /*
* Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package software.amazon.smithy.aws.cloudformation.schema.model;
import java.util.ArrayList;
import java.util.List;
import software.amazon.smithy.jsonschema.Schema;
import software.amazon.smithy.model.node.Node;
import software.amazon.smithy.model.node.ObjectNode;
import software.amazon.smithy.model.node.ToNode;
import software.amazon.smithy.utils.ListUtils;
import software.amazon.smithy.utils.SmithyBuilder;
import software.amazon.smithy.utils.ToSmithyBuilder;
/**
* Data class representing a CloudFormation Resource Schema's property.
*
* @see <a href="https://docs.aws.amazon.com/cloudformation-cli/latest/userguide/resource-type-schema.html#schema-properties-properties">Resource Properties Definition</a>
* @see <a href="https://github.com/aws-cloudformation/cloudformation-cli/blob/master/src/rpdk/core/data/schema/provider.definition.schema.v1.jsonL74">Resource Type Properties JSON Schema</a>
*/
public final class Property implements ToNode, ToSmithyBuilder<Property> {
private final boolean insertionOrder;
private final List<String> dependencies;
private final Schema schema;
// Other reserved property names in definition but not in the validation
// JSON Schema, so not defined in code:
// * readOnly
// * writeOnly
private Property(Builder builder) {
this.insertionOrder = builder.insertionOrder;
this.dependencies = ListUtils.copyOf(builder.dependencies);
this.schema = builder.schema;
}
@Override
public Node toNode() {
ObjectNode.Builder builder = schema.toNode().expectObjectNode().toBuilder();
// Only serialize these properties if set to non-defaults.
if (insertionOrder) {
builder.withMember("insertionOrder", Node.from(insertionOrder));
}
if (!dependencies.isEmpty()) {
builder.withMember("dependencies", Node.fromStrings(dependencies));
}
return builder.build();
}
@Override
public SmithyBuilder<Property> toBuilder() {
return builder()
.insertionOrder(insertionOrder)
.dependencies(dependencies)
.schema(schema);
}
public static Builder builder() {
return new Builder();
}
public boolean isInsertionOrder() {
return insertionOrder;
}
public List<String> getDependencies() {
return dependencies;
}
public Schema getSchema() {
return schema;
}
public static final class Builder implements SmithyBuilder<Property> {
private boolean insertionOrder = false;
private final List<String> dependencies = new ArrayList<>();
private Schema schema;
private Builder() {}
@Override
public Property build() {
return new Property(this);
}
public Builder insertionOrder(boolean insertionOrder) {
this.insertionOrder = insertionOrder;
return this;
}
public Builder dependencies(List<String> dependencies) {
this.dependencies.clear();
this.dependencies.addAll(dependencies);
return this;
}
public Builder addDependency(String dependency) {
this.dependencies.add(dependency);
return this;
}
public Builder clearDependencies() {
this.dependencies.clear();
return this;
}
public Builder schema(Schema schema) {
this.schema = schema;
return this;
}
}
}
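// Illustrative usage (not part of the original file): building a Property with the
// builder shown above. The Schema instance and the "OtherProperty" dependency name are
// assumptions for the sketch; only methods defined in this class are used.
//
//     Property property = Property.builder()
//             .schema(schema)                  // JSON schema for the property
//             .insertionOrder(true)            // serialized only because it is non-default
//             .addDependency("OtherProperty")  // serialized only if non-empty
//             .build();
//     Node node = property.toNode();           // schema node plus "insertionOrder" and "dependencies"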
| 1,493 |
1,414 | /*
* Glide64 - Glide video plugin for Nintendo 64 emulators.
* Copyright (c) 2002 Dave2001
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* Licence along with this program; if not, write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA 02110-1301, USA
*/
//****************************************************************
//
// Glide64 - Glide Plugin for Nintendo 64 emulators (tested mostly with Project64)
// Project started on December 29th, 2001
//
// To modify Glide64:
// * Write your name and (optional)email, commented by your work, so I know who did it, and so that you can find which parts you modified when it comes time to send it to me.
// * Do NOT send me the whole project or file that you modified. Take out your modified code sections, and tell me where to put them. If people sent the whole thing, I would have many different versions, but no idea how to combine them all.
//
// Official Glide64 development channel: #Glide64 on EFnet
//
// Original author: Dave2001 (<EMAIL>)
// Other authors: Gonetz, Gugaman
//
//****************************************************************
//
// CRC32 calculation functions
//
// Created by Gonetz, 2004
//
//****************************************************************
#include "CRC.h"
#define CRC32_POLYNOMIAL 0x04C11DB7
unsigned int CRCTable[ 256 ];
unsigned int Reflect( unsigned long ref, char ch )
{
unsigned int value = 0;
// Swap bit 0 for bit 7
// bit 1 for bit 6, etc.
for (char i = 1; i < (ch + 1); i++)
{
if(ref & 1)
value |= 1 << (ch - i);
ref >>= 1;
}
return value;
}
void CRC_BuildTable()
{
unsigned int crc;
for (unsigned i = 0; i <= 255; i++)
{
crc = Reflect( i, 8 ) << 24;
for (unsigned j = 0; j < 8; j++)
crc = (crc << 1) ^ (crc & (1 << 31) ? CRC32_POLYNOMIAL : 0);
CRCTable[i] = Reflect( crc, 32 );
}
}
| 811 |
542 | /* Gobby - GTK-based collaborative text editor
* Copyright (C) 2008-2014 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
#ifndef _GOBBY_BROWSER_COMMANDS_HPP_
#define _GOBBY_BROWSER_COMMANDS_HPP_
#include "operations/operations.hpp"
#include "core/browser.hpp"
#include "core/statusbar.hpp"
#include <sigc++/trackable.h>
namespace Gobby
{
class BrowserCommands: public sigc::trackable
{
public:
BrowserCommands(Browser& browser, FolderManager& folder_manager,
StatusBar& status_bar, Operations& operations,
const Preferences& preferences);
~BrowserCommands();
protected:
static void
on_set_browser_static(InfGtkBrowserModel* model,
GtkTreePath* path,
GtkTreeIter* iter,
InfBrowser* old_browser,
InfBrowser* new_browser,
gpointer user_data)
{
static_cast<BrowserCommands*>(user_data)->
on_set_browser(model, iter, old_browser, new_browser);
}
void on_set_browser(InfGtkBrowserModel* model, GtkTreeIter* iter,
InfBrowser* old_browser, InfBrowser* new_browser);
void on_notify_status(InfBrowser* browser);
void subscribe_chat(InfBrowser* browser);
bool create_chat_document(InfBrowser* browser);
void on_connect(const Glib::ustring& hostname);
void on_activate(InfBrowser* browser, InfBrowserIter* iter);
void on_finished(InfRequest* request,
InfBrowser* browser,
const InfBrowserIter* iter,
const GError* error);
Browser& m_browser;
FolderManager& m_folder_manager;
StatusBar& m_status_bar;
Operations& m_operations;
const Preferences& m_preferences;
gulong m_set_browser_handler;
class BrowserInfo;
typedef std::map<InfBrowser*, BrowserInfo*> BrowserMap;
BrowserMap m_browser_map;
class RequestInfo;
typedef std::map<InfRequest*, RequestInfo*> RequestMap;
RequestMap m_request_map;
};
}
#endif // _GOBBY_BROWSER_COMMANDS_HPP_
| 1,024 |
305 | package org.mamute.infra;
public class NotFoundException extends RuntimeException {
private static final long serialVersionUID = 1L;
public NotFoundException() {
super();
}
public NotFoundException(String message) {
super(message);
}
public NotFoundException(Exception e) {
super(e);
}
}
| 98 |
1,139 | package com.journaldev.java.dependencyinjection.service;
public interface MessageService {
void sendMessage(String msg, String rec);
}
| 39 |
765 | <filename>src/arch/arm/tlb.cc
/*
* Copyright (c) 2010-2013, 2016-2021 Arm Limited
* All rights reserved
*
* The license below extends only to copyright in the software and shall
* not be construed as granting a license to any other intellectual
* property including but not limited to intellectual property relating
* to a hardware implementation of the functionality of the software
* licensed hereunder. You may use the software subject to the license
* terms below provided that you ensure that this notice is replicated
* unmodified and in its entirety in all distributions of the software,
* modified or unmodified, in source code or in binary form.
*
* Copyright (c) 2001-2005 The Regents of The University of Michigan
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met: redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer;
* redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution;
* neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "arch/arm/tlb.hh"
#include <memory>
#include <string>
#include <vector>
#include "arch/arm/table_walker.hh"
#include "arch/arm/tlbi_op.hh"
#include "arch/arm/utility.hh"
#include "base/trace.hh"
#include "cpu/thread_context.hh"
#include "debug/TLB.hh"
#include "debug/TLBVerbose.hh"
#include "params/ArmTLB.hh"
namespace gem5
{
using namespace ArmISA;
TLB::TLB(const ArmTLBParams &p)
: BaseTLB(p), table(new TlbEntry[p.size]), size(p.size),
isStage2(p.is_stage2),
_walkCache(false),
tableWalker(nullptr),
stats(*this), rangeMRU(1), vmid(0)
{
for (int lvl = LookupLevel::L0;
lvl < LookupLevel::Num_ArmLookupLevel; lvl++) {
auto it = std::find(
p.partial_levels.begin(),
p.partial_levels.end(),
lvl);
auto lookup_lvl = static_cast<LookupLevel>(lvl);
if (it != p.partial_levels.end()) {
// A partial entry from the current LookupLevel can be
// cached within the TLB
partialLevels[lookup_lvl] = true;
// Make sure this is not the last level (complete translation)
if (lvl != LookupLevel::Num_ArmLookupLevel - 1) {
_walkCache = true;
}
} else {
partialLevels[lookup_lvl] = false;
}
}
}
TLB::~TLB()
{
delete[] table;
}
void
TLB::setTableWalker(TableWalker *table_walker)
{
tableWalker = table_walker;
tableWalker->setTlb(this);
}
TlbEntry*
TLB::match(const Lookup &lookup_data)
{
// Vector of TLB entry candidates.
// Only one of them will be assigned to retval and will
// be returned to the MMU (in case of a hit)
// The vector has one entry per lookup level as it stores
// both complete and partial matches
std::vector<std::pair<int, const TlbEntry*>> hits{
LookupLevel::Num_ArmLookupLevel, {0, nullptr}};
int x = 0;
while (x < size) {
if (table[x].match(lookup_data)) {
const TlbEntry &entry = table[x];
hits[entry.lookupLevel] = std::make_pair(x, &entry);
// This is a complete translation, no need to loop further
if (!entry.partial)
break;
}
++x;
}
// Loop over the list of TLB entries matching our translation
// request, starting from the highest lookup level (complete
// translation) and iterating backwards (using reverse iterators)
for (auto it = hits.rbegin(); it != hits.rend(); it++) {
const auto& [idx, entry] = *it;
if (!entry) {
// No match for the current LookupLevel
continue;
}
// Maintaining LRU array
// We only move the hit entry ahead when the position is higher
// than rangeMRU
if (idx > rangeMRU && !lookup_data.functional) {
TlbEntry tmp_entry = *entry;
for (int i = idx; i > 0; i--)
table[i] = table[i - 1];
table[0] = tmp_entry;
return &table[0];
} else {
return &table[idx];
}
}
return nullptr;
}
TlbEntry*
TLB::lookup(const Lookup &lookup_data)
{
const auto mode = lookup_data.mode;
TlbEntry *retval = match(lookup_data);
DPRINTF(TLBVerbose, "Lookup %#x, asn %#x -> %s vmn 0x%x hyp %d secure %d "
"ppn %#x size: %#x pa: %#x ap:%d ns:%d nstid:%d g:%d asid: %d "
"el: %d\n",
lookup_data.va, lookup_data.asn, retval ? "hit" : "miss",
lookup_data.vmid, lookup_data.hyp, lookup_data.secure,
retval ? retval->pfn : 0, retval ? retval->size : 0,
retval ? retval->pAddr(lookup_data.va) : 0,
retval ? retval->ap : 0,
retval ? retval->ns : 0, retval ? retval->nstid : 0,
retval ? retval->global : 0, retval ? retval->asid : 0,
retval ? retval->el : 0);
// Updating stats if this was not a functional lookup
if (!lookup_data.functional) {
if (!retval) {
if (mode == BaseMMU::Execute) {
stats.instMisses++;
} else if (mode == BaseMMU::Write) {
stats.writeMisses++;
} else {
stats.readMisses++;
}
} else {
if (retval->partial) {
stats.partialHits++;
}
if (mode == BaseMMU::Execute) {
stats.instHits++;
} else if (mode == BaseMMU::Write) {
stats.writeHits++;
} else {
stats.readHits++;
}
}
}
return retval;
}
TlbEntry*
TLB::multiLookup(const Lookup &lookup_data)
{
TlbEntry* te = lookup(lookup_data);
if (te) {
checkPromotion(te, lookup_data.mode);
} else {
if (auto tlb = static_cast<TLB*>(nextLevel())) {
te = tlb->multiLookup(lookup_data);
if (te && !lookup_data.functional &&
(!te->partial || partialLevels[te->lookupLevel])) {
// Insert entry only if this is not a functional
// lookup and if the translation is complete (unless this
// TLB caches partial translations)
insert(*te);
}
}
}
return te;
}
void
TLB::checkPromotion(TlbEntry *entry, BaseMMU::Mode mode)
{
TypeTLB acc_type = (mode == BaseMMU::Execute) ?
TypeTLB::instruction : TypeTLB::data;
// Hitting an instruction TLB entry on a data access or
// a data TLB entry on an instruction access:
// promoting the entry to unified
if (!(entry->type & acc_type))
entry->type = TypeTLB::unified;
}
// insert a new TLB entry
void
TLB::insert(TlbEntry &entry)
{
DPRINTF(TLB, "Inserting entry into TLB with pfn:%#x size:%#x vpn: %#x"
" asid:%d vmid:%d N:%d global:%d valid:%d nc:%d xn:%d"
" ap:%#x domain:%#x ns:%d nstid:%d isHyp:%d\n", entry.pfn,
entry.size, entry.vpn, entry.asid, entry.vmid, entry.N,
entry.global, entry.valid, entry.nonCacheable, entry.xn,
entry.ap, static_cast<uint8_t>(entry.domain), entry.ns, entry.nstid,
entry.isHyp);
if (table[size - 1].valid)
DPRINTF(TLB, " - Replacing Valid entry %#x, asn %d vmn %d ppn %#x "
"size: %#x ap:%d ns:%d nstid:%d g:%d isHyp:%d el: %d\n",
table[size-1].vpn << table[size-1].N, table[size-1].asid,
table[size-1].vmid, table[size-1].pfn << table[size-1].N,
table[size-1].size, table[size-1].ap, table[size-1].ns,
table[size-1].nstid, table[size-1].global, table[size-1].isHyp,
table[size-1].el);
// inserting to MRU position and evicting the LRU one
for (int i = size - 1; i > 0; --i)
table[i] = table[i-1];
table[0] = entry;
stats.inserts++;
ppRefills->notify(1);
}
void
TLB::multiInsert(TlbEntry &entry)
{
// Insert a partial translation only if the TLB is configured
// as a walk cache
if (!entry.partial || partialLevels[entry.lookupLevel]) {
insert(entry);
}
if (auto next_level = static_cast<TLB*>(nextLevel())) {
next_level->multiInsert(entry);
}
}
void
TLB::printTlb() const
{
int x = 0;
TlbEntry *te;
DPRINTF(TLB, "Current TLB contents:\n");
while (x < size) {
te = &table[x];
if (te->valid)
DPRINTF(TLB, " * %s\n", te->print());
++x;
}
}
void
TLB::flushAll()
{
DPRINTF(TLB, "Flushing all TLB entries\n");
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
if (te->valid) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const TLBIALL& tlbi_op)
{
DPRINTF(TLB, "Flushing all TLB entries (%s lookup)\n",
(tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(
tlbi_op.targetEL, tlbi_op.inHost);
if (te->valid && tlbi_op.secureLookup == !te->nstid &&
(te->vmid == vmid || tlbi_op.el2Enabled) && el_match) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const ITLBIALL& tlbi_op)
{
DPRINTF(TLB, "Flushing all ITLB entries (%s lookup)\n",
(tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(
tlbi_op.targetEL, tlbi_op.inHost);
if (te->type & TypeTLB::instruction && te->valid &&
tlbi_op.secureLookup == !te->nstid &&
(te->vmid == vmid || tlbi_op.el2Enabled) && el_match) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const DTLBIALL& tlbi_op)
{
DPRINTF(TLB, "Flushing all DTLB entries (%s lookup)\n",
(tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(
tlbi_op.targetEL, tlbi_op.inHost);
if (te->type & TypeTLB::data && te->valid &&
tlbi_op.secureLookup == !te->nstid &&
(te->vmid == vmid || tlbi_op.el2Enabled) && el_match) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const TLBIALLEL &tlbi_op)
{
DPRINTF(TLB, "Flushing all TLB entries (%s lookup)\n",
(tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(
tlbi_op.targetEL, tlbi_op.inHost);
if (te->valid && tlbi_op.secureLookup == !te->nstid && el_match) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const TLBIVMALL &tlbi_op)
{
DPRINTF(TLB, "Flushing all TLB entries (%s lookup)\n",
(tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(
tlbi_op.targetEL, tlbi_op.inHost);
const bool vmid_match =
te->vmid == vmid ||
!tlbi_op.el2Enabled ||
(!tlbi_op.stage2Flush() && tlbi_op.inHost);
if (te->valid && tlbi_op.secureLookup == !te->nstid &&
el_match && vmid_match) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const TLBIALLN &tlbi_op)
{
bool hyp = tlbi_op.targetEL == EL2;
DPRINTF(TLB, "Flushing all NS TLB entries (%s lookup)\n",
(hyp ? "hyp" : "non-hyp"));
int x = 0;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(tlbi_op.targetEL, false);
if (te->valid && te->nstid && te->isHyp == hyp && el_match) {
DPRINTF(TLB, " - %s\n", te->print());
stats.flushedEntries++;
te->valid = false;
}
++x;
}
stats.flushTlb++;
}
void
TLB::flush(const TLBIMVA &tlbi_op)
{
DPRINTF(TLB, "Flushing TLB entries with mva: %#x, asid: %#x "
"(%s lookup)\n", tlbi_op.addr, tlbi_op.asid,
(tlbi_op.secureLookup ? "secure" : "non-secure"));
_flushMva(tlbi_op.addr, tlbi_op.asid, tlbi_op.secureLookup, false,
tlbi_op.targetEL, tlbi_op.inHost, TypeTLB::unified);
stats.flushTlbMvaAsid++;
}
void
TLB::flush(const ITLBIMVA &tlbi_op)
{
DPRINTF(TLB, "Flushing ITLB entries with mva: %#x, asid: %#x "
"(%s lookup)\n", tlbi_op.addr, tlbi_op.asid,
(tlbi_op.secureLookup ? "secure" : "non-secure"));
_flushMva(tlbi_op.addr, tlbi_op.asid, tlbi_op.secureLookup, false,
tlbi_op.targetEL, tlbi_op.inHost, TypeTLB::instruction);
stats.flushTlbMvaAsid++;
}
void
TLB::flush(const DTLBIMVA &tlbi_op)
{
DPRINTF(TLB, "Flushing DTLB entries with mva: %#x, asid: %#x "
"(%s lookup)\n", tlbi_op.addr, tlbi_op.asid,
(tlbi_op.secureLookup ? "secure" : "non-secure"));
_flushMva(tlbi_op.addr, tlbi_op.asid, tlbi_op.secureLookup, false,
tlbi_op.targetEL, tlbi_op.inHost, TypeTLB::data);
stats.flushTlbMvaAsid++;
}
void
TLB::flush(const TLBIASID &tlbi_op)
{
DPRINTF(TLB, "Flushing TLB entries with asid: %#x (%s lookup)\n",
tlbi_op.asid, (tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0 ;
TlbEntry *te;
while (x < size) {
te = &table[x];
const bool el_match = te->checkELMatch(
tlbi_op.targetEL, tlbi_op.inHost);
const bool vmid_match =
te->vmid == vmid || !tlbi_op.el2Enabled || tlbi_op.inHost;
if (te->valid && te->asid == tlbi_op.asid &&
tlbi_op.secureLookup == !te->nstid &&
vmid_match && el_match) {
te->valid = false;
DPRINTF(TLB, " - %s\n", te->print());
stats.flushedEntries++;
}
++x;
}
stats.flushTlbAsid++;
}
void
TLB::flush(const ITLBIASID &tlbi_op)
{
DPRINTF(TLB, "Flushing ITLB entries with asid: %#x (%s lookup)\n",
tlbi_op.asid, (tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0 ;
TlbEntry *te;
while (x < size) {
te = &table[x];
if (te->type & TypeTLB::instruction &&
te->valid && te->asid == tlbi_op.asid &&
tlbi_op.secureLookup == !te->nstid &&
(te->vmid == vmid || tlbi_op.el2Enabled) &&
te->checkELMatch(tlbi_op.targetEL, tlbi_op.inHost)) {
te->valid = false;
DPRINTF(TLB, " - %s\n", te->print());
stats.flushedEntries++;
}
++x;
}
stats.flushTlbAsid++;
}
void
TLB::flush(const DTLBIASID &tlbi_op)
{
DPRINTF(TLB, "Flushing DTLB entries with asid: %#x (%s lookup)\n",
tlbi_op.asid, (tlbi_op.secureLookup ? "secure" : "non-secure"));
int x = 0 ;
TlbEntry *te;
while (x < size) {
te = &table[x];
if (te->type & TypeTLB::data &&
te->valid && te->asid == tlbi_op.asid &&
tlbi_op.secureLookup == !te->nstid &&
(te->vmid == vmid || tlbi_op.el2Enabled) &&
te->checkELMatch(tlbi_op.targetEL, tlbi_op.inHost)) {
te->valid = false;
DPRINTF(TLB, " - %s\n", te->print());
stats.flushedEntries++;
}
++x;
}
stats.flushTlbAsid++;
}
void
TLB::flush(const TLBIMVAA &tlbi_op) {
DPRINTF(TLB, "Flushing TLB entries with mva: %#x (%s lookup)\n",
tlbi_op.addr,
(tlbi_op.secureLookup ? "secure" : "non-secure"));
_flushMva(tlbi_op.addr, 0xbeef, tlbi_op.secureLookup, true,
tlbi_op.targetEL, tlbi_op.inHost, TypeTLB::unified);
stats.flushTlbMva++;
}
void
TLB::_flushMva(Addr mva, uint64_t asn, bool secure_lookup,
bool ignore_asn, ExceptionLevel target_el, bool in_host,
TypeTLB entry_type)
{
TlbEntry *te;
Lookup lookup_data;
lookup_data.va = sext<56>(mva);
lookup_data.asn = asn;
lookup_data.ignoreAsn = ignore_asn;
lookup_data.vmid = vmid;
lookup_data.hyp = target_el == EL2;
lookup_data.secure = secure_lookup;
lookup_data.functional = true;
lookup_data.targetEL = target_el;
lookup_data.inHost = in_host;
lookup_data.mode = BaseMMU::Read;
te = lookup(lookup_data);
while (te != NULL) {
bool matching_type = (te->type & entry_type);
if (matching_type && secure_lookup == !te->nstid) {
DPRINTF(TLB, " - %s\n", te->print());
te->valid = false;
stats.flushedEntries++;
}
te = lookup(lookup_data);
}
}
void
TLB::takeOverFrom(BaseTLB *_otlb)
{
}
TLB::TlbStats::TlbStats(TLB &parent)
: statistics::Group(&parent), tlb(parent),
ADD_STAT(partialHits, statistics::units::Count::get(),
"partial translation hits"),
ADD_STAT(instHits, statistics::units::Count::get(), "Inst hits"),
ADD_STAT(instMisses, statistics::units::Count::get(), "Inst misses"),
ADD_STAT(readHits, statistics::units::Count::get(), "Read hits"),
ADD_STAT(readMisses, statistics::units::Count::get(), "Read misses"),
ADD_STAT(writeHits, statistics::units::Count::get(), "Write hits"),
ADD_STAT(writeMisses, statistics::units::Count::get(), "Write misses"),
ADD_STAT(inserts, statistics::units::Count::get(),
"Number of times an entry is inserted into the TLB"),
ADD_STAT(flushTlb, statistics::units::Count::get(),
"Number of times complete TLB was flushed"),
ADD_STAT(flushTlbMva, statistics::units::Count::get(),
"Number of times TLB was flushed by MVA"),
ADD_STAT(flushTlbMvaAsid, statistics::units::Count::get(),
"Number of times TLB was flushed by MVA & ASID"),
ADD_STAT(flushTlbAsid, statistics::units::Count::get(),
"Number of times TLB was flushed by ASID"),
ADD_STAT(flushedEntries, statistics::units::Count::get(),
"Number of entries that have been flushed from TLB"),
ADD_STAT(readAccesses, statistics::units::Count::get(), "Read accesses",
readHits + readMisses),
ADD_STAT(writeAccesses, statistics::units::Count::get(), "Write accesses",
writeHits + writeMisses),
ADD_STAT(instAccesses, statistics::units::Count::get(), "Inst accesses",
instHits + instMisses),
ADD_STAT(hits, statistics::units::Count::get(),
"Total TLB (inst and data) hits",
readHits + writeHits + instHits),
ADD_STAT(misses, statistics::units::Count::get(),
"Total TLB (inst and data) misses",
readMisses + writeMisses + instMisses),
ADD_STAT(accesses, statistics::units::Count::get(),
"Total TLB (inst and data) accesses",
readAccesses + writeAccesses + instAccesses)
{
// If this is a pure Data TLB, mark the instruction
// stats as nozero, so that they won't make it in
// into the final stats file
if (tlb.type() == TypeTLB::data) {
instHits.flags(statistics::nozero);
instMisses.flags(statistics::nozero);
instAccesses.flags(statistics::nozero);
}
// If this is a pure Instruction TLB, mark the data
// stats as nozero, so that they won't make it in
// into the final stats file
if (tlb.type() & TypeTLB::instruction) {
readHits.flags(statistics::nozero);
readMisses.flags(statistics::nozero);
writeHits.flags(statistics::nozero);
writeMisses.flags(statistics::nozero);
readAccesses.flags(statistics::nozero);
writeAccesses.flags(statistics::nozero);
}
partialHits.flags(statistics::nozero);
}
void
TLB::regProbePoints()
{
ppRefills.reset(new probing::PMU(getProbeManager(), "Refills"));
}
Port *
TLB::getTableWalkerPort()
{
return &tableWalker->getTableWalkerPort();
}
} // namespace gem5
| 10,293 |
331 | <gh_stars>100-1000
/**
* Copyright (C) 2016 - 2030 youtongluan.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.yx.db.event;
import java.util.List;
import org.yx.bean.IOC;
import org.yx.common.listener.EventBus;
import org.yx.conf.Const;
public final class DBEventPublisher {
private static EventBus modifyBus;
private static EventBus queryBus;
public static void init() {
modifyBus = IOC.get(Const.LISTENER_DB_MODIFY, EventBus.class);
queryBus = IOC.get(Const.LISTENER_DB_QUERY, EventBus.class);
}
public static void publishModify(List<DBEvent> events) {
modifyBus.publishBatch(events);
}
public static void publishModify(DBEvent event) {
modifyBus.publish(event);
}
public static void publishQuery(QueryEvent event) {
queryBus.publish(event);
}
} | 408 |
797 | <reponame>huangboju/Alpha<gh_stars>100-1000
//
// ALPHAFileManager.h
// Alpha
//
// Created by <NAME> on 25/11/14.
// Copyright © 2014 Unified Sense. All rights reserved.
//
@import Foundation;
@interface ALPHAFileManager : NSObject
@property (nonatomic, readonly) NSURL* documentsDirectory;
@property (nonatomic, strong) NSDateFormatter* fileDateFormatter;
+ (instancetype)sharedManager;
@end
| 138 |
2,406 | // Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
///////////////////////////////////////////////////////////////////////////////////////////////////
#pragma once
#include "cldnn/primitives/embedding_bag.hpp"
#include "primitive_inst.h"
#include <string>
namespace cldnn {
template <>
struct typed_program_node<embedding_bag> : public typed_program_node_base<embedding_bag> {
using parent = typed_program_node_base<embedding_bag>;
public:
using parent::parent;
program_node& input(size_t index = 0) const { return get_dependency(index); }
size_t inputs_count() const { return get_dependencies().size(); }
};
using embedding_bag_node = typed_program_node<embedding_bag>;
template <>
class typed_primitive_inst<embedding_bag> : public typed_primitive_inst_base<embedding_bag> {
using parent = typed_primitive_inst_base<embedding_bag>;
public:
static layout calc_output_layout(embedding_bag_node const& node);
static std::string to_string(embedding_bag_node const& node);
typed_primitive_inst(network& network, embedding_bag_node const& desc);
};
using embedding_bag_inst = typed_primitive_inst<embedding_bag>;
} // namespace cldnn
| 391 |
327 | <reponame>Meteo-Concept/cpp-driver<filename>src/prepare_host_handler.hpp
/*
Copyright (c) DataStax, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef DATASTAX_INTERNAL_PREPARE_HOST_HANDLER_HPP
#define DATASTAX_INTERNAL_PREPARE_HOST_HANDLER_HPP
#include "callback.hpp"
#include "connector.hpp"
#include "host.hpp"
#include "prepared.hpp"
#include "ref_counted.hpp"
#include "string.hpp"
namespace datastax { namespace internal { namespace core {
class Connector;
/**
* A handler for pre-preparing statements on a newly available host.
*/
class PrepareHostHandler
: public RefCounted<PrepareHostHandler>
, public ConnectionListener {
public:
typedef internal::Callback<void, const PrepareHostHandler*> Callback;
typedef SharedRefPtr<PrepareHostHandler> Ptr;
PrepareHostHandler(const Host::Ptr& host,
const PreparedMetadata::Entry::Vec& prepared_metadata_entries,
const Callback& callback, ProtocolVersion protocol_version,
unsigned max_requests_per_flush);
const Host::Ptr host() const { return host_; }
void prepare(uv_loop_t* loop, const ConnectionSettings& settings);
private:
virtual void on_close(Connection* connection);
void on_connect(Connector* connector);
private:
/**
* A callback for preparing a single statement on a host. It continues the
* preparation process on success, otherwise it closes the temporary
* connection and logs a warning.
*/
class PrepareCallback : public SimpleRequestCallback {
public:
PrepareCallback(const PrepareRequest::ConstPtr& prepare_request,
const PrepareHostHandler::Ptr& handler);
virtual void on_internal_set(ResponseMessage* response);
virtual void on_internal_error(CassError code, const String& message);
virtual void on_internal_timeout();
private:
PrepareHostHandler::Ptr handler_;
};
/**
* A callback for setting the keyspace on a connection. This is required
* pre-V5/DSEv2 because the keyspace state is per connection. It continues
* the preparation process on success, otherwise it closes the temporary
* connection and logs a warning.
*/
class SetKeyspaceCallback : public SimpleRequestCallback {
public:
SetKeyspaceCallback(const String& keyspace, const PrepareHostHandler::Ptr& handler);
virtual void on_internal_set(ResponseMessage* response);
virtual void on_internal_error(CassError code, const String& message);
virtual void on_internal_timeout();
private:
PrepareHostHandler::Ptr handler_;
};
private:
// This is the main method for iterating over the list of prepared statements
void prepare_next();
// Returns true if the keyspace is current or using protocol v5/DSEv2
bool check_and_set_keyspace();
bool is_done() const;
void close();
private:
const Host::Ptr host_;
const ProtocolVersion protocol_version_;
Callback callback_;
Connection* connection_;
String current_keyspace_;
int prepares_outstanding_;
const int max_prepares_outstanding_;
PreparedMetadata::Entry::Vec prepared_metadata_entries_;
PreparedMetadata::Entry::Vec::const_iterator current_entry_it_;
};
}}} // namespace datastax::internal::core
#endif
| 1,151 |
359 | from keras.layers import Dense, Dropout, Flatten, Conv1D, Activation, \
BatchNormalization, MaxPooling1D
from keras.models import Sequential
from keras.optimizers import Adam
import numpy as np
from .dnn import DNN
class CNN1D(DNN):
def __init__(self, model: Sequential, trained: bool = False) -> None:
super(CNN1D, self).__init__(model, trained)
@classmethod
def make(
cls,
input_shape: int,
n_kernels: int,
kernel_sizes: list,
hidden_size: int,
dropout: float = 0.5,
n_classes: int = 6,
lr: float = 0.001
):
"""
搭建模型
Args:
input_shape (int): 特征维度
n_kernels (int): 卷积核数量
kernel_sizes (list): 每个卷积层的卷积核大小,列表长度为卷积层数量
hidden_size (int): 全连接层大小
dropout (float, optional, default=0.5): dropout
n_classes (int, optional, default=6): 标签种类数量
lr (float, optional, default=0.001): 学习率
"""
model = Sequential()
for size in kernel_sizes:
model.add(Conv1D(
filters = n_kernels,
kernel_size = size,
padding = 'same',
input_shape = (input_shape, 1)
)) # convolutional layer
model.add(BatchNormalization(axis=-1))
model.add(Activation('relu'))
model.add(Dropout(dropout))
model.add(Flatten())
model.add(Dense(hidden_size))
model.add(BatchNormalization(axis = -1))
model.add(Activation('relu'))
model.add(Dropout(dropout))
model.add(Dense(n_classes, activation='softmax')) # classification layer
optimizer = Adam(lr=lr)
model.compile(loss='categorical_crossentropy', optimizer=optimizer, metrics=['accuracy'])
return cls(model)
def reshape_input(self, data: np.ndarray) -> np.ndarray:
"""二维数组转三维"""
# (n_samples, n_feats) -> (n_samples, n_feats, 1)
data = np.reshape(data, (data.shape[0], data.shape[1], 1))
return data
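# Illustrative usage (not part of the original file); the feature-matrix shapes and the
# training/prediction API inherited from the DNN base class are assumptions here.
#
#     model = CNN1D.make(input_shape=180, n_kernels=32, kernel_sizes=[5, 5],
#                        hidden_size=64, dropout=0.5, n_classes=6, lr=0.001)
#     x = model.reshape_input(x_train)   # (n_samples, n_feats) -> (n_samples, n_feats, 1)
#     # training and evaluation are provided by the DNN base class (not shown in this file)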
| 1,137 |
1,044 | <reponame>li-caspar/eventlet_0.30.2
if __name__ == '__main__':
from tests.mock import patch
import sys
import eventlet
from eventlet import hubs
with patch.object(hubs, 'notify_opened') as mock_func:
eventlet.monkey_patch(builtins=True)
with open(__file__, 'r') as f:
mock_func.assert_called_with(f.fileno())
if sys.version_info.major == 2:
with file(__file__, 'r') as f:
mock_func.assert_called_with(f.fileno())
print('pass')
| 237 |
14,668 | // Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/views/user_education/feature_promo_bubble_owner_impl.h"
#include "base/callback.h"
#include "base/no_destructor.h"
#include "base/token.h"
#include "chrome/browser/ui/views/user_education/feature_promo_bubble_view.h"
#include "third_party/abseil-cpp/absl/types/optional.h"
#include "ui/views/widget/widget.h"
FeaturePromoBubbleOwnerImpl::FeaturePromoBubbleOwnerImpl() = default;
FeaturePromoBubbleOwnerImpl::~FeaturePromoBubbleOwnerImpl() = default;
// static
FeaturePromoBubbleOwnerImpl* FeaturePromoBubbleOwnerImpl::GetInstance() {
static base::NoDestructor<FeaturePromoBubbleOwnerImpl> instance;
return instance.get();
}
bool FeaturePromoBubbleOwnerImpl::ToggleFocusForAccessibility() {
// If the bubble isn't present or can't be meaningfully focused, stop.
if (!bubble_)
return false;
// Focus can't be determined just by widget activity; we must check to see if
// there's a focused view in the anchor widget or the top-level browser
// widget (if different).
auto* const anchor = bubble_->GetAnchorView();
const bool is_focus_in_ancestor_widget =
(anchor && anchor->GetFocusManager()->GetFocusedView()) ||
bubble_->GetWidget()
->GetPrimaryWindowWidget()
->GetFocusManager()
->GetFocusedView();
// If the focus isn't in the help bubble, focus the help bubble.
if (is_focus_in_ancestor_widget) {
bubble_->GetWidget()->Activate();
bubble_->RequestFocus();
return true;
}
// If the anchor isn't accessibility-focusable, we can't toggle focus.
if (!anchor || !anchor->IsAccessibilityFocusable())
return false;
// Focus the anchor. We can't request focus for an accessibility-only view
// until we turn on keyboard accessibility for its focus manager.
anchor->GetFocusManager()->SetKeyboardAccessible(true);
anchor->RequestFocus();
return true;
}
bool FeaturePromoBubbleOwnerImpl::IsPromoBubble(
const views::DialogDelegate* bubble) const {
if (!bubble_)
return false;
return bubble_ == bubble;
}
absl::optional<base::Token> FeaturePromoBubbleOwnerImpl::ShowBubble(
FeaturePromoBubbleView::CreateParams params,
base::OnceClosure close_callback) {
if (bubble_)
return absl::nullopt;
DCHECK(!bubble_id_);
DCHECK(!close_callback_);
bubble_ = FeaturePromoBubbleView::Create(std::move(params));
bubble_id_ = base::Token::CreateRandom();
widget_observation_.Observe(bubble_->GetWidget());
close_callback_ = std::move(close_callback);
return bubble_id_;
}
bool FeaturePromoBubbleOwnerImpl::BubbleIsShowing(base::Token bubble_id) const {
DCHECK_EQ((bubble_ != nullptr), bubble_id_.has_value());
return bubble_id_ == bubble_id;
}
bool FeaturePromoBubbleOwnerImpl::AnyBubbleIsShowing() const {
DCHECK_EQ((bubble_ != nullptr), bubble_id_.has_value());
return bubble_;
}
void FeaturePromoBubbleOwnerImpl::CloseBubble(base::Token bubble_id) {
if (bubble_id_ != bubble_id)
return;
DCHECK(bubble_);
bubble_->GetWidget()->Close();
}
void FeaturePromoBubbleOwnerImpl::NotifyAnchorBoundsChanged() {
if (bubble_)
bubble_->OnAnchorBoundsChanged();
}
gfx::Rect FeaturePromoBubbleOwnerImpl::GetBubbleBoundsInScreen(
base::Token bubble_id) const {
DCHECK(bubble_id_ == bubble_id);
return bubble_ ? bubble_->GetWidget()->GetWindowBoundsInScreen()
: gfx::Rect();
}
void FeaturePromoBubbleOwnerImpl::OnWidgetClosing(views::Widget* widget) {
DCHECK(bubble_);
DCHECK_EQ(widget, bubble_->GetWidget());
HandleBubbleClosed();
}
void FeaturePromoBubbleOwnerImpl::OnWidgetDestroying(views::Widget* widget) {
DCHECK(bubble_);
DCHECK_EQ(widget, bubble_->GetWidget());
HandleBubbleClosed();
}
void FeaturePromoBubbleOwnerImpl::HandleBubbleClosed() {
widget_observation_.Reset();
bubble_ = nullptr;
bubble_id_ = absl::nullopt;
std::move(close_callback_).Run();
}
| 1,411 |
2,587 | package cn.hikyson.godeye.monitor.modules;
import androidx.annotation.Keep;
import java.io.Serializable;
import cn.hikyson.godeye.core.internal.modules.sm.BlockInfo;
import cn.hikyson.godeye.core.utils.JsonUtil;
@Keep
public class BlockSimpleInfo implements Serializable {
public long blockTime;
public String blockBaseinfo;
public BlockSimpleInfo(BlockInfo blockInfo) {
if (BlockInfo.BlockType.LONG.equals(blockInfo.blockType)) {
this.blockTime = blockInfo.longBlockInfo.blockTime;
this.blockBaseinfo = JsonUtil.toJson(blockInfo.longBlockInfo);
} else if (BlockInfo.BlockType.SHORT.equals(blockInfo.blockType)) {
this.blockTime = blockInfo.shortBlockInfo.blockTime;
this.blockBaseinfo = JsonUtil.toJson(blockInfo.shortBlockInfo);
} //do nothing
}
}
| 335 |
318 | package com.cxytiandi.kittycloud.comment.provider;
import com.cxytiandi.kittycloud.comment.api.request.CommentReplySaveRequest;
import com.cxytiandi.kittycloud.comment.api.service.CommentReplyRemoteService;
import com.cxytiandi.kittycloud.common.base.ResponseData;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
/**
 * Test class for CommentReplyRemoteService
 *
 * @author 尹吉欢
 * @wechat jihuan900
 * @wechatOfficialAccount 猿天地
 * @GitHub https://github.com/yinjihuan
 * @authorIntro http://cxytiandi.com/about
 * @date 2020-02-15 20:50
*/
@RunWith(SpringRunner.class)
@SpringBootTest
public class CommentReplyRemoteServiceTest {
@Autowired
private CommentReplyRemoteService commentReplyRemoteService;
@Test
public void saveCommentReply() {
CommentReplySaveRequest request = CommentReplySaveRequest.builder()
.commentId("5e47df88b0aa74aa95a96c14")
.content("你说的真好")
.replayRefUserId(2L)
.userId(3L)
.build();
ResponseData<String> saveCommentReplyResp = commentReplyRemoteService.saveCommentReply(request);
Assert.assertTrue(saveCommentReplyResp.isSuccess());
}
@Test
public void removeCommentReply() {
ResponseData<Boolean> removeCommentReplyResp = commentReplyRemoteService.removeCommentReply("5e47f09ab0aa74adc80f9c6e");
Assert.assertTrue(removeCommentReplyResp.isSuccess() && removeCommentReplyResp.getData());
}
} | 664 |
493 | package com.mozz.htmlnative.script.lua;
import android.content.Intent;
import android.net.Uri;
import android.view.View;
import com.mozz.htmlnative.HNEnvironment;
import com.mozz.htmlnative.HNSandBoxContext;
import com.mozz.htmlnative.dom.AttachedElement;
import com.mozz.htmlnative.dom.DomElement;
import com.mozz.htmlnative.parser.CssParser;
import com.mozz.htmlnative.utils.ResourceUtils;
import org.luaj.vm2.LuaDouble;
import org.luaj.vm2.LuaInteger;
import org.luaj.vm2.LuaString;
import org.luaj.vm2.LuaTable;
import org.luaj.vm2.LuaValue;
import org.luaj.vm2.lib.OneArgFunction;
import org.luaj.vm2.lib.TwoArgFunction;
import java.util.HashMap;
import java.util.Map;
/**
* @author <NAME>, 17/5/11.
*/
class LDocument extends LuaTable implements ILGlobalObject {
LDocument(final HNSandBoxContext sandBoxContext) {
super();
set("version", LuaString.valueOf(HNEnvironment.v));
set("jump", new OneArgFunction() {
@Override
public LuaValue call(LuaValue arg) {
if (arg.isstring()) {
String uri = arg.tojstring();
Intent i = new Intent(Intent.ACTION_VIEW, Uri.parse(uri));
sandBoxContext.getAndroidContext().startActivity(i);
}
return LuaValue.NIL;
}
});
set("createElement", new TwoArgFunction() {
@Override
public LuaValue call(LuaValue tag, LuaValue style) {
if (tag instanceof LuaString && style instanceof LuaString) {
DomElement domElement = new AttachedElement();
domElement.setType(tag.tojstring());
Map<String, Object> styleSets = new HashMap<>();
CssParser.parseInlineStyle(style.tojstring(), new StringBuilder(), styleSets);
String idStr = (String) styleSets.get("id");
if (idStr != null) {
domElement.setId(idStr);
styleSets.remove("id");
}
String[] clazz = (String[]) styleSets.get("class");
if (clazz != null) {
domElement.setClazz(clazz);
styleSets.remove("class");
}
return new LView(domElement, styleSets, sandBoxContext);
}
return LuaValue.NIL;
}
});
set("getString", new OneArgFunction() {
@Override
public LuaValue call(LuaValue arg) {
String res = ResourceUtils.getString(arg.tojstring(), sandBoxContext
.getAndroidContext());
if (res != null) {
return LuaString.valueOf(res);
} else {
return LuaValue.NIL;
}
}
});
set("getColor", new OneArgFunction() {
@Override
public LuaValue call(LuaValue arg) {
int res = ResourceUtils.getColor(arg.tojstring(), sandBoxContext
.getAndroidContext());
return LuaInteger.valueOf(res);
}
});
set("getDimension", new OneArgFunction() {
@Override
public LuaValue call(LuaValue arg) {
float res = ResourceUtils.getDimension(arg.tojstring(), sandBoxContext
.getAndroidContext());
return LuaDouble.valueOf(res);
}
});
set("getElementById", new LFindViewById(sandBoxContext));
}
@Override
public int type() {
return TUSERDATA;
}
@Override
public String typename() {
return TYPE_NAMES[3];
}
@Override
public String objectName() {
return "document";
}
/**
* @author <NAME>, 17/3/23.
*/
static class LFindViewById extends OneArgFunction implements ILApi {
private HNSandBoxContext mContext;
public LFindViewById(HNSandBoxContext context) {
mContext = context;
}
@Override
public LuaValue call(LuaValue arg) {
String id = arg.tojstring();
View v = mContext.findViewById(id);
if (v != null) {
LView lView = new LView(v, mContext);
lView.mAdded = true;
return lView;
}
return NIL;
}
@Override
public String apiName() {
return "getElementById";
}
}
}
| 2,297 |
467 | <reponame>LiuStart/bilisoleil
package com.yoyiyi.soleil.bean.app.video;
import com.chad.library.adapter.base.entity.MultiItemEntity;
import java.util.List;
/**
 * @author zzq  E-mail: <EMAIL>
 * @date Created on 2017/6/15 10:08
 * Description:
*/
public class MulSummary implements MultiItemEntity {
public static final int TYPE_DES = 34;
public static final int TYPE_OWNER = 35;
public static final int TYPE_RELATE = 36;
public static final int TYPE_RELATE_HEAD = 37;
public int itemType;
public String desc;
public String title;
    public List<VideoDetail.DataBean.TagBean> tags; // tags
    public VideoDetail.DataBean.StatBean state; // coin and playback statistics
    public VideoDetail.DataBean.RelatesBean relates; // related video recommendations
public long ctime;
public VideoDetail.DataBean.OwnerBean owner;
public MulSummary setRelates(VideoDetail.DataBean.RelatesBean relates) {
this.relates = relates;
return this;
}
@Override
public int getItemType() {
return itemType;
}
public MulSummary setItemType(int itemType) {
this.itemType = itemType;
return this;
}
public MulSummary setDesc(String desc) {
this.desc = desc;
return this;
}
public MulSummary setTitle(String title) {
this.title = title;
return this;
}
public MulSummary setTags(List<VideoDetail.DataBean.TagBean> tags) {
this.tags = tags;
return this;
}
public MulSummary setState(VideoDetail.DataBean.StatBean state) {
this.state = state;
return this;
}
public MulSummary setCtime(long ctime) {
this.ctime = ctime;
return this;
}
public MulSummary setOwner(VideoDetail.DataBean.OwnerBean owner) {
this.owner = owner;
return this;
}
}
| 781 |
312 | <reponame>parrot001/jetcd
package io.etcd.jetcd.support;
import java.util.Optional;
import java.util.function.Consumer;
import io.etcd.jetcd.ByteSequence;
import io.etcd.jetcd.api.DeleteRangeRequest;
import io.etcd.jetcd.api.PutRequest;
import io.etcd.jetcd.api.RangeRequest;
import io.etcd.jetcd.options.DeleteOption;
import io.etcd.jetcd.options.GetOption;
import io.etcd.jetcd.options.OptionsUtil;
import io.etcd.jetcd.options.PutOption;
import com.google.protobuf.ByteString;
import static io.etcd.jetcd.options.OptionsUtil.toRangeRequestSortOrder;
import static io.etcd.jetcd.options.OptionsUtil.toRangeRequestSortTarget;
public final class Requests {
private Requests() {
}
public static RangeRequest mapRangeRequest(ByteSequence key, GetOption option, ByteSequence namespace) {
RangeRequest.Builder builder = RangeRequest.newBuilder()
.setKey(Util.prefixNamespace(key, namespace))
.setCountOnly(option.isCountOnly())
.setLimit(option.getLimit())
.setRevision(option.getRevision())
.setKeysOnly(option.isKeysOnly())
.setSerializable(option.isSerializable())
.setSortOrder(toRangeRequestSortOrder(option.getSortOrder()))
.setSortTarget(toRangeRequestSortTarget(option.getSortField()))
.setMinCreateRevision(option.getMinCreateRevision())
.setMaxCreateRevision(option.getMaxCreateRevision())
.setMinModRevision(option.getMinModRevision())
.setMaxModRevision(option.getMaxModRevision());
defineRangeRequestEnd(key, option.getEndKey(), option.isPrefix(), namespace, builder::setRangeEnd);
return builder.build();
}
public static PutRequest mapPutRequest(ByteSequence key, ByteSequence value, PutOption option, ByteSequence namespace) {
return PutRequest.newBuilder()
.setKey(Util.prefixNamespace(key, namespace))
.setValue(ByteString.copyFrom(value.getBytes()))
.setLease(option.getLeaseId())
.setPrevKv(option.getPrevKV())
.build();
}
public static DeleteRangeRequest mapDeleteRequest(ByteSequence key, DeleteOption option, ByteSequence namespace) {
DeleteRangeRequest.Builder builder = DeleteRangeRequest.newBuilder()
.setKey(Util.prefixNamespace(key, namespace))
.setPrevKv(option.isPrevKV());
defineRangeRequestEnd(key, option.getEndKey(), option.isPrefix(), namespace, builder::setRangeEnd);
return builder.build();
}
private static void defineRangeRequestEnd(ByteSequence key, Optional<ByteSequence> endKeyOptional,
boolean hasPrefix, ByteSequence namespace, Consumer<ByteString> setRangeEndConsumer) {
if (endKeyOptional.isPresent()) {
setRangeEndConsumer
.accept(Util.prefixNamespaceToRangeEnd(ByteString.copyFrom(endKeyOptional.get().getBytes()), namespace));
} else {
if (hasPrefix) {
ByteSequence endKey = OptionsUtil.prefixEndOf(key);
setRangeEndConsumer.accept(Util.prefixNamespaceToRangeEnd(ByteString.copyFrom(endKey.getBytes()), namespace));
}
}
}
}
| 1,258 |
1,418 | <reponame>amenic-hub/aima-java
package aima.core.logic.planning.hierarchicalsearch;
import aima.core.logic.fol.parsing.ast.Term;
import aima.core.logic.planning.ActionSchema;
import java.util.List;
/**
* Artificial Intelligence A Modern Approach (3rd Edition): Figure 11.4, page
* 409.<br>
* <p>
 * Each HLA has one or more possible refinements, into a sequence
 * of actions, each of which may be an HLA or a primitive action (which has no refinements
 * by definition).
*
* @author samagra
*/
public class HighLevelAction extends ActionSchema {
List<List<ActionSchema>> refinements;
public HighLevelAction(String name, List<Term> variables, String precondition, String effects, List<List<ActionSchema>> refinements) {
super(name, variables, precondition, effects);
this.refinements = refinements;
}
public void addRefinement(List<ActionSchema> newRefinement) {
this.refinements.add(newRefinement);
}
public List<List<ActionSchema>> getRefinements() {
return refinements;
}
@Override
public String toString() {
String result = super.toString();
result = result + "\n" + "REFINEMENTS : \n";
for (List<ActionSchema> refinement :
this.getRefinements()) {
result += "\n";
for (ActionSchema action :
refinement) {
result = result + "\n" + (action.getName());
}
}
return result;
}
}
| 584 |
1,253 | <reponame>AndreTeixeira1998/Arduino
#pragma once
//
// FILE: set.h
// AUTHOR: <NAME>
// VERSION: 0.2.5
// DATE: 2014-09-11
// PURPOSE: SET library for Arduino
// URL: https://github.com/RobTillaart/SET
#include "Arduino.h"
#define SET_LIB_VERSION (F("0.2.5"))
class Set
{
public:
explicit Set(const bool clear = true); // create empty Set
Set(const Set &t); // create copy Set
void clear(); // clear the Set
void clr() { clear(); }; // will become obsolete 0.3.0
void invert(); // flip all elements in the Set
void addAll(); // add all elements
uint16_t count() const; // return the #elements
bool isEmpty();
bool isFull();
void add(const uint8_t value); // add element to the Set
void sub(const uint8_t value); // remove element from Set
void invert(const uint8_t value); // flip element in Set
bool has(const uint8_t value); // element is in Set
Set operator + (const Set &); // union
Set operator - (const Set &); // diff
Set operator * (const Set &); // intersection
void operator += (const Set &); // union
void operator -= (const Set &); // diff
void operator *= (const Set &); // intersection
bool operator == (const Set &) const; // equal
bool operator != (const Set &) const; // not equal
bool operator <= (const Set &) const; // is subSet,
// a superSet b is not implemented as one could
// say b subSet a (b <= a)
// a <= b
// iterating through the Set
// returns value or -1 if not exist
int setCurrent(const uint8_t current); // set element as current
int first(); // find first element
int next(); // find next element
int prev(); // find previous element
int last(); // find last element
int getNth(const uint8_t n); // find Nth element in a set (from start)
private:
uint8_t _mem[32]; // can hold 0..255
uint8_t _masks[8] = {1, 2, 4, 8, 16, 32, 64, 128};
int _current = -1;
int findNext(const uint8_t p, const uint8_t q); // helper for first, next
int findPrev(const uint8_t p, const uint8_t q); // helper for last, prev
};
// -- END OF FILE --
| 1,052 |
1,939 | package com.tencent.devops.common.api.checkerset;
import lombok.Data;
/**
 * Description
*
* @version V1.0
* @date 2020/1/10
*/
@Data
public class AuthManagementPermissionReqVO
{
/**
     * Project ID
*/
private String projectId;
/**
     * User name
*/
private String user;
/**
     * Checker set ID
*/
private String checkerSetId;
}
| 175 |
2,360 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class XcbProto(AutotoolsPackage):
"""xcb-proto provides the XML-XCB protocol descriptions that libxcb uses to
generate the majority of its code and API."""
homepage = "https://xcb.freedesktop.org/"
url = "https://xorg.freedesktop.org/archive/individual/proto/xcb-proto-1.14.1.tar.xz"
version('1.14.1', sha256='f04add9a972ac334ea11d9d7eb4fc7f8883835da3e4859c9afa971efdf57fcc3')
version('1.14', sha256='186a3ceb26f9b4a015f5a44dcc814c93033a5fc39684f36f1ecc79834416a605')
version('1.13', sha256='0698e8f596e4c0dbad71d3dc754d95eb0edbb42df5464e0f782621216fa33ba7')
version('1.12', sha256='cfa49e65dd390233d560ce4476575e4b76e505a0e0bacdfb5ba6f8d0af53fd59')
version('1.11', sha256='d12152193bd71aabbdbb97b029717ae6d5d0477ab239614e3d6193cc0385d906')
# TODO: uncomment once build deps can be resolved separately
# See #7646, #4145, #4063, and #2548 for details
# extends('python')
patch('xcb-proto-1.12-schema-1.patch', when='@1.12')
def url_for_version(self, version):
if version >= Version('1.14'):
url = 'https://xorg.freedesktop.org/archive/individual/proto/xcb-proto-{0}.tar.xz'
else:
url = 'http://xcb.freedesktop.org/dist/xcb-proto-{0}.tar.gz'
return url.format(version)
| 681 |
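A standalone sketch of the url_for_version logic above (editor's illustration; it re-implements the 1.14 cutoff with plain tuples instead of Spack's Version class):

def xcb_proto_url(version: str) -> str:
    major, minor = (int(x) for x in version.split('.')[:2])
    if (major, minor) >= (1, 14):
        url = 'https://xorg.freedesktop.org/archive/individual/proto/xcb-proto-{0}.tar.xz'
    else:
        url = 'http://xcb.freedesktop.org/dist/xcb-proto-{0}.tar.gz'
    return url.format(version)

print(xcb_proto_url('1.14.1'))  # xorg.freedesktop.org archive, .tar.xz
print(xcb_proto_url('1.12'))    # xcb.freedesktop.org dist, .tar.gz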
14,668 | <reponame>zealoussnow/chromium
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/test/data/nacl/ppapi_test_lib/internal_utils.h"
namespace {
PP_Module global_pp_module = 0;
PP_Instance global_pp_instance = 0;
PPB_GetInterface global_ppb_get_interface = NULL;
} // namespace
void set_ppb_get_interface(PPB_GetInterface get_interface) {
global_ppb_get_interface = get_interface;
}
void set_pp_instance(PP_Instance instance) { global_pp_instance = instance; }
void set_pp_module(PP_Module module) { global_pp_module = module; }
PPB_GetInterface ppb_get_interface() { return global_ppb_get_interface; }
PP_Module pp_module() { return global_pp_module; }
PP_Instance pp_instance() { return global_pp_instance; }
| 281 |
348 | {"nom":"Montbartier","circ":"2ème circonscription","dpt":"Tarn-et-Garonne","inscrits":894,"abs":479,"votants":415,"blancs":30,"nuls":20,"exp":365,"res":[{"nuance":"RDG","nom":"Mme <NAME>","voix":201},{"nuance":"FN","nom":"<NAME>","voix":164}]} | 98 |
334 | import sys
import traceback
# Python 3 compatibility
from ._compat import reduce, as_unicode
CHAR_ESCAPE = u'.'
CHAR_SEPARATOR = u','
def import_module(name, required=True):
"""
Import module by name
:param name:
Module name
:param required:
If set to `True` and module was not found - will throw exception.
If set to `False` and module was not found - will return None.
Default is `True`.
"""
try:
__import__(name, globals(), locals(), [])
except ImportError:
if not required and module_not_found():
return None
raise
return sys.modules[name]
def import_attribute(name):
"""
Import attribute using string reference.
:param name:
String reference.
Raises ImportError or AttributeError if module or attribute do not exist.
Example::
import_attribute('a.b.c.foo')
"""
path, attr = name.rsplit('.', 1)
module = __import__(path, globals(), locals(), [attr])
return getattr(module, attr)
def module_not_found(additional_depth=0):
"""
Checks if ImportError was raised because module does not exist or
something inside it raised ImportError
:param additional_depth:
supply int of depth of your call if you're not doing
import on the same level of code - f.e., if you call function, which is
doing import, you should pass 1 for single additional level of depth
"""
tb = sys.exc_info()[2]
if len(traceback.extract_tb(tb)) > (1 + additional_depth):
return False
return True
def rec_getattr(obj, attr, default=None):
"""
Recursive getattr.
:param attr:
Dot delimited attribute name
:param default:
Default value
Example::
rec_getattr(obj, 'a.b.c')
"""
try:
return reduce(getattr, attr.split('.'), obj)
except AttributeError:
return default
def get_dict_attr(obj, attr, default=None):
"""
Get attribute of the object without triggering its __getattr__.
:param obj:
Object
:param attr:
Attribute name
:param default:
Default value if attribute was not found
"""
for obj in [obj] + obj.__class__.mro():
if attr in obj.__dict__:
return obj.__dict__[attr]
return default
def escape(value):
return (as_unicode(value)
.replace(CHAR_ESCAPE, CHAR_ESCAPE + CHAR_ESCAPE)
.replace(CHAR_SEPARATOR, CHAR_ESCAPE + CHAR_SEPARATOR))
def iterencode(iter):
"""
Encode enumerable as compact string representation.
:param iter:
Enumerable
"""
return ','.join(as_unicode(v)
.replace(CHAR_ESCAPE, CHAR_ESCAPE + CHAR_ESCAPE)
.replace(CHAR_SEPARATOR, CHAR_ESCAPE + CHAR_SEPARATOR)
for v in iter)
def iterdecode(value):
"""
    Decode enumerable from its string representation as a tuple
"""
if not value:
return tuple()
result = []
accumulator = u''
escaped = False
for c in value:
if not escaped:
if c == CHAR_ESCAPE:
escaped = True
continue
elif c == CHAR_SEPARATOR:
result.append(accumulator)
accumulator = u''
continue
else:
escaped = False
accumulator += c
result.append(accumulator)
return tuple(result)
| 1,610 |
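A quick round-trip sketch for the helpers above (editor's illustration; assumes the functions defined in this module are in scope, and uses a hypothetical Obj class for rec_getattr):

values = ('a,b', 'c.d', 'plain')
encoded = iterencode(values)          # -> 'a.,b,c..d,plain'
assert iterdecode(encoded) == values  # separators and escape chars survive the round trip

class Obj:
    pass

o = Obj()
o.a = Obj()
o.a.b = 42
assert rec_getattr(o, 'a.b') == 42
assert rec_getattr(o, 'a.missing', 'default') == 'default'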
974 | <filename>3rdparty/blend2d/src/blend2d/font_p.h
// Blend2D - 2D Vector Graphics Powered by a JIT Compiler
//
// * Official Blend2D Home Page: https://blend2d.com
// * Official Github Repository: https://github.com/blend2d/blend2d
//
// Copyright (c) 2017-2020 The Blend2D Authors
//
// This software is provided 'as-is', without any express or implied
// warranty. In no event will the authors be held liable for any damages
// arising from the use of this software.
//
// Permission is granted to anyone to use this software for any purpose,
// including commercial applications, and to alter it and redistribute it
// freely, subject to the following restrictions:
//
// 1. The origin of this software must not be misrepresented; you must not
// claim that you wrote the original software. If you use this software
// in a product, an acknowledgment in the product documentation would be
// appreciated but is not required.
// 2. Altered source versions must be plainly marked as such, and must not be
// misrepresented as being the original software.
// 3. This notice may not be removed or altered from any source distribution.
#ifndef BLEND2D_FONT_P_H_INCLUDED
#define BLEND2D_FONT_P_H_INCLUDED
#include "./api-internal_p.h"
#include "./array_p.h"
#include "./font.h"
#include "./matrix_p.h"
//! \cond INTERNAL
//! \addtogroup blend2d_internal
//! \{
// ============================================================================
// [Forward Declarations]
// ============================================================================
struct BLOTFaceImpl;
// ============================================================================
// [Constants]
// ============================================================================
static constexpr uint32_t BL_FONT_GET_GLYPH_OUTLINE_BUFFER_SIZE = 2048;
// ============================================================================
// [Utilities]
// ============================================================================
//! Returns `true` if the given `tag` is valid. A valid tag consists of 4
//! ASCII characters within [32..126] range (inclusive).
static BL_INLINE bool blFontTagIsValid(uint32_t tag) noexcept {
return (bool)( (((tag - 0x20202020u) & 0xFF000000u) < 0x5F000000u) &
(((tag - 0x20202020u) & 0x00FF0000u) < 0x005F0000u) &
(((tag - 0x20202020u) & 0x0000FF00u) < 0x00005F00u) &
(((tag - 0x20202020u) & 0x000000FFu) < 0x0000005Fu) );
}
//! Converts `tag` to a null-terminated ASCII string `str`. Characters that are
//! not printable are replaced by '?' character, thus it's not safe to convert
//! the output string back to tag if it was invalid.
static BL_INLINE void blFontTagToAscii(char str[5], uint32_t tag) noexcept {
for (size_t i = 0; i < 4; i++, tag <<= 8) {
uint32_t c = tag >> 24;
str[i] = (c < 32 || c > 127) ? char('?') : char(c);
}
str[4] = '\0';
}
BL_INLINE void blFontMatrixMultiply(BLMatrix2D* dst, const BLFontMatrix* a, const BLMatrix2D* b) noexcept {
dst->reset(a->m00 * b->m00 + a->m01 * b->m10,
a->m00 * b->m01 + a->m01 * b->m11,
a->m10 * b->m00 + a->m11 * b->m10,
a->m10 * b->m01 + a->m11 * b->m11,
b->m20,
b->m21);
}
BL_INLINE void blFontMatrixMultiply(BLMatrix2D* dst, const BLMatrix2D* a, const BLFontMatrix* b) noexcept {
dst->reset(a->m00 * b->m00 + a->m01 * b->m10,
a->m00 * b->m01 + a->m01 * b->m11,
a->m10 * b->m00 + a->m11 * b->m10,
a->m10 * b->m01 + a->m11 * b->m11,
a->m20 * b->m00 + a->m21 * b->m10,
a->m20 * b->m01 + a->m21 * b->m11);
}
// ============================================================================
// [BLFontTableT]
// ============================================================================
//! A convenience class that maps `BLFontTable` to a typed table.
template<typename T>
class BLFontTableT : public BLFontTable {
public:
BL_INLINE BLFontTableT() noexcept = default;
constexpr BLFontTableT(const BLFontTableT& other) noexcept = default;
constexpr BLFontTableT(const BLFontTable& other) noexcept
: BLFontTable(other) {}
constexpr BLFontTableT(const uint8_t* data, size_t size) noexcept
: BLFontTable { data, size } {}
BL_INLINE BLFontTableT& operator=(const BLFontTableT& other) noexcept = default;
BL_INLINE const T* operator->() const noexcept { return dataAs<T>(); }
};
static BL_INLINE bool blFontTableFitsN(const BLFontTable& table, size_t requiredSize, size_t offset = 0) noexcept {
return (table.size - offset) >= requiredSize;
}
template<typename T>
static BL_INLINE bool blFontTableFitsT(const BLFontTable& table, size_t offset = 0) noexcept {
return blFontTableFitsN(table, T::kMinSize, offset);
}
static BL_INLINE BLFontTable blFontSubTable(const BLFontTable& table, size_t offset) noexcept {
BL_ASSERT(offset <= table.size);
return BLFontTable { table.data + offset, table.size - offset };
}
static BL_INLINE BLFontTable blFontSubTableChecked(const BLFontTable& table, size_t offset) noexcept {
return blFontSubTable(table, blMin(table.size, offset));
}
template<typename T>
static BL_INLINE BLFontTableT<T> blFontSubTableT(const BLFontTable& table, size_t offset) noexcept {
BL_ASSERT(offset <= table.size);
return BLFontTableT<T> { table.data + offset, table.size - offset };
}
template<typename T>
static BL_INLINE BLFontTableT<T> blFontSubTableCheckedT(const BLFontTable& table, size_t offset) noexcept {
return blFontSubTableT<T>(table, blMin(table.size, offset));
}
// ============================================================================
// [BLFontData - Internal]
// ============================================================================
struct BLInternalFontDataImpl : public BLFontDataImpl {
volatile size_t backRefCount;
BLArray<BLFontFaceImpl*> faceCache;
};
template<>
struct BLInternalCastImpl<BLFontDataImpl> { typedef BLInternalFontDataImpl Type; };
// ============================================================================
// [BLFontFace - Internal]
// ============================================================================
struct BLInternalFontFaceFuncs {
BLResult (BL_CDECL* mapTextToGlyphs)(
const BLFontFaceImpl* impl,
uint32_t* content,
size_t count,
BLGlyphMappingState* state) BL_NOEXCEPT;
BLResult (BL_CDECL* getGlyphBounds)(
const BLFontFaceImpl* impl,
const uint32_t* glyphData,
intptr_t glyphAdvance,
BLBoxI* boxes,
size_t count) BL_NOEXCEPT;
BLResult (BL_CDECL* getGlyphAdvances)(
const BLFontFaceImpl* impl,
const uint32_t* glyphData,
intptr_t glyphAdvance,
BLGlyphPlacement* placementData,
size_t count) BL_NOEXCEPT;
BLResult (BL_CDECL* getGlyphOutlines)(
const BLFontFaceImpl* impl,
uint32_t glyphId,
const BLMatrix2D* userMatrix,
BLPath* out,
size_t* contourCountOut,
BLMemBuffer* tmpBuffer) BL_NOEXCEPT;
BLResult (BL_CDECL* applyKern)(
const BLFontFaceImpl* faceI,
uint32_t* glyphData,
BLGlyphPlacement* placementData,
size_t count) BL_NOEXCEPT;
BLResult (BL_CDECL* applyGSub)(
const BLFontFaceImpl* impl,
BLGlyphBuffer* gb,
size_t index,
BLBitWord lookups) BL_NOEXCEPT;
BLResult (BL_CDECL* applyGPos)(
const BLFontFaceImpl* impl,
BLGlyphBuffer* gb,
size_t index,
BLBitWord lookups) BL_NOEXCEPT;
BLResult (BL_CDECL* positionGlyphs)(
const BLFontFaceImpl* impl,
uint32_t* glyphData,
BLGlyphPlacement* placementData,
size_t count) BL_NOEXCEPT;
};
BL_HIDDEN extern BLInternalFontFaceFuncs blNullFontFaceFuncs;
struct BLInternalFontFaceImpl : public BLFontFaceImpl {
BLInternalFontFaceFuncs funcs;
};
template<>
struct BLInternalCastImpl<BLFontFaceImpl> { typedef BLInternalFontFaceImpl Type; };
// ============================================================================
// [BLFont - Internal]
// ============================================================================
struct BLInternalFontImpl : public BLFontImpl {};
template<>
struct BLInternalCastImpl<BLFontImpl> { typedef BLInternalFontImpl Type; };
BL_HIDDEN BLResult blFontImplDelete(BLFontImpl* impl_) noexcept;
//! \}
//! \endcond
#endif // BLEND2D_FONT_P_H_INCLUDED
| 2,847 |
358 | // Copyright 2018 The Ripple Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "listener.h"
#include "mysql_init.h"
namespace mysql_ripple {
Listener::Listener(SessionFactory *factory,
mysql::ServerPort *port)
: ThreadedSession(Session::Listener),
factory_(factory), port_(port) {
}
Listener::~Listener() {
}
bool Listener::Stop() {
if (ThreadedSession::Stop()) {
port_->Shutdown();
return true;
}
return false;
}
void* Listener::Run() {
mysql::ThreadInit();
while (!ShouldStop()) {
Connection *con = port_->Accept();
if (con != nullptr) {
if (!factory_->NewSession(con)) {
delete con;
}
}
}
mysql::ThreadDeinit();
return nullptr;
}
} // namespace mysql_ripple
| 427 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.messagebus;
import java.util.ArrayDeque;
import java.util.Deque;
/**
* An wrapper around a stack of frame objects that is aware of the message that owns it. It contains functionality to
* move the content of itself to another, never to copy, since a callback is unique and might be counted by
* implementations such as Resender.
*
* @author <NAME>
*/
public class CallStack {
private Deque<StackFrame> stack = new ArrayDeque<>();
/**
* Push a handler onto the callstack of this message with a given context.
*
* @param handler The reply handler to store.
* @param context The context to be associated with the message for that handler.
*/
public void push(ReplyHandler handler, Object context) {
stack.push(new StackFrame(handler, context));
}
/**
* Pop a frame from this stack. The handler part of the frame will be returned and the context part will be set on
* the given reply. Invoke this method on an empty stack and terrible things will happen.
*
* @param routable The routable that will have its context set.
* @return The next handler on the stack.
*/
public ReplyHandler pop(Routable routable) {
StackFrame frame = stack.pop();
routable.setContext(frame.context);
return frame.handler;
}
/**
* Swap the content of this and the argument stack.
*
* @param other The stack to swap content with.
*/
public void swap(CallStack other) {
Deque<StackFrame> tmp = stack;
stack = other.stack;
other.stack = tmp;
}
/**
* Clear this call stack. This method should only be used when you are certain that it is safe to just throw away
* the stack. It has similar effects to stopping a thread, you need to know where it is safe to do so.
*/
public void clear() {
stack.clear();
}
/**
* Returns the number of elements of the callstack.
*
* @return The number of elements.
*/
public int size() {
return stack.size();
}
/**
* Helper class that holds stack frame data.
*/
private static class StackFrame {
private final ReplyHandler handler;
private final Object context;
StackFrame(ReplyHandler handler, Object context) {
this.handler = handler;
this.context = context;
}
}
}
| 855 |
311 | <gh_stars>100-1000
package com.bitlove.fetlife.event;
import com.bitlove.fetlife.model.pojos.fetlife.dbjson.Event;
import com.google.android.gms.maps.model.LatLngBounds;
import java.util.List;
public class EventsByLocationRetrievedEvent {
private final int page;
private List<Event> events;
private LatLngBounds searchBounds;
public EventsByLocationRetrievedEvent(LatLngBounds searchBounds, int page, List<Event> events) {
this.events = events;
this.searchBounds = searchBounds;
this.page = page;
}
public List<Event> getEvents() {
return events;
}
public LatLngBounds getSearchBounds() {
return searchBounds;
}
public int getPage() {
return page;
}
}
| 294 |
421 | <reponame>hamarb123/dotnet-api-docs<gh_stars>100-1000
// System.Web.Services.Description.Port.ctor().
// System.Web.Services.Description.Port.Binding().
// System.Web.Services.Description.Port.Extensions.
// System.Web.Services.Description.Port.Name.
// System.Web.Services.Description.Port.Service.
/* The following example demonstrates the constructor and the properties 'Binding',
   'Extensions', 'Name', and 'Service' of the 'Port' class.
   The input to the program is a WSDL file 'AddNumbers_cs.wsdl'.
   It creates a 'ServiceDescription' instance by using the static Read method
   of 'ServiceDescription', passing the 'AddNumbers_cs.wsdl' name as an argument.
   It creates a 'Binding' object and adds that binding object to the
   'ServiceDescription'. It adds the 'PortType' and Messages to the 'ServiceDescription'
   object. Finally, it writes the 'ServiceDescription' as a WSDL file with
   the name 'AddNumbersOne.wsdl'.
*/
#using <System.Xml.dll>
#using <System.Web.Services.dll>
#using <System.dll>
using namespace System;
using namespace System::Web::Services::Description;
using namespace System::Web;
using namespace System::Collections;
using namespace System::Xml;
int main()
{
try
{
ServiceDescription^ myDescription = ServiceDescription::Read( "AddNumbers_cs.wsdl" );
// Create the 'Binding' object.
Binding^ myBinding = gcnew Binding;
myBinding->Name = "PortServiceHttpPost";
XmlQualifiedName^ qualifiedName = gcnew XmlQualifiedName( "s0:PortServiceHttpPost" );
myBinding->Type = qualifiedName;
// Create the 'HttpBinding' object.
HttpBinding^ myHttpBinding = gcnew HttpBinding;
myHttpBinding->Verb = "POST";
// Add the 'HttpBinding' to the 'Binding'.
myBinding->Extensions->Add( myHttpBinding );
// Create the 'OperationBinding' object.
OperationBinding^ myOperationBinding = gcnew OperationBinding;
myOperationBinding->Name = "AddNumbers";
HttpOperationBinding^ myOperation = gcnew HttpOperationBinding;
myOperation->Location = "/AddNumbers";
// Add the 'HttpOperationBinding' to 'OperationBinding'.
myOperationBinding->Extensions->Add( myOperation );
// Create the 'InputBinding' object.
InputBinding^ myInput = gcnew InputBinding;
MimeContentBinding^ postMimeContentbinding = gcnew MimeContentBinding;
postMimeContentbinding->Type = "application/x-www-form-urlencoded";
myInput->Extensions->Add( postMimeContentbinding );
// Add the 'InputBinding' to 'OperationBinding'.
myOperationBinding->Input = myInput;
// Create the 'OutputBinding' object.
OutputBinding^ myOutput = gcnew OutputBinding;
MimeXmlBinding^ postMimeXmlbinding = gcnew MimeXmlBinding;
postMimeXmlbinding->Part = "Body";
myOutput->Extensions->Add( postMimeXmlbinding );
// Add the 'OutPutBinding' to 'OperationBinding'.
myOperationBinding->Output = myOutput;
// Add the 'OperationBinding' to 'Binding'.
myBinding->Operations->Add( myOperationBinding );
// Add the 'Binding' to 'BindingCollection' of 'ServiceDescription'.
myDescription->Bindings->Add( myBinding );
// <Snippet1>
// <Snippet2>
// <Snippet3>
// <Snippet4>
// <Snippet5>
// Create a Port.
Port^ postPort = gcnew Port;
postPort->Name = "PortServiceHttpPost";
postPort->Binding = gcnew XmlQualifiedName( "s0:PortServiceHttpPost" );
// </Snippet4>
// </Snippet2>
// Create an HttpAddressBinding.
HttpAddressBinding^ postAddressBinding = gcnew HttpAddressBinding;
postAddressBinding->Location = "http://localhost/PortClass/PortService_cs.asmx";
// Add the HttpAddressBinding to the Port.
postPort->Extensions->Add( postAddressBinding );
// </Snippet3>
// Get the Service of the postPort.
Service^ myService = postPort->Service;
// Print the service name for the port.
Console::WriteLine( "This is the service name of the postPort:*{0}*", myDescription->Services[ 0 ]->Ports[ 0 ]->Service->Name );
// Add the Port to the PortCollection of the ServiceDescription.
myDescription->Services[ 0 ]->Ports->Add( postPort );
// </Snippet5>
// </Snippet1>
// Create a 'PortType' object.
PortType^ postPortType = gcnew PortType;
postPortType->Name = "PortServiceHttpPost";
Operation^ postOperation = gcnew Operation;
postOperation->Name = "AddNumbers";
OperationMessage^ postInput = dynamic_cast<OperationMessage^>(gcnew OperationInput);
postInput->Message = gcnew XmlQualifiedName( "s0:AddNumbersHttpPostIn" );
OperationMessage^ postOutput = dynamic_cast<OperationMessage^>(gcnew OperationOutput);
postOutput->Message = gcnew XmlQualifiedName( "s0:AddNumbersHttpPostOut" );
postOperation->Messages->Add( postInput );
postOperation->Messages->Add( postOutput );
// Add the 'Operation' to 'PortType'.
postPortType->Operations->Add( postOperation );
// Adds the 'PortType' to 'PortTypeCollection' of 'ServiceDescription'.
myDescription->PortTypes->Add( postPortType );
// Create the 'Message' object.
Message^ postMessage1 = gcnew Message;
postMessage1->Name = "AddNumbersHttpPostIn";
// Create the 'MessageParts'.
MessagePart^ postMessagePart1 = gcnew MessagePart;
postMessagePart1->Name = "firstnumber";
postMessagePart1->Type = gcnew XmlQualifiedName( "s:string" );
MessagePart^ postMessagePart2 = gcnew MessagePart;
postMessagePart2->Name = "secondnumber";
postMessagePart2->Type = gcnew XmlQualifiedName( "s:string" );
// Add the 'MessagePart' objects to 'Messages'.
postMessage1->Parts->Add( postMessagePart1 );
postMessage1->Parts->Add( postMessagePart2 );
// Create another 'Message' object.
Message^ postMessage2 = gcnew Message;
postMessage2->Name = "AddNumbersHttpPostOut";
MessagePart^ postMessagePart3 = gcnew MessagePart;
postMessagePart3->Name = "Body";
postMessagePart3->Element = gcnew XmlQualifiedName( "s0:int" );
// Add the 'MessagePart' to 'Message'
postMessage2->Parts->Add( postMessagePart3 );
// Add the 'Message' objects to 'ServiceDescription'.
myDescription->Messages->Add( postMessage1 );
myDescription->Messages->Add( postMessage2 );
// Write the 'ServiceDescription' as a WSDL file.
myDescription->Write( "AddNumbersOne.wsdl" );
Console::WriteLine( "WSDL file with name 'AddNumbersOne.Wsdl' file created Successfully" );
}
catch ( Exception^ ex )
{
Console::WriteLine( "Exception {0} occurred", ex->Message );
}
}
| 2,596 |
631 | package org.javalite.activejdbc;
import org.javalite.activejdbc.test.ActiveJDBCTest;
import org.javalite.activejdbc.test_models.Person;
import org.javalite.activejdbc.test_models.User;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* @author <NAME> on 4/7/15.
*/
public class LifecycleCallbackSpec extends ActiveJDBCTest {
@Before
public void setup() throws Exception {
deleteAndPopulateTable("people");
Person.callbackWith(new CallbackAdapter() {
@Override
public void afterLoad(Model m) {
m.set("name", m.get("name") + " :suffix added after load");
}
});
}
@After
public void tearDown(){
Person.callbackWith(new CallbackAdapter());
User.callbackWith(new CallbackAdapter());
}
@Test
public void shouldFireAfterLoadFromDB() {
a(Person.findAll().orderBy("name").get(0).get("name")).shouldBeEqual("Joe :suffix added after load");
}
@Test
public void shouldResetListener() {
CallbackAdapter adapter = new CallbackAdapter() {
@Override
public void afterLoad(Model m) {
m.set("first_name", m.get("first_name") + " :suffix added after load");
}
};
User.createIt("first_name", "Tim", "last_name", "Kane", "email", "<EMAIL>");
User.createIt("first_name", "Mike", "last_name", "Pense", "email", "<EMAIL>");
User.callbackWith(new CallbackAdapter()); // does nothing
User tim = User.findFirst("first_name = 'Tim'");
a(tim.get("first_name")).shouldBeEqual("Tim");
User.callbackWith(adapter);
tim = User.findFirst("first_name = 'Tim'");
a(tim.get("first_name")).shouldBeEqual("Tim :suffix added after load");
}
}
| 764 |
1,336 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.servicecomb.http.client.common;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.client.methods.HttpUriRequest;
import org.apache.http.client.methods.RequestBuilder;
import org.apache.http.entity.StringEntity;
public class HttpRequest {
public static final String GET = "GET";
public static final String POST = "POST";
public static final String DELETE = "DELETE";
public static final String PUT = "PUT";
private String method;
private String url;
private Map<String, String> headers;
private String content;
public HttpRequest(String url, Map<String, String> headers, String content, String method) {
this.url = url;
this.headers = headers;
this.content = content;
this.method = method;
}
public String getUrl() {
return url;
}
public Map<String, String> getHeaders() {
return headers;
}
public void addHeader(String name, String value) {
if (headers == null) {
headers = new HashMap<>();
}
headers.put(name, value);
}
public String getContent() {
return content;
}
public String getMethod() {
return method;
}
public void setMethod(String method) {
this.method = method;
}
public HttpUriRequest getRealRequest() {
HttpUriRequest httpUriRequest = null;
switch (method) {
case GET: {
httpUriRequest = new HttpGet(url);
break;
}
case POST: {
httpUriRequest = new HttpPost(url);
if (content != null) {
((HttpPost) httpUriRequest).setEntity(new StringEntity(content, "UTF-8"));
}
break;
}
case DELETE: {
httpUriRequest = new HttpDelete(url);
break;
}
case PUT: {
httpUriRequest = new HttpPut(url);
if (content != null) {
((HttpPut) httpUriRequest).setEntity(new StringEntity(content, "UTF-8"));
}
break;
}
default: {
httpUriRequest = RequestBuilder.create(method).build();
}
}
Optional.ofNullable(httpUriRequest).ifPresent(request -> headers.forEach(request::addHeader));
return httpUriRequest;
}
}
| 1,094 |
317 | <filename>Modules/Filtering/MathParserX/include/otbBandMathXImageFilter.hxx
/*
* Copyright (C) 1999-2011 Insight Software Consortium
* Copyright (C) 2005-2020 Centre National d'Etudes Spatiales (CNES)
*
* This file is part of Orfeo Toolbox
*
* https://www.orfeo-toolbox.org/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef otbBandMathXImageFilter_hxx
#define otbBandMathXImageFilter_hxx
#include "otbBandMathXImageFilter.h"
#include "itkImageRegionIterator.h"
#include "itkImageRegionConstIterator.h"
#include "itkImageScanlineConstIterator.h"
#include "itkImageScanlineIterator.h"
#include "itkImageRegionConstIteratorWithOnlyIndex.h"
#include "itkConstNeighborhoodIterator.h"
#include "itkNumericTraits.h"
#include "itkProgressReporter.h"
#include "otbMacro.h"
#include <iostream>
#include <fstream>
#include <string>
namespace otb
{
/** Constructor */
template <class TImage>
BandMathXImageFilter<TImage>::BandMathXImageFilter()
{
  // This number will be incremented each time an image
  // is added over the one minimum required
this->SetNumberOfRequiredInputs(1);
m_UnderflowCount = 0;
m_OverflowCount = 0;
m_ThreadUnderflow.SetSize(1);
m_ThreadOverflow.SetSize(1);
// idxX and idxY
adhocStruct ahcX;
ahcX.name = "idxX";
ahcX.type = 0;
m_VAllowedVarNameAuto.push_back(ahcX);
adhocStruct ahcY;
ahcY.name = "idxY";
ahcY.type = 1;
m_VAllowedVarNameAuto.push_back(ahcY);
m_SizeNeighbourhood = 10;
m_ManyExpressions = true;
}
/** Destructor */
template <class TImage>
BandMathXImageFilter<TImage>::~BandMathXImageFilter()
{
m_Expression.clear();
m_VParser.clear();
for (unsigned int i = 0; i < m_AImage.size(); ++i)
m_AImage[i].clear();
m_AImage.clear();
m_VVarName.clear();
m_VAllowedVarNameAuto.clear();
m_VAllowedVarNameAddedByUser.clear();
m_VFinalAllowedVarName.clear();
m_VNotAllowedVarName.clear();
m_outputsDimensions.clear();
}
template <class TImage>
void BandMathXImageFilter<TImage>::PrintSelf(std::ostream& os, itk::Indent indent) const
{
Superclass::PrintSelf(os, indent);
os << indent << "Expressions: " << std::endl;
for (unsigned int i = 0; i < m_Expression.size(); i++)
os << indent << m_Expression[i] << std::endl;
os << indent << "Computed values follow:" << std::endl;
os << indent << "UnderflowCount: " << m_UnderflowCount << std::endl;
os << indent << "OverflowCount: " << m_OverflowCount << std::endl;
os << indent << "itk::NumericTraits<typename PixelValueType>::NonpositiveMin() : " << itk::NumericTraits<PixelValueType>::NonpositiveMin() << std::endl;
os << indent << "itk::NumericTraits<typename PixelValueType>::max() : " << itk::NumericTraits<PixelValueType>::max() << std::endl;
}
template <class TImage>
void BandMathXImageFilter<TImage>::SetNthInput(DataObjectPointerArraySizeType idx, const ImageType* image)
{
std::stringstream sstm;
sstm << "im" << (idx + 1);
this->SetNthInput(idx, image, sstm.str());
}
template <class TImage>
void BandMathXImageFilter<TImage>::SetNthInput(DataObjectPointerArraySizeType idx, const ImageType* image, const std::string& varName)
{
ImageType* imagebis = const_cast<ImageType*>(image); // Useful for call of UpdateOutputInformation() (see below)
this->SetInput(idx, imagebis);
// imiPhyX and imiPhyY
std::stringstream sstmPhyX;
adhocStruct ahcPhyX;
sstmPhyX << varName << "PhyX";
ahcPhyX.name = sstmPhyX.str();
ahcPhyX.type = 2;
ahcPhyX.info[0] = idx; // Input image #ID
m_VAllowedVarNameAuto.push_back(ahcPhyX);
std::stringstream sstmPhyY;
adhocStruct ahcPhyY;
sstmPhyY << varName << "PhyY";
ahcPhyY.name = sstmPhyY.str();
ahcPhyY.type = 3;
ahcPhyY.info[0] = idx; // Input image #ID
m_VAllowedVarNameAuto.push_back(ahcPhyY);
// imi
std::stringstream sstm_glob;
adhocStruct ahc_glob;
sstm_glob << varName;
ahc_glob.name = sstm_glob.str();
ahc_glob.type = 4;
ahc_glob.info[0] = idx; // Input image #ID
m_VAllowedVarNameAuto.push_back(ahc_glob);
// Mandatory before call of GetNumberOfComponentsPerPixel
// Really important not to call the filter's UpdateOutputInformation method here:
// this method is not ready until all inputs, variables and expressions are set.
imagebis->UpdateOutputInformation();
// imibj
for (unsigned int j = 0; j < imagebis->GetNumberOfComponentsPerPixel(); j++)
{
std::stringstream sstm;
adhocStruct ahc;
sstm << varName << "b" << (j + 1);
ahc.name = sstm.str();
ahc.type = 5;
ahc.info[0] = idx; // Input image #ID
ahc.info[1] = j; // Band #ID
m_VAllowedVarNameAuto.push_back(ahc);
}
// imibjNkxp
for (unsigned int j = 0; j < imagebis->GetNumberOfComponentsPerPixel(); j++)
for (unsigned int x = 0; x <= m_SizeNeighbourhood; x++)
for (unsigned int y = 0; y <= m_SizeNeighbourhood; y++)
{
std::stringstream sstm;
adhocStruct ahc;
sstm << varName << "b" << (j + 1) << "N" << 2 * x + 1 << "x" << 2 * y + 1;
ahc.name = sstm.str();
ahc.type = 6;
ahc.info[0] = idx; // Input image #ID
ahc.info[1] = j; // Band #ID
ahc.info[2] = 2 * x + 1; // Size x direction (matrix convention = cols)
ahc.info[3] = 2 * y + 1; // Size y direction (matrix convention = rows)
m_VAllowedVarNameAuto.push_back(ahc);
}
// imibjSTATS
std::vector<std::string> statsNames;
statsNames.push_back("Mini");
statsNames.push_back("Maxi");
statsNames.push_back("Mean");
statsNames.push_back("Sum");
statsNames.push_back("Var");
for (unsigned int j = 0; j < imagebis->GetNumberOfComponentsPerPixel(); j++)
for (unsigned int t = 0; t < statsNames.size(); t++)
{
std::stringstream sstm;
adhocStruct ahc;
sstm << varName << "b" << (j + 1) << statsNames[t];
ahc.name = sstm.str();
ahc.type = 8;
ahc.info[0] = idx; // Input image #ID
ahc.info[1] = j; // Band #ID
ahc.info[2] = t; // Sub-type : 0 Mini, 1 Maxi, 2 Mean ...
m_VAllowedVarNameAuto.push_back(ahc);
}
}
template <typename TImage>
TImage* BandMathXImageFilter<TImage>::GetNthInput(DataObjectPointerArraySizeType idx)
{
return const_cast<TImage*>(this->GetInput(idx));
}
template <typename TImage>
void BandMathXImageFilter<TImage>::SetManyExpressions(bool flag)
{
m_ManyExpressions = flag;
}
template <typename TImage>
void BandMathXImageFilter<TImage>::SetExpression(const std::string& expression)
{
std::string expressionToBePushed = expression;
if (expression.find(";") != std::string::npos)
{
std::ostringstream oss;
oss << "cat(";
for (unsigned int i = 0; i < expression.size(); ++i)
if (expression[i] == ';')
oss << ",";
else
oss << expression[i];
oss << ")";
expressionToBePushed = oss.str();
}
if (m_ManyExpressions)
m_Expression.push_back(expressionToBePushed);
else if (m_Expression.size() == 0)
m_Expression.push_back(expressionToBePushed);
if (m_Expression.size() > 1)
this->SetNthOutput((DataObjectPointerArraySizeType)(m_Expression.size()) - 1, (TImage::New()).GetPointer());
this->Modified();
}
template <typename TImage>
void BandMathXImageFilter<TImage>::ClearExpression()
{
m_Expression.clear();
this->Modified();
}
template <typename TImage>
void BandMathXImageFilter<TImage>::SetMatrix(const std::string& name, const std::string& definition)
{
for (unsigned int i = 0; i < m_VAllowedVarNameAddedByUser.size(); i++)
if (name.compare(m_VAllowedVarNameAddedByUser[i].name) == 0)
itkExceptionMacro(<< "Variable name '" << name << "' already used." << std::endl);
if ((definition.find("{") != 0) || (definition.find("}")) != definition.size() - 1)
itkExceptionMacro(<< "Definition of a matrix must begin with { and end with } characters." << std::endl);
// Get rid of { and } characters
std::string def;
for (unsigned int i = 1; i < definition.size() - 1; ++i)
def.push_back(definition[i]);
std::vector<std::vector<double>> mat;
std::istringstream iss(def);
std::string rows;
while (std::getline(iss, rows, ';'))
{
mat.push_back(std::vector<double>(0));
std::istringstream iss2(rows);
std::string elmt;
while (std::getline(iss2, elmt, ','))
{
std::istringstream iss3(elmt);
double val;
iss3 >> val;
mat.back().push_back(val);
}
}
// Check dimensions of the matrix
for (unsigned int i = 0; i < mat.size() - 1; i++)
if (mat[i].size() != mat[i + 1].size())
itkExceptionMacro(<< "Each row must have the same number of cols : " << definition << std::endl);
// Registration
adhocStruct ahc;
ahc.name = name;
ahc.type = 7;
ahc.info[0] = mat[0].size(); // Size x direction (matrix convention = cols)
ahc.info[1] = mat.size(); // Size y direction (matrix convention = rows)
ahc.value = ValueType(ahc.info[1], ahc.info[0], 0.0);
for (unsigned int i = 0; i < mat.size(); i++)
for (unsigned int j = 0; j < mat[i].size(); j++)
ahc.value.At(i, j) = mat[i][j];
m_VAllowedVarNameAddedByUser.push_back(ahc);
}
template <typename TImage>
void BandMathXImageFilter<TImage>::SetConstant(const std::string& name, double value)
{
for (unsigned int i = 0; i < m_VAllowedVarNameAddedByUser.size(); i++)
if (name.compare(m_VAllowedVarNameAddedByUser[i].name) == 0)
itkExceptionMacro(<< "Variable name '" << name << "' already used." << std::endl);
adhocStruct ahc;
ahc.name = name;
ahc.type = 7;
ahc.value = value;
m_VAllowedVarNameAddedByUser.push_back(ahc);
}
template <typename TImage>
void BandMathXImageFilter<TImage>::ExportContext(const std::string& filename)
{
std::vector<std::string> vectI, vectF, vectM, vectFinal;
for (unsigned int i = 0; i < m_VAllowedVarNameAddedByUser.size(); i++)
{
std::ostringstream iss;
std::string str;
switch (m_VAllowedVarNameAddedByUser[i].value.GetType())
{
case 'i':
iss << "#I " << m_VAllowedVarNameAddedByUser[i].name << " " << m_VAllowedVarNameAddedByUser[i].value.GetInteger();
str = iss.str();
vectI.push_back(str);
break;
case 'f':
iss << "#F " << m_VAllowedVarNameAddedByUser[i].name << " " << m_VAllowedVarNameAddedByUser[i].value.GetFloat();
str = iss.str();
vectF.push_back(str);
break;
case 'c':
itkExceptionMacro(<< "Complex numbers not supported." << std::endl);
break;
case 'm':
iss << "#M " << m_VAllowedVarNameAddedByUser[i].name << " "
<< "{";
for (int k = 0; k < m_VAllowedVarNameAddedByUser[i].value.GetRows(); k++)
{
iss << " " << m_VAllowedVarNameAddedByUser[i].value.At(k, 0);
for (int p = 1; p < m_VAllowedVarNameAddedByUser[i].value.GetCols(); p++)
iss << " , " << m_VAllowedVarNameAddedByUser[i].value.At(k, p);
iss << ";";
}
str = iss.str();
str.erase(str.size() - 1);
str.push_back('}');
vectM.push_back(str);
break;
}
}
// Sorting : I F M and E
for (unsigned int i = 0; i < vectI.size(); ++i)
vectFinal.push_back(vectI[i]);
for (unsigned int i = 0; i < vectF.size(); ++i)
vectFinal.push_back(vectF[i]);
for (unsigned int i = 0; i < vectM.size(); ++i)
vectFinal.push_back(vectM[i]);
for (unsigned int i = 0; i < m_Expression.size(); ++i)
{
std::ostringstream iss;
iss << "#E " << m_Expression[i] << std::endl;
std::string str = iss.str();
vectFinal.push_back(str);
}
std::ofstream exportFile(filename, std::ios::out | std::ios::trunc);
if (exportFile)
{
for (unsigned int i = 0; i < vectFinal.size(); ++i)
exportFile << vectFinal[i] << std::endl;
exportFile.close();
}
else
itkExceptionMacro(<< "Could not open " << filename << "." << std::endl);
}
template <typename TImage>
void BandMathXImageFilter<TImage>::ImportContext(const std::string& filename)
{
std::ifstream importFile(filename, std::ios::in);
std::string wholeline, line, name, matrixdef;
int pos, pos2, lineID = 0, nbSuccesses = 0;
double value;
if (importFile)
{
while (std::getline(importFile, wholeline))
{
lineID++;
pos = wholeline.find_first_not_of(' ');
if (pos != (int)std::string::npos)
{
line = wholeline.substr(pos);
if ((line[0] == '#') && ((line[1] == 'I') || (line[1] == 'i') || (line[1] == 'F') || (line[1] == 'f')))
{
pos = line.find_first_not_of(' ', 2);
if (pos == (int)std::string::npos)
itkExceptionMacro(<< "In file '" << filename << "', line " << lineID << " : please, set the name and the value of the constant." << std::endl);
std::string sub = line.substr(pos);
pos = sub.find_first_of(' ');
name = sub.substr(0, pos);
if (sub.find_first_of('{', pos) != std::string::npos)
itkExceptionMacro(<< "In file '" << filename << "', line " << lineID
<< " : symbol #F found, but find vector/matrix definition. Please, set an integer or a float number." << std::endl);
if (sub.find_first_not_of(' ', pos) == std::string::npos)
itkExceptionMacro(<< "In file '" << filename << "', line " << lineID << " : please, set the value of the constant." << std::endl)
std::istringstream iss(sub.substr(pos));
iss >> value;
SetConstant(name, value);
nbSuccesses++;
}
else if ((line[0] == '#') && ((line[1] == 'M') || (line[1] == 'm')))
{
pos = line.find_first_not_of(' ', 2);
if (pos == (int)std::string::npos)
itkExceptionMacro(<< "In file '" << filename << "', line " << lineID << " : please, set the name and the definition of the vector/matrix."
<< std::endl);
std::string sub = line.substr(pos);
pos = sub.find_first_of(' ');
name = sub.substr(0, pos);
pos2 = sub.find_first_of('{');
if (pos2 != (int)std::string::npos)
matrixdef = sub.substr(pos2);
else
itkExceptionMacro(<< "In file '" << filename << "', line " << lineID << " : symbol #M found, but couldn't not find vector/matrix definition."
<< std::endl);
SetMatrix(name, matrixdef);
nbSuccesses++;
}
else if ((line[0] == '#') && ((line[1] == 'E') || (line[1] == 'e')))
{
pos = line.find_first_not_of(' ', 2);
if (pos == (int)std::string::npos)
itkExceptionMacro(<< "In file '" << filename << "', line " << lineID << " : symbol #E found, but couldn't not find any expression." << std::endl);
std::string sub = line.substr(pos);
SetExpression(sub);
nbSuccesses++;
}
}
} // while
importFile.close();
if (nbSuccesses == 0)
itkExceptionMacro(<< "No constant or expression could be set; please, ensure that the file '" << filename << "' is correct." << std::endl);
}
else
itkExceptionMacro(<< "Could not open " << filename << "." << std::endl);
}
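// Illustrative sketch (added note) of the context file format written by ExportContext and read by
// ImportContext above. The #I/#F/#M/#E markers and the layout follow the code; the names, values and
// expression below are hypothetical examples, not taken from a real context file:
//
//   #I size 256
//   #F thresh 0.25
//   #M kernel { 1 , 2 , 1 ; 2 , 4 , 2 ; 1 , 2 , 1}
//   #E im1b1 + thresh
//
// #I and #F lines define integer/float constants (SetConstant), #M lines define vectors or matrices
// (SetMatrix, rows separated by ';'), and #E lines define expressions (SetExpression).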
template <typename TImage>
std::string BandMathXImageFilter<TImage>::GetExpression(unsigned int IDExpression) const
{
if (IDExpression < m_Expression.size())
return m_Expression[IDExpression];
return "";
}
template <typename TImage>
std::vector<std::string> BandMathXImageFilter<TImage>::GetVarNames() const
{
std::vector<std::string> res;
for (unsigned int y = 0; y < m_VVarName.size(); y++)
res.push_back(m_VVarName[y].name);
return res;
}
template <typename TImage>
void BandMathXImageFilter<TImage>::AddVariable(adhocStruct& ahc)
{
bool found = false;
for (unsigned int i = 0; i < m_VVarName.size(); ++i)
if (m_VVarName[i].name == ahc.name)
found = true;
if (!found)
m_VVarName.push_back(ahc);
}
template <typename TImage>
void BandMathXImageFilter<TImage>::PrepareParsers()
{
if (m_Expression.size() == 0)
itkExceptionMacro(<< "No expression set; please set at least one expression." << std::endl);
// Generate variables names
m_VVarName.clear();
m_VNotAllowedVarName.clear();
m_VFinalAllowedVarName.clear();
// m_VFinalAllowedVarName = m_VAllowedVarNameAuto + m_VAllowedVarNameAddedByUser
// m_VFinalAllowedVarName = variable names dictionary
for (unsigned int i = 0; i < m_VAllowedVarNameAddedByUser.size(); i++)
m_VFinalAllowedVarName.push_back(m_VAllowedVarNameAddedByUser[i]);
for (unsigned int i = 0; i < m_VAllowedVarNameAuto.size(); i++)
m_VFinalAllowedVarName.push_back(m_VAllowedVarNameAuto[i]);
unsigned int nbExpr = m_Expression.size();
for (unsigned int IDExpression = 0; IDExpression < nbExpr; ++IDExpression) // For each expression
{
ParserType::Pointer dummyParser = ParserType::New();
dummyParser->SetExpr(this->GetExpression(IDExpression));
mup::var_maptype vmap = dummyParser->GetExprVar();
for (mup::var_maptype::iterator item = vmap.begin(); item != vmap.end(); ++item)
{
bool OK = false;
int i = 0;
while ((!OK) && (i < (int)m_VFinalAllowedVarName.size()))
{
if (item->first == m_VFinalAllowedVarName[i].name)
OK = true;
else
i++;
}
if (OK)
{
AddVariable(m_VFinalAllowedVarName[i]);
}
else
{
adhocStruct ahc;
ahc.name = item->first;
m_VNotAllowedVarName.push_back(ahc);
}
}
} // At this step, m_VVarName has been built
// Checking formulas consistency
if (m_VNotAllowedVarName.size() > 0)
{
std::stringstream sstm;
sstm << "Following variables not allowed : ";
for (unsigned int i = 0; i < m_VNotAllowedVarName.size(); ++i)
sstm << m_VNotAllowedVarName[i].name << " ";
sstm << std::endl;
itkExceptionMacro(<< sstm.str());
}
// Register variables for each parser (important : one parser per thread and per expression)
m_VParser.clear();
unsigned int nbThreads = this->GetNumberOfThreads();
for (unsigned int k = 0; k < nbThreads; k++)
{
std::vector<ParserType::Pointer> parserList;
for (unsigned int i = 0; i < nbExpr; i++)
{
parserList.push_back(ParserType::New());
}
m_VParser.push_back(parserList);
}
// Important to remember that variables of m_VVarName come from a call of GetExprVar method
// Only useful variables are allocated in this filter
int nbVar = m_VVarName.size();
m_StatsVarDetected.clear();
m_NeighDetected.clear();
m_NeighExtremaSizes.clear();
unsigned int nbInputImages = this->GetNumberOfInputs();
RadiusType dummyRadius;
dummyRadius[0] = 1;
dummyRadius[1] = 1;
m_NeighExtremaSizes.resize(nbInputImages, dummyRadius);
// Reset
for (unsigned int i = 0; i < m_AImage.size(); ++i)
m_AImage[i].clear();
m_AImage.clear();
m_AImage.resize(nbThreads);
double initValue = 0.1;
for (unsigned int i = 0; i < nbThreads; ++i) // For each thread
{
m_AImage[i].resize(nbVar); // For each variable
for (int j = 0; j < nbVar; ++j)
{
m_AImage[i][j].name = m_VVarName[j].name;
m_AImage[i][j].type = m_VVarName[j].type;
for (int t = 0; t < 5; ++t)
m_AImage[i][j].info[t] = m_VVarName[j].info[t];
if ((m_AImage[i][j].type == 0) || (m_AImage[i][j].type == 1)) // indices (idxX & idxY)
{
m_AImage[i][j].value = ValueType(initValue);
}
if (m_AImage[i][j].type == 2) // imiPhyX
{
SpacingType spacing = this->GetNthInput(m_AImage[i][j].info[0])->GetSignedSpacing();
m_AImage[i][j].value = ValueType(static_cast<double>(spacing[0]));
}
if (m_AImage[i][j].type == 3) // imiPhyY
{
SpacingType spacing = this->GetNthInput(m_AImage[i][j].info[0])->GetSignedSpacing();
m_AImage[i][j].value = ValueType(static_cast<double>(spacing[1]));
}
if (m_AImage[i][j].type == 4) // vector
{
unsigned int nbBands = this->GetNthInput(m_AImage[i][j].info[0])->GetNumberOfComponentsPerPixel();
m_AImage[i][j].value = ValueType(1, nbBands, initValue);
}
if (m_AImage[i][j].type == 5) // pixel
{
m_AImage[i][j].value = ValueType(initValue);
}
if (m_AImage[i][j].type == 6) // neighborhood
{
m_AImage[i][j].value = ValueType(m_AImage[i][j].info[3], m_AImage[i][j].info[2], initValue);
// m_AImage[i][j].info[0] = Image ID
bool found = false;
for (unsigned int r = 0; r < m_NeighDetected.size() && !found; r++)
if (m_NeighDetected[r] == (unsigned int)m_AImage[i][j].info[0])
found = true;
if (!found)
m_NeighDetected.push_back(m_AImage[i][j].info[0]);
// find biggest radius for a given input image (id is given by info[0])
if (m_NeighExtremaSizes[m_AImage[i][j].info[0]][0] < (unsigned int)((m_VVarName[j].info[2] - 1) / 2)) // Size x direction (otb convention)
m_NeighExtremaSizes[m_AImage[i][j].info[0]][0] = (unsigned int)((m_VVarName[j].info[2] - 1) / 2);
if (m_NeighExtremaSizes[m_AImage[i][j].info[0]][1] < (unsigned int)((m_VVarName[j].info[3] - 1) / 2)) // Size y direction (otb convention)
m_NeighExtremaSizes[m_AImage[i][j].info[0]][1] = (unsigned int)((m_VVarName[j].info[3] - 1) / 2);
}
if (m_AImage[i][j].type == 7) // user defined variables
{
for (int t = 0; t < (int)m_VAllowedVarNameAddedByUser.size(); t++)
if (m_VAllowedVarNameAddedByUser[t].name.compare(m_AImage[i][j].name) == 0)
m_AImage[i][j].value = m_VAllowedVarNameAddedByUser[t].value;
}
if (m_AImage[i][j].type == 8) // global stats
{
m_AImage[i][j].value = ValueType(initValue);
// m_AImage[i][j].info[0] = Image ID
bool found = false;
for (unsigned int r = 0; r < m_StatsVarDetected.size() && !found; r++)
if (m_StatsVarDetected[r] == m_AImage[i][j].info[0])
found = true;
if (!found)
m_StatsVarDetected.push_back(m_AImage[i][j].info[0]);
}
// Register variable
for (unsigned int k = 0; k < nbExpr; k++)
{
m_VParser[i][k]->DefineVar(m_AImage[i][j].name, &(m_AImage[i][j].value));
}
initValue += 0.001;
if (initValue > 1.0)
initValue = 0.1;
}
}
// Set expressions
for (unsigned int k = 0; k < nbThreads; k++)
{
for (unsigned int i = 0; i < nbExpr; i++)
{
m_VParser[k][i]->SetExpr(m_Expression[i]);
}
}
}
template <typename TImage>
void BandMathXImageFilter<TImage>::PrepareParsersGlobStats()
{
// Must instantiate stats variables of the parsers
// Note : at this stage, inputs have already been set to largest possible regions.
for (unsigned int i = 0; i < m_StatsVarDetected.size(); i++)
{
StreamingStatisticsVectorImageFilterPointerType filter = StreamingStatisticsVectorImageFilterType::New();
filter->SetInput(this->GetNthInput(m_StatsVarDetected[i]));
filter->Update();
PixelType pix; // Variable length vector
MatrixType mat;
for (unsigned int t = 0; t < m_AImage.size(); t++) // for each thread
for (unsigned int v = 0; v < m_AImage[t].size(); v++) // for each variable
if ((m_AImage[t][v].type == 8) && (m_AImage[t][v].info[0] == m_StatsVarDetected[i])) // type 8 : flag identifying a glob stat; info[0] : input ID
{
switch (m_AImage[t][v].info[2]) // info[2] sub-type (see also SetNthInput method above)
{
case 0: // mini
pix = filter->GetMinimum();
for (int b = 0; b < (int)pix.GetSize(); b++) // for each band
if (m_AImage[t][v].info[1] == b) // info[1] : band ID
m_AImage[t][v].value = pix[b];
break;
case 1: // maxi
pix = filter->GetMaximum();
for (int b = 0; b < (int)pix.GetSize(); b++) // for each band
if (m_AImage[t][v].info[1] == b) // info[1] : band ID
m_AImage[t][v].value = pix[b];
break;
case 2: // mean
pix = filter->GetMean();
for (int b = 0; b < (int)pix.GetSize(); b++) // for each band
if (m_AImage[t][v].info[1] == b) // info[1] : band ID
m_AImage[t][v].value = pix[b];
break;
case 3: // sum
pix = filter->GetSum();
for (int b = 0; b < (int)pix.GetSize(); b++) // for each band
if (m_AImage[t][v].info[1] == b) // info[1] : band ID
m_AImage[t][v].value = pix[b];
break;
case 4: // stddev
mat = filter->GetCovariance();
for (int b = 0; b < (int)mat.Cols(); b++) // for each band
if (m_AImage[t][v].info[1] == b) // info[1] : band ID
m_AImage[t][v].value = mat(b, b);
break;
}
}
}
}
template <typename TImage>
void BandMathXImageFilter<TImage>::OutputsDimensions()
{
this->SetNumberOfRequiredOutputs((int)m_Expression.size());
m_outputsDimensions.clear();
for (int i = 0; i < (int)m_Expression.size(); ++i)
{
ValueType value = m_VParser[0][i]->EvalRef();
switch (value.GetType())
{ // ValueType
case 'b':
itkExceptionMacro(<< "Booleans not supported." << std::endl);
break;
case 'i':
m_outputsDimensions.push_back(1);
break;
case 'f':
m_outputsDimensions.push_back(1);
break;
case 'c':
itkExceptionMacro(<< "Complex numbers not supported." << std::endl);
break;
case 'm':
{
const mup::matrix_type& vect = value.GetArray();
if (vect.GetRows() == 1) // Vector
m_outputsDimensions.push_back(vect.GetCols());
else // Matrix
itkExceptionMacro(<< "Result of the evaluation can't be a matrix." << std::endl);
}
break;
default:
itkExceptionMacro(<< "Unknown output type : " << value.GetType() << std::endl);
break;
}
// std::cout << "Type = " << value.GetType() << " dimension = " << m_outputsDimensions.back() << std::endl;
}
}
template <typename TImage>
void BandMathXImageFilter<TImage>::CheckImageDimensions(void)
{
// Check if input image dimensions match
unsigned int nbInputImages = this->GetNumberOfInputs();
unsigned int inputSize[2];
inputSize[0] = this->GetNthInput(0)->GetLargestPossibleRegion().GetSize(0);
inputSize[1] = this->GetNthInput(0)->GetLargestPossibleRegion().GetSize(1);
for (unsigned int p = 1; p < nbInputImages; p++)
if ((inputSize[0] != this->GetNthInput(p)->GetLargestPossibleRegion().GetSize(0)) ||
(inputSize[1] != this->GetNthInput(p)->GetLargestPossibleRegion().GetSize(1)))
{
itkExceptionMacro(<< "Input images must have the same dimensions." << std::endl
<< "band #1 is [" << inputSize[0] << ";" << inputSize[1] << "]" << std::endl
<< "band #" << p + 1 << " is [" << this->GetNthInput(p)->GetLargestPossibleRegion().GetSize(0) << ";"
<< this->GetNthInput(p)->GetLargestPossibleRegion().GetSize(1) << "]");
}
}
template <typename TImage>
void BandMathXImageFilter<TImage>::GenerateOutputInformation(void)
{
Superclass::GenerateOutputInformation();
CheckImageDimensions();
PrepareParsers();
if (GlobalStatsDetected())
PrepareParsersGlobStats();
OutputsDimensions();
typedef itk::ImageBase<TImage::ImageDimension> ImageBaseType;
typename ImageBaseType::Pointer outputPtr;
int i = 0;
for (itk::OutputDataObjectIterator it(this); !it.IsAtEnd(); i++, it++)
{
// Check whether the output is an image of the appropriate
// dimension (use ProcessObject's version of the GetInput()
// method since it returns the input as a pointer to a
// DataObject as opposed to the subclass version which
// static_casts the input to an TImage).
outputPtr = dynamic_cast<ImageBaseType*>(it.GetOutput());
if (outputPtr)
outputPtr->SetNumberOfComponentsPerPixel(m_outputsDimensions[i]);
}
}
template <typename TImage>
void BandMathXImageFilter<TImage>::GenerateInputRequestedRegion()
{
// call the superclass' implementation of this method
Superclass::GenerateInputRequestedRegion();
for (unsigned int i = 0; i < m_NeighDetected.size(); i++)
if (m_NeighDetected[i] < this->GetNumberOfInputs())
{
// get pointers to the input and output
typename Superclass::InputImagePointer inputPtr = const_cast<TImage*>(this->GetNthInput(m_NeighDetected[i]));
ImageRegionType inputRequestedRegion;
inputRequestedRegion = inputPtr->GetRequestedRegion();
// pad the input requested region by the operator radius
inputRequestedRegion.PadByRadius(m_NeighExtremaSizes[m_NeighDetected[i]]);
// crop the input requested region at the input's largest possible region
if (inputRequestedRegion.Crop(inputPtr->GetLargestPossibleRegion()))
{
inputPtr->SetRequestedRegion(inputRequestedRegion);
return;
}
else
{
// Couldn't crop the region (requested region is outside the largest
// possible region). Throw an exception.
// store what we tried to request (prior to trying to crop)
inputPtr->SetRequestedRegion(inputRequestedRegion);
// build an exception
itk::InvalidRequestedRegionError e(__FILE__, __LINE__);
std::ostringstream msg, msg2;
msg << static_cast<const char*>(this->GetNameOfClass()) << "::GenerateInputRequestedRegion()";
e.SetLocation(msg.str());
msg2 << "Requested region is (at least partially) outside the largest possible region (input #" << m_NeighDetected[i] << ").";
e.SetDescription(msg2.str());
e.SetDataObject(inputPtr);
throw e;
}
}
else
itkExceptionMacro(<< "Requested input #" << m_NeighDetected[i] << ", but only " << this->GetNumberOfInputs() << " inputs are available." << std::endl);
}
template <typename TImage>
void BandMathXImageFilter<TImage>::BeforeThreadedGenerateData()
{
unsigned int nbThreads = this->GetNumberOfThreads();
// Allocate and initialize the thread temporaries
m_ThreadUnderflow.SetSize(nbThreads);
m_ThreadUnderflow.Fill(0);
m_ThreadOverflow.SetSize(nbThreads);
m_ThreadOverflow.Fill(0);
}
template <typename TImage>
void BandMathXImageFilter<TImage>::AfterThreadedGenerateData()
{
unsigned int nbThreads = this->GetNumberOfThreads();
unsigned int i;
m_UnderflowCount = 0;
m_OverflowCount = 0;
// Accumulate counts for each thread
for (i = 0; i < nbThreads; ++i)
{
m_UnderflowCount += m_ThreadUnderflow[i];
m_OverflowCount += m_ThreadOverflow[i];
}
if ((m_UnderflowCount != 0) || (m_OverflowCount != 0))
{
std::stringstream sstm;
sstm << std::endl << "The Following Parsed Expression : ";
for (unsigned int t = 0; t < m_Expression.size(); ++t)
sstm << this->GetExpression(t) << std::endl;
sstm << "Generated " << m_UnderflowCount << " Underflow(s) "
<< "And " << m_OverflowCount << " Overflow(s) " << std::endl
<< "The Parsed Expression, The Inputs And The Output "
<< "Type May Be Incompatible !";
otbWarningMacro(<< sstm.str());
}
}
template <typename TImage>
void BandMathXImageFilter<TImage>::ThreadedGenerateData(const ImageRegionType& outputRegionForThread, itk::ThreadIdType threadId)
{
ValueType value;
unsigned int nbInputImages = this->GetNumberOfInputs();
//----------------- --------- -----------------//
//----------------- Iterators -----------------//
//----------------- --------- -----------------//
typedef itk::ImageScanlineConstIterator<TImage> ImageScanlineConstIteratorType;
typedef itk::ImageScanlineIterator<TImage> ImageScanlineIteratorType;
typedef itk::ImageRegionConstIteratorWithOnlyIndex<TImage> IndexIteratorType;
std::vector<ImageScanlineConstIteratorType> Vit;
Vit.resize(nbInputImages);
for (unsigned int j = 0; j < nbInputImages; ++j)
Vit[j] = ImageScanlineConstIteratorType(this->GetNthInput(j), outputRegionForThread);
std::vector<ImageScanlineIteratorType> VoutIt;
VoutIt.resize(m_Expression.size());
for (unsigned int j = 0; j < VoutIt.size(); ++j)
VoutIt[j] = ImageScanlineIteratorType(this->GetOutput(j), outputRegionForThread);
// Special case : neighborhoods
std::vector<itk::ConstNeighborhoodIterator<TImage>> VNit;
for (unsigned int j = 0; j < m_VVarName.size(); ++j)
if (m_VVarName[j].type == 6)
{
RadiusType radius;
radius[0] = (int)((m_VVarName[j].info[2] - 1) / 2); // Size x direction (otb convention)
radius[1] = (int)((m_VVarName[j].info[3] - 1) / 2); // Size y direction (otb convention)
VNit.push_back(
itk::ConstNeighborhoodIterator<TImage>(radius, this->GetNthInput(m_VVarName[j].info[0]), outputRegionForThread)); // info[0] = Input image ID
VNit.back().NeedToUseBoundaryConditionOn();
}
// Index only iterator
IndexIteratorType indexIterator(this->GetNthInput(0), outputRegionForThread);
// Support progress methods/callbacks
itk::ProgressReporter progress(this, threadId, outputRegionForThread.GetNumberOfPixels());
// iterator on variables
typename std::vector<adhocStruct>::iterator iterVarStart = m_AImage[threadId].begin();
typename std::vector<adhocStruct>::iterator iterVarEnd = m_AImage[threadId].end();
typename std::vector<adhocStruct>::iterator iterVar = iterVarStart;
// temporary output vectors
std::vector<PixelType> tmpOutputs(m_Expression.size());
for (unsigned int k = 0; k < m_Expression.size(); ++k)
tmpOutputs[k].SetSize(m_outputsDimensions[k]);
//----------------- --------------------- -----------------//
//----------------- Variable affectations -----------------//
//----------------- --------------------- -----------------//
for (unsigned int j = 0; j < nbInputImages; ++j)
{
Vit[j].GoToBegin();
}
for (unsigned int j = 0; j < m_Expression.size(); ++j)
{
VoutIt[j].GoToBegin();
}
for (unsigned int j = 0; j < VNit.size(); ++j)
{
VNit[j].GoToBegin();
}
indexIterator.GoToBegin();
while (!Vit[0].IsAtEnd()) // For each pixel
{
while (!Vit[0].IsAtEndOfLine()) // For each line
{
int ngbhNameIndex = 0;
int index;
iterVar = iterVarStart;
while (iterVar != iterVarEnd)
{
switch (iterVar->type)
{
case 0: // idxX
iterVar->value = static_cast<double>(indexIterator.GetIndex()[0]);
break;
case 1: // idxY
iterVar->value = static_cast<double>(indexIterator.GetIndex()[1]);
break;
case 2: // Spacing X (imiPhyX)
// Nothing to do (already set inside PrepareParsers)
break;
case 3: // Spacing Y (imiPhyY)
// Nothing to do (already set inside PrepareParsers)
break;
case 4: // vector
// iterVar->info[0] : Input image #ID
for (int p = 0; p < iterVar->value.GetCols(); ++p)
iterVar->value.At(0, p) = Vit[iterVar->info[0]].Get()[p];
break;
case 5: // pixel
// iterVar->info[0] : Input image #ID
// iterVar->info[1] : Band #ID
iterVar->value = Vit[iterVar->info[0]].Get()[iterVar->info[1]];
break;
case 6: // neighborhood
// iterVar->info[1] : Band #ID
if (iterVar->info[2] * iterVar->info[3] != (int)VNit[ngbhNameIndex].Size())
itkExceptionMacro(<< "Size of muparserx variable is different from its related otb neighborhood iterator")
index = 0;
for (int rows = 0; rows < iterVar->info[3]; ++rows)
for (int cols = 0; cols < iterVar->info[2]; ++cols)
{
iterVar->value.At(rows, cols) = VNit[ngbhNameIndex].GetPixel(index)[iterVar->info[1]];
index++;
}
ngbhNameIndex++;
break;
case 7:
// Nothing to do : user defined variable or constant, which have already been set inside PrepareParsers (see above)
break;
case 8:
// Nothing to do : variable has already been set inside PrepareParsersGlobStats method (see above)
break;
default:
itkExceptionMacro(<< "Type of the variable is unknown");
break;
}
iterVar++;
} // End while on vars
//----------------- ----------- -----------------//
//----------------- Evaluations -----------------//
//----------------- ----------- -----------------//
for (unsigned int IDExpression = 0; IDExpression < m_Expression.size(); ++IDExpression)
{
value = m_VParser[threadId][IDExpression]->EvalRef();
switch (value.GetType())
{ // ValueType
case 'i':
tmpOutputs[IDExpression][0] = value.GetInteger();
break;
case 'f':
tmpOutputs[IDExpression][0] = value.GetFloat();
break;
case 'c':
itkExceptionMacro(<< "Complex numbers are not supported." << std::endl);
break;
case 'm':
{
const mup::matrix_type& vect = value.GetArray();
if (vect.GetRows() == 1) // Vector
for (int p = 0; p < vect.GetCols(); ++p)
tmpOutputs[IDExpression][p] = vect.At(0, p).GetFloat();
else // Matrix
itkExceptionMacro(<< "Result of the evaluation can't be a matrix." << std::endl);
}
break;
}
//----------------- Pixel affectations -----------------//
for (unsigned int p = 0; p < m_outputsDimensions[IDExpression]; ++p)
{
// Case value is equal to -inf or inferior to the minimum value
// allowed by the PixelValueType cast
if (tmpOutputs[IDExpression][p] < double(itk::NumericTraits<PixelValueType>::NonpositiveMin()))
{
tmpOutputs[IDExpression][p] = itk::NumericTraits<PixelValueType>::NonpositiveMin();
m_ThreadUnderflow[threadId]++;
}
// Case value is equal to inf or superior to the maximum value
// allowed by the PixelValueType cast
else if (tmpOutputs[IDExpression][p] > double(itk::NumericTraits<PixelValueType>::max()))
{
tmpOutputs[IDExpression][p] = itk::NumericTraits<PixelValueType>::max();
m_ThreadOverflow[threadId]++;
}
}
VoutIt[IDExpression].Set(tmpOutputs[IDExpression]);
}
for (unsigned int j = 0; j < nbInputImages; ++j)
{
++Vit[j];
}
for (unsigned int j = 0; j < m_Expression.size(); ++j)
{
++VoutIt[j];
}
for (unsigned int j = 0; j < VNit.size(); ++j)
{
++VNit[j];
}
++indexIterator;
progress.CompletedPixel();
}
for (unsigned int j = 0; j < nbInputImages; ++j)
{
Vit[j].NextLine();
}
for (unsigned int j = 0; j < m_Expression.size(); ++j)
{
VoutIt[j].NextLine();
}
}
}
} // end namespace otb
#endif
| 17,157 |
348 | {"nom":"Hautot-Saint-Sulpice","dpt":"Seine-Maritime","inscrits":503,"abs":87,"votants":416,"blancs":41,"nuls":16,"exp":359,"res":[{"panneau":"1","voix":193},{"panneau":"2","voix":166}]} | 76 |
467 | <filename>app/src/main/java/com/yoyiyi/soleil/mvp/contract/discover/InterestContract.java
package com.yoyiyi.soleil.mvp.contract.discover;
import com.yoyiyi.soleil.base.BaseContract;
import com.yoyiyi.soleil.bean.discover.Community;
import com.yoyiyi.soleil.bean.discover.InterestAd;
import com.yoyiyi.soleil.bean.discover.InterestCategrory;
import java.util.List;
/**
 * @author zzq  E-mail: <EMAIL>
 * @date Created: 2017/5/12 10:09
 * Description: topic center Contract
*/
public interface InterestContract {
interface View extends BaseContract.BaseView {
void showInterestAd(InterestAd interestAdList);
void showCommunity(Community community);
void showInterestCategrory(List<InterestCategrory.ResultBean> interestCategroryList);
void onComplete();
}
interface Presenter<T> extends BaseContract.BasePresenter<T> {
void getInterestData();
}
}
| 358 |
4,538 | /**
* @file aiot_task_api.h
 * @brief Header file of the task module, providing task management capabilities
* @date 2020-11-25
*
* @copyright Copyright (C) 2015-2020 Alibaba Group Holding Limited
*
* @details
*
*
*/
#ifndef __AIOT_TASK_API_H__
#define __AIOT_TASK_API_H__
#if defined(__cplusplus)
extern "C" {
#endif
#include <stdint.h>
/**
 * @brief -0x0B00 to -0x0BFF express the status codes of the SDK inside the task module
 *
 */
#define STATE_TASK_BASE (-0x0B00)
/**
 * @brief When destroying the task session instance, the session handle was found to be NULL, so the destroy action was aborted
 *
 */
#define STATE_TASK_DEINIT_HANDLE_IS_NULL (-0x0B01)
/**
 * @brief When configuring the task session instance, the session handle was found to be NULL, so the configuration action was aborted
 *
 */
#define STATE_TASK_SETOPT_HANDLE_IS_NULL (-0x0B02)
/**
 * @brief Log status code emitted when a notify downlink message is received from the server
 */
#define STATE_TASK_RECV_NOTIFY (-0x0B03)
/**
 * @brief An error occurred while parsing the MQTT downlink JSON message pushed by the server
 */
#define STATE_TASK_PARSE_NOTIFY_FAILED (-0x0B04)
/**
 * @brief Parsing failed because the memory required to parse the JSON message could not be obtained
 */
#define STATE_TASK_PARSE_JSON_MALLOC_FAILED (-0x0B05)
/**
 * @brief An error occurred while parsing the JSON message
 */
#define STATE_TASK_PARSE_JSON_ERROR (-0x0B06)
/**
 * @brief The task id received for the query is NULL
 */
#define STATE_TASK_QUERY_TASK_ID_IS_NULL (-0x0B07)
/**
 * @brief Log status code emitted when a get list reply downlink message is received from the server
 */
#define STATE_TASK_RECV_GET_LIST_REPLY (-0x0B08)
/**
 * @brief When configuring the task session instance, the data parameter was found to be NULL, so the configuration action was aborted
 *
 */
#define STATE_TASK_SETOPT_DATA_IS_NULL (-0x0B09)
/**
 * @brief The status is set incorrectly when configuring the task description
 *
 */
#define STATE_TASK_UPDATE_STATUS_INVALID (-0x0B0A)
/**
 * @brief When updating a task, status_details must be either NULL or a JSON object string
 *
 */
#define STATE_TASK_UPDATE_STATUS_DETAILS_INVALID (-0x0B0B)
/**
 * @brief Available values for the option parameter of the aiot_task_setopt interface.
 */
typedef enum {
/**
 * @brief Set the MQTT handle
 *
 * @details
 *
 * The MQTT channel capability is used (as in the OTA process) to report the version number, progress and error codes to the cloud
 *
 * Data type: (void *)
 */
AIOT_TASKOPT_MQTT_HANDLE,
/**
 * @brief Set the user callback function that handles task messages
 *
 * @details
 *
 * Handler for the data pushed down or returned by the cloud
 *
 * Data type: (void *)
 */
AIOT_TASKOPT_RECV_HANDLER,
/**
 * @brief A user context that the SDK should keep for the user
 *
 * @details
 *
 * This context pointer is passed back to the user by the SDK when the callback set with AIOT_TASKOPT_RECV_HANDLER is invoked
 *
 * Data type: (void *)
 */
AIOT_TASKOPT_USERDATA,
AIOT_TASKOPT_MAX
} aiot_task_option_t;
/**
 * @brief Status of a task.
 */
typedef enum {
AIOT_TASK_STATUS_QUEUED, /* Status set by the server: the task is in the queue and has not been pushed yet */
AIOT_TASK_STATUS_SENT, /* Status set by the server: the task has been pushed */
AIOT_TASK_STATUS_IN_PROGRESS, /* Status set by the device: the task is in progress, set after the device starts executing a task */
AIOT_TASK_STATUS_SUCCEEDED, /* Status set by the device: the task has completed */
AIOT_TASK_STATUS_FAILED, /* Status set by the device: the task execution failed */
AIOT_TASK_STATUS_REJECTED, /* Status set by the device: the device refused to execute the task */
AIOT_TASK_STATUS_CANCELLED, /* Status set by the server: the task was cancelled by the server */
AIOT_TASK_STATUS_REMOVED, /* Status set by the server: the task was removed from the server */
AIOT_TASK_STATUS_TIMED_OUT, /* Status set by the server: the task execution timed out */
AIOT_TASK_STATUS_NOT_FOUND /* Status set by the server: no information related to this task was found */
} aiot_task_status_t;
/**
 * @brief Data structures related to downlink messages
 */
typedef enum {
AIOT_TASKRECV_NOTIFY, /* Corresponds to the downlink topic /sys/{productKey}/{deviceName}/thing/job/notify, pushed actively by the cloud with the task details */
AIOT_TASKRECV_GET_DETAIL_REPLY, /* Corresponds to the downlink topic /sys/{productKey}/{deviceName}/thing/job/get_reply; it can carry the details of a single task or a brief description of the task list */
AIOT_TASKRECV_GET_LIST_REPLY, /* Corresponds to the downlink topic /sys/{productKey}/{deviceName}/thing/job/get_reply; it can carry the details of a single task or a brief description of the task list */
AIOT_TASKRECV_UPDATE_REPLY /* Corresponds to the downlink topic /sys/{productKey}/{deviceName}/thing/job/update_reply, which contains the result of a task update, i.e. whether it succeeded */
} aiot_task_recv_type_t;
/**
 * @brief Data structure describing a task
 */
typedef struct {
char *task_id; /* Task ID */
aiot_task_status_t status; /* Status of the task */
char *job_document; /* Task execution rules */
char *sign_method; /* Method used to sign the file */
char *sign; /* Signature of the file */
char *document_file_url; /* URL from which the task file can be downloaded */
char *status_details; /* User-defined status, passed through to the cloud; note that the format must be a JSON object, e.g. "{\"key\": \"value\"}" */
uint8_t progress; /* Progress of the task processing, a number from 0 to 100 */
void *handle; /* Handle used for task processing */
} task_desc_t;
/**
 * @brief Brief description of each task when the task list is pulled from the cloud
 */
typedef struct {
char *task_id; /* Task ID */
aiot_task_status_t status; /* Status of the task */
} task_summary_t;
/**
 * @brief Data returned when the task list is pulled from the cloud
 */
typedef struct {
uint32_t number; /* Size of the task list pulled from the cloud */
task_summary_t *tasks; /* Pointer to the array of pulled tasks */
} task_get_list_reply_t;
/**
 * @brief Data returned when detailed task information is pulled from the cloud
 */
typedef struct {
uint32_t code; /* Code returned by the cloud */
task_desc_t task; /* Detailed description of the task */
} task_get_detail_reply_t;
/**
 * @brief Data returned by the cloud after the task status has been updated to the cloud
 */
typedef struct {
uint32_t code; /* Code returned by the cloud */
char *task_id; /* Task id returned after updating the task */
aiot_task_status_t status; /* Status returned after updating the task */
} task_update_reply_t;
/**
 * @brief Data actively pushed by the cloud, or returned by the cloud for task requests
 */
typedef struct {
aiot_task_recv_type_t type; /* Type of the returned data */
union {
task_desc_t notify; /* Data of a task actively pushed by the cloud */
task_get_list_reply_t get_list_reply; /* Data returned when the task list is requested */
task_get_detail_reply_t get_detail_reply; /* Data returned when the detailed task status is requested */
task_update_reply_t update_reply; /* Data returned after the task status is updated */
} data;
} aiot_task_recv_t;
/**
 * @brief Receive callback invoked when the device receives a task MQTT downlink message
 *
 * @param[in] handle task instance handle
 * @param[in] recv downlink message from the cloud
 * @param[in] userdata user context
 *
 * @return void
 */
typedef void (* aiot_task_recv_handler_t)(void *handle, const aiot_task_recv_t *recv, void *userdata);
/**
 * @brief Create a task instance
 *
 * @return void*
 * @retval non-NULL task instance handle
 * @retval NULL initialization failed, either because no portfile was set or because memory allocation failed
 *
 */
void *aiot_task_init(void);
/**
 * @brief Destroy the task instance handle
 *
 * @param[in] handle pointer to the task instance handle
 *
 * @return int32_t
 * @retval STATE_USER_INPUT_NULL_POINTER handle, or the address it points to, is NULL
 * @retval STATE_SUCCESS executed successfully
 *
 */
int32_t aiot_task_deinit(void **handle);
/**
 * @brief Set parameters of the task handle
 *
 * @details
 *
 * Configure the task session; common configuration options include
 *
 * @param[in] handle task handle
 * @param[in] option configuration option, see @ref aiot_task_option_t for more information
 * @param[in] data configuration option data, see @ref aiot_task_option_t for more information
 *
 * @return int32_t
 * @retval STATE_TASK_SETOPT_HANDLE_IS_NULL the task handle is NULL
 * @retval STATE_TASK_SETOPT_DATA_IS_NULL the data parameter is NULL
 * @retval STATE_USER_INPUT_UNKNOWN_OPTION the option is not supported
 * @retval STATE_SUCCESS the parameter was set successfully
 *
 */
int32_t aiot_task_setopt(void *handle, aiot_task_option_t option, void *data);
/**
 * @brief Get the task list from the cloud
 *
 * @details
 *
 * Get the task list from the cloud
 *
 * @param[in] handle task handle
 *
 * @return int32_t
 * @retval STATE_TASK_SETOPT_DATA_IS_NULL the handle parameter is NULL
 * @retval STATE_SUCCESS sent successfully
 */
int32_t aiot_task_get_task_list(void *handle);
/* Sends a message to /sys/{productKey}/{deviceName}/thing/job/get. If the user_task_id argument is not NULL, the payload of the uplink message is "taskId": user_task_id and the details of that task are returned; */
/* if user_task_id is NULL, the payload of the uplink message is "taskId": "$next" and the cloud returns the earliest task in the queue that is not in a terminal state, whose status is one of QUEUED, SENT or IN_PROGRESS */
/**
 * @brief Get the detailed content of a task from the cloud
 *
 * @details
 *
 * Get the detailed content of a task from the cloud
 *
 * @param[in] handle task handle
 * @param[in] user_task_id id of the task, or $next
 *
 * @return int32_t
 * @retval STATE_TASK_SETOPT_DATA_IS_NULL the handle parameter or user_task_id is NULL
 * @retval STATE_SUCCESS sent successfully
 *
 */
int32_t aiot_task_get_task_detail(void *handle, char *user_task_id);
/**
 * @brief Update the task status to the cloud
 *
 * @details
 *
 * Update the task status to the cloud
 *
 * @param[in] handle task handle
 * @param[in] task task information
 *
 * @return int32_t
 * @retval STATE_TASK_SETOPT_DATA_IS_NULL the handle parameter or task is NULL
 * @retval STATE_SUCCESS updated successfully
 *
 */
int32_t aiot_task_update(void *handle, task_desc_t *task);
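/*
 * Illustrative usage sketch (added note, not part of the SDK header): it only strings together the
 * APIs declared above. The MQTT handle, the callback body and the error handling are assumptions
 * left to the application.
 *
 *   static void demo_task_recv_handler(void *handle, const aiot_task_recv_t *recv, void *userdata)
 *   {
 *       if (recv->type == AIOT_TASKRECV_NOTIFY) {
 *           // a task was pushed by the cloud: execute it, then report progress/status
 *       }
 *   }
 *
 *   void *task_handle = aiot_task_init();
 *   aiot_task_setopt(task_handle, AIOT_TASKOPT_MQTT_HANDLE, mqtt_handle);   // mqtt_handle: an existing MQTT instance
 *   aiot_task_setopt(task_handle, AIOT_TASKOPT_RECV_HANDLER, (void *)demo_task_recv_handler);
 *   aiot_task_get_task_list(task_handle);                                   // or aiot_task_get_task_detail(task_handle, "$next")
 *
 *   // after executing a task, report its final state:
 *   //   task.status = AIOT_TASK_STATUS_SUCCEEDED; task.progress = 100;
 *   //   aiot_task_update(task_handle, &task);
 *
 *   aiot_task_deinit(&task_handle);
 */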
#if defined(__cplusplus)
}
#endif
#endif /* #ifndef __AIOT_TASK_API_H__ */
| 6,385 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-9v35-rf4g-xvc2",
"modified": "2022-01-08T00:00:50Z",
"published": "2021-12-28T00:00:30Z",
"aliases": [
"CVE-2021-45338"
],
"details": "Multiple privilege escalation vulnerabilities in Avast Antivirus prior to 20.4 allow a local user to gain elevated privileges by calling unnecessarily powerful internal methods of the main antivirus service which could lead to the (1) arbitrary file delete, (2) write and (3) reset security.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2021-45338"
},
{
"type": "WEB",
"url": "https://github.com/the-deniss/Vulnerability-Disclosures/tree/main/CVE-2021-AVST1.1"
},
{
"type": "WEB",
"url": "https://github.com/the-deniss/Vulnerability-Disclosures/tree/main/CVE-2021-AVST1.2"
},
{
"type": "WEB",
"url": "https://github.com/the-deniss/Vulnerability-Disclosures/tree/main/CVE-2021-AVST1.3"
},
{
"type": "WEB",
"url": "https://www.avast.com/hacker-hall-of-fame/en/researcher-david-eade-reports-antitrack-bug-to-avast-0"
}
],
"database_specific": {
"cwe_ids": [
"CWE-269"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 601 |
348 | {"nom":"Brouillet","circ":"2ème circonscription","dpt":"Marne","inscrits":84,"abs":36,"votants":48,"blancs":5,"nuls":1,"exp":42,"res":[{"nuance":"LR","nom":"Mme <NAME>","voix":26},{"nuance":"REM","nom":"Mme <NAME>","voix":16}]} | 92 |
30,023 | <filename>tests/components/smarttub/test_config_flow.py
"""Test the smarttub config flow."""
from unittest.mock import patch
from smarttub import LoginFailed
from homeassistant import config_entries, data_entry_flow
from homeassistant.components.smarttub.const import DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from tests.common import MockConfigEntry
async def test_form(hass):
"""Test we get the form."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.smarttub.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_EMAIL: "test-email", CONF_PASSWORD: "<PASSWORD>"},
)
assert result["type"] == "create_entry"
assert result["title"] == "test-email"
assert result["data"] == {
CONF_EMAIL: "test-email",
CONF_PASSWORD: "<PASSWORD>",
}
await hass.async_block_till_done()
mock_setup_entry.assert_called_once()
async def test_form_invalid_auth(hass, smarttub_api):
"""Test we handle invalid auth."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
smarttub_api.login.side_effect = LoginFailed
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_EMAIL: "test-email", CONF_PASSWORD: "<PASSWORD>"},
)
assert result["type"] == "form"
assert result["errors"] == {"base": "invalid_auth"}
async def test_reauth_success(hass, smarttub_api, account):
"""Test reauthentication flow."""
mock_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_EMAIL: "test-email", CONF_PASSWORD: "<PASSWORD>"},
unique_id=account.id,
)
mock_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": config_entries.SOURCE_REAUTH,
"unique_id": mock_entry.unique_id,
"entry_id": mock_entry.entry_id,
},
data=mock_entry.data,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_EMAIL: "test-email3", CONF_PASSWORD: "<PASSWORD>"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "reauth_successful"
assert mock_entry.data[CONF_EMAIL] == "test-email3"
assert mock_entry.data[CONF_PASSWORD] == "<PASSWORD>"
async def test_reauth_wrong_account(hass, smarttub_api, account):
"""Test reauthentication flow if the user enters credentials for a different already-configured account."""
mock_entry1 = MockConfigEntry(
domain=DOMAIN,
data={CONF_EMAIL: "test-email1", CONF_PASSWORD: "<PASSWORD>"},
unique_id=account.id,
)
mock_entry1.add_to_hass(hass)
mock_entry2 = MockConfigEntry(
domain=DOMAIN,
data={CONF_EMAIL: "test-email2", CONF_PASSWORD: "<PASSWORD>"},
unique_id="mockaccount2",
)
mock_entry2.add_to_hass(hass)
# we try to reauth account #2, and the user successfully authenticates to account #1
account.id = mock_entry1.unique_id
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={
"source": config_entries.SOURCE_REAUTH,
"unique_id": mock_entry2.unique_id,
"entry_id": mock_entry2.entry_id,
},
data=mock_entry2.data,
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "reauth_confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_EMAIL: "test-email1", CONF_PASSWORD: "<PASSWORD>"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
| 1,796 |
548 | <gh_stars>100-1000
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from ..constants import scopingElements, tableInsertModeElements, namespaces
# The scope markers are inserted when entering object elements,
# marquees, table cells, and table captions, and are used to prevent formatting
# from "leaking" into tables, object elements, and marquees.
Marker = None
listElementsMap = {
None: (frozenset(scopingElements), False),
"button": (frozenset(scopingElements | set([(namespaces["html"], "button")])), False),
"list": (frozenset(scopingElements | set([(namespaces["html"], "ol"),
(namespaces["html"], "ul")])), False),
"table": (frozenset([(namespaces["html"], "html"),
(namespaces["html"], "table")]), False),
"select": (frozenset([(namespaces["html"], "optgroup"),
(namespaces["html"], "option")]), True)
}
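# Illustrative example (added note, not in the original source): with the map above,
#     listElements, invert = listElementsMap["list"]
# yields the scoping elements plus (html, "ol") and (html, "ul") with invert == False, so
# elementInScope(target, variant="list") below stops searching (returns False) as soon as such an
# element is met on the stack of open elements before the target is found.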
class Node(object):
"""Represents an item in the tree"""
def __init__(self, name):
"""Creates a Node
:arg name: The tag name associated with the node
"""
        # The tag name associated with the node
self.name = name
# The parent of the current node (or None for the document node)
self.parent = None
# The value of the current node (applies to text nodes and comments)
self.value = None
# A dict holding name -> value pairs for attributes of the node
self.attributes = {}
# A list of child nodes of the current node. This must include all
# elements but not necessarily other node types.
self.childNodes = []
# A list of miscellaneous flags that can be set on the node.
self._flags = []
def __str__(self):
attributesStr = " ".join(["%s=\"%s\"" % (name, value)
for name, value in
self.attributes.items()])
if attributesStr:
return "<%s %s>" % (self.name, attributesStr)
else:
return "<%s>" % (self.name)
def __repr__(self):
return "<%s>" % (self.name)
def appendChild(self, node):
"""Insert node as a child of the current node
:arg node: the node to insert
"""
raise NotImplementedError
def insertText(self, data, insertBefore=None):
"""Insert data as text in the current node, positioned before the
start of node insertBefore or to the end of the node's text.
:arg data: the data to insert
:arg insertBefore: True if you want to insert the text before the node
and False if you want to insert it after the node
"""
raise NotImplementedError
def insertBefore(self, node, refNode):
"""Insert node as a child of the current node, before refNode in the
list of child nodes. Raises ValueError if refNode is not a child of
the current node
:arg node: the node to insert
:arg refNode: the child node to insert the node before
"""
raise NotImplementedError
def removeChild(self, node):
"""Remove node from the children of the current node
:arg node: the child node to remove
"""
raise NotImplementedError
def reparentChildren(self, newParent):
"""Move all the children of the current node to newParent.
This is needed so that trees that don't store text as nodes move the
text in the correct way
:arg newParent: the node to move all this node's children to
"""
# XXX - should this method be made more general?
for child in self.childNodes:
newParent.appendChild(child)
self.childNodes = []
def cloneNode(self):
"""Return a shallow copy of the current node i.e. a node with the same
name and attributes but with no parent or child nodes
"""
raise NotImplementedError
def hasContent(self):
"""Return true if the node has children or text, false otherwise
"""
raise NotImplementedError
class ActiveFormattingElements(list):
def append(self, node):
equalCount = 0
if node != Marker:
for element in self[::-1]:
if element == Marker:
break
if self.nodesEqual(element, node):
equalCount += 1
if equalCount == 3:
self.remove(element)
break
list.append(self, node)
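        # Illustrative example (added note): the loop above caps duplicates, in the spirit of the
        # HTML parsing spec's "Noah's Ark" clause. Once a third entry equal to `node` (same
        # nameTuple and attributes) is found between the end of the list and the last Marker, that
        # entry is removed before the new node is appended, so e.g. appending a fourth identical
        # <b> formatting element still leaves only three equal entries in the list.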
def nodesEqual(self, node1, node2):
if not node1.nameTuple == node2.nameTuple:
return False
if not node1.attributes == node2.attributes:
return False
return True
class TreeBuilder(object):
"""Base treebuilder implementation
* documentClass - the class to use for the bottommost node of a document
* elementClass - the class to use for HTML Elements
* commentClass - the class to use for comments
* doctypeClass - the class to use for doctypes
"""
# pylint:disable=not-callable
# Document class
documentClass = None
# The class to use for creating a node
elementClass = None
# The class to use for creating comments
commentClass = None
# The class to use for creating doctypes
doctypeClass = None
# Fragment class
fragmentClass = None
def __init__(self, namespaceHTMLElements):
"""Create a TreeBuilder
:arg namespaceHTMLElements: whether or not to namespace HTML elements
"""
if namespaceHTMLElements:
self.defaultNamespace = "http://www.w3.org/1999/xhtml"
else:
self.defaultNamespace = None
self.reset()
def reset(self):
self.openElements = []
self.activeFormattingElements = ActiveFormattingElements()
# XXX - rename these to headElement, formElement
self.headPointer = None
self.formPointer = None
self.insertFromTable = False
self.document = self.documentClass()
def elementInScope(self, target, variant=None):
# If we pass a node in we match that. if we pass a string
# match any node with that name
exactNode = hasattr(target, "nameTuple")
if not exactNode:
if isinstance(target, text_type):
target = (namespaces["html"], target)
assert isinstance(target, tuple)
listElements, invert = listElementsMap[variant]
for node in reversed(self.openElements):
if exactNode and node == target:
return True
elif not exactNode and node.nameTuple == target:
return True
elif (invert ^ (node.nameTuple in listElements)):
return False
assert False # We should never reach this point
def reconstructActiveFormattingElements(self):
# Within this algorithm the order of steps described in the
# specification is not quite the same as the order of steps in the
# code. It should still do the same though.
# Step 1: stop the algorithm when there's nothing to do.
if not self.activeFormattingElements:
return
# Step 2 and step 3: we start with the last element. So i is -1.
i = len(self.activeFormattingElements) - 1
entry = self.activeFormattingElements[i]
if entry == Marker or entry in self.openElements:
return
# Step 6
while entry != Marker and entry not in self.openElements:
if i == 0:
# This will be reset to 0 below
i = -1
break
i -= 1
# Step 5: let entry be one earlier in the list.
entry = self.activeFormattingElements[i]
while True:
# Step 7
i += 1
# Step 8
entry = self.activeFormattingElements[i]
clone = entry.cloneNode() # Mainly to get a new copy of the attributes
# Step 9
element = self.insertElement({"type": "StartTag",
"name": clone.name,
"namespace": clone.namespace,
"data": clone.attributes})
# Step 10
self.activeFormattingElements[i] = element
# Step 11
if element == self.activeFormattingElements[-1]:
break
def clearActiveFormattingElements(self):
entry = self.activeFormattingElements.pop()
while self.activeFormattingElements and entry != Marker:
entry = self.activeFormattingElements.pop()
def elementInActiveFormattingElements(self, name):
"""Check if an element exists between the end of the active
formatting elements and the last marker. If it does, return it, else
return false"""
for item in self.activeFormattingElements[::-1]:
# Check for Marker first because if it's a Marker it doesn't have a
# name attribute.
if item == Marker:
break
elif item.name == name:
return item
return False
def insertRoot(self, token):
element = self.createElement(token)
self.openElements.append(element)
self.document.appendChild(element)
def insertDoctype(self, token):
name = token["name"]
publicId = token["publicId"]
systemId = token["systemId"]
doctype = self.doctypeClass(name, publicId, systemId)
self.document.appendChild(doctype)
def insertComment(self, token, parent=None):
if parent is None:
parent = self.openElements[-1]
parent.appendChild(self.commentClass(token["data"]))
def createElement(self, token):
"""Create an element but don't insert it anywhere"""
name = token["name"]
namespace = token.get("namespace", self.defaultNamespace)
element = self.elementClass(name, namespace)
element.attributes = token["data"]
return element
def _getInsertFromTable(self):
return self._insertFromTable
def _setInsertFromTable(self, value):
"""Switch the function used to insert an element from the
normal one to the misnested table one and back again"""
self._insertFromTable = value
if value:
self.insertElement = self.insertElementTable
else:
self.insertElement = self.insertElementNormal
insertFromTable = property(_getInsertFromTable, _setInsertFromTable)
def insertElementNormal(self, token):
name = token["name"]
assert isinstance(name, text_type), "Element %s not unicode" % name
namespace = token.get("namespace", self.defaultNamespace)
element = self.elementClass(name, namespace)
element.attributes = token["data"]
self.openElements[-1].appendChild(element)
self.openElements.append(element)
return element
def insertElementTable(self, token):
"""Create an element and insert it into the tree"""
element = self.createElement(token)
if self.openElements[-1].name not in tableInsertModeElements:
return self.insertElementNormal(token)
else:
# We should be in the InTable mode. This means we want to do
# special magic element rearranging
parent, insertBefore = self.getTableMisnestedNodePosition()
if insertBefore is None:
parent.appendChild(element)
else:
parent.insertBefore(element, insertBefore)
self.openElements.append(element)
return element
def insertText(self, data, parent=None):
"""Insert text data."""
if parent is None:
parent = self.openElements[-1]
if (not self.insertFromTable or (self.insertFromTable and
self.openElements[-1].name
not in tableInsertModeElements)):
parent.insertText(data)
else:
# We should be in the InTable mode. This means we want to do
# special magic element rearranging
parent, insertBefore = self.getTableMisnestedNodePosition()
parent.insertText(data, insertBefore)
def getTableMisnestedNodePosition(self):
"""Get the foster parent element, and sibling to insert before
(or None) when inserting a misnested table node"""
# The foster parent element is the one which comes before the most
# recently opened table element
# XXX - this is really inelegant
lastTable = None
fosterParent = None
insertBefore = None
for elm in self.openElements[::-1]:
if elm.name == "table":
lastTable = elm
break
if lastTable:
# XXX - we should really check that this parent is actually a
# node here
if lastTable.parent:
fosterParent = lastTable.parent
insertBefore = lastTable
else:
fosterParent = self.openElements[
self.openElements.index(lastTable) - 1]
else:
fosterParent = self.openElements[0]
return fosterParent, insertBefore
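    # Illustrative example (added note): this computes the "foster parenting" position. For markup
    # such as <table><b>text</table>, the <b> element cannot legally sit inside the table, so the
    # position returned here places it just before the <table> node under the table's parent, or
    # appends it to the chosen foster parent when the table has no parent yet.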
def generateImpliedEndTags(self, exclude=None):
name = self.openElements[-1].name
# XXX td, th and tr are not actually needed
if (name in frozenset(("dd", "dt", "li", "option", "optgroup", "p", "rp", "rt")) and
name != exclude):
self.openElements.pop()
# XXX This is not entirely what the specification says. We should
# investigate it more closely.
self.generateImpliedEndTags(exclude)
def getDocument(self):
"""Return the final tree"""
return self.document
def getFragment(self):
"""Return the final fragment"""
# assert self.innerHTML
fragment = self.fragmentClass()
self.openElements[0].reparentChildren(fragment)
return fragment
def testSerializer(self, node):
"""Serialize the subtree of node in the format required by unit tests
:arg node: the node from which to start serializing
"""
raise NotImplementedError
| 6,598 |
457 | package io.purplejs.core.internal.resource;
import io.purplejs.core.resource.Resource;
import io.purplejs.core.resource.ResourceLoader;
import io.purplejs.core.resource.ResourcePath;
final class ChainedResourceLoader
implements ResourceLoader
{
private final ResourceLoader loader;
private final ResourceLoader next;
ChainedResourceLoader( final ResourceLoader loader, final ResourceLoader next )
{
this.loader = loader;
this.next = next;
}
@Override
public Resource loadOrNull( final ResourcePath path )
{
final Resource resource = this.loader.loadOrNull( path );
if ( resource != null )
{
return resource;
}
return this.next.loadOrNull( path );
}
}
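// Illustrative usage sketch (added note; "primary", "fallback" and "path" are hypothetical, and the
// class is package-private, so this wiring would live inside the same package):
//
//   ResourceLoader chained = new ChainedResourceLoader( primary, fallback );
//   Resource resource = chained.loadOrNull( path );
//
// The first loader that returns a non-null resource wins; otherwise the lookup falls through to `next`.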
| 274 |
677 | // Copyright 2010 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Author: <EMAIL> (<NAME>)
#include "util.h"
#include <assert.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "gumbo.h"
#include "parser.h"
// TODO(jdtang): This should be elsewhere, but there's no .c file for
// SourcePositions and yet the constant needs some linkage, so this is as good
// as any.
const GumboSourcePosition kGumboEmptySourcePosition = {0, 0, 0};
/*
* Default memory management helpers;
* set to system's realloc and free by default
*/
void *(*gumbo_user_allocator)(void *, size_t) = realloc;
void (*gumbo_user_free)(void *) = free;
void gumbo_memory_set_allocator(void *(*allocator_p)(void *, size_t)) {
gumbo_user_allocator = allocator_p ? allocator_p : realloc;
}
void gumbo_memory_set_free(void (*free_p)(void *)) {
gumbo_user_free = free_p ? free_p : free;
}
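/*
 * Illustrative usage sketch (added note): plugging a custom allocator into gumbo via the two
 * setters above. The wrapper below is a placeholder assumption; only the setters are real API.
 *
 *   static void* counting_realloc(void* ptr, size_t size) {
 *     return realloc(ptr, size);  // e.g. add instrumentation here
 *   }
 *
 *   gumbo_memory_set_allocator(counting_realloc);
 *   gumbo_memory_set_free(free);
 *   // ... run the parser ...
 *   gumbo_memory_set_allocator(NULL);  // passing NULL restores the default (realloc)
 */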
| 462 |
2,113 | //-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _GFXFORMATUTILS_H_
#define _GFXFORMATUTILS_H_
#ifndef _PLATFORM_H_
#include "platform/platform.h"
#endif
#ifndef _GFXENUMS_H_
#include "gfx/gfxEnums.h"
#endif
#ifndef _COLOR_H_
#include "core/color.h"
#endif
//WIP: still in early stages
/// Some information about a GFXFormat.
struct GFXFormatInfo
{
protected:
struct Data
{
/// Bytes per single pixel.
U32 mBytesPerPixel;
/// If true, format has alpha channel.
bool mHasAlpha;
/// If true, format uses compression.
bool mIsCompressed;
/// If true, channels are in floating-point.
bool mIsFloatingPoint;
Data() {}
Data( U32 bpp, bool hasAlpha = false, bool isCompressed = false, bool isFP = false )
: mBytesPerPixel( bpp ),
mHasAlpha( hasAlpha ),
mIsCompressed( isCompressed ),
mIsFloatingPoint( isFP ) {}
};
GFXFormat mFormat;
static Data smFormatInfos[ GFXFormat_COUNT ];
public:
GFXFormatInfo( GFXFormat format )
: mFormat( format ) {}
/// @return the number of bytes per pixel in this format.
/// @note For compressed formats that can't give a fixed value per pixel,
/// this will be zero.
U32 getBytesPerPixel() const { return smFormatInfos[ mFormat ].mBytesPerPixel; }
/// @return true if the format has an alpha channel.
bool hasAlpha() const { return smFormatInfos[ mFormat ].mHasAlpha; }
/// @return true if format uses compression.
bool isCompressed() const { return smFormatInfos[ mFormat ].mIsCompressed; }
/// @return true if channels are stored in floating-point format.
bool isFloatingPoint() const { return smFormatInfos[ mFormat ].mIsFloatingPoint; }
};
#if 0
///
extern void GFXCopyPixels( GFXFormat fromFormat, U32 fromWidth, U32 fromHeight, U8* fromData,
GFXFormat toFormat, U32 toWidth, U32 toHeight, U8* toData );
#endif
inline void GFXPackPixel( GFXFormat format, U8*& ptr, U8 red, U8 green, U8 blue, U8 alpha, bool leastSignficantFirst = true )
{
switch( format )
{
case GFXFormatR8G8B8A8:
if( leastSignficantFirst )
{
ptr[ 0 ] = blue;
ptr[ 1 ] = green;
ptr[ 2 ] = red;
ptr[ 3 ] = alpha;
}
else
{
ptr[ 0 ] = red;
ptr[ 1 ] = green;
ptr[ 2 ] = blue;
ptr[ 3 ] = alpha;
}
ptr += 4;
break;
case GFXFormatR8G8B8:
if( leastSignficantFirst )
{
ptr[ 0 ] = blue;
ptr[ 1 ] = green;
ptr[ 2 ] = red;
}
else
{
ptr[ 0 ] = red;
ptr[ 1 ] = green;
ptr[ 2 ] = blue;
}
ptr += 3;
break;
default:
AssertISV( false, "GFXPackPixel() - pixel format not implemented." );
}
}
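// Illustrative usage sketch (added note): packing one RGBA texel into a byte buffer with the helper
// above. The buffer and pixel values are arbitrary examples.
//
//   U8 texel[ 4 ];
//   U8* dst = texel;
//   GFXPackPixel( GFXFormatR8G8B8A8, dst, 255, 128, 0, 255 );   // dst now points past the 4 written bytes
//
// With leastSignficantFirst left at its default, the bytes are written in blue, green, red, alpha
// order, as handled in the switch above.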
#endif // _GFXFORMATUTILS_H_
| 1,789 |
348 | {"nom":"Bentayou-Sérée","dpt":"Pyrénées-Atlantiques","inscrits":94,"abs":12,"votants":82,"blancs":11,"nuls":4,"exp":67,"res":[{"panneau":"1","voix":45},{"panneau":"2","voix":22}]} | 79 |
1,540 | <gh_stars>1000+
package test;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import test.sample.BaseSampleInheritance;
public class SampleInheritance extends BaseSampleInheritance {
// Test dependency of configuration methods
@BeforeClass(groups = {"configuration0"})
public void configuration0() {
addConfiguration("configuration0");
// System.out.println("CONFIGURATION 0");
}
@BeforeClass(
groups = "final",
dependsOnGroups = {"configuration1"})
public void configuration2() {
assert m_configurations.size() == 2 : "Expected size 2 found " + m_configurations.size();
assert "configuration0".equals(m_configurations.get(0)) : "Expected configuration0 to be run";
assert "configuration1".equals(m_configurations.get(1)) : "Expected configuration1 to be run";
addConfiguration("configuration2");
}
@Test(
groups = "final",
dependsOnGroups = {"inheritedTestMethod"})
public void inheritedMethodsWereCalledInOrder() {
assert m_invokedBaseMethod : "Didn't invoke test method in base class";
assert m_invokedBaseConfiguration : "Didn't invoke configuration method in base class";
}
@Test(groups = "final2", dependsOnGroups = "final")
public void configurationsWereCalledInOrder() {
assert m_configurations.size() == 3;
assert "configuration0".equals(m_configurations.get(0)) : "Expected configuration0 to be run";
assert "configuration1".equals(m_configurations.get(1)) : "Expected configuration1 to be run";
assert "configuration2".equals(m_configurations.get(2)) : "Expected configuration1 to be run";
}
}
| 523 |
669 | # Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Dict
import torch
import torchcontrol as toco
from torchcontrol.transform import Transformation as T
from torchcontrol.transform import Rotation as R
from torchcontrol.utils import to_tensor
class JointImpedanceControl(toco.PolicyModule):
"""
Impedance control in joint space.
"""
def __init__(
self,
joint_pos_current,
Kp,
Kd,
robot_model: torch.nn.Module,
ignore_gravity=True,
):
"""
Args:
joint_pos_current: Current joint positions
Kp: P gains in joint space
Kd: D gains in joint space
robot_model: A robot model from torchcontrol.models
ignore_gravity: `True` if the robot is already gravity compensated, `False` otherwise
"""
super().__init__()
# Initialize modules
self.robot_model = robot_model
self.invdyn = toco.modules.feedforward.InverseDynamics(
self.robot_model, ignore_gravity=ignore_gravity
)
self.joint_pd = toco.modules.feedback.JointSpacePD(Kp, Kd)
# Reference pose
self.joint_pos_desired = torch.nn.Parameter(to_tensor(joint_pos_current))
self.joint_vel_desired = torch.zeros_like(self.joint_pos_desired)
def forward(self, state_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
"""
Args:
state_dict: A dictionary containing robot states
Returns:
A dictionary containing the controller output
"""
# State extraction
joint_pos_current = state_dict["joint_positions"]
joint_vel_current = state_dict["joint_velocities"]
# Control logic
torque_feedback = self.joint_pd(
joint_pos_current,
joint_vel_current,
self.joint_pos_desired,
self.joint_vel_desired,
)
torque_feedforward = self.invdyn(
joint_pos_current, joint_vel_current, torch.zeros_like(joint_pos_current)
) # coriolis
torque_out = torque_feedback + torque_feedforward
return {"joint_torques": torque_out}
class CartesianImpedanceControl(toco.PolicyModule):
"""
Performs impedance control in Cartesian space.
Errors and feedback are computed in Cartesian space, and the resulting forces are projected back into joint space.
"""
def __init__(
self,
joint_pos_current,
Kp,
Kd,
robot_model: torch.nn.Module,
ignore_gravity=True,
):
"""
Args:
joint_pos_current: Current joint positions
Kp: P gains in Cartesian space
Kd: D gains in Cartesian space
robot_model: A robot model from torchcontrol.models
ignore_gravity: `True` if the robot is already gravity compensated, `False` otherwise
"""
super().__init__()
# Initialize modules
self.robot_model = robot_model
self.invdyn = toco.modules.feedforward.InverseDynamics(
self.robot_model, ignore_gravity=ignore_gravity
)
self.pose_pd = toco.modules.feedback.CartesianSpacePDFast(Kp, Kd)
# Reference pose
joint_pos_current = to_tensor(joint_pos_current)
ee_pos_current, ee_quat_current = self.robot_model.forward_kinematics(
joint_pos_current
)
self.ee_pos_desired = torch.nn.Parameter(ee_pos_current)
self.ee_quat_desired = torch.nn.Parameter(ee_quat_current)
self.ee_vel_desired = torch.nn.Parameter(torch.zeros(3))
self.ee_rvel_desired = torch.nn.Parameter(torch.zeros(3))
def forward(self, state_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
"""
Args:
state_dict: A dictionary containing robot states
Returns:
A dictionary containing the controller output
"""
# State extraction
joint_pos_current = state_dict["joint_positions"]
joint_vel_current = state_dict["joint_velocities"]
# Control logic
ee_pos_current, ee_quat_current = self.robot_model.forward_kinematics(
joint_pos_current
)
jacobian = self.robot_model.compute_jacobian(joint_pos_current)
ee_twist_current = jacobian @ joint_vel_current
wrench_feedback = self.pose_pd(
ee_pos_current,
ee_quat_current,
ee_twist_current,
self.ee_pos_desired,
self.ee_quat_desired,
torch.cat([self.ee_vel_desired, self.ee_rvel_desired]),
)
torque_feedback = jacobian.T @ wrench_feedback
torque_feedforward = self.invdyn(
joint_pos_current, joint_vel_current, torch.zeros_like(joint_pos_current)
) # coriolis
torque_out = torque_feedback + torque_feedforward
return {"joint_torques": torque_out}
| 2,266 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.dependency_injection;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import javax.inject.Scope;
/**
* Scope of components living as long as activities associated with them live.
* Can be used for all activities, since there is no way for components of different activities (as
* well as different instances of the same activity) to communicate with each other via Dagger.
*/
@Scope
@Retention(RetentionPolicy.SOURCE)
public @interface ActivityScope {}
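// Illustrative usage sketch (the component and module names below are hypothetical, not part of
// Chromium): annotating a Dagger component with @ActivityScope makes every @ActivityScope-annotated
// binding a per-component singleton, i.e. one instance per activity instance.
//
//   @ActivityScope
//   @Component(modules = SomeActivityModule.class)
//   interface ExampleActivityComponent {
//       ExampleActivityController controller();
//   }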
| 191 |
6,717 | //******************************************************************************
//
// Copyright (c) Microsoft. All rights reserved.
//
// This code is licensed under the MIT License (MIT).
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//******************************************************************************
#pragma once
#include "WinrtType.h"
#include "CodeGenModel.h"
struct ReturnHandler {
std::wstring returnVal;
std::wstring returnType;
std::wstring wrlType;
std::wstring wrlBaseType;
bool needsARCAnnotation = false;
// Params to add to the message signature (for callbacks since we aren't returning them):
std::vector<ObjC::MethodSig::NameTypeVar> newParams;
std::wstring header;
std::wstring footer;
};
class ShimTracker;
// These are dumb text conversions to find the equivalent names in our implementations:
std::wstring midlToWrlType(const std::wstring& midlType);
std::wstring mapNamespacedType(const std::wstring& midlType);
bool basicTypeToWinrt(std::wstring& wrtType, const std::wstring& basicType);
ReturnHandler handleReturnType(const std::shared_ptr<ObjectModel::MemberInfo>& memberInfo, const SymbolTable& _types, ShimTracker& _shims);
WinrtType winrtTypeInfo(const std::shared_ptr<ObjectModel::Symbol>& internalType,
const SymbolTable& types,
ShimTracker& shims,
bool box = false);
bool winrtTypeInfoBasic(const std::wstring& typeName, WinrtType* out);
inline bool hasPrefix(const std::wstring& str, const std::wstring& prefix) {
if (prefix.length() <= str.length()) {
return str.substr(0, prefix.length()) == prefix;
}
return false;
}
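// Example (illustrative): hasPrefix(L"Windows.Foundation.Uri", L"Windows.") returns true, while
// hasPrefix(L"Uri", L"Windows.") returns false because the prefix is longer than the string.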
| 699 |
1,652 | <filename>redis/redis-checker/src/main/java/com/ctrip/xpipe/redis/checker/healthcheck/SingleDcSupport.java
package com.ctrip.xpipe.redis.checker.healthcheck;
public interface SingleDcSupport {
}
| 72 |
577 | package com.hoko.blur.processor;
import android.content.Context;
import android.graphics.Bitmap;
import androidx.renderscript.Allocation;
import androidx.renderscript.Element;
import androidx.renderscript.RSRuntimeException;
import androidx.renderscript.RenderScript;
import androidx.renderscript.ScriptIntrinsicBlur;
import android.util.Log;
import com.hoko.blur.HokoBlur;
import com.hoko.blur.renderscript.ScriptC_BoxBlur;
import com.hoko.blur.renderscript.ScriptC_StackBlur;
import com.hoko.blur.util.MathUtil;
import com.hoko.blur.util.Preconditions;
/**
* Created by yuxfzju on 16/9/7.
*/
class RenderScriptBlurProcessor extends BlurProcessor {
private static final String TAG = RenderScriptBlurProcessor.class.getSimpleName();
private RenderScript mRenderScript;
private ScriptIntrinsicBlur mGaussianBlurScript;
private ScriptC_BoxBlur mBoxBlurScript;
private ScriptC_StackBlur mStackBlurScript;
private static final int RS_MAX_RADIUS = 25;
private volatile boolean rsRuntimeInited = false;
RenderScriptBlurProcessor(HokoBlurBuild builder) {
super(builder);
init(builder.mCtx);
}
private void init(Context context) {
        Preconditions.checkNotNull(context, "Please set a context for the RenderScript scheme; did you forget to set the context on the builder?");
try {
mRenderScript = RenderScript.create(context.getApplicationContext());
mGaussianBlurScript = ScriptIntrinsicBlur.create(mRenderScript, Element.U8_4(mRenderScript));
mBoxBlurScript = new ScriptC_BoxBlur(mRenderScript);
mStackBlurScript = new ScriptC_StackBlur(mRenderScript);
rsRuntimeInited = true;
} catch (RSRuntimeException e) {
Log.e(TAG, "Failed to init RenderScript runtime", e);
rsRuntimeInited = false;
}
}
/**
     * RenderScript built-in parallel implementation
     *
     * @param bitmap     the scaled input bitmap to blur
     * @param concurrent unused here; RenderScript already parallelizes the work internally
     * @return the blurred bitmap, or the untouched input if the RenderScript runtime failed to initialize
*/
@Override
protected Bitmap doInnerBlur(Bitmap bitmap, boolean concurrent) {
Preconditions.checkNotNull(bitmap, "scaledInBitmap == null");
if (!rsRuntimeInited) {
Log.e(TAG, "RenderScript Runtime is not initialized");
return bitmap;
}
Allocation allocationIn = Allocation.createFromBitmap(mRenderScript, bitmap);
Allocation allocationOut = Allocation.createFromBitmap(mRenderScript, Bitmap.createBitmap(bitmap));
try {
switch (mMode) {
case HokoBlur.MODE_BOX:
doBoxBlur(bitmap, allocationIn, allocationOut);
allocationIn.copyTo(bitmap);
break;
case HokoBlur.MODE_STACK:
doStackBlur(bitmap, allocationIn, allocationOut);
allocationIn.copyTo(bitmap);
break;
case HokoBlur.MODE_GAUSSIAN:
doGaussianBlur(allocationIn, allocationOut);
allocationOut.copyTo(bitmap);
break;
}
} catch (Throwable e) {
Log.e(TAG, "Blur the bitmap error", e);
} finally {
allocationIn.destroy();
allocationOut.destroy();
}
return bitmap;
}
private void doBoxBlur(Bitmap input, Allocation in, Allocation out) {
if (mBoxBlurScript == null) {
throw new IllegalStateException("The blur script is unavailable");
}
mBoxBlurScript.set_input(in);
mBoxBlurScript.set_output(out);
mBoxBlurScript.set_width(input.getWidth());
mBoxBlurScript.set_height(input.getHeight());
mBoxBlurScript.set_radius(mRadius);
mBoxBlurScript.forEach_boxblur_h(in);
mBoxBlurScript.set_input(out);
mBoxBlurScript.set_output(in);
mBoxBlurScript.forEach_boxblur_v(out);
}
private void doGaussianBlur(Allocation in, Allocation out) {
if (mGaussianBlurScript == null) {
throw new IllegalStateException("The blur script is unavailable");
}
        // RenderScript does not work with too large a blur radius, so clamp it to the supported maximum
mRadius = MathUtil.clamp(mRadius, 0, RS_MAX_RADIUS);
mGaussianBlurScript.setRadius(mRadius);
// mAllocationIn.copyFrom(input);
mGaussianBlurScript.setInput(in);
mGaussianBlurScript.forEach(out);
}
private void doStackBlur(Bitmap input, Allocation in, Allocation out) {
if (mStackBlurScript == null) {
throw new IllegalStateException("The blur script is unavailable");
}
mStackBlurScript.set_input(in);
mStackBlurScript.set_output(out);
mStackBlurScript.set_width(input.getWidth());
mStackBlurScript.set_height(input.getHeight());
mStackBlurScript.set_radius(mRadius);
mStackBlurScript.forEach_stackblur_v(in);
mStackBlurScript.set_input(out);
mStackBlurScript.set_output(in);
mStackBlurScript.forEach_stackblur_h(out);
}
}
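// Illustrative usage sketch. The builder entry points below are assumed from HokoBlur's public API
// and may not match this exact version of the library; RenderScriptBlurProcessor itself is
// package-private and is normally obtained through the builder rather than constructed directly.
//
//   Bitmap blurred = HokoBlur.with(context)
//           .scheme(HokoBlur.SCHEME_RENDER_SCRIPT)
//           .mode(HokoBlur.MODE_GAUSSIAN)
//           .radius(10)
//           .processor()
//           .blur(srcBitmap);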
| 2,201 |
2,151 | <filename>third_party/blink/renderer/core/layout/ng/inline/ng_inline_item.h
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef NGInlineItem_h
#define NGInlineItem_h
#include "third_party/blink/renderer/core/core_export.h"
#include "third_party/blink/renderer/core/layout/ng/inline/ng_offset_mapping.h"
#include "third_party/blink/renderer/core/layout/ng/ng_style_variant.h"
#include "third_party/blink/renderer/core/style/computed_style.h"
#include "third_party/blink/renderer/platform/fonts/shaping/shape_result.h"
#include "third_party/blink/renderer/platform/text/text_direction.h"
#include <unicode/ubidi.h>
#include <unicode/uscript.h>
namespace blink {
class LayoutObject;
// Class representing a single text node or styled inline element with text
// content segmented by style, text direction, sideways rotation, font fallback
// priority (text, symbol, emoji, etc), and script (but not by font).
// In this representation TextNodes are merged up into their parent inline
// element where possible.
class CORE_EXPORT NGInlineItem {
public:
enum NGInlineItemType {
kText,
kControl,
kAtomicInline,
kOpenTag,
kCloseTag,
kFloating,
kOutOfFlowPositioned,
kListMarker,
kBidiControl
// When adding new values, make sure the bit size of |type_| is large
// enough to store.
};
// Whether pre- and post-context should be used for shaping.
enum NGLayoutInlineShapeOptions {
kNoContext = 0,
kPreContext = 1,
kPostContext = 2
};
enum NGCollapseType {
// No collapsible spaces.
kNotCollapsible,
// A collapsible space run that does not contain segment breaks.
kCollapsibleSpace,
// A collapsible space run that contains segment breaks.
kCollapsibleNewline,
// This item is opaque to whitespace collapsing.
kOpaqueToCollapsing
};
// The constructor and destructor can't be implicit or inlined, because they
// require full definition of ComputedStyle.
NGInlineItem(NGInlineItemType type,
unsigned start,
unsigned end,
const ComputedStyle* style = nullptr,
LayoutObject* layout_object = nullptr,
bool end_may_collapse = false);
~NGInlineItem();
// Copy constructor adjusting start/end and shape results.
NGInlineItem(const NGInlineItem&,
unsigned adjusted_start,
unsigned adjusted_end,
scoped_refptr<const ShapeResult>);
NGInlineItemType Type() const { return static_cast<NGInlineItemType>(type_); }
const char* NGInlineItemTypeToString(int val) const;
const ShapeResult* TextShapeResult() const { return shape_result_.get(); }
NGLayoutInlineShapeOptions ShapeOptions() const {
return static_cast<NGLayoutInlineShapeOptions>(shape_options_);
}
// If this item is "empty" for the purpose of empty block calculation.
bool IsEmptyItem() const { return is_empty_item_; }
// If this item should create a box fragment. Box fragments can be omitted for
// optimization if this is false.
bool ShouldCreateBoxFragment() const { return should_create_box_fragment_; }
unsigned StartOffset() const { return start_offset_; }
unsigned EndOffset() const { return end_offset_; }
unsigned Length() const { return end_offset_ - start_offset_; }
TextDirection Direction() const { return DirectionFromLevel(BidiLevel()); }
UBiDiLevel BidiLevel() const { return static_cast<UBiDiLevel>(bidi_level_); }
// Resolved bidi level for the reordering algorithm. Certain items have
// artificial bidi level for the reordering algorithm without affecting its
// direction.
UBiDiLevel BidiLevelForReorder() const;
UScriptCode GetScript() const { return script_; }
const ComputedStyle* Style() const { return style_.get(); }
LayoutObject* GetLayoutObject() const { return layout_object_; }
void SetOffset(unsigned start, unsigned end);
void SetEndOffset(unsigned);
bool HasStartEdge() const;
bool HasEndEdge() const;
void SetStyleVariant(NGStyleVariant style_variant) {
style_variant_ = static_cast<unsigned>(style_variant);
}
NGStyleVariant StyleVariant() const {
return static_cast<NGStyleVariant>(style_variant_);
}
// Get or set the whitespace collapse type at the end of this item.
NGCollapseType EndCollapseType() const {
return static_cast<NGCollapseType>(end_collapse_type_);
}
void SetEndCollapseType(NGCollapseType type) { end_collapse_type_ = type; }
// Whether the item may be affected by whitespace collapsing. Unlike the
// EndCollapseType() method this returns true even if a trailing space has
// been removed.
bool EndMayCollapse() const { return end_may_collapse_; }
static void Split(Vector<NGInlineItem>&, unsigned index, unsigned offset);
void SetBidiLevel(UBiDiLevel);
static unsigned SetBidiLevel(Vector<NGInlineItem>&,
unsigned index,
unsigned end_offset,
UBiDiLevel);
void AssertOffset(unsigned offset) const;
void AssertEndOffset(unsigned offset) const;
String ToString() const;
private:
void ComputeBoxProperties();
unsigned start_offset_;
unsigned end_offset_;
UScriptCode script_;
scoped_refptr<const ShapeResult> shape_result_;
scoped_refptr<const ComputedStyle> style_;
LayoutObject* layout_object_;
unsigned type_ : 4;
unsigned bidi_level_ : 8; // UBiDiLevel is defined as uint8_t.
unsigned shape_options_ : 2;
unsigned is_empty_item_ : 1;
unsigned should_create_box_fragment_ : 1;
unsigned style_variant_ : 2;
unsigned end_collapse_type_ : 2; // NGCollapseType
unsigned end_may_collapse_ : 1;
friend class NGInlineNode;
};
inline void NGInlineItem::AssertOffset(unsigned offset) const {
DCHECK((offset >= start_offset_ && offset < end_offset_) ||
(offset == start_offset_ && start_offset_ == end_offset_));
}
inline void NGInlineItem::AssertEndOffset(unsigned offset) const {
DCHECK_GE(offset, start_offset_);
DCHECK_LE(offset, end_offset_);
}
// Represents a text content with a list of NGInlineItem. A node may have an
// additional NGInlineItemsData for ::first-line pseudo element.
struct CORE_EXPORT NGInlineItemsData {
// Text content for all inline items represented by a single NGInlineNode.
// Encoded either as UTF-16 or latin-1 depending on the content.
String text_content;
Vector<NGInlineItem> items;
// The DOM to text content offset mapping of this inline node.
std::unique_ptr<NGOffsetMapping> offset_mapping;
void AssertOffset(unsigned index, unsigned offset) const {
items[index].AssertOffset(offset);
}
void AssertEndOffset(unsigned index, unsigned offset) const {
items[index].AssertEndOffset(offset);
}
};
} // namespace blink
#endif // NGInlineItem_h
| 2,312 |
1,865 | {
"name": "cta",
"title": "cta.js",
"description": "A light-weight performant library to animate your 'action-to-effect' paths",
"version": "0.3.2",
"homepage": "http://kushagragour.in/lab/ctajs/",
"author": {
"name": "<NAME>",
"email": "<EMAIL>",
"url": "http://kushagragour.in"
},
"main": "dist/cta.js",
"repository": {
"type": "git",
"url": "git://github.com/chinchang/cta.js.git"
},
"bugs": {
"url": "https://github.com/chinchang/cta.js/issues"
},
"license": "MIT",
"keywords": [
"animation",
"transition",
"ux",
"javascript",
"library"
],
"devDependencies": {
"grunt-contrib-concat": "~0.5.1",
"grunt": "~0.4.5",
"grunt-contrib-jshint": "^0.11.0",
"grunt-contrib-uglify": "~0.8.0"
}
}
| 379 |
726 | public class Codec {
// Encodes a tree to a single string.
    // Recursive preorder traversal of the binary tree: 1) keep the structural information (besides the left/right children, nulls must be recorded too), 2) use a separator so different values can be told apart
public String serialize(TreeNode root) {
//base case
if(root==null)
return "#!";
StringBuilder sb = new StringBuilder();
sb.append(root.val).append("!");
sb.append(serialize(root.left));
sb.append(serialize(root.right));
return sb.toString();
}
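    // Worked example (illustrative): for a root 1 with left child 2 and right child 3 (both leaves),
    // the preorder emission is "1!" + "2!#!#!" + "3!#!#!" = "1!2!#!#!3!#!#!"; an empty tree
    // serializes to "#!".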
// Decodes your encoded data to tree.
public TreeNode deserialize(String data) {
String[] strs = data.split("!");
return core(strs);
}
private int i;
    // Preorder traversal of the binary tree, recursive
private TreeNode core(String[] strs){
//base case
if(i==strs.length)
return null;
if(strs[i].equals("#")){
            i++; // advance the index
return null;
}
TreeNode root = new TreeNode(Integer.parseInt(strs[i]));
i++;
root.left = core(strs);
root.right = core(strs);
return root;
}
} | 604 |
425 | #include <stdio.h>
#include <string.h>
#include "pico/stdlib.h"
#include "bmp280.hpp"
#include "common/pimoroni_i2c.hpp"
using namespace pimoroni;
I2C i2c(BOARD::BREAKOUT_GARDEN);
BMP280 bmp280(&i2c);
int main() {
gpio_init(PICO_DEFAULT_LED_PIN);
gpio_set_dir(PICO_DEFAULT_LED_PIN, GPIO_OUT);
stdio_init_all();
if(!bmp280.init()) {
printf("Failed to init bmp280!\n");
}
while (1) {
BMP280::bmp280_reading result = bmp280.read();
printf("%s %0.2lf deg C, %ld hPa\n", result.status == BMP280_OK ? "OK" : "ER", result.temperature, result.pressure);
sleep_ms(1000);
}
return 0;
} | 282 |
328 | <gh_stars>100-1000
package com.ctg.test.springboothtml.controller;
import com.ctg.test.springboothtml.DBUtil;
import com.ctg.test.springboothtml.IpUtil;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.core.env.Environment;
import org.springframework.stereotype.Controller;
import org.springframework.util.CollectionUtils;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.net.InetAddress;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.Map;
/**
 * Tests for SQL injection and XSS
*/
@Controller
@EnableAutoConfiguration
public class TestController {
@Autowired
private Environment env;
@Autowired
IpUtil ipUtil;
/**
* http://localhost:8180/webapp2/testXss?userName=1&passWord=<script>alert(1)</script>
* @param request
* @param response
* @return
*/
@RequestMapping("/testXss")
@ResponseBody
String testXss(@RequestParam String userName, @RequestParam String passWord,HttpServletRequest request, HttpServletResponse response) {
return "testXss";
}
/**
     * Tests the vulnerable SQL path: http://localhost:8180/webapp2/testSql1?userName=1&passWord=1 or 1=1
* @param userName
* @param passWord
* @param request
* @param response
* @return
* @throws SQLException
*/
@RequestMapping(value = {"/testSql1"})
@ResponseBody
public Object testSql1(@RequestParam String userName, @RequestParam String passWord,
HttpServletRequest request, HttpServletResponse response) throws SQLException {
Map<String,Object> result=new HashMap<>();
        // SQL injection: pass "1 or 1=1" as the password
String sql1 ="select user_name,pass_word from cas_user where user_name= '"+userName+"' and pass_word="+passWord;
System.out.print("sql1:"+sql1+"\n");
Statement st=DBUtil.getConnection().createStatement();
        // 4. Process the database result (via ResultSet)
ResultSet rs=st.executeQuery(sql1);
int i=0;
while(rs.next()){
i=i+1;
result.put("user_name"+i,rs.getString("user_name"));
result.put("pass_word",rs.getString("pass_word"));
}
        // Release resources
rs.close();
st.close();
return result;
}
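    // Worked example (illustrative): with userName=1 and passWord="1 or 1=1", sql1 above becomes
    //   select user_name,pass_word from cas_user where user_name= '1' and pass_word=1 or 1=1
    // and the trailing "or 1=1" makes the WHERE clause true for every row, leaking all users.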
/**
     * Tests the correct (safe) SQL handling: http://localhost:8180/webapp2/testSql2?userName=1&passWord=1 or 1=1
* @param userName
* @param passWord
* @param request
* @param response
* @return
* @throws SQLException
*/
@RequestMapping(value = {"/testSql2"})
@ResponseBody
public Object testSql2(@RequestParam String userName, @RequestParam String passWord,
HttpServletRequest request, HttpServletResponse response) throws SQLException {
Map<String,Object> result=new HashMap<>();
        // For the correct version you can also precompile with PreparedStatement and then bind the parameters
String sql2 ="select user_name,pass_word from cas_user where user_name= '"+userName+"' and pass_word= '"+passWord+"'";
System.out.print("sql2:"+sql2+"\n");
Statement st=DBUtil.getConnection().createStatement();
        // Alternatively, precompile with PreparedStatement (using ? placeholders) and bind the parameters:
/*PreparedStatement pst=DBUtil.getConnection().prepareStatement(sql2);
pst.setString(1,userName);
pst.setString(2,passWord);*/
        // 4. Process the database result (via ResultSet)
ResultSet rs=st.executeQuery(sql2);
int i=0;
while(rs.next()){
i=i+1;
result.put("user_name"+i,rs.getString("user_name"));
result.put("pass_word",rs.getString("<PASSWORD>"));
}
        // Release resources
rs.close();
st.close();
return result;
}
} | 1,505 |
529 | <filename>src/dnnl/fuse_conv_batchnorm_bias_mkl.cc
/* Copyright 2020 Stanford, Tsinghua
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "taso/ops.h"
#include "taso/dnnl_helper.h"
using namespace taso;
using namespace dnnl;
void fuse_conv_batchnorm_bias_kernel(int volume, DATATYPE* dst_ptr,
const DATATYPE* scale, const DATATYPE* beta,
const DATATYPE* mean, const DATATYPE* var) {
#pragma omp parallel for
for (int i = 0; i < volume; i++) {
dst_ptr[i] = beta[i] - scale[i] * mean[i] / std::sqrt(var[i] + BN_MIN_EPSILON);
}
}
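// Rationale (sketch): batch norm computes y = scale * (x - mean) / sqrt(var + eps) + beta, which
// expands to y = (scale / sqrt(var + eps)) * x + (beta - scale * mean / sqrt(var + eps)). The
// kernel above materializes that constant second term, i.e. the bias left over once the
// multiplicative factor has been folded into the convolution weights.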
void FuseConvBatchNormBias::map(void)
{
assert(inputs[0].numDim == 1);
assert(inputs[1].numDim == 1);
assert(inputs[2].numDim == 1);
assert(inputs[3].numDim == 1);
// allocate tensors
size_t outputSize = sizeof(DATATYPE) * outputs[0].volume();
CHECK_NE(nullptr, outputs[0].data_ptr = malloc(outputSize));
}
void FuseConvBatchNormBias::unmap(void)
{
// clear primitives
net.clear();
// free tensors
free(outputs[0].data_ptr);
outputs[0].data_ptr = nullptr;
}
void FuseConvBatchNormBias::forward(bool block)
{
int volume = outputs[0].volume();
DATATYPE* scale_ptr = (DATATYPE*) inputs[0].data_ptr;
DATATYPE* beta_ptr = (DATATYPE*) inputs[1].data_ptr;
DATATYPE* mean_ptr = (DATATYPE*) inputs[2].data_ptr;
DATATYPE* var_ptr = (DATATYPE*) inputs[3].data_ptr;
fuse_conv_batchnorm_bias_kernel(volume,
(DATATYPE*)outputs[0].data_ptr, scale_ptr, beta_ptr, mean_ptr, var_ptr);
}
| 762 |
945 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.iotdb.cluster.query.manage;
import org.apache.iotdb.cluster.query.RemoteQueryContext;
import org.apache.iotdb.cluster.rpc.thrift.Node;
import org.apache.iotdb.db.exception.StorageEngineException;
import org.apache.iotdb.db.query.control.QueryResourceManager;
import org.apache.iotdb.db.query.executor.groupby.GroupByExecutor;
import org.apache.iotdb.db.query.reader.series.IAggregateReader;
import org.apache.iotdb.db.query.reader.series.IReaderByTimestamp;
import org.apache.iotdb.tsfile.read.reader.IBatchReader;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
public class ClusterQueryManager {
private AtomicLong idAtom = new AtomicLong();
private Map<Node, Map<Long, RemoteQueryContext>> queryContextMap = new ConcurrentHashMap<>();
private Map<Long, IBatchReader> seriesReaderMap = new ConcurrentHashMap<>();
private Map<Long, IReaderByTimestamp> seriesReaderByTimestampMap = new ConcurrentHashMap<>();
private Map<Long, IAggregateReader> aggrReaderMap = new ConcurrentHashMap<>();
private Map<Long, GroupByExecutor> groupByExecutorMap = new ConcurrentHashMap<>();
public synchronized RemoteQueryContext getQueryContext(Node node, long queryId) {
Map<Long, RemoteQueryContext> nodeContextMap =
queryContextMap.computeIfAbsent(node, n -> new HashMap<>());
return nodeContextMap.computeIfAbsent(
queryId,
qId -> new RemoteQueryContext(QueryResourceManager.getInstance().assignQueryId(true)));
}
public long registerReader(IBatchReader reader) {
long newReaderId = idAtom.incrementAndGet();
seriesReaderMap.put(newReaderId, reader);
return newReaderId;
}
public long registerReaderByTime(IReaderByTimestamp readerByTimestamp) {
long newReaderId = idAtom.incrementAndGet();
seriesReaderByTimestampMap.put(newReaderId, readerByTimestamp);
return newReaderId;
}
public synchronized void endQuery(Node node, long queryId) throws StorageEngineException {
Map<Long, RemoteQueryContext> nodeContextMap = queryContextMap.get(node);
if (nodeContextMap == null) {
return;
}
RemoteQueryContext remoteQueryContext = nodeContextMap.remove(queryId);
if (remoteQueryContext == null) {
return;
}
// release file resources
QueryResourceManager.getInstance().endQuery(remoteQueryContext.getQueryId());
// remove the readers from the cache
Set<Long> readerIds = remoteQueryContext.getLocalReaderIds();
for (long readerId : readerIds) {
seriesReaderMap.remove(readerId);
seriesReaderByTimestampMap.remove(readerId);
}
Set<Long> localGroupByExecutorIds = remoteQueryContext.getLocalGroupByExecutorIds();
for (Long localGroupByExecutorId : localGroupByExecutorIds) {
groupByExecutorMap.remove(localGroupByExecutorId);
}
}
public IBatchReader getReader(long readerId) {
return seriesReaderMap.get(readerId);
}
public IReaderByTimestamp getReaderByTimestamp(long readerId) {
return seriesReaderByTimestampMap.get(readerId);
}
IAggregateReader getAggrReader(long readerId) {
return aggrReaderMap.get(readerId);
}
public void endAllQueries() throws StorageEngineException {
for (Map<Long, RemoteQueryContext> contextMap : queryContextMap.values()) {
for (RemoteQueryContext context : contextMap.values()) {
QueryResourceManager.getInstance().endQuery(context.getQueryId());
}
}
seriesReaderByTimestampMap.clear();
seriesReaderMap.clear();
aggrReaderMap.clear();
}
long registerAggrReader(IAggregateReader aggregateReader) {
long newReaderId = idAtom.incrementAndGet();
aggrReaderMap.put(newReaderId, aggregateReader);
return newReaderId;
}
public long registerGroupByExecutor(GroupByExecutor groupByExecutor) {
long newExecutorId = idAtom.incrementAndGet();
groupByExecutorMap.put(newExecutorId, groupByExecutor);
return newExecutorId;
}
public GroupByExecutor getGroupByExecutor(long executorId) {
return groupByExecutorMap.get(executorId);
}
}
| 1,568 |
1,099 | package com.example.commonlibrary.net.download;
import android.content.ContentValues;
import android.os.Parcel;
import android.os.Parcelable;
import com.example.commonlibrary.net.NetManager;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.Generated;
/**
 * File entity for a download task
*/
@Entity
public class FileInfo implements Parcelable {
@Id
private String url;
private String name;
private int totalBytes;
private int loadBytes;
private int speed;
private int status;
private String path;
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getTotalBytes() {
return totalBytes;
}
public void setTotalBytes(int totalBytes) {
this.totalBytes = totalBytes;
}
public int getLoadBytes() {
return loadBytes;
}
public void setLoadBytes(int loadBytes) {
this.loadBytes = loadBytes;
}
public int getSpeed() {
return speed;
}
public void setSpeed(int speed) {
this.speed = speed;
}
public int getStatus() {
return status;
}
public void setStatus(int status) {
this.status = status;
}
public String getPath() {
return path;
}
public void setPath(String path) {
this.path = path;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeString(this.url);
dest.writeString(this.name);
dest.writeInt(this.totalBytes);
dest.writeInt(this.loadBytes);
dest.writeInt(this.speed);
dest.writeInt(this.status);
dest.writeString(this.path);
}
protected FileInfo(Parcel in) {
this.url = in.readString();
this.name = in.readString();
this.totalBytes = in.readInt();
this.loadBytes = in.readInt();
this.speed = in.readInt();
this.status = in.readInt();
this.path = in.readString();
}
@Generated(hash = 1019480899)
public FileInfo(String url, String name, int totalBytes, int loadBytes,
int speed, int status, String path) {
this.url = url;
this.name = name;
this.totalBytes = totalBytes;
this.loadBytes = loadBytes;
this.speed = speed;
this.status = status;
this.path = path;
}
@Generated(hash = 1367591352)
public FileInfo() {
}
public FileInfo(int status, String url, String name, int totalSize) {
this.status = status;
this.url = url;
this.name = name;
this.totalBytes = totalSize;
this.loadBytes = 0;
this.speed = 0;
this.path = NetManager.getInstance().getDownLoadCacheDir();
}
public FileInfo(String url, String name) {
this.url = url;
this.name = name;
this.status = DownloadStatus.NORMAL;
this.totalBytes = 0;
this.loadBytes = 0;
this.speed = 0;
this.path = NetManager.getInstance().getDownLoadCacheDir();
}
public static final Creator<FileInfo> CREATOR = new Creator<FileInfo>() {
@Override
public FileInfo createFromParcel(Parcel source) {
return new FileInfo(source);
}
@Override
public FileInfo[] newArray(int size) {
return new FileInfo[size];
}
};
@Override
public String toString() {
return "FileInfo{" +
"url='" + url + '\'' +
", name='" + name + '\'' +
", totalBytes=" + totalBytes +
", loadBytes=" + loadBytes +
", speed=" + speed +
", status=" + status +
", path='" + path + '\'' +
'}';
}
public ContentValues toValues() {
ContentValues values = new ContentValues();
values.put("url", url);
values.put("name", name);
values.put("path", path);
values.put("loadBytes", loadBytes);
values.put("totalBytes", totalBytes);
values.put("status", status);
return values;
}
}
| 1,935 |
435 | T f()
{
T1 r;
r->x = 42;
T2 q;
q = r;
return r->x;
}
| 51 |
20,295 | /**
* @file
 * @brief A buzz number is a number that is either divisible by 7 or whose last
 * digit is 7.
*/
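// Examples (illustrative): 49 is a buzz number (divisible by 7), 27 is a buzz number (last digit
// is 7), and 22 satisfies neither condition, so it is not a buzz number.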
#include <iostream>
/** main function */
int main() {
int n, t;
std::cin >> t;
while (t--) {
std::cin >> n;
if ((n % 7 == 0) || (n % 10 == 7))
std::cout << n << " is a buzz number" << std::endl;
else
std::cout << n << " is not a buzz number" << std::endl;
}
return 0;
}
| 207 |
2,389 | /*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.coreos.monitoring.models;
import com.google.gson.annotations.SerializedName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
/**
* Specification of the desired behavior of the Prometheus cluster. More info:
* https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
*/
@ApiModel(
description =
"Specification of the desired behavior of the Prometheus cluster. More info: https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#spec-and-status")
@javax.annotation.Generated(
value = "org.openapitools.codegen.languages.JavaClientCodegen",
date = "2020-08-31T19:41:55.826Z[Etc/UTC]")
public class V1PrometheusSpec {
public static final String SERIALIZED_NAME_ADDITIONAL_ALERT_MANAGER_CONFIGS =
"additionalAlertManagerConfigs";
@SerializedName(SERIALIZED_NAME_ADDITIONAL_ALERT_MANAGER_CONFIGS)
private V1PrometheusSpecAdditionalAlertManagerConfigs additionalAlertManagerConfigs;
public static final String SERIALIZED_NAME_ADDITIONAL_ALERT_RELABEL_CONFIGS =
"additionalAlertRelabelConfigs";
@SerializedName(SERIALIZED_NAME_ADDITIONAL_ALERT_RELABEL_CONFIGS)
private V1PrometheusSpecAdditionalAlertRelabelConfigs additionalAlertRelabelConfigs;
public static final String SERIALIZED_NAME_ADDITIONAL_SCRAPE_CONFIGS = "additionalScrapeConfigs";
@SerializedName(SERIALIZED_NAME_ADDITIONAL_SCRAPE_CONFIGS)
private V1PrometheusSpecAdditionalScrapeConfigs additionalScrapeConfigs;
public static final String SERIALIZED_NAME_AFFINITY = "affinity";
@SerializedName(SERIALIZED_NAME_AFFINITY)
private V1ThanosRulerSpecAffinity affinity;
public static final String SERIALIZED_NAME_ALERTING = "alerting";
@SerializedName(SERIALIZED_NAME_ALERTING)
private V1PrometheusSpecAlerting alerting;
public static final String SERIALIZED_NAME_APISERVER_CONFIG = "apiserverConfig";
@SerializedName(SERIALIZED_NAME_APISERVER_CONFIG)
private V1PrometheusSpecApiserverConfig apiserverConfig;
public static final String SERIALIZED_NAME_ARBITRARY_F_S_ACCESS_THROUGH_S_MS =
"arbitraryFSAccessThroughSMs";
@SerializedName(SERIALIZED_NAME_ARBITRARY_F_S_ACCESS_THROUGH_S_MS)
private V1PrometheusSpecArbitraryFSAccessThroughSMs arbitraryFSAccessThroughSMs;
public static final String SERIALIZED_NAME_BASE_IMAGE = "baseImage";
@SerializedName(SERIALIZED_NAME_BASE_IMAGE)
private String baseImage;
public static final String SERIALIZED_NAME_CONFIG_MAPS = "configMaps";
@SerializedName(SERIALIZED_NAME_CONFIG_MAPS)
private List<String> configMaps = null;
public static final String SERIALIZED_NAME_CONTAINERS = "containers";
@SerializedName(SERIALIZED_NAME_CONTAINERS)
private List<V1ThanosRulerSpecContainers> containers = null;
public static final String SERIALIZED_NAME_DISABLE_COMPACTION = "disableCompaction";
@SerializedName(SERIALIZED_NAME_DISABLE_COMPACTION)
private Boolean disableCompaction;
public static final String SERIALIZED_NAME_ENABLE_ADMIN_A_P_I = "enableAdminAPI";
@SerializedName(SERIALIZED_NAME_ENABLE_ADMIN_A_P_I)
private Boolean enableAdminAPI;
public static final String SERIALIZED_NAME_ENFORCED_NAMESPACE_LABEL = "enforcedNamespaceLabel";
@SerializedName(SERIALIZED_NAME_ENFORCED_NAMESPACE_LABEL)
private String enforcedNamespaceLabel;
public static final String SERIALIZED_NAME_EVALUATION_INTERVAL = "evaluationInterval";
@SerializedName(SERIALIZED_NAME_EVALUATION_INTERVAL)
private String evaluationInterval;
public static final String SERIALIZED_NAME_EXTERNAL_LABELS = "externalLabels";
@SerializedName(SERIALIZED_NAME_EXTERNAL_LABELS)
private Map<String, String> externalLabels = null;
public static final String SERIALIZED_NAME_EXTERNAL_URL = "externalUrl";
@SerializedName(SERIALIZED_NAME_EXTERNAL_URL)
private String externalUrl;
public static final String SERIALIZED_NAME_IGNORE_NAMESPACE_SELECTORS =
"ignoreNamespaceSelectors";
@SerializedName(SERIALIZED_NAME_IGNORE_NAMESPACE_SELECTORS)
private Boolean ignoreNamespaceSelectors;
public static final String SERIALIZED_NAME_IMAGE = "image";
@SerializedName(SERIALIZED_NAME_IMAGE)
private String image;
public static final String SERIALIZED_NAME_IMAGE_PULL_SECRETS = "imagePullSecrets";
@SerializedName(SERIALIZED_NAME_IMAGE_PULL_SECRETS)
private List<V1ThanosRulerSpecImagePullSecrets> imagePullSecrets = null;
public static final String SERIALIZED_NAME_INIT_CONTAINERS = "initContainers";
@SerializedName(SERIALIZED_NAME_INIT_CONTAINERS)
private List<V1ThanosRulerSpecContainers> initContainers = null;
public static final String SERIALIZED_NAME_LISTEN_LOCAL = "listenLocal";
@SerializedName(SERIALIZED_NAME_LISTEN_LOCAL)
private Boolean listenLocal;
public static final String SERIALIZED_NAME_LOG_FORMAT = "logFormat";
@SerializedName(SERIALIZED_NAME_LOG_FORMAT)
private String logFormat;
public static final String SERIALIZED_NAME_LOG_LEVEL = "logLevel";
@SerializedName(SERIALIZED_NAME_LOG_LEVEL)
private String logLevel;
public static final String SERIALIZED_NAME_NODE_SELECTOR = "nodeSelector";
@SerializedName(SERIALIZED_NAME_NODE_SELECTOR)
private Map<String, String> nodeSelector = null;
public static final String SERIALIZED_NAME_OVERRIDE_HONOR_LABELS = "overrideHonorLabels";
@SerializedName(SERIALIZED_NAME_OVERRIDE_HONOR_LABELS)
private Boolean overrideHonorLabels;
public static final String SERIALIZED_NAME_OVERRIDE_HONOR_TIMESTAMPS = "overrideHonorTimestamps";
@SerializedName(SERIALIZED_NAME_OVERRIDE_HONOR_TIMESTAMPS)
private Boolean overrideHonorTimestamps;
public static final String SERIALIZED_NAME_PAUSED = "paused";
@SerializedName(SERIALIZED_NAME_PAUSED)
private Boolean paused;
public static final String SERIALIZED_NAME_POD_METADATA = "podMetadata";
@SerializedName(SERIALIZED_NAME_POD_METADATA)
private V1PrometheusSpecPodMetadata podMetadata;
public static final String SERIALIZED_NAME_POD_MONITOR_NAMESPACE_SELECTOR =
"podMonitorNamespaceSelector";
@SerializedName(SERIALIZED_NAME_POD_MONITOR_NAMESPACE_SELECTOR)
private V1PrometheusSpecPodMonitorNamespaceSelector podMonitorNamespaceSelector;
public static final String SERIALIZED_NAME_POD_MONITOR_SELECTOR = "podMonitorSelector";
@SerializedName(SERIALIZED_NAME_POD_MONITOR_SELECTOR)
private V1PrometheusSpecPodMonitorSelector podMonitorSelector;
public static final String SERIALIZED_NAME_PORT_NAME = "portName";
@SerializedName(SERIALIZED_NAME_PORT_NAME)
private String portName;
public static final String SERIALIZED_NAME_PRIORITY_CLASS_NAME = "priorityClassName";
@SerializedName(SERIALIZED_NAME_PRIORITY_CLASS_NAME)
private String priorityClassName;
public static final String SERIALIZED_NAME_PROMETHEUS_EXTERNAL_LABEL_NAME =
"prometheusExternalLabelName";
@SerializedName(SERIALIZED_NAME_PROMETHEUS_EXTERNAL_LABEL_NAME)
private String prometheusExternalLabelName;
public static final String SERIALIZED_NAME_QUERY = "query";
@SerializedName(SERIALIZED_NAME_QUERY)
private V1PrometheusSpecQuery query;
public static final String SERIALIZED_NAME_REMOTE_READ = "remoteRead";
@SerializedName(SERIALIZED_NAME_REMOTE_READ)
private List<V1PrometheusSpecRemoteRead> remoteRead = null;
public static final String SERIALIZED_NAME_REMOTE_WRITE = "remoteWrite";
@SerializedName(SERIALIZED_NAME_REMOTE_WRITE)
private List<V1PrometheusSpecRemoteWrite> remoteWrite = null;
public static final String SERIALIZED_NAME_REPLICA_EXTERNAL_LABEL_NAME =
"replicaExternalLabelName";
@SerializedName(SERIALIZED_NAME_REPLICA_EXTERNAL_LABEL_NAME)
private String replicaExternalLabelName;
public static final String SERIALIZED_NAME_REPLICAS = "replicas";
@SerializedName(SERIALIZED_NAME_REPLICAS)
private Integer replicas;
public static final String SERIALIZED_NAME_RESOURCES = "resources";
@SerializedName(SERIALIZED_NAME_RESOURCES)
private V1AlertmanagerSpecResources resources;
public static final String SERIALIZED_NAME_RETENTION = "retention";
@SerializedName(SERIALIZED_NAME_RETENTION)
private String retention;
public static final String SERIALIZED_NAME_RETENTION_SIZE = "retentionSize";
@SerializedName(SERIALIZED_NAME_RETENTION_SIZE)
private String retentionSize;
public static final String SERIALIZED_NAME_ROUTE_PREFIX = "routePrefix";
@SerializedName(SERIALIZED_NAME_ROUTE_PREFIX)
private String routePrefix;
public static final String SERIALIZED_NAME_RULE_NAMESPACE_SELECTOR = "ruleNamespaceSelector";
@SerializedName(SERIALIZED_NAME_RULE_NAMESPACE_SELECTOR)
private V1PrometheusSpecRuleNamespaceSelector ruleNamespaceSelector;
public static final String SERIALIZED_NAME_RULE_SELECTOR = "ruleSelector";
@SerializedName(SERIALIZED_NAME_RULE_SELECTOR)
private V1PrometheusSpecRuleSelector ruleSelector;
public static final String SERIALIZED_NAME_RULES = "rules";
@SerializedName(SERIALIZED_NAME_RULES)
private V1PrometheusSpecRules rules;
public static final String SERIALIZED_NAME_SCRAPE_INTERVAL = "scrapeInterval";
@SerializedName(SERIALIZED_NAME_SCRAPE_INTERVAL)
private String scrapeInterval;
public static final String SERIALIZED_NAME_SECRETS = "secrets";
@SerializedName(SERIALIZED_NAME_SECRETS)
private List<String> secrets = null;
public static final String SERIALIZED_NAME_SECURITY_CONTEXT = "securityContext";
@SerializedName(SERIALIZED_NAME_SECURITY_CONTEXT)
private V1ThanosRulerSpecSecurityContext1 securityContext;
public static final String SERIALIZED_NAME_SERVICE_ACCOUNT_NAME = "serviceAccountName";
@SerializedName(SERIALIZED_NAME_SERVICE_ACCOUNT_NAME)
private String serviceAccountName;
public static final String SERIALIZED_NAME_SERVICE_MONITOR_NAMESPACE_SELECTOR =
"serviceMonitorNamespaceSelector";
@SerializedName(SERIALIZED_NAME_SERVICE_MONITOR_NAMESPACE_SELECTOR)
private V1PrometheusSpecServiceMonitorNamespaceSelector serviceMonitorNamespaceSelector;
public static final String SERIALIZED_NAME_SERVICE_MONITOR_SELECTOR = "serviceMonitorSelector";
@SerializedName(SERIALIZED_NAME_SERVICE_MONITOR_SELECTOR)
private V1PrometheusSpecServiceMonitorSelector serviceMonitorSelector;
public static final String SERIALIZED_NAME_SHA = "sha";
@SerializedName(SERIALIZED_NAME_SHA)
private String sha;
public static final String SERIALIZED_NAME_STORAGE = "storage";
@SerializedName(SERIALIZED_NAME_STORAGE)
private V1ThanosRulerSpecStorage storage;
public static final String SERIALIZED_NAME_TAG = "tag";
@SerializedName(SERIALIZED_NAME_TAG)
private String tag;
public static final String SERIALIZED_NAME_THANOS = "thanos";
@SerializedName(SERIALIZED_NAME_THANOS)
private V1PrometheusSpecThanos thanos;
public static final String SERIALIZED_NAME_TOLERATIONS = "tolerations";
@SerializedName(SERIALIZED_NAME_TOLERATIONS)
private List<V1ThanosRulerSpecTolerations> tolerations = null;
public static final String SERIALIZED_NAME_VERSION = "version";
@SerializedName(SERIALIZED_NAME_VERSION)
private String version;
public static final String SERIALIZED_NAME_VOLUME_MOUNTS = "volumeMounts";
@SerializedName(SERIALIZED_NAME_VOLUME_MOUNTS)
private List<V1ThanosRulerSpecVolumeMounts> volumeMounts = null;
public static final String SERIALIZED_NAME_VOLUMES = "volumes";
@SerializedName(SERIALIZED_NAME_VOLUMES)
private List<V1ThanosRulerSpecVolumes> volumes = null;
public static final String SERIALIZED_NAME_WAL_COMPRESSION = "walCompression";
@SerializedName(SERIALIZED_NAME_WAL_COMPRESSION)
private Boolean walCompression;
public V1PrometheusSpec additionalAlertManagerConfigs(
V1PrometheusSpecAdditionalAlertManagerConfigs additionalAlertManagerConfigs) {
this.additionalAlertManagerConfigs = additionalAlertManagerConfigs;
return this;
}
/**
* Get additionalAlertManagerConfigs
*
* @return additionalAlertManagerConfigs
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecAdditionalAlertManagerConfigs getAdditionalAlertManagerConfigs() {
return additionalAlertManagerConfigs;
}
public void setAdditionalAlertManagerConfigs(
V1PrometheusSpecAdditionalAlertManagerConfigs additionalAlertManagerConfigs) {
this.additionalAlertManagerConfigs = additionalAlertManagerConfigs;
}
public V1PrometheusSpec additionalAlertRelabelConfigs(
V1PrometheusSpecAdditionalAlertRelabelConfigs additionalAlertRelabelConfigs) {
this.additionalAlertRelabelConfigs = additionalAlertRelabelConfigs;
return this;
}
/**
* Get additionalAlertRelabelConfigs
*
* @return additionalAlertRelabelConfigs
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecAdditionalAlertRelabelConfigs getAdditionalAlertRelabelConfigs() {
return additionalAlertRelabelConfigs;
}
public void setAdditionalAlertRelabelConfigs(
V1PrometheusSpecAdditionalAlertRelabelConfigs additionalAlertRelabelConfigs) {
this.additionalAlertRelabelConfigs = additionalAlertRelabelConfigs;
}
public V1PrometheusSpec additionalScrapeConfigs(
V1PrometheusSpecAdditionalScrapeConfigs additionalScrapeConfigs) {
this.additionalScrapeConfigs = additionalScrapeConfigs;
return this;
}
/**
* Get additionalScrapeConfigs
*
* @return additionalScrapeConfigs
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecAdditionalScrapeConfigs getAdditionalScrapeConfigs() {
return additionalScrapeConfigs;
}
public void setAdditionalScrapeConfigs(
V1PrometheusSpecAdditionalScrapeConfigs additionalScrapeConfigs) {
this.additionalScrapeConfigs = additionalScrapeConfigs;
}
public V1PrometheusSpec affinity(V1ThanosRulerSpecAffinity affinity) {
this.affinity = affinity;
return this;
}
/**
* Get affinity
*
* @return affinity
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1ThanosRulerSpecAffinity getAffinity() {
return affinity;
}
public void setAffinity(V1ThanosRulerSpecAffinity affinity) {
this.affinity = affinity;
}
public V1PrometheusSpec alerting(V1PrometheusSpecAlerting alerting) {
this.alerting = alerting;
return this;
}
/**
* Get alerting
*
* @return alerting
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecAlerting getAlerting() {
return alerting;
}
public void setAlerting(V1PrometheusSpecAlerting alerting) {
this.alerting = alerting;
}
public V1PrometheusSpec apiserverConfig(V1PrometheusSpecApiserverConfig apiserverConfig) {
this.apiserverConfig = apiserverConfig;
return this;
}
/**
* Get apiserverConfig
*
* @return apiserverConfig
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecApiserverConfig getApiserverConfig() {
return apiserverConfig;
}
public void setApiserverConfig(V1PrometheusSpecApiserverConfig apiserverConfig) {
this.apiserverConfig = apiserverConfig;
}
public V1PrometheusSpec arbitraryFSAccessThroughSMs(
V1PrometheusSpecArbitraryFSAccessThroughSMs arbitraryFSAccessThroughSMs) {
this.arbitraryFSAccessThroughSMs = arbitraryFSAccessThroughSMs;
return this;
}
/**
* Get arbitraryFSAccessThroughSMs
*
* @return arbitraryFSAccessThroughSMs
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecArbitraryFSAccessThroughSMs getArbitraryFSAccessThroughSMs() {
return arbitraryFSAccessThroughSMs;
}
public void setArbitraryFSAccessThroughSMs(
V1PrometheusSpecArbitraryFSAccessThroughSMs arbitraryFSAccessThroughSMs) {
this.arbitraryFSAccessThroughSMs = arbitraryFSAccessThroughSMs;
}
public V1PrometheusSpec baseImage(String baseImage) {
this.baseImage = baseImage;
return this;
}
/**
* Base image to use for a Prometheus deployment.
*
* @return baseImage
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Base image to use for a Prometheus deployment.")
public String getBaseImage() {
return baseImage;
}
public void setBaseImage(String baseImage) {
this.baseImage = baseImage;
}
public V1PrometheusSpec configMaps(List<String> configMaps) {
this.configMaps = configMaps;
return this;
}
public V1PrometheusSpec addConfigMapsItem(String configMapsItem) {
if (this.configMaps == null) {
this.configMaps = new ArrayList<String>();
}
this.configMaps.add(configMapsItem);
return this;
}
/**
* ConfigMaps is a list of ConfigMaps in the same namespace as the Prometheus object, which shall
* be mounted into the Prometheus Pods. The ConfigMaps are mounted into
* /etc/prometheus/configmaps/<configmap-name>.
*
* @return configMaps
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"ConfigMaps is a list of ConfigMaps in the same namespace as the Prometheus object, which shall be mounted into the Prometheus Pods. The ConfigMaps are mounted into /etc/prometheus/configmaps/<configmap-name>.")
public List<String> getConfigMaps() {
return configMaps;
}
public void setConfigMaps(List<String> configMaps) {
this.configMaps = configMaps;
}
public V1PrometheusSpec containers(List<V1ThanosRulerSpecContainers> containers) {
this.containers = containers;
return this;
}
public V1PrometheusSpec addContainersItem(V1ThanosRulerSpecContainers containersItem) {
if (this.containers == null) {
this.containers = new ArrayList<V1ThanosRulerSpecContainers>();
}
this.containers.add(containersItem);
return this;
}
/**
* Containers allows injecting additional containers or modifying operator generated containers.
* This can be used to allow adding an authentication proxy to a Prometheus pod or to change the
* behavior of an operator generated container. Containers described here modify an operator
* generated container if they share the same name and modifications are done via a strategic
* merge patch. The current container names are: `prometheus`,
* `prometheus-config-reloader`, `rules-configmap-reloader`, and
* `thanos-sidecar`. Overriding containers is entirely outside the scope of what the
* maintainers will support and by doing so, you accept that this behaviour may break at any time
* without notice.
*
* @return containers
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Containers allows injecting additional containers or modifying operator generated containers. This can be used to allow adding an authentication proxy to a Prometheus pod or to change the behavior of an operator generated container. Containers described here modify an operator generated container if they share the same name and modifications are done via a strategic merge patch. The current container names are: `prometheus`, `prometheus-config-reloader`, `rules-configmap-reloader`, and `thanos-sidecar`. Overriding containers is entirely outside the scope of what the maintainers will support and by doing so, you accept that this behaviour may break at any time without notice.")
public List<V1ThanosRulerSpecContainers> getContainers() {
return containers;
}
public void setContainers(List<V1ThanosRulerSpecContainers> containers) {
this.containers = containers;
}
public V1PrometheusSpec disableCompaction(Boolean disableCompaction) {
this.disableCompaction = disableCompaction;
return this;
}
/**
* Disable prometheus compaction.
*
* @return disableCompaction
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Disable prometheus compaction.")
public Boolean getDisableCompaction() {
return disableCompaction;
}
public void setDisableCompaction(Boolean disableCompaction) {
this.disableCompaction = disableCompaction;
}
public V1PrometheusSpec enableAdminAPI(Boolean enableAdminAPI) {
this.enableAdminAPI = enableAdminAPI;
return this;
}
/**
* Enable access to prometheus web admin API. Defaults to the value of `false`. WARNING:
* Enabling the admin APIs enables mutating endpoints, to delete data, shutdown Prometheus, and
* more. Enabling this should be done with care and the user is advised to add additional
* authentication authorization via a proxy to ensure only clients authorized to perform these
* actions can do so. For more information see
* https://prometheus.io/docs/prometheus/latest/querying/api/#tsdb-admin-apis
*
* @return enableAdminAPI
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Enable access to prometheus web admin API. Defaults to the value of `false`. WARNING: Enabling the admin APIs enables mutating endpoints, to delete data, shutdown Prometheus, and more. Enabling this should be done with care and the user is advised to add additional authentication authorization via a proxy to ensure only clients authorized to perform these actions can do so. For more information see https://prometheus.io/docs/prometheus/latest/querying/api/#tsdb-admin-apis")
public Boolean getEnableAdminAPI() {
return enableAdminAPI;
}
public void setEnableAdminAPI(Boolean enableAdminAPI) {
this.enableAdminAPI = enableAdminAPI;
}
public V1PrometheusSpec enforcedNamespaceLabel(String enforcedNamespaceLabel) {
this.enforcedNamespaceLabel = enforcedNamespaceLabel;
return this;
}
/**
* EnforcedNamespaceLabel enforces adding a namespace label of origin for each alert and metric
* that is user created. The label value will always be the namespace of the object that is being
* created.
*
* @return enforcedNamespaceLabel
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"EnforcedNamespaceLabel enforces adding a namespace label of origin for each alert and metric that is user created. The label value will always be the namespace of the object that is being created.")
public String getEnforcedNamespaceLabel() {
return enforcedNamespaceLabel;
}
public void setEnforcedNamespaceLabel(String enforcedNamespaceLabel) {
this.enforcedNamespaceLabel = enforcedNamespaceLabel;
}
public V1PrometheusSpec evaluationInterval(String evaluationInterval) {
this.evaluationInterval = evaluationInterval;
return this;
}
/**
* Interval between consecutive evaluations.
*
* @return evaluationInterval
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Interval between consecutive evaluations.")
public String getEvaluationInterval() {
return evaluationInterval;
}
public void setEvaluationInterval(String evaluationInterval) {
this.evaluationInterval = evaluationInterval;
}
public V1PrometheusSpec externalLabels(Map<String, String> externalLabels) {
this.externalLabels = externalLabels;
return this;
}
public V1PrometheusSpec putExternalLabelsItem(String key, String externalLabelsItem) {
if (this.externalLabels == null) {
this.externalLabels = new HashMap<String, String>();
}
this.externalLabels.put(key, externalLabelsItem);
return this;
}
/**
* The labels to add to any time series or alerts when communicating with external systems
* (federation, remote storage, Alertmanager).
*
* @return externalLabels
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"The labels to add to any time series or alerts when communicating with external systems (federation, remote storage, Alertmanager).")
public Map<String, String> getExternalLabels() {
return externalLabels;
}
public void setExternalLabels(Map<String, String> externalLabels) {
this.externalLabels = externalLabels;
}
public V1PrometheusSpec externalUrl(String externalUrl) {
this.externalUrl = externalUrl;
return this;
}
/**
* The external URL the Prometheus instances will be available under. This is necessary to
* generate correct URLs. This is necessary if Prometheus is not served from root of a DNS name.
*
* @return externalUrl
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"The external URL the Prometheus instances will be available under. This is necessary to generate correct URLs. This is necessary if Prometheus is not served from root of a DNS name.")
public String getExternalUrl() {
return externalUrl;
}
public void setExternalUrl(String externalUrl) {
this.externalUrl = externalUrl;
}
public V1PrometheusSpec ignoreNamespaceSelectors(Boolean ignoreNamespaceSelectors) {
this.ignoreNamespaceSelectors = ignoreNamespaceSelectors;
return this;
}
/**
* IgnoreNamespaceSelectors if set to true will ignore NamespaceSelector settings from the
* podmonitor and servicemonitor configs, and they will only discover endpoints within their
* current namespace. Defaults to false.
*
* @return ignoreNamespaceSelectors
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"IgnoreNamespaceSelectors if set to true will ignore NamespaceSelector settings from the podmonitor and servicemonitor configs, and they will only discover endpoints within their current namespace. Defaults to false.")
public Boolean getIgnoreNamespaceSelectors() {
return ignoreNamespaceSelectors;
}
public void setIgnoreNamespaceSelectors(Boolean ignoreNamespaceSelectors) {
this.ignoreNamespaceSelectors = ignoreNamespaceSelectors;
}
public V1PrometheusSpec image(String image) {
this.image = image;
return this;
}
/**
* Image if specified has precedence over baseImage, tag and sha combinations. Specifying the
* version is still necessary to ensure the Prometheus Operator knows what version of Prometheus
* is being configured.
*
* @return image
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Image if specified has precedence over baseImage, tag and sha combinations. Specifying the version is still necessary to ensure the Prometheus Operator knows what version of Prometheus is being configured.")
public String getImage() {
return image;
}
public void setImage(String image) {
this.image = image;
}
public V1PrometheusSpec imagePullSecrets(
List<V1ThanosRulerSpecImagePullSecrets> imagePullSecrets) {
this.imagePullSecrets = imagePullSecrets;
return this;
}
public V1PrometheusSpec addImagePullSecretsItem(
V1ThanosRulerSpecImagePullSecrets imagePullSecretsItem) {
if (this.imagePullSecrets == null) {
this.imagePullSecrets = new ArrayList<V1ThanosRulerSpecImagePullSecrets>();
}
this.imagePullSecrets.add(imagePullSecretsItem);
return this;
}
/**
* An optional list of references to secrets in the same namespace to use for pulling prometheus
* and alertmanager images from registries see
* http://kubernetes.io/docs/user-guide/images#specifying-imagepullsecrets-on-a-pod
*
* @return imagePullSecrets
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"An optional list of references to secrets in the same namespace to use for pulling prometheus and alertmanager images from registries see http://kubernetes.io/docs/user-guide/images#specifying-imagepullsecrets-on-a-pod")
public List<V1ThanosRulerSpecImagePullSecrets> getImagePullSecrets() {
return imagePullSecrets;
}
public void setImagePullSecrets(List<V1ThanosRulerSpecImagePullSecrets> imagePullSecrets) {
this.imagePullSecrets = imagePullSecrets;
}
public V1PrometheusSpec initContainers(List<V1ThanosRulerSpecContainers> initContainers) {
this.initContainers = initContainers;
return this;
}
public V1PrometheusSpec addInitContainersItem(V1ThanosRulerSpecContainers initContainersItem) {
if (this.initContainers == null) {
this.initContainers = new ArrayList<V1ThanosRulerSpecContainers>();
}
this.initContainers.add(initContainersItem);
return this;
}
/**
* InitContainers allows adding initContainers to the pod definition. Those can be used to e.g.
* fetch secrets for injection into the Prometheus configuration from external sources. Any errors
* during the execution of an initContainer will lead to a restart of the Pod. More info:
* https://kubernetes.io/docs/concepts/workloads/pods/init-containers/ Using initContainers for
   * any use case other than secret fetching is entirely outside the scope of what the maintainers
* will support and by doing so, you accept that this behaviour may break at any time without
* notice.
*
* @return initContainers
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"InitContainers allows adding initContainers to the pod definition. Those can be used to e.g. fetch secrets for injection into the Prometheus configuration from external sources. Any errors during the execution of an initContainer will lead to a restart of the Pod. More info: https://kubernetes.io/docs/concepts/workloads/pods/init-containers/ Using initContainers for any use case other then secret fetching is entirely outside the scope of what the maintainers will support and by doing so, you accept that this behaviour may break at any time without notice.")
public List<V1ThanosRulerSpecContainers> getInitContainers() {
return initContainers;
}
public void setInitContainers(List<V1ThanosRulerSpecContainers> initContainers) {
this.initContainers = initContainers;
}
public V1PrometheusSpec listenLocal(Boolean listenLocal) {
this.listenLocal = listenLocal;
return this;
}
/**
* ListenLocal makes the Prometheus server listen on loopback, so that it does not bind against
* the Pod IP.
*
* @return listenLocal
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"ListenLocal makes the Prometheus server listen on loopback, so that it does not bind against the Pod IP.")
public Boolean getListenLocal() {
return listenLocal;
}
public void setListenLocal(Boolean listenLocal) {
this.listenLocal = listenLocal;
}
public V1PrometheusSpec logFormat(String logFormat) {
this.logFormat = logFormat;
return this;
}
/**
* Log format for Prometheus to be configured with.
*
* @return logFormat
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Log format for Prometheus to be configured with.")
public String getLogFormat() {
return logFormat;
}
public void setLogFormat(String logFormat) {
this.logFormat = logFormat;
}
public V1PrometheusSpec logLevel(String logLevel) {
this.logLevel = logLevel;
return this;
}
/**
* Log level for Prometheus to be configured with.
*
* @return logLevel
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Log level for Prometheus to be configured with.")
public String getLogLevel() {
return logLevel;
}
public void setLogLevel(String logLevel) {
this.logLevel = logLevel;
}
public V1PrometheusSpec nodeSelector(Map<String, String> nodeSelector) {
this.nodeSelector = nodeSelector;
return this;
}
public V1PrometheusSpec putNodeSelectorItem(String key, String nodeSelectorItem) {
if (this.nodeSelector == null) {
this.nodeSelector = new HashMap<String, String>();
}
this.nodeSelector.put(key, nodeSelectorItem);
return this;
}
/**
* Define which Nodes the Pods are scheduled on.
*
* @return nodeSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Define which Nodes the Pods are scheduled on.")
public Map<String, String> getNodeSelector() {
return nodeSelector;
}
public void setNodeSelector(Map<String, String> nodeSelector) {
this.nodeSelector = nodeSelector;
}
public V1PrometheusSpec overrideHonorLabels(Boolean overrideHonorLabels) {
this.overrideHonorLabels = overrideHonorLabels;
return this;
}
/**
* OverrideHonorLabels if set to true overrides all user configured honor_labels. If HonorLabels
* is set in ServiceMonitor or PodMonitor to true, this overrides honor_labels to false.
*
* @return overrideHonorLabels
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"OverrideHonorLabels if set to true overrides all user configured honor_labels. If HonorLabels is set in ServiceMonitor or PodMonitor to true, this overrides honor_labels to false.")
public Boolean getOverrideHonorLabels() {
return overrideHonorLabels;
}
public void setOverrideHonorLabels(Boolean overrideHonorLabels) {
this.overrideHonorLabels = overrideHonorLabels;
}
public V1PrometheusSpec overrideHonorTimestamps(Boolean overrideHonorTimestamps) {
this.overrideHonorTimestamps = overrideHonorTimestamps;
return this;
}
/**
* OverrideHonorTimestamps allows to globally enforce honoring timestamps in all scrape configs.
*
* @return overrideHonorTimestamps
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"OverrideHonorTimestamps allows to globally enforce honoring timestamps in all scrape configs.")
public Boolean getOverrideHonorTimestamps() {
return overrideHonorTimestamps;
}
public void setOverrideHonorTimestamps(Boolean overrideHonorTimestamps) {
this.overrideHonorTimestamps = overrideHonorTimestamps;
}
public V1PrometheusSpec paused(Boolean paused) {
this.paused = paused;
return this;
}
/**
* When a Prometheus deployment is paused, no actions except for deletion will be performed on the
* underlying objects.
*
* @return paused
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"When a Prometheus deployment is paused, no actions except for deletion will be performed on the underlying objects.")
public Boolean getPaused() {
return paused;
}
public void setPaused(Boolean paused) {
this.paused = paused;
}
public V1PrometheusSpec podMetadata(V1PrometheusSpecPodMetadata podMetadata) {
this.podMetadata = podMetadata;
return this;
}
/**
* Get podMetadata
*
* @return podMetadata
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecPodMetadata getPodMetadata() {
return podMetadata;
}
public void setPodMetadata(V1PrometheusSpecPodMetadata podMetadata) {
this.podMetadata = podMetadata;
}
public V1PrometheusSpec podMonitorNamespaceSelector(
V1PrometheusSpecPodMonitorNamespaceSelector podMonitorNamespaceSelector) {
this.podMonitorNamespaceSelector = podMonitorNamespaceSelector;
return this;
}
/**
* Get podMonitorNamespaceSelector
*
* @return podMonitorNamespaceSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecPodMonitorNamespaceSelector getPodMonitorNamespaceSelector() {
return podMonitorNamespaceSelector;
}
public void setPodMonitorNamespaceSelector(
V1PrometheusSpecPodMonitorNamespaceSelector podMonitorNamespaceSelector) {
this.podMonitorNamespaceSelector = podMonitorNamespaceSelector;
}
public V1PrometheusSpec podMonitorSelector(
V1PrometheusSpecPodMonitorSelector podMonitorSelector) {
this.podMonitorSelector = podMonitorSelector;
return this;
}
/**
* Get podMonitorSelector
*
* @return podMonitorSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecPodMonitorSelector getPodMonitorSelector() {
return podMonitorSelector;
}
public void setPodMonitorSelector(V1PrometheusSpecPodMonitorSelector podMonitorSelector) {
this.podMonitorSelector = podMonitorSelector;
}
public V1PrometheusSpec portName(String portName) {
this.portName = portName;
return this;
}
/**
* Port name used for the pods and governing service. This defaults to web
*
* @return portName
*/
@javax.annotation.Nullable
@ApiModelProperty(
value = "Port name used for the pods and governing service. This defaults to web")
public String getPortName() {
return portName;
}
public void setPortName(String portName) {
this.portName = portName;
}
public V1PrometheusSpec priorityClassName(String priorityClassName) {
this.priorityClassName = priorityClassName;
return this;
}
/**
* Priority class assigned to the Pods
*
* @return priorityClassName
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Priority class assigned to the Pods")
public String getPriorityClassName() {
return priorityClassName;
}
public void setPriorityClassName(String priorityClassName) {
this.priorityClassName = priorityClassName;
}
public V1PrometheusSpec prometheusExternalLabelName(String prometheusExternalLabelName) {
this.prometheusExternalLabelName = prometheusExternalLabelName;
return this;
}
/**
* Name of Prometheus external label used to denote Prometheus instance name. Defaults to the
* value of `prometheus`. External label will _not_ be added when value is set to empty
* string (`\"\"`).
*
* @return prometheusExternalLabelName
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Name of Prometheus external label used to denote Prometheus instance name. Defaults to the value of `prometheus`. External label will _not_ be added when value is set to empty string (`\"\"`).")
public String getPrometheusExternalLabelName() {
return prometheusExternalLabelName;
}
public void setPrometheusExternalLabelName(String prometheusExternalLabelName) {
this.prometheusExternalLabelName = prometheusExternalLabelName;
}
public V1PrometheusSpec query(V1PrometheusSpecQuery query) {
this.query = query;
return this;
}
/**
* Get query
*
* @return query
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecQuery getQuery() {
return query;
}
public void setQuery(V1PrometheusSpecQuery query) {
this.query = query;
}
public V1PrometheusSpec remoteRead(List<V1PrometheusSpecRemoteRead> remoteRead) {
this.remoteRead = remoteRead;
return this;
}
public V1PrometheusSpec addRemoteReadItem(V1PrometheusSpecRemoteRead remoteReadItem) {
if (this.remoteRead == null) {
this.remoteRead = new ArrayList<V1PrometheusSpecRemoteRead>();
}
this.remoteRead.add(remoteReadItem);
return this;
}
/**
* If specified, the remote_read spec. This is an experimental feature, it may change in any
* upcoming release in a breaking way.
*
* @return remoteRead
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"If specified, the remote_read spec. This is an experimental feature, it may change in any upcoming release in a breaking way.")
public List<V1PrometheusSpecRemoteRead> getRemoteRead() {
return remoteRead;
}
public void setRemoteRead(List<V1PrometheusSpecRemoteRead> remoteRead) {
this.remoteRead = remoteRead;
}
public V1PrometheusSpec remoteWrite(List<V1PrometheusSpecRemoteWrite> remoteWrite) {
this.remoteWrite = remoteWrite;
return this;
}
public V1PrometheusSpec addRemoteWriteItem(V1PrometheusSpecRemoteWrite remoteWriteItem) {
if (this.remoteWrite == null) {
this.remoteWrite = new ArrayList<V1PrometheusSpecRemoteWrite>();
}
this.remoteWrite.add(remoteWriteItem);
return this;
}
/**
* If specified, the remote_write spec. This is an experimental feature, it may change in any
* upcoming release in a breaking way.
*
* @return remoteWrite
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"If specified, the remote_write spec. This is an experimental feature, it may change in any upcoming release in a breaking way.")
public List<V1PrometheusSpecRemoteWrite> getRemoteWrite() {
return remoteWrite;
}
public void setRemoteWrite(List<V1PrometheusSpecRemoteWrite> remoteWrite) {
this.remoteWrite = remoteWrite;
}
public V1PrometheusSpec replicaExternalLabelName(String replicaExternalLabelName) {
this.replicaExternalLabelName = replicaExternalLabelName;
return this;
}
/**
* Name of Prometheus external label used to denote replica name. Defaults to the value of
* `prometheus_replica`. External label will _not_ be added when value is set to empty
* string (`\"\"`).
*
* @return replicaExternalLabelName
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Name of Prometheus external label used to denote replica name. Defaults to the value of `prometheus_replica`. External label will _not_ be added when value is set to empty string (`\"\"`).")
public String getReplicaExternalLabelName() {
return replicaExternalLabelName;
}
public void setReplicaExternalLabelName(String replicaExternalLabelName) {
this.replicaExternalLabelName = replicaExternalLabelName;
}
public V1PrometheusSpec replicas(Integer replicas) {
this.replicas = replicas;
return this;
}
/**
* Number of instances to deploy for a Prometheus deployment.
*
* @return replicas
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Number of instances to deploy for a Prometheus deployment.")
public Integer getReplicas() {
return replicas;
}
public void setReplicas(Integer replicas) {
this.replicas = replicas;
}
public V1PrometheusSpec resources(V1AlertmanagerSpecResources resources) {
this.resources = resources;
return this;
}
/**
* Get resources
*
* @return resources
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1AlertmanagerSpecResources getResources() {
return resources;
}
public void setResources(V1AlertmanagerSpecResources resources) {
this.resources = resources;
}
public V1PrometheusSpec retention(String retention) {
this.retention = retention;
return this;
}
/**
* Time duration Prometheus shall retain data for. Default is '24h', and must match the
* regular expression `[0-9]+(ms|s|m|h|d|w|y)` (milliseconds seconds minutes hours days
* weeks years).
*
* @return retention
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Time duration Prometheus shall retain data for. Default is '24h', and must match the regular expression `[0-9]+(ms|s|m|h|d|w|y)` (milliseconds seconds minutes hours days weeks years).")
public String getRetention() {
return retention;
}
public void setRetention(String retention) {
this.retention = retention;
}
public V1PrometheusSpec retentionSize(String retentionSize) {
this.retentionSize = retentionSize;
return this;
}
/**
* Maximum amount of disk space used by blocks.
*
* @return retentionSize
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Maximum amount of disk space used by blocks.")
public String getRetentionSize() {
return retentionSize;
}
public void setRetentionSize(String retentionSize) {
this.retentionSize = retentionSize;
}
public V1PrometheusSpec routePrefix(String routePrefix) {
this.routePrefix = routePrefix;
return this;
}
/**
* The route prefix Prometheus registers HTTP handlers for. This is useful, if using ExternalURL
* and a proxy is rewriting HTTP routes of a request, and the actual ExternalURL is still true,
* but the server serves requests under a different route prefix. For example for use with
* `kubectl proxy`.
*
* @return routePrefix
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"The route prefix Prometheus registers HTTP handlers for. This is useful, if using ExternalURL and a proxy is rewriting HTTP routes of a request, and the actual ExternalURL is still true, but the server serves requests under a different route prefix. For example for use with `kubectl proxy`.")
public String getRoutePrefix() {
return routePrefix;
}
public void setRoutePrefix(String routePrefix) {
this.routePrefix = routePrefix;
}
public V1PrometheusSpec ruleNamespaceSelector(
V1PrometheusSpecRuleNamespaceSelector ruleNamespaceSelector) {
this.ruleNamespaceSelector = ruleNamespaceSelector;
return this;
}
/**
* Get ruleNamespaceSelector
*
* @return ruleNamespaceSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecRuleNamespaceSelector getRuleNamespaceSelector() {
return ruleNamespaceSelector;
}
public void setRuleNamespaceSelector(
V1PrometheusSpecRuleNamespaceSelector ruleNamespaceSelector) {
this.ruleNamespaceSelector = ruleNamespaceSelector;
}
public V1PrometheusSpec ruleSelector(V1PrometheusSpecRuleSelector ruleSelector) {
this.ruleSelector = ruleSelector;
return this;
}
/**
* Get ruleSelector
*
* @return ruleSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecRuleSelector getRuleSelector() {
return ruleSelector;
}
public void setRuleSelector(V1PrometheusSpecRuleSelector ruleSelector) {
this.ruleSelector = ruleSelector;
}
public V1PrometheusSpec rules(V1PrometheusSpecRules rules) {
this.rules = rules;
return this;
}
/**
* Get rules
*
* @return rules
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecRules getRules() {
return rules;
}
public void setRules(V1PrometheusSpecRules rules) {
this.rules = rules;
}
public V1PrometheusSpec scrapeInterval(String scrapeInterval) {
this.scrapeInterval = scrapeInterval;
return this;
}
/**
* Interval between consecutive scrapes.
*
* @return scrapeInterval
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Interval between consecutive scrapes.")
public String getScrapeInterval() {
return scrapeInterval;
}
public void setScrapeInterval(String scrapeInterval) {
this.scrapeInterval = scrapeInterval;
}
public V1PrometheusSpec secrets(List<String> secrets) {
this.secrets = secrets;
return this;
}
public V1PrometheusSpec addSecretsItem(String secretsItem) {
if (this.secrets == null) {
this.secrets = new ArrayList<String>();
}
this.secrets.add(secretsItem);
return this;
}
/**
* Secrets is a list of Secrets in the same namespace as the Prometheus object, which shall be
* mounted into the Prometheus Pods. The Secrets are mounted into
* /etc/prometheus/secrets/<secret-name>.
*
* @return secrets
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Secrets is a list of Secrets in the same namespace as the Prometheus object, which shall be mounted into the Prometheus Pods. The Secrets are mounted into /etc/prometheus/secrets/<secret-name>.")
public List<String> getSecrets() {
return secrets;
}
public void setSecrets(List<String> secrets) {
this.secrets = secrets;
}
public V1PrometheusSpec securityContext(V1ThanosRulerSpecSecurityContext1 securityContext) {
this.securityContext = securityContext;
return this;
}
/**
* Get securityContext
*
* @return securityContext
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1ThanosRulerSpecSecurityContext1 getSecurityContext() {
return securityContext;
}
public void setSecurityContext(V1ThanosRulerSpecSecurityContext1 securityContext) {
this.securityContext = securityContext;
}
public V1PrometheusSpec serviceAccountName(String serviceAccountName) {
this.serviceAccountName = serviceAccountName;
return this;
}
/**
* ServiceAccountName is the name of the ServiceAccount to use to run the Prometheus Pods.
*
* @return serviceAccountName
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"ServiceAccountName is the name of the ServiceAccount to use to run the Prometheus Pods.")
public String getServiceAccountName() {
return serviceAccountName;
}
public void setServiceAccountName(String serviceAccountName) {
this.serviceAccountName = serviceAccountName;
}
public V1PrometheusSpec serviceMonitorNamespaceSelector(
V1PrometheusSpecServiceMonitorNamespaceSelector serviceMonitorNamespaceSelector) {
this.serviceMonitorNamespaceSelector = serviceMonitorNamespaceSelector;
return this;
}
/**
* Get serviceMonitorNamespaceSelector
*
* @return serviceMonitorNamespaceSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecServiceMonitorNamespaceSelector getServiceMonitorNamespaceSelector() {
return serviceMonitorNamespaceSelector;
}
public void setServiceMonitorNamespaceSelector(
V1PrometheusSpecServiceMonitorNamespaceSelector serviceMonitorNamespaceSelector) {
this.serviceMonitorNamespaceSelector = serviceMonitorNamespaceSelector;
}
public V1PrometheusSpec serviceMonitorSelector(
V1PrometheusSpecServiceMonitorSelector serviceMonitorSelector) {
this.serviceMonitorSelector = serviceMonitorSelector;
return this;
}
/**
* Get serviceMonitorSelector
*
* @return serviceMonitorSelector
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecServiceMonitorSelector getServiceMonitorSelector() {
return serviceMonitorSelector;
}
public void setServiceMonitorSelector(
V1PrometheusSpecServiceMonitorSelector serviceMonitorSelector) {
this.serviceMonitorSelector = serviceMonitorSelector;
}
public V1PrometheusSpec sha(String sha) {
this.sha = sha;
return this;
}
/**
* SHA of Prometheus container image to be deployed. Defaults to the value of `version`.
* Similar to a tag, but the SHA explicitly deploys an immutable container image. Version and Tag
* are ignored if SHA is set.
*
* @return sha
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"SHA of Prometheus container image to be deployed. Defaults to the value of `version`. Similar to a tag, but the SHA explicitly deploys an immutable container image. Version and Tag are ignored if SHA is set.")
public String getSha() {
return sha;
}
public void setSha(String sha) {
this.sha = sha;
}
public V1PrometheusSpec storage(V1ThanosRulerSpecStorage storage) {
this.storage = storage;
return this;
}
/**
* Get storage
*
* @return storage
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1ThanosRulerSpecStorage getStorage() {
return storage;
}
public void setStorage(V1ThanosRulerSpecStorage storage) {
this.storage = storage;
}
public V1PrometheusSpec tag(String tag) {
this.tag = tag;
return this;
}
/**
* Tag of Prometheus container image to be deployed. Defaults to the value of `version`.
* Version is ignored if Tag is set.
*
* @return tag
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Tag of Prometheus container image to be deployed. Defaults to the value of `version`. Version is ignored if Tag is set.")
public String getTag() {
return tag;
}
public void setTag(String tag) {
this.tag = tag;
}
public V1PrometheusSpec thanos(V1PrometheusSpecThanos thanos) {
this.thanos = thanos;
return this;
}
/**
* Get thanos
*
* @return thanos
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "")
public V1PrometheusSpecThanos getThanos() {
return thanos;
}
public void setThanos(V1PrometheusSpecThanos thanos) {
this.thanos = thanos;
}
public V1PrometheusSpec tolerations(List<V1ThanosRulerSpecTolerations> tolerations) {
this.tolerations = tolerations;
return this;
}
public V1PrometheusSpec addTolerationsItem(V1ThanosRulerSpecTolerations tolerationsItem) {
if (this.tolerations == null) {
this.tolerations = new ArrayList<V1ThanosRulerSpecTolerations>();
}
this.tolerations.add(tolerationsItem);
return this;
}
/**
* If specified, the pod's tolerations.
*
* @return tolerations
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "If specified, the pod's tolerations.")
public List<V1ThanosRulerSpecTolerations> getTolerations() {
return tolerations;
}
public void setTolerations(List<V1ThanosRulerSpecTolerations> tolerations) {
this.tolerations = tolerations;
}
public V1PrometheusSpec version(String version) {
this.version = version;
return this;
}
/**
* Version of Prometheus to be deployed.
*
* @return version
*/
@javax.annotation.Nullable
@ApiModelProperty(value = "Version of Prometheus to be deployed.")
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public V1PrometheusSpec volumeMounts(List<V1ThanosRulerSpecVolumeMounts> volumeMounts) {
this.volumeMounts = volumeMounts;
return this;
}
public V1PrometheusSpec addVolumeMountsItem(V1ThanosRulerSpecVolumeMounts volumeMountsItem) {
if (this.volumeMounts == null) {
this.volumeMounts = new ArrayList<V1ThanosRulerSpecVolumeMounts>();
}
this.volumeMounts.add(volumeMountsItem);
return this;
}
/**
* VolumeMounts allows configuration of additional VolumeMounts on the output StatefulSet
* definition. VolumeMounts specified will be appended to other VolumeMounts in the prometheus
* container, that are generated as a result of StorageSpec objects.
*
* @return volumeMounts
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"VolumeMounts allows configuration of additional VolumeMounts on the output StatefulSet definition. VolumeMounts specified will be appended to other VolumeMounts in the prometheus container, that are generated as a result of StorageSpec objects.")
public List<V1ThanosRulerSpecVolumeMounts> getVolumeMounts() {
return volumeMounts;
}
public void setVolumeMounts(List<V1ThanosRulerSpecVolumeMounts> volumeMounts) {
this.volumeMounts = volumeMounts;
}
public V1PrometheusSpec volumes(List<V1ThanosRulerSpecVolumes> volumes) {
this.volumes = volumes;
return this;
}
public V1PrometheusSpec addVolumesItem(V1ThanosRulerSpecVolumes volumesItem) {
if (this.volumes == null) {
this.volumes = new ArrayList<V1ThanosRulerSpecVolumes>();
}
this.volumes.add(volumesItem);
return this;
}
/**
* Volumes allows configuration of additional volumes on the output StatefulSet definition.
* Volumes specified will be appended to other volumes that are generated as a result of
* StorageSpec objects.
*
* @return volumes
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Volumes allows configuration of additional volumes on the output StatefulSet definition. Volumes specified will be appended to other volumes that are generated as a result of StorageSpec objects.")
public List<V1ThanosRulerSpecVolumes> getVolumes() {
return volumes;
}
public void setVolumes(List<V1ThanosRulerSpecVolumes> volumes) {
this.volumes = volumes;
}
public V1PrometheusSpec walCompression(Boolean walCompression) {
this.walCompression = walCompression;
return this;
}
/**
* Enable compression of the write-ahead log using Snappy. This flag is only available in versions
* of Prometheus >= 2.11.0.
*
* @return walCompression
*/
@javax.annotation.Nullable
@ApiModelProperty(
value =
"Enable compression of the write-ahead log using Snappy. This flag is only available in versions of Prometheus >= 2.11.0.")
public Boolean getWalCompression() {
return walCompression;
}
public void setWalCompression(Boolean walCompression) {
this.walCompression = walCompression;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
V1PrometheusSpec v1PrometheusSpec = (V1PrometheusSpec) o;
return Objects.equals(
this.additionalAlertManagerConfigs, v1PrometheusSpec.additionalAlertManagerConfigs)
&& Objects.equals(
this.additionalAlertRelabelConfigs, v1PrometheusSpec.additionalAlertRelabelConfigs)
&& Objects.equals(this.additionalScrapeConfigs, v1PrometheusSpec.additionalScrapeConfigs)
&& Objects.equals(this.affinity, v1PrometheusSpec.affinity)
&& Objects.equals(this.alerting, v1PrometheusSpec.alerting)
&& Objects.equals(this.apiserverConfig, v1PrometheusSpec.apiserverConfig)
&& Objects.equals(
this.arbitraryFSAccessThroughSMs, v1PrometheusSpec.arbitraryFSAccessThroughSMs)
&& Objects.equals(this.baseImage, v1PrometheusSpec.baseImage)
&& Objects.equals(this.configMaps, v1PrometheusSpec.configMaps)
&& Objects.equals(this.containers, v1PrometheusSpec.containers)
&& Objects.equals(this.disableCompaction, v1PrometheusSpec.disableCompaction)
&& Objects.equals(this.enableAdminAPI, v1PrometheusSpec.enableAdminAPI)
&& Objects.equals(this.enforcedNamespaceLabel, v1PrometheusSpec.enforcedNamespaceLabel)
&& Objects.equals(this.evaluationInterval, v1PrometheusSpec.evaluationInterval)
&& Objects.equals(this.externalLabels, v1PrometheusSpec.externalLabels)
&& Objects.equals(this.externalUrl, v1PrometheusSpec.externalUrl)
&& Objects.equals(this.ignoreNamespaceSelectors, v1PrometheusSpec.ignoreNamespaceSelectors)
&& Objects.equals(this.image, v1PrometheusSpec.image)
&& Objects.equals(this.imagePullSecrets, v1PrometheusSpec.imagePullSecrets)
&& Objects.equals(this.initContainers, v1PrometheusSpec.initContainers)
&& Objects.equals(this.listenLocal, v1PrometheusSpec.listenLocal)
&& Objects.equals(this.logFormat, v1PrometheusSpec.logFormat)
&& Objects.equals(this.logLevel, v1PrometheusSpec.logLevel)
&& Objects.equals(this.nodeSelector, v1PrometheusSpec.nodeSelector)
&& Objects.equals(this.overrideHonorLabels, v1PrometheusSpec.overrideHonorLabels)
&& Objects.equals(this.overrideHonorTimestamps, v1PrometheusSpec.overrideHonorTimestamps)
&& Objects.equals(this.paused, v1PrometheusSpec.paused)
&& Objects.equals(this.podMetadata, v1PrometheusSpec.podMetadata)
&& Objects.equals(
this.podMonitorNamespaceSelector, v1PrometheusSpec.podMonitorNamespaceSelector)
&& Objects.equals(this.podMonitorSelector, v1PrometheusSpec.podMonitorSelector)
&& Objects.equals(this.portName, v1PrometheusSpec.portName)
&& Objects.equals(this.priorityClassName, v1PrometheusSpec.priorityClassName)
&& Objects.equals(
this.prometheusExternalLabelName, v1PrometheusSpec.prometheusExternalLabelName)
&& Objects.equals(this.query, v1PrometheusSpec.query)
&& Objects.equals(this.remoteRead, v1PrometheusSpec.remoteRead)
&& Objects.equals(this.remoteWrite, v1PrometheusSpec.remoteWrite)
&& Objects.equals(this.replicaExternalLabelName, v1PrometheusSpec.replicaExternalLabelName)
&& Objects.equals(this.replicas, v1PrometheusSpec.replicas)
&& Objects.equals(this.resources, v1PrometheusSpec.resources)
&& Objects.equals(this.retention, v1PrometheusSpec.retention)
&& Objects.equals(this.retentionSize, v1PrometheusSpec.retentionSize)
&& Objects.equals(this.routePrefix, v1PrometheusSpec.routePrefix)
&& Objects.equals(this.ruleNamespaceSelector, v1PrometheusSpec.ruleNamespaceSelector)
&& Objects.equals(this.ruleSelector, v1PrometheusSpec.ruleSelector)
&& Objects.equals(this.rules, v1PrometheusSpec.rules)
&& Objects.equals(this.scrapeInterval, v1PrometheusSpec.scrapeInterval)
&& Objects.equals(this.secrets, v1PrometheusSpec.secrets)
&& Objects.equals(this.securityContext, v1PrometheusSpec.securityContext)
&& Objects.equals(this.serviceAccountName, v1PrometheusSpec.serviceAccountName)
&& Objects.equals(
this.serviceMonitorNamespaceSelector, v1PrometheusSpec.serviceMonitorNamespaceSelector)
&& Objects.equals(this.serviceMonitorSelector, v1PrometheusSpec.serviceMonitorSelector)
&& Objects.equals(this.sha, v1PrometheusSpec.sha)
&& Objects.equals(this.storage, v1PrometheusSpec.storage)
&& Objects.equals(this.tag, v1PrometheusSpec.tag)
&& Objects.equals(this.thanos, v1PrometheusSpec.thanos)
&& Objects.equals(this.tolerations, v1PrometheusSpec.tolerations)
&& Objects.equals(this.version, v1PrometheusSpec.version)
&& Objects.equals(this.volumeMounts, v1PrometheusSpec.volumeMounts)
&& Objects.equals(this.volumes, v1PrometheusSpec.volumes)
&& Objects.equals(this.walCompression, v1PrometheusSpec.walCompression);
}
@Override
public int hashCode() {
return Objects.hash(
additionalAlertManagerConfigs,
additionalAlertRelabelConfigs,
additionalScrapeConfigs,
affinity,
alerting,
apiserverConfig,
arbitraryFSAccessThroughSMs,
baseImage,
configMaps,
containers,
disableCompaction,
enableAdminAPI,
enforcedNamespaceLabel,
evaluationInterval,
externalLabels,
externalUrl,
ignoreNamespaceSelectors,
image,
imagePullSecrets,
initContainers,
listenLocal,
logFormat,
logLevel,
nodeSelector,
overrideHonorLabels,
overrideHonorTimestamps,
paused,
podMetadata,
podMonitorNamespaceSelector,
podMonitorSelector,
portName,
priorityClassName,
prometheusExternalLabelName,
query,
remoteRead,
remoteWrite,
replicaExternalLabelName,
replicas,
resources,
retention,
retentionSize,
routePrefix,
ruleNamespaceSelector,
ruleSelector,
rules,
scrapeInterval,
secrets,
securityContext,
serviceAccountName,
serviceMonitorNamespaceSelector,
serviceMonitorSelector,
sha,
storage,
tag,
thanos,
tolerations,
version,
volumeMounts,
volumes,
walCompression);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class V1PrometheusSpec {\n");
sb.append(" additionalAlertManagerConfigs: ")
.append(toIndentedString(additionalAlertManagerConfigs))
.append("\n");
sb.append(" additionalAlertRelabelConfigs: ")
.append(toIndentedString(additionalAlertRelabelConfigs))
.append("\n");
sb.append(" additionalScrapeConfigs: ")
.append(toIndentedString(additionalScrapeConfigs))
.append("\n");
sb.append(" affinity: ").append(toIndentedString(affinity)).append("\n");
sb.append(" alerting: ").append(toIndentedString(alerting)).append("\n");
sb.append(" apiserverConfig: ").append(toIndentedString(apiserverConfig)).append("\n");
sb.append(" arbitraryFSAccessThroughSMs: ")
.append(toIndentedString(arbitraryFSAccessThroughSMs))
.append("\n");
sb.append(" baseImage: ").append(toIndentedString(baseImage)).append("\n");
sb.append(" configMaps: ").append(toIndentedString(configMaps)).append("\n");
sb.append(" containers: ").append(toIndentedString(containers)).append("\n");
sb.append(" disableCompaction: ").append(toIndentedString(disableCompaction)).append("\n");
sb.append(" enableAdminAPI: ").append(toIndentedString(enableAdminAPI)).append("\n");
sb.append(" enforcedNamespaceLabel: ")
.append(toIndentedString(enforcedNamespaceLabel))
.append("\n");
sb.append(" evaluationInterval: ").append(toIndentedString(evaluationInterval)).append("\n");
sb.append(" externalLabels: ").append(toIndentedString(externalLabels)).append("\n");
sb.append(" externalUrl: ").append(toIndentedString(externalUrl)).append("\n");
sb.append(" ignoreNamespaceSelectors: ")
.append(toIndentedString(ignoreNamespaceSelectors))
.append("\n");
sb.append(" image: ").append(toIndentedString(image)).append("\n");
sb.append(" imagePullSecrets: ").append(toIndentedString(imagePullSecrets)).append("\n");
sb.append(" initContainers: ").append(toIndentedString(initContainers)).append("\n");
sb.append(" listenLocal: ").append(toIndentedString(listenLocal)).append("\n");
sb.append(" logFormat: ").append(toIndentedString(logFormat)).append("\n");
sb.append(" logLevel: ").append(toIndentedString(logLevel)).append("\n");
sb.append(" nodeSelector: ").append(toIndentedString(nodeSelector)).append("\n");
sb.append(" overrideHonorLabels: ")
.append(toIndentedString(overrideHonorLabels))
.append("\n");
sb.append(" overrideHonorTimestamps: ")
.append(toIndentedString(overrideHonorTimestamps))
.append("\n");
sb.append(" paused: ").append(toIndentedString(paused)).append("\n");
sb.append(" podMetadata: ").append(toIndentedString(podMetadata)).append("\n");
sb.append(" podMonitorNamespaceSelector: ")
.append(toIndentedString(podMonitorNamespaceSelector))
.append("\n");
sb.append(" podMonitorSelector: ").append(toIndentedString(podMonitorSelector)).append("\n");
sb.append(" portName: ").append(toIndentedString(portName)).append("\n");
sb.append(" priorityClassName: ").append(toIndentedString(priorityClassName)).append("\n");
sb.append(" prometheusExternalLabelName: ")
.append(toIndentedString(prometheusExternalLabelName))
.append("\n");
sb.append(" query: ").append(toIndentedString(query)).append("\n");
sb.append(" remoteRead: ").append(toIndentedString(remoteRead)).append("\n");
sb.append(" remoteWrite: ").append(toIndentedString(remoteWrite)).append("\n");
sb.append(" replicaExternalLabelName: ")
.append(toIndentedString(replicaExternalLabelName))
.append("\n");
sb.append(" replicas: ").append(toIndentedString(replicas)).append("\n");
sb.append(" resources: ").append(toIndentedString(resources)).append("\n");
sb.append(" retention: ").append(toIndentedString(retention)).append("\n");
sb.append(" retentionSize: ").append(toIndentedString(retentionSize)).append("\n");
sb.append(" routePrefix: ").append(toIndentedString(routePrefix)).append("\n");
sb.append(" ruleNamespaceSelector: ")
.append(toIndentedString(ruleNamespaceSelector))
.append("\n");
sb.append(" ruleSelector: ").append(toIndentedString(ruleSelector)).append("\n");
sb.append(" rules: ").append(toIndentedString(rules)).append("\n");
sb.append(" scrapeInterval: ").append(toIndentedString(scrapeInterval)).append("\n");
sb.append(" secrets: ").append(toIndentedString(secrets)).append("\n");
sb.append(" securityContext: ").append(toIndentedString(securityContext)).append("\n");
sb.append(" serviceAccountName: ").append(toIndentedString(serviceAccountName)).append("\n");
sb.append(" serviceMonitorNamespaceSelector: ")
.append(toIndentedString(serviceMonitorNamespaceSelector))
.append("\n");
sb.append(" serviceMonitorSelector: ")
.append(toIndentedString(serviceMonitorSelector))
.append("\n");
sb.append(" sha: ").append(toIndentedString(sha)).append("\n");
sb.append(" storage: ").append(toIndentedString(storage)).append("\n");
sb.append(" tag: ").append(toIndentedString(tag)).append("\n");
sb.append(" thanos: ").append(toIndentedString(thanos)).append("\n");
sb.append(" tolerations: ").append(toIndentedString(tolerations)).append("\n");
sb.append(" version: ").append(toIndentedString(version)).append("\n");
sb.append(" volumeMounts: ").append(toIndentedString(volumeMounts)).append("\n");
sb.append(" volumes: ").append(toIndentedString(volumes)).append("\n");
sb.append(" walCompression: ").append(toIndentedString(walCompression)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
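// Illustrative usage sketch (not part of the generated model): the fluent
// setters above each return `this`, so a spec is typically assembled as a
// single chain. The field values below are placeholders only:
//
//   V1PrometheusSpec spec = new V1PrometheusSpec()
//       .replicas(2)
//       .retention("24h")
//       .scrapeInterval("30s")
//       .serviceAccountName("prometheus");
//
// Any other setter in this class can be appended to the chain in the same way.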
| 23,595 |
582 | package com.easy.arProduce;
import org.springframework.cloud.stream.annotation.Output;
import org.springframework.messaging.MessageChannel;
public interface MySource {
@Output("output1")
MessageChannel output1();
@Output("output2")
MessageChannel output2();
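    // Usage sketch (illustration only, assumes the legacy annotation-based
    // binder model): after binding with @EnableBinding(MySource.class), a
    // message can be published through either channel, e.g.
    //   mySource.output1().send(MessageBuilder.withPayload("payload").build());
    // where MessageBuilder is org.springframework.messaging.support.MessageBuilder.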
} | 86 |
906 | // Copyright (c) 2020 <NAME> and contributors of the VTIL Project
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// 3. Neither the name of VTIL Project nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#pragma once
#include <mutex>
#include <cstdlib>
#include <atomic>
#include <algorithm>
#include <cstring>
#include <thread>
#include "detached_queue.hpp"
#include "relaxed_atomics.hpp"
#include "type_helpers.hpp"
#include "task.hpp"
// [Configuration]
// Determine the number of buckets, initial size, growth settings and the local buffer length.
// - If local buffer length is zero, will dispatch to bucket directly.
//
#ifndef VTIL_OBJECT_POOL_BUCKETS
#define VTIL_OBJECT_POOL_BUCKETS std::thread::hardware_concurrency()
#endif
#ifndef VTIL_OBJECT_POOL_INITIAL_SIZE
#define VTIL_OBJECT_POOL_INITIAL_SIZE ( 8ull * 1024 * 1024 )
#endif
#ifndef VTIL_OBJECT_POOL_GROWTH_CAP
#define VTIL_OBJECT_POOL_GROWTH_CAP ( 64ull * 1024 * 1024 )
#endif
#ifndef VTIL_OBJECT_POOL_GROWTH_FACTOR
#define VTIL_OBJECT_POOL_GROWTH_FACTOR 2
#endif
#ifndef VTIL_OBJECT_POOL_LOCAL_BUFFER_LEN
#define VTIL_OBJECT_POOL_LOCAL_BUFFER_LEN 256
#endif
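// Example (illustrative): any of the knobs above can be overridden by defining
// the corresponding macro before this header is included, e.g. to disable the
// thread-local buffering entirely:
//
//   #define VTIL_OBJECT_POOL_LOCAL_BUFFER_LEN 0
//   #include "object_pool.hpp" // header name assumed for the example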
namespace vtil
{
// Object pools allow for fast singular type allocation.
//
template <typename T>
struct object_pool
{
// Forward declares and type exports.
//
using value_type = T;
struct pool_instance;
struct bucket_entry;
// A single entry in the pool.
//
struct alignas( T ) object_entry
{
// Stores raw data.
//
uint8_t raw_data[ sizeof( T ) ] = { 0 };
// Pointer to owning pool.
//
pool_instance* pool = nullptr;
// Whether object has to be destructed to be used again or not.
//
bool deferred_destruction = false;
// Key for free queue.
//
detached_queue_key<object_entry> free_queue_key;
// Decay into object pointer.
//
T* decay() { return ( T* ) &raw_data[ 0 ]; }
const T* decay() const { return ( const T* ) make_mutable( this )->decay(); }
// Resolve from object pointer.
//
static object_entry* resolve( const void* obj ) { return ptr_at<object_entry>( ( T* ) obj, -make_offset( &object_entry::raw_data ) ); }
};
// Base pool type.
//
struct pool_instance
{
// Key for the pool queue.
//
detached_queue_key<pool_instance> pool_queue_key;
// Number of objects we store and the objects themselves.
//
size_t object_count;
object_entry objects[ 1 ];
};
// Declare the pool allocator.
//
__forceinline static pool_instance* allocate_pool( size_t n )
{
static_assert( alignof( object_entry ) <= 8, "Object aligned over max alignment." );
pool_instance* pool = ( pool_instance* ) malloc( sizeof( pool_instance ) + sizeof( object_entry ) * ( n - 1 ) );
pool->object_count = n;
return pool;
}
__forceinline static void deallocate_pool( pool_instance* pool )
{
free( pool );
}
// Bucket entry dedicating a pool list to each thread.
//
struct bucket_entry
{
// Atomic queue of free memory regions.
//
atomic_detached_queue<object_entry> free_queue;
// Mutex protecting the pool list.
//
std::mutex pool_list_mutex;
			// Size of the last pool in bytes, approximated.
//
size_t last_pool_size_raw = 0;
// List of pools.
//
detached_queue<pool_instance> pools;
// Allocation and deallocation.
//
T* allocate()
{
static_assert( sizeof( object_entry ) < VTIL_OBJECT_POOL_INITIAL_SIZE, "Objects cannot be larger than initial size." );
// Enter pool allocation loop:
//
while ( true )
{
// Pop entry from free queue, if non null:
//
if ( object_entry* entry = free_queue.pop_front( &object_entry::free_queue_key ) )
{
						// If its destruction was deferred, do so now.
//
if ( entry->deferred_destruction )
std::destroy_at<T>( entry->decay() );
// Return the entry.
//
return entry->decay();
}
// Acquire pool list mutex.
//
std::lock_guard _gp{ pool_list_mutex };
// If free queue has any entries, try again.
//
if ( free_queue.size() )
continue;
// Determine new pool's size (raw size is merely an approximation).
//
size_t new_pool_size_raw = last_pool_size_raw
? std::min<size_t>( last_pool_size_raw * VTIL_OBJECT_POOL_GROWTH_FACTOR, VTIL_OBJECT_POOL_GROWTH_CAP )
: VTIL_OBJECT_POOL_INITIAL_SIZE;
last_pool_size_raw = new_pool_size_raw;
size_t object_count = new_pool_size_raw / sizeof( object_entry );
// Allocate the pool, keep the first object to ourselves.
//
pool_instance* new_pool = allocate_pool( object_count );
object_entry* return_value = &new_pool->objects[ 0 ];
return_value->pool = new_pool;
// Initialize every other object, linking them internally so that
// we don't have to hold the free-list lock too long.
//
for ( size_t i = 1; i < object_count; i++ )
{
new ( new_pool->objects + i ) object_entry{
.pool = new_pool,
.free_queue_key = {
.prev = &new_pool->objects[ i - 1 ].free_queue_key,
.next = &new_pool->objects[ i + 1 ].free_queue_key
}
};
}
// Form the partial list.
//
detached_queue<object_entry> tmp;
tmp.head = &new_pool->objects[ 1 ].free_queue_key;
tmp.tail = &new_pool->objects[ object_count - 1 ].free_queue_key;
tmp.tail->next = nullptr;
tmp.head->prev = nullptr;
tmp.list_size = object_count - 1;
// Insert into pools list.
//
pools.emplace_back( &new_pool->pool_queue_key );
// Merge into free queue.
//
free_queue.emplace_back( tmp );
// Return the allocated address.
//
return return_value->decay();
}
}
void deallocate( T* pointer )
{
// Resolve object entry, and emplace it into the free queue.
//
object_entry* entry = object_entry::resolve( pointer );
free_queue.emplace_back( &entry->free_queue_key );
}
};
		// Atomic counter responsible for the grouping of threads => buckets.
//
inline static std::atomic<size_t> counter = { 0 };
// Global list of buckets.
//
inline static const size_t bucket_count = VTIL_OBJECT_POOL_BUCKETS;
static bucket_entry* get_bucket( size_t idx )
{
static const size_t length = VTIL_OBJECT_POOL_BUCKETS;
static bucket_entry* entries = new bucket_entry[ length ];
return entries + ( idx % length );
}
// Local proxy that buffers all commands to avoid spinning.
//
struct local_proxy
{
// Secondary queue that proxies bucket::free_queue.
//
detached_queue<object_entry> secondary_free_queue;
// Smart bucket swapping / balancing.
//
size_t bucket_index = counter++;
bucket_entry* _bucketa = get_bucket( bucket_index );
bucket_entry* _bucketd = get_bucket( bucket_index );
bucket_entry* get_bucket_for_alloc()
{
if ( _bucketa->free_queue.empty() )
_bucketa = get_bucket( --bucket_index );
return _bucketa;
}
bucket_entry* get_bucket_for_dealloc()
{
if ( _bucketd->free_queue.size() > ( VTIL_OBJECT_POOL_INITIAL_SIZE / sizeof( object_entry ) ) )
_bucketd = get_bucket( ++bucket_index );
return _bucketd;
}
// Allocate / deallocate proxies.
//
T* allocate()
{
// Handle no-buffering case.
//
if constexpr( VTIL_OBJECT_POOL_LOCAL_BUFFER_LEN == 0 )
return get_bucket_for_alloc()->allocate();
// If we've buffered any freed memory regions:
//
if ( object_entry* entry = secondary_free_queue.pop_back( &object_entry::free_queue_key ) )
{
					// If its destruction was deferred, do so now.
//
if ( entry->deferred_destruction )
std::destroy_at<T>( entry->decay() );
// Return the entry.
//
return entry->decay();
}
// Dispatch to bucket.
//
return get_bucket_for_alloc()->allocate();
}
void deallocate( T* pointer )
{
// Handle no-buffering case.
//
if constexpr ( VTIL_OBJECT_POOL_LOCAL_BUFFER_LEN == 0 )
return get_bucket_for_dealloc()->deallocate( pointer );
// Insert into free queue.
//
secondary_free_queue.emplace_back( &object_entry::resolve( pointer )->free_queue_key );
// If queue size is over the buffer length:
//
if ( secondary_free_queue.size() >= VTIL_OBJECT_POOL_LOCAL_BUFFER_LEN )
get_bucket_for_dealloc()->free_queue.emplace_back( secondary_free_queue );
}
void flush()
{
if ( secondary_free_queue.size() )
get_bucket_for_dealloc()->free_queue.emplace_back( secondary_free_queue );
}
// Flush buffer on destruction.
//
~local_proxy() { flush(); }
};
inline static task_local( local_proxy ) bucket_proxy;
// Allocate / deallocate wrappers.
//
__forceinline static T* allocate() { return bucket_proxy->allocate(); }
__forceinline static void deallocate( T* pointer ) { bucket_proxy->deallocate( pointer ); }
		// Construct / destruct wrappers.
//
template<typename... Tx>
__forceinline static T* construct( Tx&&... args ) { return new ( allocate() ) T( std::forward<Tx>( args )... ); }
__forceinline static void destruct( T* pointer, bool deferred = true )
{
if ( !( object_entry::resolve( pointer )->deferred_destruction = deferred ) )
std::destroy_at<T>( pointer );
return deallocate( pointer );
}
};
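	// Illustrative usage sketch (not part of the original header): objects are
	// acquired and released through the static helpers above, e.g.
	//
	//   struct node { int value = 0; };
	//   node* n = object_pool<node>::construct();
	//   n->value = 42;
	//   object_pool<node>::destruct( n );
	//
	// destruct() defers the destructor call by default, so a recycled entry is
	// only destroyed right before its slot is handed out again.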
};
| 4,315 |
676 | <reponame>PlexPt/Gitskarios<gh_stars>100-1000
package com.alorma.github.ui.adapter.viewpager;
import android.content.res.Resources;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentPagerAdapter;
import com.alorma.github.ui.listeners.TitleProvider;
import java.util.List;
public class NavigationPagerAdapter extends FragmentPagerAdapter {
private Resources resources;
private List<Fragment> listFragments;
public NavigationPagerAdapter(FragmentManager fm, Resources resources, List<Fragment> listFragments) {
super(fm);
this.resources = resources;
this.listFragments = listFragments;
}
@Override
public Fragment getItem(int position) {
return listFragments.get(position);
}
@Override
public int getCount() {
return listFragments.size();
}
@Override
public CharSequence getPageTitle(int position) {
if (listFragments.get(position) instanceof TitleProvider) {
int title = ((TitleProvider) listFragments.get(position)).getTitle();
return resources.getString(title);
}
return "";
}
} | 369 |
2,151 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef PPAPI_EXAMPLES_COMPOSITOR_SPINNING_CUBE_H_
#define PPAPI_EXAMPLES_COMPOSITOR_SPINNING_CUBE_H_
#include "ppapi/c/pp_stdint.h"
class SpinningCube {
public:
SpinningCube();
~SpinningCube();
void Init(uint32_t width, uint32_t height);
void set_direction(int direction) { direction_ = direction; }
void SetFlingMultiplier(float drag_distance, float drag_time);
void UpdateForTimeDelta(float delta_time);
void UpdateForDragDistance(float distance);
void Draw();
void OnGLContextLost();
private:
class GLState;
// Disallow copy and assign.
SpinningCube(const SpinningCube& other);
SpinningCube& operator=(const SpinningCube& other);
void Update();
bool initialized_;
uint32_t width_;
uint32_t height_;
// Owned ptr.
GLState* state_;
float fling_multiplier_;
int direction_;
};
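// Illustrative usage sketch (not part of the original header); the size and
// frame delta below are placeholders:
//
//   SpinningCube cube;
//   cube.Init(640, 480);              // requires a current GL context
//   cube.UpdateForTimeDelta(0.016f);  // advance the animation once per frame
//   cube.Draw();
//   // Call cube.OnGLContextLost() if the GL context goes away.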
#endif // PPAPI_EXAMPLES_COMPOSITOR_SPINNING_CUBE_H_
| 364 |
358 | <reponame>codedumper1/mysql-ripple
/*
* Copyright 2018 The Ripple Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef MYSQL_RIPPLE_GTID_H
#define MYSQL_RIPPLE_GTID_H
#include <map>
#include <string>
#include <cstdint>
#include <vector>
#include "absl/strings/string_view.h"
#include "mysql_compat.h"
namespace mysql_ripple {
// Uuid with MySQL Parse/Serialize
struct Uuid {
static const int PACK_LENGTH = 16;
static const int TEXT_LENGTH = 36;
Uuid() {
clear();
}
void clear() {
memset(bytes_, 0, sizeof(bytes_));
}
bool empty() const {
uint8_t zero[sizeof(bytes_)] = { 0 };
return memcmp(bytes_, zero, sizeof(zero)) == 0;
}
// Convert to/from string representation.
std::string ToString() const;
bool ToString(char *buffer, int len) const;
bool Parse(absl::string_view);
// Convert to/from binary representation used
// by mysqld.
bool ParseFromBuffer(const uint8_t *buffer, int len);
bool SerializeToBuffer(uint8_t *buffer, int len) const;
bool Equal(const Uuid& other) const {
return memcmp(bytes_, other.bytes_, sizeof(bytes_)) == 0;
}
// Construct a fake uuid from server_id.
void ConstructFromServerId(int server_id);
private:
uint8_t bytes_[PACK_LENGTH];
};
// This class represents a server identifier.
// For MariaDB server_id is used,
// for MySQL uuid is used.
struct ServerId {
ServerId() : server_id(0) {}
void reset() { server_id = 0; uuid.clear(); }
void assign(uint64_t id) {
if (id != server_id) {
server_id = id;
uuid.clear();
}
}
bool empty() const {
return server_id == 0 && uuid.empty();
}
bool equal(const ServerId& other) const {
return server_id == other.server_id &&
uuid.Equal(other.uuid);
}
Uuid uuid;
uint64_t server_id;
};
// This class represents a GTID.
struct GTID {
GTID();
ServerId server_id;
uint64_t seq_no;
uint32_t domain_id; // Only used by MariaDB
void Reset() {
server_id.reset();
seq_no = 0;
domain_id = 0;
}
GTID& set_server_id(uint64_t val) { server_id.assign(val); return *this;}
GTID& set_domain_id(uint32_t val) { domain_id = val; return *this;}
GTID& set_sequence_no(uint64_t val) { seq_no = val; return *this;}
bool IsEmpty() const {
return (server_id.empty() && domain_id == 0 && seq_no == 0);
}
bool equal(const GTID& other) const {
return
(other.domain_id == domain_id) &&
(other.seq_no == seq_no) &&
(other.server_id.equal(server_id));
}
std::string ToString() const;
std::string ToMariaDBString() const;
bool Parse(absl::string_view);
};
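// Example (illustrative): in MariaDB's text form a GTID is written as
// "domain_id-server_id-seq_no", e.g. "0-1-100" for domain 0, server_id 1,
// sequence number 100; the binlog-index format documented below optionally
// prefixes this with the server UUID.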
// This class represents a start position:
// the last GTID executed per domain, i.e. the last GTID
// per domain that does not need to be sent.
// In MySQL terminology: executed_gtid_set = { -inf - GTIDStartPosition }
struct GTIDStartPosition {
std::vector<GTID> gtids;
bool IsEmpty() const {
return gtids.empty();
}
void Reset() {
gtids.clear();
}
bool Equal(const GTIDStartPosition& other) const {
if (gtids.size() != other.gtids.size())
return false;
for (const GTID& g1 : gtids) {
bool found = false;
for (const GTID& g2 : other.gtids) {
if (g1.equal(g2)) {
found = true;
break;
}
}
if (!found)
return false;
}
return true;
}
// Check if g2 is valid to put in binlog after p1
static bool ValidSuccessor(const GTIDStartPosition& p1,
const GTID& g2) {
for (const GTID& g1 : p1.gtids) {
if (g1.domain_id == g2.domain_id)
return g2.seq_no > g1.seq_no;
}
return true;
}
// Check if p2 is strictly after p1
// I.e all GTIDs in p2 are ValidSuccessor in p1.
static bool IsAfter(const GTIDStartPosition& p1,
const GTIDStartPosition& p2) {
for (const GTID& g2 : p2.gtids) {
if (!ValidSuccessor(p1, g2))
return false;
}
return true;
}
bool ValidSuccessor(const GTID& gtid) const {
return ValidSuccessor(*this, gtid);
}
  // Check if this is contained in the (start, last] interval.
  // Note that the start position itself is not included.
bool IsContained(const GTIDStartPosition& start,
const GTIDStartPosition& last) const {
if (IsEmpty())
return true;
if (start.IsEmpty() && last.IsEmpty())
return false;
if (!last.IsEmpty()) {
if (IsAfter(last, *this)) {
return false;
}
}
if (!start.IsEmpty()) {
if (!IsAfter(start, *this)) {
return false;
}
}
return true;
}
// For logging/debugging.
std::string ToString() const;
// Set/Parse slave_connect_state set by MariaDB.
void ToMariaDBConnectState(std::string *dst) const;
bool ParseMariaDBConnectState(absl::string_view);
// Set/Parse string format used in binlog index.
// [ UUID : ]? domain-serverid-sequenceno
void SerializeToString(std::string *dst) const;
bool Parse(absl::string_view);
bool Update(const GTID&);
};
// GTIDSet as represented by MySQL
struct GTIDSet {
struct Interval {
uint64_t start; // inclusive
uint64_t end; // not inclusive
bool Parse(absl::string_view s);
};
struct GTIDInterval {
Uuid uuid;
std::vector<Interval> intervals;
bool Parse(absl::string_view s);
};
// Clear
void Clear() { gtid_intervals_.clear(); }
bool Parse(absl::string_view s);
std::string ToString() const;
// Parse encoded GTIDSet as written by MySQL
bool ParseFromBuffer(const uint8_t *ptr, int len);
// Write encoded GTIDSet in MySQL format
int PackLength() const;
bool SerializeToBuffer(uint8_t *buffer, int len) const;
//
void AddInterval(const GTIDInterval& gtid_interval) {
gtid_intervals_.push_back(gtid_interval);
}
std::vector<GTIDInterval> gtid_intervals_;
};
// This is the canonical representation of executed gtids in ripple.
// It's a superset of GTIDStartPosition and GTIDSet
// The class can use either UUID or domain_id as "key".
class GTIDList {
public:
// What is key of a stream.
// MariaDB: domain_id
// MySQL: uuid
enum StreamKey {
KEY_UNSPECIFIED,
KEY_DOMAIN_ID,
KEY_UUID,
};
// Mode for GTIDList
// MariaDB: MODE_MONOTONIC/MODE_STRICT_MONOTONIC
// MySQL: MODE_GAPS
enum ListMode {
MODE_UNSPECIFIED,
MODE_MONOTONIC,
MODE_STRICT_MONOTONIC,
MODE_GAPS,
};
GTIDList(StreamKey key = KEY_UNSPECIFIED, ListMode mode = MODE_UNSPECIFIED)
: key_(key), mode_(mode)
{}
bool IsEmpty() const {
return streams_.empty();
}
void Reset() {
streams_.clear();
}
// Assign GTIDList from GTIDStartPosition or GTIDSet.
void Assign(const GTIDStartPosition& start_pos);
void Assign(const GTIDSet& gtidset);
//
bool Equal(const GTIDList&) const;
// Is gtid part of this gtid list?
bool Contained(const GTID& gtid) const;
// Are all gtids represented in A contained in B ?
static bool Subset(const GTIDList& A, const GTIDList& B);
  // Is gtid valid to include?
  // This depends on the list mode, e.g. whether sequence numbers must be
  // strictly monotonically increasing (see ListMode).
bool ValidSuccessor(const GTID& gtid) const;
// Include gtid into this gtid list.
bool Update(const GTID& gtid);
// Set/Parse string format used in binlog index.
// [ UUID : ]? domain-serverid-sequenceno
// [ UUID : ]? domain-serverid-[lo-hi]
void SerializeToString(std::string *dst) const;
bool Parse(absl::string_view s);
// for debugging...
std::string ToString() const {
std::string tmp;
SerializeToString(&tmp);
return tmp;
}
StreamKey GetStreamKey() const {
return key_;
}
ListMode GetListMode() const {
return mode_;
}
std::string GetConfigString() const {
std::string ret = "key=";
switch (GetStreamKey()) {
case KEY_UNSPECIFIED:
ret += "unspecified";
break;
case KEY_DOMAIN_ID:
ret += "domain_id";
break;
case KEY_UUID:
ret += "uuid";
break;
}
ret += ", ";
ret += "mode=";
switch (GetListMode()) {
case MODE_UNSPECIFIED:
ret += "unspecified";
break;
case MODE_MONOTONIC:
ret += "monotonic";
break;
case MODE_STRICT_MONOTONIC:
ret += "strict_monotonic";
break;
case MODE_GAPS:
ret += "gaps";
break;
}
return ret;
}
private:
struct GTIDStream {
ServerId server_id;
uint32_t domain_id;
std::vector<GTIDSet::Interval> intervals;
bool Parse(absl::string_view);
};
StreamKey key_;
ListMode mode_;
std::vector<GTIDStream> streams_;
GTIDStream* FindStream(const GTID&) const;
GTIDStream* FindStream(const GTIDStream&) const;
GTIDStream* FindStreamByDomainId(uint32_t domain_id) const;
GTIDStream* FindStreamByUUID(const Uuid& uuid) const;
void CheckMerge(GTIDStream *stream, int i);
void GuessModeFromGTID(const GTID&);
// Check that data is consistent wrt key_ and mode_
bool Validate() const;
friend bool mysql::compat::Convert(const GTIDList&, GTIDSet *);
friend bool mysql::compat::Convert(const GTIDList&, GTIDStartPosition *);
};
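// Illustrative usage sketch (not part of the original header): parse a start
// position, validate and apply a newly seen GTID, then serialize the result
// back to the binlog-index string format. The "0-1-10" literal is a made-up
// MariaDB-style example, not taken from the original code.
inline bool ExampleTrackGtid(const GTID& gtid, std::string* serialized) {
  GTIDList list(GTIDList::KEY_DOMAIN_ID, GTIDList::MODE_STRICT_MONOTONIC);
  if (!list.Parse("0-1-10"))        // seed with an existing position
    return false;
  if (!list.ValidSuccessor(gtid))   // reject out-of-order events
    return false;
  if (!list.Update(gtid))           // include the event in the executed set
    return false;
  list.SerializeToString(serialized);
  return true;
}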
} // namespace mysql_ripple
#endif // MYSQL_RIPPLE_GTID_H
| 3,785 |
662 | <gh_stars>100-1000
package com.alibaba.rsocket.transport.netty;
import io.netty.handler.ssl.util.SimpleTrustManagerFactory;
import javax.net.ssl.ManagerFactoryParameters;
import javax.net.ssl.TrustManager;
import java.security.KeyStore;
import java.util.List;
/**
* Fingerprints trust manager factory
*
* @author leijuan
*/
public class FingerPrintTrustManagerFactory extends SimpleTrustManagerFactory {
private TrustManager trustManager;
public FingerPrintTrustManagerFactory(List<String> fingerPrintsSha256) {
this.trustManager = new FingerPrintX509TrustManager(fingerPrintsSha256);
}
@Override
protected void engineInit(KeyStore keyStore) throws Exception {
}
@Override
protected void engineInit(ManagerFactoryParameters managerFactoryParameters) throws Exception {
}
@Override
protected TrustManager[] engineGetTrustManagers() {
return new TrustManager[]{trustManager};
}
}
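// Illustrative usage sketch (not part of the original file): wiring the factory
// into a Netty SslContext for a client connection. SslContextBuilder comes from
// netty-handler and the fingerprint string is a placeholder; treat both as
// assumptions about the surrounding project rather than its actual setup.
class FingerPrintTrustManagerFactoryExample {
    static io.netty.handler.ssl.SslContext buildClientContext() throws javax.net.ssl.SSLException {
        java.util.List<String> fingerprints =
                java.util.Collections.singletonList("<sha-256-fingerprint-of-server-cert>");
        return io.netty.handler.ssl.SslContextBuilder.forClient()
                .trustManager(new FingerPrintTrustManagerFactory(fingerprints))
                .build();
    }
}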
| 294 |
647 | /*
* Copyright © 2012 Intel Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice (including the
* next paragraph) shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef _TEST_RUNNER_H_
#define _TEST_RUNNER_H_
#ifdef NDEBUG
#error "Tests must not be built with NDEBUG defined, they rely on assert()."
#endif
#include <unistd.h>
struct test {
const char *name;
void (*run)(void);
int must_fail;
} __attribute__ ((aligned (16)));
#define TEST(name) \
static void name(void); \
\
const struct test test##name \
__attribute__ ((used, section ("test_section"))) = { \
#name, name, 0 \
}; \
\
static void name(void)
#define FAIL_TEST(name) \
static void name(void); \
\
const struct test test##name \
__attribute__ ((used, section ("test_section"))) = { \
#name, name, 1 \
}; \
\
static void name(void)
int
count_open_fds(void);
void
exec_fd_leak_check(int nr_expected_fds); /* never returns */
void
check_fd_leaks(int supposed_fds);
/*
* set/reset the timeout in seconds. The timeout starts
* at the point of invoking this function
*/
void
test_set_timeout(unsigned int);
/* test-runner uses alarm() and SIGALRM, so we cannot use the
 * usleep and sleep functions in tests (see 'man usleep' or
 * 'man sleep', respectively). The following functions are safe
 * to use in tests. */
void
test_usleep(useconds_t);
void
test_sleep(unsigned int);
void
test_disable_coredumps(void);
#define DISABLE_LEAK_CHECKS \
do { \
extern int fd_leak_check_enabled; \
fd_leak_check_enabled = 0; \
} while (0);
#endif
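/*
 * Illustrative usage sketch (not part of the original header): tests are
 * declared with TEST() and rely on assert(); FAIL_TEST() declares a test
 * that is expected to fail. Wrapped in #if 0 so it remains example-only.
 */
#if 0
#include <assert.h>

TEST(sanity_check)
{
	assert(count_open_fds() >= 0);
}
#endif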
| 917 |
1,293 | /*
* Copyright (c) 2020 - Manifold Systems LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package manifold.ext;
import junit.framework.TestCase;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import static org.junit.Assert.assertArrayEquals;
public class ManArrayExtTest extends TestCase
{
public void testExistingMembersStillWork()
{
int[] iArray = {1, 2, 3};
assertEquals( 3, iArray.length );
assertArrayEquals( iArray, iArray.clone() );
assertNotSame( iArray, iArray.clone() );
String[] strArray = {"a", "b", "c", "d"};
assertEquals( 4, strArray.length );
assertArrayEquals( strArray, strArray.clone() );
assertNotSame( strArray, strArray.clone() );
}
public void testToList()
{
int[] iArray = {1, 2, 3};
List<Integer> iList = iArray.toList();
assertEquals( new ArrayList<Integer>() {{add(1); add(2); add(3);}}, iList );
String[] strArray = {"a", "b", "c", "d"};
List<String> strList = strArray.toList();
assertEquals( Arrays.asList( strArray ), strList );
}
public void testCopy()
{
int[] iArray = {1, 2, 3};
assertArrayEquals( iArray, iArray.copy() );
assertNotSame( iArray, iArray.copy() );
int[] iArrayCopy = iArray.copy( iArray.length + 1);
assertArrayEquals( new int[]{1, 2, 3, 0}, iArrayCopy );
iArrayCopy = iArray.copy( -1 );
assertArrayEquals( new int[]{1, 2, 3}, iArrayCopy );
String[] strArray = {"a", "b", "c", "d"};
assertArrayEquals( strArray, strArray.copy() );
assertNotSame( strArray, strArray.copy() );
String[] strArrayCopy = strArray.copy( strArray.length + 1 );
assertArrayEquals( new String[]{"a", "b", "c", "d", null}, strArrayCopy );
}
public void testCopyTo()
{
int[] iArray = {1, 2, 3, 4};
assertArrayEquals( iArray, iArray.copyTo( new int[4] ) );
String[] strArray = {"a", "b", "c", "d"};
assertArrayEquals( strArray, strArray.copyTo( new String[4] ) );
assertNotSame( strArray, strArray.copyTo( new String[4] ) );
}
public void testCopyRange()
{
int[] iArray = {1, 2, 3, 4};
assertArrayEquals( Arrays.copyOfRange( iArray, 1, 3 ), iArray.copyRange( 1, 3 ) );
assertArrayEquals( Arrays.copyOfRange( iArray, 1, 4 ), iArray.copyRange( 1, 4) );
assertArrayEquals( Arrays.copyOfRange( iArray, 1, 4 ), iArray.copyRange( 1, -1 ) );
String[] strArray = {"a", "b", "c", "d"};
assertArrayEquals( Arrays.copyOfRange( strArray, 1, 3 ), strArray.copyRange( 1, 3 ) );
assertArrayEquals( Arrays.copyOfRange( strArray, 1, 4 ), strArray.copyRange( 1, 4) );
assertArrayEquals( Arrays.copyOfRange( strArray, 1, 4 ), strArray.copyRange( 1, -1 ) );
}
public void testCopyRangeTo()
{
int[] iArray = {1, 2, 3, 4};
assertArrayEquals( new int[]{0, 2, 3}, iArray.copyRangeTo( 1, 3, new int[3], 1 ) );
String[] strArray = {"a", "b", "c", "d"};
assertArrayEquals( new String[]{null, "b", "c"}, strArray.copyRangeTo( 1, 3, new String[3], 1 ) );
}
public void testStream()
{
int[] iArray = {1, 2, 3, 4};
try
{
iArray.stream();
      fail(); // primitives not supported with stream()
}
catch( IllegalArgumentException ignore )
{
}
String[] strArray = {"a", "b", "c", "d"};
List<String> result = strArray.stream().map( e -> e + e ).collect( Collectors.toList() );
assertEquals( new ArrayList<String>() {{add("aa"); add("bb"); add("cc"); add("dd");}}, result );
}
public void testForEach()
{
int[] iArray = {1, 2, 3, 4};
try
{
iArray.forEach( (i, e) -> {} );
      fail(); // primitives not supported with forEach()
}
catch( IllegalArgumentException ignore )
{
}
String[] strArray = {"a", "b", "c", "d"};
strArray.forEach( (i, e) -> {strArray[i] = e + e;} );
assertArrayEquals( new String[] {"aa", "bb", "cc", "dd"}, strArray );
}
public void testBinarySearch()
{
int[] intArr = {2, 5, 11, 23};
assertEquals( 2, intArr.binarySearch( 11 ) );
assertEquals( 2, intArr.binarySearch( 0, 4, 11 ) );
String[] strArray = {"a", "b", "c", "d"};
assertEquals( 2, strArray.binarySearch( "c", (s1, s2) -> s1.compareTo(s2) ) );
}
public void testEquals()
{
int[] intArr1 = {2, 5, 11, 23};
int[] intArr2 = intArr1.copy();
//noinspection SimplifiableJUnitAssertion,ArrayEquals
assertTrue( intArr1.equals( intArr2 ) );
int[] intArr3 = intArr1.copy();
intArr3[0] = 0;
//noinspection SimplifiableJUnitAssertion,ArrayEquals
assertFalse( intArr1.equals( intArr3 ) );
String[] strArr1 = {"a", "b", "c", "d"};
String[] strArr2 = strArr1.copy();
//noinspection SimplifiableJUnitAssertion,ArrayEquals
assertTrue( strArr1.equals( strArr2 ) );
String[] strArr3 = strArr1.copy();
strArr3[0] = null;
//noinspection SimplifiableJUnitAssertion,ArrayEquals
assertFalse( strArr1.equals( strArr3 ) );
}
}
| 2,151 |
1,395 | # Copyright (c) 2014, Fundacion Dr. <NAME>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import absolute_import
import os
import unittest
from barf.analysis.graphs import CFGRecoverer, ControlFlowGraph, RecursiveDescent
from barf.analysis.graphs.basicblock import BasicBlock
from barf.arch import ARCH_X86_MODE_32
from barf.arch.x86 import X86ArchitectureInformation
from barf.arch.x86.disassembler import X86Disassembler
from barf.arch.x86.parser import X86Parser
from barf.arch.x86.translator import X86Translator
from barf.core.binary import BinaryFile
def get_full_path(filename):
return os.path.dirname(os.path.abspath(__file__)) + filename
class BinDiffTests(unittest.TestCase):
def setUp(self):
self._parser = X86Parser(ARCH_X86_MODE_32)
self._translator = X86Translator(ARCH_X86_MODE_32)
def test_equality(self):
addr = 0x0804842f
asm = self._parser.parse("cmp DWORD PTR [esp+0x18], 0x41424344")
asm.address = 0x08048425
asm.size = 8
asm1 = [asm]
asm = self._parser.parse("jne 0x08048445")
asm.address = 0x0804842d
asm.size = 2
asm1 += [asm]
ir1 = [self._translator.translate(asm1[0])]
ir1 += [self._translator.translate(asm1[1])]
asm = self._parser.parse("cmp DWORD PTR [esp+0x18], 0x41424344")
asm.address = 0x08048425
asm.size = 8
asm2 = [asm]
asm = self._parser.parse("jne 0x0804844f")
asm.address = 0x0804842d
asm.size = 2
asm2 += [asm]
ir2 = [self._translator.translate(asm2[0])]
ir2 += [self._translator.translate(asm2[1])]
bb1 = BasicBlock()
asm1[0].ir_instrs = ir1[0]
asm1[1].ir_instrs = ir1[1]
bb1.instrs.append(asm1[0])
bb1.instrs.append(asm1[1])
bb2 = BasicBlock()
asm2[0].ir_instrs = ir2[0]
asm2[1].ir_instrs = ir2[1]
bb2.instrs.append(asm2[0])
bb2.instrs.append(asm2[1])
self.assertTrue(bb1 == bb1)
self.assertTrue(bb2 == bb2)
# It will not assert true. Read comment on BasicBlock.__eq__
# self.assertTrue(bb1 != bb2)
class X86CfgRecoveryTests(unittest.TestCase):
def setUp(self):
self._arch_mode = ARCH_X86_MODE_32
self._arch_info = X86ArchitectureInformation(self._arch_mode)
self._disassembler = X86Disassembler(ARCH_X86_MODE_32)
self._translator = X86Translator(ARCH_X86_MODE_32)
def test_sample_1(self):
binary = BinaryFile(get_full_path("/data/bin/x86_sample_1"))
strategy = RecursiveDescent(self._disassembler, binary.text_section, self._translator, self._arch_info)
recoverer = CFGRecoverer(strategy)
bbs, call_targets = recoverer.build(0x0804840b, 0x08048438)
self.assertEqual(len(bbs), 1)
cfg = ControlFlowGraph(bbs, name="main")
self.assertEqual(cfg.start_address, 0x0804840b)
self.assertEqual(cfg.end_address, 0x08048438)
self.assertEqual(len(cfg.basic_blocks), 1)
def test_sample_2(self):
binary = BinaryFile(get_full_path("/data/bin/x86_sample_2"))
strategy = RecursiveDescent(self._disassembler, binary.text_section, self._translator, self._arch_info)
recoverer = CFGRecoverer(strategy)
# Recover "main" function.
bbs, call_targets = recoverer.build(0x0804846d, 0x080484a3)
self.assertEqual(len(bbs), 4)
cfg = ControlFlowGraph(bbs, name="main")
self.assertEqual(cfg.start_address, 0x0804846d)
self.assertEqual(cfg.end_address, 0x080484a3)
self.assertEqual(len(cfg.basic_blocks), 4)
bb_entry = cfg.find_basic_block(0x0804846d)
self.assertEqual(len(bb_entry.branches), 2)
self.assertEqual(bb_entry.taken_branch, 0x08048491)
self.assertEqual(bb_entry.not_taken_branch, 0x0804848a)
bb_taken = cfg.find_basic_block(0x08048491)
self.assertEqual(len(bb_taken.branches), 1)
self.assertEqual(bb_taken.taken_branch, None)
self.assertEqual(bb_taken.not_taken_branch, None)
self.assertEqual(bb_taken.direct_branch, 0x08048496)
bb_not_taken = cfg.find_basic_block(0x0804848a)
self.assertEqual(len(bb_not_taken.branches), 1)
self.assertEqual(bb_not_taken.taken_branch, None)
self.assertEqual(bb_not_taken.not_taken_branch, None)
self.assertEqual(bb_not_taken.direct_branch, 0x08048496)
# Recover "func_1" function.
bbs, call_targets = recoverer.build(0x0804843b, 0x8048453)
self.assertEqual(len(bbs), 1)
cfg = ControlFlowGraph(bbs, name="main")
self.assertEqual(cfg.start_address, 0x0804843b)
self.assertEqual(cfg.end_address, 0x8048453)
self.assertEqual(len(cfg.basic_blocks), 1)
# Recover "func_2" function.
bbs, call_targets = recoverer.build(0x08048454, 0x0804846c)
self.assertEqual(len(bbs), 1)
cfg = ControlFlowGraph(bbs, name="main")
self.assertEqual(cfg.start_address, 0x08048454)
self.assertEqual(cfg.end_address, 0x0804846c)
self.assertEqual(len(cfg.basic_blocks), 1)
def main():
unittest.main()
if __name__ == '__main__':
main()
| 2,853 |
348 | {"nom":"<NAME>","dpt":"Calvados","inscrits":264,"abs":55,"votants":209,"blancs":23,"nuls":4,"exp":182,"res":[{"panneau":"1","voix":105},{"panneau":"2","voix":77}]} | 68 |
777 | <filename>chrome/android/javatests/src/org/chromium/chrome/browser/partnercustomizations/PartnerDisableIncognitoModeIntegrationTest.java<gh_stars>100-1000
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.partnercustomizations;
import android.content.Context;
import android.net.Uri;
import android.os.Bundle;
import android.support.test.filters.MediumTest;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.PopupMenu;
import org.chromium.base.ThreadUtils;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.RetryOnFailure;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.preferences.PrefServiceBridge;
import org.chromium.chrome.test.partnercustomizations.TestPartnerBrowserCustomizationsProvider;
import org.chromium.content.browser.test.util.Criteria;
import org.chromium.content.browser.test.util.CriteriaHelper;
import org.chromium.net.test.EmbeddedTestServer;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
/**
* Integration tests for the partner disabling incognito mode feature.
*/
public class PartnerDisableIncognitoModeIntegrationTest extends
BasePartnerBrowserCustomizationIntegrationTest {
@Override
public void startMainActivity() throws InterruptedException {
// Each test will launch main activity, so purposefully omit here.
}
private void setParentalControlsEnabled(boolean enabled) {
Uri uri = PartnerBrowserCustomizations.buildQueryUri(
PartnerBrowserCustomizations.PARTNER_DISABLE_INCOGNITO_MODE_PATH);
Bundle bundle = new Bundle();
bundle.putBoolean(
TestPartnerBrowserCustomizationsProvider.INCOGNITO_MODE_DISABLED_KEY, enabled);
Context context = getInstrumentation().getTargetContext();
context.getContentResolver().call(uri, "setIncognitoModeDisabled", null, bundle);
}
private void assertIncognitoMenuItemEnabled(boolean enabled) throws ExecutionException {
Menu menu = ThreadUtils.runOnUiThreadBlocking(new Callable<Menu>() {
@Override
public Menu call() throws Exception {
// PopupMenu is a convenient way of building a temp menu.
PopupMenu tempMenu = new PopupMenu(
getActivity(), getActivity().findViewById(R.id.menu_anchor_stub));
tempMenu.inflate(R.menu.main_menu);
Menu menu = tempMenu.getMenu();
getActivity().prepareMenu(menu);
return menu;
}
});
for (int i = 0; i < menu.size(); ++i) {
MenuItem item = menu.getItem(i);
if (item.getItemId() == R.id.new_incognito_tab_menu_id && item.isVisible()) {
assertEquals("Menu item enabled state is not correct.", enabled, item.isEnabled());
}
}
}
private void waitForParentalControlsEnabledState(final boolean parentalControlsEnabled) {
CriteriaHelper.pollUiThread(new Criteria() {
@Override
public boolean isSatisfied() {
                // areParentalControlsEnabled is updated on a background thread, so we
                // also wait on isIncognitoModeEnabled to ensure the updates on the
                // UI thread have also been triggered.
boolean retVal = parentalControlsEnabled
== PartnerBrowserCustomizations.isIncognitoDisabled();
retVal &= parentalControlsEnabled
!= PrefServiceBridge.getInstance().isIncognitoModeEnabled();
return retVal;
}
});
}
private void toggleActivityForegroundState() {
ThreadUtils.runOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
getActivity().onPause();
}
});
ThreadUtils.runOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
getActivity().onStop();
}
});
ThreadUtils.runOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
getActivity().onStart();
}
});
ThreadUtils.runOnUiThreadBlocking(new Runnable() {
@Override
public void run() {
getActivity().onResume();
}
});
}
@MediumTest
@Feature({"DisableIncognitoMode"})
@RetryOnFailure
public void testIncognitoEnabledIfNoParentalControls() throws InterruptedException {
setParentalControlsEnabled(false);
startMainActivityOnBlankPage();
waitForParentalControlsEnabledState(false);
newIncognitoTabFromMenu();
}
@MediumTest
@Feature({"DisableIncognitoMode"})
public void testIncognitoMenuItemEnabledBasedOnParentalControls()
throws InterruptedException, ExecutionException {
setParentalControlsEnabled(true);
startMainActivityOnBlankPage();
waitForParentalControlsEnabledState(true);
assertIncognitoMenuItemEnabled(false);
setParentalControlsEnabled(false);
toggleActivityForegroundState();
waitForParentalControlsEnabledState(false);
assertIncognitoMenuItemEnabled(true);
}
@MediumTest
@Feature({"DisableIncognitoMode"})
public void testEnabledParentalControlsClosesIncognitoTabs() throws InterruptedException {
EmbeddedTestServer testServer = EmbeddedTestServer.createAndStartServer(
getInstrumentation().getContext());
try {
String[] testUrls = {
testServer.getURL("/chrome/test/data/android/about.html"),
testServer.getURL("/chrome/test/data/android/ok.txt"),
testServer.getURL("/chrome/test/data/android/test.html")
};
setParentalControlsEnabled(false);
startMainActivityOnBlankPage();
waitForParentalControlsEnabledState(false);
loadUrlInNewTab(testUrls[0], true);
loadUrlInNewTab(testUrls[1], true);
loadUrlInNewTab(testUrls[2], true);
loadUrlInNewTab(testUrls[0], false);
setParentalControlsEnabled(true);
toggleActivityForegroundState();
waitForParentalControlsEnabledState(true);
CriteriaHelper.pollInstrumentationThread(Criteria.equals(0, new Callable<Integer>() {
@Override
public Integer call() {
return incognitoTabsCount();
}
}));
} finally {
testServer.stopAndDestroyServer();
}
}
}
| 2,868 |
769 | #include <string.h> // For memcpy
#include <stm32f10x_gpio.h>
#include <stm32f10x_rcc.h>
#include <stm32f10x_spi.h>
#include <pcf8812.h>
#include <delay.h>
#include <font5x7.h>
uint8_t vRAM[917]; // Display buffer
void PCF8812_Init() {
// Configure pins as output with Push-Pull
RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOA,ENABLE);
GPIO_InitTypeDef PORT;
PORT.GPIO_Speed = GPIO_Speed_50MHz;
PORT.GPIO_Mode = GPIO_Mode_Out_PP;
PORT.GPIO_Pin = PCF8812_MOSI_PIN;
GPIO_Init(PCF8812_MOSI_PORT,&PORT);
PORT.GPIO_Pin = PCF8812_SCK_PIN;
GPIO_Init(PCF8812_SCK_PORT,&PORT);
PORT.GPIO_Pin = PCF8812_CS_PIN;
GPIO_Init(PCF8812_CS_PORT,&PORT);
PORT.GPIO_Pin = PCF8812_PWR_PIN;
GPIO_Init(PCF8812_PWR_PORT,&PORT);
PORT.GPIO_Pin = PCF8812_DC_PIN;
GPIO_Init(PCF8812_DC_PORT,&PORT);
PORT.GPIO_Pin = PCF8812_RES_PIN;
GPIO_Init(PCF8812_RES_PORT,&PORT);
CS_H();
RES_H();
DC_L();
PWR_L();
}
// PCF8812 power on
void PCF8812_PowerOn(void) {
CS_L();
RES_L();
Delay_ms(20);
PWR_H();
Delay_ms(20);
RES_H();
}
// Hardware reset of PCF8812
void PCF8812_Reset(void) {
RES_L();
RES_H();
}
// Software SPI send byte
void PCF8812_Write(uint8_t data) {
uint8_t i;
for (i = 0; i < 8; i++) {
if (data & 0x80) MOSI_H(); else MOSI_L();
data <<= 1;
SCK_L();
SCK_H();
}
}
// Set RAM address (Y - bank number, X - position in bank)
void PCF8812_SetXY(uint8_t X, uint8_t Y) {
DC_L();
PCF8812_Write(0x40 | Y); // Select display RAM bank (0..8)
PCF8812_Write(0x80 | X); // Set X address (0..101)
}
// Send vRAM buffer into display
void PCF8812_Flush(void) {
uint32_t i;
DC_L();
PCF8812_Write(0x40); // Select display RAM bank 0
PCF8812_Write(0x80); // Set column 0
DC_H();
for (i = 0; i < 816; i++) PCF8812_Write(vRAM[i]);
}
// Fill vRAM with byte pattern
void PCF8812_Fill(uint8_t pattern) {
uint32_t i;
for (i = 0; i < 816; i++) vRAM[i] = pattern;
}
// Set pixel in vRAM buffer
void PCF8812_SetPixel(uint8_t X, uint8_t Y) {
vRAM[((Y / 8) * 102) + X] |= 1 << (Y % 8);
}
// Clear pixel in vRAM buffer
void PCF8812_ResetPixel(uint8_t X, uint8_t Y) {
vRAM[((Y / 8) * 102) + X] &= ~(1 << (Y % 8));
}
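// Draw a horizontal line from X1 to X2 at row Y (set or clear pixels)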
void PCF8812_HLine(uint8_t X1, uint8_t X2, uint8_t Y, PSetReset_TypeDef SR) {
uint8_t x;
if (SR == PSet) {
for (x = X1; x <= X2; x++) PCF8812_SetPixel(x,Y);
} else {
for (x = X1; x <= X2; x++) PCF8812_ResetPixel(x,Y);
}
}
void PCF8812_VLine(uint8_t X, uint8_t Y1, uint8_t Y2, PSetReset_TypeDef SR) {
uint8_t y;
if (SR == PSet) {
for (y = Y1; y <= Y2; y++) PCF8812_SetPixel(X,y);
} else {
for (y = Y1; y <= Y2; y++) PCF8812_ResetPixel(X,y);
}
}
void PCF8812_Rect(uint8_t X1, uint8_t Y1, uint8_t X2, uint8_t Y2, PSetReset_TypeDef SR) {
PCF8812_HLine(X1,X2,Y1,SR);
PCF8812_HLine(X1,X2,Y2,SR);
PCF8812_VLine(X1,Y1 + 1,Y2 - 1,SR);
PCF8812_VLine(X2,Y1 + 1,Y2 - 1,SR);
}
void PCF8812_FillRect(uint8_t X1, uint8_t Y1, uint8_t X2, uint8_t Y2, PSetReset_TypeDef SR) {
uint8_t y;
for (y = Y1; y <= Y2; y++) PCF8812_HLine(X1,X2,y,SR);
}
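// Draw a line from (X1,Y1) to (X2,Y2) using a Bresenham-style algorithm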
void PCF8812_Line(int16_t X1, int16_t Y1, int16_t X2, int16_t Y2) {
int16_t dX = X2-X1;
int16_t dY = Y2-Y1;
int16_t dXsym = (dX > 0) ? 1 : -1;
int16_t dYsym = (dY > 0) ? 1 : -1;
if (dX == 0) {
if (Y2 > Y1) PCF8812_VLine(X1,Y1,Y2,PSet); else PCF8812_VLine(X1,Y2,Y1,PSet);
return;
}
if (dY == 0) {
if (X2 > X1) PCF8812_HLine(X1,X2,Y1,PSet); else PCF8812_HLine(X2,X1,Y1,PSet);
return;
}
dX *= dXsym;
dY *= dYsym;
int16_t dX2 = dX << 1;
int16_t dY2 = dY << 1;
int16_t di;
if (dX >= dY) {
di = dY2 - dX;
while (X1 != X2) {
PCF8812_SetPixel(X1,Y1);
X1 += dXsym;
if (di < 0) {
di += dY2;
} else {
di += dY2 - dX2;
Y1 += dYsym;
}
}
} else {
di = dX2 - dY;
while (Y1 != Y2) {
PCF8812_SetPixel(X1,Y1);
Y1 += dYsym;
if (di < 0) {
di += dX2;
} else {
di += dX2 - dY2;
X1 += dXsym;
}
}
}
PCF8812_SetPixel(X1,Y1);
}
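// Draw an ellipse centered at (X,Y) with semi-axes A (horizontal) and B (vertical)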
void PCF8812_Ellipse(uint16_t X, uint16_t Y, uint16_t A, uint16_t B) {
int16_t Xc = 0, Yc = B;
long A2 = (long)A*A, B2 = (long)B*B;
long C1 = -(A2/4 + A % 2 + B2);
long C2 = -(B2/4 + B % 2 + A2);
long C3 = -(B2/4 + B % 2);
long t = -A2 * Yc;
long dXt = B2*Xc*2, dYt = -A2*Yc*2;
long dXt2 = B2*2, dYt2 = A2*2;
while (Yc >= 0 && Xc <= A) {
PCF8812_SetPixel(X + Xc,Y + Yc);
if (Xc != 0 || Yc != 0) PCF8812_SetPixel(X - Xc,Y - Yc);
if (Xc != 0 && Yc != 0) {
PCF8812_SetPixel(X + Xc,Y - Yc);
PCF8812_SetPixel(X - Xc,Y + Yc);
}
if (t + Xc*B2 <= C1 || t + Yc*A2 <= C3) {
Xc++;
dXt += dXt2;
t += dXt;
} else if (t - Yc*A2 > C2) {
Yc--;
dYt += dYt2;
t += dYt;
} else {
Xc++;
Yc--;
dXt += dXt2;
dYt += dYt2;
t += dXt;
t += dYt;
}
}
}
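// Draw a single 5x7 font character at (X,Y); opaque mode also clears background pixels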
void PCF8812_PutChar5x7(uint8_t X, uint8_t Y, uint8_t Char, Opaque_TypeDef bckgnd) {
uint16_t i,j;
uint8_t buffer[5],tmpCh;
memcpy(buffer,&Font5x7[(Char - 32) * 5],5);
if (bckgnd == opaque) {
for (i = 0; i < 5; i++) {
tmpCh = buffer[i];
for (j = 0; j < 8; j++) {
if ((tmpCh >> j) & 0x01) PCF8812_SetPixel(X + i,Y + j); else PCF8812_ResetPixel(X + i,Y + j);
}
}
} else {
for (i = 0; i < 5; i++) {
tmpCh = buffer[i];
for (j = 0; j < 8; j++) {
if ((tmpCh >> j) & 0x01) PCF8812_SetPixel(X + i,Y + j);
}
}
}
}
void PCF8812_PutStr5x7(uint8_t X, uint8_t Y, char *str, Opaque_TypeDef bckgnd) {
while (*str) {
PCF8812_PutChar5x7(X,Y,*str++,bckgnd);
if (X < 101 - 6) { X += 6; } else if (Y < 64 - 8) { X = 0; Y += 8; } else { X = 0; Y = 0; }
};
}
void PCF8812_PutInt5x7(uint8_t X, uint8_t Y, uint32_t num, Opaque_TypeDef bckgnd) {
char str[11]; // 10 chars max for UINT32_MAX
int i = 0;
do { str[i++] = num % 10 + '0'; } while ((num /= 10) > 0);
int strLen = i;
for (i--; i >= 0; i--) PCF8812_PutChar5x7(X + (strLen * 6) - ((i + 1) * 6),Y,str[i],bckgnd);
}
void PCF8812_PutHex5x7(uint8_t X, uint8_t Y, uint32_t num, Opaque_TypeDef bckgnd) {
char str[11]; // 10 chars max for UINT32_MAX
int i = 0;
do { str[i++] = "0123456789ABCDEF"[num % 0x10]; } while ((num /= 0x10) > 0);
str[i++] = 'x';
str[i++] = '0';
int strLen = i;
for (i--; i >= 0; i--) PCF8812_PutChar5x7(X + (strLen * 6) - ((i + 1) * 6),Y,str[i],bckgnd);
}
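// Illustrative usage sketch (not part of the original driver): a typical call
// sequence for bringing up the display and drawing a frame. It assumes the
// delay helpers from delay.h have already been initialized elsewhere.
void PCF8812_Demo(void) {
	PCF8812_Init();                           // configure GPIO pins
	PCF8812_PowerOn();                        // power-up and reset sequence
	PCF8812_Fill(0x00);                       // clear the frame buffer
	PCF8812_PutStr5x7(0,0,"Hello",opaque);    // draw text into vRAM
	PCF8812_Rect(0,10,101,30,PSet);           // rectangle outline
	PCF8812_Flush();                          // push vRAM to the display
}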
| 3,451 |
348 | <filename>docs/data/leg-t2/030/03004227.json
{"nom":"Saint-Ambroix","circ":"4ème circonscription","dpt":"Gard","inscrits":2630,"abs":1549,"votants":1081,"blancs":84,"nuls":33,"exp":964,"res":[{"nuance":"REM","nom":"Mme <NAME>","voix":527},{"nuance":"FN","nom":"Mme <NAME>","voix":437}]} | 119 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.openide.filesystems;
import java.io.File;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.netbeans.junit.NbTestCase;
import org.openide.filesystems.FileObject;
import org.openide.filesystems.FileUtil;
import org.openide.util.Lookup;
/**
* @author <NAME>
*/
public class FSFoldersInLookupTest extends NbTestCase {
static {
System.setProperty("org.openide.util.Lookup.paths", "MyServices:YourServices");
}
private FileObject root;
private Logger LOG;
public FSFoldersInLookupTest(String name) {
super(name);
}
@Override
protected Level logLevel() {
return Level.FINE;
}
@Override
protected void setUp() throws Exception {
if (System.getProperty("netbeans.user") == null) {
System.setProperty("netbeans.user", new File(getWorkDir(), "ud").getPath());
}
LOG = Logger.getLogger("Test." + getName());
root = FileUtil.getConfigRoot();
for (FileObject fo : root.getChildren()) {
fo.delete();
}
super.setUp();
}
public void testInterfaceFoundInMyServices() throws Exception {
assertNull("not found", Lookup.getDefault().lookup(Shared.class));
FileObject fo = FileUtil.createData(root, "MyServices/sub/dir/2/" + Shared.class.getName().replace('.', '-') + ".instance");
assertNotNull("found", Lookup.getDefault().lookup(Shared.class));
fo.delete();
assertNull("not found again", Lookup.getDefault().lookup(Shared.class));
}
public void testInterfaceFoundInMyServices2() throws Exception {
assertNull("not found", Lookup.getDefault().lookup(Shared.class));
FileObject fo = FileUtil.createData(root, "YourServices/kuk/" + Shared.class.getName().replace('.', '-') + ".instance");
assertNotNull("found", Lookup.getDefault().lookup(Shared.class));
fo.delete();
assertNull("not found again", Lookup.getDefault().lookup(Shared.class));
}
public static final class Shared {}
}
| 1,036 |
484 | <gh_stars>100-1000
package lesson1;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
/**
* Created by Joe on 2017/12/27.
 * Get and set parts of an array
*/
public class Nd4jGetAndSetParts {
public static void main(String[] args) {
INDArray nd = Nd4j.create(new float[]{1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}, new int[]{2, 6});
System.out.println("原始数组");
System.out.println(nd);
/*
        Get a single row
*/
System.out.println("获取数组中的一行");
INDArray singleRow = nd.getRow(0);
System.out.println(singleRow);
/*
        Get multiple rows
*/
System.out.println("获取数组中的多行");
INDArray multiRows = nd.getRows(0, 1);
System.out.println(multiRows);
/*
        Replace one of the rows
*/
System.out.println("替换原有数组中的一行");
INDArray replaceRow = Nd4j.create(new float[]{1, 3, 5, 7, 9, 11});
nd.putRow(0, replaceRow);
System.out.println(nd);
}
}
| 598 |
1,455 | /*
* Copyright 2016-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.data.redis.core;
import static org.assertj.core.api.Assertions.*;
import static org.mockito.Mockito.*;
import static org.springframework.test.util.ReflectionTestUtils.*;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicReference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.springframework.data.annotation.Id;
import org.springframework.data.redis.connection.RedisConnection;
import org.springframework.data.redis.connection.jedis.JedisConnectionFactory;
import org.springframework.data.redis.core.RedisKeyValueAdapter.EnableKeyspaceEvents;
import org.springframework.data.redis.core.convert.Bucket;
import org.springframework.data.redis.core.convert.KeyspaceConfiguration;
import org.springframework.data.redis.core.convert.MappingConfiguration;
import org.springframework.data.redis.core.convert.RedisData;
import org.springframework.data.redis.core.convert.SimpleIndexedPropertyValue;
import org.springframework.data.redis.core.index.IndexConfiguration;
import org.springframework.data.redis.core.mapping.RedisMappingContext;
import org.springframework.data.redis.listener.KeyExpirationEventMessageListener;
/**
* Unit tests for {@link RedisKeyValueAdapter}.
*
* @author <NAME>
* @author <NAME>
*/
@ExtendWith(MockitoExtension.class)
@MockitoSettings(strictness = Strictness.LENIENT)
class RedisKeyValueAdapterUnitTests {
private RedisKeyValueAdapter adapter;
private RedisTemplate<?, ?> template;
private RedisMappingContext context;
@Mock JedisConnectionFactory jedisConnectionFactoryMock;
@Mock RedisConnection redisConnectionMock;
@BeforeEach
void setUp() throws Exception {
template = new RedisTemplate<>();
template.setConnectionFactory(jedisConnectionFactoryMock);
template.afterPropertiesSet();
when(jedisConnectionFactoryMock.getConnection()).thenReturn(redisConnectionMock);
Properties keyspaceEventsConfig = new Properties();
keyspaceEventsConfig.put("notify-keyspace-events", "KEA");
when(redisConnectionMock.getConfig("notify-keyspace-events")).thenReturn(keyspaceEventsConfig);
context = new RedisMappingContext(new MappingConfiguration(new IndexConfiguration(), new KeyspaceConfiguration()));
context.afterPropertiesSet();
adapter = new RedisKeyValueAdapter(template, context);
adapter.afterPropertiesSet();
}
@AfterEach
void tearDown() throws Exception {
adapter.destroy();
}
@Test // DATAREDIS-507
void destroyShouldNotDestroyConnectionFactory() throws Exception {
adapter.destroy();
verify(jedisConnectionFactoryMock, never()).destroy();
}
@Test // DATAREDIS-512, DATAREDIS-530
void putShouldRemoveExistingIndexValuesWhenUpdating() {
RedisData rd = new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("_id", "1")));
rd.addIndexedData(new SimpleIndexedPropertyValue("persons", "firstname", "rand"));
when(redisConnectionMock.sMembers(Mockito.any(byte[].class)))
.thenReturn(new LinkedHashSet<>(Arrays.asList("persons:firstname:rand".getBytes())));
when(redisConnectionMock.del((byte[][]) any())).thenReturn(1L);
adapter.put("1", rd, "persons");
verify(redisConnectionMock, times(1)).sRem(Mockito.any(byte[].class), Mockito.any(byte[].class));
}
@Test // DATAREDIS-512
void putShouldNotTryToRemoveExistingIndexValuesWhenInsertingNew() {
RedisData rd = new RedisData(Bucket.newBucketFromStringMap(Collections.singletonMap("_id", "1")));
rd.addIndexedData(new SimpleIndexedPropertyValue("persons", "firstname", "rand"));
when(redisConnectionMock.sMembers(Mockito.any(byte[].class)))
.thenReturn(new LinkedHashSet<>(Arrays.asList("persons:firstname:rand".getBytes())));
when(redisConnectionMock.del((byte[][]) any())).thenReturn(0L);
adapter.put("1", rd, "persons");
verify(redisConnectionMock, never()).sRem(Mockito.any(byte[].class), (byte[][]) any());
}
@Test // DATAREDIS-491
void shouldInitKeyExpirationListenerOnStartup() throws Exception {
adapter.destroy();
adapter = new RedisKeyValueAdapter(template, context);
adapter.setEnableKeyspaceEvents(EnableKeyspaceEvents.ON_STARTUP);
adapter.afterPropertiesSet();
KeyExpirationEventMessageListener listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter,
"expirationListener")).get();
assertThat(listener).isNotNull();
}
@Test // DATAREDIS-491
void shouldInitKeyExpirationListenerOnFirstPutWithTtl() throws Exception {
adapter.destroy();
adapter = new RedisKeyValueAdapter(template, context);
adapter.setEnableKeyspaceEvents(EnableKeyspaceEvents.ON_DEMAND);
adapter.afterPropertiesSet();
KeyExpirationEventMessageListener listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter,
"expirationListener")).get();
assertThat(listener).isNull();
adapter.put("should-NOT-start-listener", new WithoutTimeToLive(), "keyspace");
listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter, "expirationListener")).get();
assertThat(listener).isNull();
adapter.put("should-start-listener", new WithTimeToLive(), "keyspace");
listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter, "expirationListener")).get();
assertThat(listener).isNotNull();
}
@Test // DATAREDIS-491
void shouldNeverInitKeyExpirationListener() throws Exception {
adapter.destroy();
adapter = new RedisKeyValueAdapter(template, context);
adapter.afterPropertiesSet();
KeyExpirationEventMessageListener listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter,
"expirationListener")).get();
assertThat(listener).isNull();
adapter.put("should-NOT-start-listener", new WithoutTimeToLive(), "keyspace");
listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter, "expirationListener")).get();
assertThat(listener).isNull();
adapter.put("should-start-listener", new WithTimeToLive(), "keyspace");
listener = ((AtomicReference<KeyExpirationEventMessageListener>) getField(adapter, "expirationListener")).get();
assertThat(listener).isNull();
}
static class WithoutTimeToLive {
@Id String id;
}
@RedisHash(timeToLive = 10)
static class WithTimeToLive {
@Id String id;
}
}
| 2,339 |
354 | <gh_stars>100-1000
{
"max_train_length": 100,
"batch_size" : 80,
"features" : [ ],
"feature_sizes": [ ],
"use_se_marker": false,
"dev_batch_size": 500,
"word_embedding" : "glove100",
"lstm_hidden_size": 100,
"num_lstm_layers" : 2,
"input_dropout_prob":0.0,
"recurrent_dropout_prob": 0.1,
"max_grad_norm": 1.0,
"lstm_cell":"highway",
"per_layer_dropout":true,
"trainer" : "Adadelta",
"max_epochs": 30,
"checkpoint_every_x_epochs": 1,
"gpu_memory_fraction": 0.2
}
| 246 |
777 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/metrics/leak_detector/ranked_set.h"
#include <stddef.h>
#include <stdint.h>
#include <algorithm>
#include "base/macros.h"
#include "components/metrics/leak_detector/custom_allocator.h"
#include "components/metrics/leak_detector/leak_detector_value_type.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace metrics {
namespace leak_detector {
namespace {
// Makes it easier to instantiate LeakDetectorValueTypes.
LeakDetectorValueType Value(uint32_t value) {
return LeakDetectorValueType(value);
}
} // namespace
class RankedSetTest : public ::testing::Test {
public:
RankedSetTest() {}
void SetUp() override { CustomAllocator::Initialize(); }
void TearDown() override { EXPECT_TRUE(CustomAllocator::Shutdown()); }
private:
DISALLOW_COPY_AND_ASSIGN(RankedSetTest);
};
TEST_F(RankedSetTest, Iterators) {
RankedSet set(10);
EXPECT_TRUE(set.begin() == set.end());
set.Add(Value(0x1234), 100);
EXPECT_FALSE(set.begin() == set.end());
}
TEST_F(RankedSetTest, SingleInsertion) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 100);
EXPECT_EQ(1U, set.size());
auto iter = set.begin();
EXPECT_EQ(0x1234U, iter->value.size());
EXPECT_EQ(100, iter->count);
}
TEST_F(RankedSetTest, InOrderInsertion) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 100);
EXPECT_EQ(1U, set.size());
set.Add(Value(0x2345), 95);
EXPECT_EQ(2U, set.size());
set.Add(Value(0x3456), 90);
EXPECT_EQ(3U, set.size());
set.Add(Value(0x4567), 85);
EXPECT_EQ(4U, set.size());
set.Add(Value(0x5678), 80);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues[] = {
{Value(0x1234), 100}, {Value(0x2345), 95}, {Value(0x3456), 90},
{Value(0x4567), 85}, {Value(0x5678), 80},
};
size_t index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues));
EXPECT_EQ(kExpectedValues[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, ReverseOrderInsertion) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 0);
EXPECT_EQ(1U, set.size());
set.Add(Value(0x2345), 5);
EXPECT_EQ(2U, set.size());
set.Add(Value(0x3456), 10);
EXPECT_EQ(3U, set.size());
set.Add(Value(0x4567), 15);
EXPECT_EQ(4U, set.size());
set.Add(Value(0x5678), 20);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues[] = {
{Value(0x5678), 20}, {Value(0x4567), 15}, {Value(0x3456), 10},
{Value(0x2345), 5}, {Value(0x1234), 0},
};
size_t index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues));
EXPECT_EQ(kExpectedValues[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, UnorderedInsertion) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 15);
set.Add(Value(0x2345), 20);
set.Add(Value(0x3456), 10);
set.Add(Value(0x4567), 30);
set.Add(Value(0x5678), 25);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues1[] = {
{Value(0x4567), 30}, {Value(0x5678), 25}, {Value(0x2345), 20},
{Value(0x1234), 15}, {Value(0x3456), 10},
};
size_t index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues1));
EXPECT_EQ(kExpectedValues1[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues1[index].count, entry.count);
++index;
}
// Add more items.
set.Add(Value(0x6789), 35);
set.Add(Value(0x789a), 40);
set.Add(Value(0x89ab), 50);
set.Add(Value(0x9abc), 5);
set.Add(Value(0xabcd), 0);
EXPECT_EQ(10U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues2[] = {
{Value(0x89ab), 50}, {Value(0x789a), 40}, {Value(0x6789), 35},
{Value(0x4567), 30}, {Value(0x5678), 25}, {Value(0x2345), 20},
{Value(0x1234), 15}, {Value(0x3456), 10}, {Value(0x9abc), 5},
{Value(0xabcd), 0},
};
index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues2));
EXPECT_EQ(kExpectedValues2[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues2[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, UnorderedInsertionWithSameCounts) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 20);
set.Add(Value(0x2345), 20);
set.Add(Value(0x3456), 30);
set.Add(Value(0x4567), 30);
set.Add(Value(0x5678), 20);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in. Entries
// with the same count should be ordered from lowest value to highest value.
const RankedSet::Entry kExpectedValues1[] = {
{Value(0x3456), 30}, {Value(0x4567), 30}, {Value(0x1234), 20},
{Value(0x2345), 20}, {Value(0x5678), 20},
};
size_t index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues1));
EXPECT_EQ(kExpectedValues1[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues1[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, RepeatedEntries) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 20);
set.Add(Value(0x3456), 30);
set.Add(Value(0x1234), 20);
set.Add(Value(0x3456), 30);
set.Add(Value(0x4567), 30);
set.Add(Value(0x4567), 30);
EXPECT_EQ(3U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues1[] = {
{Value(0x3456), 30}, {Value(0x4567), 30}, {Value(0x1234), 20},
};
size_t index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues1));
EXPECT_EQ(kExpectedValues1[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues1[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, InsertionWithOverflow) {
RankedSet set(5);
EXPECT_EQ(0U, set.size());
set.Add(Value(0x1234), 15);
set.Add(Value(0x2345), 20);
set.Add(Value(0x3456), 10);
set.Add(Value(0x4567), 30);
set.Add(Value(0x5678), 25);
EXPECT_EQ(5U, set.size());
// These values will not make it into the set, which is now full.
set.Add(Value(0x6789), 0);
EXPECT_EQ(5U, set.size());
set.Add(Value(0x789a), 5);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues1[] = {
{Value(0x4567), 30}, {Value(0x5678), 25}, {Value(0x2345), 20},
{Value(0x1234), 15}, {Value(0x3456), 10},
};
size_t index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues1));
EXPECT_EQ(kExpectedValues1[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues1[index].count, entry.count);
++index;
}
// Insert some more values that go in the middle of the set.
set.Add(Value(0x89ab), 27);
EXPECT_EQ(5U, set.size());
set.Add(Value(0x9abc), 22);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues2[] = {
{Value(0x4567), 30}, {Value(0x89ab), 27}, {Value(0x5678), 25},
{Value(0x9abc), 22}, {Value(0x2345), 20},
};
index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues2));
EXPECT_EQ(kExpectedValues2[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues2[index].count, entry.count);
++index;
}
// Insert some more values at the front of the set.
set.Add(Value(0xabcd), 40);
EXPECT_EQ(5U, set.size());
set.Add(Value(0xbcde), 35);
EXPECT_EQ(5U, set.size());
// Iterate through the contents to make sure they match what went in.
const RankedSet::Entry kExpectedValues3[] = {
{Value(0xabcd), 40}, {Value(0xbcde), 35}, {Value(0x4567), 30},
{Value(0x89ab), 27}, {Value(0x5678), 25},
};
index = 0;
for (const auto& entry : set) {
EXPECT_LT(index, arraysize(kExpectedValues3));
EXPECT_EQ(kExpectedValues3[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues3[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, MoveOperation) {
const RankedSet::Entry kExpectedValues[] = {
{Value(0x89ab), 50}, {Value(0x789a), 40}, {Value(0x6789), 35},
{Value(0x4567), 30}, {Value(0x5678), 25}, {Value(0x2345), 20},
{Value(0x1234), 15}, {Value(0x3456), 10}, {Value(0x9abc), 5},
{Value(0xabcd), 0},
};
RankedSet source(10);
for (const RankedSet::Entry& entry : kExpectedValues) {
source.Add(entry.value, entry.count);
}
EXPECT_EQ(10U, source.size());
RankedSet dest(25); // This should be changed by the move.
dest = std::move(source);
EXPECT_EQ(10U, dest.size());
EXPECT_EQ(10U, dest.max_size());
size_t index = 0;
for (const auto& entry : dest) {
EXPECT_LT(index, arraysize(kExpectedValues));
EXPECT_EQ(kExpectedValues[index].value.size(), entry.value.size());
EXPECT_EQ(kExpectedValues[index].count, entry.count);
++index;
}
}
TEST_F(RankedSetTest, Find) {
RankedSet set(10);
EXPECT_EQ(0U, set.size());
set.AddSize(0x1234, 15);
set.AddSize(0x2345, 20);
set.AddSize(0x3456, 10);
set.AddSize(0x4567, 30);
set.AddSize(0x5678, 25);
EXPECT_EQ(5U, set.size());
auto iter = set.FindSize(0x1234);
EXPECT_TRUE(iter != set.end());
EXPECT_EQ(0x1234U, iter->value.size());
EXPECT_EQ(15, iter->count);
iter = set.FindSize(0x2345);
EXPECT_TRUE(iter != set.end());
EXPECT_EQ(0x2345U, iter->value.size());
EXPECT_EQ(20, iter->count);
iter = set.FindSize(0x3456);
EXPECT_TRUE(iter != set.end());
EXPECT_EQ(0x3456U, iter->value.size());
EXPECT_EQ(10, iter->count);
iter = set.FindSize(0x4567);
EXPECT_TRUE(iter != set.end());
EXPECT_EQ(0x4567U, iter->value.size());
EXPECT_EQ(30, iter->count);
iter = set.FindSize(0x5678);
EXPECT_TRUE(iter != set.end());
EXPECT_EQ(0x5678U, iter->value.size());
EXPECT_EQ(25, iter->count);
}
} // namespace leak_detector
} // namespace metrics
| 4,447 |
806 | <filename>killers/nokia/com.evenwell.powersaving.g3/sources/android/support/v4/widget/ListPopupWindowCompat.java<gh_stars>100-1000
package android.support.v4.widget;
import android.os.Build.VERSION;
import android.view.View;
import android.view.View.OnTouchListener;
public final class ListPopupWindowCompat {
static final ListPopupWindowImpl IMPL;
interface ListPopupWindowImpl {
OnTouchListener createDragToOpenListener(Object obj, View view);
}
static class BaseListPopupWindowImpl implements ListPopupWindowImpl {
BaseListPopupWindowImpl() {
}
public OnTouchListener createDragToOpenListener(Object listPopupWindow, View src) {
return null;
}
}
static class KitKatListPopupWindowImpl extends BaseListPopupWindowImpl {
KitKatListPopupWindowImpl() {
}
public OnTouchListener createDragToOpenListener(Object listPopupWindow, View src) {
return ListPopupWindowCompatKitKat.createDragToOpenListener(listPopupWindow, src);
}
}
static {
if (VERSION.SDK_INT >= 19) {
IMPL = new KitKatListPopupWindowImpl();
} else {
IMPL = new BaseListPopupWindowImpl();
}
}
private ListPopupWindowCompat() {
}
public static OnTouchListener createDragToOpenListener(Object listPopupWindow, View src) {
return IMPL.createDragToOpenListener(listPopupWindow, src);
}
}
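// Illustrative usage sketch (not part of the original class): attach the
// drag-to-open listener to the anchor view of a support-library popup. The
// "listPopupWindow" object is whatever popup implementation the compat helper
// supports on the current platform version; treat the pairing as an assumption.
class ListPopupWindowCompatExample {
    static void enableDragToOpen(Object listPopupWindow, View anchorView) {
        OnTouchListener dragListener =
                ListPopupWindowCompat.createDragToOpenListener(listPopupWindow, anchorView);
        anchorView.setOnTouchListener(dragListener);
    }
}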
| 549 |
577 | <filename>fluentlenium-core/src/main/java/org/fluentlenium/core/action/WindowAction.java
package org.fluentlenium.core.action;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Predicate;
import org.fluentlenium.core.FluentControl;
import org.fluentlenium.core.components.ComponentInstantiator;
import org.fluentlenium.core.domain.FluentWebElement;
import org.fluentlenium.core.switchto.FluentTargetLocator;
import org.fluentlenium.core.switchto.FluentTargetLocatorImpl;
import org.openqa.selenium.Dimension;
import org.openqa.selenium.JavascriptExecutor;
import org.openqa.selenium.Point;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebDriverException;
/**
* Execute actions on active window.
*/
public class WindowAction {
private final FluentControl fluentControl;
private final ComponentInstantiator instantiator;
private final WebDriver driver;
/**
* Creates a new window action.
*
* @param control control interface
* @param instantiator component instantiator
* @param driver selenium driver
*/
public WindowAction(FluentControl control, ComponentInstantiator instantiator, WebDriver driver) {
this.driver = driver;
this.instantiator = instantiator;
fluentControl = control;
}
/**
* Gets the page title.
*
* @return page title text
*/
public String title() {
return driver.getTitle();
}
/**
* Maximize the current window.
*
* @return the WindowAction object itself
*/
public WindowAction maximize() {
driver.manage().window().maximize();
return this;
}
/**
     * Make the current window full screen.
*
* @return the WindowAction object itself
*/
public WindowAction fullscreen() {
driver.manage().window().fullscreen();
return this;
}
/**
* Sets the current window size.
*
* @param size size of the window
* @return the WindowAction object itself
*/
public WindowAction setSize(Dimension size) {
driver.manage().window().setSize(size);
return this;
}
/**
* Gets the current window size.
*
* @return the current window size
*/
public Dimension getSize() {
return driver.manage().window().getSize();
}
/**
* Sets the current window position.
*
* @param position position to set
* @return the WindowAction object itself
*/
public WindowAction setPosition(Point position) {
driver.manage().window().setPosition(position);
return this;
}
/**
* Gets the current window position.
*
* @return the WindowAction object itself
*/
public Point getPosition() {
return driver.manage().window().getPosition();
}
/**
* Clicks button, which opens new window and switches to newly opened window.
* <p>
* This method doesn't force opening window in new window, we assume the code under test will open new window.
*
* @param button button to be clicked
* @return handle of old (parent) window
*/
public String clickAndOpenNew(FluentWebElement button) {
String oldWindowHandle = driver.getWindowHandle();
Set<String> oldWindowHandles = driver.getWindowHandles();
button.click();
waitForNewWindowToOpen(oldWindowHandles);
Set<String> newWindowHandles = new HashSet<>(driver.getWindowHandles());
newWindowHandles.removeAll(oldWindowHandles);
String newWindowHandle = newWindowHandles.iterator().next();
switchTo(newWindowHandle);
waitForNewWindowStartLoading();
return oldWindowHandle;
}
/**
* Opens new window.
*
* @return handle of old (parent) window
*/
public String openNewAndSwitch() {
Set<String> oldWindowHandles = driver.getWindowHandles();
String oldWindowHandle = driver.getWindowHandle();
JavascriptExecutor jse = (JavascriptExecutor) driver;
jse.executeScript("window.open('someUrl', '_blank')");
waitForNewWindowToOpen(oldWindowHandles);
waitForNewWindowStartLoading();
switchToLast(oldWindowHandle);
return oldWindowHandle;
}
private void waitForNewWindowStartLoading() {
fluentControl.await().atMost(3, TimeUnit.SECONDS)
.ignoring(WebDriverException.class)
.until(() -> fluentControl.getDriver().getCurrentUrl() != null);
}
/**
* Clicks button, which closes current window and switches to last window (in set returned by
* {@link WebDriver#getWindowHandles()}).
* <p>
* If the last window is not the target window, use {@link #switchTo(String)}
* to focus on desired window
*
* @param button button to be clicked
*/
public void clickAndCloseCurrent(FluentWebElement button) {
String currentWindowHandle = driver.getWindowHandle();
button.click();
fluentControl.await().untilWindow(currentWindowHandle).notDisplayed();
switchToLast();
}
/**
* Close the current window.
*/
public void close() {
driver.close();
}
/**
* Create a switch target locator.
*
* @return an object to perform switch on various target.
*/
public FluentTargetLocator<WindowAction> switchTo() {
return new FluentTargetLocatorImpl<>(this, instantiator, driver.switchTo());
}
/**
     * Switches to the most recently opened window.
*
* @return the WindowAction object itself
*/
public WindowAction switchToLast() {
List<String> windowHandles = new ArrayList<>(driver.getWindowHandles());
driver.switchTo().window(windowHandles.get(windowHandles.size() - 1));
return this;
}
/**
     * Switches to the most recently opened window, excluding the one provided as a parameter.
     *
     * @param nameOrHandleToExclude window name or handle to exclude when more than one window is open
* @return the WindowAction object itself
*/
public WindowAction switchToLast(String nameOrHandleToExclude) {
List<String> windowHandles = new ArrayList<>(driver.getWindowHandles());
if (windowHandles.size() > 1) {
windowHandles.remove(nameOrHandleToExclude);
}
driver.switchTo().window(windowHandles.get(windowHandles.size() - 1));
return this;
}
/**
* Switches to particular window by handle.
*
* @param nameOrHandle window name or handle
* @return the WindowAction object itself
*/
public WindowAction switchTo(String nameOrHandle) {
return switchTo().window(nameOrHandle);
}
/**
* Gets the current window object.
*
* @return the WebDriver.Window object
*/
public WebDriver.Window getWindow() {
return driver.manage().window();
}
private class WindowHandlesCountIs implements Predicate<FluentControl> {
private final int expectedValue;
WindowHandlesCountIs(int expectedValue) {
this.expectedValue = expectedValue;
}
@Override
public boolean test(FluentControl fluentControl) {
return driver.getWindowHandles().size() == expectedValue;
}
}
private void waitForNewWindowToOpen(Set<String> oldWindowHandles) {
fluentControl.await().atMost(10, TimeUnit.SECONDS)
.withMessage("Timed out waiting for new window to open.")
.untilPredicate(new WindowHandlesCountIs(oldWindowHandles.size() + 1));
}
}
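// Illustrative usage sketch (not part of the original class): a helper that
// clicks an element expected to open a new window, runs some work in that
// window and then returns to the parent. Only methods declared above are used.
class WindowActionExample {
    static void inNewWindow(WindowAction windowAction, FluentWebElement openerButton, Runnable work) {
        String parentHandle = windowAction.clickAndOpenNew(openerButton); // switches to the new window
        try {
            work.run();                                                   // interact with the new window
        } finally {
            windowAction.switchTo(parentHandle);                          // back to the parent window
        }
    }
}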
| 2,867 |
381 | <reponame>m4sterchain/mesapy
import sys
from rpython.rlib import jit
from rpython.rlib.rarithmetic import intmask
from rpython.rlib.objectmodel import specialize
from rpython.rtyper.lltypesystem import lltype, rffi
from pypy.interpreter.error import oefmt
from pypy.interpreter.baseobjspace import W_Root
from pypy.module import _cffi_backend
from pypy.module._cffi_backend.ctypeobj import W_CType
from pypy.module._cffi_backend import cffi_opcode, newtype, ctypestruct
from pypy.module._cffi_backend import ctypeprim
from pypy.module._cffi_backend import parse_c_type
@specialize.ll()
def getop(op):
return rffi.cast(rffi.SIGNED, op) & 0xFF
@specialize.ll()
def getarg(op):
return rffi.cast(rffi.SIGNED, op) >> 8
class RealizeCache:
NAMES = [None,
"_Bool",
"char",
"signed char",
"unsigned char",
"short",
"unsigned short",
"int",
"unsigned int",
"long",
"unsigned long",
"long long",
"unsigned long long",
"float",
"double",
"long double",
"wchar_t",
"int8_t",
"uint8_t",
"int16_t",
"uint16_t",
"int32_t",
"uint32_t",
"int64_t",
"uint64_t",
"intptr_t",
"uintptr_t",
"ptrdiff_t",
"size_t",
"ssize_t",
"int_least8_t",
"uint_least8_t",
"int_least16_t",
"uint_least16_t",
"int_least32_t",
"uint_least32_t",
"int_least64_t",
"uint_least64_t",
"int_fast8_t",
"uint_fast8_t",
"int_fast16_t",
"uint_fast16_t",
"int_fast32_t",
"uint_fast32_t",
"int_fast64_t",
"uint_fast64_t",
"intmax_t",
"uintmax_t",
"float _Complex",
"double _Complex",
"char16_t",
"char32_t",
]
assert len(NAMES) == cffi_opcode._NUM_PRIM
def __init__(self, space):
self.space = space
self.all_primitives = [None] * cffi_opcode._NUM_PRIM
self.file_struct = None
def get_file_struct(self):
if self.file_struct is None:
self.file_struct = ctypestruct.W_CTypeStruct(self.space, "FILE")
return self.file_struct
def get_primitive_type(ffi, num):
space = ffi.space
if not (0 <= num < cffi_opcode._NUM_PRIM):
if num == cffi_opcode._UNKNOWN_PRIM:
raise oefmt(ffi.w_FFIError, "primitive integer type with an "
"unexpected size (or not an integer type at all)")
elif num == cffi_opcode._UNKNOWN_FLOAT_PRIM:
raise oefmt(ffi.w_FFIError, "primitive floating-point type with an "
"unexpected size (or not a float type at all)")
elif num == cffi_opcode._UNKNOWN_LONG_DOUBLE:
raise oefmt(ffi.w_FFIError, "primitive floating-point type is "
"'long double', not supported for now with "
"the syntax 'typedef double... xxx;'")
else:
raise oefmt(space.w_NotImplementedError, "prim=%d", num)
realize_cache = space.fromcache(RealizeCache)
w_ctype = realize_cache.all_primitives[num]
if w_ctype is None:
if num == cffi_opcode.PRIM_VOID:
w_ctype = newtype.new_void_type(space)
else:
assert RealizeCache.NAMES[num]
w_ctype = newtype.new_primitive_type(space, RealizeCache.NAMES[num])
realize_cache.all_primitives[num] = w_ctype
return w_ctype
def get_array_type(ffi, opcodes, itemindex, length):
w_ctitem = realize_c_type(ffi, opcodes, itemindex)
w_ctitemptr = newtype.new_pointer_type(ffi.space, w_ctitem)
return newtype._new_array_type(ffi.space, w_ctitemptr, length)
FUNCPTR_FETCH_CHARP = lltype.Ptr(lltype.FuncType([rffi.CCHARP], lltype.Void))
FUNCPTR_FETCH_LONGLONG = lltype.Ptr(lltype.FuncType(
[lltype.Ptr(parse_c_type.GETCONST_S)], rffi.INT))
def realize_global_int(ffi, g, gindex):
fetch_fnptr = rffi.cast(FUNCPTR_FETCH_LONGLONG, g.c_address)
with lltype.scoped_alloc(parse_c_type.GETCONST_S) as p_value:
p_value.c_ctx = ffi.ctxobj.ctx
rffi.setintfield(p_value, 'c_gindex', gindex)
neg = fetch_fnptr(p_value)
value = p_value.c_value
neg = rffi.cast(lltype.Signed, neg)
if neg == 0: # positive
if value <= rffi.cast(rffi.ULONGLONG, sys.maxint):
return ffi.space.newint(intmask(value))
else:
return ffi.space.newint(value)
elif neg == 1: # negative
value = rffi.cast(rffi.LONGLONG, value)
if value >= -sys.maxint-1:
return ffi.space.newint(intmask(value))
else:
return ffi.space.newint(value)
if neg == 2:
got = "%d (0x%x)" % (value, value)
else:
got = "%d" % (rffi.cast(rffi.LONGLONG, value),)
raise oefmt(ffi.w_FFIError,
"the C compiler says '%s' is equal to %s, "
"but the cdef disagrees", rffi.charp2str(g.c_name), got)
class W_RawFuncType(W_Root):
"""Temporary: represents a C function type (not a function pointer)"""
_immutable_fields_ = ['nostruct_ctype', 'nostruct_locs', 'nostruct_nargs']
_ctfuncptr = None
nostruct_ctype = None
nostruct_locs = None
nostruct_nargs = 0
def __init__(self, opcodes, base_index):
self.opcodes = opcodes
self.base_index = base_index
def _unpack(self, ffi):
opcodes = self.opcodes
base_index = self.base_index
assert getop(opcodes[base_index]) == cffi_opcode.OP_FUNCTION
fret = realize_c_type(ffi, opcodes, getarg(opcodes[base_index]))
base_index += 1
num_args = 0
OP_FUNCTION_END = cffi_opcode.OP_FUNCTION_END
while getop(opcodes[base_index + num_args]) != OP_FUNCTION_END:
num_args += 1
#
ellipsis = (getarg(opcodes[base_index + num_args]) & 0x01) != 0
abi = (getarg(opcodes[base_index + num_args]) & 0xFE)
if abi == 0:
abi = _cffi_backend.FFI_DEFAULT_ABI
elif abi == 2:
if _cffi_backend.has_stdcall:
abi = _cffi_backend.FFI_STDCALL
else:
abi = _cffi_backend.FFI_DEFAULT_ABI
else:
raise oefmt(ffi.w_FFIError, "abi number %d not supported", abi)
#
fargs = [realize_c_type(ffi, opcodes, base_index + i)
for i in range(num_args)]
return fargs, fret, ellipsis, abi
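    # Illustrative layout (an assumption for explanation, not taken from the original source):
    # for a declaration like "int f(long, char, ...)" the opcodes starting at base_index look
    # roughly like
    #   OP_FUNCTION(arg = index of "int"), <opcode for "long">, <opcode for "char">,
    #   OP_FUNCTION_END(arg = 0x01)   # low bit set => ellipsis; abi bits == 0 => default ABI
    # which is exactly the shape _unpack() walks through above.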
def unwrap_as_fnptr(self, ffi):
if self._ctfuncptr is None:
fargs, fret, ellipsis, abi = self._unpack(ffi)
self._ctfuncptr = newtype._new_function_type(
ffi.space, fargs, fret, ellipsis, abi)
return self._ctfuncptr
def unwrap_as_fnptr_in_elidable(self):
assert self._ctfuncptr is not None
return self._ctfuncptr
@jit.dont_look_inside
def prepare_nostruct_fnptr(self, ffi):
# tweaked version: instead of returning the ctfuncptr
# corresponding exactly to the OP_FUNCTION ... OP_FUNCTION_END
# opcodes, this builds in self.nostruct_ctype another one in
        # which the struct args are replaced with ptr-to-struct, and
# a struct return value is replaced with a hidden first arg of
# type ptr-to-struct. This is how recompiler.py produces
# trampoline functions for PyPy. (Same with complex numbers.)
if self.nostruct_ctype is None:
fargs, fret, ellipsis, abi = self._unpack(ffi)
# 'locs' will be a string of the same length as the final fargs,
# containing 'A' where a struct argument was detected, and 'R'
# in first position if a struct return value was detected
locs = ['\x00'] * len(fargs)
for i in range(len(fargs)):
farg = fargs[i]
if (isinstance(farg, ctypestruct.W_CTypeStructOrUnion) or
isinstance(farg, ctypeprim.W_CTypePrimitiveComplex)):
farg = newtype.new_pointer_type(ffi.space, farg)
fargs[i] = farg
locs[i] = 'A'
if (isinstance(fret, ctypestruct.W_CTypeStructOrUnion) or
isinstance(fret, ctypeprim.W_CTypePrimitiveComplex)):
fret = newtype.new_pointer_type(ffi.space, fret)
fargs = [fret] + fargs
locs = ['R'] + locs
fret = newtype.new_void_type(ffi.space)
ctfuncptr = newtype._new_function_type(
ffi.space, fargs, fret, ellipsis, abi)
if locs == ['\x00'] * len(locs):
locs = None
else:
locs = ''.join(locs)
self.nostruct_ctype = ctfuncptr
self.nostruct_locs = locs
self.nostruct_nargs = len(ctfuncptr.fargs) - (locs is not None and
locs[0] == 'R')
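    # Illustrative example (an assumption, not from the original source): for a cdef'd
    #     struct point_s add(struct point_s a, int n);
    # the "no-struct" trampoline type built above becomes
    #     void add(struct point_s *result, struct point_s *a, int n);
    # with nostruct_locs == 'RA\x00' and nostruct_nargs == 2.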
def repr_fn_type(self, ffi, repl=""):
fargs, fret, ellipsis, abi = self._unpack(ffi)
argnames = [farg.name for farg in fargs]
if ellipsis:
argnames.append('...')
sargs = ', '.join(argnames)
sret1 = fret.name[:fret.name_position]
sret2 = fret.name[fret.name_position:]
if len(repl) > 0 and not sret1.endswith('*'):
repl = " " + repl
return '%s%s(%s)%s' % (sret1, repl, sargs, sret2)
def unexpected_fn_type(self, ffi):
raise oefmt(ffi.w_FFIError,
"the type '%s' is a function type, not a "
"pointer-to-function type", self.repr_fn_type(ffi))
def realize_c_type(ffi, opcodes, index):
"""Interpret an opcodes[] array. If opcodes == ffi.ctxobj.ctx.c_types,
store all the intermediate types back in the opcodes[].
"""
x = realize_c_type_or_func(ffi, opcodes, index)
if not isinstance(x, W_CType):
assert isinstance(x, W_RawFuncType)
raise x.unexpected_fn_type(ffi)
return x
def _realize_name(prefix, charp_src_name):
# "xyz" => "struct xyz"
# "$xyz" => "xyz"
# "$1" => "struct $1"
if (charp_src_name[0] == '$' and charp_src_name[1] != '$'
and not ('0' <= charp_src_name[1] <= '9')):
return rffi.charp2str(rffi.ptradd(charp_src_name, 1))
else:
return prefix + rffi.charp2str(charp_src_name)
def _realize_c_struct_or_union(ffi, sindex):
if sindex == cffi_opcode._IO_FILE_STRUCT:
# returns a single global cached opaque type
return ffi.space.fromcache(RealizeCache).get_file_struct()
s = ffi.ctxobj.ctx.c_struct_unions[sindex]
type_index = rffi.getintfield(s, 'c_type_index')
if ffi.cached_types[type_index] is not None:
return ffi.cached_types[type_index] #found already in the "primary" slot
space = ffi.space
w_ctype = None
c_flags = rffi.getintfield(s, 'c_flags')
c_first_field_index = rffi.getintfield(s, 'c_first_field_index')
if (c_flags & cffi_opcode.F_EXTERNAL) == 0:
if (c_flags & cffi_opcode.F_UNION) != 0:
name = _realize_name("union ", s.c_name)
x = ctypestruct.W_CTypeUnion(space, name)
else:
name = _realize_name("struct ", s.c_name)
if name == "struct _IO_FILE":
x = space.fromcache(RealizeCache).get_file_struct()
else:
x = ctypestruct.W_CTypeStruct(space, name)
if (c_flags & cffi_opcode.F_OPAQUE) == 0:
assert c_first_field_index >= 0
w_ctype = x
w_ctype.size = rffi.getintfield(s, 'c_size')
w_ctype.alignment = rffi.getintfield(s, 'c_alignment')
            # w_ctype._fields_list and other underscore fields are still
# None, making it a "lazy" (i.e. "non-forced") kind of struct
w_ctype._lazy_ffi = ffi
w_ctype._lazy_s = s
else:
assert c_first_field_index < 0
else:
assert c_first_field_index < 0
x = _fetch_external_struct_or_union(s, ffi.included_ffis_libs)
if x is None:
raise oefmt(ffi.w_FFIError,
"'%s %s' should come from ffi.include() but was not found",
"union" if c_flags & cffi_opcode.F_UNION else "struct",
rffi.charp2str(s.c_name))
assert isinstance(x, ctypestruct.W_CTypeStructOrUnion)
if (c_flags & cffi_opcode.F_OPAQUE) == 0 and x.size < 0:
prefix = "union" if c_flags & cffi_opcode.F_UNION else "struct"
name = rffi.charp2str(s.c_name)
raise oefmt(space.w_NotImplementedError,
"'%s %s' is opaque in the ffi.include(), but no "
"longer in the ffi doing the include (workaround: don't "
"use ffi.include() but duplicate the declarations of "
"everything using %s %s)",
prefix, name, prefix, name)
# Update the "primary" OP_STRUCT_UNION slot
ffi.cached_types[type_index] = x
if w_ctype is not None and rffi.getintfield(s, 'c_size') == -2:
# oops, this struct is unnamed and we couldn't generate
# a C expression to get its size. We have to rely on
# complete_struct_or_union() to compute it now.
try:
do_realize_lazy_struct(w_ctype)
except:
ffi.cached_types[type_index] = None
raise
return x
def _realize_c_enum(ffi, eindex):
e = ffi.ctxobj.ctx.c_enums[eindex]
type_index = rffi.getintfield(e, 'c_type_index')
if ffi.cached_types[type_index] is not None:
return ffi.cached_types[type_index] #found already in the "primary" slot
space = ffi.space
w_basetd = get_primitive_type(ffi, rffi.getintfield(e, 'c_type_prim'))
enumerators_w = []
enumvalues_w = []
p = e.c_enumerators
if p[0] != '\x00':
while True:
j = 0
while p[j] != ',' and p[j] != '\x00':
j += 1
enname = rffi.charpsize2str(p, j)
enumerators_w.append(space.newtext(enname))
gindex = parse_c_type.search_in_globals(ffi.ctxobj.ctx, enname)
assert gindex >= 0
g = ffi.ctxobj.ctx.c_globals[gindex]
assert getop(g.c_type_op) == cffi_opcode.OP_ENUM
assert getarg(g.c_type_op) == -1
w_integer_value = realize_global_int(ffi, g, gindex)
enumvalues_w.append(w_integer_value)
p = rffi.ptradd(p, j)
if p[0] == '\x00':
break
p = rffi.ptradd(p, 1)
name = _realize_name("enum ", e.c_name)
w_ctype = newtype.new_enum_type(space, name,
space.newlist(enumerators_w),
space.newlist(enumvalues_w),
w_basetd)
# Update the "primary" OP_ENUM slot
ffi.cached_types[type_index] = w_ctype
return w_ctype
def realize_c_type_or_func(ffi, opcodes, index):
op = opcodes[index]
from_ffi = (opcodes == ffi.ctxobj.ctx.c_types)
if from_ffi and ffi.cached_types[index] is not None:
return ffi.cached_types[index]
case = getop(op)
if case == cffi_opcode.OP_PRIMITIVE:
x = get_primitive_type(ffi, getarg(op))
elif case == cffi_opcode.OP_POINTER:
y = realize_c_type_or_func(ffi, opcodes, getarg(op))
if isinstance(y, W_CType):
x = newtype.new_pointer_type(ffi.space, y)
elif isinstance(y, W_RawFuncType):
x = y.unwrap_as_fnptr(ffi)
else:
raise NotImplementedError
elif case == cffi_opcode.OP_ARRAY:
x = get_array_type(ffi, opcodes, getarg(op),
rffi.cast(rffi.SIGNED, opcodes[index + 1]))
elif case == cffi_opcode.OP_OPEN_ARRAY:
x = get_array_type(ffi, opcodes, getarg(op), -1)
elif case == cffi_opcode.OP_STRUCT_UNION:
x = _realize_c_struct_or_union(ffi, getarg(op))
elif case == cffi_opcode.OP_ENUM:
x = _realize_c_enum(ffi, getarg(op))
elif case == cffi_opcode.OP_FUNCTION:
x = W_RawFuncType(opcodes, index)
elif case == cffi_opcode.OP_NOOP:
x = realize_c_type_or_func(ffi, opcodes, getarg(op))
elif case == cffi_opcode.OP_TYPENAME:
# essential: the TYPENAME opcode resolves the type index looked
# up in the 'ctx.c_typenames' array, but it does so in 'ctx.c_types'
# instead of in 'opcodes'!
type_index = rffi.getintfield(ffi.ctxobj.ctx.c_typenames[getarg(op)],
'c_type_index')
x = realize_c_type_or_func(ffi, ffi.ctxobj.ctx.c_types, type_index)
else:
raise oefmt(ffi.space.w_NotImplementedError, "op=%d", case)
if from_ffi:
assert ffi.cached_types[index] is None or ffi.cached_types[index] is x
ffi.cached_types[index] = x
return x
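# Illustrative walk-through (assumed indices, wording ours): if opcodes[0] is OP_POINTER(arg=1)
# and opcodes[1] is OP_PRIMITIVE(PRIM_INT), realizing index 0 first realizes "int" at index 1
# and then wraps it into an "int *" pointer type; when the opcodes come from
# ffi.ctxobj.ctx.c_types, both results are memoized in ffi.cached_types.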
def do_realize_lazy_struct(w_ctype):
"""This is called by W_CTypeStructOrUnion.force_lazy_struct().
"""
assert isinstance(w_ctype, ctypestruct.W_CTypeStructOrUnion)
space = w_ctype.space
ffi = w_ctype._lazy_ffi
s = w_ctype._lazy_s
assert w_ctype.size != -1 # not an opaque (but may be -2)
assert ffi is not None # still lazy
first_field = rffi.getintfield(s, 'c_first_field_index')
num_fields = rffi.getintfield(s, 'c_num_fields')
fields_w = [None] * num_fields
for i in range(num_fields):
fld = ffi.ctxobj.ctx.c_fields[first_field + i]
field_name = rffi.charp2str(fld.c_name)
field_size = rffi.getintfield(fld, 'c_field_size')
field_offset = rffi.getintfield(fld, 'c_field_offset')
op = rffi.getintfield(fld, 'c_field_type_op')
case = getop(op)
if case == cffi_opcode.OP_NOOP:
fbitsize = -1 # standard field
elif case == cffi_opcode.OP_BITFIELD:
assert field_size >= 0
fbitsize = field_size
else:
raise oefmt(space.w_NotImplementedError, "field op=%d", case)
w_ctf = realize_c_type(ffi, ffi.ctxobj.ctx.c_types, getarg(op))
if field_offset == -1:
# unnamed struct, with field positions and sizes entirely
# determined by complete_struct_or_union() and not checked.
# Or, bitfields (field_size >= 0), similarly not checked.
assert field_size == -1 or fbitsize >= 0
else:
newtype.detect_custom_layout(w_ctype, newtype.SF_STD_FIELD_POS,
w_ctf.size, field_size,
"wrong size for field '",
field_name, "'")
fields_w[i] = space.newtuple([
space.newtext(field_name),
w_ctf,
space.newint(fbitsize),
space.newint(field_offset)])
sflags = 0
c_flags = rffi.getintfield(s, 'c_flags')
if c_flags & cffi_opcode.F_CHECK_FIELDS:
sflags |= newtype.SF_STD_FIELD_POS
if c_flags & cffi_opcode.F_PACKED:
sflags |= newtype.SF_PACKED
assert w_ctype.size == rffi.getintfield(s, 'c_size')
assert w_ctype.alignment == rffi.getintfield(s, 'c_alignment')
try:
w_ctype.size = -1 # make opaque again
newtype.complete_struct_or_union(
space, w_ctype, space.newlist(fields_w), space.w_None,
totalsize = rffi.getintfield(s, 'c_size'),
totalalignment = rffi.getintfield(s, 'c_alignment'),
sflags = sflags)
except:
w_ctype.size = rffi.getintfield(s, 'c_size') # restore
w_ctype.alignment = rffi.getintfield(s, 'c_alignment') # restore
raise
if rffi.getintfield(s, 'c_size') >= 0:
assert w_ctype.size == rffi.getintfield(s, 'c_size')
assert w_ctype.alignment > 0
if rffi.getintfield(s, 'c_alignment') != -1:
assert w_ctype.alignment == rffi.getintfield(s, 'c_alignment')
assert w_ctype._fields_list is not None # not lazy any more
w_ctype._lazy_ffi = None
w_ctype._lazy_s = lltype.nullptr(parse_c_type.STRUCT_UNION_S)
def _fetch_external_struct_or_union(s, included_ffis_libs):
name = rffi.charp2str(s.c_name)
#
for ffi1, _ in included_ffis_libs:
ctx1 = ffi1.ctxobj.ctx
sindex = parse_c_type.search_in_struct_unions(ctx1, name)
if sindex < 0: # not found at all
continue
s1 = ctx1.c_struct_unions[sindex]
s1_flags = rffi.getintfield(s1, 'c_flags')
s_flags = rffi.getintfield(s, 'c_flags')
if ((s1_flags & (cffi_opcode.F_EXTERNAL | cffi_opcode.F_UNION))
== (s_flags & cffi_opcode.F_UNION)):
# s1 is not external, and the same kind (struct or union) as s
return _realize_c_struct_or_union(ffi1, sindex)
# not found, look more recursively
if len(ffi1.included_ffis_libs) > 0:
w_res = _fetch_external_struct_or_union(s, ffi1.included_ffis_libs)
if w_res is not None:
return w_res
return None
| 10,945 |
868 | <filename>tests/unit-tests/src/test/java/org/apache/activemq/artemis/tests/unit/logging/AssertionLoggerTest.java
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.tests.unit.logging;
import org.apache.activemq.artemis.core.client.impl.ServerLocatorImpl;
import org.apache.activemq.artemis.logs.AssertionLoggerHandler;
import org.apache.activemq.artemis.protocol.amqp.broker.ActiveMQProtonRemotingConnection;
import org.apache.activemq.artemis.protocol.amqp.logger.ActiveMQAMQPProtocolLogger;
import org.apache.activemq.artemis.tests.util.RandomUtil;
import org.jboss.logging.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
 * This will validate that the AssertionLoggerHandler is working as expected.
 * Even though this class belongs to artemis commons, this test has to be done here because we
 * are validating logging.properties, logging-ci.properties and the classloading of everything.
*/
public class AssertionLoggerTest {
@Before
public void prepare() {
AssertionLoggerHandler.startCapture(true);
}
@After
public void cleanup() {
AssertionLoggerHandler.stopCapture();
}
@Test
public void testHandlingOnAMQP() throws Exception {
validateLogging(ActiveMQProtonRemotingConnection.class);
}
@Test
public void testHandlingOnClientCore() throws Exception {
validateLogging(ServerLocatorImpl.class);
}
@Test
public void testInfoAMQP() throws Exception {
ActiveMQAMQPProtocolLogger.LOGGER.retryConnection("test", "test", 1, 1);
Assert.assertTrue(AssertionLoggerHandler.findText("AMQ111002"));
}
private void validateLogging(Class clazz) {
String randomLogging = RandomUtil.randomString();
Logger logging = Logger.getLogger(clazz);
logging.warn(randomLogging);
Assert.assertTrue(AssertionLoggerHandler.findText(randomLogging));
AssertionLoggerHandler.clear();
for (int i = 0; i < 10; i++) {
logging.warn(randomLogging);
}
Assert.assertEquals(10, AssertionLoggerHandler.countText(randomLogging));
AssertionLoggerHandler.clear();
for (int i = 0; i < 10; i++) {
logging.info(randomLogging);
}
Assert.assertEquals(10, AssertionLoggerHandler.countText(randomLogging));
}
}
| 1,026 |
311 | <gh_stars>100-1000
package org.thunlp.tagsuggest.contentbase;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Properties;
import java.util.Set;
import java.util.Vector;
import java.util.Map.Entry;
import java.util.logging.Logger;
import java.util.HashSet;
import org.thunlp.io.JsonUtil;
import org.thunlp.io.RecordReader;
import org.thunlp.matrix.NormalMatrix;
import org.thunlp.matrix.pagerank.PageRank;
import org.thunlp.misc.WeightString;
import org.thunlp.tagsuggest.common.ConfigIO;
import org.thunlp.tagsuggest.common.GenerativeTagSuggest;
import org.thunlp.tagsuggest.common.KeywordPost;
import org.thunlp.tagsuggest.common.Post;
import org.thunlp.tagsuggest.common.TagSuggest;
import org.thunlp.tagsuggest.common.WordFeatureExtractor;
import org.thunlp.tagsuggest.contentbase.NoiseTagLdaModel.Document;
import java.lang.Thread;
public class TextpagerankTagSuggest implements TagSuggest {
private static Logger LOG = Logger.getAnonymousLogger();
private Properties config = null;
private WordFeatureExtractor extractor = new WordFeatureExtractor();
private int numTags = 10;
private static String[] EMPTY_TAG_SET = new String[0];
private static int[] EMPTY_REASON_SET = new int[0];
private static JsonUtil J = new JsonUtil();
private static double[] PRresult = null;
private static double[] rankResult = null;
private static int num = 0;
public static void main(String[] args) throws IOException {
TextpagerankTagSuggest lda = new TextpagerankTagSuggest();
lda.setConfig(ConfigIO.configFromString("numtags=10;norm=all_log;k=5;dataType=Post"));
lda.loadModel("/home/niuyl/java/work/TagSuggestion/working_dir");
RecordReader reader = new RecordReader("/home/niuyl/java/work/TagSuggestion/post/post.dat");
StringBuilder explain = new StringBuilder("");
int rightnum = 0, allnum = 0;
while (reader.next()) {
++allnum;
Post p = J.fromJson(reader.value(), Post.class);
lda.suggest(p, explain);
}
}
@Override
public void feedback(Post p) {
}
@Override
public void loadModel(String modelPath) throws IOException {
}
@Override
public void setConfig(Properties config) {
this.config = config;
extractor = new WordFeatureExtractor(config);
numTags = Integer.parseInt(config.getProperty("numtags", "10"));
}
public void addEdge(NormalMatrix matrix, Vector<Integer> v, int start,
int end) {
for (int i = start; i < end; i++) {
for (int j = i + 1; j <= end; j++) {
matrix.add(v.get(i), v.get(j), 1);
matrix.add(v.get(j), v.get(i), 1);
}
}
}
@Override
public List<WeightString> suggest(Post p, StringBuilder explain) {
String[] features = extractor.extractKeyword((KeywordPost) p, true, true,true);
Document d = new Document(features, EMPTY_TAG_SET);
// for TextRank
HashMap<String, Integer> textMap = new HashMap<String, Integer>();
HashMap<Integer, String> textWordMap = new HashMap<Integer, String>();
Vector<Integer> textWordId = new Vector<Integer>();
num = 0;
for (String word : features) {
if (!textMap.containsKey(word)) {
textMap.put(word, num);
textWordMap.put(num, word);
textWordId.add(num);
num++;
} else {
textWordId.add(textMap.get(word));
}
}
// calculate the TextRank value
NormalMatrix matrix = new NormalMatrix(num, num);
int window = 10;
int len = textWordId.size();
if (len < window) {
for (int i = 1; i < len; i++) {
addEdge(matrix, textWordId, 0, i);
}
for (int i = 1; i < len - 1; i++) {
addEdge(matrix, textWordId, i, len - 1);
}
} else {
for (int i = 1; i < window - 1; i++) {
addEdge(matrix, textWordId, 0, i);
}
for (int i = 0; i <= len - window; i++) {
addEdge(matrix, textWordId, i, i + window - 1);
}
for (int i = len - window + 1; i < len - 1; i++) {
addEdge(matrix, textWordId, i, len - 1);
}
}
PageRank.prepareMatrix(matrix);
double[] rankResult = PageRank.pageRank(matrix, 100);
List<WeightString> results = new ArrayList<WeightString>();
for (int i = 0; i < num; ++ i)
{
results.add(new WeightString(textWordMap.get(i), rankResult[i]));
}
Collections.sort(results, new Comparator<WeightString>() {
@Override
public int compare(WeightString o1, WeightString o2) {
return Double.compare(o2.weight, o1.weight);
}
});
if (results.size() > numTags)
results = results.subList(0, numTags);
return results;
}
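	// Usage sketch (illustrative; the Post instance and the config string are assumptions, not
	// part of this file):
	//   TextpagerankTagSuggest suggester = new TextpagerankTagSuggest();
	//   suggester.setConfig(ConfigIO.configFromString("numtags=5"));
	//   List<WeightString> tags = suggester.suggest(post, new StringBuilder());
	// Each WeightString pairs a candidate word with its PageRank score over the sliding-window
	// (window size 10) co-occurrence graph built in suggest().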
} | 1,783 |
1,444 | <filename>Mage.Sets/src/mage/cards/s/SpecimenCollector.java
package mage.cards.s;
import mage.MageInt;
import mage.abilities.Ability;
import mage.abilities.common.DiesSourceTriggeredAbility;
import mage.abilities.common.EntersBattlefieldTriggeredAbility;
import mage.abilities.effects.common.CreateTokenCopyTargetEffect;
import mage.abilities.effects.common.CreateTokenEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.SubType;
import mage.filter.FilterPermanent;
import mage.filter.common.FilterControlledPermanent;
import mage.filter.predicate.permanent.TokenPredicate;
import mage.game.permanent.token.CrabToken;
import mage.game.permanent.token.SquirrelToken;
import mage.target.TargetPermanent;
import java.util.UUID;
/**
* @author TheElk801
*/
public final class SpecimenCollector extends CardImpl {
private static final FilterPermanent filter = new FilterControlledPermanent("token you control");
static {
filter.add(TokenPredicate.TRUE);
}
public SpecimenCollector(UUID ownerId, CardSetInfo setInfo) {
super(ownerId, setInfo, new CardType[]{CardType.CREATURE}, "{4}{U}");
this.subtype.add(SubType.VEDALKEN);
this.subtype.add(SubType.WIZARD);
this.power = new MageInt(2);
this.toughness = new MageInt(1);
// When Specimen Collector enters the battlefield, create a 1/1 green Squirrel creature token and a 0/3 blue Crab creature token.
Ability ability = new EntersBattlefieldTriggeredAbility(new CreateTokenEffect(new SquirrelToken()));
ability.addEffect(new CreateTokenEffect(new CrabToken()).setText("and a 0/3 blue Crab creature token"));
this.addAbility(ability);
// When Specimen Collector dies, create a token that's a copy of target token you control.
ability = new DiesSourceTriggeredAbility(new CreateTokenCopyTargetEffect());
ability.addTarget(new TargetPermanent(filter));
this.addAbility(ability);
}
private SpecimenCollector(final SpecimenCollector card) {
super(card);
}
@Override
public SpecimenCollector copy() {
return new SpecimenCollector(this);
}
}
| 747 |
518 | // #Create Payment Using PayPal Sample
// This sample code demonstrates how you can process a
// PayPal Account based Payment.
// API used: /v1/payments/payment
package com.paypal.api.payments.servlet;
import com.paypal.api.payments.*;
import com.paypal.api.payments.util.ResultPrinter;
import com.paypal.base.rest.APIContext;
import com.paypal.base.rest.PayPalRESTException;
import org.apache.log4j.Logger;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.util.*;
import static com.paypal.api.payments.util.SampleConstants.*;
public class ThirdPartyPaymentWithPayPalServlet extends HttpServlet {
private static final long serialVersionUID = 1L;
private static final Logger LOGGER = Logger
.getLogger(ThirdPartyPaymentWithPayPalServlet.class);
Map<String, String> map = new HashMap<String, String>();
@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
doPost(req, resp);
}
// ##Create
// Sample showing to create a Payment using PayPal
@Override
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
throws ServletException, IOException {
createPayment(req, resp);
req.getRequestDispatcher("response.jsp").forward(req, resp);
}
public Payment createPayment(HttpServletRequest req, HttpServletResponse resp) {
Payment createdPayment = null;
// ### Api Context
// Pass in a `ApiContext` object to authenticate
// the call and to send a unique request id
// (that ensures idempotency). The SDK generates
// a request id if you do not pass one explicitly.
APIContext apiContext = new APIContext(clientID, clientSecret, mode);
if (req.getParameter("PayerID") != null) {
Payment payment = new Payment();
if (req.getParameter("guid") != null) {
payment.setId(map.get(req.getParameter("guid")));
}
PaymentExecution paymentExecution = new PaymentExecution();
paymentExecution.setPayerId(req.getParameter("PayerID"));
try {
createdPayment = payment.execute(apiContext, paymentExecution);
ResultPrinter.addResult(req, resp, "Executed The Payment", Payment.getLastRequest(), Payment.getLastResponse(), null);
} catch (PayPalRESTException e) {
ResultPrinter.addResult(req, resp, "Executed The Payment", Payment.getLastRequest(), null, e.getMessage());
}
} else {
// ###Details
			// Lets you specify the details of a payment amount.
Details details = new Details();
details.setShipping("1");
details.setSubtotal("5");
details.setTax("1");
// ###Amount
// Let's you specify a payment amount.
Amount amount = new Amount();
amount.setCurrency("USD");
// Total must be equal to sum of shipping, tax and subtotal.
amount.setTotal("7");
amount.setDetails(details);
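			// Illustrative check of the constraint above (values taken from this sample):
			// shipping (1) + subtotal (5) + tax (1) = 7, matching the total set on this Amount.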
// ### Payee
// Specify a payee with that user's email or merchant id
// Merchant Id can be found at https://www.paypal.com/businessprofile/settings/
Payee payee = new Payee();
payee.setEmail("<EMAIL>");
// ###Transaction
// A transaction defines the contract of a
// payment - what is the payment for and who
			// is fulfilling it. A transaction is created with
			// `Payee` and `Amount` types.
Transaction transaction = new Transaction();
transaction.setAmount(amount);
transaction.setPayee(payee);
transaction
.setDescription("This is the payment transaction description.");
// ### Items
Item item = new Item();
item.setName("Ground Coffee 40 oz").setQuantity("1").setCurrency("USD").setPrice("5");
ItemList itemList = new ItemList();
List<Item> items = new ArrayList<Item>();
items.add(item);
itemList.setItems(items);
transaction.setItemList(itemList);
// The Payment creation API requires a list of
// Transaction; add the created `Transaction`
// to a List
List<Transaction> transactions = new ArrayList<Transaction>();
transactions.add(transaction);
// ###Payer
			// A resource representing a Payer that funds a payment.
			// The payment method is set to 'paypal'.
Payer payer = new Payer();
payer.setPaymentMethod("paypal");
// ###Payment
// A Payment Resource; create one using
// the above types and intent as 'sale'
Payment payment = new Payment();
payment.setIntent("sale");
payment.setPayer(payer);
payment.setTransactions(transactions);
// ###Redirect URLs
RedirectUrls redirectUrls = new RedirectUrls();
String guid = UUID.randomUUID().toString().replaceAll("-", "");
redirectUrls.setCancelUrl(req.getScheme() + "://"
+ req.getServerName() + ":" + req.getServerPort()
+ req.getContextPath() + "/thirdpartypaymentwithpaypal?guid=" + guid);
redirectUrls.setReturnUrl(req.getScheme() + "://"
+ req.getServerName() + ":" + req.getServerPort()
+ req.getContextPath() + "/thirdpartypaymentwithpaypal?guid=" + guid);
payment.setRedirectUrls(redirectUrls);
// Create a payment by posting to the APIService
// using a valid AccessToken
// The return object contains the status;
try {
createdPayment = payment.create(apiContext);
LOGGER.info("Created payment with id = "
+ createdPayment.getId() + " and status = "
+ createdPayment.getState());
// ###Payment Approval Url
Iterator<Links> links = createdPayment.getLinks().iterator();
while (links.hasNext()) {
Links link = links.next();
if (link.getRel().equalsIgnoreCase("approval_url")) {
req.setAttribute("redirectURL", link.getHref());
}
}
ResultPrinter.addResult(req, resp, "Payment with PayPal", Payment.getLastRequest(), Payment.getLastResponse(), null);
map.put(guid, createdPayment.getId());
} catch (PayPalRESTException e) {
ResultPrinter.addResult(req, resp, "Payment with PayPal", Payment.getLastRequest(), null, e.getMessage());
}
}
return createdPayment;
}
}
| 2,086 |
480 | <reponame>weicao/galaxysql<filename>polardbx-optimizer/src/main/java/com/alibaba/polardbx/optimizer/memory/ApMemoryPool.java
/*
* Copyright [2013-2021], Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.polardbx.optimizer.memory;
import com.alibaba.polardbx.common.utils.logger.Logger;
import com.alibaba.polardbx.common.utils.logger.LoggerFactory;
import java.util.concurrent.Semaphore;
public class ApMemoryPool extends AdaptiveMemoryPool {
protected static final Logger logger = LoggerFactory.getLogger(ApMemoryPool.class);
private Semaphore apSemaphore = null;
private Long overloadTime = null;
public ApMemoryPool(String name, long minLimit, long maxLimit, MemoryPool parent) {
super(name, parent, MemoryType.GENERAL_AP, minLimit, maxLimit);
}
@Override
protected void onMemoryReserved(long mayTotalUsage, boolean allocateSuccessOfParent) {
if (!allocateSuccessOfParent) {
if (mayTotalUsage < minLimit) {
                // AP is below its low-water mark but the allocation still failed, so throttle the TP rate
adaptiveMemoryHandler.limitTpRate();
} else {
                // The allocation failed because AP itself is using too much memory:
                // AP is above its high-water mark, so trigger memory release and also throttle AP
adaptiveMemoryHandler.revokeReleaseMemory();
adaptiveMemoryHandler.killApQuery();
}
} else {
            // The allocation succeeded, but AP exceeded its maximum threshold, so trigger memory
            // release and also throttle AP.
            // Several strategies could be refined here: 1. self-kill, 2. trigger spill, 3. rate
            // limiting; 1 and 2 are each orthogonal to 3.
            // FIXME consider whether to self-kill or to apply rate limiting
adaptiveMemoryHandler.limitTpRate();
// if (revocableBytes > 0) {
// requestMemoryRevoke(this, Math.min(mayTotalUsage - minLimit, revocableBytes));
// }
}
}
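    // Illustrative summary of the policy above (wording ours, behaviour from the code): a failed
    // reservation below minLimit only throttles TP; a failed reservation at or above minLimit
    // releases revocable AP memory and kills AP queries; a reservation that succeeds but still
    // trips this callback currently just throttles TP.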
public Semaphore getApSemaphore() {
return apSemaphore;
}
public void setApSemaphore(Semaphore apSemaphore) {
this.apSemaphore = apSemaphore;
}
public Long getOverloadTime() {
return overloadTime;
}
public void setOverloadTime(Long overloadTime) {
this.overloadTime = overloadTime;
}
public void initApTokens() {
// if (apSemaphore == null && children.size() > 1) {
// Semaphore semaphore = new Semaphore(children.size() / 2);
// logger.warn("apSemaphore.availablePermits=" + apSemaphore.availablePermits());
// for (MemoryPool pool : this.children.values()) {
// if (apSemaphore.availablePermits() == 0) {
// break;
// }
// if (((QueryMemoryPool) pool).setSemaphore(apSemaphore)) {
// try {
// apSemaphore.acquire();
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// }
// }
// apSemaphore = semaphore;
// }
}
public Semaphore acquireToken() throws InterruptedException {
if (apSemaphore != null) {
apSemaphore.acquire();
}
return apSemaphore;
}
public void releaseToken(Semaphore semaphore) {
if (semaphore != null) {
semaphore.release();
}
if (apSemaphore != null && apSemaphore != semaphore) {
apSemaphore.release();
}
}
}
| 1,906 |
480 | /*
* Copyright [2013-2021], Alibaba Group Holding Limited
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.polardbx.server.handler.privileges.polar;
import com.alibaba.polardbx.server.ServerConnection;
import com.alibaba.polardbx.druid.sql.ast.statement.SQLExprTableSource;
import com.alibaba.polardbx.druid.sql.ast.statement.SQLRevokeStatement;
import com.alibaba.polardbx.druid.sql.parser.ByteString;
import com.alibaba.polardbx.common.audit.AuditAction;
import com.alibaba.polardbx.common.utils.GeneralUtil;
import com.alibaba.polardbx.common.utils.logger.Logger;
import com.alibaba.polardbx.common.utils.logger.LoggerFactory;
import com.alibaba.polardbx.gms.privilege.PolarAccountInfo;
import com.alibaba.polardbx.gms.privilege.PolarPrivManager;
import com.alibaba.polardbx.gms.privilege.PrivilegeKind;
import com.alibaba.polardbx.optimizer.parse.FastsqlUtils;
import java.util.ArrayList;
import java.util.List;
import static com.alibaba.polardbx.server.handler.privileges.polar.PolarHandlerCommon.checkDrdsRoot;
import static com.alibaba.polardbx.gms.privilege.audit.AuditPrivilege.polarAudit;
/**
* @author bairui.lrj
*/
public class PolarRevokePrivilegeHandler extends AbstractPrivilegeCommandHandler {
private static final Logger logger = LoggerFactory.getLogger(PolarRevokePrivilegeHandler.class);
private final SQLRevokeStatement stmt;
public PolarRevokePrivilegeHandler(ByteString sql,
ServerConnection serverConn,
PolarAccountInfo granter,
PolarPrivManager polarPrivManager,
SQLRevokeStatement stmt) {
super(sql, serverConn, granter, polarPrivManager);
this.stmt = stmt;
}
private List<PolarAccountInfo> getGrantees(ServerConnection c) {
List<PolarAccountInfo> grantees;
try {
SQLExprTableSource sqlExprTableSource = (SQLExprTableSource) stmt.getResource();
grantees = PolarHandlerCommon.getGrantees(sqlExprTableSource, stmt.getUsers(),
stmt.getPrivileges(), c);
if (stmt.isGrantOption()) {
for (PolarAccountInfo grantee : grantees) {
if (grantee.getFirstDbPriv() != null) {
grantee.getFirstDbPriv().grantPrivilege(PrivilegeKind.GRANT_OPTION);
} else if (grantee.getFirstTbPriv() != null) {
grantee.getFirstTbPriv().grantPrivilege(PrivilegeKind.GRANT_OPTION);
} else {
grantee.getInstPriv().grantPrivilege(PrivilegeKind.GRANT_OPTION);
}
}
}
} catch (Exception e) {
throw GeneralUtil.nestedException(e);
}
return grantees;
}
private void checkGrantees(List<PolarAccountInfo> grantees) {
checkDrdsRoot(grantees);
}
@Override
protected void doHandle() {
ByteString sql = getSql();
ServerConnection c = getServerConn();
List<PolarAccountInfo> grantees = getGrantees(c);
checkGrantees(grantees);
PolarAccountInfo granter = getGranter();
// Revoke
PolarPrivManager.getInstance().revokePrivileges(granter, c.getActiveRoles(), grantees);
polarAudit(getServerConn().getConnectionInfo(), getSql().toString(), AuditAction.REVOKE);
logger.info(String.format("REVOKE succeed, sql: %s, granter: %s", sql, granter.getIdentifier()));
}
}
| 1,688 |
1,016 | package com.thinkbiganalytics.metadata.modeshape.feed;
import com.thinkbiganalytics.metadata.api.template.ChangeComment;
/*-
* #%L
* kylo-metadata-modeshape
* %%
* Copyright (C) 2017 ThinkBig Analytics
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.thinkbiganalytics.metadata.modeshape.category.JcrCategory;
import com.thinkbiganalytics.metadata.modeshape.common.JcrObject;
import com.thinkbiganalytics.metadata.modeshape.common.mixin.AuditableMixin;
import com.thinkbiganalytics.metadata.modeshape.common.mixin.IndexControlledMixin;
import com.thinkbiganalytics.metadata.modeshape.common.mixin.SystemEntityMixin;
import com.thinkbiganalytics.metadata.modeshape.common.mixin.TaggableMixin;
import com.thinkbiganalytics.metadata.modeshape.support.JcrUtil;
import com.thinkbiganalytics.metadata.modeshape.support.JcrVersionUtil;
import com.thinkbiganalytics.metadata.modeshape.template.JcrChangeComment;
import org.joda.time.DateTime;
import java.util.Optional;
import javax.jcr.Node;
public class FeedSummary extends JcrObject implements SystemEntityMixin, AuditableMixin, TaggableMixin, IndexControlledMixin {
public static final String NODE_TYPE = "tba:feedSummary";
public static final String VERSION_COMMENT = "tba:versionComment";
public static final String DETAILS = "tba:details";
private FeedDetails details;
private JcrFeed feed;
public FeedSummary(Node node, JcrFeed feed) {
super(JcrVersionUtil.createAutoCheckoutProxy(node, false));
this.feed = feed;
}
public FeedSummary(Node node, JcrCategory category, JcrFeed feed) {
this(node, feed);
}
/* (non-Javadoc)
* @see com.thinkbiganalytics.metadata.modeshape.common.mixin.AuditableMixin#getModifiedTime()
*/
@Override
public DateTime getModifiedTime() {
DateTime thisTime = AuditableMixin.super.getModifiedTime();
return getFeedDetails()
.map(FeedDetails::getModifiedTime)
.filter(time -> time != null)
.filter(time -> time.compareTo(thisTime) > 0)
.orElse(thisTime);
}
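    // Illustrative behaviour (wording ours): if this summary node was last modified at 10:00 but
    // its tba:details child was modified at 10:05, getModifiedTime() returns 10:05; otherwise the
    // summary's own timestamp is returned.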
/* (non-Javadoc)
* @see com.thinkbiganalytics.metadata.modeshape.common.mixin.AuditableMixin#getModifiedBy()
*/
@Override
public String getModifiedBy() {
        String thisModifier = AuditableMixin.super.getModifiedBy();
DateTime thisTime = AuditableMixin.super.getModifiedTime();
return getFeedDetails()
.map(FeedDetails::getModifiedTime)
.filter(time -> time != null)
.filter(time -> time.compareTo(thisTime) > 0)
.map(time -> thisModifier)
.orElse(thisModifier);
}
public Optional<FeedDetails> getFeedDetails() {
if (this.details == null) {
if (JcrUtil.hasNode(getNode(), DETAILS)) {
this.details = JcrUtil.getJcrObject(getNode(), DETAILS, FeedDetails.class, this);
return Optional.of(this.details);
} else {
return Optional.empty();
}
} else {
return Optional.of(this.details);
}
}
public Optional<ChangeComment> getVersionComment() {
return Optional.ofNullable(JcrUtil.getJcrObject(getNode(), VERSION_COMMENT, JcrChangeComment.class));
}
public void setVersionComment(String comment) {
// First remove any existing comment so the timestamp and user gets correctly set.
JcrUtil.removeNode(getNode(), VERSION_COMMENT);
Node chgNode = JcrUtil.getOrCreateNode(getNode(), VERSION_COMMENT, JcrChangeComment.NODE_TYPE);
new JcrChangeComment(chgNode, comment != null ? comment : "");
}
protected JcrFeed getParentFeed() {
return this.feed;
}
}
| 1,657 |
629 | /* Copyright (c) 2017, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
*
* All rights reserved.
*
* The Astrobee platform is licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
#include <calibration/camera_utilities.h>
#include <localization_common/utilities.h>
#include <opencv2/core/eigen.hpp>
#include <iostream>
#include <random>
#include <unordered_map>
namespace calibration {
namespace lc = localization_common;
Eigen::Vector2d Project3dPointToImageSpace(const Eigen::Vector3d& cam_t_point, const Eigen::Matrix3d& intrinsics) {
return (intrinsics * cam_t_point).hnormalized();
}
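// Illustrative example (numbers ours): with intrinsics K = [[600, 0, 320], [0, 600, 240], [0, 0, 1]],
// a camera-frame point (0.1, -0.2, 2.0) maps to K * p = (700, 360, 2), and hnormalized() divides by
// the last component to give the pixel (350, 180).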
Eigen::Isometry3d Isometry3d(const cv::Mat& rodrigues_rotation_cv, const cv::Mat& translation_cv) {
Eigen::Vector3d translation;
cv::cv2eigen(translation_cv, translation);
Eigen::Matrix3d rotation;
cv::Mat rotation_cv;
cv::Rodrigues(rodrigues_rotation_cv, rotation_cv);
cv::cv2eigen(rotation_cv, rotation);
return lc::Isometry3d(translation, rotation);
}
void UndistortedPnP(const std::vector<cv::Point2d>& undistorted_image_points, const std::vector<cv::Point3d>& points_3d,
const cv::Mat& intrinsics, const int pnp_method, cv::Mat& rotation, cv::Mat& translation) {
cv::Mat zero_distortion(4, 1, cv::DataType<double>::type, cv::Scalar(0));
cv::solvePnP(points_3d, undistorted_image_points, intrinsics, zero_distortion, rotation, translation, false,
pnp_method);
}
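// Note (wording ours): the zero 4x1 distortion vector above tells cv::solvePnP that the image
// points are already undistorted, so only the pinhole intrinsics influence the pose estimate.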
std::vector<int> RandomNIndices(const int num_possible_indices, const int num_sampled_indices) {
static std::random_device rd;
static std::mt19937 gen(rd());
std::uniform_int_distribution<> distribution(0, num_possible_indices - 1);
std::unordered_set<int> sampled_indices_set;
std::vector<int> sampled_indices;
while (static_cast<int>(sampled_indices.size()) < num_sampled_indices) {
const int random_index = distribution(gen);
if (sampled_indices_set.count(random_index) > 0) continue;
sampled_indices_set.emplace(random_index);
sampled_indices.emplace_back(random_index);
}
return sampled_indices;
}
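// Usage note (wording ours): RandomNIndices(100, 5) returns 5 distinct indices drawn uniformly
// from [0, 99]; the unordered_set guards against duplicates, so the loop terminates as long as
// num_sampled_indices <= num_possible_indices.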
} // namespace calibration
| 909 |