max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
684 | //------------------------------------------------------------------------------
// Copyright (c) 2018-2020 <NAME>
// All rights reserved.
//
// https://michelemorrone.eu - https://BrutPitt.com
//
// twitter: https://twitter.com/BrutPitt - github: https://github.com/BrutPitt
//
// mailto:<EMAIL> - mailto:<EMAIL>
//
// This software is distributed under the terms of the BSD 2-Clause license
//------------------------------------------------------------------------------
#pragma once
// Mira3D
////////////////////////////////////////////////////////////////////////////
// Mira map extended to 3D; uses the scalar-K parameter base class and a
// Lyapunov-exponent scan to discover chaotic parameter sets.
class Mira3D : public attractorScalarK
{
public:
    Mira3D() {
        // Bind the per-iteration step callback to this class' Step().
        stepFn = (stepPtrFn) &Mira3D::Step;
        // Variable and parameter search ranges used by the random search.
        vMin = -10.0; vMax = 10.0; kMin = -1.0; kMax = 1.0;
        // Default camera point-of-view for this attractor.
        m_POV = vec3( 0.f, 0, 12.f);
    }
protected:
    void Step(vec4 &v, vec4 &vp);   // one map iteration: v -> vp
    void startData();               // initial conditions / default kVals
    void searchAttractor() { searchLyapunov(); }
};
// Mira4D
////////////////////////////////////////////////////////////////////////////
// Mira map in 4D; same scalar-K scheme as Mira3D but with a 4D point size
// and a farther default camera.
class Mira4D : public attractorScalarK
{
public:
    Mira4D() {
        stepFn = (stepPtrFn) &Mira4D::Step;
        vMin = -10.0; vMax = 10.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 50.f);
    }
    // 4D attractors render with the larger 4D point size.
    int getPtSize() { return attPt4D; }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
    void searchAttractor() { searchLyapunov(); }
};
// PopCorn 4D BaseClasses
////////////////////////////////////////////////////////////////////////////
// Common base for all 4D PopCorn variants: shared ranges, POV and point size.
// Concrete variants supply the step function and the trig-function selection.
class PopCorn4DType : public attractorScalarK
{
public:
    PopCorn4DType() {
        vMin = -1.0; vMax = 1.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 12.f);
    }
    int getPtSize() { return attPt4D; }
protected:
    virtual void startData();
    //virtual void additionalDataCtrls();
};
// PopCorn base with a shared Step() parameterized by four per-axis
// trig-function pointers, assigned by the concrete subclasses below.
class PopCorn4Dset : public PopCorn4DType
{
public:
    void Step(vec4 &v, vec4 &vp);
protected:
    // Per-axis function selection (sin/cos combinations).
    double (*pfX)(double), (*pfY)(double), (*pfZ)(double), (*pfW)(double);
};
// PopCorn 4D
////////////////////////////////////////////////////////////////////////////
// PopCorn 4D variant: sin() on all four axes.
class PopCorn4D : public PopCorn4Dset
{
public:
    PopCorn4D() {
        stepFn = (stepPtrFn) &PopCorn4Dset::Step;
        pfX = sin;
        pfY = sin;
        pfZ = sin;
        pfW = sin;
    }
};
////////////////////////////////////////////////////////////////////////////
// PopCorn 4D variant: sin on X/Z/W, cos on Y.
class PopCorn4Dscss : public PopCorn4Dset
{
public:
    PopCorn4Dscss() { stepFn = (stepPtrFn) &PopCorn4Dset::Step;
        pfX = pfZ = pfW = sin; pfY = cos; }
};
////////////////////////////////////////////////////////////////////////////
// PopCorn 4D variant: sin on X/Z, cos on Y/W.
class PopCorn4Dscsc : public PopCorn4Dset
{
public:
    PopCorn4Dscsc() { stepFn = (stepPtrFn) &PopCorn4Dset::Step;
        pfX = pfZ = sin; pfY = pfW = cos; }
};
////////////////////////////////////////////////////////////////////////////
// PopCorn 4D variant: sin on X/Y, cos on Z/W.
class PopCorn4Dsscc : public PopCorn4Dset
{
public:
    PopCorn4Dsscc() { stepFn = (stepPtrFn) &PopCorn4Dset::Step;
        pfX = pfY = sin; pfZ = pfW = cos; }
};
// Simplified PopCorn 4D: own Step() with its own set of per-axis trig
// pointers (not derived from PopCorn4Dset), all initialized to sin.
class PopCorn4Dsimple : public PopCorn4DType
{
public:
    PopCorn4Dsimple() { stepFn = (stepPtrFn) &PopCorn4Dsimple::Step;
        pfX = pfY = pfZ = pfW = sin; }
    void Step(vec4 &v, vec4 &vp);
protected:
    double (*pfX)(double), (*pfY)(double), (*pfZ)(double), (*pfW)(double);
};
////////////////////////////////////////////////////////////////////////////
// PopCorn 4D with randomized data (startData overridden to randomize).
class PopCorn4Drnd : public PopCorn4DType
{
public:
    void startData();
    // Fix: bind the step callback to this class' own Step(). The previous
    // code cast &PopCorn4DType::Step, but PopCorn4DType declares no Step();
    // Step() is declared below, and every other leaf class in this file
    // binds the class that actually declares its Step().
    PopCorn4Drnd() { stepFn = (stepPtrFn) &PopCorn4Drnd::Step; }
protected:
    void Step(vec4 &v, vec4 &vp);
};
// PopCorn 3D
////////////////////////////////////////////////////////////////////////////
// Classic PopCorn map in 3D.
class PopCorn3D : public attractorScalarK
{
public:
    PopCorn3D() {
        stepFn = (stepPtrFn) &PopCorn3D::Step;
        vMin = -1.0; vMax = 1.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 12.f);
    }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
};
// Martin 4D
////////////////////////////////////////////////////////////////////////////
// Base for Martin-map 4D variants: shared Step() parameterized by two
// trig-function pointers (X and Z axes) chosen by the subclasses.
class Martin4DBase : public attractorScalarK
{
public:
    Martin4DBase() {
        vMin = .0; vMax = .5; kMin = 2.7; kMax = 3.0;
        m_POV = vec3( 0.f, 0, 50.f);
    }
    int getPtSize() { return attPt4D; }
    void Step(vec4 &v, vec4 &vp);
protected:
    double (*pfX)(double), (*pfZ)(double);
    void startData();
};
// Martin 4D variant: sin on both X and Z.
class Martin4D : public Martin4DBase
{
public:
    Martin4D() { stepFn = (stepPtrFn) &Martin4DBase::Step; pfX = pfZ = sin; }
};
// Martin 4D variant: sin on X, cos on Z.
class Martin4Dsc : public Martin4DBase
{
public:
    Martin4Dsc() { stepFn = (stepPtrFn) &Martin4DBase::Step; pfX = sin; pfZ = cos; }
};
// Martin 4D variant: cos on both X and Z.
class Martin4Dcc : public Martin4DBase
{
public:
    Martin4Dcc() { stepFn = (stepPtrFn) &Martin4DBase::Step; pfX = pfZ = cos; }
};
// Symmetric Icons
////////////////////////////////////////////////////////////////////////////
// Symmetric Icons attractor rendered as 4D points.
class SymmetricIcons4D : public attractorScalarK
{
public:
    SymmetricIcons4D() {
        stepFn = (stepPtrFn) &SymmetricIcons4D::Step;
        vMin = -1.0; vMax = 1.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 12.f);
    }
    int getPtSize() { return attPt4D; }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
};
// Hopalong 4D
////////////////////////////////////////////////////////////////////////////
// Hopalong map in 4D.
class Hopalong4D : public attractorScalarK
{
public:
    Hopalong4D() {
        stepFn = (stepPtrFn) &Hopalong4D::Step;
        vMin = -1.0; vMax = 1.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 50.f);
    }
    int getPtSize() { return attPt4D; }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
    //void searchAttractor() { searchLyapunov(); }
};
// Hopalong variant that iterates a Z parameter over a range per frame.
class Hopalong3D : public attractorScalarIterateZ
{
public:
    Hopalong3D() {
        stepFn = (stepPtrFn) &Hopalong3D::Step;
        vMin = -1.0; vMax = 1.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 50.f);
    }
    // Z-range comes from kVal[2]/kVal[3] here (other IterateZ attractors in
    // this file use kVal[1]/kVal[2]); 100000 Z iterations.
    void initStep() { zMin = kVal[2]; zMax = kVal[3]; zIter=100000; attractorScalarIterateZ::initStep(); }
    int getPtSize() { return attPt4D; }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
    //void searchAttractor() { searchLyapunov(); }
};
// Kaneko3D
////////////////////////////////////////////////////////////////////////////
// Kaneko coupled-map attractor with iterated Z parameter.
class Kaneko3D : public attractorScalarIterateZ
{
public:
    Kaneko3D() {
        stepFn = (stepPtrFn) &Kaneko3D::Step;
        vMin = -10.0; vMax = 10.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 12.f);
    }
    // Z-range from kVal[1]/kVal[2]; 1000 Z iterations.
    void initStep() { zMin = kVal[1]; zMax = kVal[2]; zIter=1000; attractorScalarIterateZ::initStep(); }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
};
// Henon3D
////////////////////////////////////////////////////////////////////////////
// Henon map with iterated Z parameter; same scheme as Kaneko3D.
class Henon3D : public attractorScalarIterateZ
{
public:
    Henon3D() {
        stepFn = (stepPtrFn) &Henon3D::Step;
        vMin = -10.0; vMax = 10.0; kMin = -1.0; kMax = 1.0;
        m_POV = vec3( 0.f, 0, 12.f);
    }
    // Z-range from kVal[1]/kVal[2]; 1000 Z iterations.
    void initStep() { zMin = kVal[1]; zMax = kVal[2]; zIter=1000; attractorScalarIterateZ::initStep(); }
protected:
    void Step(vec4 &v, vec4 &vp);
    void startData();
};
| 2,981 |
4,492 | <reponame>jonathanslenders/cryptography
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
# cffi binding definition for OpenSSL's RAND_* API. The binding generator
# concatenates these strings from many such modules into one cdef/source.
INCLUDES = """
#include <openssl/rand.h>
"""

# Opaque types referenced by the declared functions.
TYPES = """
typedef ... RAND_METHOD;
"""

# C prototypes exposed through the binding.
FUNCTIONS = """
int RAND_set_rand_method(const RAND_METHOD *);
void RAND_add(const void *, int, double);
int RAND_status(void);
int RAND_bytes(unsigned char *, int);
"""

# No version/backend-specific shims are needed for these symbols.
CUSTOMIZATIONS = """
"""
| 167 |
1,204 | <gh_stars>1000+
/*
* Copyright 2015 <NAME>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.gs.collections.impl.lazy;
import java.util.Iterator;
import com.gs.collections.api.block.function.Function;
import com.gs.collections.api.block.predicate.Predicate;
import com.gs.collections.api.block.predicate.Predicate2;
import com.gs.collections.api.block.procedure.Procedure;
import com.gs.collections.api.block.procedure.Procedure2;
import com.gs.collections.api.block.procedure.primitive.ObjectIntProcedure;
import com.gs.collections.impl.Counter;
import com.gs.collections.impl.block.factory.Predicates;
import com.gs.collections.impl.lazy.iterator.FlatCollectIterator;
import com.gs.collections.impl.utility.Iterate;
import net.jcip.annotations.Immutable;
@Immutable
public class FlatCollectIterable<T, V>
        extends AbstractLazyIterable<V>
{
    /** Source iterable being adapted lazily. */
    private final Iterable<T> adapted;
    /** Maps each source element to an iterable whose elements are flattened out. */
    private final Function<? super T, ? extends Iterable<V>> function;

    public FlatCollectIterable(Iterable<T> newAdapted, Function<? super T, ? extends Iterable<V>> function)
    {
        this.adapted = newAdapted;
        this.function = function;
    }

    /** Applies {@code procedure} to every flattened element, in order. */
    public void each(final Procedure<? super V> procedure)
    {
        Iterate.forEach(this.adapted, new Procedure<T>()
        {
            public void value(T each)
            {
                Iterate.forEach(FlatCollectIterable.this.function.valueOf(each), procedure);
            }
        });
    }

    /** Like {@link #each} but also passes a running 0-based element index. */
    @Override
    public void forEachWithIndex(ObjectIntProcedure<? super V> objectIntProcedure)
    {
        // The adapter keeps the index counter across inner iterables.
        final Procedure<V> innerProcedure = new AdaptObjectIntProcedureToProcedure<V>(objectIntProcedure);
        Iterate.forEach(this.adapted, new Procedure<T>()
        {
            public void value(T each)
            {
                Iterable<V> iterable = FlatCollectIterable.this.function.valueOf(each);
                Iterate.forEach(iterable, innerProcedure);
            }
        });
    }

    @Override
    public <P> void forEachWith(final Procedure2<? super V, ? super P> procedure, final P parameter)
    {
        Iterate.forEach(this.adapted, new Procedure<T>()
        {
            public void value(T each)
            {
                Iterate.forEachWith(FlatCollectIterable.this.function.valueOf(each), procedure, parameter);
            }
        });
    }

    /**
     * Returns the first flattened element satisfying {@code predicate}, or null.
     * Iterate.detect short-circuits on the first outer element whose inner
     * iterable contains a match; the match itself is captured via result[0].
     */
    @Override
    public V detect(final Predicate<? super V> predicate)
    {
        final V[] result = (V[]) new Object[1];
        Iterate.detect(this.adapted, new Predicate<T>()
        {
            public boolean accept(T each)
            {
                Iterable<V> iterable = FlatCollectIterable.this.function.valueOf(each);
                return Iterate.anySatisfy(iterable, new Predicate<V>()
                {
                    public boolean accept(V each)
                    {
                        if (predicate.accept(each))
                        {
                            result[0] = each;
                            return true;
                        }
                        return false;
                    }
                });
            }
        });
        return result[0];
    }

    @Override
    public boolean anySatisfy(final Predicate<? super V> predicate)
    {
        return Iterate.anySatisfy(this.adapted, new Predicate<T>()
        {
            public boolean accept(T each)
            {
                return Iterate.anySatisfy(FlatCollectIterable.this.function.valueOf(each), predicate);
            }
        });
    }

    @Override
    public <P> boolean anySatisfyWith(Predicate2<? super V, ? super P> predicate, P parameter)
    {
        return this.anySatisfy(Predicates.bind(predicate, parameter));
    }

    @Override
    public boolean allSatisfy(final Predicate<? super V> predicate)
    {
        return Iterate.allSatisfy(this.adapted, new Predicate<T>()
        {
            public boolean accept(T each)
            {
                return Iterate.allSatisfy(FlatCollectIterable.this.function.valueOf(each), predicate);
            }
        });
    }

    @Override
    public <P> boolean allSatisfyWith(Predicate2<? super V, ? super P> predicate, P parameter)
    {
        return this.allSatisfy(Predicates.bind(predicate, parameter));
    }

    /**
     * True when no flattened element satisfies {@code predicate}: no outer
     * element may have ANY inner match, hence noneSatisfy over anySatisfy.
     */
    @Override
    public boolean noneSatisfy(final Predicate<? super V> predicate)
    {
        return Iterate.noneSatisfy(this.adapted, new Predicate<T>()
        {
            public boolean accept(T each)
            {
                return Iterate.anySatisfy(FlatCollectIterable.this.function.valueOf(each), predicate);
            }
        });
    }

    @Override
    public <P> boolean noneSatisfyWith(Predicate2<? super V, ? super P> predicate, P parameter)
    {
        return this.noneSatisfy(Predicates.bind(predicate, parameter));
    }

    public Iterator<V> iterator()
    {
        return new FlatCollectIterator<T, V>(this.adapted, this.function);
    }

    /** Wraps an ObjectIntProcedure as a Procedure, supplying a running index. */
    private static final class AdaptObjectIntProcedureToProcedure<V> implements Procedure<V>
    {
        private static final long serialVersionUID = 1L;
        private final Counter index;
        private final ObjectIntProcedure<? super V> objectIntProcedure;

        private AdaptObjectIntProcedureToProcedure(ObjectIntProcedure<? super V> objectIntProcedure)
        {
            this.objectIntProcedure = objectIntProcedure;
            this.index = new Counter();
        }

        public void value(V each)
        {
            this.objectIntProcedure.value(each, this.index.getCount());
            this.index.increment();
        }
    }
}
| 2,665 |
799 | import json
import io
MOCKED_BASE_URL = 'https://api.test.com/api/'
def util_load_json(path):
    """Read the UTF-8 text file at ``path`` and parse its contents as JSON."""
    with io.open(path, mode='r', encoding='utf-8') as handle:
        raw_text = handle.read()
    return json.loads(raw_text)
def test_get_domain_state(requests_mock):
    """Client.get_domain_state should return the first result from the
    mocked /domainstate endpoint (fields the client reformats excluded)."""
    from Cyberpion import Client
    mock_response = util_load_json('test_data/domain_state.json')
    requests_mock.get(
        f'{MOCKED_BASE_URL}domainstate/?verbosity=details&domain=$anon100-2.com',
        json=mock_response)
    client = Client(
        base_url=MOCKED_BASE_URL,
        verify=False,
        headers={
            'Authentication': 'Bearer some_api_key'
        }
    )
    domain = '$anon100-2.com'
    response = client.get_domain_state(domain)
    # domain types and ips are reformatted in the function, skip checking them
    response.pop('domain_types')
    response.pop('ips')
    mocked = mock_response['results'][0]
    mocked.pop('domain_types')
    mocked.pop('ips')
    assert response == mocked
def test_get_domain_state_command(requests_mock):
    """The command wrapper should surface the client's domain state under
    outputs['DomainState'] with the expected prefix and key field."""
    from Cyberpion import Client, get_domain_state_command
    mock_response = util_load_json('test_data/domain_state.json')
    requests_mock.get(
        f'{MOCKED_BASE_URL}domainstate/?verbosity=details&domain=$anon100-2.com',
        json=mock_response)
    client = Client(
        base_url=f'{MOCKED_BASE_URL}',
        verify=False,
        headers={
            'Authentication': 'Bearer some_api_key'
        }
    )
    domain = '$anon100-2.com'
    response = get_domain_state_command(client, {'domain': domain})
    assert response.outputs['DomainState'] == client.get_domain_state(domain)
    assert response.outputs_prefix == 'Cyberpion'
    assert response.outputs_key_field == 'id'
def test_fetch_incidents(requests_mock):
    """Tests the fetch-incidents command function.
    Configures requests_mock instance to generate the appropriate
    get_alert API response, loaded from a local JSON file. Checks
    the output of the command function with the expected output.
    """
    from Cyberpion import Client, fetch_incidents
    mock_response = util_load_json('test_data/new_incidents.json')
    requests_mock.get(
        f'{MOCKED_BASE_URL}actionitems/?verbosity=details&page_size=2',
        json=mock_response)
    client = Client(
        base_url=f'{MOCKED_BASE_URL}',
        verify=False,
        headers={
            'Authentication': 'Bearer some_api_key'
        }
    )
    _, new_incidents = fetch_incidents(
        client,
        max_fetch=2,
        min_severity=0,
        show_only_active=True,
        alert_types=[],
    )
    # The expected incident name exceeds line-length limits, so it is split
    # into two parts and concatenated below.
    name_part_1 = "Fix DNS issues: Nameservers are not geo-separated,"
    name_part_2 = " Authoritative nameservers are not geo-separated - $anon100-4.com"
    assert new_incidents[0] == {
        "name": name_part_1 + name_part_2,
        "occurred": "2020-11-18T07:55:31.242711+00:00",
        "rawJSON": json.dumps(mock_response['results'][0]),
        "severity": 2
    }
    assert new_incidents[1] == {
        "name": "Fix PKI issue: Weak certificate issuer - $anon100-4.com",
        "occurred": "2020-11-19T14:27:05.811645+00:00",
        "rawJSON": json.dumps(mock_response['results'][1]),
        "severity": 3
    }
| 1,398 |
5,079 | import cgi
import unittest
from openid.consumer import consumer
from openid import message
from openid.test import support
class DummyEndpoint(object):
    """Minimal stand-in for a discovered OpenID endpoint used by these tests."""

    # Class-level defaults; individual tests overwrite them per instance.
    preferred_namespace = None
    local_id = None
    server_url = None
    is_op_identifier = False

    def preferredNamespace(self):
        """Return the namespace this endpoint prefers (may be None)."""
        namespace = self.preferred_namespace
        return namespace

    def getLocalID(self):
        """Return the OP-local identifier (may be None)."""
        identifier = self.local_id
        return identifier

    def isOPIdentifier(self):
        """Return whether this endpoint is an OP identifier."""
        flag = self.is_op_identifier
        return flag
class DummyAssoc(object):
    """Stub association exposing only the handle embedded in requests."""

    handle = "assoc-handle"
class TestAuthRequestMixin(support.OpenIDTestMixin):
    """Mixin for AuthRequest tests for OpenID 1 and 2; DON'T add
    unittest.TestCase as a base class here."""
    # Subclasses set the protocol namespace and checkid mode they exercise.
    preferred_namespace = None
    immediate = False
    expected_mode = 'checkid_setup'

    def setUp(self):
        # Build a dummy endpoint/association pair and the AuthRequest under test.
        self.endpoint = DummyEndpoint()
        self.endpoint.local_id = 'http://server.unittest/joe'
        self.endpoint.claimed_id = 'http://joe.vanity.example/'
        self.endpoint.server_url = 'http://server.unittest/'
        self.endpoint.preferred_namespace = self.preferred_namespace
        self.realm = 'http://example/'
        self.return_to = 'http://example/return/'
        self.assoc = DummyAssoc()
        self.authreq = consumer.AuthRequest(self.endpoint, self.assoc)

    def failUnlessAnonymous(self, msg):
        """Assert neither identifier key appears in the message."""
        for key in ['claimed_id', 'identity']:
            self.failIfOpenIDKeyExists(msg, key)

    def failUnlessHasRequiredFields(self, msg):
        """Assert namespace, mode, realm and identifiers are all as expected."""
        self.failUnlessEqual(self.preferred_namespace,
                             self.authreq.message.getOpenIDNamespace())
        self.failUnlessEqual(self.preferred_namespace,
                             msg.getOpenIDNamespace())
        self.failUnlessOpenIDValueEquals(msg, 'mode',
                                         self.expected_mode)
        # Implement these in subclasses because they depend on
        # protocol differences!
        self.failUnlessHasRealm(msg)
        self.failUnlessIdentifiersPresent(msg)

    # TESTS
    def test_checkNoAssocHandle(self):
        # Without an association, no assoc_handle may be sent.
        self.authreq.assoc = None
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failIfOpenIDKeyExists(msg, 'assoc_handle')

    def test_checkWithAssocHandle(self):
        # With an association, its handle must be embedded in the message.
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failUnlessOpenIDValueEquals(msg, 'assoc_handle',
                                         self.assoc.handle)

    def test_addExtensionArg(self):
        # Extension args registered on the request must appear in the
        # rendered message under an allocated namespace alias.
        self.authreq.addExtensionArg('bag:', 'color', 'brown')
        self.authreq.addExtensionArg('bag:', 'material', 'paper')
        self.failUnless('bag:' in self.authreq.message.namespaces)
        self.failUnlessEqual(self.authreq.message.getArgs('bag:'),
                             {'color': 'brown',
                              'material': 'paper'})
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        # XXX: this depends on the way that Message assigns
        # namespaces. Really it doesn't care that it has alias "0",
        # but that is tested anyway
        post_args = msg.toPostArgs()
        self.failUnlessEqual('brown', post_args['openid.ext0.color'])
        self.failUnlessEqual('paper', post_args['openid.ext0.material'])

    def test_standard(self):
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failUnlessHasIdentifiers(
            msg, self.endpoint.local_id, self.endpoint.claimed_id)
class TestAuthRequestOpenID2(TestAuthRequestMixin, unittest.TestCase):
    """OpenID 2 specialization: realm key, paired identifiers, anonymous mode."""
    preferred_namespace = message.OPENID2_NS

    def failUnlessHasRealm(self, msg):
        # check presence of proper realm key and absence of the wrong
        # one.
        self.failUnlessOpenIDValueEquals(msg, 'realm', self.realm)
        self.failIfOpenIDKeyExists(msg, 'trust_root')

    def failUnlessIdentifiersPresent(self, msg):
        # In OpenID 2 identity and claimed_id must appear together or not at all.
        identity_present = msg.hasKey(message.OPENID_NS, 'identity')
        claimed_present = msg.hasKey(message.OPENID_NS, 'claimed_id')
        self.failUnlessEqual(claimed_present, identity_present)

    def failUnlessHasIdentifiers(self, msg, op_specific_id, claimed_id):
        self.failUnlessOpenIDValueEquals(msg, 'identity', op_specific_id)
        self.failUnlessOpenIDValueEquals(msg, 'claimed_id', claimed_id)

    # TESTS
    def test_setAnonymousWorksForOpenID2(self):
        """OpenID AuthRequests should be able to set 'anonymous' to true."""
        self.failUnless(self.authreq.message.isOpenID2())
        self.authreq.setAnonymous(True)
        self.authreq.setAnonymous(False)

    def test_userAnonymousIgnoresIdentfier(self):
        # An anonymous request must omit identifiers entirely.
        self.authreq.setAnonymous(True)
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failUnlessHasRequiredFields(msg)
        self.failUnlessAnonymous(msg)

    def test_opAnonymousIgnoresIdentifier(self):
        # Anonymous wins even when the endpoint is an OP identifier.
        self.endpoint.is_op_identifier = True
        self.authreq.setAnonymous(True)
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failUnlessHasRequiredFields(msg)
        self.failUnlessAnonymous(msg)

    def test_opIdentifierSendsIdentifierSelect(self):
        # OP identifiers trigger identifier_select for both identifier keys.
        self.endpoint.is_op_identifier = True
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failUnlessHasRequiredFields(msg)
        self.failUnlessHasIdentifiers(
            msg, message.IDENTIFIER_SELECT, message.IDENTIFIER_SELECT)
class TestAuthRequestOpenID1(TestAuthRequestMixin, unittest.TestCase):
    """OpenID 1 specialization: trust_root key, identity only, no anonymous."""
    preferred_namespace = message.OPENID1_NS

    def setUpEndpoint(self):
        # NOTE(review): TestAuthRequestBase is not defined anywhere in this
        # module — calling this method raises NameError. It looks like dead
        # code left over from a renamed base class; confirm and remove/rename.
        TestAuthRequestBase.setUpEndpoint(self)
        self.endpoint.preferred_namespace = message.OPENID1_NS

    def failUnlessHasIdentifiers(self, msg, op_specific_id, claimed_id):
        """Make sure claimed_is is *absent* in request."""
        self.failUnlessOpenIDValueEquals(msg, 'identity', op_specific_id)
        self.failIfOpenIDKeyExists(msg, 'claimed_id')

    def failUnlessIdentifiersPresent(self, msg):
        # OpenID 1 sends identity only, never claimed_id.
        self.failIfOpenIDKeyExists(msg, 'claimed_id')
        self.failUnless(msg.hasKey(message.OPENID_NS, 'identity'))

    def failUnlessHasRealm(self, msg):
        # check presence of proper realm key and absence of the wrong
        # one.
        self.failUnlessOpenIDValueEquals(msg, 'trust_root', self.realm)
        self.failIfOpenIDKeyExists(msg, 'realm')

    # TESTS
    def test_setAnonymousFailsForOpenID1(self):
        """OpenID 1 requests MUST NOT be able to set anonymous to True"""
        self.failUnless(self.authreq.message.isOpenID1())
        self.failUnlessRaises(ValueError, self.authreq.setAnonymous, True)
        self.authreq.setAnonymous(False)

    def test_identifierSelect(self):
        """Identfier select SHOULD NOT be sent, but this pathway is in
        here in case some special discovery stuff is done to trigger
        it with OpenID 1. If it is triggered, it will send
        identifier_select just like OpenID 2.
        """
        self.endpoint.is_op_identifier = True
        msg = self.authreq.getMessage(self.realm, self.return_to,
                                      self.immediate)
        self.failUnlessHasRequiredFields(msg)
        self.failUnlessEqual(message.IDENTIFIER_SELECT,
                             msg.getArg(message.OPENID1_NS, 'identity'))
class TestAuthRequestOpenID1Immediate(TestAuthRequestOpenID1):
    """Re-runs the OpenID 1 suite in checkid_immediate mode."""
    immediate = True
    expected_mode = 'checkid_immediate'
class TestAuthRequestOpenID2Immediate(TestAuthRequestOpenID2):
    """Re-runs the OpenID 2 suite in checkid_immediate mode."""
    immediate = True
    expected_mode = 'checkid_immediate'
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| 3,464 |
30,023 | """Support for Hue binary sensors."""
from __future__ import annotations
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from .bridge import HueBridge
from .const import DOMAIN
from .v1.binary_sensor import async_setup_entry as setup_entry_v1
from .v2.binary_sensor import async_setup_entry as setup_entry_v2
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up binary sensor entities for the bridge's API version."""
    bridge: HueBridge = hass.data[DOMAIN][config_entry.entry_id]
    # Dispatch to the V1 or V2 implementation based on the bridge API version.
    setup = setup_entry_v1 if bridge.api_version == 1 else setup_entry_v2
    await setup(hass, config_entry, async_add_entities)
| 287 |
1,615 | <reponame>NSOiO/MLN
/**
* Created by MomoLuaNative.
* Copyright (c) 2020, Momo Group. All rights reserved.
*
* This source code is licensed under the MIT.
* For the full copyright and license information,please view the LICENSE file in the root directory of this source tree.
*/
package com.immomo.mmui.anim;
import android.util.LongSparseArray;
import com.immomo.mmui.anim.animations.MultiAnimation;
import com.immomo.mmui.anim.base.Animation;
import java.util.ArrayList;
import java.util.List;
public class Animator {
    /** Maps native animation pointers to their Java-side Animation objects. */
    private LongSparseArray<Animation> animationArray = new LongSparseArray<>();

    public static Animator getInstance() {
        return SingleTonHolder._INSTANCE;
    }

    /** Lazy-init singleton holder. */
    private static class SingleTonHolder {
        private static final Animator _INSTANCE = new Animator();
    }

    private Animator() {
        nativeInitCreateAnimator();
    }

    /** Creates the native peer for an animation and returns its pointer. */
    public long createNativeAnimation(Animation animation) {
        return Animator.getInstance().nativeCreateAnimation(animation.getAnimationName(), animation.getAnimationKey(null));
    }

    /** Registers the animation and starts it on the native side. */
    public void addAnimation(Animation animation) {
        animationArray.put(animation.getAnimationPointer(), animation);
        nativeAddAnimation(animation.getAnimationPointer());
    }

    public void removeAnimation(Animation animation) {
        removeAnimation(animation.getAnimationPointer());
    }

    private void removeAnimation(long animationPointer) {
        nativeRemoveAnimation(animationPointer);
    }

    /**
     * Removes every tracked animation (including MultiAnimation members)
     * whose target is the given object.
     *
     * @param target the animation target object
     */
    public void removeAnimationOfView(Object target) {
        List<Integer> temp = new ArrayList<>();
        for (int i = 0; i < animationArray.size(); i++) {
            Animation animation = animationArray.valueAt(i);
            if (null != animation && animation.getTarget() == target) {
                temp.add(i);
            }
            // A MultiAnimation matches if any of its sub-animations targets it.
            if (animation instanceof MultiAnimation) {
                List<Animation> subAnimations = ((MultiAnimation) animation).getAnimations();
                for (Animation a : subAnimations) {
                    if (a.getTarget() == target) {
                        temp.add(i);
                        break;
                    }
                }
            }
        }
        for (int index : temp) {
            Animation animation = animationArray.valueAt(index);
            if (animation != null)
                removeAnimation(animation.getAnimationPointer());
        }
    }

    static void onUpdateAnimation(long pointer) {
        // Only top-level animations receive value updates.
        Animator.getInstance().updateValueAnimation(pointer);
    }

    static void onAnimationRelRunStart(long pointer) {
        // Invoked for both sub-animations and multi-animations.
        Animator.getInstance().animationStart(pointer);
    }

    static void onAnimationFinish(long pointer, boolean finish) {
        // Invoked for both sub-animations and multi-animations.
        Animator.getInstance().animationFinish(pointer, finish);
    }

    static void onAnimationRepeat(long caller, long executor, int count) {
        // Invoked for both sub-animations and multi-animations.
        Animator.getInstance().animationRepeat(caller, executor ,count);
    }

    static void onAnimationPause(long pointer, boolean paused) {
        // Currently has no native callers.
        Animator.getInstance().animationPause(pointer, paused);
    }

    private void animationStart(long animationPointer) {
        Animation animation = animationArray.get(animationPointer);
        if (animation != null)
            animation.animationStart(animation);
    }

    // Animation groups do not report an overall repeatCount; only each
    // sub-Animation's repeatCount is reported.
    private void animationRepeat(long callerPointer, long executorPointer, int count) {
        Animation animation = animationArray.get(callerPointer);
        if (animation != null) {
            if (animation instanceof MultiAnimation) {
                Animation subAnimation = getSubAnimation((MultiAnimation) animation, executorPointer);
                if (subAnimation != null) {
                    animation.animationRepeat(subAnimation, count);
                } else {
                    animation.animationRepeat(animation, count);
                }
            } else {
                animation.animationRepeat(animation, count);
            }
        }
    }

    /** Finds the sub-animation of a MultiAnimation by its native pointer, or null. */
    private Animation getSubAnimation(MultiAnimation multiAnimation, long animationPointer) {
        Animation subAnimation = null;
        List<Animation> subAnimations = multiAnimation.getAnimations();
        if (subAnimations != null) {
            for (Animation animation : subAnimations) {
                if (animation.getAnimationPointer() == animationPointer) {
                    subAnimation = animation;
                    break;
                }
            }
        }
        return subAnimation;
    }

    private void animationPause(long animationPointer, boolean focusPaused) {
        Animation animation = animationArray.get(animationPointer);
        if (animation != null)
            animation.animationPaused(animation, focusPaused);
    }

    private void animationFinish(long animationPointer, boolean finished) {
        // NOTE(review): indexOfKey returns a negative value when the pointer is
        // not tracked, and valueAt/removeAt with a negative index would throw.
        // Presumably native only reports finish for tracked animations — confirm.
        int index = animationArray.indexOfKey(animationPointer);
        Animation animation = animationArray.valueAt(index);
        animationArray.removeAt(index);
        if (animation != null) {
            animation.animationFinish(animation, finished);
            removeAnimation(animationPointer);
        }
    }

    private void updateValueAnimation(long animationPointer) {
        Animation animation = animationArray.get(animationPointer);
        if (animation != null) {
            animation.onUpdateAnimation();
        }
    }

    // --- JNI bridge ---------------------------------------------------------
    private native void nativeInitCreateAnimator();
    private native void nativeAddAnimation(long animation);
    private native long nativeCreateAnimation(String animationName, String key);
    public native long[] nativeGetMultiAnimationRunningList(long animation);
    public native float[] nativeGetCurrentValues(long animation);
    public native void nativeSetObjectAnimationParams(long aniPoint, float[] f, float[] t, float[] fParams, boolean repeatForever, boolean autoReverse, int timingFunction);
    public native void nativeSetMultiAnimationParams(long aniPoint, long[] subAniPoints, boolean isRunTogether);
    public native void nativeSetMultiAnimationBeginTime(long aniPoint, float beginTime);
    public native void nativeSetMultiAnimationRepeatCount(long aniPoint, float repeatCount);
    public native void nativeSetMultiAnimationRepeatForever(long aniPoint, boolean repeatForever);
    public native void nativeSetMultiAnimationAutoReverse(long aniPoint, boolean autoReverse);
    public native void nativeSetSpringAnimationParams(long aniPoint, float[] f, float[] t, float[] currentVelocity, float[] fParams, boolean repeatForever, boolean autoReverse);
    private native void nativeRemoveAnimation(long aniPointer);
    public native void nativeAnimatorRelease();
    public native void nativePause(long animationP, boolean b);
}
852 | <gh_stars>100-1000
#include "DataFormats/ParticleFlowCandidate/interface/PFCandidate.h"
#include "FWCore/Framework/interface/stream/EDProducer.h"
#include "FWCore/Framework/interface/Event.h"
#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/ParameterSet/interface/ParameterSet.h"
namespace edm {
class EventSetup;
} // namespace edm
// EDProducer that deep-copies a PFCandidate collection so each candidate's
// vertex is stored by value rather than through an internal reference.
class PFConcretePFCandidateProducer : public edm::stream::EDProducer<> {
public:
  explicit PFConcretePFCandidateProducer(const edm::ParameterSet&);
  ~PFConcretePFCandidateProducer() override;
  void produce(edm::Event&, const edm::EventSetup&) override;

private:
  edm::InputTag inputColl_;  // label of the input PFCandidate collection
};
DEFINE_FWK_MODULE(PFConcretePFCandidateProducer);
// Reads the "src" input tag from the configuration and declares the output.
PFConcretePFCandidateProducer::PFConcretePFCandidateProducer(const edm::ParameterSet& iConfig) {
  inputColl_ = iConfig.getParameter<edm::InputTag>("src");
  // register products
  produces<reco::PFCandidateCollection>();
}

PFConcretePFCandidateProducer::~PFConcretePFCandidateProducer() {}
// Copies every input candidate into a fresh collection, re-setting the
// vertex by value so the copies no longer depend on the input's vertex ref.
void PFConcretePFCandidateProducer::produce(edm::Event& iEvent, const edm::EventSetup& iSetup) {
  edm::Handle<reco::PFCandidateCollection> inputColl;
  bool inputOk = iEvent.getByLabel(inputColl_, inputColl);

  if (!inputOk) {
    // nothing ... I guess we prefer to send an exception in the next lines
    // NOTE(review): the invalid Handle is dereferenced below, which is what
    // actually raises the error — confirm this is the intended failure mode.
  }

  auto outputColl = std::make_unique<reco::PFCandidateCollection>();
  outputColl->resize(inputColl->size());

  for (unsigned int iCopy = 0; iCopy != inputColl->size(); ++iCopy) {
    const reco::PFCandidate& pf = (*inputColl)[iCopy];
    (*outputColl)[iCopy] = pf;
    //dereferenced internally the ref and hardcopy the value
    (*outputColl)[iCopy].setVertex(pf.vertex());
    //math::XYZPoint(pf.vx(),pf.vy(),pf.vz()));
  }

  iEvent.put(std::move(outputColl));
}
| 633 |
2,296 | <filename>firmware/src/stm32f0xx_it.c
/*
Copyright 2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include "main.h"
#ifdef __cplusplus
extern "C" {
#endif
void NMI_Handler(void);
void HardFault_Handler(void);
void SVC_Handler(void);
void PendSV_Handler(void);
void SysTick_Handler(void);
#ifdef __cplusplus
}
#endif
// Handles NMI exceptions.
// Handles NMI exceptions. Intentionally empty: the NMI is acknowledged
// by returning from the handler.
void NMI_Handler(void) {
}
// Handles Hard Fault exceptions.
// Handles Hard Fault exceptions by halting: the fault state is preserved
// for a debugger to inspect.
void HardFault_Handler(void) {
  /* Go to infinite loop when Hard Fault exception occurs */
  while (1) {
  }
}
// Handles SVCall exceptions.
// Handles SVCall exceptions. Unused by this firmware; empty stub.
void SVC_Handler(void) {
}
// Handles PendSVC exceptions.
// Handles PendSVC exceptions. Unused by this firmware; empty stub.
void PendSV_Handler(void) {
}
// Handles SysTick events.
// Handles SysTick events: advances the HAL millisecond tick counter used
// by HAL timeouts and delays.
void SysTick_Handler(void) {
  HAL_IncTick();
}
| 400 |
2,144 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pinot.sql.parsers.dml;
import org.apache.pinot.common.utils.DataSchema;
import org.apache.pinot.spi.config.task.AdhocTaskConfig;
import org.apache.pinot.sql.parsers.CalciteSqlParser;
import org.testng.Assert;
import org.testng.annotations.Test;
public class InsertIntoFileTest {

  @Test
  public void testInsertIntoStatementParser()
      throws Exception {
    String sql = "INSERT INTO \"baseballStats\"\n"
        + "FROM FILE 's3://my-bucket/path/to/data/'\n"
        + "OPTION(taskName=myTask-1)\n"
        + "OPTION(input.fs.className=org.apache.pinot.plugin.filesystem.S3PinotFS)\n"
        + "OPTION(input.fs.prop.accessKey=my-access-key)\n"
        + "OPTION(input.fs.prop.secretKey=my-secret-key)\n"
        + "OPTION(input.fs.prop.region=us-west-2)";
    InsertIntoFile statement = InsertIntoFile.parse(CalciteSqlParser.compileToSqlNodeAndOptions(sql));

    // Statement-level properties.
    Assert.assertEquals(statement.getTable(), "baseballStats");
    Assert.assertEquals(statement.getExecutionType(), DataManipulationStatement.ExecutionType.MINION);
    Assert.assertEquals(statement.getResultSchema(), new DataSchema(new String[]{"tableName", "taskJobName"},
        new DataSchema.ColumnDataType[]{DataSchema.ColumnDataType.STRING, DataSchema.ColumnDataType.STRING}));

    // Every OPTION(...) plus the FROM FILE URI must surface as a query option.
    Assert.assertEquals(statement.getQueryOptions().size(), 6);
    assertQueryOption(statement, "taskName", "myTask-1");
    assertQueryOption(statement, "input.fs.className", "org.apache.pinot.plugin.filesystem.S3PinotFS");
    assertQueryOption(statement, "input.fs.prop.accessKey", "my-access-key");
    assertQueryOption(statement, "input.fs.prop.secretKey", "my-secret-key");
    assertQueryOption(statement, "input.fs.prop.region", "us-west-2");
    assertQueryOption(statement, "inputDirURI", "s3://my-bucket/path/to/data/");

    // The generated ad-hoc Minion task must carry the same configuration.
    AdhocTaskConfig taskConfig = statement.generateAdhocTaskConfig();
    Assert.assertEquals(taskConfig.getTaskType(), "SegmentGenerationAndPushTask");
    Assert.assertEquals(taskConfig.getTaskName(), "myTask-1");
    Assert.assertEquals(taskConfig.getTableName(), "baseballStats");
    Assert.assertEquals(taskConfig.getTaskConfigs().size(), 6);
    assertTaskConfig(taskConfig, "taskName", "myTask-1");
    assertTaskConfig(taskConfig, "input.fs.className", "org.apache.pinot.plugin.filesystem.S3PinotFS");
    assertTaskConfig(taskConfig, "input.fs.prop.accessKey", "my-access-key");
    assertTaskConfig(taskConfig, "input.fs.prop.secretKey", "my-secret-key");
    assertTaskConfig(taskConfig, "input.fs.prop.region", "us-west-2");
    assertTaskConfig(taskConfig, "inputDirURI", "s3://my-bucket/path/to/data/");
  }

  /** Assert a single parsed query option of the INSERT INTO statement. */
  private static void assertQueryOption(InsertIntoFile statement, String key, String expected) {
    Assert.assertEquals(statement.getQueryOptions().get(key), expected);
  }

  /** Assert a single task-config entry of the generated ad-hoc task. */
  private static void assertTaskConfig(AdhocTaskConfig taskConfig, String key, String expected) {
    Assert.assertEquals(taskConfig.getTaskConfigs().get(key), expected);
  }
}
| 1,367 |
330 | /* Made by https://github.com/Mangeshrex */
/* Color palette (hex RGB strings).
 * NOTE(review): these identifiers are presumably referenced by color arrays
 * elsewhere in the configuration -- verify before renaming or removing any. */
static const char black[] = "#181f21";
static const char gray2[] = "#22292b"; // unfocused window border
static const char gray3[] = "#3b4244";
static const char gray4[] = "#67b0e8";
static const char blue[] = "#6cb5ed"; // focused window border
static const char green[] = "#8ccf7e";
static const char red[] = "#e06e6e";
static const char orange[] = "#e59e67";
static const char yellow[] = "#e5c76b";
static const char pink[] = "#c47fd5";
static const char col_borderbar[] = "#181f21"; // inner border
| 242 |
816 | <reponame>LouisRenWeiWei/JXADF
package com.jxtech.jbo.base;
import java.util.List;
import org.apache.commons.dbutils.handlers.BeanListHandler;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.jxtech.db.DBFactory;
import com.jxtech.db.DataQuery;
import com.jxtech.jbo.App;
import com.jxtech.jbo.auth.JxSession;
import com.jxtech.jbo.util.JxException;
import com.jxtech.util.CacheUtil;
import com.jxtech.util.StrUtil;
/**
* 处理Maxapps表
*
* @author <EMAIL>
* @date 2013.08
*/
public class JxAppsDao {
    private static final Logger LOG = LoggerFactory.getLogger(JxAppsDao.class);
    public static final String CACHE_PREX = "APP.";

    /**
     * Look up the application record for the given app name, falling back to
     * the app bound to the current session when {@code appName} is empty.
     *
     * @param appName application name (matched case-insensitively); may be null
     * @return the matching JxApps record, or null when none is found
     * @throws JxException on data-access failure
     */
    public static JxApps getApp(String appName) throws JxException {
        if (StrUtil.isNull(appName)) {
            App app = JxSession.getApp();
            if (app != null) {
                appName = app.getAppName();
            }
        }
        if (StrUtil.isNull(appName)) {
            LOG.info("app name is null.");
            return null;
        }
        return query(appName);
    }

    /**
     * Query an application by its name (APP column), consulting the cache
     * first. Logs a configuration hint when the app is missing from MAXAPPS.
     */
    private static JxApps query(String appName) throws JxException {
        if (StrUtil.isNull(appName)) {
            return null;
        }
        JxApps app = queryOne(
                StrUtil.contact(CACHE_PREX, appName.toUpperCase()),
                "Select * From MAXAPPS where app = ?",
                appName.toUpperCase());
        if (app == null) {
            LOG.info("app[" + appName + "] is null,请在Maxapps中进行配置,谢谢。");
        }
        return app;
    }

    /**
     * Query an application by its main table name (MAINTBNAME column),
     * consulting the cache first.
     *
     * @param tablename main table name (matched case-insensitively)
     * @return the matching JxApps record, or null when none is found
     * @throws JxException on data-access failure
     */
    public static JxApps getAppByMaintbname(String tablename) throws JxException {
        if (StrUtil.isNull(tablename)) {
            return null;
        }
        return queryOne(
                StrUtil.contact(CACHE_PREX, "t.", tablename.toUpperCase()),
                "Select * From MAXAPPS where maintbname = ?",
                tablename.toUpperCase());
    }

    /**
     * Shared cache-then-database lookup (previously duplicated in query() and
     * getAppByMaintbname()). Returns the cached record if present; otherwise
     * runs the single-parameter query, caches the first row, and returns it.
     */
    private static JxApps queryOne(String cachekey, String msql, String param) throws JxException {
        Object obj = CacheUtil.getBase(cachekey);
        if (obj instanceof JxApps) {
            return (JxApps) obj;
        }
        DataQuery dq = DBFactory.getDataQuery(null, null);
        List<JxApps> list = dq.getResult(new BeanListHandler<JxApps>(JxApps.class), msql, new Object[] { param });
        if (list != null && !list.isEmpty()) {
            JxApps app = list.get(0);
            CacheUtil.putBaseCache(cachekey, app);
            return app;
        }
        return null;
    }
}
| 1,432 |
335 | {
"word": "Expedition",
"definitions": [
"A journey undertaken by a group of people with a particular purpose, especially that of exploration, research, or war.",
"The people involved in an expedition.",
"A short trip made for a particular purpose.",
"Promptness or speed in doing something."
],
"parts-of-speech": "Noun"
} | 128 |
451 | from math import pi, sqrt
from ipywidgets import register
from .._base.Three import ThreeWidget
from .OrbitControls_autogen import OrbitControls as OrbitControlsBase
@register
class OrbitControls(OrbitControlsBase):
    # Python-side extension of the autogenerated OrbitControls widget.

    def reset(self):
        """Reset the controlled object to its initial state.

        Forwards the call to the underlying three.js object via
        ``exec_three_obj_method``.
        """
        self.exec_three_obj_method('reset')
330 | <reponame>manjunathnilugal/PyBaMM
#
# Tests for Ai (2020) Enertech parameter set loads
#
import pybamm
import unittest
import os
class TestAi2020(unittest.TestCase):
    """Tests that the Ai (2020) Enertech parameter set loads and evaluates."""

    def test_load_params(self):
        # Spot-check one representative entry from each component's CSV file.
        negative_electrode = pybamm.ParameterValues({}).read_parameters_csv(
            pybamm.get_parameters_filepath(
                "input/parameters/lithium_ion/negative_electrodes/graphite_Ai2020/"
                "parameters.csv"
            )
        )
        self.assertEqual(negative_electrode["Negative electrode porosity"], "0.33")
        positive_electrode = pybamm.ParameterValues({}).read_parameters_csv(
            pybamm.get_parameters_filepath(
                "input/parameters/lithium_ion/positive_electrodes/lico2_Ai2020/"
                "parameters.csv"
            )
        )
        self.assertEqual(positive_electrode["Positive electrode porosity"], "0.32")
        electrolyte = pybamm.ParameterValues({}).read_parameters_csv(
            pybamm.get_parameters_filepath(
                "input/parameters/lithium_ion/electrolytes/lipf6_Enertech_Ai2020/"
                + "parameters.csv"
            )
        )
        self.assertEqual(electrolyte["Cation transference number"], "0.38")
        cell = pybamm.ParameterValues({}).read_parameters_csv(
            pybamm.get_parameters_filepath(
                "input/parameters/lithium_ion/cells/Enertech_Ai2020/parameters.csv"
            )
        )
        self.assertAlmostEqual(cell["Negative current collector thickness [m]"], 10e-6)

    def test_functions(self):
        # Evaluate each parameter function at a reference stoichiometry (0.5)
        # and temperature (298.15 K) and compare against precomputed values.
        root = pybamm.root_dir()
        param = pybamm.ParameterValues("Ai2020")
        sto = pybamm.Scalar(0.5)
        T = pybamm.Scalar(298.15)
        # Positive electrode
        p = "pybamm/input/parameters/lithium_ion/positive_electrodes/lico2_Ai2020/"
        k_path = os.path.join(root, p)
        # Maps function-file name -> (argument list, expected value).
        # NOTE(review): some keys carry a ".py" suffix and some do not --
        # pybamm.load_function apparently accepts both; confirm.
        fun_test = {
            "lico2_cracking_rate_Ai2020": ([T], 3.9e-20),
            "lico2_diffusivity_Dualfoil1998": ([sto, T], 5.387e-15),
            "lico2_electrolyte_exchange_current_density_Dualfoil1998": (
                [1e3, 1e4, T],
                0.6098,
            ),
            "lico2_entropic_change_Ai2020_function": ([sto], -2.1373e-4),
            "lico2_ocp_Ai2020_function.py": ([sto], 4.1638),
            "lico2_volume_change_Ai2020": ([sto], -1.8179e-2),
        }
        for name, value in fun_test.items():
            fun = pybamm.load_function(os.path.join(k_path, name))
            self.assertAlmostEqual(param.evaluate(fun(*value[0])), value[1], places=4)
        # Negative electrode
        p = "pybamm/input/parameters/lithium_ion/negative_electrodes/graphite_Ai2020/"
        k_path = os.path.join(root, p)
        fun_test = {
            "graphite_cracking_rate_Ai2020.py": ([T], 3.9e-20),
            "graphite_diffusivity_Dualfoil1998.py": ([sto, T], 3.9e-14),
            "graphite_electrolyte_exchange_current_density_Dualfoil1998.py": (
                [1e3, 1e4, T],
                0.4172,
            ),
            "graphite_entropy_Enertech_Ai2020_function.py": ([sto], -1.1033e-4),
            "graphite_ocp_Enertech_Ai2020_function.py": ([sto], 0.1395),
            "graphite_volume_change_Ai2020.py": ([sto], 5.1921e-2),
        }
        for name, value in fun_test.items():
            fun = pybamm.load_function(os.path.join(k_path, name))
            self.assertAlmostEqual(param.evaluate(fun(*value[0])), value[1], places=4)

    def test_standard_lithium_parameters(self):
        """The parameter set must support a full DFN build with cracking."""
        parameter_values = pybamm.ParameterValues("Ai2020")
        options = {"particle mechanics": "swelling and cracking"}
        model = pybamm.lithium_ion.DFN(options)
        sim = pybamm.Simulation(model, parameter_values=parameter_values)
        sim.set_parameters()
        sim.build()
# Script entry point: run the tests, optionally with pybamm debug mode.
if __name__ == "__main__":
    print("Add -v for more debug output")
    import sys

    if "-v" in sys.argv:
        debug = True  # NOTE(review): module-level flag; appears unused here -- confirm
        pybamm.settings.debug_mode = True
    unittest.main()
| 2,006 |
12,278 | <gh_stars>1000+
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html
/*
*******************************************************************************
*
* Copyright (C) 2003-2007, International Business Machines
* Corporation and others. All Rights Reserved.
*
*******************************************************************************
* file name: ucnv_set.c
* encoding: UTF-8
* tab size: 8 (not used)
* indentation:4
*
* created on: 2004sep07
* created by: <NAME>
*
* Conversion API functions using USet (ucnv_getUnicodeSet())
* moved here from ucnv.c for removing the dependency of other ucnv_
* implementation functions on the USet implementation.
*/
#include "unicode/utypes.h"
#include "unicode/uset.h"
#include "unicode/ucnv.h"
#include "ucnv_bld.h"
#include "uset_imp.h"
#if !UCONFIG_NO_CONVERSION
/*
 * Fill setFillIn with the set of code points supported by the converter,
 * by delegating to the converter implementation's getUnicodeSet() method.
 */
U_CAPI void U_EXPORT2
ucnv_getUnicodeSet(const UConverter *cnv,
                   USet *setFillIn,
                   UConverterUnicodeSet whichSet,
                   UErrorCode *pErrorCode) {
    /* argument checking */
    if(pErrorCode==NULL || U_FAILURE(*pErrorCode)) {
        return;
    }
    if(cnv==NULL || setFillIn==NULL || whichSet<UCNV_ROUNDTRIP_SET || UCNV_SET_COUNT<=whichSet) {
        *pErrorCode=U_ILLEGAL_ARGUMENT_ERROR;
        return;
    }
    /* does this converter support this function? */
    if(cnv->sharedData->impl->getUnicodeSet==NULL) {
        *pErrorCode=U_UNSUPPORTED_ERROR;
        return;
    }
    {
        /* Bundle of USet function pointers handed to the implementation.
         * Initialized positionally -- do not reorder. The target set is
         * assigned below rather than in the initializer, presumably so the
         * aggregate initializer stays constant for old C compilers; confirm
         * before folding it in. */
        USetAdder sa={
            NULL,
            uset_add,
            uset_addRange,
            uset_addString,
            uset_remove,
            uset_removeRange
        };
        sa.set=setFillIn;
        /* empty the set */
        uset_clear(setFillIn);
        /* call the converter to add the code points it supports */
        cnv->sharedData->impl->getUnicodeSet(cnv, &sa, whichSet, pErrorCode);
    }
}
#endif
| 848 |
602 | /*
* Copyright (C) 2011 <NAME> (http://www.cyrilmottier.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.cyrilmottier.android.gdcatalog;
import greendroid.app.GDListActivity;
import greendroid.image.ChainImageProcessor;
import greendroid.image.ImageProcessor;
import greendroid.image.MaskImageProcessor;
import greendroid.image.ScaleImageProcessor;
import greendroid.widget.AsyncImageView;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Path;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.AbsListView.OnScrollListener;
import android.widget.BaseAdapter;
import android.widget.ImageView.ScaleType;
import android.widget.TextView;
/** Demo list activity: 100 rows, each loading a remote image asynchronously. */
public class AsyncImageViewListActivity extends GDListActivity implements OnScrollListener {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setListAdapter(new MyAdapter(this));
        // Listen for scroll-state changes so image loading can be paused
        // while the list is flinging (see onScrollStateChanged).
        getListView().setOnScrollListener(this);
    }

    private static class MyAdapter extends BaseAdapter {

        // Image URLs are BASE_URL_PREFIX + position + BASE_URL_SUFFIX.
        private static final String BASE_URL_PREFIX = "http://www.cyrilmottier.com/files/greendroid/images/image";
        private static final String BASE_URL_SUFFIX = ".png";
        // NOTE(review): shared static builder, reused across getView() calls --
        // safe only while called from a single (UI) thread; confirm.
        private static final StringBuilder BUILDER = new StringBuilder();

        private final String mImageForPosition;

        // Per-row view cache used with convertView recycling.
        static class ViewHolder {
            public AsyncImageView imageView;
            public TextView textView;
            public StringBuilder textBuilder = new StringBuilder();
        }

        private LayoutInflater mInflater;
        private ImageProcessor mImageProcessor;

        public MyAdapter(Context context) {
            mInflater = LayoutInflater.from(context);
            mImageForPosition = context.getString(R.string.image_for_position);
            prepareImageProcessor(context);
        }

        // Randomly picks one of two thumbnail treatments: rounded corners
        // (mask by radius) or an octagonal mask drawn into a bitmap.
        private void prepareImageProcessor(Context context) {
            final int thumbnailSize = context.getResources().getDimensionPixelSize(R.dimen.thumbnail_size);
            final int thumbnailRadius = context.getResources().getDimensionPixelSize(R.dimen.thumbnail_radius);
            if (Math.random() >= 0.5f) {
                //@formatter:off
                mImageProcessor = new ChainImageProcessor(
                        new ScaleImageProcessor(thumbnailSize, thumbnailSize, ScaleType.FIT_XY),
                        new MaskImageProcessor(thumbnailRadius));
                //@formatter:on
            } else {
                // Build an octagon path (corners cut by thumbnailRadius) ...
                Path path = new Path();
                path.moveTo(thumbnailRadius, 0);
                path.lineTo(thumbnailSize - thumbnailRadius, 0);
                path.lineTo(thumbnailSize, thumbnailRadius);
                path.lineTo(thumbnailSize, thumbnailSize - thumbnailRadius);
                path.lineTo(thumbnailSize - thumbnailRadius, thumbnailSize);
                path.lineTo(thumbnailRadius, thumbnailSize);
                path.lineTo(0, thumbnailSize - thumbnailRadius);
                path.lineTo(0, thumbnailRadius);
                path.close();
                // ... and rasterize it into a mask bitmap.
                Bitmap mask = Bitmap.createBitmap(thumbnailSize, thumbnailSize, Config.ARGB_8888);
                Canvas canvas = new Canvas(mask);
                Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
                paint.setStyle(Style.FILL_AND_STROKE);
                paint.setColor(Color.RED);
                canvas.drawPath(path, paint);
                //@formatter:off
                mImageProcessor = new ChainImageProcessor(
                        new ScaleImageProcessor(thumbnailSize, thumbnailSize, ScaleType.FIT_XY),
                        new MaskImageProcessor(mask));
                //@formatter:on
            }
        }

        public int getCount() {
            return 100;
        }

        // Demo adapter: rows are synthesized from the position, no backing data.
        public Object getItem(int position) {
            return null;
        }

        public long getItemId(int position) {
            return position;
        }

        public View getView(int position, View convertView, ViewGroup parent) {
            ViewHolder holder;
            if (convertView == null) {
                convertView = mInflater.inflate(R.layout.image_item_view, parent, false);
                holder = new ViewHolder();
                holder.imageView = (AsyncImageView) convertView.findViewById(R.id.async_image);
                holder.imageView.setImageProcessor(mImageProcessor);
                holder.textView = (TextView) convertView.findViewById(R.id.text);
                convertView.setTag(holder);
            } else {
                holder = (ViewHolder) convertView.getTag();
            }
            // Reuse the shared builder to compose the image URL for this row.
            BUILDER.setLength(0);
            BUILDER.append(BASE_URL_PREFIX);
            BUILDER.append(position);
            BUILDER.append(BASE_URL_SUFFIX);
            holder.imageView.setUrl(BUILDER.toString());
            final StringBuilder textBuilder = holder.textBuilder;
            textBuilder.setLength(0);
            textBuilder.append(mImageForPosition);
            textBuilder.append(position);
            holder.textView.setText(textBuilder);
            return convertView;
        }
    }

    public void onScroll(AbsListView arg0, int arg1, int arg2, int arg3) {
    }

    public void onScrollStateChanged(AbsListView listView, int scrollState) {
        // Pause image loading while flinging; resume otherwise.
        if (getListView() == listView) {
            searchAsyncImageViews(listView, scrollState == OnScrollListener.SCROLL_STATE_FLING);
        }
    }

    // Toggle the paused state of every visible AsyncImageView child.
    private void searchAsyncImageViews(ViewGroup viewGroup, boolean pause) {
        final int childCount = viewGroup.getChildCount();
        for (int i = 0; i < childCount; i++) {
            AsyncImageView image = (AsyncImageView) viewGroup.getChildAt(i).findViewById(R.id.async_image);
            if (image != null) {
                image.setPaused(pause);
            }
        }
    }
}
| 2,929 |
836 | <reponame>danpbowen/Mallet<filename>src/cc/mallet/fst/HMM.java
/* Copyright (C) 2002 Univ. of Massachusetts Amherst, Computer Science Dept.
This file is part of "MALLET" (MAchine Learning for LanguagE Toolkit).
http://www.cs.umass.edu/~mccallum/mallet
This software is provided under the terms of the Common Public License,
version 1.0, as published by http://www.opensource.org. For further
information, see the file `LICENSE' included with this distribution. */
/**
@author <NAME> <a href="mailto:<EMAIL>"><EMAIL></a>
@author <NAME> <a href="mailto:<EMAIL>"><EMAIL></a>
*/
package cc.mallet.fst;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Random;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import com.google.errorprone.annotations.Var;
import cc.mallet.pipe.Pipe;
import cc.mallet.types.Alphabet;
import cc.mallet.types.FeatureSequence;
import cc.mallet.types.Instance;
import cc.mallet.types.InstanceList;
import cc.mallet.types.Multinomial;
import cc.mallet.types.Sequence;
import cc.mallet.util.MalletLogger;
/** A Hidden Markov Model. */
public class HMM extends Transducer implements Serializable {
private static Logger logger = MalletLogger.getLogger(HMM.class.getName());
static final String LABEL_SEPARATOR = ",";
Alphabet inputAlphabet;
Alphabet outputAlphabet;
ArrayList<State> states = new ArrayList<State>();
ArrayList<State> initialStates = new ArrayList<State>();
HashMap<String, State> name2state = new HashMap<String, State>();
Multinomial.Estimator[] transitionEstimator;
Multinomial.Estimator[] emissionEstimator;
Multinomial.Estimator initialEstimator;
Multinomial[] transitionMultinomial;
Multinomial[] emissionMultinomial;
Multinomial initialMultinomial;
/**
 * Construct an HMM whose alphabets come from the given pipes.
 * Note: both the input and output alphabets are taken from the INPUT pipe
 * (data and target alphabets respectively).
 */
public HMM(Pipe inputPipe, Pipe outputPipe) {
    this.inputPipe = inputPipe;
    this.outputPipe = outputPipe;
    this.inputAlphabet = inputPipe.getDataAlphabet();
    this.outputAlphabet = inputPipe.getTargetAlphabet();
}
/**
 * Construct an HMM from explicit alphabets. The input alphabet is frozen
 * (stopGrowth), presumably to keep feature indices stable -- confirm.
 */
public HMM(Alphabet inputAlphabet, Alphabet outputAlphabet) {
    inputAlphabet.stopGrowth();
    logger.info("HMM input dictionary size = " + inputAlphabet.size());
    this.inputAlphabet = inputAlphabet;
    this.outputAlphabet = outputAlphabet;
}
/** Returns the input (feature) alphabet. */
public Alphabet getInputAlphabet() {
    return inputAlphabet;
}
/** Returns the output (label) alphabet. */
public Alphabet getOutputAlphabet() {
    return outputAlphabet;
}
/** Returns the per-state transition distributions (may be null before training). */
public Multinomial[] getTransitionMultinomial() {
    return transitionMultinomial;
}
/** Returns the per-state emission distributions (may be null before training). */
public Multinomial[] getEmissionMultinomial() {
    return emissionMultinomial;
}
/** Returns the initial-state distribution (may be null before training). */
public Multinomial getInitialMultinomial() {
    return initialMultinomial;
}
/**
 * Dump a human-readable description of every state -- name, transition
 * count, initial/final weights, emission and transition distributions --
 * to stdout.
 * NOTE(review): dereferences emissionMultinomial/transitionMultinomial,
 * which appear to be populated only by training -- confirm before calling
 * on an untrained model.
 */
public void print() {
    StringBuffer sb = new StringBuffer();
    for (int i = 0; i < numStates(); i++) {
        State s = (State) getState(i);
        sb.append("STATE NAME=\"");
        sb.append(s.name);
        sb.append("\" (");
        sb.append(s.destinations.length);
        sb.append(" outgoing transitions)\n");
        sb.append("  ");
        sb.append("initialWeight= ");
        sb.append(s.initialWeight);
        sb.append('\n');
        sb.append("  ");
        sb.append("finalWeight= ");
        sb.append(s.finalWeight);
        sb.append('\n');
        sb.append("Emission distribution:\n" + emissionMultinomial[i]
                + "\n\n");
        sb.append("Transition distribution:\n"
                + transitionMultinomial[i].toString());
    }
    System.out.println(sb.toString());
}
/**
 * Add a named state with the given initial/final weights; out-going
 * transition i goes to destinationNames[i] and carries label labelNames[i]
 * (the two arrays must be parallel).
 *
 * @throws IllegalArgumentException if a state with this name already exists
 */
public void addState(String name, double initialWeight, double finalWeight,
        String[] destinationNames, String[] labelNames) {
    assert (labelNames.length == destinationNames.length);
    if (name2state.get(name) != null)
        throw new IllegalArgumentException("State with name `" + name
                + "' already exists.");
    State s = new State(name, states.size(), initialWeight, finalWeight,
            destinationNames, labelNames, this);
    // NOTE(review): prints the state on every add -- looks like leftover
    // debug output; confirm before removing.
    s.print();
    states.add(s);
    // Only states above IMPOSSIBLE_WEIGHT can start a sequence.
    if (initialWeight > IMPOSSIBLE_WEIGHT)
        initialStates.add(s);
    name2state.put(name, s);
}
/**
 * Add a state with parameters equal zero, and labels on out-going arcs the
 * same name as their destination state names.
 */
public void addState(String name, String[] destinationNames) {
    this.addState(name, 0, 0, destinationNames, destinationNames);
}
/**
 * Add a group of states that are fully connected with each other, with
 * parameters equal zero, and labels on their out-going arcs the same name
 * as their destination state names.
 */
public void addFullyConnectedStates(String[] stateNames) {
    for (String stateName : stateNames)
        addState(stateName, stateNames);
}
/**
 * Add one fully-connected state per entry in the output alphabet.
 * Entries in the output alphabet are assumed to be Strings.
 */
public void addFullyConnectedStatesForLabels() {
    int numLabels = outputAlphabet.size();
    String[] labels = new String[numLabels];
    for (int li = 0; li < numLabels; li++)
        labels[li] = (String) outputAlphabet.lookupObject(li);
    addFullyConnectedStates(labels);
}
/**
 * Compute which label-to-label transitions actually occur in trainingSet:
 * connections[i][j] is true iff label j immediately follows label i
 * somewhere in the target sequences.
 */
private boolean[][] labelConnectionsIn(InstanceList trainingSet) {
    int numLabels = outputAlphabet.size();
    boolean[][] connections = new boolean[numLabels][numLabels];
    for (Instance instance : trainingSet) {
        FeatureSequence output = (FeatureSequence) instance.getTarget();
        for (int j = 1; j < output.size(); j++) {
            int sourceIndex = outputAlphabet.lookupIndex(output.get(j - 1));
            int destIndex = outputAlphabet.lookupIndex(output.get(j));
            assert (sourceIndex >= 0 && destIndex >= 0);
            connections[sourceIndex][destIndex] = true;
        }
    }
    return connections;
}
/**
 * Add states to create a first-order Markov model on labels, adding only
 * those transitions the occur in the given trainingSet.
 */
public void addStatesForLabelsConnectedAsIn(InstanceList trainingSet) {
    int numLabels = outputAlphabet.size();
    boolean[][] connections = labelConnectionsIn(trainingSet);
    for (int i = 0; i < numLabels; i++) {
        // First count the out-degree of label i ...
        @Var
        int numDestinations = 0;
        for (int j = 0; j < numLabels; j++)
            if (connections[i][j])
                numDestinations++;
        // ... then collect the destination names for its observed successors.
        String[] destinationNames = new String[numDestinations];
        @Var
        int destinationIndex = 0;
        for (int j = 0; j < numLabels; j++)
            if (connections[i][j])
                destinationNames[destinationIndex++] = (String) outputAlphabet
                        .lookupObject(j);
        addState((String) outputAlphabet.lookupObject(i), destinationNames);
    }
}
/**
 * Add as many states as there are labels, but don't create separate weights
 * for each source-destination pair of states. Instead have all the incoming
 * transitions to a state share the same weights.
 */
public void addStatesForHalfLabelsConnectedAsIn(InstanceList trainingSet) {
    int numLabels = outputAlphabet.size();
    boolean[][] connections = labelConnectionsIn(trainingSet);
    for (int i = 0; i < numLabels; i++) {
        // Count the observed successors of label i ...
        @Var
        int numDestinations = 0;
        for (int j = 0; j < numLabels; j++)
            if (connections[i][j])
                numDestinations++;
        // ... and gather their names.
        String[] destinationNames = new String[numDestinations];
        @Var
        int destinationIndex = 0;
        for (int j = 0; j < numLabels; j++)
            if (connections[i][j])
                destinationNames[destinationIndex++] = (String) outputAlphabet
                        .lookupObject(j);
        addState((String) outputAlphabet.lookupObject(i), 0.0, 0.0,
                destinationNames, destinationNames);
    }
}
/**
 * Add as many states as there are labels, but don't create separate
 * observational-test-weights for each source-destination pair of
 * states---instead have all the incoming transitions to a state share the
 * same observational-feature-test weights. However, do create separate
 * default feature for each transition, (which acts as an HMM-style
 * transition probability).
 */
public void addStatesForThreeQuarterLabelsConnectedAsIn(
        InstanceList trainingSet) {
    int numLabels = outputAlphabet.size();
    boolean[][] connections = labelConnectionsIn(trainingSet);
    for (int i = 0; i < numLabels; i++) {
        // Count observed successors of label i.
        @Var
        int numDestinations = 0;
        for (int j = 0; j < numLabels; j++)
            if (connections[i][j])
                numDestinations++;
        String[] destinationNames = new String[numDestinations];
        @Var
        int destinationIndex = 0;
        for (int j = 0; j < numLabels; j++)
            if (connections[i][j]) {
                String labelName = (String) outputAlphabet.lookupObject(j);
                destinationNames[destinationIndex] = labelName;
                // The "transition" weights will include only the default
                // feature
                // gsc: variable is never used
                // String wn = (String)outputAlphabet.lookupObject(i) + "->"
                // + (String)outputAlphabet.lookupObject(j);
                destinationIndex++;
            }
        addState((String) outputAlphabet.lookupObject(i), 0.0, 0.0,
                destinationNames, destinationNames);
    }
}
/**
 * Fully-connected variant of the three-quarter-labels construction: one
 * state per label, each connected to every label (trainingSet is unused
 * for connectivity here).
 */
public void addFullyConnectedStatesForThreeQuarterLabels(
        InstanceList trainingSet) {
    int numLabels = outputAlphabet.size();
    for (int src = 0; src < numLabels; src++) {
        String[] destinationNames = new String[numLabels];
        for (int dst = 0; dst < numLabels; dst++)
            destinationNames[dst] = (String) outputAlphabet.lookupObject(dst);
        addState((String) outputAlphabet.lookupObject(src), 0.0, 0.0,
                destinationNames, destinationNames);
    }
}
/**
 * Add one state per ordered pair of labels, fully connected: a
 * second-order (bi-label) Markov model over the output alphabet.
 */
public void addFullyConnectedStatesForBiLabels() {
    String[] labels = new String[outputAlphabet.size()];
    // This is assuming the the entries in the outputAlphabet are Strings!
    for (int i = 0; i < outputAlphabet.size(); i++) {
        labels[i] = outputAlphabet.lookupObject(i).toString();
    }
    for (int i = 0; i < labels.length; i++) {
        for (int j = 0; j < labels.length; j++) {
            // State "i,j" can move to any "j,k", emitting label k.
            String[] destinationNames = new String[labels.length];
            for (int k = 0; k < labels.length; k++)
                destinationNames[k] = labels[j] + LABEL_SEPARATOR
                        + labels[k];
            addState(labels[i] + LABEL_SEPARATOR + labels[j], 0.0, 0.0,
                    destinationNames, labels);
        }
    }
}
/**
 * Add states to create a second-order Markov model on labels, adding only
 * those transitions the occur in the given trainingSet.
 */
public void addStatesForBiLabelsConnectedAsIn(InstanceList trainingSet) {
    int numLabels = outputAlphabet.size();
    boolean[][] connections = labelConnectionsIn(trainingSet);
    for (int i = 0; i < numLabels; i++) {
        for (int j = 0; j < numLabels; j++) {
            // Only create state "i,j" if i->j was observed.
            if (!connections[i][j])
                continue;
            // Count observed successors k of j ...
            @Var
            int numDestinations = 0;
            for (int k = 0; k < numLabels; k++)
                if (connections[j][k])
                    numDestinations++;
            // ... and build destination names "j,k" with emitted label k.
            String[] destinationNames = new String[numDestinations];
            String[] labels = new String[numDestinations];
            @Var
            int destinationIndex = 0;
            for (int k = 0; k < numLabels; k++)
                if (connections[j][k]) {
                    destinationNames[destinationIndex] = (String) outputAlphabet
                            .lookupObject(j)
                            + LABEL_SEPARATOR
                            + (String) outputAlphabet.lookupObject(k);
                    labels[destinationIndex] = (String) outputAlphabet
                            .lookupObject(k);
                    destinationIndex++;
                }
            addState((String) outputAlphabet.lookupObject(i)
                    + LABEL_SEPARATOR
                    + (String) outputAlphabet.lookupObject(j), 0.0, 0.0,
                    destinationNames, labels);
        }
    }
}
/**
 * Add one state per ordered label triple, fully connected: a third-order
 * (tri-label) Markov model over the output alphabet.
 */
public void addFullyConnectedStatesForTriLabels() {
    String[] labels = new String[outputAlphabet.size()];
    // This is assuming the the entries in the outputAlphabet are Strings!
    for (int i = 0; i < outputAlphabet.size(); i++) {
        logger.info("HMM: outputAlphabet.lookup class = "
                + outputAlphabet.lookupObject(i).getClass().getName());
        labels[i] = outputAlphabet.lookupObject(i).toString();
    }
    for (int i = 0; i < labels.length; i++) {
        for (int j = 0; j < labels.length; j++) {
            for (int k = 0; k < labels.length; k++) {
                // State "i,j,k" can move to any "j,k,l", emitting label l.
                String[] destinationNames = new String[labels.length];
                for (int l = 0; l < labels.length; l++)
                    destinationNames[l] = labels[j] + LABEL_SEPARATOR
                            + labels[k] + LABEL_SEPARATOR + labels[l];
                addState(labels[i] + LABEL_SEPARATOR + labels[j]
                        + LABEL_SEPARATOR + labels[k], 0.0, 0.0,
                        destinationNames, labels);
            }
        }
    }
}
/**
 * Add a single state that loops back to itself on every label in the
 * output alphabet.
 */
public void addSelfTransitioningStateForAllLabels(String name) {
    int numLabels = outputAlphabet.size();
    String[] labels = new String[numLabels];
    String[] destinationNames = new String[numLabels];
    for (int li = 0; li < numLabels; li++) {
        labels[li] = outputAlphabet.lookupObject(li).toString();
        destinationNames[li] = name;
    }
    addState(name, 0.0, 0.0, destinationNames, labels);
}
/**
 * Join the given labels into a single state name, separated by
 * LABEL_SEPARATOR. Returns the empty string for an empty array.
 */
private String concatLabels(String[] labels) {
    // StringBuilder instead of the synchronized StringBuffer: this is a
    // purely local, single-threaded buffer.
    StringBuilder buf = new StringBuilder();
    for (int i = 0; i < labels.length; i++) {
        if (i > 0)
            buf.append(LABEL_SEPARATOR);
        buf.append(labels[i]);
    }
    return buf.toString();
}
/**
 * Return the name of the state reached by appending {@code next} to the
 * last {@code k-1} entries of {@code history}, joined by LABEL_SEPARATOR.
 */
private String nextKGram(String[] history, int k, String next) {
    // StringBuilder instead of the synchronized StringBuffer: this is a
    // purely local, single-threaded buffer.
    StringBuilder buf = new StringBuilder();
    int start = history.length + 1 - k;
    for (int i = start; i < history.length; i++) {
        buf.append(history[i]).append(LABEL_SEPARATOR);
    }
    buf.append(next);
    return buf.toString();
}
/**
 * Decide whether the transition prev->curr is permitted: it is rejected if
 * it matches the "no" pattern, and (when "yes" is given) it must match the
 * "yes" pattern.
 */
private boolean allowedTransition(String prev, String curr, Pattern no,
        Pattern yes) {
    String pair = concatLabels(new String[] { prev, curr });
    if (no != null && no.matcher(pair).matches())
        return false;
    return yes == null || yes.matcher(pair).matches();
}
/**
 * Return true iff every adjacent label pair in {@code history} is an
 * allowed transition under the given forbid/allow patterns.
 */
private boolean allowedHistory(String[] history, Pattern no, Pattern yes) {
    for (int i = 1; i < history.length; i++)
        if (!allowedTransition(history[i - 1], history[i], no, yes))
            return false;
    return true;
}
/**
 * Assumes that the HMM's output alphabet contains <code>String</code>s.
 * Creates an order-<em>n</em> HMM with input predicates and output labels
 * given by <code>trainingSet</code> and order, connectivity, and weights
 * given by the remaining arguments.
 *
 * @param trainingSet
 *            the training instances
 * @param orders
 *            an array of increasing non-negative numbers giving the orders
 *            of the features for this HMM. The largest number <em>n</em> is
 *            the Markov order of the HMM. States are <em>n</em>-tuples of
 *            output labels. Each of the other numbers <em>k</em> in
 *            <code>orders</code> represents a weight set shared by all
 *            destination states whose last (most recent) <em>k</em> labels
 *            agree. If <code>orders</code> is <code>null</code>, an order-0
 *            HMM is built.
 * @param defaults
 *            If non-null, it must be the same length as <code>orders</code>
 *            , with <code>true</code> positions indicating that the weight
 *            set for the corresponding order contains only the weight for a
 *            default feature; otherwise, the weight set has weights for all
 *            features built from input predicates.
 * @param start
 *            The label that represents the context of the start of a
 *            sequence. It may be also used for sequence labels.
 * @param forbidden
 *            If non-null, specifies what pairs of successive labels are not
 *            allowed, both for constructing <em>n</em>order states or for
 *            transitions. A label pair (<em>u</em>,<em>v</em>) is not
 *            allowed if <em>u</em> + "," + <em>v</em> matches
 *            <code>forbidden</code>.
 * @param allowed
 *            If non-null, specifies what pairs of successive labels are
 *            allowed, both for constructing <em>n</em>order states or for
 *            transitions. A label pair (<em>u</em>,<em>v</em>) is allowed
 *            only if <em>u</em> + "," + <em>v</em> matches
 *            <code>allowed</code>.
 * @param fullyConnected
 *            Whether to include all allowed transitions, even those not
 *            occurring in <code>trainingSet</code>,
 * @return The name of the start state.
 *
 */
public String addOrderNStates(InstanceList trainingSet, int[] orders,
        boolean[] defaults, String start, Pattern forbidden,
        Pattern allowed, boolean fullyConnected) {
    @Var
    boolean[][] connections = null;
    if (!fullyConnected)
        connections = labelConnectionsIn(trainingSet);
    @Var
    int order = -1;
    // NOTE(review): if orders == null and defaults != null this line throws
    // NPE before the null check below -- confirm intended precondition.
    if (defaults != null && defaults.length != orders.length)
        throw new IllegalArgumentException(
                "Defaults must be null or match orders");
    if (orders == null)
        order = 0;
    else {
        // Validate that orders is strictly increasing; the last entry is
        // the Markov order of the model.
        for (int i = 0; i < orders.length; i++) {
            if (orders[i] <= order)
                throw new IllegalArgumentException(
                        "Orders must be non-negative and in ascending order");
            order = orders[i];
        }
        if (order < 0)
            order = 0;
    }
    if (order > 0) {
        // Enumerate all label n-tuples with an odometer over historyIndexes.
        int[] historyIndexes = new int[order];
        String[] history = new String[order];
        String label0 = (String) outputAlphabet.lookupObject(0);
        for (int i = 0; i < order; i++)
            history[i] = label0;
        int numLabels = outputAlphabet.size();
        while (historyIndexes[0] < numLabels) {
            logger.info("Preparing " + concatLabels(history));
            if (allowedHistory(history, forbidden, allowed)) {
                String stateName = concatLabels(history);
                @Var
                int nt = 0;
                @Var
                String[] destNames = new String[numLabels];
                @Var
                String[] labelNames = new String[numLabels];
                // Collect the allowed (and, unless fullyConnected,
                // observed) successor states of this history.
                for (int nextIndex = 0; nextIndex < numLabels; nextIndex++) {
                    String next = (String) outputAlphabet
                            .lookupObject(nextIndex);
                    if (allowedTransition(history[order - 1], next,
                            forbidden, allowed)
                            && (fullyConnected || connections[historyIndexes[order - 1]][nextIndex])) {
                        destNames[nt] = nextKGram(history, order, next);
                        labelNames[nt] = next;
                        nt++;
                    }
                }
                // Shrink the arrays to the number of transitions found.
                if (nt < numLabels) {
                    String[] newDestNames = new String[nt];
                    String[] newLabelNames = new String[nt];
                    for (int t = 0; t < nt; t++) {
                        newDestNames[t] = destNames[t];
                        newLabelNames[t] = labelNames[t];
                    }
                    destNames = newDestNames;
                    labelNames = newLabelNames;
                }
                addState(stateName, 0.0, 0.0, destNames, labelNames);
            }
            // Advance the odometer: increment the least-significant digit,
            // carrying into more significant positions on overflow.
            for (int o = order - 1; o >= 0; o--)
                if (++historyIndexes[o] < numLabels) {
                    history[o] = (String) outputAlphabet
                            .lookupObject(historyIndexes[o]);
                    break;
                } else if (o > 0) {
                    historyIndexes[o] = 0;
                    history[o] = label0;
                }
        }
        // The start state is the n-tuple (start, start, ..., start).
        for (int i = 0; i < order; i++)
            history[i] = start;
        return concatLabels(history);
    }
    // Order-0 model: one fully-connected state per label.
    String[] stateNames = new String[outputAlphabet.size()];
    for (int s = 0; s < outputAlphabet.size(); s++)
        stateNames[s] = (String) outputAlphabet.lookupObject(s);
    for (int s = 0; s < outputAlphabet.size(); s++)
        addState(stateNames[s], 0.0, 0.0, stateNames, stateNames);
    return start;
}
public State getState(String name) {
return (State) name2state.get(name);
}
public int numStates() {
return states.size();
}
public Transducer.State getState(int index) {
return (Transducer.State) states.get(index);
}
public Iterator initialStateIterator() {
return initialStates.iterator();
}
public boolean isTrainable() {
return true;
}
private Alphabet getTransitionAlphabet() {
Alphabet transitionAlphabet = new Alphabet();
for (int i = 0; i < numStates(); i++)
transitionAlphabet.lookupIndex(getState(i).getName(), true);
return transitionAlphabet;
}
@Deprecated
public void reset() {
emissionEstimator = new Multinomial.LaplaceEstimator[numStates()];
transitionEstimator = new Multinomial.LaplaceEstimator[numStates()];
emissionMultinomial = new Multinomial[numStates()];
transitionMultinomial = new Multinomial[numStates()];
Alphabet transitionAlphabet = getTransitionAlphabet();
for (int i = 0; i < numStates(); i++) {
emissionEstimator[i] = new Multinomial.LaplaceEstimator(
inputAlphabet);
transitionEstimator[i] = new Multinomial.LaplaceEstimator(
transitionAlphabet);
emissionMultinomial[i] = new Multinomial(
getUniformArray(inputAlphabet.size()), inputAlphabet);
transitionMultinomial[i] = new Multinomial(
getUniformArray(transitionAlphabet.size()),
transitionAlphabet);
}
initialMultinomial = new Multinomial(getUniformArray(transitionAlphabet
.size()), transitionAlphabet);
initialEstimator = new Multinomial.LaplaceEstimator(transitionAlphabet);
}
/**
* Separate initialization of initial/transitions and emissions. All
* probabilities are proportional to (1+Uniform[0,1])^noise.
*
* @author kedarb
* @param random
* Random object (if null use uniform distribution)
* @param noise
* Noise exponent to use. If zero, then uniform distribution.
*/
public void initTransitions(Random random, double noise) {
Alphabet transitionAlphabet = getTransitionAlphabet();
initialMultinomial = new Multinomial(getRandomArray(transitionAlphabet
.size(), random, noise), transitionAlphabet);
initialEstimator = new Multinomial.LaplaceEstimator(transitionAlphabet);
transitionMultinomial = new Multinomial[numStates()];
transitionEstimator = new Multinomial.LaplaceEstimator[numStates()];
for (int i = 0; i < numStates(); i++) {
transitionMultinomial[i] = new Multinomial(getRandomArray(
transitionAlphabet.size(), random, noise),
transitionAlphabet);
transitionEstimator[i] = new Multinomial.LaplaceEstimator(
transitionAlphabet);
// set state's initial weight
State s = (State) getState(i);
s.setInitialWeight(initialMultinomial.logProbability(s.getName()));
}
}
public void initEmissions(Random random, double noise) {
emissionMultinomial = new Multinomial[numStates()];
emissionEstimator = new Multinomial.LaplaceEstimator[numStates()];
for (int i = 0; i < numStates(); i++) {
emissionMultinomial[i] = new Multinomial(getRandomArray(
inputAlphabet.size(), random, noise), inputAlphabet);
emissionEstimator[i] = new Multinomial.LaplaceEstimator(
inputAlphabet);
}
}
	/**
	 * Replaces the current multinomials with fresh estimates from the
	 * accumulated counts, updates each state's initial weight, and resets all
	 * estimators so the next round of counting starts from zero.
	 */
	public void estimate() {
		Alphabet transitionAlphabet = getTransitionAlphabet();
		initialMultinomial = initialEstimator.estimate();
		initialEstimator = new Multinomial.LaplaceEstimator(transitionAlphabet);
		for (int i = 0; i < numStates(); i++) {
			State s = (State) getState(i);
			emissionMultinomial[i] = emissionEstimator[i].estimate();
			transitionMultinomial[i] = transitionEstimator[i].estimate();
			s.setInitialWeight(initialMultinomial.logProbability(s.getName()));
			// reset estimators
			emissionEstimator[i] = new Multinomial.LaplaceEstimator(
					inputAlphabet);
			transitionEstimator[i] = new Multinomial.LaplaceEstimator(
					transitionAlphabet);
		}
	}
/**
	 * Trains an HMM without validation and evaluation.
*/
public boolean train(InstanceList ilist) {
return train(ilist, (InstanceList) null, (InstanceList) null);
}
/**
	 * Trains an HMM with <tt>evaluator</tt> set to null.
*/
public boolean train(InstanceList ilist, InstanceList validation,
InstanceList testing) {
return train(ilist, validation, testing, (TransducerEvaluator) null);
}
public boolean train(InstanceList ilist, InstanceList validation,
InstanceList testing, TransducerEvaluator eval) {
assert (ilist.size() > 0);
if (emissionEstimator == null) {
emissionEstimator = new Multinomial.LaplaceEstimator[numStates()];
transitionEstimator = new Multinomial.LaplaceEstimator[numStates()];
emissionMultinomial = new Multinomial[numStates()];
transitionMultinomial = new Multinomial[numStates()];
Alphabet transitionAlphabet = new Alphabet();
for (int i = 0; i < numStates(); i++)
transitionAlphabet.lookupIndex(((State) states.get(i))
.getName(), true);
for (int i = 0; i < numStates(); i++) {
emissionEstimator[i] = new Multinomial.LaplaceEstimator(
inputAlphabet);
transitionEstimator[i] = new Multinomial.LaplaceEstimator(
transitionAlphabet);
emissionMultinomial[i] = new Multinomial(
getUniformArray(inputAlphabet.size()), inputAlphabet);
transitionMultinomial[i] = new Multinomial(
getUniformArray(transitionAlphabet.size()),
transitionAlphabet);
}
initialEstimator = new Multinomial.LaplaceEstimator(
transitionAlphabet);
}
for (Instance instance : ilist) {
FeatureSequence input = (FeatureSequence) instance.getData();
FeatureSequence output = (FeatureSequence) instance.getTarget();
new SumLatticeDefault(this, input, output, new Incrementor());
}
initialMultinomial = initialEstimator.estimate();
for (int i = 0; i < numStates(); i++) {
emissionMultinomial[i] = emissionEstimator[i].estimate();
transitionMultinomial[i] = transitionEstimator[i].estimate();
getState(i).setInitialWeight(
initialMultinomial.logProbability(getState(i).getName()));
}
return true;
}
public class Incrementor implements Transducer.Incrementor {
public void incrementFinalState(Transducer.State s, double count) {
}
public void incrementInitialState(Transducer.State s, double count) {
initialEstimator.increment(s.getName(), count);
}
public void incrementTransition(Transducer.TransitionIterator ti,
double count) {
int inputFtr = (Integer) ti.getInput();
State src = (HMM.State) ((TransitionIterator) ti).getSourceState();
State dest = (HMM.State) ((TransitionIterator) ti)
.getDestinationState();
int index = ti.getIndex();
emissionEstimator[index].increment(inputFtr, count);
transitionEstimator[src.getIndex()]
.increment(dest.getName(), count);
}
}
public class WeightedIncrementor implements Transducer.Incrementor {
double weight = 1.0;
public WeightedIncrementor(double wt) {
this.weight = wt;
}
public void incrementFinalState(Transducer.State s, double count) {
}
public void incrementInitialState(Transducer.State s, double count) {
initialEstimator.increment(s.getName(), weight * count);
}
public void incrementTransition(Transducer.TransitionIterator ti,
double count) {
int inputFtr = (Integer) ti.getInput();
State src = (HMM.State) ((TransitionIterator) ti).getSourceState();
State dest = (HMM.State) ((TransitionIterator) ti)
.getDestinationState();
int index = ti.getIndex();
emissionEstimator[index].increment(inputFtr, weight * count);
transitionEstimator[src.getIndex()].increment(dest.getName(),
weight * count);
}
}
public void write(File f) {
try {
ObjectOutputStream oos = new ObjectOutputStream(
new FileOutputStream(f));
oos.writeObject(this);
oos.close();
} catch (IOException e) {
System.err.println("Exception writing file " + f + ": " + e);
}
}
private double[] getUniformArray(int size) {
double[] ret = new double[size];
for (int i = 0; i < size; i++)
// gsc: removing unnecessary cast from 'size'
ret[i] = 1.0 / size;
return ret;
}
// kedarb: p[i] = (1+random)^noise/sum
private double[] getRandomArray(int size, Random random, double noise) {
double[] ret = new double[size];
@Var
double sum = 0;
for (int i = 0; i < size; i++) {
ret[i] = random == null ? 1.0 : Math.pow(1.0 + random.nextDouble(),
noise);
sum += ret[i];
}
for (int i = 0; i < size; i++)
ret[i] /= sum;
return ret;
}
	// Serialization
	// For HMM class
	private static final long serialVersionUID = 1;
	// Version tag written at the head of the serialized form.
	private static final int CURRENT_SERIAL_VERSION = 1;
	// Sentinel written in place of an array length when the array is null.
	static final int NULL_INTEGER = -1;
	/* Need to check for null pointers. */
	/* Bug fix from <NAME> <EMAIL> */
	/*
	 * Custom serialization: writes a version tag, the pipes, alphabets,
	 * states, initial states and the name-to-state map, then each
	 * estimator/multinomial array as a length (NULL_INTEGER when the array is
	 * null) followed by its elements. readObject must mirror this order.
	 */
	private void writeObject(ObjectOutputStream out) throws IOException {
		@Var
		int i;
		@Var
		int size;
		out.writeInt(CURRENT_SERIAL_VERSION);
		out.writeObject(inputPipe);
		out.writeObject(outputPipe);
		out.writeObject(inputAlphabet);
		out.writeObject(outputAlphabet);
		size = states.size();
		out.writeInt(size);
		for (i = 0; i < size; i++)
			out.writeObject(states.get(i));
		size = initialStates.size();
		out.writeInt(size);
		for (i = 0; i < size; i++)
			out.writeObject(initialStates.get(i));
		out.writeObject(name2state);
		if (emissionEstimator != null) {
			size = emissionEstimator.length;
			out.writeInt(size);
			for (i = 0; i < size; i++)
				out.writeObject(emissionEstimator[i]);
		} else
			out.writeInt(NULL_INTEGER);
		if (emissionMultinomial != null) {
			size = emissionMultinomial.length;
			out.writeInt(size);
			for (i = 0; i < size; i++)
				out.writeObject(emissionMultinomial[i]);
		} else
			out.writeInt(NULL_INTEGER);
		if (transitionEstimator != null) {
			size = transitionEstimator.length;
			out.writeInt(size);
			for (i = 0; i < size; i++)
				out.writeObject(transitionEstimator[i]);
		} else
			out.writeInt(NULL_INTEGER);
		if (transitionMultinomial != null) {
			size = transitionMultinomial.length;
			out.writeInt(size);
			for (i = 0; i < size; i++)
				out.writeObject(transitionMultinomial[i]);
		} else
			out.writeInt(NULL_INTEGER);
	}
/* Bug fix from <NAME> <EMAIL> */
	/*
	 * Mirror of writeObject: reads fields in exactly the same order. The
	 * version tag is consumed but not otherwise checked.
	 */
	private void readObject(ObjectInputStream in) throws IOException,
			ClassNotFoundException {
		@Var
		int size;
		@Var
		int i;
		int version = in.readInt();
		inputPipe = (Pipe) in.readObject();
		outputPipe = (Pipe) in.readObject();
		inputAlphabet = (Alphabet) in.readObject();
		outputAlphabet = (Alphabet) in.readObject();
		size = in.readInt();
		states = new ArrayList();
		for (i = 0; i < size; i++) {
			State s = (HMM.State) in.readObject();
			states.add(s);
		}
		size = in.readInt();
		initialStates = new ArrayList();
		for (i = 0; i < size; i++) {
			State s = (HMM.State) in.readObject();
			initialStates.add(s);
		}
		name2state = (HashMap) in.readObject();
		// Each array below was written as a length (NULL_INTEGER for null)
		// followed by its elements.
		size = in.readInt();
		if (size == NULL_INTEGER) {
			emissionEstimator = null;
		} else {
			emissionEstimator = new Multinomial.Estimator[size];
			for (i = 0; i < size; i++) {
				emissionEstimator[i] = (Multinomial.Estimator) in.readObject();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			emissionMultinomial = null;
		} else {
			emissionMultinomial = new Multinomial[size];
			for (i = 0; i < size; i++) {
				emissionMultinomial[i] = (Multinomial) in.readObject();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			transitionEstimator = null;
		} else {
			transitionEstimator = new Multinomial.Estimator[size];
			for (i = 0; i < size; i++) {
				transitionEstimator[i] = (Multinomial.Estimator) in
						.readObject();
			}
		}
		size = in.readInt();
		if (size == NULL_INTEGER) {
			transitionMultinomial = null;
		} else {
			transitionMultinomial = new Multinomial[size];
			for (i = 0; i < size; i++) {
				transitionMultinomial[i] = (Multinomial) in.readObject();
			}
		}
	}
	/** One HMM state: named, indexed, with a fixed set of outgoing labels. */
	public static class State extends Transducer.State implements Serializable {
		// Parameters indexed by destination state, feature index
		String name;
		int index;
		double initialWeight, finalWeight;
		String[] destinationNames;
		// Destination states, resolved lazily from destinationNames by
		// getDestinationState() so states can be added in any order.
		State[] destinations;
		String[] labels;
		HMM hmm;
		// No arg constructor so serialization works
		protected State() {
			super();
		}
		protected State(String name, int index, double initialWeight,
				double finalWeight, String[] destinationNames,
				String[] labelNames, HMM hmm) {
			super();
			assert (destinationNames.length == labelNames.length);
			this.name = name;
			this.index = index;
			this.initialWeight = initialWeight;
			this.finalWeight = finalWeight;
			this.destinationNames = new String[destinationNames.length];
			this.destinations = new State[labelNames.length];
			this.labels = new String[labelNames.length];
			this.hmm = hmm;
			for (int i = 0; i < labelNames.length; i++) {
				// Make sure this label appears in our output Alphabet
				hmm.outputAlphabet.lookupIndex(labelNames[i]);
				this.destinationNames[i] = destinationNames[i];
				this.labels[i] = labelNames[i];
			}
		}
		public Transducer getTransducer() {
			return hmm;
		}
		public double getFinalWeight() {
			return finalWeight;
		}
		public double getInitialWeight() {
			return initialWeight;
		}
		public void setFinalWeight(double c) {
			finalWeight = c;
		}
		public void setInitialWeight(double c) {
			initialWeight = c;
		}
		// Debugging aid: dumps this state and its transitions to stdout.
		public void print() {
			System.out.println("State #" + index + " \"" + name + "\"");
			System.out.println("initialWeight=" + initialWeight
					+ ", finalWeight=" + finalWeight);
			System.out.println("#destinations=" + destinations.length);
			for (int i = 0; i < destinations.length; i++)
				System.out.println("-> " + destinationNames[i]);
		}
		// Resolves (and caches) the destination State for transition slot
		// 'index' by looking its name up in the owning HMM.
		public State getDestinationState(int index) {
			@Var
			State ret;
			if ((ret = destinations[index]) == null) {
				ret = destinations[index] = (State) hmm.name2state
						.get(destinationNames[index]);
				assert (ret != null) : index;
			}
			return ret;
		}
		public Transducer.TransitionIterator transitionIterator(
				Sequence inputSequence, int inputPosition,
				Sequence outputSequence, int outputPosition) {
			if (inputPosition < 0 || outputPosition < 0)
				throw new UnsupportedOperationException(
						"Epsilon transitions not implemented.");
			if (inputSequence == null)
				throw new UnsupportedOperationException(
						"HMMs are generative models; but this is not yet implemented.");
			if (!(inputSequence instanceof FeatureSequence))
				throw new UnsupportedOperationException(
						"HMMs currently expect Instances to have FeatureSequence data");
			// When an output sequence is supplied, the iterator is constrained
			// to transitions emitting that position's label.
			return new TransitionIterator(this,
					(FeatureSequence) inputSequence, inputPosition,
					(outputSequence == null ? null : (String) outputSequence
							.get(outputPosition)), hmm);
		}
		public String getName() {
			return name;
		}
		public int getIndex() {
			return index;
		}
		// Counts are accumulated by HMM.Incrementor instead; these hooks are
		// intentionally no-ops.
		public void incrementInitialCount(double count) {
		}
		public void incrementFinalCount(double count) {
		}
		// Serialization
		// For class State
		private static final long serialVersionUID = 1;
		private static final int CURRENT_SERIAL_VERSION = 0;
		private static final int NULL_INTEGER = -1;
		// Writes version, name and index, then each (possibly null) array as
		// a length (NULL_INTEGER when null) followed by its elements.
		private void writeObject(ObjectOutputStream out) throws IOException {
			@Var
			int i;
			@Var
			int size;
			out.writeInt(CURRENT_SERIAL_VERSION);
			out.writeObject(name);
			out.writeInt(index);
			size = (destinationNames == null) ? NULL_INTEGER
					: destinationNames.length;
			out.writeInt(size);
			if (size != NULL_INTEGER) {
				for (i = 0; i < size; i++) {
					out.writeObject(destinationNames[i]);
				}
			}
			size = (destinations == null) ? NULL_INTEGER : destinations.length;
			out.writeInt(size);
			if (size != NULL_INTEGER) {
				for (i = 0; i < size; i++) {
					out.writeObject(destinations[i]);
				}
			}
			size = (labels == null) ? NULL_INTEGER : labels.length;
			out.writeInt(size);
			if (size != NULL_INTEGER) {
				for (i = 0; i < size; i++)
					out.writeObject(labels[i]);
			}
			out.writeObject(hmm);
		}
		// Mirror of writeObject; must read fields in exactly the same order.
		private void readObject(ObjectInputStream in) throws IOException,
				ClassNotFoundException {
			@Var
			int size;
			@Var
			int i;
			int version = in.readInt();
			name = (String) in.readObject();
			index = in.readInt();
			size = in.readInt();
			if (size != NULL_INTEGER) {
				destinationNames = new String[size];
				for (i = 0; i < size; i++) {
					destinationNames[i] = (String) in.readObject();
				}
			} else {
				destinationNames = null;
			}
			size = in.readInt();
			if (size != NULL_INTEGER) {
				destinations = new State[size];
				for (i = 0; i < size; i++) {
					destinations[i] = (State) in.readObject();
				}
			} else {
				destinations = null;
			}
			size = in.readInt();
			if (size != NULL_INTEGER) {
				labels = new String[size];
				for (i = 0; i < size; i++)
					labels[i] = (String) in.readObject();
				// inputAlphabet = (Alphabet) in.readObject();
				// outputAlphabet = (Alphabet) in.readObject();
			} else {
				labels = null;
			}
			hmm = (HMM) in.readObject();
		}
	}
	/**
	 * Iterates over the transitions leaving one state at one input position,
	 * weighting each by emission + transition log-probability. When a
	 * supervised output label is given, transitions with other labels are
	 * assigned IMPOSSIBLE_WEIGHT and skipped.
	 */
	protected static class TransitionIterator extends
			Transducer.TransitionIterator implements Serializable {
		State source;
		int index, nextIndex, inputPos;
		double[] weights; // per-transition log-probabilities (see constructor)
		// Eventually change this because we will have a more space-efficient
		// FeatureVectorSequence that cannot break out each FeatureVector
		FeatureSequence inputSequence;
		Integer inputFeature;
		HMM hmm;
		public TransitionIterator(State source, FeatureSequence inputSeq,
				int inputPosition, String output, HMM hmm) {
			this.source = source;
			this.hmm = hmm;
			this.inputSequence = inputSeq;
			this.inputFeature = new Integer(inputSequence
					.getIndexAtPosition(inputPosition));
			this.inputPos = inputPosition;
			this.weights = new double[source.destinations.length];
			for (int transIndex = 0; transIndex < source.destinations.length; transIndex++) {
				// Only transitions matching the supervised label (when one is
				// given) receive a finite weight.
				if (output == null || output.equals(source.labels[transIndex])) {
					weights[transIndex] = 0;
					// xxx should this be emission of the _next_ observation?
					// double logEmissionProb =
					// hmm.emissionMultinomial[source.getIndex()].logProbability
					// (inputSeq.get (inputPosition));
					int destIndex = source.getDestinationState(transIndex).getIndex();
					double logEmissionProb = hmm.emissionMultinomial[destIndex]
							.logProbability(inputSeq.get(inputPosition));
					double logTransitionProb = hmm.transitionMultinomial[source
							.getIndex()]
							.logProbability(source.destinationNames[transIndex]);
					// weight = logProbability
					weights[transIndex] = (logEmissionProb + logTransitionProb);
					assert (!Double.isNaN(weights[transIndex]));
				} else
					weights[transIndex] = IMPOSSIBLE_WEIGHT;
			}
			// Position nextIndex at the first usable transition.
			nextIndex = 0;
			while (nextIndex < source.destinations.length
					&& weights[nextIndex] == IMPOSSIBLE_WEIGHT)
				nextIndex++;
		}
		public boolean hasNext() {
			return nextIndex < source.destinations.length;
		}
		public Transducer.State nextState() {
			assert (nextIndex < source.destinations.length);
			index = nextIndex;
			nextIndex++;
			// Skip past transitions ruled out by the output label.
			while (nextIndex < source.destinations.length
					&& weights[nextIndex] == IMPOSSIBLE_WEIGHT)
				nextIndex++;
			return source.getDestinationState(index);
		}
		public int getIndex() {
			return index;
		}
		/*
		 * Returns an Integer object containing the feature index of the symbol
		 * at this position in the input sequence.
		 */
		public Object getInput() {
			return inputFeature;
		}
		// public int getInputPosition () { return inputPos; }
		public Object getOutput() {
			return source.labels[index];
		}
		public double getWeight() {
			return weights[index];
		}
		public Transducer.State getSourceState() {
			return source;
		}
		public Transducer.State getDestinationState() {
			return source.getDestinationState(index);
		}
		// Serialization
		// TransitionIterator
		private static final long serialVersionUID = 1;
		private static final int CURRENT_SERIAL_VERSION = 0;
		private static final int NULL_INTEGER = -1;
		private void writeObject(ObjectOutputStream out) throws IOException {
			out.writeInt(CURRENT_SERIAL_VERSION);
			out.writeObject(source);
			out.writeInt(index);
			out.writeInt(nextIndex);
			out.writeInt(inputPos);
			// weights array: length (or NULL_INTEGER) followed by elements.
			if (weights != null) {
				out.writeInt(weights.length);
				for (int i = 0; i < weights.length; i++) {
					out.writeDouble(weights[i]);
				}
			} else {
				out.writeInt(NULL_INTEGER);
			}
			out.writeObject(inputSequence);
			out.writeObject(inputFeature);
			out.writeObject(hmm);
		}
		// Mirror of writeObject; reads fields in the same order.
		private void readObject(ObjectInputStream in) throws IOException,
				ClassNotFoundException {
			int version = in.readInt();
			source = (State) in.readObject();
			index = in.readInt();
			nextIndex = in.readInt();
			inputPos = in.readInt();
			int size = in.readInt();
			if (size == NULL_INTEGER) {
				weights = null;
			} else {
				weights = new double[size];
				for (int i = 0; i < size; i++) {
					weights[i] = in.readDouble();
				}
			}
			inputSequence = (FeatureSequence) in.readObject();
			inputFeature = (Integer) in.readObject();
			hmm = (HMM) in.readObject();
		}
	}
}
| 15,096 |
782 | /*
* Copyright (c) 2021, <NAME>. All Rights Reserved.
*
* This file is part of BoofCV (http://boofcv.org).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package boofcv.alg.tracker.meanshift;
import boofcv.struct.image.GrayU8;
import boofcv.struct.image.Planar;
import georegression.struct.shapes.RectangleLength2D_I32;
/**
* <p>
 * Creates a histogram in a color image and is used to identify the likelihood of a color being a member
* of the original distribution. The histogram is computed in N-dimensional space, where N is the number
* of bands in the color image. The number of bins for each band is specified in the constructor. There
* is a total of N*numBins elements in the histogram.
* </p>
*
* <p>
* Design Note:<br>
* The reason operations in {@link boofcv.alg.feature.color.GHistogramFeatureOps} is not used internally is because
 * those are for histograms stored in double arrays, while this has to use floats.
* </p>
*
* @author <NAME>
*/
public class LikelihoodHistCoupled_PL_U8 implements PixelLikelihood<Planar<GrayU8>> {
	Planar<GrayU8> image;

	// Exclusive upper bound on pixel values: constructor argument + 1
	int maxPixelValue;
	// Number of bins for each channel in the histogram
	int numBins;

	// Joint histogram over all bands (numBins^numBands elements), normalized
	// by createModel() to sum to one
	float[] hist = new float[0];

	/**
	 * @param maxPixelValue largest value a pixel can have (inclusive)
	 * @param numBins number of histogram bins per band
	 */
	public LikelihoodHistCoupled_PL_U8( int maxPixelValue, int numBins ) {
		this.maxPixelValue = maxPixelValue + 1;
		this.numBins = numBins;
	}

	@Override
	public void setImage( Planar<GrayU8> image ) {
		this.image = image;

		int histElements = 1;
		for (int i = 0; i < image.getNumBands(); i++) {
			histElements *= numBins;
		}

		// Only reallocate when the number of bands changed the size
		if (hist.length != histElements) {
			hist = new float[histElements];
		}
	}

	@Override
	public boolean isInBounds( int x, int y ) {
		return image.isInBounds(x, y);
	}

	@Override
	public void createModel( RectangleLength2D_I32 target ) {
		// BUG FIX: zero the histogram first. 'hist' is only reallocated when
		// its size changes, so without this a second call to createModel()
		// would accumulate on top of the previous (normalized) model.
		for (int i = 0; i < hist.length; i++) {
			hist[i] = 0;
		}

		for (int y = 0; y < target.height; y++) {
			int index = image.startIndex + (y + target.y0)*image.stride + target.x0;
			for (int x = 0; x < target.width; x++, index++) {
				// Map the pixel's N-band color to its joint histogram bin
				int indexBin = 0;
				int binStride = 1;
				for (int i = 0; i < image.getNumBands(); i++) {
					GrayU8 band = image.getBand(i);
					int value = band.data[index] & 0xFF;
					int bin = numBins*value/maxPixelValue;
					indexBin += bin*binStride;
					binStride *= numBins;
				}
				hist[indexBin]++;
			}
		}

		// Normalize so the histogram is a probability distribution
		float total = target.width*target.height;
		for (int i = 0; i < hist.length; i++) {
			hist[i] /= total;
		}
	}

	@Override
	public float compute( int x, int y ) {
		int index = image.startIndex + y*image.stride + x;

		// Same band-to-bin mapping used when the model was built
		int indexBin = 0;
		int binStride = 1;
		for (int i = 0; i < image.getNumBands(); i++) {
			GrayU8 band = image.getBand(i);
			int value = band.data[index] & 0xFF;
			int bin = numBins*value/maxPixelValue;
			indexBin += bin*binStride;
			binStride *= numBins;
		}

		return hist[indexBin];
	}
}
| 1,204 |
3,897 | /*
* Copyright (c) 2015-2017, Pelion and affiliates.
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* \file protocol_core_sleep.c
* \brief Add short description about this file!!!
*
*/
#include "nsconfig.h"
#include "string.h"
#include "ns_types.h"
#include "ns_trace.h"
#include "eventOS_callback_timer.h"
#include "NWK_INTERFACE/Include/protocol.h"
#include "NWK_INTERFACE/Include/protocol_timer.h"
#include "common_functions.h"
#include "platform/arm_hal_interrupt.h"
#include "6LoWPAN/ND/nd_router_object.h"
#include "6LoWPAN/Thread/thread_common.h"
#include "6LoWPAN/MAC/mac_data_poll.h"
#include "sw_mac.h"
#define TRACE_GROUP "pCor"
/* Thin wrapper forwarding the elapsed sleep time (in milliseconds) to the
 * protocol timer module so its timers can be re-balanced after wake-up. */
static void protocol_timer_balance(uint32_t time_in_ms)
{
    protocol_timer_sleep_balance(time_in_ms);
}
/* Returns 0 when the interface has a buffer handler and poll info and no
 * poll is currently active; -1 otherwise. Checked in a critical section. */
static int protocol_stack_interface_disable_poll(protocol_interface_info_entry_t *cur)
{
    int ret_val;

    platform_enter_critical();
    if (cur->if_stack_buffer_handler && cur->rfd_poll_info &&
            !cur->rfd_poll_info->pollActive) {
        ret_val = 0;
    } else {
        ret_val = -1;
    }
    platform_exit_critical();

    return ret_val;
}
/* Returns the longest possible deep-sleep time in milliseconds, or 0 when
 * deep sleep is not currently possible. */
uint32_t arm_net_check_enter_deep_sleep_possibility(void)
{
    protocol_interface_info_entry_t *cur = protocol_stack_interface_sleep_possibility();

    if (!cur) {
        return 0;
    }

    /* Maximum MAC data-poll sleep period; 0 means sleep is not possible. */
    uint32_t poll_period = mac_data_poll_get_max_sleep_period(cur);
    if (!poll_period) {
        return 0;
    }

    /* Scale by 10 as in the original implementation (presumably converting
     * 10ms ticks to milliseconds -- see mac_data_poll_get_max_sleep_period). */
    uint32_t sleep_time = poll_period * 10;

    /* The next ND registration may be due sooner; cap the sleep time to it. */
    uint32_t nd_deadline = nd_object_time_to_next_nd_reg();
    if (nd_deadline && nd_deadline < sleep_time) {
        sleep_time = nd_deadline;
    }

    return sleep_time;
}
/* Requests sleep mode. Returns 0 on success, -1 when no interface allows
 * sleeping or polling is still active. */
int arm_net_enter_sleep(void)
{
    protocol_interface_info_entry_t *cur = protocol_stack_interface_sleep_possibility();

    if (!cur || protocol_stack_interface_disable_poll(cur) != 0) {
        return -1;
    }

    platform_enter_critical();
    clear_power_state(SLEEP_MODE_REQ);
    platform_exit_critical();

    return 0;
}
/* Re-balances protocol timers after waking from sleep.
 * NOTE(review): ret_val is never set to 0, so this function always returns
 * -1 even when the timers were balanced -- confirm callers treat the return
 * value as informational only before changing it. */
int arm_net_wakeup_and_timer_synch(uint32_t sleeped_time_in_ms)
{
    int ret_val = -1;
    protocol_interface_info_entry_t *cur = 0;
    cur = protocol_stack_interface_sleep_possibility();
    if (cur) {
        if (cur->lowpan_info & INTERFACE_NWK_ACTIVE) {
            //Update MS to 10ms ticks
            //uint32_t sleep_time_in_10ms = (sleeped_time_in_ms + 9) / 10 ;
            //Enable Data Polling after sleep
            //protocol_stack_interface_info_wake_for_polling_interfaces(sleep_time_in_10ms, cur);
            // Credit the stack timers with the time spent asleep.
            protocol_timer_balance(sleeped_time_in_ms);
        }
    }
    return ret_val;
}
| 1,426 |
1,382 | <filename>src/multichannel/tests/ofdmframesync_autotest.c
/*
* Copyright (c) 2007 - 2019 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <complex.h>
#include <math.h>
#include <assert.h>
#include "autotest/autotest.h"
#include "liquid.h"
// internal callback
// _X : subcarrier symbols
// _p : subcarrier allocation
// _M : number of subcarriers
// _userdata : user-defined data structure
// internal callback: copies the received subcarrier symbols into the
// caller-provided buffer so the test can inspect them afterwards
//  _X        : subcarrier symbols
//  _p        : subcarrier allocation
//  _M        : number of subcarriers
//  _userdata : destination buffer holding _M symbols
int ofdmframesync_autotest_callback(float complex * _X,
                                    unsigned char * _p,
                                    unsigned int _M,
                                    void * _userdata)
{
    printf("******** callback invoked!\n");

    float complex * symbols_out = (float complex *)_userdata;
    memmove(symbols_out, _X, _M*sizeof(float complex));
    return 0;
}
// Helper function to keep code base small
// _num_subcarriers : number of subcarriers
// _cp_len           : cyclic prefix length
// _taper_len : taper length
void ofdmframesync_acquire_test(unsigned int _num_subcarriers,
                                unsigned int _cp_len,
                                unsigned int _taper_len)
{
    // options
    unsigned int M = _num_subcarriers; // number of subcarriers
    unsigned int cp_len = _cp_len; // cyclic prefix length
    unsigned int taper_len = _taper_len; // taper length
    float tol = 1e-2f; // error tolerance
    //
    float dphi = 1.0f / (float)M; // carrier frequency offset
    // subcarrier allocation (initialize to default)
    unsigned char p[M];
    ofdmframe_init_default_sctype(M, p);
    // derived values: 3 preamble symbols (S0a, S0b, S1) plus 1 data symbol
    unsigned int num_samples = (3 + 1)*(M + cp_len);
    // create synthesizer/analyzer objects
    ofdmframegen fg = ofdmframegen_create(M, cp_len, taper_len, p);
    //ofdmframegen_print(fg);
    float complex X[M]; // original data sequence
    float complex X_test[M]; // recovered data sequence (filled by callback)
    ofdmframesync fs = ofdmframesync_create(M,cp_len,taper_len,p,ofdmframesync_autotest_callback,(void*)X_test);
    unsigned int i;
    float complex y[num_samples]; // frame samples
    // assemble full frame
    unsigned int n=0;
    // write first S0 symbol
    ofdmframegen_write_S0a(fg, &y[n]);
    n += M + cp_len;
    // write second S0 symbol
    ofdmframegen_write_S0b(fg, &y[n]);
    n += M + cp_len;
    // write S1 symbol
    ofdmframegen_write_S1( fg, &y[n]);
    n += M + cp_len;
    // generate data symbol (random unit-magnitude points)
    for (i=0; i<M; i++) {
        X[i] = cexpf(_Complex_I*2*M_PI*randf());
        X_test[i] = 0.0f;
    }
    // write data symbol
    ofdmframegen_writesymbol(fg, X, &y[n]);
    n += M + cp_len;
    // validate frame length
    assert(n == num_samples);
    // add carrier offset
    for (i=0; i<num_samples; i++)
        y[i] *= cexpf(_Complex_I*dphi*i);
    // run receiver
    ofdmframesync_execute(fs,y,num_samples);
    // check output: recovered data subcarriers must match within tolerance
    for (i=0; i<M; i++) {
        if (p[i] == OFDMFRAME_SCTYPE_DATA) {
            float e = crealf( (X[i] - X_test[i])*conjf(X[i] - X_test[i]) );
            CONTEND_DELTA( fabsf(e), 0.0f, tol );
        }
    }
    // destroy objects
    ofdmframegen_destroy(fg);
    ofdmframesync_destroy(fs);
}
// instantiate the acquisition test over a range of FFT sizes (cp_len = M/8)
void autotest_ofdmframesync_acquire_n64() { ofdmframesync_acquire_test(64, 8, 0); }
void autotest_ofdmframesync_acquire_n128() { ofdmframesync_acquire_test(128, 16, 0); }
void autotest_ofdmframesync_acquire_n256() { ofdmframesync_acquire_test(256, 32, 0); }
void autotest_ofdmframesync_acquire_n512() { ofdmframesync_acquire_test(512, 64, 0); }
| 2,025 |
629 | //
// File: fkfkbiecjecjbaie_cat.h
//
// Code generated for Simulink model 'sim_model_lib0'.
//
// Model version : 1.1142
// Simulink Coder version : 8.11 (R2016b) 25-Aug-2016
// C/C++ source code generated on : Tue Oct 16 10:08:00 2018
//
#ifndef SHARE_fkfkbiecjecjbaie_cat
#define SHARE_fkfkbiecjecjbaie_cat
#include "rtwtypes.h"
// Shared utility prototype emitted by Simulink Coder.
// NOTE(review): semantics are not visible here; the name suggests a MATLAB
// 'cat' (concatenation) of the two sized inputs into y_data/y_sizes --
// confirm against the generated fkfkbiecjecjbaie_cat.c before relying on it.
extern void fkfkbiecjecjbaie_cat(const int32_T varargin_1_sizes, const int32_T
  varargin_2_sizes[2], uint8_T y_data[], int32_T y_sizes[2]);
#endif
//
// File trailer for generated code.
//
// [EOF]
//
| 266 |
10,225 | package io.quarkus.it.main;
import io.quarkus.test.junit.NativeImageTest;
/**
* @author <NAME>, <EMAIL>
*/
// Re-runs the inherited RBACAccessTest suite against the native-image build
// (per @NativeImageTest); no additional test methods are needed.
@NativeImageTest
public class RBACAccessInGraalITCase extends RBACAccessTest {
}
| 70 |
1,179 | <gh_stars>1000+
// SPDX-License-Identifier: BSD-2-Clause
/*
* Copyright (c) 2014-2019, Linaro Limited
*/
#include <assert.h>
#include <crypto/crypto.h>
#include <crypto/crypto_impl.h>
#include <stdlib.h>
#include <tee_api_types.h>
#include <tomcrypt_private.h>
#include <util.h>
struct ltc_ctr_ctx {
struct crypto_cipher_ctx ctx;
int cipher_idx;
int (*update)(const unsigned char *src, unsigned char *dst,
unsigned long len, symmetric_CTR *ctr);
symmetric_CTR state;
};
static const struct crypto_cipher_ops ltc_ctr_ops;
static struct ltc_ctr_ctx *to_ctr_ctx(struct crypto_cipher_ctx *ctx)
{
assert(ctx && ctx->ops == <c_ctr_ops);
return container_of(ctx, struct ltc_ctr_ctx, ctx);
}
/*
 * Initialize the CTR state: pick encrypt/decrypt direction and start the
 * counter with @iv as the initial (big-endian) counter block.
 *
 * Fix: @iv and @iv_len were annotated __unused although both are used
 * below (the IV length check and ctr_start()); the misleading annotation
 * is removed. @key2/@key2_len really are unused for CTR.
 */
static TEE_Result ltc_ctr_init(struct crypto_cipher_ctx *ctx,
			       TEE_OperationMode mode, const uint8_t *key1,
			       size_t key1_len, const uint8_t *key2 __unused,
			       size_t key2_len __unused,
			       const uint8_t *iv, size_t iv_len)
{
	struct ltc_ctr_ctx *c = to_ctr_ctx(ctx);

	/* The counter/IV must be exactly one cipher block long */
	if ((int)iv_len != cipher_descriptor[c->cipher_idx]->block_length)
		return TEE_ERROR_BAD_PARAMETERS;

	if (mode == TEE_MODE_ENCRYPT)
		c->update = ctr_encrypt;
	else
		c->update = ctr_decrypt;

	if (ctr_start(c->cipher_idx, iv, key1, key1_len, 0,
		      CTR_COUNTER_BIG_ENDIAN, &c->state) == CRYPT_OK)
		return TEE_SUCCESS;
	else
		return TEE_ERROR_BAD_STATE;
}
/*
 * Process @len bytes from @data into @dst using the direction selected
 * at init time. Fails if the context was never initialized (no update
 * callback set) or if libtomcrypt reports an error.
 */
static TEE_Result ltc_ctr_update(struct crypto_cipher_ctx *ctx,
				 bool last_block __unused,
				 const uint8_t *data, size_t len, uint8_t *dst)
{
	struct ltc_ctr_ctx *c = to_ctr_ctx(ctx);

	if (!c->update)
		return TEE_ERROR_BAD_STATE;
	if (c->update(data, dst, len, &c->state) != CRYPT_OK)
		return TEE_ERROR_BAD_STATE;

	return TEE_SUCCESS;
}
/* Terminate the CTR stream, releasing libtomcrypt's internal state. */
static void ltc_ctr_final(struct crypto_cipher_ctx *ctx)
{
	ctr_done(&to_ctr_ctx(ctx)->state);
}
/* Free the whole CTR context allocated by crypto_aes_ctr_alloc_ctx(). */
static void ltc_ctr_free_ctx(struct crypto_cipher_ctx *ctx)
{
	free(to_ctr_ctx(ctx));
}
/*
 * Clone the runtime state (direction callback + symmetric CTR state)
 * from @src_ctx into @dst_ctx. Both contexts must wrap the same cipher,
 * which is asserted below.
 */
static void ltc_ctr_copy_state(struct crypto_cipher_ctx *dst_ctx,
			       struct crypto_cipher_ctx *src_ctx)
{
	struct ltc_ctr_ctx *src = to_ctr_ctx(src_ctx);
	struct ltc_ctr_ctx *dst = to_ctr_ctx(dst_ctx);
	assert(src->cipher_idx == dst->cipher_idx);
	dst->update = src->update;
	dst->state = src->state;
}
/* Dispatch table binding the generic cipher API to the CTR helpers above. */
static const struct crypto_cipher_ops ltc_ctr_ops = {
	.init = ltc_ctr_init,
	.update = ltc_ctr_update,
	.final = ltc_ctr_final,
	.free_ctx = ltc_ctr_free_ctx,
	.copy_state = ltc_ctr_copy_state,
};
TEE_Result crypto_aes_ctr_alloc_ctx(struct crypto_cipher_ctx **ctx_ret)
{
struct ltc_ctr_ctx *c = NULL;
int cipher_idx = find_cipher("aes");
if (cipher_idx < 0)
return TEE_ERROR_NOT_SUPPORTED;
c = calloc(1, sizeof(*c));
if (!c)
return TEE_ERROR_OUT_OF_MEMORY;
c->ctx.ops = <c_ctr_ops;
c->cipher_idx = cipher_idx;
*ctx_ret = &c->ctx;
return TEE_SUCCESS;
}
| 1,265 |
784 | package jforgame.merge.service;
import jforgame.merge.config.MergeServer;
import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * Backs up a MySQL database to a .sql file by shelling out to {@code mysqldump}.
 */
public class BackUpService {

    private static BackUpService self = new BackUpService();

    public static BackUpService getInstance() {
        return self;
    }

    /**
     * Back up the database described by {@code server}.
     * <p>
     * NOTE(review): {@code server.getUrl()} is passed for both the host and the
     * user argument — the second argument presumably should be the DB user name;
     * confirm against the MergeServer API before changing it (kept as-is here to
     * preserve behavior).
     */
    public void dbBackUp(MergeServer server, String backPath, String backName) throws Exception {
        dbBackUp(server.getUrl(), server.getUrl(), server.getPassword(), server.getDbName(), backPath, backName);
    }

    /**
     * Back up database {@code dbName} into {@code backPath + dbName + backName}.
     *
     * @param url      MySQL host
     * @param root     MySQL user name
     * @param pwd      MySQL password
     * @param dbName   database to dump
     * @param backPath directory for the dump file
     * @param backName file-name suffix (typically a timestamp + ".sql")
     * @throws Exception if the dump process cannot be started or is interrupted
     */
    public void dbBackUp(String url, String root, String pwd, String dbName, String backPath, String backName) throws Exception {
        String pathSql = backPath + dbName + backName;
        // mysqldump -h127.0.0.1 -uroot -p123456 game_user_001 >~/backup/2020-01-17-11-20-11.sql
        StringBuilder sb = new StringBuilder();
        sb.append("mysqldump");
        sb.append(" -h").append(url);
        sb.append(" -u").append(root);
        sb.append(" -p").append(pwd);
        sb.append(" ").append(dbName).append(" >");
        sb.append(pathSql);
        System.out.println("cmd命令为:" + sb.toString());
        Runtime runtime = Runtime.getRuntime();
        System.out.println("开始备份:" + dbName);
        // windows: String[] command = { "cmd", "/c", <command> };
        String[] command = {"/bin/sh", "-c", sb.toString()};
        Process process = runtime.exec(command);
        // Bug fix: wait for mysqldump to finish before reporting the outcome;
        // the original printed "备份成功" immediately after spawning the process.
        int exitCode = process.waitFor();
        if (exitCode == 0) {
            System.out.println("备份成功!");
        } else {
            System.err.println("备份失败, mysqldump exit code = " + exitCode);
        }
    }

    public static void main(String[] args) throws Exception {
        String backName = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss").format(new Date()) + ".sql";
        BackUpService.getInstance().dbBackUp("127.0.0.1", "root", "123456", "game_user_001", "~/backup/", backName);
    }
}
| 801 |
1,256 | #include <windows.h>
#include "strclass.hpp"
// Construct an empty list: no storage allocated until the first Add/Insert.
TStrList::TStrList() : List(NULL), Count(0)
{
}
// Release every owned string and the pointer array on destruction.
TStrList::~TStrList()
{
 DeleteList();
}
// Free every stored string, then the pointer array itself, leaving the
// list in its empty (List==NULL, Count==0) state. No-op on an empty list.
void TStrList::DeleteList()
{
 if (!Count)
  return;
 for (int i=0; i<Count; i++)
  delete[] List[i];
 delete[] List;
 List=NULL;
 Count=0;
}
// Public wrapper: empty the list (frees all strings).
void __fastcall TStrList::Clear()
{
 DeleteList();
}
// Insert a copy of String at position Index (0..Count; Index==Count appends).
// A NULL String inserts an empty string. Returns FALSE on allocation failure
// or if Index is out of range.
//
// Fix: the original wrote tmpList[Index] without validating Index, so a
// negative or too-large Index caused an out-of-bounds write on the new array.
BOOL __fastcall TStrList::Insert(const TCHAR *String,int Index)
{
 int i;
 // Validate the target position before allocating anything.
 if ((Index<0) || (Index>Count))
  return FALSE;
 int len = String ? lstrlen(String) : 0;
 TCHAR **tmpList=new TCHAR*[Count+1];
 if (tmpList==NULL)
  return FALSE;
 for (i=0; i<Count; i++)
  tmpList[i]=List[i];
 // Shift the tail up one slot to open position Index (no-op when appending).
 for (i=Count-1; i>=Index; i--)
  tmpList[i+1]=tmpList[i];
 tmpList[Index]=new TCHAR[len+1];
 if (String)
  lstrcpy(tmpList[Index],String);
 else
  tmpList[Index][0]=0;
 Count++;
 delete[] List;
 List=tmpList;
 return TRUE;
}
// Append a copy of String at the end of the list.
BOOL __fastcall TStrList::Add(const TCHAR *String)
{
 return(Insert(String,Count));
}
// Remove the string at Index, compacting the pointer array.
// Returns TRUE only when an element was actually removed.
//
// Fixes two defects in the original: (1) the temporary array was allocated
// before validating Index and leaked when the index was out of range;
// (2) TRUE was returned for an out-of-range index even though nothing
// had been deleted.
BOOL __fastcall TStrList::Delete(int Index)
{
 int i;
 if (!Count || (Index<0) || (Index>=Count))
  return FALSE;
 if (Count==1)
 {
  // Removing the only element empties the list entirely.
  DeleteList();
  return TRUE;
 }
 TCHAR **tmpList=new TCHAR*[Count-1];
 if (tmpList==NULL)
  return FALSE;
 delete[] List[Index];
 for (i=0; i<Index; i++)
  tmpList[i]=List[i];
 for (i=Index+1; i<Count; i++)
  tmpList[i-1]=List[i];
 delete[] List;
 List=tmpList;
 Count--;
 return TRUE;
}
// Replace the string stored at Index with a copy of String (NULL stores
// an empty string). Returns FALSE when the list is empty or the index is
// out of range.
BOOL __fastcall TStrList::SetText(const TCHAR *String,int Index)
{
 int len=lstrlen(String);
 if (Count)
  if ((Index<Count) && (Index>=0))
  {
   delete[] List[Index];
   List[Index]=new TCHAR[len+1];
   // lstrcpyn copies at most len chars and always NUL-terminates.
   if (String)
    lstrcpyn(List[Index],String,len+1);
   else
    List[Index][0]=0;
   return TRUE;
  }
 return FALSE;
}
// Copy the string at Index into the caller-supplied buffer String and
// return the buffer. On an empty list or invalid index the buffer is set
// to the empty string. The buffer must be large enough for the stored
// string — the caller is responsible for sizing it.
TCHAR *__fastcall TStrList::GetText(TCHAR *String,int Index)
{
 if (Count)
  if ((Index<Count) && (Index>=0))
   return lstrcpy(String,List[Index]);
 String[0]=0;
 return String;
}
// Return a pointer to the internally-owned string at Index, or NULL when
// the index is out of range (which also covers an empty list).
TCHAR *__fastcall TStrList::GetText(int Index)
{
 if ((Index>=0) && (Index<Count))
  return List[Index];
 return NULL;
}
// Recursive quicksort over List[Low..Up] using CmpStr (declared elsewhere)
// as the comparison; pivot is the middle element.
// NOTE(review): empty strings (*List[i] == 0) are advanced past without
// comparing against the pivot, so their final placement depends on scan
// order rather than CmpStr — presumably intentional, confirm.
void _fastcall TStrList::Sort(int Low,int Up)
{
 int i,j;
 TCHAR *x;
 TCHAR *y;
 if (Count)
 {
  i=Low; j=Up;
  // Pivot: middle element of the current partition.
  x=List[(Low+Up)/2];
  do
  {
   if (*List[i])
   {
    while (CmpStr(List[i],x)<0)
     i++;
   }
   else
    i++;
   if (*List[j])
   {
    while (CmpStr(x,List[j])<0)
     j--;
   }
   else
    j--;
   if (i<=j)
   {
    // Swap the out-of-place pair and keep scanning inward.
    y=List[i];
    List[i]=List[j];
    List[j]=y;
    i++;
    j--;
   }
  }
  while (i<j);
  // Recurse into both partitions.
  if (Low<j) Sort(Low,j);
  if (i<Up) Sort(i,Up);
 }
}
// Deep-copy assignment: discard the current contents and append a copy of
// every string held by lst. Self-assignment is a no-op.
TStrList &TStrList::operator=(TStrList &lst)
{
 if (this==&lst)
  return *this;
 DeleteList();
 const int n=lst.GetCount();
 for (int i=0; i<n; i++)
  Add(lst.GetText(i));
 return *this;
}
| 1,360 |
410 | <gh_stars>100-1000
////////////////////////////////////////////////////////////////////////////////
//
// Vookoo compute example (C) 2018 <NAME>
//
// This is a simple introduction to the vulkan C++ interface by way of Vookoo
// which is a layer to make creating Vulkan resources easy.
//
#define VKU_NO_GLFW
#include <vku/vku.hpp>
#include <vku/vku_framework.hpp>
int main() {
  vku::Framework fw{"Hello compute"};
  if (!fw.ok()) {
    std::cout << "Framework creation failed" << std::endl;
    exit(1);
  }
  // Get a device from the demo framework.
  auto device = fw.device();
  auto cache = fw.pipelineCache();
  auto descriptorPool = fw.descriptorPool();
  auto memprops = fw.memprops();
  // Command pool on the compute queue family; buffers are short-lived and
  // individually resettable.
  typedef vk::CommandPoolCreateFlagBits ccbits;
  vk::CommandPoolCreateInfo cpci{ ccbits::eTransient|ccbits::eResetCommandBuffer, fw.computeQueueFamilyIndex() };
  auto commandPool = device.createCommandPoolUnique(cpci);
  // Number of floats in the storage buffer (and of workgroups dispatched).
  static constexpr int N = 128;
  // Up to 256 bytes of immediate data.
  struct PushConstants {
    float value; // The shader just adds this to the buffer.
    float pad[3]; // Buffers are usually 16 byte aligned.
  };
  // Descriptor set layout.
  // Shader has access to a single storage buffer.
  vku::DescriptorSetLayoutMaker dsetlm{};
  dsetlm.buffer(0U, vk::DescriptorType::eStorageBuffer, vk::ShaderStageFlagBits::eCompute, 1);
  auto dsetLayout = dsetlm.createUnique(device);
  // The descriptor set itself.
  vku::DescriptorSetMaker dsm{};
  dsm.layout(*dsetLayout);
  auto dsets = dsm.create(device, descriptorPool);
  auto descriptorSet = dsets[0];
  // Pipeline layout.
  // Shader has one descriptor set and some push constants.
  vku::PipelineLayoutMaker plm{};
  plm.descriptorSetLayout(*dsetLayout);
  plm.pushConstantRange(vk::ShaderStageFlagBits::eCompute, 0, sizeof(PushConstants));
  auto pipelineLayout = plm.createUnique(device);
  // The pipeline itself.
  auto shader = vku::ShaderModule{device, BINARY_DIR "helloCompute.comp.spv"};
  vku::ComputePipelineMaker cpm{};
  cpm.shader(vk::ShaderStageFlagBits::eCompute, shader);
  auto pipeline = cpm.createUnique(device, cache, *pipelineLayout);
  // A buffer to store the results in.
  // Note: this won't work for everyone. With some devices you
  // may need to explictly upload and download data.
  using bflags = vk::BufferUsageFlagBits;
  using mflags = vk::MemoryPropertyFlagBits;
  auto mybuf = vku::GenericBuffer(device, memprops, bflags::eStorageBuffer, N * sizeof(float), mflags::eHostVisible);
  // Point binding 0 of the descriptor set at our buffer.
  vku::DescriptorSetUpdater update;
  update.beginDescriptorSet(descriptorSet);
  update.beginBuffers(0, 0, vk::DescriptorType::eStorageBuffer);
  update.buffer(mybuf.buffer(), 0, N * sizeof(float));
  update.update(device); // this only copies the pointer, not any data.
  // Run some code on the GPU.
  vku::executeImmediately(device, *commandPool, fw.computeQueue(), [&](vk::CommandBuffer cb) {
    PushConstants cu = {2.0f};
    cb.pushConstants(*pipelineLayout, vk::ShaderStageFlagBits::eCompute, 0, sizeof(PushConstants), &cu);
    cb.bindDescriptorSets(vk::PipelineBindPoint::eCompute, *pipelineLayout, 0, descriptorSet, nullptr);
    cb.bindPipeline(vk::PipelineBindPoint::eCompute, *pipeline);
    cb.dispatch(N, 1, 1);
  });
  // Block until the GPU has finished before reading the buffer back.
  device.waitIdle();
  // Print the result (2.0f + 0..127)
  float * p = (float*)mybuf.map(device);
  for (int i = 0; i != N; ++i) {
    printf("%f ", p[i]);
  }
  printf("\n");
  mybuf.unmap(device);
}
| 1,201 |
1,444 |
package mage.game.command.emblems;
import mage.abilities.Ability;
import mage.abilities.common.SimpleStaticAbility;
import mage.abilities.effects.common.continuous.BoostControlledEffect;
import mage.constants.Duration;
import mage.constants.Zone;
import mage.game.command.Emblem;
/**
 * Emblem created by Gideon, Ally of Zendikar. While in the command zone it
 * grants a static +1/+1 boost (until end of game) to creatures its
 * controller controls.
 *
 * @author spjspj
 */
public final class GideonAllyOfZendikarEmblem extends Emblem {

    public GideonAllyOfZendikarEmblem() {
        this.setName("Emblem Gideon");
        this.getAbilities().add(new SimpleStaticAbility(
                Zone.COMMAND,
                new BoostControlledEffect(1, 1, Duration.EndOfGame)));
        this.setExpansionSetCodeForImage("BFZ");
    }
}
| 246 |
634 | <filename>modules/base/diff-impl/src/main/java/com/intellij/diff/actions/DocumentsSynchronizer.java
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.diff.actions;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.event.DocumentAdapter;
import com.intellij.openapi.editor.event.DocumentEvent;
import com.intellij.openapi.project.Project;
import consulo.ui.annotation.RequiredUIAccess;
import kava.beans.PropertyChangeEvent;
import kava.beans.PropertyChangeListener;
import javax.annotation.Nonnull;
/**
 * Keeps two {@link Document}s in sync: subclasses implement the actual
 * propagation in {@link #onDocumentChanged1}/{@link #onDocumentChanged2},
 * while this base class wires up the listeners and suppresses re-entrant
 * notifications while one side is being programmatically modified.
 */
abstract class DocumentsSynchronizer {
  @Nonnull
  protected final Document myDocument1;
  @Nonnull
  protected final Document myDocument2;
  @javax.annotation.Nullable
  private final Project myProject;
  // Re-entrancy guard: set while replaceString() mutates a document so the
  // listeners below ignore the change events we cause ourselves.
  private volatile boolean myDuringModification = false;
  private final DocumentAdapter myListener1 = new DocumentAdapter() {
    @Override
    public void documentChanged(DocumentEvent e) {
      if (myDuringModification) return;
      onDocumentChanged1(e);
    }
  };
  private final DocumentAdapter myListener2 = new DocumentAdapter() {
    @Override
    public void documentChanged(DocumentEvent e) {
      if (myDuringModification) return;
      onDocumentChanged2(e);
    }
  };
  // Mirrors document1's writability onto document2 (read-only follows along).
  private final PropertyChangeListener myROListener = new PropertyChangeListener() {
    @Override
    public void propertyChange(PropertyChangeEvent evt) {
      if (Document.PROP_WRITABLE.equals(evt.getPropertyName())) getDocument2().setReadOnly(!getDocument1().isWritable());
    }
  };
  protected DocumentsSynchronizer(@javax.annotation.Nullable Project project, @Nonnull Document document1, @Nonnull Document document2) {
    myProject = project;
    myDocument1 = document1;
    myDocument2 = document2;
  }
  @Nonnull
  public Document getDocument1() {
    return myDocument1;
  }
  @Nonnull
  public Document getDocument2() {
    return myDocument2;
  }
  /** Called when document1 changed by user action; propagate to document2. */
  protected abstract void onDocumentChanged1(@Nonnull DocumentEvent event);
  /** Called when document2 changed by user action; propagate to document1. */
  protected abstract void onDocumentChanged2(@Nonnull DocumentEvent event);
  /**
   * Replace [startOffset, endOffset) of {@code document} with {@code newText}
   * inside a write-action command, with the re-entrancy guard raised so the
   * change does not bounce back through the listeners.
   */
  @RequiredUIAccess
  protected void replaceString(@Nonnull final Document document,
                               final int startOffset,
                               final int endOffset,
                               @Nonnull final CharSequence newText) {
    try {
      myDuringModification = true;
      CommandProcessor.getInstance().executeCommand(myProject, new Runnable() {
        @Override
        public void run() {
          assert endOffset <= document.getTextLength();
          ApplicationManager.getApplication().runWriteAction(new Runnable() {
            @Override
            public void run() {
              document.replaceString(startOffset, endOffset, newText);
            }
          });
        }
      }, "Synchronize document and its fragment", document);
    }
    finally {
      myDuringModification = false;
    }
  }
  /** Attach all listeners; must be balanced by {@link #stopListen()}. */
  public void startListen() {
    myDocument1.addDocumentListener(myListener1);
    myDocument2.addDocumentListener(myListener2);
    myDocument1.addPropertyChangeListener(myROListener);
  }
  /** Detach all listeners registered by {@link #startListen()}. */
  public void stopListen() {
    myDocument1.removeDocumentListener(myListener1);
    myDocument2.removeDocumentListener(myListener2);
    myDocument1.removePropertyChangeListener(myROListener);
  }
}
| 1,331 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-wg8h-q3rc-9wvf",
"modified": "2022-05-04T00:28:36Z",
"published": "2022-05-04T00:28:36Z",
"aliases": [
"CVE-2012-0187"
],
"details": "Untrusted search path vulnerability in IBM Lotus Expeditor 6.1.x and 6.2.x before 6.2 FP5+Security Pack allows local users to gain privileges via a Trojan horse DLL in the current working directory.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2012-0187"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/72097"
},
{
"type": "WEB",
"url": "http://www.ibm.com/support/docview.wss?uid=swg21575642"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 405 |
361 | <gh_stars>100-1000
package li.cil.oc2.common.container;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.tags.ITag;
/**
 * A fixed-size item stack handler that additionally only accepts non-empty
 * stacks whose item is a member of the configured item tag ("device type").
 */
public class TypedItemStackHandler extends FixedSizeItemStackHandler {
    private final ITag<Item> acceptedItems;

    ///////////////////////////////////////////////////////////////////

    public TypedItemStackHandler(final int size, final ITag<Item> deviceType) {
        super(size);
        this.acceptedItems = deviceType;
    }

    ///////////////////////////////////////////////////////////////////

    @Override
    public boolean isItemValid(final int slot, final ItemStack stack) {
        if (!super.isItemValid(slot, stack) || stack.isEmpty()) {
            return false;
        }
        return acceptedItems.contains(stack.getItem());
    }
}
| 230 |
2,177 | <gh_stars>1000+
from lixian_commands.util import *
from lixian_cli_parser import *
from lixian_encoding import default_encoding
import lixian_help
import lixian_query
@command_line_parser(help=lixian_help.pause)
@with_parser(parse_login)
@with_parser(parse_colors)
@with_parser(parse_logging)
@command_line_option('i')
@command_line_option('all')
def pause_task(args):
	# Implements the "lixian pause" sub-command (Python 2 print syntax).
	# Logs in, resolves the tasks matching the command-line selection,
	# prints their names, then asks the server to pause them all.
	client = create_client(args)
	to_pause = lixian_query.search_tasks(client, args)
	print "Below files are going to be paused:"
	for x in to_pause:
		# task names are unicode; encode them for the console
		print x['name'].encode(default_encoding)
	client.pause_tasks(to_pause)
| 220 |
310 | {
"name": "Seaboard Block",
"description": "A portable MIDI keyboard.",
"url": "https://roli.com/products/blocks/seaboard-block"
}
| 49 |
852 | #ifndef HeavyFlavorAnalysis_SpecificDecay_BPHDecayGenericBuilder_h
#define HeavyFlavorAnalysis_SpecificDecay_BPHDecayGenericBuilder_h
/** \class BPHDecayGenericBuilder
*
* Description:
* Class to build a generic decay applying selections to the
* reconstructed particle
*
* \author <NAME> INFN Padova
*
*/
//----------------------
// Base Class Headers --
//----------------------
//------------------------------------
// Collaborating Class Declarations --
//------------------------------------
#include "HeavyFlavorAnalysis/SpecificDecay/interface/BPHMassSelect.h"
#include "HeavyFlavorAnalysis/SpecificDecay/interface/BPHChi2Select.h"
#include "HeavyFlavorAnalysis/SpecificDecay/interface/BPHMassFitSelect.h"
#include "FWCore/Framework/interface/Event.h"
//---------------
// C++ Headers --
//---------------
#include <string>
#include <vector>
// ---------------------
// -- Class Interface --
// ---------------------
class BPHDecayGenericBuilder {
public:
  /** Constructor
   *  mfs may be null; it is the (optionally pre-configured) mass-fit selector.
   */
  BPHDecayGenericBuilder(const edm::EventSetup& es, BPHMassFitSelect* mfs = nullptr);
  // deleted copy constructor and assignment operator
  BPHDecayGenericBuilder(const BPHDecayGenericBuilder& x) = delete;
  BPHDecayGenericBuilder& operator=(const BPHDecayGenericBuilder& x) = delete;
  /** Destructor
   */
  virtual ~BPHDecayGenericBuilder();
  /** Operations
   */
  /// set cuts
  void setMassMin(double m);
  void setMassMax(double m);
  void setMassRange(double mMin, double mMax);
  void setProbMin(double p);
  void setMassFitMin(double m);
  void setMassFitMax(double m);
  void setMassFitRange(double mMin, double mMax);
  /// get current cuts
  double getMassMin() const { return massSel->getMassMin(); }
  double getMassMax() const { return massSel->getMassMax(); }
  double getProbMin() const { return chi2Sel->getProbMin(); }
  double getMassFitMin() const { return mFitSel->getMassMin(); }
  double getMassFitMax() const { return mFitSel->getMassMax(); }
  /// track min p difference
  void setMinPDiff(double mpd) { minPDiff = mpd; }
  double getMinPDiff() { return minPDiff; }
protected:
  const edm::EventSetup* evSetup;
  // selection objects backing the getters/setters above
  BPHMassSelect* massSel;
  BPHChi2Select* chi2Sel;
  BPHMassFitSelect* mFitSel;
  double minPDiff;
  bool updated;
};
#endif
| 741 |
1,337 | /*
* Copyright (c) 2008-2016 Haulmont.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.haulmont.cuba.core.app.serialization;
import com.google.gson.*;
import com.haulmont.chile.core.model.MetaClass;
import com.haulmont.chile.core.model.MetaProperty;
import com.haulmont.cuba.core.entity.Entity;
import com.haulmont.cuba.core.global.FetchMode;
import com.haulmont.cuba.core.global.Metadata;
import com.haulmont.cuba.core.global.View;
import com.haulmont.cuba.core.global.ViewProperty;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static com.haulmont.cuba.core.app.serialization.ViewSerializationOption.COMPACT_FORMAT;
import static com.haulmont.cuba.core.app.serialization.ViewSerializationOption.INCLUDE_FETCH_MODE;
import static com.haulmont.cuba.core.global.FetchMode.AUTO;
/**
 * Converts {@link View} objects to and from JSON. Supports a compact output
 * format where a named nested view is written in full only once and then
 * referenced by name (see {@link ViewSerializationOption}).
 */
@Component(ViewSerializationAPI.NAME)
public class ViewSerialization implements ViewSerializationAPI {
    @Inject
    protected Metadata metadata;
    private static final Logger log = LoggerFactory.getLogger(ViewSerialization.class);
    @Override
    public View fromJson(String json) {
        return createGson().fromJson(json, View.class);
    }
    @Override
    public String toJson(View view, ViewSerializationOption... options) {
        return createGson(options).toJson(view);
    }
    // A fresh Gson is built per call because the adapters below are stateful
    // (they track already-processed views for the compact format).
    protected Gson createGson(ViewSerializationOption... options) {
        return new GsonBuilder()
                .registerTypeHierarchyAdapter(View.class, new ViewSerializer(options))
                .registerTypeHierarchyAdapter(View.class, new ViewDeserializer())
                .create();
    }
    protected class ViewSerializer implements JsonSerializer<View> {
        protected boolean compactFormat = false;
        protected boolean includeFetchMode = false;
        // Named views already written in full; later occurrences are emitted
        // as a plain name reference (compact format only).
        protected List<View> processedViews = new ArrayList<>();
        public ViewSerializer(ViewSerializationOption[] options) {
            for (ViewSerializationOption option : options) {
                if (option == COMPACT_FORMAT) compactFormat = true;
                if (option == INCLUDE_FETCH_MODE) includeFetchMode = true;
            }
        }
        @Override
        public JsonElement serialize(View src, Type typeOfSrc, JsonSerializationContext context) {
            return serializeView(src);
        }
        protected JsonObject serializeView(View view) {
            JsonObject jsonObject = new JsonObject();
            jsonObject.addProperty("name", view.getName());
            MetaClass metaClass = metadata.getClassNN(view.getEntityClass());
            jsonObject.addProperty("entity", metaClass.getName());
            jsonObject.add("properties", createJsonArrayOfViewProperties(view));
            return jsonObject;
        }
        protected JsonArray createJsonArrayOfViewProperties(View view) {
            JsonArray propertiesArray = new JsonArray();
            for (ViewProperty viewProperty : view.getProperties()) {
                View nestedView = viewProperty.getView();
                if (nestedView == null) {
                    //add simple property as string primitive
                    propertiesArray.add(viewProperty.getName());
                } else {
                    JsonObject propertyObject = new JsonObject();
                    propertyObject.addProperty("name", viewProperty.getName());
                    String nestedViewName = nestedView.getName();
                    if (compactFormat) {
                        if (StringUtils.isNotEmpty(nestedViewName)) {
                            View processedView = findProcessedView(processedViews, nestedView.getEntityClass(), nestedViewName);
                            if (processedView == null) {
                                processedViews.add(nestedView);
                                propertyObject.add("view", createJsonObjectForNestedView(nestedView));
                            } else {
                                //if we already processed this view, just add its name as a string
                                propertyObject.addProperty("view", nestedViewName);
                            }
                        } else {
                            // anonymous nested views cannot be referenced by name
                            propertyObject.add("view", createJsonObjectForNestedView(nestedView));
                        }
                    } else {
                        propertyObject.add("view", createJsonObjectForNestedView(nestedView));
                    }
                    // AUTO is the default, so it is omitted from the output
                    if (includeFetchMode && viewProperty.getFetchMode() != null && viewProperty.getFetchMode() != FetchMode.AUTO) {
                        propertyObject.addProperty("fetch", viewProperty.getFetchMode().name());
                    }
                    propertiesArray.add(propertyObject);
                }
            }
            return propertiesArray;
        }
        protected JsonObject createJsonObjectForNestedView(View nestedView) {
            JsonObject viewObject = new JsonObject();
            String nestedViewName = nestedView.getName();
            if (StringUtils.isNotEmpty(nestedViewName)) {
                viewObject.addProperty("name", nestedViewName);
            }
            JsonArray nestedViewProperties = createJsonArrayOfViewProperties(nestedView);
            viewObject.add("properties", nestedViewProperties);
            return viewObject;
        }
    }
    protected class ViewDeserializer implements JsonDeserializer<View> {
        // Named views already reconstructed; compact-format name references
        // are resolved against this list.
        protected List<View> processedViews = new ArrayList<>();
        @Override
        public View deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context) throws JsonParseException {
            return deserializeView(json.getAsJsonObject());
        }
        protected View deserializeView(JsonObject jsonObject) {
            String viewName = jsonObject.getAsJsonPrimitive("name").getAsString();
            String entityName = jsonObject.getAsJsonPrimitive("entity").getAsString();
            JsonArray properties = jsonObject.getAsJsonArray("properties");
            MetaClass metaClass = metadata.getClass(entityName);
            if (metaClass == null) {
                throw new ViewSerializationException(String.format("Entity with name %s not found", entityName));
            }
            View view = new View(metaClass.getJavaClass(), viewName, false);
            fillViewProperties(view, properties, metaClass);
            return view;
        }
        protected void fillViewProperties(View view, JsonArray propertiesArray, MetaClass viewMetaClass) {
            for (JsonElement propertyElement : propertiesArray) {
                //there may be a primitive or json object inside the properties array
                if (propertyElement.isJsonPrimitive()) {
                    String propertyName = propertyElement.getAsJsonPrimitive().getAsString();
                    view.addProperty(propertyName);
                } else {
                    JsonObject viewPropertyObj = propertyElement.getAsJsonObject();
                    FetchMode fetchMode = AUTO;
                    JsonPrimitive fetchPrimitive = viewPropertyObj.getAsJsonPrimitive("fetch");
                    if (fetchPrimitive != null) {
                        String fetch = fetchPrimitive.getAsString();
                        try {
                            fetchMode = FetchMode.valueOf(fetch);
                        } catch (IllegalArgumentException e) {
                            // unknown fetch modes are tolerated and fall back to AUTO
                            log.warn("Invalid fetch mode {}", fetch);
                        }
                    }
                    String propertyName = viewPropertyObj.getAsJsonPrimitive("name").getAsString();
                    JsonElement nestedViewElement = viewPropertyObj.get("view");
                    if (nestedViewElement == null) {
                        view.addProperty(propertyName, null, fetchMode);
                    } else {
                        MetaProperty metaProperty = viewMetaClass.getProperty(propertyName);
                        if (metaProperty == null) {
                            // skip properties that no longer exist in the metamodel
                            log.warn("Cannot deserialize view property. Property {} of entity {} doesn't exist",
                                    propertyName, viewMetaClass.getName());
                            continue;
                        }
                        MetaClass nestedViewMetaClass = metaProperty.getRange().asClass();
                        Class<? extends Entity> nestedViewEntityClass = nestedViewMetaClass.getJavaClass();
                        if (nestedViewElement.isJsonObject()) {
                            JsonObject nestedViewObject = nestedViewElement.getAsJsonObject();
                            View nestedView;
                            JsonPrimitive viewNamePrimitive = nestedViewObject.getAsJsonPrimitive("name");
                            if (viewNamePrimitive != null) {
                                nestedView = new View(nestedViewEntityClass, viewNamePrimitive.getAsString(), false);
                                processedViews.add(nestedView);
                            } else {
                                nestedView = new View(nestedViewEntityClass, false);
                            }
                            JsonArray nestedProperties = nestedViewObject.getAsJsonArray("properties");
                            fillViewProperties(nestedView, nestedProperties, nestedViewMetaClass);
                            view.addProperty(propertyName, nestedView, fetchMode);
                        } else if (nestedViewElement.isJsonPrimitive()) {
                            //if view was serialized with the ViewSerializationOption.COMPACT_FORMAT
                            String nestedViewName = nestedViewElement.getAsString();
                            View processedView = findProcessedView(processedViews, nestedViewEntityClass, nestedViewName);
                            if (processedView != null) {
                                view.addProperty(propertyName, processedView, fetchMode);
                            } else {
                                throw new ViewSerializationException(String.format("View %s was not defined in the JSON", nestedViewName));
                            }
                        }
                    }
                }
            }
        }
    }
    // Shared lookup: find a previously processed view by entity class + name.
    @Nullable
    protected View findProcessedView(Collection<View> processedViews, Class<? extends Entity> aClass, String viewName) {
        for (View view : processedViews) {
            if (aClass.equals(view.getEntityClass()) && viewName.equals(view.getName())) {
                return view;
            }
        }
        return null;
    }
}
| 5,184 |
6,185 | <gh_stars>1000+
package skin.support.observe;
/**
 * Observer side of the skin-change notification mechanism: implementations
 * are called back when the observed {@code SkinObservable} updates.
 */
public interface SkinObserver {
    /** Apply the new skin; {@code o} is an optional payload from the observable. */
    void updateSkin(SkinObservable observable, Object o);
}
415 | /*--------------------------------------------------------------------------*\
Copyright (c) 2008-2009, <NAME>. All rights reserved.
http://www.dannyruijters.nl/cubicinterpolation/
This file is part of CUDA Cubic B-Spline Interpolation (CI).
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holders nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are
those of the authors and should not be interpreted as representing official
policies, either expressed or implied.
\*--------------------------------------------------------------------------*/
#include <intrin.h>
// Per-lane coefficients of the cubic B-spline weight formulas used in
// bsplineSSE() below (lane order follows _mm_set_ps: highest lane first).
const __m128 mc0 = _mm_set_ps(1.0f/6.0f, -0.5f, -0.5f, 1.0f/6.0f);
const __m128 mc1 = _mm_set_ps(0.0f, 2.0f/3.0f, 2.0f/3.0f, 0.0f);
// Evaluate the four cubic B-spline weights for a sample offset `fraction`
// in [0,1). The returned vector holds w0..w3 in lanes 3..0 (the _mm_set_ps
// order), matching the commented scalar formulas below.
inline __m128 bsplineSSE(float fraction)
{
	// Creat all 4 weights
	const float one_frac = 1.0f - fraction;
	//w0 = 1.0f/6.0f * one_frac*one_frac*one_frac;
	//w1 = 2.0f/3.0f - 0.5f * fraction*fraction*(2.0f-fraction);
	//w2 = 2.0f/3.0f - 0.5f * one_frac*one_frac*(2.0f-one_frac);
	//w3 = 1.0f/6.0f * fraction*fraction*fraction;
	__m128 m0 = _mm_set_ps(one_frac, fraction, one_frac, fraction);
	__m128 m1 = _mm_set_ps(one_frac, 2.0f-fraction, 2.0f-one_frac, fraction);
	// m0*m0*m1 builds the cubic terms; mc0/mc1 apply the scale and offset.
	m0 = _mm_mul_ps(m0, m0);
	m0 = _mm_mul_ps(m0, m1);
	m0 = _mm_mul_ps(mc0, m0);
	return _mm_add_ps(mc1, m0);
}
// 4-component dot product of a and b, selecting the best available
// instruction set at compile time: SSE4 _mm_dp_ps, SSE3 horizontal adds,
// or a scalar fallback. The .m128_f32 element access is MSVC-specific.
inline float dot_product(__m128 a, __m128 b)
{
#if defined(SSE4)
	__m128 m = _mm_dp_ps(a, b, 0xff);
	return m.m128_f32[0];
#elif defined(SSE3)
	__m128 m = _mm_mul_ps(a, b);
	m = _mm_hadd_ps(m, m);
	m = _mm_hadd_ps(m, m);
	return m.m128_f32[0];
#else
	__m128 m = _mm_mul_ps(a, b);
	return m.m128_f32[0] + m.m128_f32[1] + m.m128_f32[2] + m.m128_f32[3];
#endif
}
// Weigh the four input vectors m[0..3] by the four B-spline weights in
// `bspline`, producing four weighted sums in one __m128. The SSE4 path
// uses four dot products; the fallback transposes m in place
// (note: _MM_TRANSPOSE4_PS modifies m) and accumulates scaled columns —
// both paths are intended to produce the same result.
inline __m128 convolute_loop(__m128 bspline, __m128 m[4])
{
#if defined(SSE4)
	return _mm_set_ps(
		dot_product(bspline, m[0]),
		dot_product(bspline, m[1]),
		dot_product(bspline, m[2]),
		dot_product(bspline, m[3]));
#else
	_MM_TRANSPOSE4_PS(m[3], m[2], m[1], m[0]);
	return
		_mm_add_ps( _mm_add_ps( _mm_add_ps(
		_mm_mul_ps(m[0], _mm_shuffle_ps(bspline, bspline, _MM_SHUFFLE(3,3,3,3))),
		_mm_mul_ps(m[1], _mm_shuffle_ps(bspline, bspline, _MM_SHUFFLE(2,2,2,2)))),
		_mm_mul_ps(m[2], _mm_shuffle_ps(bspline, bspline, _MM_SHUFFLE(1,1,1,1)))),
		_mm_mul_ps(m[3], _mm_shuffle_ps(bspline, bspline, _MM_SHUFFLE(0,0,0,0))));
#endif
}
// Tricubic B-spline interpolation of the scalar volume `tex` at `coord`,
// sampling the surrounding 4x4x4 voxel neighborhood. Uses MSVC-specific
// __m128/__m128i element access (.m128_f32 / .m128i_i32).
float interpolate_tricubic_SSE(float* tex, float3 coord, uint3 volumeExtent)
{
	// transform the coordinate from [0,extent] to [-0.5, extent-0.5]
	const __m128 coord_grid = _mm_sub_ps(_mm_set_ps(coord.x, coord.y, coord.z, 0.5f), _mm_set1_ps(0.5f));  //coord_grid = coord - 0.5f;
	__m128 indexF = _mm_cvtepi32_ps(_mm_cvttps_epi32(coord_grid));  //indexF = floor(coord_grid);
	const __m128 fraction = _mm_sub_ps(coord_grid, indexF);  //fraction = coord_grid - indexF;
	// clamp between 1 and volumeExtent-3
	// (keeps the 4x4x4 neighborhood fully inside the volume)
	indexF = _mm_max_ps(indexF, _mm_set1_ps(1.0f));
	indexF = _mm_min_ps(indexF, _mm_cvtepi32_ps(
		_mm_sub_epi32(_mm_set_epi32(volumeExtent.x, volumeExtent.y, volumeExtent.z, 4), _mm_set1_epi32(3))));
	// note that x,y,z are located in registers 3,2,1
	__m128 bspline_x = bsplineSSE(fraction.m128_f32[3]);
	__m128 bspline_y = bsplineSSE(fraction.m128_f32[2]);
	__m128 bspline_z = bsplineSSE(fraction.m128_f32[1]);
	// load the data: 16 vectors of 4 x-samples, covering the 4x4x4 block
	// whose corner is at index-1
	__m128 m0[16];
	__m128i index = _mm_sub_epi32(_mm_cvttps_epi32(indexF), _mm_set1_epi32(1));  //index = indexF - 1
	const float* p0 = tex + (index.m128i_i32[1] * volumeExtent.y + index.m128i_i32[2]) * volumeExtent.x + index.m128i_i32[3];
	const size_t slice = volumeExtent.x * volumeExtent.y;
	for (int z=0, i=0; z<4; z++)
	{
		const float* p1 = p0 + z * slice;
		for (int y=0; y<4; y++, i++)
		{
			m0[i] = _mm_set_ps(p1[0], p1[1], p1[2], p1[3]);
			p1 += volumeExtent.x;
		}
	}
	// convolution: collapse x, then y, then z with the respective weights
	__m128 m1[4] = {
		convolute_loop(bspline_x, m0),
		convolute_loop(bspline_x, m0+4),
		convolute_loop(bspline_x, m0+8),
		convolute_loop(bspline_x, m0+12)};
	return dot_product(bspline_z, convolute_loop(bspline_y, m1) );
}
| 2,277 |
575 | <filename>chrome/browser/plugins/plugin_installer.cc
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/plugins/plugin_installer.h"
#include <utility>
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/process/process.h"
#include "base/strings/stringprintf.h"
#include "chrome/browser/platform_util.h"
#include "chrome/browser/plugins/plugin_installer_observer.h"
#include "content/public/browser/browser_context.h"
#include "content/public/browser/web_contents.h"
// Starts with no registered observers; observers register themselves via
// AddObserver() / AddWeakObserver().
PluginInstaller::PluginInstaller() : strong_observer_count_(0) {}
PluginInstaller::~PluginInstaller() {
}
// Registers a "strong" observer. While at least one strong observer is
// present, weak observers are not told that only weak observers remain.
void PluginInstaller::AddObserver(PluginInstallerObserver* observer) {
  strong_observer_count_++;
  observers_.AddObserver(observer);
}
// Unregisters a strong observer. When the last strong observer goes away,
// the remaining weak observers are notified via OnlyWeakObserversLeft() so
// they can clean up.
void PluginInstaller::RemoveObserver(PluginInstallerObserver* observer) {
  strong_observer_count_--;
  observers_.RemoveObserver(observer);
  if (strong_observer_count_ == 0) {
    for (WeakPluginInstallerObserver& observer : weak_observers_)
      observer.OnlyWeakObserversLeft();
  }
}
// Weak observers do not keep the observation alive; they are only informed
// (via OnlyWeakObserversLeft()) once no strong observers remain.
void PluginInstaller::AddWeakObserver(WeakPluginInstallerObserver* observer) {
  weak_observers_.AddObserver(observer);
}
void PluginInstaller::RemoveWeakObserver(
    WeakPluginInstallerObserver* observer) {
  weak_observers_.RemoveObserver(observer);
}
// Opens |plugin_url| in a new foreground tab (referrer: the page that hosts
// the plugin content) and then notifies strong observers that the download
// step has finished.
void PluginInstaller::OpenDownloadURL(const GURL& plugin_url,
                                      content::WebContents* web_contents) {
  web_contents->OpenURL(content::OpenURLParams(
      plugin_url,
      content::Referrer(web_contents->GetURL(),
                        network::mojom::ReferrerPolicy::kDefault),
      WindowOpenDisposition::NEW_FOREGROUND_TAB, ui::PAGE_TRANSITION_TYPED,
      false));
  for (PluginInstallerObserver& observer : observers_)
    observer.DownloadFinished();
}
| 680 |
703 | <filename>Code/ThirdParty/Kraut/KrautFoundation/Containers/Inline/Map.inl
#ifndef AE_FOUNDATION_CONTAINERS_MAP_INL
#define AE_FOUNDATION_CONTAINERS_MAP_INL
#include "../../Math/Math.h"
#include "../../Basics/Checks.h"
#include "../../Memory/Memory.h"
namespace AE_NS_FOUNDATION
{
// ***** base iterator *****
// Advances the iterator to the in-order successor.
// The tree uses a self-linking nil sentinel, so the idiom
// "pLink[d] != pLink[d]->pLink[d]" means "this child is a real node".
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
void aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator_base::forward (void)
{
  // dir0 = left, dir1 = right; backward() uses the mirrored values
  const int dir0 = 0;
  const int dir1 = 1;
  if (m_pElement == nullptr)
  {
    AE_CHECK_DEV (m_pElement != nullptr, "aeMap::iterator_base::forward: The iterator is invalid (end).");
    return;
  }
  // if this element has a right child, go there and then search for the left most child of that
  if (m_pElement->m_pLink[dir1] != m_pElement->m_pLink[dir1]->m_pLink[dir1])
  {
    m_pElement = m_pElement->m_pLink[dir1];
    while (m_pElement->m_pLink[dir0] != m_pElement->m_pLink[dir0]->m_pLink[dir0])
      m_pElement = m_pElement->m_pLink[dir0];
    return;
  }
  // if this element has a parent and this element is that parents left child, go directly to the parent
  if ((m_pElement->m_pParent != m_pElement->m_pParent->m_pParent) &&
      (m_pElement->m_pParent->m_pLink[dir0] == m_pElement))
  {
    m_pElement = m_pElement->m_pParent;
    return;
  }
  // if this element has a parent and this element is that parents right child, search for the next parent, whose left child this is
  if ((m_pElement->m_pParent != m_pElement->m_pParent->m_pParent) &&
      (m_pElement->m_pParent->m_pLink[dir1] == m_pElement))
  {
    while (m_pElement->m_pParent->m_pLink[dir1] == m_pElement)
      m_pElement = m_pElement->m_pParent;
    // if we are at the root node..
    if ((m_pElement->m_pParent == m_pElement->m_pParent->m_pParent) ||
        (m_pElement->m_pParent == nullptr))
    {
      m_pElement = nullptr;
      return;
    }
    m_pElement = m_pElement->m_pParent;
    return;
  }
  // no successor exists: the iterator becomes end()
  m_pElement = nullptr;
  return;
}
// Moves the iterator to the in-order predecessor. This is forward() with
// the link directions mirrored (dir0/dir1 swapped); the comments below are
// adjusted accordingly (the originals were copy-pasted from forward()).
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
void aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator_base::backward (void)
{
  // mirrored direction indices: dir1 is now the LEFT link
  const int dir0 = 1;
  const int dir1 = 0;
  if (m_pElement == nullptr)
  {
    AE_CHECK_DEV (m_pElement != nullptr, "aeMap::iterator_base::backward: The iterator is invalid (end).");
    return;
  }
  // if this element has a left child, go there and then search for the right most child of that
  if (m_pElement->m_pLink[dir1] != m_pElement->m_pLink[dir1]->m_pLink[dir1])
  {
    m_pElement = m_pElement->m_pLink[dir1];
    while (m_pElement->m_pLink[dir0] != m_pElement->m_pLink[dir0]->m_pLink[dir0])
      m_pElement = m_pElement->m_pLink[dir0];
    return;
  }
  // if this element has a parent and this element is that parents right child, go directly to the parent
  if ((m_pElement->m_pParent != m_pElement->m_pParent->m_pParent) &&
      (m_pElement->m_pParent->m_pLink[dir0] == m_pElement))
  {
    m_pElement = m_pElement->m_pParent;
    return;
  }
  // if this element has a parent and this element is that parents left child, search for the next parent, whose right child this is
  if ((m_pElement->m_pParent != m_pElement->m_pParent->m_pParent) &&
      (m_pElement->m_pParent->m_pLink[dir1] == m_pElement))
  {
    while (m_pElement->m_pParent->m_pLink[dir1] == m_pElement)
      m_pElement = m_pElement->m_pParent;
    // if we are at the root node..
    if ((m_pElement->m_pParent == m_pElement->m_pParent->m_pParent) ||
        (m_pElement->m_pParent == nullptr))
    {
      m_pElement = nullptr;
      return;
    }
    m_pElement = m_pElement->m_pParent;
    return;
  }
  // no predecessor exists (we were at begin()): the iterator becomes invalid
  m_pElement = nullptr;
  return;
}
// ***** aeMap *****
// Fresh nodes start at level 0 with no parent; links are set when the node
// is acquired and wired into the tree.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode::aeNode (void) : m_uiLevel (0), m_pParent (nullptr)
{
}
// Initializes an empty map: the nil sentinel links to itself in every
// direction and acts as root, parent-of-root and leaf terminator.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeMap (void)
{
  m_uiSize = 0;
  m_NilNode.m_uiLevel = 0;
  m_NilNode.m_pLink[0] = &m_NilNode;
  m_NilNode.m_pLink[1] = &m_NilNode;
  m_NilNode.m_pParent = &m_NilNode;
  m_pRoot = &m_NilNode;
}
// Copy-constructs by first establishing the empty-map invariants (each map
// owns its own sentinel) and then deep-copying via operator=.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeMap (const aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>& cc)
{
  m_uiSize = 0;
  m_NilNode.m_uiLevel = 0;
  m_NilNode.m_pLink[0] = &m_NilNode;
  m_NilNode.m_pLink[1] = &m_NilNode;
  m_NilNode.m_pParent = &m_NilNode;
  m_pRoot = &m_NilNode;
  operator= (cc);
}
// Destroys all stored elements; the backing containers free their memory.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::~aeMap ()
{
  clear ();
}
// Deep-copies all key/value pairs from rhs into this map.
// Fix: guard against self-assignment — without it, "m = m" would clear()
// this map first and thereby destroy the very elements it is about to
// copy from, losing all data.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
void aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::operator= (const aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>& rhs)
{
  if (this == &rhs)
    return;
  clear ();
  // insert in ascending key order; each insert rebalances as needed
  const_iterator itend = rhs.end ();
  for (const_iterator it = rhs.begin (); it != itend; ++it)
    insert (it.key (), it.value ());
}
// Removes all elements and restores the empty-map invariants.
// The node destructors are run via an in-order walk; the node memory itself
// is owned by m_Elements and released wholesale below.
// NOTE(review): the walk advances through nodes whose destructor has already
// run — this works because the link pointers are left intact, but it relies
// on aeNode's destruction not scrambling them; confirm if aeNode changes.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
void aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::clear (void)
{
  iterator it = begin ();
  const iterator itend = end ();
  for ( ; it != itend; ++it)
    aeMemoryManagement::Destruct<aeNode> (it.m_pElement);
  m_FreeElements.clear ();
  m_Elements.clear ();
  m_uiSize = 0;
  // re-establish the self-linking nil sentinel as the (empty) root
  m_NilNode.m_uiLevel = 0;
  m_NilNode.m_pLink[0] = &m_NilNode;
  m_NilNode.m_pLink[1] = &m_NilNode;
  m_NilNode.m_pParent = &m_NilNode;
  m_pRoot = &m_NilNode;
}
// Returns true if the map holds no elements.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
bool aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::empty (void) const
{
  return (m_uiSize == 0);
}
// Returns the number of key/value pairs currently stored.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
aeUInt32 aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::size (void) const
{
  return (m_uiSize);
}
// Iterator factories. end()/rend() are represented by a nullptr element,
// which matches the sentinel handling in iterator_base::forward()/backward().
// begin(): iterator at the smallest key.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::begin (void)
{
  return (iterator (GetLeftMost ()));
}
// end(): one-past-the-last sentinel iterator.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::end (void)
{
  return (iterator (nullptr));
}
// const begin(): read-only iterator at the smallest key.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::begin (void) const
{
  return (const_iterator (GetLeftMost ()));
}
// const end(): read-only sentinel iterator.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::end (void) const
{
  return (const_iterator (nullptr));
}
// rbegin(): reverse iteration starts at the largest key.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::reverse_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::rbegin (void)
{
  return (reverse_iterator (GetRightMost ()));
}
// rend(): reverse sentinel iterator.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::reverse_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::rend (void)
{
  return (reverse_iterator (nullptr));
}
// const rbegin(): read-only reverse iteration from the largest key.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_reverse_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::rbegin (void) const
{
  return (const_reverse_iterator (GetRightMost ()));
}
// const rend(): read-only reverse sentinel iterator.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_reverse_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::rend (void) const
{
  return (const_reverse_iterator (nullptr));
}
// Returns the node holding the smallest key (left-most node), or nullptr
// for an empty map.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::GetLeftMost (void) const
{
  if (empty ())
    return (nullptr);
  aeNode* pNode = m_pRoot;
  while (pNode->m_pLink[0] != &m_NilNode)
    pNode = pNode->m_pLink[0];
  return (pNode);
}
// Returns the node holding the largest key (right-most node), or nullptr
// for an empty map.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::GetRightMost (void) const
{
  if (empty ())
    return (nullptr);
  aeNode* pNode = m_pRoot;
  while (pNode->m_pLink[1] != &m_NilNode)
    pNode = pNode->m_pLink[1];
  return (pNode);
}
// Looks up 'key' and returns an iterator to it, or end() if not present.
// Key equivalence is derived from COMPARE alone: when COMPARE(a,b) and
// COMPARE(b,a) yield the same result (both false for a strict weak
// ordering), the keys are considered equal; otherwise the first comparison
// doubles as the descent direction (0 = left, 1 = right).
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::find (const KEY& key)
{
  aeNode* pNode = m_pRoot;
  while (pNode != &m_NilNode)// && (pNode->m_Key != key))
  {
    const int dir = (int) COMPARE () (pNode->m_Key, key);
    const int dir2= (int) COMPARE () (key, pNode->m_Key);
    if (dir == dir2)
      break;
    pNode = pNode->m_pLink[dir];
  }
  if (pNode == &m_NilNode)
    return (end ());
  return (iterator (pNode));
}
// const overload of find(); identical logic, read-only iterator.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::find (const KEY& key) const
{
  aeNode* pNode = m_pRoot;
  while (pNode != &m_NilNode)// && (pNode->m_Key != key))
  {
    const int dir = (int) COMPARE () (pNode->m_Key, key);
    const int dir2= (int) COMPARE () (key, pNode->m_Key);
    if (dir == dir2)
      break;
    pNode = pNode->m_pLink[dir];
  }
  if (pNode == &m_NilNode)
    return (end ());
  return (const_iterator (pNode));
}
// Returns an iterator to the first element whose key is not less than
// 'key', or end() if no such element exists.
// Note: despite its name, pNodeSmaller records the best candidate with
// key >= 'key' seen so far (dir == 0 means node.key is NOT less than key).
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::lower_bound (const KEY& key)
{
  aeNode* pNode = m_pRoot;
  aeNode* pNodeSmaller = nullptr;
  while (pNode != &m_NilNode)
  {
    const int dir = (int) COMPARE () (pNode->m_Key, key);
    const int dir2= (int) COMPARE () (key, pNode->m_Key);
    if (dir == dir2)
      return (iterator (pNode)); // exact match
    if (dir == 0)
      pNodeSmaller = pNode;      // candidate >= key; keep searching left
    pNode = pNode->m_pLink[dir];
  }
  return (iterator (pNodeSmaller));
}
// Returns an iterator to the first element whose key is greater than
// 'key', or end() if no such element exists. On an exact match, this is
// simply the successor of that element.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::upper_bound (const KEY& key)
{
  aeNode* pNode = m_pRoot;
  aeNode* pNodeSmaller = nullptr;
  while (pNode != &m_NilNode)
  {
    const int dir = (int) COMPARE () (pNode->m_Key, key);
    const int dir2= (int) COMPARE () (key, pNode->m_Key);
    if (dir == dir2)
    {
      // exact match: the upper bound is the next element in order
      iterator it (pNode);
      ++it;
      return (it);
    }
    if (dir == 0)
      pNodeSmaller = pNode;
    pNode = pNode->m_pLink[dir];
  }
  return (iterator (pNodeSmaller));
}
// const overload of lower_bound(); see the non-const version for details.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::lower_bound (const KEY& key) const
{
  aeNode* pNode = m_pRoot;
  aeNode* pNodeSmaller = nullptr;
  while (pNode != &m_NilNode)
  {
    const int dir = (int) COMPARE () (pNode->m_Key, key);
    const int dir2= (int) COMPARE () (key, pNode->m_Key);
    if (dir == dir2)
      return (const_iterator (pNode)); // exact match
    if (dir == 0)
      pNodeSmaller = pNode;            // candidate >= key
    pNode = pNode->m_pLink[dir];
  }
  return (const_iterator (pNodeSmaller));
}
// const overload of upper_bound(); see the non-const version for details.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::const_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::upper_bound (const KEY& key) const
{
  aeNode* pNode = m_pRoot;
  aeNode* pNodeSmaller = nullptr;
  while (pNode != &m_NilNode)
  {
    const int dir = (int) COMPARE () (pNode->m_Key, key);
    const int dir2= (int) COMPARE () (key, pNode->m_Key);
    if (dir == dir2)
    {
      // exact match: step to the successor
      const_iterator it (pNode);
      ++it;
      return (it);
    }
    if (dir == 0)
      pNodeSmaller = pNode;
    pNode = pNode->m_pLink[dir];
  }
  return (const_iterator (pNodeSmaller));
}
// Array-style access: returns a reference to the value stored under 'key',
// default-constructing and inserting one if the key is not yet present
// (std::map::operator[] semantics). Performs a second tree descent on the
// insert path.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
VALUE& aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::operator[] (const KEY& key)
{
  iterator it = find (key);
  if (it != end ())
    return (it.value ());
  return (insert (key, VALUE ()).value ());
}
// Inserts key->value and returns an iterator to the newly created node.
// NOTE(review): if the key already exists, the internal helper overwrites
// the stored value but never sets pInsertedNode, so this returns end() —
// callers must not rely on the result to locate an existing key.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::insert (const KEY& key, const VALUE& value)
{
  aeNode* pInsertedNode = nullptr;
  m_pRoot = insert (m_pRoot, key, value, pInsertedNode);
  // re-anchor root and sentinel parents after possible rotations
  m_pRoot->m_pParent = &m_NilNode;
  m_NilNode.m_pParent = &m_NilNode;
  return (iterator (pInsertedNode));
}
// Removes the element stored under 'key' and rebalances the tree.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
void aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::erase (const KEY& key)
{
  m_pRoot = erase (m_pRoot, key);
  m_pRoot->m_pParent = &m_NilNode;
  m_NilNode.m_pParent = &m_NilNode;
}
// Allocates (or recycles from the free list) a node and initializes it as a
// leaf at the given AA-tree level.
// Note: the method name carries a historical typo ("Accquire") and the
// level parameter uses member-style naming (m_uiLevel); both are kept for
// interface compatibility with the declaration in the header.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::AccquireNode (const KEY& key, const VALUE& value, int m_uiLevel, aeNode* pParent)
{
  aeNode* pNode;
  if (m_FreeElements.empty ())
  {
    // no recycled slot available: grow the backing store by one element
    m_Elements.push_back ();
    pNode = &m_Elements.back ();
  }
  else
  {
    // reuse a previously released slot
    pNode = m_FreeElements.top ();
    m_FreeElements.pop ();
  }
  aeMemoryManagement::Construct<aeNode> (pNode);
  pNode->m_pParent = pParent;
  pNode->m_Key = key;
  pNode->m_Value = value;
  pNode->m_uiLevel = m_uiLevel;
  // new nodes start as leaves: both children point at the nil sentinel
  pNode->m_pLink[0] = &m_NilNode;
  pNode->m_pLink[1] = &m_NilNode;
  ++m_uiSize;
  return (pNode);
}
// Runs the node's destructor and returns its slot to the free list for
// reuse by AccquireNode(). The node's memory stays owned by m_Elements.
// Fix: the assert message previously said "aeSet::ReleaseNode" — a
// copy/paste leftover from aeSet; it now names the correct class.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
void aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::ReleaseNode (aeNode* pNode)
{
  AE_CHECK_ALWAYS (pNode != nullptr, "aeMap::ReleaseNode: pNode is invalid.");
  aeMemoryManagement::Destruct<aeNode> (pNode);
  m_FreeElements.push (pNode);
  --m_uiSize;
}
// AA-tree 'skew': removes a left horizontal link (left child on the same
// level as its parent) with a right rotation. No-op at level 0 (sentinel).
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::SkewNode (aeNode* root)
{
  if ((root->m_pLink[0]->m_uiLevel == root->m_uiLevel) && (root->m_uiLevel != 0))
  {
    aeNode* save = root->m_pLink[0];
    root->m_pLink[0] = save->m_pLink[1];
    root->m_pLink[0]->m_pParent = root;
    save->m_pLink[1] = root;
    save->m_pLink[1]->m_pParent = save;
    root = save;
  }
  return root;
}
// AA-tree 'split': removes two consecutive right horizontal links with a
// left rotation, promoting the middle node one level. No-op at level 0.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::SplitNode (aeNode* root)
{
  if ((root->m_pLink[1]->m_pLink[1]->m_uiLevel == root->m_uiLevel) && (root->m_uiLevel != 0))
  {
    aeNode* save = root->m_pLink[1];
    root->m_pLink[1] = save->m_pLink[0];
    root->m_pLink[1]->m_pParent = root;
    save->m_pLink[0] = root;
    save->m_pLink[0]->m_pParent = save;
    root = save;
    ++root->m_uiLevel;
  }
  return root;
}
// Internal AA-tree insert: descends iteratively while recording the path in
// 'up', attaches the new leaf, then walks back up applying skew/split to
// restore the AA invariants. Returns the (possibly new) subtree root.
// If the key already exists, only the stored value is overwritten and
// pInsertedNode is left untouched (see the NOTE on the public insert()).
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::insert (aeNode* root, const KEY& key, const VALUE& value, aeNode*& pInsertedNode)
{
  if (root == &m_NilNode)
  {
    // empty (sub)tree: the new node becomes its root, at level 1
    pInsertedNode = AccquireNode (key, value, 1, &m_NilNode);
    root = pInsertedNode;
  }
  else
  {
    aeNode* it = root;
    aeNode* up[32]; // explicit path stack; 32 levels cover any 32-bit size
    int top = 0;
    int dir = 0;
    while (true)
    {
      up[top++] = it;
      dir = (int) COMPARE ()(it->m_Key, key);
      // element is identical => do not insert
      if ((int) COMPARE ()(key, it->m_Key) == dir)
      {
        it->m_Value = value;
        return (root);
      }
      if (it->m_pLink[dir] == &m_NilNode)
        break;
      it = it->m_pLink[dir];
    }
    // attach the new leaf below the last real node on the path
    pInsertedNode = AccquireNode (key, value, 1, it);
    it->m_pLink[dir] = pInsertedNode;
    // rebalance bottom-up: skew then split at every node on the path
    while ( --top >= 0 )
    {
      if (top != 0)
        dir = up[top - 1]->m_pLink[1] == up[top];
      up[top] = SkewNode (up[top]);
      up[top] = SplitNode (up[top]);
      if (top != 0)
      {
        // re-link the (possibly rotated) subtree into its parent
        up[top - 1]->m_pLink[dir] = up[top];
        up[top - 1]->m_pLink[dir]->m_pParent = up[top - 1];
      }
      else
        root = up[top];
    }
  }
  return root;
}
// Internal AA-tree erase: finds the node matching 'key', unlinks it
// (splicing in its in-order successor when it has two children), walks the
// recorded path back up rebalancing with skew/split, and finally relinks
// the surviving successor node into the erased node's position so that
// pointers/iterators to the successor stay valid. The structurally removed
// node is returned to the free list via ReleaseNode().
// Fix: erasing from an EMPTY tree previously fell through to
// ReleaseNode(&m_NilNode), destructing the nil sentinel and underflowing
// m_uiSize; we now return early in that case.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::aeNode* aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::erase (aeNode* root, const KEY& key)
{
  if (root == &m_NilNode)
    return root; // empty tree: nothing to erase; never release the sentinel
  aeNode* ToErase = &m_NilNode;    // node spliced out of the tree (successor)
  aeNode* ToOverride = &m_NilNode; // node whose tree position is vacated
  {
    aeNode* it = root;
    aeNode* up[32]; // explicit path stack
    int top = 0;
    int dir = 0;
    // descend to the node holding 'key', recording the path
    while (true)
    {
      up[top++] = it;
      if (it == &m_NilNode)
        return root; // key not present: tree unchanged
      int newdir = (int) (COMPARE () (it->m_Key, key));
      if (newdir == (int) (COMPARE () (key, it->m_Key)))
        break; // neither key compares less => keys are equivalent
      dir = newdir;
      it = it->m_pLink[dir];
    }
    ToOverride = it;
    if ((it->m_pLink[0] == &m_NilNode) || (it->m_pLink[1] == &m_NilNode))
    {
      // at most one child: link the parent straight to that child
      int dir2 = it->m_pLink[0] == &m_NilNode;
      if ( --top != 0 )
      {
        up[top - 1]->m_pLink[dir] = it->m_pLink[dir2];
        up[top - 1]->m_pLink[dir]->m_pParent = up[top - 1];
      }
      else
        root = it->m_pLink[1];
    }
    else
    {
      // two children: splice out the in-order successor (left-most node of
      // the right subtree); it later takes over this node's tree position
      aeNode* heir = it->m_pLink[1];
      aeNode* prev = it;
      while (heir->m_pLink[0] != &m_NilNode)
      {
        up[top++] = prev = heir;
        heir = heir->m_pLink[0];
      }
      ToErase = heir;
      ToOverride = it;
      prev->m_pLink[prev == it] = heir->m_pLink[1];
      prev->m_pLink[prev == it]->m_pParent = prev;
      //ToOverride->m_Key = ToErase->m_Key;
      //ToOverride->m_Value = ToErase->m_Value;
    }
    // walk back up, lowering levels and re-applying skew/split wherever the
    // removal violated the AA invariants (Andersson's deletion scheme)
    while ( --top >= 0 )
    {
      if (top != 0)
        dir = up[top - 1]->m_pLink[1] == up[top];
      if ((up[top]->m_pLink[0]->m_uiLevel < up[top]->m_uiLevel - 1) || (up[top]->m_pLink[1]->m_uiLevel < up[top]->m_uiLevel - 1))
      {
        if (up[top]->m_pLink[1]->m_uiLevel > --up[top]->m_uiLevel)
          up[top]->m_pLink[1]->m_uiLevel = up[top]->m_uiLevel;
        // three skews and two splits restore the invariants
        up[top] = SkewNode (up[top]);
        up[top]->m_pLink[1] = SkewNode (up[top]->m_pLink[1]);
        up[top]->m_pLink[1]->m_pParent = up[top];
        up[top]->m_pLink[1]->m_pLink[1] = SkewNode (up[top]->m_pLink[1]->m_pLink[1]);
        up[top] = SplitNode (up[top]);
        up[top]->m_pLink[1] = SplitNode (up[top]->m_pLink[1]);
        up[top]->m_pLink[1]->m_pParent = up[top];
      }
      if (top != 0)
      {
        up[top - 1]->m_pLink[dir] = up[top];
        up[top - 1]->m_pLink[dir]->m_pParent = up[top - 1];
      }
      else
        root = up[top];
    }
  }
  root->m_pParent = &m_NilNode;
  // if necessary, swap nodes: move the spliced-out successor (ToErase) into
  // the vacated position of ToOverride, so the successor's key/value need
  // not be copied and iterators pointing at it remain valid
  if (ToErase != &m_NilNode)
  {
    aeNode* parent = ToOverride->m_pParent;
    if (parent != &m_NilNode)
    {
      if (parent->m_pLink[0] == ToOverride)
      {
        parent->m_pLink[0] = ToErase;
        parent->m_pLink[0]->m_pParent = parent;
      }
      if (parent->m_pLink[1] == ToOverride)
      {
        parent->m_pLink[1] = ToErase;
        parent->m_pLink[1]->m_pParent = parent;
      }
    }
    else
      root = ToErase;
    ToErase->m_uiLevel = ToOverride->m_uiLevel;
    ToErase->m_pLink[0] = ToOverride->m_pLink[0];
    ToErase->m_pLink[0]->m_pParent = ToErase;
    ToErase->m_pLink[1] = ToOverride->m_pLink[1];
    ToErase->m_pLink[1]->m_pParent = ToErase;
  }
  // remove the erased node (returns its slot to the free list)
  ReleaseNode (ToOverride);
  return root;
}
// Erases the element at 'pos' and returns an iterator to its successor.
// The successor is captured before erasing; the erase implementation keeps
// the successor's node alive (it is relinked, not released), so the
// returned iterator stays valid.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::erase (const iterator& pos)
{
  AE_CHECK_DEV (pos.m_pElement != nullptr, "aeMap::erase: The iterator (pos) is invalid.");
  iterator temp (pos);
  ++temp;
  erase (pos.key ());
  return temp;
}
// Reverse-iterator overload: erases the element at 'pos' and returns a
// reverse iterator to the next element in reverse order.
template < class KEY, class VALUE, class COMPARE, bool NO_DEBUG_ALLOCATOR>
typename aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::reverse_iterator aeMap<KEY, VALUE, COMPARE, NO_DEBUG_ALLOCATOR>::erase (const reverse_iterator& pos)
{
  AE_CHECK_DEV (pos.m_pElement != nullptr, "aeMap::erase: The reverse_iterator (pos) is invalid.");
  reverse_iterator temp (pos);
  ++temp;
  erase (pos.key ());
  return temp;
}
}
#endif
| 11,062 |
678 | /**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/CorePDF.framework/CorePDF
*/
// Pre-formatting pass over parsed PDF layout objects (class-dump-generated
// header; the hex comments are the symbol addresses in the binary).
// NOTE(review): method semantics are inferred from their names only —
// confirm against the CorePDF implementation before relying on them.
@interface CPPreformatter : NSObject {
}
+ (void)preformatInPage:(id)page; // 0x43369
- (void)preformatInParagraph:(id)paragraph; // 0x433c5
- (void)preformatIn:(id)anIn; // 0x43281
@end
| 122 |
2,577 | <filename>engine-rest/engine-rest/src/main/java/org/camunda/bpm/engine/rest/impl/history/HistoricTaskInstanceRestServiceImpl.java
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest.impl.history;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.camunda.bpm.engine.ProcessEngine;
import org.camunda.bpm.engine.history.HistoricTaskInstance;
import org.camunda.bpm.engine.history.HistoricTaskInstanceQuery;
import org.camunda.bpm.engine.history.HistoricTaskInstanceReportResult;
import org.camunda.bpm.engine.history.ReportResult;
import org.camunda.bpm.engine.rest.dto.AbstractReportDto;
import org.camunda.bpm.engine.rest.dto.CountResultDto;
import org.camunda.bpm.engine.rest.dto.history.HistoricTaskInstanceDto;
import org.camunda.bpm.engine.rest.dto.history.HistoricTaskInstanceQueryDto;
import org.camunda.bpm.engine.rest.dto.history.HistoricTaskInstanceReportQueryDto;
import org.camunda.bpm.engine.rest.dto.history.HistoricTaskInstanceReportResultDto;
import org.camunda.bpm.engine.rest.dto.history.ReportResultDto;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.history.HistoricTaskInstanceRestService;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.ArrayList;
import java.util.List;
/**
* @author <NAME>
*
*/
/**
 * JAX-RS resource implementation for querying historic task instances and
 * for generating historic task instance reports (duration or count).
 */
public class HistoricTaskInstanceRestServiceImpl implements HistoricTaskInstanceRestService {

  protected ObjectMapper objectMapper;
  protected ProcessEngine processEngine;

  public HistoricTaskInstanceRestServiceImpl(ObjectMapper objectMapper, ProcessEngine processEngine) {
    this.objectMapper = objectMapper;
    this.processEngine = processEngine;
  }

  /** GET variant: builds the query DTO from the request's query parameters. */
  @Override
  public List<HistoricTaskInstanceDto> getHistoricTaskInstances(UriInfo uriInfo, Integer firstResult, Integer maxResults) {
    HistoricTaskInstanceQueryDto queryDto = new HistoricTaskInstanceQueryDto(objectMapper, uriInfo.getQueryParameters());
    return queryHistoricTaskInstances(queryDto, firstResult, maxResults);
  }

  /**
   * Executes the given query, optionally paginated, and maps every matching
   * historic task instance to its DTO representation.
   */
  @Override
  public List<HistoricTaskInstanceDto> queryHistoricTaskInstances(HistoricTaskInstanceQueryDto queryDto, Integer firstResult, Integer maxResults) {
    queryDto.setObjectMapper(objectMapper);
    HistoricTaskInstanceQuery query = queryDto.toQuery(processEngine);

    boolean paginate = (firstResult != null) || (maxResults != null);
    List<HistoricTaskInstance> instances = paginate
        ? executePaginatedQuery(query, firstResult, maxResults)
        : query.list();

    List<HistoricTaskInstanceDto> dtos = new ArrayList<HistoricTaskInstanceDto>();
    for (HistoricTaskInstance instance : instances) {
      dtos.add(HistoricTaskInstanceDto.fromHistoricTaskInstance(instance));
    }
    return dtos;
  }

  /** Applies defaults (offset 0, unbounded page size) before delegating to listPage(). */
  private List<HistoricTaskInstance> executePaginatedQuery(HistoricTaskInstanceQuery query, Integer firstResult, Integer maxResults) {
    int offset = (firstResult != null) ? firstResult : 0;
    int limit = (maxResults != null) ? maxResults : Integer.MAX_VALUE;
    return query.listPage(offset, limit);
  }

  /** GET variant of the count query. */
  @Override
  public CountResultDto getHistoricTaskInstancesCount(UriInfo uriInfo) {
    HistoricTaskInstanceQueryDto queryDto = new HistoricTaskInstanceQueryDto(objectMapper, uriInfo.getQueryParameters());
    return queryHistoricTaskInstancesCount(queryDto);
  }

  /** Executes the query as a count and wraps the number in a result DTO. */
  @Override
  public CountResultDto queryHistoricTaskInstancesCount(HistoricTaskInstanceQueryDto queryDto) {
    queryDto.setObjectMapper(objectMapper);
    HistoricTaskInstanceQuery query = queryDto.toQuery(processEngine);

    CountResultDto dto = new CountResultDto();
    dto.setCount(query.count());
    return dto;
  }

  /**
   * Dispatches to the duration or the completed-count report depending on
   * the reportType query parameter; any other value is a client error.
   */
  @Override
  public Response getHistoricTaskInstanceReport(UriInfo uriInfo) {
    HistoricTaskInstanceReportQueryDto reportQuery = new HistoricTaskInstanceReportQueryDto(objectMapper, uriInfo.getQueryParameters());
    String reportType = reportQuery.getReportType();

    if (AbstractReportDto.REPORT_TYPE_DURATION.equals(reportType)) {
      List<? extends ReportResult> reportResults = reportQuery.executeReport(processEngine);
      return Response.ok(generateDurationDto(reportResults)).build();
    }
    if (AbstractReportDto.REPORT_TYPE_COUNT.equals(reportType)) {
      List<HistoricTaskInstanceReportResult> reportResults = reportQuery.executeCompletedReport(processEngine);
      return Response.ok(generateCountDto(reportResults)).build();
    }
    throw new InvalidRequestException(Response.Status.BAD_REQUEST, "Parameter reportType is not set.");
  }

  /** Maps completed-task report rows to their DTO representation. */
  protected List<HistoricTaskInstanceReportResultDto> generateCountDto(List<HistoricTaskInstanceReportResult> results) {
    List<HistoricTaskInstanceReportResultDto> dtoList = new ArrayList<HistoricTaskInstanceReportResultDto>();
    for (HistoricTaskInstanceReportResult result : results) {
      dtoList.add(HistoricTaskInstanceReportResultDto.fromHistoricTaskInstanceReportResult(result));
    }
    return dtoList;
  }

  /** Maps duration report rows to their DTO representation. */
  protected List<ReportResultDto> generateDurationDto(List<? extends ReportResult> results) {
    List<ReportResultDto> dtoList = new ArrayList<ReportResultDto>();
    for (ReportResult result : results) {
      dtoList.add(ReportResultDto.fromReportResult(result));
    }
    return dtoList;
  }
}
| 1,920 |
403 | package io.craft.atom.rpc.api;
/**
* MBean for {@link RpcClient }
*
* @author mindwind
* @version 1.0, Oct 13, 2014
*/
public interface RpcClientMBean {
/**
 * Returns a diagnostic snapshot ("x-ray") of the {@link RpcClient }
 * this MBean is registered for, for inspection via JMX.
 *
 * @return x-ray of {@link RpcClient }
 */
RpcClientX x();
}
| 98 |
22,688 | <gh_stars>1000+
/******************************************************************************
* Copyright 2017 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
#include "modules/planning/tasks/deciders/speed_bounds_decider/st_boundary_mapper.h"
#include "cyber/common/log.h"
#include "gmock/gmock.h"
#include "modules/map/hdmap/hdmap_util.h"
#include "modules/planning/common/obstacle.h"
#include "modules/planning/reference_line/qp_spline_reference_line_smoother.h"
#include "modules/planning/tasks/deciders/speed_bounds_decider/speed_limit_decider.h"
namespace apollo {
namespace planning {
// Test fixture: loads the garage test map, extracts lane "1_-1" and builds
// a ReferenceLine plus a straight 100-point PathData (constant 0.1 m
// lateral offset) along it for the STBoundaryMapper under test.
class StBoundaryMapperTest : public ::testing::Test {
 public:
  virtual void SetUp() {
    hdmap_.LoadMapFromFile(map_file);
    const std::string lane_id = "1_-1";
    lane_info_ptr = hdmap_.GetLaneById(hdmap::MakeMapId(lane_id));
    if (!lane_info_ptr) {
      AERROR << "failed to find lane " << lane_id << " from map " << map_file;
      return;
    }
    // NOTE(review): 'config' is declared but never used below.
    ReferenceLineSmootherConfig config;
    injector_ = std::make_shared<DependencyInjector>();
    // Build reference points from the lane centerline samples.
    std::vector<ReferencePoint> ref_points;
    const auto& points = lane_info_ptr->points();
    const auto& headings = lane_info_ptr->headings();
    const auto& accumulate_s = lane_info_ptr->accumulate_s();
    for (size_t i = 0; i < points.size(); ++i) {
      std::vector<hdmap::LaneWaypoint> waypoint;
      waypoint.emplace_back(lane_info_ptr, accumulate_s[i]);
      hdmap::MapPathPoint map_path_point(points[i], headings[i], waypoint);
      ref_points.emplace_back(map_path_point, 0.0, 0.0);
    }
    reference_line_.reset(new ReferenceLine(ref_points));
    vehicle_position_ = points[0];
    path_data_.SetReferenceLine(reference_line_.get());
    // A simple frenet path: s = 0..99 m in 1 m steps, constant l = 0.1 m.
    std::vector<common::FrenetFramePoint> ff_points;
    for (int i = 0; i < 100; ++i) {
      common::FrenetFramePoint ff_point;
      ff_point.set_s(i * 1.0);
      ff_point.set_l(0.1);
      ff_points.push_back(std::move(ff_point));
    }
    frenet_frame_path_ = FrenetFramePath(std::move(ff_points));
    path_data_.SetFrenetPath(std::move(frenet_frame_path_));
  }

 protected:
  const std::string map_file =
      "modules/planning/testdata/garage_map/base_map.txt";
  hdmap::HDMap hdmap_;
  common::math::Vec2d vehicle_position_;  // first centerline point of the lane
  std::unique_ptr<ReferenceLine> reference_line_;
  hdmap::LaneInfoConstPtr lane_info_ptr = nullptr;
  PathData path_data_;
  FrenetFramePath frenet_frame_path_;
  std::shared_ptr<DependencyInjector> injector_;
};
// CheckOverlap() must report a collision when the vehicle, placed at path
// point (1, 1), intersects a 5x3 box centered at that same position (zero
// heading, zero lateral buffer).
TEST_F(StBoundaryMapperTest, check_overlap_test) {
  SpeedBoundsDeciderConfig config;
  double planning_distance = 70.0;
  double planning_time = 10.0;
  STBoundaryMapper mapper(config, *reference_line_, path_data_,
                          planning_distance, planning_time, injector_);
  common::PathPoint path_point;
  path_point.set_x(1.0);
  path_point.set_y(1.0);
  common::math::Box2d box(common::math::Vec2d(1.0, 1.0), 0.0, 5.0, 3.0);
  EXPECT_TRUE(mapper.CheckOverlap(path_point, box, 0.0));
}
} // namespace planning
} // namespace apollo
| 1,331 |
4,345 | //
// AMPopTip+Animation.h
// AMPopTip
//
// Created by <NAME> on 10/06/15.
// Copyright (c) 2015 Fancy Pixel. All rights reserved.
//
#import "AMPopTip.h"
// Category bundling the start/stop entry points of the pop tip's "action"
// animation (the attention-drawing animation configured on AMPopTip).
@interface AMPopTip (Animation)
/** Start the popover action animation
 *
 * Starts the popover action animation.
 * NOTE(review): the original doc said "Does nothing if the popover wasn't
 * animating in the first place", which reads as copy-pasted from the
 * dismiss variant — presumably this is a no-op when no action animation is
 * configured; confirm against the implementation.
 */
- (void)performActionAnimation;
/** Stops the popover action animation
 *
 * Stops the popover action animation. Does nothing if the popover wasn't animating in the first place.
 */
- (void)dismissActionAnimation;
@end
| 174 |
348 | {"nom":"Compreignac","circ":"3ème circonscription","dpt":"Haute-Vienne","inscrits":1415,"abs":675,"votants":740,"blancs":9,"nuls":8,"exp":723,"res":[{"nuance":"REM","nom":"<NAME>","voix":245},{"nuance":"FI","nom":"<NAME>","voix":143},{"nuance":"LR","nom":"M. <NAME>","voix":87},{"nuance":"FN","nom":"M. <NAME>","voix":83},{"nuance":"SOC","nom":"Mme <NAME>","voix":80},{"nuance":"ECO","nom":"M. <NAME>","voix":34},{"nuance":"COM","nom":"Mme <NAME>","voix":26},{"nuance":"EXG","nom":"<NAME>","voix":9},{"nuance":"DLF","nom":"M. <NAME>","voix":8},{"nuance":"DIV","nom":"Mme <NAME>","voix":4},{"nuance":"DIV","nom":"Mme <NAME>","voix":4}]} | 258 |
697 | <gh_stars>100-1000
# Copyright 2018 <NAME> LLC, <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class Lookup:
    """
    Base class for a condition that is fed to a "when" clause.

    Subclasses override :meth:`evaluate`; ``condition.evaluate(resource)``
    returns True or False for the given resource.
    """

    def evaluate(self, resource):
        """Evaluate this condition against *resource*.

        Args:
            resource: the object the condition is checked against.

        Returns:
            bool: True when the condition holds, False otherwise
            (in subclasses).

        Raises:
            NotImplementedError: always, on this base class; subclasses
                must override.
        """
        raise NotImplementedError()
| 263 |
970 | <gh_stars>100-1000
/*
* Copyright 2021-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <php.h>
#include <Zend/zend_interfaces.h>
#include <ext/standard/php_var.h>
#include <zend_smart_str.h>
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "phongo_compat.h"
#include "php_phongo.h"
zend_class_entry* php_phongo_serverapi_ce;
/* Builds a mongoc_server_api_t from a version string plus optional strict /
 * deprecation-errors flags. The *_set parameters indicate whether the caller
 * supplied a (non-null) value at all; unset options keep libmongoc defaults.
 * Throws and returns false on an unknown version string or if *server_api
 * was already initialized. */
static bool php_phongo_serverapi_create_libmongoc_object(mongoc_server_api_t** server_api, zend_string* version, bool strict_set, bool strict, bool deprecation_errors_set, bool deprecation_errors) /* {{{ */
{
	mongoc_server_api_version_t server_api_version;

	/* Translate the user-supplied version (e.g. "1") into libmongoc's enum. */
	if (!mongoc_server_api_version_from_string(ZSTR_VAL(version), &server_api_version)) {
		phongo_throw_exception(PHONGO_ERROR_INVALID_ARGUMENT, "Server API version \"%s\" is not supported in this driver version", ZSTR_VAL(version));
		return false;
	}

	/* Guard against double initialization (e.g. unserializing into a live
	 * object); this indicates a driver bug, not user error. */
	if (*server_api) {
		phongo_throw_exception(PHONGO_ERROR_LOGIC, "Server API object already initialized. Please file a bug report as this should not happen.");
		return false;
	}

	*server_api = mongoc_server_api_new(server_api_version);

	/* Only forward options the user explicitly provided. */
	if (strict_set) {
		mongoc_server_api_strict(*server_api, strict);
	}

	if (deprecation_errors_set) {
		mongoc_server_api_deprecation_errors(*server_api, deprecation_errors);
	}

	return true;
} /* }}} */

/* Initialize the object from a HashTable and return whether it was successful.
 * Used by __set_state(), unserialize() and __unserialize(). An exception will
 * be thrown on error. */
static bool php_phongo_serverapi_init_from_hash(php_phongo_serverapi_t* intern, HashTable* props) /* {{{ */
{
	zval* version;
	zval* strict;
	zval* deprecation_errors;

	/* "version" is required and must be a string. */
	version = zend_hash_str_find(props, ZEND_STRL("version"));

	if (!version || Z_TYPE_P(version) != IS_STRING) {
		phongo_throw_exception(PHONGO_ERROR_INVALID_ARGUMENT, "%s initialization requires \"version\" field to be string", ZSTR_VAL(php_phongo_serverapi_ce->name));
		return false;
	}

	/* "strict" and "deprecationErrors" are optional; when present they must
	 * be bool or null (null is treated as "not set"). */
	if ((strict = zend_hash_str_find(props, ZEND_STRL("strict"))) && !ZVAL_IS_NULL(strict)) {
		if (Z_TYPE_P(strict) != IS_TRUE && Z_TYPE_P(strict) != IS_FALSE) {
			phongo_throw_exception(PHONGO_ERROR_INVALID_ARGUMENT, "%s initialization requires \"strict\" field to be bool or null", ZSTR_VAL(php_phongo_serverapi_ce->name));
			return false;
		}
	}

	if ((deprecation_errors = zend_hash_str_find(props, ZEND_STRL("deprecationErrors"))) && !ZVAL_IS_NULL(deprecation_errors)) {
		if (Z_TYPE_P(deprecation_errors) != IS_TRUE && Z_TYPE_P(deprecation_errors) != IS_FALSE) {
			phongo_throw_exception(PHONGO_ERROR_INVALID_ARGUMENT, "%s initialization requires \"deprecationErrors\" field to be bool or null", ZSTR_VAL(php_phongo_serverapi_ce->name));
			return false;
		}
	}

	return php_phongo_serverapi_create_libmongoc_object(
		&intern->server_api,
		Z_STR_P(version),
		strict && !ZVAL_IS_NULL(strict),
		strict && zval_is_true(strict),
		deprecation_errors && !ZVAL_IS_NULL(deprecation_errors),
		deprecation_errors && zval_is_true(deprecation_errors));
} /* }}} */
/* {{{ proto void MongoDB\Driver\ServerApi::__construct(string $version, [?bool $strict], [?bool $deprecationErrors])
   Constructs a new ServerApi object */
static PHP_METHOD(ServerApi, __construct)
{
	php_phongo_serverapi_t* intern;
	zend_string* version;
	zend_bool strict = 0;
	zend_bool strict_null = 1;
	zend_bool deprecation_errors = 0;
	zend_bool deprecation_errors_null = 1;

	intern = Z_SERVERAPI_OBJ_P(getThis());

	/* The *_null flags start at 1 and are cleared by Z_PARAM_BOOL_EX when the
	 * caller passes a non-null value, which lets us distinguish "omitted or
	 * null" from an explicit false. */
	PHONGO_PARSE_PARAMETERS_START(1, 3)
	Z_PARAM_STR(version)
	Z_PARAM_OPTIONAL
	Z_PARAM_BOOL_EX(strict, strict_null, 1, 0)
	Z_PARAM_BOOL_EX(deprecation_errors, deprecation_errors_null, 1, 0)
	PHONGO_PARSE_PARAMETERS_END();

	/* Will throw on failure */
	php_phongo_serverapi_create_libmongoc_object(
		&intern->server_api,
		version,
		(bool) !strict_null,
		(bool) strict,
		(bool) !deprecation_errors_null,
		(bool) deprecation_errors);
} /* }}} */

/* {{{ proto MongoDB\Driver\ServerApi MongoDB\Driver\ServerApi::__set_state(array $properties)
   Creates a new instance from an exported property array (var_export support) */
static PHP_METHOD(ServerApi, __set_state)
{
	php_phongo_serverapi_t* intern;
	HashTable* props;
	zval* array;

	PHONGO_PARSE_PARAMETERS_START(1, 1)
	Z_PARAM_ARRAY(array)
	PHONGO_PARSE_PARAMETERS_END();

	object_init_ex(return_value, php_phongo_serverapi_ce);
	intern = Z_SERVERAPI_OBJ_P(return_value);

	props = Z_ARRVAL_P(array);

	/* Throws on invalid input (missing/ill-typed fields). */
	php_phongo_serverapi_init_from_hash(intern, props);
} /* }}} */
/* Builds a HashTable of the instance's logical properties: "version" is
 * always present; "strict" and "deprecationErrors" are added as bool when set
 * on the libmongoc handle, and as null — or omitted entirely when
 * include_null is false — when unset. bsonSerialize() passes
 * include_null=false so only explicitly configured options appear. */
static HashTable* php_phongo_serverapi_get_properties_hash(phongo_compat_object_handler_type* object, bool is_temp, bool include_null) /* {{{ */
{
	php_phongo_serverapi_t* intern;
	HashTable* props;
	zval version, strict, deprecation_errors;
	bool is_set;

	intern = Z_OBJ_SERVERAPI(PHONGO_COMPAT_GET_OBJ(object));

	PHONGO_GET_PROPERTY_HASH_INIT_PROPS(is_temp, intern, props, 1);

	ZVAL_STRING(&version, mongoc_server_api_version_to_string(mongoc_server_api_get_version(intern->server_api)));
	zend_hash_str_add(props, "version", sizeof("version") - 1, &version);

	is_set = mongoc_optional_is_set(mongoc_server_api_get_strict(intern->server_api));

	if (is_set) {
		ZVAL_BOOL(&strict, mongoc_optional_value(mongoc_server_api_get_strict(intern->server_api)));
	} else {
		ZVAL_NULL(&strict);
	}

	if (include_null || is_set) {
		zend_hash_str_add(props, "strict", sizeof("strict") - 1, &strict);
	}

	is_set = mongoc_optional_is_set(mongoc_server_api_get_deprecation_errors(intern->server_api));

	if (is_set) {
		ZVAL_BOOL(&deprecation_errors, mongoc_optional_value(mongoc_server_api_get_deprecation_errors(intern->server_api)));
	} else {
		ZVAL_NULL(&deprecation_errors);
	}

	if (include_null || is_set) {
		zend_hash_str_add(props, "deprecationErrors", sizeof("deprecationErrors") - 1, &deprecation_errors);
	}

	return props;
} /* }}} */

/* {{{ proto array MongoDB\Driver\ServerApi::bsonSerialize()
   Returns a representation for BSON encoding; unset options are omitted
   rather than serialized as null */
static PHP_METHOD(ServerApi, bsonSerialize)
{
	PHONGO_PARSE_PARAMETERS_NONE();

	ZVAL_ARR(return_value, php_phongo_serverapi_get_properties_hash(PHONGO_COMPAT_OBJ_P(getThis()), true, false));
	/* Encode as a BSON document rather than an array. */
	convert_to_object(return_value);
} /* }}} */

/* {{{ proto string MongoDB\Driver\ServerApi::serialize()
   Legacy Serializable support: serializes the three logical properties
   (unset options as null) with the standard PHP serializer */
static PHP_METHOD(ServerApi, serialize)
{
	php_phongo_serverapi_t* intern;
	zval retval;
	php_serialize_data_t var_hash;
	smart_str buf = { 0 };

	intern = Z_SERVERAPI_OBJ_P(getThis());

	PHONGO_PARSE_PARAMETERS_NONE();

	array_init_size(&retval, 3);

	ADD_ASSOC_STRING(&retval, "version", mongoc_server_api_version_to_string(mongoc_server_api_get_version(intern->server_api)));

	if (mongoc_optional_is_set(mongoc_server_api_get_strict(intern->server_api))) {
		ADD_ASSOC_BOOL_EX(&retval, "strict", mongoc_optional_value(mongoc_server_api_get_strict(intern->server_api)));
	} else {
		ADD_ASSOC_NULL_EX(&retval, "strict");
	}

	if (mongoc_optional_is_set(mongoc_server_api_get_deprecation_errors(intern->server_api))) {
		ADD_ASSOC_BOOL_EX(&retval, "deprecationErrors", mongoc_optional_value(mongoc_server_api_get_deprecation_errors(intern->server_api)));
	} else {
		ADD_ASSOC_NULL_EX(&retval, "deprecationErrors");
	}

	PHP_VAR_SERIALIZE_INIT(var_hash);
	php_var_serialize(&buf, &retval, &var_hash);
	smart_str_0(&buf);
	PHP_VAR_SERIALIZE_DESTROY(var_hash);

	PHONGO_RETVAL_SMART_STR(buf);

	smart_str_free(&buf);
	zval_ptr_dtor(&retval);
} /* }}} */
/* {{{ proto void MongoDB\Driver\ServerApi::unserialize(string $serialized)
   Legacy Serializable support: restores state from a serialize() payload.
   An empty string is silently ignored; a malformed payload throws. */
static PHP_METHOD(ServerApi, unserialize)
{
	php_phongo_serverapi_t* intern;
	char* serialized;
	size_t serialized_len;
	zval props;
	php_unserialize_data_t var_hash;

	intern = Z_SERVERAPI_OBJ_P(getThis());

	PHONGO_PARSE_PARAMETERS_START(1, 1)
	Z_PARAM_STRING(serialized, serialized_len)
	PHONGO_PARSE_PARAMETERS_END();

	if (!serialized_len) {
		return;
	}

	PHP_VAR_UNSERIALIZE_INIT(var_hash);

	if (!php_var_unserialize(&props, (const unsigned char**) &serialized, (unsigned char*) serialized + serialized_len, &var_hash)) {
		zval_ptr_dtor(&props);
		phongo_throw_exception(PHONGO_ERROR_UNEXPECTED_VALUE, "%s unserialization failed", ZSTR_VAL(php_phongo_serverapi_ce->name));

		PHP_VAR_UNSERIALIZE_DESTROY(var_hash);
		return;
	}
	PHP_VAR_UNSERIALIZE_DESTROY(var_hash);

	/* Field validation (and any exception) happens in init_from_hash. */
	php_phongo_serverapi_init_from_hash(intern, HASH_OF(&props));
	zval_ptr_dtor(&props);
} /* }}} */

/* {{{ proto array MongoDB\Driver\ServerApi::__serialize()
   PHP 7.4+ serialization: returns all properties, including nulls for
   unset options */
static PHP_METHOD(ServerApi, __serialize)
{
	PHONGO_PARSE_PARAMETERS_NONE();

	RETURN_ARR(php_phongo_serverapi_get_properties_hash(PHONGO_COMPAT_OBJ_P(getThis()), true, true));
} /* }}} */

/* {{{ proto void MongoDB\Driver\ServerApi::__unserialize(array $data)
   PHP 7.4+ counterpart of __serialize(); throws on invalid data */
static PHP_METHOD(ServerApi, __unserialize)
{
	zval* data;

	PHONGO_PARSE_PARAMETERS_START(1, 1)
	Z_PARAM_ARRAY(data)
	PHONGO_PARSE_PARAMETERS_END();

	php_phongo_serverapi_init_from_hash(Z_SERVERAPI_OBJ_P(getThis()), Z_ARRVAL_P(data));
} /* }}} */
/* {{{ MongoDB\Driver\ServerApi function entries */

/* Arginfo: __construct(string $version, ?bool $strict = null, ?bool $deprecationErrors = null) */
ZEND_BEGIN_ARG_INFO_EX(ai_ServerApi___construct, 0, 0, 1)
	ZEND_ARG_TYPE_INFO(0, version, IS_STRING, 0)
	ZEND_ARG_TYPE_INFO(0, strict, _IS_BOOL, 1)
	ZEND_ARG_TYPE_INFO(0, deprecationErrors, _IS_BOOL, 1)
ZEND_END_ARG_INFO()

ZEND_BEGIN_ARG_INFO_EX(ai_ServerApi___set_state, 0, 0, 1)
	ZEND_ARG_ARRAY_INFO(0, properties, 0)
ZEND_END_ARG_INFO()

ZEND_BEGIN_ARG_INFO_EX(ai_ServerApi___unserialize, 0, 0, 1)
	ZEND_ARG_ARRAY_INFO(0, data, 0)
ZEND_END_ARG_INFO()

/* PHP 8 declares Serializable::unserialize() with a typed parameter. */
ZEND_BEGIN_ARG_INFO_EX(ai_ServerApi_unserialize, 0, 0, 1)
#if PHP_VERSION_ID >= 80000
	ZEND_ARG_TYPE_INFO(0, serialized, IS_STRING, 0)
#else
	ZEND_ARG_INFO(0, serialized)
#endif
ZEND_END_ARG_INFO()

ZEND_BEGIN_ARG_INFO_EX(ai_ServerApi_void, 0, 0, 0)
ZEND_END_ARG_INFO()

static zend_function_entry php_phongo_serverapi_me[] = {
	/* clang-format off */
	PHP_ME(ServerApi, __construct, ai_ServerApi___construct, ZEND_ACC_PUBLIC | ZEND_ACC_FINAL)
	PHP_ME(ServerApi, __serialize, ai_ServerApi_void, ZEND_ACC_PUBLIC | ZEND_ACC_FINAL)
	PHP_ME(ServerApi, __set_state, ai_ServerApi___set_state, ZEND_ACC_PUBLIC | ZEND_ACC_STATIC)
	PHP_ME(ServerApi, __unserialize, ai_ServerApi___unserialize, ZEND_ACC_PUBLIC | ZEND_ACC_FINAL)
	PHP_ME(ServerApi, bsonSerialize, ai_ServerApi_void, ZEND_ACC_PUBLIC | ZEND_ACC_FINAL)
	PHP_ME(ServerApi, serialize, ai_ServerApi_void, ZEND_ACC_PUBLIC | ZEND_ACC_FINAL)
	PHP_ME(ServerApi, unserialize, ai_ServerApi_unserialize, ZEND_ACC_PUBLIC | ZEND_ACC_FINAL)
	PHP_FE_END
	/* clang-format on */
};
/* }}} */
/* {{{ MongoDB\Driver\ServerApi object handlers */
static zend_object_handlers php_phongo_handler_serverapi;

/* Frees engine-managed state, the cached properties table and the
 * underlying mongoc_server_api_t handle. */
static void php_phongo_serverapi_free_object(zend_object* object) /* {{{ */
{
	php_phongo_serverapi_t* intern = Z_OBJ_SERVERAPI(object);

	zend_object_std_dtor(&intern->std);

	if (intern->properties) {
		zend_hash_destroy(intern->properties);
		FREE_HASHTABLE(intern->properties);
	}

	/* May be NULL if the constructor threw before creating the handle. */
	if (intern->server_api) {
		mongoc_server_api_destroy(intern->server_api);
	}
}

/* Allocates the wrapping object and installs the custom handlers below. */
static zend_object* php_phongo_serverapi_create_object(zend_class_entry* class_type) /* {{{ */
{
	php_phongo_serverapi_t* intern = NULL;

	intern = PHONGO_ALLOC_OBJECT_T(php_phongo_serverapi_t, class_type);

	zend_object_std_init(&intern->std, class_type);
	object_properties_init(&intern->std, class_type);

	intern->std.handlers = &php_phongo_handler_serverapi;

	return &intern->std;
} /* }}} */

/* var_dump()/debug output; *is_temp = 1 so the engine frees the table. */
static HashTable* php_phongo_serverapi_get_debug_info(phongo_compat_object_handler_type* object, int* is_temp) /* {{{ */
{
	*is_temp = 1;
	return php_phongo_serverapi_get_properties_hash(object, true, true);
} /* }}} */

static HashTable* php_phongo_serverapi_get_properties(phongo_compat_object_handler_type* object) /* {{{ */
{
	return php_phongo_serverapi_get_properties_hash(object, false, true);
} /* }}} */

/* Registers the final MongoDB\Driver\ServerApi class, its interfaces (the
 * driver's Serializable plus the legacy engine Serializable), the custom
 * object handlers and the "V1" version constant. */
void php_phongo_serverapi_init_ce(INIT_FUNC_ARGS) /* {{{ */
{
	zend_class_entry ce;

	INIT_NS_CLASS_ENTRY(ce, "MongoDB\\Driver", "ServerApi", php_phongo_serverapi_me);
	php_phongo_serverapi_ce = zend_register_internal_class(&ce);
	php_phongo_serverapi_ce->create_object = php_phongo_serverapi_create_object;
	PHONGO_CE_FINAL(php_phongo_serverapi_ce);

	zend_class_implements(php_phongo_serverapi_ce, 1, php_phongo_serializable_ce);
	zend_class_implements(php_phongo_serverapi_ce, 1, zend_ce_serializable);

	memcpy(&php_phongo_handler_serverapi, phongo_get_std_object_handlers(), sizeof(zend_object_handlers));
	php_phongo_handler_serverapi.get_debug_info = php_phongo_serverapi_get_debug_info;
	php_phongo_handler_serverapi.get_properties = php_phongo_serverapi_get_properties;
	php_phongo_handler_serverapi.free_obj = php_phongo_serverapi_free_object;
	php_phongo_handler_serverapi.offset = XtOffsetOf(php_phongo_serverapi_t, std);

	zend_declare_class_constant_stringl(php_phongo_serverapi_ce, ZEND_STRL("V1"), ZEND_STRL("1"));
} /* }}} */
/*
* Local variables:
* tab-width: 4
* c-basic-offset: 4
* End:
* vim600: noet sw=4 ts=4 fdm=marker
* vim<600: noet sw=4 ts=4
*/
| 5,688 |
3,128 | <filename>oclint-rules/test/size/CyclomaticComplexityRuleTest.cpp
#include "TestRuleOnCode.h"
#include "rules/size/CyclomaticComplexityRule.cpp"
/* Fixture: pins the CYCLOMATIC_COMPLEXITY threshold to 1 so that a single
 * decision point already exceeds the limit, then restores the shared rule
 * configuration so other suites are unaffected. */
class CyclomaticComplexityRuleTest : public ::testing::Test {
protected:
    virtual void SetUp() override
    {
        // With a limit of 1, every decision point produces a violation.
        RuleConfiguration::addConfiguration("CYCLOMATIC_COMPLEXITY", "1");
    }

    virtual void TearDown() override
    {
        // RuleConfiguration is global state; reset it after each test.
        RuleConfiguration::removeAll();
    }
};
// Metadata sanity check: priority, name and category of the rule.
TEST_F(CyclomaticComplexityRuleTest, PropertyTest)
{
    CyclomaticComplexityRule rule;
    EXPECT_EQ(2, rule.priority());
    EXPECT_EQ("high cyclomatic complexity", rule.name());
    EXPECT_EQ("size", rule.category());
}

// A body with no decision point has CCN 1, which satisfies the limit of 1.
TEST_F(CyclomaticComplexityRuleTest, NoDecisionPoint)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { }");
}

// Each snippet below contains exactly one decision point, so CCN is 2 and a
// violation ("exceeds limit of 1") is expected at the reported location.
TEST_F(CyclomaticComplexityRuleTest, OneIfStatement)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { if (1) {} }",
        0, 1, 1, 1, 28, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneForStatement)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { for(;;) {} }",
        0, 1, 1, 1, 29, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneObjCForCollectionStatement)
{
    testRuleOnObjCCode(new CyclomaticComplexityRule(), "void aMethod() { id array; for(id one in array) {} }",
        0, 1, 1, 1, 52, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneWhileStatement)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { while(1) {} }",
        0, 1, 1, 1, 30, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneDoStatement)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { do {} while(1); }",
        0, 1, 1, 1, 34, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneCaseStatement)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { int i = 1; switch (i) { case 1: break; } }",
        0, 1, 1, 1, 59, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

// Every case label counts as a decision point, so two cases give CCN 3.
TEST_F(CyclomaticComplexityRuleTest, TwoCaseStatements)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { int i = 1; switch (i) { case 1: case 2: break; } }",
        0, 1, 1, 1, 67, "Cyclomatic Complexity Number 3 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneCXXCatchStatement)
{
    testRuleOnCXXCode(new CyclomaticComplexityRule(), "void aMethod() { try {} catch (...) {} }",
        0, 1, 1, 1, 40, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneObjCAtCatchStatement)
{
    testRuleOnObjCCode(new CyclomaticComplexityRule(), "void aMethod() { @try {} @catch (id ex) {} }",
        0, 1, 1, 1, 44, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneConditionalOperator)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { int i = 0 ? 1 : -1; }",
        0, 1, 1, 1, 38, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneLogicAndOperator)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { int b = 1 && 0; }",
        0, 1, 1, 1, 34, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

TEST_F(CyclomaticComplexityRuleTest, OneLogicOrOperator)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { int b = 1 || 0; }",
        0, 1, 1, 1, 34, "Cyclomatic Complexity Number 2 exceeds limit of 1");
}

// Non-logical binary operators (e.g. ==) do not add to the CCN.
TEST_F(CyclomaticComplexityRuleTest, ABinaryOperatorButHasNoEffectOnCCNCouting)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void aMethod() { int b = 1 == 0; }");
}

// The oclint:suppress annotation silences the rule for the annotated method.
TEST_F(CyclomaticComplexityRuleTest, SuppressCyclomaticComplexity)
{
    testRuleOnCode(new CyclomaticComplexityRule(), "void __attribute__((annotate(\"oclint:suppress[high cyclomatic complexity]\"))) aMethod() { int b = 1 || 0; }");
}
| 1,533 |
3,428 | <reponame>ghalimi/stdlib<gh_stars>1000+
{"id":"01645","group":"easy-ham-1","checksum":{"type":"MD5","value":"f61b77d47c074402d1ee5976e9a4fd7d"},"text":"Return-Path: <EMAIL>\nDelivery-Date: Fri Sep 6 15:43:33 2002\nFrom: <EMAIL> (<NAME>)\nDate: Fri, 06 Sep 2002 10:43:33 -0400\nSubject: [Spambayes] Deployment\nIn-Reply-To: Your message of \"Fri, 06 Sep 2002 10:39:48 EDT.\"\n <3D788653.9143.1D8992DA@localhost> \nReferences: <3D788653.9143.1D8992DA@localhost> \nMessage-ID: <<EMAIL>>\n\n> > your mail, and gives you only the non-spam. To train it, you'd only need\n> > to send it the false negatives somehow; it can assume that anything is\n> > ham that you don't say is spam within 48 hours.\n> \n> I have folks who leave their email programs running 24 hours a day,\n> constantly polling for mail. If they go away for a long weekend,\n> lots of \"friday night spam\" will become ham on sunday night.\n> (Friday night seems to be the most popular time)\n\nSo we'll make this a config parameter.\n\n> > - Your idea here.\n> \n> Ultimately I'd like to see tight integration into the \"most popular\n> email clients\".. As a stop-gap to the auto-ham ..\n\nWhat's an auto-ham?\n\n> How about adding an IMAP server with a spam and deleted-ham\n> folder. Most email clients can handle IMAP. Users should be able to\n> quickly move \"spam\" into the spam folder.\n\nI personally don't think IMAP has a bright future, but for people who\ndo use it, that's certainly a good approach.\n\n> Instead of deleting messages (or, by reprogramming the delete\n> function) they can quickly move ham into the ham folder.\n\nYes.\n\n--<NAME> (home page: http://www.python.org/~guido/)\n"} | 579 |
327 | {
"jupyter.lab.setting-icon-class": "jp-GitHub-icon",
"jupyter.lab.setting-icon-label": "GitHub",
"title": "GitHub",
"description": "Settings for the GitHub plugin.",
"properties": {
"baseUrl": {
"type": "string",
"title": "The GitHub Base URL",
"default": "https://github.com"
},
"accessToken": {
"type": "string",
"title": "A GitHub Personal Access Token",
"description": "WARNING: For security reasons access tokens should be set in the server extension.",
"default": ""
},
"defaultRepo": {
"type": "string",
"title": "Default Repository",
"default": ""
}
},
"type": "object"
}
| 274 |
377 | <reponame>gburd/Kundera<gh_stars>100-1000
/**
* Copyright 2013 Impetus Infotech.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.impetus.client.crud;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.NoSuchElementException;
import javax.persistence.EntityManager;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import com.impetus.client.crud.entities.PersonRDBMS;
import com.impetus.client.crud.entities.RDBMSToken;
import com.impetus.client.crud.entities.RDBMSTokenClient;
import com.impetus.kundera.query.IResultIterator;
import com.impetus.kundera.query.Query;
/**
* @author kuldeep.mishra junit for {@link IResultIterator}.
*/
public class ResultIteratorTest extends BaseTest
{
/** The emf. */
private EntityManagerFactory emf;
/** The em. */
private EntityManager em;
private RDBMSCli cli;
private static final String SCHEMA = "testdb";
/**
* Sets the up.
*
* @throws Exception
* the exception
*/
@Before
public void setUp() throws Exception
{
createSchema();
emf = Persistence.createEntityManagerFactory("testHibernate");
em = emf.createEntityManager();
}
@Test
public void testScroll() throws Exception
{
onScroll();
}
@Test
public void testScrollAssociation() throws Exception
{
assertOnTokenScroll();
}
private void assertOnTokenScroll()
{
RDBMSTokenClient client = new RDBMSTokenClient();
client.setClientName("tokenClient1");
client.setId("tokenClientId");
RDBMSToken token1 = new RDBMSToken();
token1.setId("RdbmsTokenId1");
token1.setTokenName("tokenName1");
token1.setClient(client);
RDBMSToken token2 = new RDBMSToken();
token2.setId("tokenId2");
token2.setTokenName("tokenName2");
token2.setClient(client);
em.persist(token1);
em.persist(token2);
String queryWithoutClause = "Select t from RDBMSToken t";
assertOnTokenScroll(queryWithoutClause, 2);
String queryWithClause = "Select t from RDBMSToken t where t.tokenName='tokenName1'";
assertOnTokenScroll(queryWithClause, 1);
// TODO:: Need to discuss , this should be working with token
// support. Special scenario.
String queryWithIdClause = "Select t from RDBMSToken t where t.tokenId = 'RDBMSTokenId1'";
// assertOnTokenScroll(queryWithIdClause, 1);
}
private void assertOnTokenScroll(String queryClause, int expected)
{
Query query = (Query) em.createQuery(queryClause, RDBMSToken.class);
int count = 0;
Iterator<RDBMSToken> tokens = query.iterate();
while (tokens.hasNext())
{
RDBMSToken token = tokens.next();
Assert.assertNotNull(token);
RDBMSTokenClient client = token.getClient();
Assert.assertNotNull(client);
Assert.assertEquals("tokenClient1", client.getClientName());
// Assert.assertEquals(2, client.getTokens().size());
count++;
}
Assert.assertTrue(count > 0);
Assert.assertTrue(count == expected);
}
private void onScroll()
{
Object p1 = prepareRDBMSInstance("1", 10);
Object p2 = prepareRDBMSInstance("2", 20);
Object p3 = prepareRDBMSInstance("3", 15);
em.persist(p1);
em.persist(p2);
em.persist(p3);
em.flush();
em.clear();
final String queryWithoutClause = "Select p from PersonRDBMS p";
assertOnScroll(queryWithoutClause, 3);
final String queryWithClause = "Select p from PersonRDBMS p where p.personName = vivek";
assertOnScroll(queryWithClause, 3);
final String queryWithAndClause = "Select p from PersonRDBMS p where p.personName = vivek and p.age = 15";
assertOnScroll(queryWithAndClause, 1);
final String queryWithLTClause = "Select p from PersonRDBMS p where p.personName = vivek and p.age < 15";
assertOnScroll(queryWithLTClause, 1);
final String queryWithGTClause = "Select p from PersonRDBMS p where p.personName = vivek and p.age >= 15";
assertOnScroll(queryWithGTClause, 2);
final String queryWithLTGTClause = "Select p from PersonRDBMS p where p.personName = vivek and p.age > 10 and p.age < 20";
assertOnScroll(queryWithLTGTClause, 1);
final String queryWithLTGTEClause = "Select p from PersonRDBMS p where p.personName = vivek and p.age >= 10 and p.age < 20";
assertOnScroll(queryWithLTGTEClause, 2);
String queryWithIdClause = "Select p from PersonRDBMS p where p.personId = '2' ";
assertOnScroll(queryWithIdClause, 1);
}
private void assertOnScroll(final String queryWithoutClause, int expectedCount)
{
Query query = (Query) em.createQuery(queryWithoutClause, PersonRDBMS.class);
assertOnFetch(query, 0, expectedCount);
assertOnFetch(query, 2, expectedCount); // less records
assertOnFetch(query, 4, expectedCount); // more fetch size than
// available in db.
assertOnFetch(query, 3, expectedCount); // more fetch size than
// available in db.
assertOnFetch(query, null, expectedCount); // set to null;
}
private void assertOnFetch(Query query, Integer fetchSize, int available)
{
query.setFetchSize(fetchSize);
int counter = 0;
Iterator<PersonRDBMS> iter = query.iterate();
while (iter.hasNext())
{
Assert.assertNotNull(iter.next());
counter++;
}
Assert.assertEquals(counter, fetchSize == null || available < fetchSize ? available : fetchSize);
try
{
iter.next();
Assert.fail();
}
catch (NoSuchElementException nsex)
{
Assert.assertNotNull(nsex.getMessage());
}
}
@After
public void tearDown()
{
em.close();
emf.close();
dropSchema();
}
private void createSchema() throws SQLException
{
try
{
cli = new RDBMSCli(SCHEMA);
cli.createSchema(SCHEMA);
cli.update("CREATE MEMORY TABLE TESTDB.PERSON (PERSON_ID VARCHAR(90) PRIMARY KEY, PERSON_NAME VARCHAR(256), AGE INTEGER)");
cli.update("CREATE MEMORY TABLE TESTDB.TOKENS (TOKEN_ID VARCHAR(90) PRIMARY KEY, TOKEN_NAME VARCHAR(256), CLIENT_ID VARCHAR(256))");
cli.update("CREATE MEMORY TABLE TESTDB.CLIENT (CLIENT_ID VARCHAR(90) PRIMARY KEY, CLIENT_NAME VARCHAR(256))");
}
catch (Exception e)
{
cli.update("DELETE FROM TESTDB.PERSON");
cli.update("DROP TABLE TESTDB.PERSON");
cli.update("DELETE FROM TESTDB.TOKENS");
cli.update("DROP TABLE TESTDB.TOKENS");
cli.update("DELETE FROM TESTDB.CLIENT");
cli.update("DROP TABLE TESTDB.CLIENT");
cli.update("DROP SCHEMA TESTDB");
cli.update("CREATE MEMORY TABLE TESTDB.PERSON (PERSON_ID VARCHAR(90) PRIMARY KEY, PERSON_NAME VARCHAR(256), AGE INTEGER)");
cli.update("CREATE MEMORY TABLE TESTDB.TOKENS (TOKEN_ID VARCHAR(90) PRIMARY KEY, TOKEN_NAME VARCHAR(256), CLIENT_ID VARCHAR(256))");
cli.update("CREATE MEMORY TABLE TESTDB.CLIENT (CLIENT_ID VARCHAR(90) PRIMARY KEY, CLIENT_NAME VARCHAR(256))");
// nothing
// do
}
}
private void dropSchema()
{
try
{
cli.update("DELETE FROM TESTDB.PERSON");
cli.update("DROP TABLE TESTDB.PERSON");
cli.update("DELETE FROM TESTDB.tokens");
cli.update("DROP TABLE TESTDB.tokens");
cli.update("DELETE FROM TESTDB.client");
cli.update("DROP TABLE TESTDB.client");
cli.update("DROP SCHEMA TESTDB");
cli.closeConnection();
cli.shutdown();
}
catch (Exception e)
{
// Nothing to do
}
}
} | 3,835 |
8,805 | <gh_stars>1000+
//
// KBPrefGPGView.h
// Keybase
//
// Created by Gabriel on 4/3/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "KBPreferences.h"
#import <YOLayout/YOBox.h>
/*!
 Preferences pane (a vertical box layout) for GPG-related Keybase settings.
 */
@interface KBPrefGPGView : YOVBox

/*!
 Initializes the view with its backing preferences object.

 @param preferences Preferences store backing this pane.
 @return An initialized view.
 */
- (instancetype)initWithPreferences:(KBPreferences *)preferences;

@end
| 137 |
1,738 | <filename>dev/Code/Sandbox/Editor/TerrainMoveToolPanel.h
/*
* All or portions of this file Copyright (c) Amazon.com, Inc. or its affiliates or
* its licensors.
*
* For complete copyright and license terms please see the LICENSE at the root of this
* distribution (the "License"). All use of this software is governed by the License,
* or, if provided, by the license below or the license accompanying this file. Do not
* remove or modify any license notices. This file is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
*/
// Original file Copyright Crytek GMBH or its affiliates, used under license.
// Description : Terrain modification tool.
#ifndef CRYINCLUDE_EDITOR_TERRAINMOVETOOLPANEL_H
#define CRYINCLUDE_EDITOR_TERRAINMOVETOOLPANEL_H
#pragma once
#include <QWidget>
#include <QScopedPointer>
namespace Ui {
class CTerrainMoveToolPanel;
}
class QPushButton;
class QSpinBox;
class QComboBox;
class QLabel;
/////////////////////////////////////////////////////////////////////////////
// CTerrainMoveToolPanel dialog
class CTerrainMoveTool;
// Side panel UI for the terrain move tool: lets the user pick a source and
// target area, shows the current offset and applies the move.
class CTerrainMoveToolPanel
    : public QWidget
{
    // Construction
public:
    // Standard constructor; 'tool' is the terrain move tool this panel drives.
    CTerrainMoveToolPanel(CTerrainMoveTool* tool, QWidget* pParent = nullptr);

    // Refreshes the select-source/select-target button state from the tool.
    // NOTE(review): exact behavior lives in the .cpp — confirm.
    void UpdateButtons();

    // Updates the displayed offset; bReset presumably clears/resets the text
    // instead of formatting 'offset' — confirm against SetOffsetText usage.
    void UpdateOffsetText(const Vec3& offset, bool bReset);

    // Dialog Data
    QPushButton* m_selectSource;
    QPushButton* m_selectTarget;

    // Implementation
protected:
    // Writes one titled offset component into 'label'.
    void SetOffsetText(QLabel* label, QString title, float offset, bool bReset);

    // UI event handlers.
    void OnSelectSource();
    void OnSelectTarget();
    void OnApplyButton();
    void OnUpdateNumbers();
    void OnChangeTargetRot();
    void OnSyncHeight();

    // Per-axis dimension/offset spin boxes.
    QSpinBox* m_dymX;
    QSpinBox* m_dymY;
    QSpinBox* m_dymZ;

    // Target rotation selector.
    QComboBox* m_cbTargetRot;

    // Raw pointer supplied by the creator; presumably non-owning.
    CTerrainMoveTool* m_tool;

    QScopedPointer<Ui::CTerrainMoveToolPanel> ui;
};
#endif // CRYINCLUDE_EDITOR_TERRAINMOVETOOLPANEL_H
| 677 |
4,772 | <reponame>Balkerm/spring-data-examples
/*
* Copyright 2017-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package example.springdata.mongodb.fluent;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.data.geo.Point;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.index.GeoSpatialIndexType;
import org.springframework.data.mongodb.core.index.GeospatialIndex;
/**
* @author <NAME>
*/
@SpringBootApplication
public class ApplicationConfiguration {

	static final String COLLECTION = "star-wars";

	/**
	 * On startup: recreates the {@code star-wars} collection, installs a
	 * 2d-sphere index over the characters' home planet coordinates and seeds
	 * a small set of sample characters.
	 */
	@Bean
	CommandLineRunner init(MongoTemplate template) {

		return (args) -> {

			if (template.collectionExists(COLLECTION)) {
				template.dropCollection(COLLECTION);
			}

			var geoIndex = new GeospatialIndex("homePlanet.coordinates") //
					.typed(GeoSpatialIndexType.GEO_2DSPHERE) //
					.named("planet-coordinate-idx");

			template.createCollection(COLLECTION);
			template.indexOps(SWCharacter.class).ensureIndex(geoIndex);

			var alderaan = new Planet("alderaan", new Point(-73.9667, 40.78));
			var stewjon = new Planet("stewjon", new Point(-73.9836, 40.7538));
			var tatooine = new Planet("tatooine", new Point(-73.9928, 40.7193));

			var anakin = new Jedi("anakin", "skywalker");
			anakin.setHomePlanet(tatooine);

			var luke = new Jedi("luke", "skywalker");
			luke.setHomePlanet(tatooine);

			var leia = new Jedi("leia", "organa");
			leia.setHomePlanet(alderaan);

			var obiWan = new Jedi("obi-wan", "kenobi");
			obiWan.setHomePlanet(stewjon);

			var han = new Human("han", "solo");

			// Persist the sample data set (order matches the original seed).
			for (Object character : java.util.List.of(anakin, luke, leia, obiWan, han)) {
				template.save(character, COLLECTION);
			}
		};
	}
}
| 836 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import org.netbeans.junit.NbTestCase;
/**
 * Checks that the module JAR timestamp cache is invalidated whenever the
 * cluster layout changes — either because a cluster directory is moved to a
 * different location, or because the set of clusters differs between runs.
 *
 * @author <NAME> <<EMAIL>>
 */
public class StampsClusterMovedTest extends NbTestCase implements Stamps.Updater {
    private File userdir;
    private File ide;
    private File platform;
    private File install;
    private File mainCluster;

    public StampsClusterMovedTest(String testName) {
        super(testName);
    }

    public void testMoveOfAClusterIsDetected() throws Exception {
        createClusters();
        System.setProperty("netbeans.home", platform.getPath());
        System.setProperty("netbeans.dirs", ide.getPath() + File.pathSeparator + mainCluster.getPath());
        System.setProperty("netbeans.user", userdir.getPath());
        // Let wall-clock time pass so the cache stamp can differ from the
        // .lastModified stamps created above.
        Thread.sleep(1000);
        writeAndVerifyCache();
        Thread.sleep(500);
        // Move the "extra" cluster into a subdirectory, keeping its name.
        File subDir = new File(getWorkDir(), "subdir");
        subDir.mkdirs();
        final File newExtra = new File(subDir, mainCluster.getName());
        assertTrue("Rename succeeded", mainCluster.renameTo(newExtra));
        assertTrue("Extra renamed: " + newExtra, newExtra.isDirectory());
        System.setProperty("netbeans.dirs", ide.getPath() + File.pathSeparator + newExtra.getPath());
        Stamps.main("init");
        assertNull("Cache invalidated as relative location of clusters changed",
            Stamps.getModulesJARs().asByteBuffer("test-cache")
        );
    }

    public void testChangeOfClustersIsDetectedInSharedConfig() throws Exception {
        createClusters();
        userdir.mkdirs();
        System.setProperty("netbeans.home", platform.getPath());
        System.setProperty("netbeans.dirs", ide.getPath());
        // Generate the cache into the mainCluster directory itself, as if it
        // were a shared, pre-built configuration.
        System.setProperty("netbeans.user", mainCluster.getPath());
        Thread.sleep(1000);
        writeAndVerifyCache();
        // Hide the recorded cluster-stamp directory so only the cache remains.
        File lmdir = new File(new File(new File(mainCluster, "var"), "cache"), "lastModified");
        assertTrue(lmdir + " is dir", lmdir.isDirectory());
        lmdir.renameTo(new File(lmdir.getParentFile(), "ignore"));
        assertFalse(lmdir + " is no longer dir", lmdir.isDirectory());
        Thread.sleep(500);
        System.setProperty("netbeans.user", userdir.getPath());
        // Now use mainCluster as an ordinary cluster rather than the userdir.
        System.setProperty("netbeans.dirs", mainCluster.getPath() + File.pathSeparator + ide.getPath());
        Stamps.main("init");
        assertNull("Cache invalidated set of clusters changed",
            Stamps.getModulesJARs().asByteBuffer("test-cache")
        );
    }

    /**
     * Creates a fresh install area with "platform", "ide" and "extra" clusters
     * (each carrying a .lastModified marker) plus an empty userdir path, and
     * stores them in the corresponding fields.
     */
    private void createClusters() throws Exception {
        clearWorkDir();
        install = new File(getWorkDir(), "install");
        platform = new File(install, "platform");
        platform.mkdirs();
        new File(platform, ".lastModified").createNewFile();
        ide = new File(install, "ide");
        ide.mkdirs();
        new File(ide, ".lastModified").createNewFile();
        mainCluster = new File(install, "extra");
        mainCluster.mkdirs();
        assertTrue("Extra cluster exists", mainCluster.isDirectory());
        new File(mainCluster, ".lastModified").createNewFile();
        userdir = new File(getWorkDir(), "tmp");
    }

    /**
     * Initializes Stamps, writes the "test-cache" entry via this Updater and
     * asserts the cache file exists with the expected timestamp.
     */
    private void writeAndVerifyCache() throws Exception {
        Stamps.main("init");
        Stamps.getModulesJARs().scheduleSave(this, "test-cache", false);
        Stamps.getModulesJARs().waitFor(true);
        int[] len = { 0 };
        File f = Stamps.getModulesJARs().file("test-cache", len);
        assertNotNull("Cache found", f);
        assertEquals("Stamps of caches shall be the same as stamps of .lastModified",
            f.lastModified(), Stamps.moduleJARs()
        );
    }

    /** Writes a single marker byte so the cache file is non-empty. */
    @Override
    public void flushCaches(DataOutputStream os) throws IOException {
        os.write(1);
    }

    /** No post-save work is needed by this test. */
    @Override
    public void cacheReady() {
    }
}
| 2,344 |
1,092 | <reponame>altus34/spring-restdocs<filename>spring-restdocs-core/src/main/java/org/springframework/restdocs/operation/preprocess/PrettyPrintingContentModifier.java
/*
* Copyright 2014-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.restdocs.operation.preprocess;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import javax.xml.transform.ErrorListener;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.sax.SAXSource;
import javax.xml.transform.stream.StreamResult;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.xml.sax.ErrorHandler;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.SAXParseException;
import org.xml.sax.XMLReader;
import org.springframework.http.MediaType;
/**
 * A {@link ContentModifier} that modifies the content by pretty printing it.
 * JSON and XML payloads are supported; any other (or malformed) content is
 * returned unchanged.
 *
 * @author <NAME>
 */
public class PrettyPrintingContentModifier implements ContentModifier {
	// Printers are tried in order: JSON first, then XML. The first one that
	// succeeds wins.
	private static final List<PrettyPrinter> PRETTY_PRINTERS = Collections
			.unmodifiableList(Arrays.asList(new JsonPrettyPrinter(), new XmlPrettyPrinter()));
	@Override
	public byte[] modifyContent(byte[] originalContent, MediaType contentType) {
		if (originalContent.length > 0) {
			for (PrettyPrinter prettyPrinter : PRETTY_PRINTERS) {
				try {
					return prettyPrinter.prettyPrint(originalContent);
				}
				catch (Exception ex) {
					// Continue: the content was not parseable by this printer,
					// so fall through to the next one (or return unmodified).
				}
			}
		}
		// Empty or unparseable content is passed through untouched.
		return originalContent;
	}
	/** Strategy for pretty printing a single content format. */
	private interface PrettyPrinter {
		byte[] prettyPrint(byte[] content) throws Exception;
	}
	/** Pretty prints XML via an identity transform with indentation enabled. */
	private static final class XmlPrettyPrinter implements PrettyPrinter {
		@Override
		public byte[] prettyPrint(byte[] original) throws Exception {
			Transformer transformer = TransformerFactory.newInstance().newTransformer();
			transformer.setOutputProperty(OutputKeys.INDENT, "yes");
			// Xalan-specific hint controlling the indent width.
			transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");
			// NOTE(review): DOCTYPE_PUBLIC is set to "yes" rather than a real
			// public ID — presumably a known transformer output quirk/workaround;
			// confirm before changing.
			transformer.setOutputProperty(OutputKeys.DOCTYPE_PUBLIC, "yes");
			ByteArrayOutputStream transformed = new ByteArrayOutputStream();
			transformer.setErrorListener(new SilentErrorListener());
			transformer.transform(createSaxSource(original), new StreamResult(transformed));
			return transformed.toByteArray();
		}
		// Builds a SAXSource with a silent error handler so parse problems do
		// not get logged; failures still surface as exceptions to the caller.
		private SAXSource createSaxSource(byte[] original) throws ParserConfigurationException, SAXException {
			SAXParserFactory parserFactory = SAXParserFactory.newInstance();
			SAXParser parser = parserFactory.newSAXParser();
			XMLReader xmlReader = parser.getXMLReader();
			xmlReader.setErrorHandler(new SilentErrorHandler());
			return new SAXSource(xmlReader, new InputSource(new ByteArrayInputStream(original)));
		}
		/** Swallows transformer diagnostics so non-XML input stays quiet. */
		private static final class SilentErrorListener implements ErrorListener {
			@Override
			public void warning(TransformerException exception) throws TransformerException {
				// Suppress
			}
			@Override
			public void error(TransformerException exception) throws TransformerException {
				// Suppress
			}
			@Override
			public void fatalError(TransformerException exception) throws TransformerException {
				// Suppress
			}
		}
		/** Swallows SAX diagnostics so non-XML input stays quiet. */
		private static final class SilentErrorHandler implements ErrorHandler {
			@Override
			public void warning(SAXParseException exception) throws SAXException {
				// Suppress
			}
			@Override
			public void error(SAXParseException exception) throws SAXException {
				// Suppress
			}
			@Override
			public void fatalError(SAXParseException exception) throws SAXException {
				// Suppress
			}
		}
	}
	/** Pretty prints JSON by re-serializing a parsed tree with indentation. */
	private static final class JsonPrettyPrinter implements PrettyPrinter {
		// FAIL_ON_TRAILING_TOKENS ensures the *entire* payload is valid JSON;
		// otherwise a JSON-looking prefix would be accepted and the rest dropped.
		private final ObjectMapper objectMapper = new ObjectMapper().configure(SerializationFeature.INDENT_OUTPUT, true)
				.configure(DeserializationFeature.FAIL_ON_TRAILING_TOKENS, true);
		@Override
		public byte[] prettyPrint(byte[] original) throws IOException {
			return this.objectMapper.writeValueAsBytes(this.objectMapper.readTree(original));
		}
	}
}
| 1,560 |
6,160 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jmeter.visualizers.backend;
import java.util.List;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.samplers.SampleResult;
/**
* This interface defines the interactions between the {@link BackendListener}
* and external Java programs which can be executed by JMeter. Any Java class
* which wants to be executed using the {@link BackendListener} test element
* must implement this interface (either directly or through
* {@link AbstractBackendListenerClient}).
* <p>
* JMeter will create one instance of a BackendListenerClient implementation for
* each user/thread in the test. Additional instances may be created for
* internal use by JMeter (for example, to find out what parameters are
* supported by the client).
* <p>
* When the test is started, setupTest() will be called on each thread's
* BackendListenerClient instance to initialize the client.
* Then {@link #handleSampleResults(List, BackendListenerContext)} will be
* called for each {@link SampleResult} notification. Finally,
* {@link #teardownTest(BackendListenerContext)}
* will be called to allow the client to do any necessary clean-up.
* <p>
* The JMeter BackendListener GUI allows a list of parameters to be defined for
* the test. These are passed to the various test methods through the
* {@link BackendListenerContext}. A list of default parameters can be defined
* through the {@link #getDefaultParameters()} method. These parameters and any
* default values associated with them will be shown in the GUI. Users can add
* other parameters as well.
* <p>
* Listeners should extend {@link AbstractBackendListenerClient}
* rather than implementing {@link BackendListenerClient} directly to protect
* your code from future changes to the interface.
* <p>
* While it may be necessary to make changes to the {@link BackendListenerClient}
* interface from time to time (therefore requiring changes to any
* implementations of this interface), we intend to make this abstract class
* provide reasonable default implementations of any new methods so that
* subclasses do not necessarily need to be updated for new versions.
* Implementing BackendListenerClient directly will continue to be supported for
* cases where extending this class is not possible (for example, when the
* client class is already a subclass of some other class).
*
* @since 2.13
*/
public interface BackendListenerClient {
    /**
     * Do any initialization required by this client. It is generally
     * recommended to do any initialization such as getting parameter values
     * here rather than {@link #handleSampleResults(List, BackendListenerContext)}
     * in order to add as little overhead as possible to the test.
     *
     * @param context provides access to initialization parameters.
     * @throws Exception when setup fails
     */
    void setupTest(BackendListenerContext context) throws Exception; // NOSONAR - broad throws is intentional for this SPI
    /**
     * Handle sampleResults, this can be done in many ways:
     * <ul>
     * <li>Write to a file</li>
     * <li>Write to a remote server</li>
     * <li>...</li>
     * </ul>
     *
     * @param sampleResults List of {@link SampleResult}
     * @param context provides access to initialization parameters.
     */
    void handleSampleResults(List<SampleResult> sampleResults, BackendListenerContext context);
    /**
     * Do any clean-up required at the end of a test run.
     *
     * @param context provides access to initialization parameters.
     * @throws Exception when tear down fails
     */
    void teardownTest(BackendListenerContext context) throws Exception; // NOSONAR - broad throws is intentional for this SPI
    /**
     * Provide a list of parameters which this test supports. Any parameter
     * names and associated values returned by this method will appear in the
     * GUI by default so the user doesn't have to remember the exact names. The
     * user can add other parameters which are not listed here. If this method
     * returns null then no parameters will be listed. If the value for some
     * parameter is null then that parameter will be listed in the GUI with an
     * empty value.
     *
     * @return a specification of the parameters used by this test which should
     *         be listed in the GUI, or null if no parameters should be listed.
     */
    default Arguments getDefaultParameters() {
        // Default: no parameters are advertised in the GUI.
        return null;
    }
    /**
     * Create a copy of SampleResult, this method is here to allow customizing
     * what is kept in the copy, for example copy could remove some useless fields.
     * Note that if it returns null, the sample result is not put in the queue.
     * Defaults to returning result.
     *
     * @param context {@link BackendListenerContext}
     * @param result {@link SampleResult}
     * @return {@link SampleResult}
     */
    default SampleResult createSampleResult(
            BackendListenerContext context, SampleResult result) {
        // Default: forward the original result unmodified.
        return result;
    }
}
| 1,574 |
852 | import FWCore.ParameterSet.Config as cms
##### event content for heavy-ion analysis objects
from Configuration.EventContent.EventContentHeavyIons_cff import *
from RecoHI.HiMuonAlgos.RecoHiMuon_EventContent_cff import *
# Per-object event-content blocks; each PSet lists the keep/drop output
# commands for one physics-object category. The top-level names below are
# imported by other configuration files, so they must not be renamed.
#jets
jetContent = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep double*_*CaloJets_*_*',
        'drop recoCaloJets_*_*_*',
        'keep recoGenJets_*_*_*',
        'keep patJets_*_*_*'
    )
)
# jets plus the calorimeter towers they were built from
jetContentExtended = jetContent.clone()
jetContentExtended.outputCommands.extend(cms.untracked.vstring('keep *_towerMaker_*_*'))
#tracks
trkContent = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep *_offlineBeamSpot_*_*',
        'keep recoTracks_hiSelectedTracks_*_*',
        'keep recoTracks_hiPixel3PrimTracks_*_*' # low-fake selection to lower pt?
    )
)
# lighter alternative: only beam spot plus selected charged candidates
lightTrackContent = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep *_offlineBeamSpot_*_*',
        'keep *_allTracks_*_*' # Selected Charged Candidates
    )
)
#muons
muonContent = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep patMuons_*_*_*'
    )
)
# pat muons plus the standard and heavy-ion muon AOD collections
muonContentExtended = muonContent.clone()
muonContentExtended.outputCommands.extend(RecoMuonAOD.outputCommands)
muonContentExtended.outputCommands.extend(RecoHiMuonAOD.outputCommands)
#photons
photonContent = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep patPhotons_selected*_*_*'
    )
)
# pat photons plus the egamma AOD collections
photonContentExtended = photonContent.clone()
photonContentExtended.outputCommands.extend(RecoEgammaAOD.outputCommands)
#correlations
corrContent = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep recoRecoChargedCandidates_allTracks_*_*',
        'keep recoRecoChargedCandidates_allPxltracks_*_*'
    )
)
#common
# Base content for every skim: drop everything, then keep trigger results,
# L1 record, selected vertex and heavy-ion event info.
hiCommon = cms.PSet(
    outputCommands = cms.untracked.vstring('drop *',
        'keep *_TriggerResults_*_HLT',
        'keep L1GlobalTriggerReadoutRecord_gtDigis_*_*',
        'keep recoVertexs_hiSelectedVertex_*_*',
        'keep *_heavyIon_*_*',
        'keep *_hiCentrality_*_*',
        'keep *_hiEvtPlane_*_*'
    )
)
#mc content
hiContentMC = cms.PSet(
    outputCommands = cms.untracked.vstring(
        'keep *_generator_*_*',
        'keep *_hiSignal_*_*',
        'keep *_genParticles_*_*',
        'keep *_hiGenParticles_*_*'
    )
)
##### combinations for specific skims
# Each skim content starts from hiCommon.clone() and extends it with the
# object blocks defined above.
# HI PAG skim
hiAnalysisSkimContent = hiCommon.clone()
hiAnalysisSkimContent.outputCommands.extend(jetContentExtended.outputCommands)
hiAnalysisSkimContent.outputCommands.extend(trkContent.outputCommands)
hiAnalysisSkimContent.outputCommands.extend(muonContent.outputCommands)
hiAnalysisSkimContent.outputCommands.extend(photonContent.outputCommands)
hiAnalysisSkimContent.outputCommands.extend(corrContent.outputCommands)
# [highpt] skim
jetTrkSkimContent = hiCommon.clone()
jetTrkSkimContent.outputCommands.extend(jetContentExtended.outputCommands)
jetTrkSkimContent.outputCommands.extend(lightTrackContent.outputCommands)
# [highpt] MC skim
jetTrkSkimContentMC = jetTrkSkimContent.clone()
jetTrkSkimContentMC.outputCommands.extend(hiContentMC.outputCommands)
# [dilepton] skim 0
muonSkimContent = hiCommon.clone()
muonSkimContent.outputCommands.extend(cms.untracked.vstring('keep patMuons_*_*_*'))
muonSkimContent.outputCommands.extend(RecoMuonRECO.outputCommands)
muonSkimContent.outputCommands.extend(RecoHiTrackerRECO.outputCommands)
# [dilepton] skim MC
muonSkimContentMC = muonSkimContent.clone()
muonSkimContentMC.outputCommands.extend(hiContentMC.outputCommands)
# [dilepton] skim 1
muonTrkSkimContent = hiCommon.clone() # trigger, L!, vertex,centrality, etc
muonTrkSkimContent.outputCommands.extend(muonContentExtended.outputCommands) # muon AOD
muonTrkSkimContent.outputCommands.extend(trkContent.outputCommands)
| 1,571 |
319 | //
// ReferenceTypeConstructor.h
// NativeScript
//
// Created by <NAME> on 11/3/14.
// Copyright (c) 2014 Telerik. All rights reserved.
//
#ifndef __NativeScript__ReferenceTypeConstructor__
#define __NativeScript__ReferenceTypeConstructor__
namespace NativeScript {
// JS constructor function for reference types in the NativeScript runtime.
// The same host function is installed for both [[Call]] and [[Construct]].
class ReferenceTypeConstructor : public JSC::InternalFunction {
public:
    typedef JSC::InternalFunction Base;

    // Allocates the cell in JSC's GC heap (placement new via allocateCell),
    // then runs finishCreation to wire up the prototype. The returned
    // JSC::Strong keeps the object alive independently of the GC.
    static JSC::Strong<ReferenceTypeConstructor> create(JSC::VM& vm, JSC::Structure* structure, JSObject* referenceTypePrototype) {
        JSC::Strong<ReferenceTypeConstructor> constructor(vm, new (NotNull, JSC::allocateCell<ReferenceTypeConstructor>(vm.heap)) ReferenceTypeConstructor(vm, structure));
        constructor->finishCreation(vm, referenceTypePrototype);
        return constructor;
    }

    DECLARE_INFO;

    // Standard JSC boilerplate: builds the Structure describing instances of
    // this InternalFunction subclass.
    static JSC::Structure* createStructure(JSC::VM& vm, JSC::JSGlobalObject* globalObject, JSC::JSValue prototype) {
        return JSC::Structure::create(vm, globalObject, prototype, JSC::TypeInfo(JSC::InternalFunctionType, StructureFlags), info());
    }

private:
    // constructReferenceType is passed twice: once as the call handler and
    // once as the construct handler.
    ReferenceTypeConstructor(JSC::VM& vm, JSC::Structure* structure)
        : Base(vm, structure, &constructReferenceType, &constructReferenceType) {
    }

    // Second phase of construction; implemented in the .cpp file.
    void finishCreation(JSC::VM&, JSObject*);

    // Host entry point invoked when JS calls or news this constructor.
    static JSC::EncodedJSValue JSC_HOST_CALL constructReferenceType(JSC::ExecState* execState);
};
} // namespace NativeScript
#endif /* defined(__NativeScript__ReferenceTypeConstructor__) */
| 482 |
2,291 | <gh_stars>1000+
{
"id" : 411,
"status" : "Invalid",
"summary" : "Junk data is displayed as a tile instead of the correct tile.",
"labels" : [ "Type-Defect", "Priority-Medium" ],
"stars" : 0,
"commentCount" : 4,
"comments" : [ {
"id" : 0,
"commenterId" : 6125753346405818702,
"content" : "<b>What steps will reproduce the problem?</b>\n1. Go to any tile set, although I notice it is mostly from the cycle map tiles.\r\n2. Scroll around the map and zoom in and out while tiles are loading.\r\n\r\n<b>What is the expected output? What do you see instead?</b>\nI should get the tiles from the tile server.\r\nInstead I will get random junk data in the place of a tile, or what seems like random images.\r\nHere is an example screen shot: http://imgur.com/Xq8nI4w\r\n(I also attached the same screen shot)\r\n\r\n<b>What version of the product are you using? On what operating system?</b>\nThis is from revision 1152 last grabbed on 3/20 running on Android 2.3.",
"timestamp" : 1364238614,
"attachments" : [ {
"id" : 4110000000,
"fileName" : "weirdimagebug.png",
"fileSize" : 408056
} ]
}, {
"id" : 1,
"commenterId" : 7646092065249173135,
"content" : "We have not seen any reports like this. Can you narrow it down to a specific device, API level, tile set, etc? Is it the same image? Does that image exist somewhere on your SD card?\r\n\r\nFinally, try to clear the tile cache on your SD card and see if that clears it up.",
"timestamp" : 1365024700,
"attachments" : [ ]
}, {
"id" : 2,
"commenterId" : 6125753346405818702,
"content" : "I am using a G2. The tile set only seems to be the Cycle Map Tiles. It is always a different image, or sometimes just a collection of colors and lines. Where the image is coming from is confusing. I thought there was a data stream confusion coming from a web browser or something.\r\n\r\nI cleared the cache, and I have not been able to recreate the issue since. It is a mystery to me how it happened.",
"timestamp" : 1365025468,
"attachments" : [ ]
}, {
"id" : 3,
"commenterId" : 7646092065249173135,
"content" : "There could have been a hiccup with the Cycle Map Tiles server. If you see it in other places or if it repeats itself, reopen this ticket.",
"timestamp" : 1365027041,
"attachments" : [ ]
} ]
} | 821 |
1,227 | import numpy as np
from typing import Callable, Dict, Optional, Union
from alibi_detect.utils.frameworks import has_pytorch, has_tensorflow
if has_pytorch:
from alibi_detect.cd.pytorch.spot_the_diff import SpotTheDiffDriftTorch
from alibi_detect.utils.pytorch.data import TorchDataset
from torch.utils.data import DataLoader
if has_tensorflow:
from alibi_detect.cd.tensorflow.spot_the_diff import SpotTheDiffDriftTF
from alibi_detect.utils.tensorflow.data import TFDataset
class SpotTheDiffDrift:
    # Thin dispatching wrapper: picks the TensorFlow or PyTorch implementation
    # at construction time and forwards all calls to it via self._detector.
    def __init__(
        self,
        x_ref: Union[np.ndarray, list],
        backend: str = 'tensorflow',
        p_val: float = .05,
        preprocess_fn: Optional[Callable] = None,
        kernel: Optional[Callable] = None,
        n_diffs: int = 1,
        initial_diffs: Optional[np.ndarray] = None,
        l1_reg: float = 0.01,
        binarize_preds: bool = False,
        train_size: Optional[float] = .75,
        n_folds: Optional[int] = None,
        retrain_from_scratch: bool = True,
        seed: int = 0,
        optimizer: Optional[Callable] = None,
        learning_rate: float = 1e-3,
        batch_size: int = 32,
        preprocess_batch_fn: Optional[Callable] = None,
        epochs: int = 3,
        verbose: int = 0,
        train_kwargs: Optional[dict] = None,
        device: Optional[str] = None,
        dataset: Optional[Callable] = None,
        dataloader: Optional[Callable] = None,
        data_type: Optional[str] = None
    ) -> None:
        """
        Classifier-based drift detector with a classifier of form y = a + b_1*k(x,w_1) + ... + b_J*k(x,w_J),
        where k is a kernel and w_1,...,w_J are learnable test locations. If drift has occurred the test locations
        learn to be more/less (given by sign of b_i) similar to test instances than reference instances.
        The test locations are regularised to be close to the average reference instance such that the **difference**
        is then interpretable as the transformation required for each feature to make the average instance more/less
        like a test instance than a reference instance.
        The classifier is trained on a fraction of the combined reference and test data and drift is detected on
        the remaining data. To use all the data to detect drift, a stratified cross-validation scheme can be chosen.
        Parameters
        ----------
        x_ref
            Data used as reference distribution.
        backend
            Backend used for the training loop implementation.
        p_val
            p-value used for the significance of the test.
        preprocess_fn
            Function to preprocess the data before computing the data drift metrics.
        kernel
            Kernel used to define similarity between instances, defaults to Gaussian RBF
        n_diffs
            The number of test locations to use, each corresponding to an interpretable difference.
        initial_diffs
            Array used to initialise the diffs that will be learned. Defaults to Gaussian
            for each feature with equal variance to that of reference data.
        l1_reg
            Strength of l1 regularisation to apply to the differences.
        binarize_preds
            Whether to test for discrepancy on soft (e.g. probs/logits) model predictions directly
            with a K-S test or binarise to 0-1 prediction errors and apply a binomial test.
        train_size
            Optional fraction (float between 0 and 1) of the dataset used to train the classifier.
            The drift is detected on `1 - train_size`. Cannot be used in combination with `n_folds`.
        n_folds
            Optional number of stratified folds used for training. The model preds are then calculated
            on all the out-of-fold instances. This allows to leverage all the reference and test data
            for drift detection at the expense of longer computation. If both `train_size` and `n_folds`
            are specified, `n_folds` is prioritized.
        retrain_from_scratch
            Whether the classifier should be retrained from scratch for each set of test data or whether
            it should instead continue training from where it left off on the previous set.
        seed
            Optional random seed for fold selection.
        optimizer
            Optimizer used during training of the classifier.
        learning_rate
            Learning rate used by optimizer.
        batch_size
            Batch size used during training of the classifier.
        preprocess_batch_fn
            Optional batch preprocessing function. For example to convert a list of objects to a batch which can be
            processed by the model.
        epochs
            Number of training epochs for the classifier for each (optional) fold.
        verbose
            Verbosity level during the training of the classifier. 0 is silent, 1 a progress bar.
        train_kwargs
            Optional additional kwargs when fitting the classifier.
        device
            Device type used. The default None tries to use the GPU and falls back on CPU if needed.
            Can be specified by passing either 'cuda', 'gpu' or 'cpu'. Only relevant for 'pytorch' backend.
        dataset
            Dataset object used during training.
        dataloader
            Dataloader object used during training. Only relevant for 'pytorch' backend.
        data_type
            Optionally specify the data type (tabular, image or time-series). Added to metadata.
        """
        super().__init__()
        backend = backend.lower()
        # Fail early if the requested backend is unavailable or unknown.
        if backend == 'tensorflow' and not has_tensorflow or backend == 'pytorch' and not has_pytorch:
            raise ImportError(f'{backend} not installed. Cannot initialize and run the '
                              f'SpotTheDiffDrift detector with {backend} backend.')
        elif backend not in ['tensorflow', 'pytorch']:
            raise NotImplementedError(f'{backend} not implemented. Use tensorflow or pytorch instead.')
        # Capture all constructor arguments (locals() includes self and
        # __class__) to forward them wholesale to the backend detector.
        kwargs = locals()
        args = [kwargs['x_ref']]
        # Remove entries that are not constructor kwargs of the backends;
        # 'optimizer' is only dropped when unset so the backend default applies.
        pop_kwargs = ['self', 'x_ref', 'backend', '__class__']
        if kwargs['optimizer'] is None:
            pop_kwargs += ['optimizer']
        [kwargs.pop(k, None) for k in pop_kwargs]
        if backend == 'tensorflow' and has_tensorflow:
            # 'device' and 'dataloader' are PyTorch-only concepts.
            pop_kwargs = ['device', 'dataloader']
            [kwargs.pop(k, None) for k in pop_kwargs]
            if dataset is None:
                kwargs.update({'dataset': TFDataset})
            self._detector = SpotTheDiffDriftTF(*args, **kwargs)  # type: ignore
        else:
            # PyTorch backend: fall back to the framework-default dataset and
            # dataloader when none are provided.
            if dataset is None:
                kwargs.update({'dataset': TorchDataset})
            if dataloader is None:
                kwargs.update({'dataloader': DataLoader})
            self._detector = SpotTheDiffDriftTorch(*args, **kwargs)  # type: ignore
        # Expose the backend detector's metadata on the wrapper.
        self.meta = self._detector.meta
    def predict(
        self, x: np.ndarray, return_p_val: bool = True, return_distance: bool = True,
        return_probs: bool = True, return_model: bool = True
    ) -> Dict[str, Dict[str, Union[int, str, float, Callable]]]:
        """
        Predict whether a batch of data has drifted from the reference data.
        Parameters
        ----------
        x
            Batch of instances.
        return_p_val
            Whether to return the p-value of the test.
        return_distance
            Whether to return a notion of strength of the drift.
            K-S test stat if binarize_preds=False, otherwise relative error reduction.
        return_probs
            Whether to return the instance level classifier probabilities for the reference and test data
            (0=reference data, 1=test data).
        return_model
            Whether to return the updated model trained to discriminate reference and test instances.
        Returns
        -------
        Dictionary containing 'meta' and 'data' dictionaries.
        'meta' has the detector's metadata.
        'data' contains the drift prediction, the diffs used to distinguish reference from test instances,
        and optionally the p-value, performance of the classifier relative to its expectation under the
        no-change null, the out-of-fold classifier model prediction probabilities on the reference and test
        data, and the trained model.
        """
        # Pure delegation to the backend implementation chosen in __init__.
        return self._detector.predict(x, return_p_val, return_distance, return_probs, return_model)
| 3,454 |
621 | package me.wangyuwei.signuptransition;
import android.animation.AnimatorSet;
import android.animation.ValueAnimator;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.LinearInterpolator;
/**
* 作者: 巴掌 on 16/8/19 09:00
* Github: https://github.com/JeasonWong
*/
public class LoginLoadingView extends View {
    // Idle state: the plain login text is shown.
    public static final int STATUS_LOGIN = 0;
    // Login in progress: an animated underline runs below the text.
    public static final int STATUS_LOGGING = 1;
    // Login succeeded: the text cross-fades to the success label.
    public static final int STATUS_LOGIN_SUCCESS = 2;
    private int mWidth, mHeight;
    private Paint mPaint;
    // Animation duration loaded from resources in initView();
    // NOTE(review): not referenced by the visible methods — presumably used
    // by the success animation further down; confirm.
    private int mDuration;
    private int mStatus = STATUS_LOGIN;
    // Current length of the underline drawn below the text.
    private float mLineWidth;
    // Current x coordinate at which the success text is drawn.
    private float mSuccessTextX;
    // Label shown after a successful login.
    private String mSuccessText = "SUCCESS";
    // Label shown while idle / logging in.
    private String mLoginText = "SIGN UP";
    // Alpha of the login text during the success cross-fade.
    private int mLoginTextAlpha;
    // XML-inflation constructor; delegates to the three-argument form.
    public LoginLoadingView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }
    // Full constructor: sets up paint and resources via initView().
    public LoginLoadingView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        initView();
    }
private void initView() {
mDuration = getResources().getInteger(R.integer.duration);
mPaint = new Paint();
mPaint.setAntiAlias(true);
mPaint.setColor(Color.WHITE);
mPaint.setTextSize(DensityUtil.sp2px(getContext(), 18));
mPaint.setStrokeWidth(DensityUtil.dp2px(getContext(), 3));
}
    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        super.onSizeChanged(w, h, oldw, oldh);
        // Cache the view dimensions used by the drawing math in onDraw().
        mWidth = w;
        mHeight = h;
    }
    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        switch (mStatus) {
            case STATUS_LOGIN:
                // Idle: centered login text only.
                canvas.drawText(mLoginText, (mWidth - getTextWidth(mLoginText)) / 2, (mHeight + getTextHeight(mLoginText)) / 2, mPaint);
                break;
            case STATUS_LOGGING:
                // Logging in: centered login text plus the animated underline,
                // whose length mLineWidth is driven by startLoggingAnim().
                canvas.drawText(mLoginText, (mWidth - getTextWidth(mLoginText)) / 2, (mHeight + getTextHeight(mLoginText)) / 2, mPaint);
                canvas.drawLine((mWidth - getTextWidth(mLoginText)) / 2, mHeight, (mWidth - getTextWidth(mLoginText)) / 2 + mLineWidth, mHeight, mPaint);
                break;
            case STATUS_LOGIN_SUCCESS:
                // Success: cross-fade between login text (alpha mLoginTextAlpha)
                // and success text (complementary alpha) while both slide with
                // mSuccessTextX; then restore full alpha for the underline.
                mPaint.setAlpha(mLoginTextAlpha);
                canvas.drawText(mLoginText, mSuccessTextX + getTextWidth(mSuccessText) + DensityUtil.dp2px(getContext(), 10), (mHeight + getTextHeight(mLoginText)) / 2, mPaint);
                mPaint.setAlpha(255 - mLoginTextAlpha);
                canvas.drawText(mSuccessText, mSuccessTextX, (mHeight + getTextHeight(mSuccessText)) / 2, mPaint);
                mPaint.setAlpha(255);
                canvas.drawLine((mWidth - getTextWidth(mSuccessText)) / 2, mHeight, (mWidth + getTextWidth(mSuccessText)) / 2, mHeight, mPaint);
                break;
        }
    }
/**
* 设置状态
*
* @param status 状态
*/
public void setStatus(int status) {
mStatus = status;
switch (status) {
case STATUS_LOGIN:
break;
case STATUS_LOGGING:
startLoggingAnim();
break;
case STATUS_LOGIN_SUCCESS:
startLoginSuccessAnim();
break;
}
}
/**
* 启动登录动画
*/
private void startLoggingAnim() {
ValueAnimator animator = ValueAnimator.ofFloat(0, getTextWidth(mLoginText));
animator.setDuration(1000);
animator.setRepeatCount(2);
animator.setRepeatMode(ValueAnimator.RESTART);
animator.setInterpolator(new LinearInterpolator());
animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
mLineWidth = (float) animation.getAnimatedValue();
invalidate();
}
});
animator.start();
}
/**
* 启动登录成功动画
*/
private void startLoginSuccessAnim() {
ValueAnimator textXAnim = ValueAnimator.ofFloat(0, (mWidth - getTextWidth(mSuccessText)) / 2);
textXAnim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
mSuccessTextX = (float) animation.getAnimatedValue();
}
});
ValueAnimator alphaAnim = ValueAnimator.ofInt(255, 0);
alphaAnim.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
mLoginTextAlpha = (int) animation.getAnimatedValue();
invalidate();
}
});
AnimatorSet set = new AnimatorSet();
set.playTogether(textXAnim, alphaAnim);
set.setDuration(mDuration);
set.setInterpolator(new LinearInterpolator());
set.start();
}
private float getTextHeight(String text) {
Rect rect = new Rect();
mPaint.getTextBounds(text, 0, text.length(), rect);
return rect.height();
}
private float getTextWidth(String text) {
return mPaint.measureText(text);
}
}
| 2,555 |
648 | <filename>spring-rabbit-test/src/test/java/org/springframework/amqp/rabbit/test/context/SpringRabbitTestTests.java
/*
* Copyright 2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.amqp.rabbit.test.context;
import static org.assertj.core.api.Assertions.assertThat;
import org.junit.jupiter.api.Test;
import org.springframework.amqp.rabbit.config.AbstractRabbitListenerContainerFactory;
import org.springframework.amqp.rabbit.connection.CachingConnectionFactory;
import org.springframework.amqp.rabbit.core.RabbitAdmin;
import org.springframework.amqp.rabbit.core.RabbitTemplate;
import org.springframework.amqp.rabbit.junit.RabbitAvailable;
import org.springframework.amqp.rabbit.junit.RabbitAvailableCondition;
import org.springframework.amqp.rabbit.listener.RabbitListenerEndpointRegistry;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
/**
* @author <NAME>
* @since 2.3
*
*/
@RabbitAvailable
@SpringJUnitConfig
@SpringRabbitTest
public class SpringRabbitTestTests {

	// Beans that @SpringRabbitTest is expected to register; all are autowired
	// by type, so injection failure alone would fail the test context.
	@SuppressWarnings("unused")
	@Autowired
	private RabbitAdmin rabbitAdmin;

	@SuppressWarnings("unused")
	@Autowired
	private AbstractRabbitListenerContainerFactory<?> containerFactory;

	@SuppressWarnings("unused")
	@Autowired
	private RabbitListenerEndpointRegistry endpointRegistry;

	@Autowired
	private RabbitTemplate rabbitTemplate;

	@Test
	void testAutowiring() {
		// The injected template must be backed by the same broker connection
		// factory that the @RabbitAvailable condition established.
		CachingConnectionFactory cachingFactory =
				(CachingConnectionFactory) rabbitTemplate.getConnectionFactory();
		assertThat(cachingFactory.getRabbitConnectionFactory())
				.isSameAs(RabbitAvailableCondition.getBrokerRunning().getConnectionFactory());
	}

	// Intentionally empty: @SpringRabbitTest supplies all required beans.
	@Configuration
	public static class Config {

	}

}
| 687 |
348 | {"nom":"Bouillon","circ":"3ème circonscription","dpt":"Pyrénées-Atlantiques","inscrits":112,"abs":34,"votants":78,"blancs":3,"nuls":1,"exp":74,"res":[{"nuance":"REM","nom":"M. <NAME>","voix":46},{"nuance":"SOC","nom":"<NAME>","voix":28}]} | 98 |
3,269 | # Time: O(n) on average
# Space: O(1)
import random
# quick select solution
class Solution(object):
    def largestEvenSum(self, nums, k):
        """
        Return the largest even sum obtainable by choosing exactly k elements
        of nums, or -1 if no even sum of k elements exists.

        Uses quickselect to place the k largest values in nums[:k] (O(n) on
        average, O(1) extra space; mutates nums in place).

        :type nums: List[int]
        :type k: int
        :rtype: int
        """
        def nth_element(nums, n, compare=lambda a, b: a < b):
            # Iterative quickselect: afterwards nums[n] holds the value that
            # would sit at index n in `compare`-sorted order, with everything
            # that compares "before" it on its left.
            def tri_partition(nums, left, right, target, compare):
                # Dutch-national-flag partition around `target`; returns the
                # inclusive index range that ends up equal to target.
                mid = left
                while mid <= right:
                    if nums[mid] == target:
                        mid += 1
                    elif compare(nums[mid], target):
                        nums[left], nums[mid] = nums[mid], nums[left]
                        left += 1
                        mid += 1
                    else:
                        nums[mid], nums[right] = nums[right], nums[mid]
                        right -= 1
                return left, right

            left, right = 0, len(nums)-1
            while left <= right:
                pivot_idx = random.randint(left, right)
                pivot_left, pivot_right = tri_partition(nums, left, right, nums[pivot_idx], compare)
                if pivot_left <= n <= pivot_right:
                    return
                elif pivot_left > n:
                    right = pivot_left-1
                else:  # pivot_right < n.
                    left = pivot_right+1

        # Move the k largest values into nums[:k] (order within is arbitrary).
        # NOTE: was `xrange` below — Python 2 only; `range` works on both.
        nth_element(nums, k-1, compare=lambda a, b: a > b)
        total = sum(nums[i] for i in range(k))
        if total % 2 == 0:
            return total
        # total is odd: make it even by swapping one chosen element for an
        # unchosen one of the opposite parity. min_k[p] is the smallest chosen
        # value with parity p (inf if no such value was chosen).
        min_k = [float("inf")]*2
        for i in range(k):
            min_k[nums[i] % 2] = min(min_k[nums[i] % 2], nums[i])
        result = -1
        for i in range(k, len(nums)):
            # Dropping the smallest chosen value of the opposite parity and
            # adding nums[i] flips the sum's parity to even.
            result = max(result, total - min_k[not (nums[i] % 2)] + nums[i])
        return result
| 1,030 |
550 | <gh_stars>100-1000
package play.db.helper;
import java.util.ArrayList;
import java.util.List;
/**
 * Base class for SQL query builders. Accumulates positional parameters in
 * order and offers static helpers for inlining literal values into SQL text.
 */
public abstract class SqlQuery {
    /** Positional parameters collected for this query, in order. */
    protected List<Object> params;
    protected SqlQuery() {
        params = new ArrayList<Object>();
    }
    /** Appends one parameter; returns {@code this} for chaining. */
    public SqlQuery param(Object obj) { params.add(obj); return this; }
    /** Appends several parameters in order; returns {@code this} for chaining. */
    public SqlQuery params(Object ... objs) { for (Object obj : objs) params.add(obj); return this; }
    public List<Object> getParams() { return params; }
    /** 1-based index that the next added parameter would receive. */
    public int paramCurrentIndex() { return params.size()+1; }
    /** Positional placeholder ("?N") for the next parameter index. */
    public String pmark() { return "?"+Integer.toString(paramCurrentIndex()); }
    /** Positional placeholder ("?N") offset from the next parameter index. */
    public String pmark(int offset) { return "?"+Integer.toString(paramCurrentIndex()+offset); }
    /**
     * Joins appended fragments with a separator and, when non-empty, wraps the
     * result in a prefix/suffix. Empty fragments are replaced by
     * {@code defaultValue}; a {@code null} defaultValue skips the fragment.
     */
    public static class Concat {
        private String prefix, separator, suffix;
        private String defaultValue;
        private String expr;
        public Concat(String prefix, String separator, String suffix) {
            this.prefix = prefix;
            this.separator = separator;
            this.suffix = suffix;
            this.defaultValue = "";
            this.expr = "";
        }
        public Concat(String prefix, String separator) {
            this(prefix, separator, "");
        }
        /** Copy constructor. */
        public Concat(Concat src) {
            this.prefix = src.prefix;
            this.separator = src.separator;
            this.suffix = src.suffix;
            this.defaultValue = src.defaultValue;
            this.expr = src.expr;
        }
        /** Value substituted for null/empty fragments; null means "skip". */
        public Concat defaultValue(String defaultValue) {
            this.defaultValue = defaultValue;
            return this;
        }
        public Concat prefix(String prefix) {
            this.prefix = prefix;
            return this;
        }
        public Concat separator(String separator) {
            this.separator = separator;
            return this;
        }
        /** Appends one fragment (its {@code toString()}), honoring defaultValue. */
        public Concat append(Object obj) {
            final String text;
            if (obj != null) {
                String objStr = obj.toString();
                if (objStr.length() > 0) text = objStr;
                else text = defaultValue;
            } else text = defaultValue;
            if (text != null) {
                if (expr.length() > 0) {
                    // A second fragment requires a separator to join with.
                    if (separator == null) throw new NullPointerException();
                    expr += separator;
                }
                expr += text;
            }
            return this;
        }
        /** Appends each of the given fragments in order. */
        public Concat add(String ... texts) {
            for (String text : texts) append(text);
            return this;
        }
        public boolean isEmpty() {
            return expr.length()<=0;
        }
        /** Empty when no fragments were kept; otherwise prefix + joined + suffix. */
        @Override
        public String toString() {
            if (isEmpty()) return "";
            if (prefix == null || suffix == null) throw new NullPointerException();
            return prefix + expr + suffix;
        }
    }
    /** Quotes a string as a SQL literal, escaping embedded single quotes. */
    public static String quote(String str) {
        return "'" + str.replace("'","\\'") + "'";
    }
    /**
     * Renders a value as inline SQL text: NULL for null, quoted for strings
     * and enums, a parenthesized comma list for arrays and iterables
     * (recursively), otherwise {@code toString()}.
     */
    public static String inlineParam(Object param) {
        if (param == null) return "NULL";
        String str;
        if (param instanceof String) str = quote(param.toString());
        else if (param instanceof Iterable<?>) {
            Concat list = new Concat("(", ", ", ")");
            for (Object p : (Iterable<?>)param) list.append(inlineParam(p));
            str = list.toString();
        } else if (param instanceof Object[]) {
            Concat list = new Concat("(", ", ", ")");
            for (Object p : (Object[])param) list.append(inlineParam(p));
            str = list.toString();
        } else if (param instanceof Enum<?>) {
            str = quote(param.toString());
        } else str = param.toString();
        return str;
    }
    /**
     * Builds a where-clause condition: {@code column in (...)} for arrays and
     * iterables, {@code column=value} for scalars. Returns "" when the value
     * renders empty (e.g. an empty collection).
     */
    public static String whereIn(String column, Object param) {
        String value = inlineParam(param);
        if (value.length() == 0) return value;
        // Arrays and iterables take the SQL "in" operator; scalars use equality.
        // (Previously two duplicated branches produced the same " in ".)
        final String operator =
                (param instanceof Object[] || param instanceof Iterable<?>) ? " in " : "=";
        return column + operator + value;
    }
}
| 1,900 |
327 | <reponame>akabomb-prog/AVAb
#include <engine.h>
// Minimal no-op entry point. Any real behaviour presumably comes from
// engine.h (e.g. static initializers) — TODO confirm against that header.
// EXPORT is assumed to be a linkage/visibility macro defined by engine.h.
EXPORT
int main()
{}
| 34 |
335 | {
"word": "Rebar",
"definitions": [
"Reinforcing steel used as rods in concrete."
],
"parts-of-speech": "Noun"
} | 62 |
3,212 | /*
* Copyright 2014 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.gettcp;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.ByteBuffer;
import java.nio.channels.SelectionKey;
import java.nio.channels.SocketChannel;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
/**
* Implementation of receiving network client.
*/
/**
 * Implementation of receiving network client.
 *
 * Connects (with bounded retries) to a single remote endpoint and delivers
 * incoming byte messages, delimited by a configurable end-of-message byte,
 * to a {@link MessageHandler}. The selector, address, buffer and
 * end-of-message byte come from {@code AbstractSocketHandler}.
 */
class ReceivingClient extends AbstractSocketHandler {
    // Runs the initial connect attempt and schedules spaced-out retries.
    private final ScheduledExecutorService connectionScheduler;
    // Maximum number of retries after the first failed attempt.
    private volatile int reconnectAttempts;
    // Delay between consecutive connection attempts, in milliseconds.
    private volatile long delayMillisBeforeReconnect;
    // Callback for received messages; may be null (messages then dropped).
    private volatile MessageHandler messageHandler;
    // Address actually connected to; set once a connect attempt succeeds.
    private volatile InetSocketAddress connectedAddress;
    public ReceivingClient(InetSocketAddress address, ScheduledExecutorService connectionScheduler, int readingBufferSize, byte endOfMessageByte) {
        super(address, readingBufferSize, endOfMessageByte);
        this.connectionScheduler = connectionScheduler;
    }
    public void setReconnectAttempts(int reconnectAttempts) {
        this.reconnectAttempts = reconnectAttempts;
    }
    public void setDelayMillisBeforeReconnect(long delayMillisBeforeReconnect) {
        this.delayMillisBeforeReconnect = delayMillisBeforeReconnect;
    }
    public void setMessageHandler(MessageHandler messageHandler) {
        this.messageHandler = messageHandler;
    }
    /**
     * Connects to the configured endpoint, retrying up to
     * {@code reconnectAttempts} additional times with
     * {@code delayMillisBeforeReconnect} between attempts. The work runs on
     * {@code connectionScheduler}; this method blocks until success, retry
     * exhaustion, or a bounded timeout.
     *
     * NOTE(review): the original Javadoc and the log text below mention a
     * "secondary endpoint", but only the single configured {@code address} is
     * ever used — confirm against AbstractSocketHandler/callers before
     * changing the (runtime) log message.
     *
     * @return the address that was successfully connected to
     * @throws Exception the last connection failure once retries are exhausted
     */
    @Override
    InetSocketAddress connect() throws Exception {
        // Released either on success or after the final failed attempt.
        CountDownLatch latch = new CountDownLatch(1);
        AtomicInteger attempt = new AtomicInteger();
        // Holds the terminal failure so it can be rethrown on the caller's thread.
        AtomicReference<Exception> connectionError = new AtomicReference<Exception>();
        this.connectionScheduler.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    rootChannel = doConnect(address);
                    latch.countDown();
                    connectedAddress = address;
                } catch (Exception e) {
                    if (logger.isInfoEnabled()) {
                        logger.info("Failed to connect to primary endpoint '" + address + "'.");
                    }
                    if (attempt.incrementAndGet() <= reconnectAttempts) {
                        if (logger.isInfoEnabled()) {
                            logger.info("Will attempt to reconnect to '" + address + "'.");
                        }
                        // Re-run this same Runnable after the configured delay.
                        connectionScheduler.schedule(this, delayMillisBeforeReconnect, TimeUnit.MILLISECONDS);
                    } else {
                        connectionError.set(e);
                        logger.error("Failed to connect to secondary endpoint.");
                        latch.countDown();
                    }
                }
            }
        });
        try {
            // Upper bound: worst-case total retry time plus 2s of slack.
            boolean finishedTask = latch.await(this.reconnectAttempts * delayMillisBeforeReconnect + 2000, TimeUnit.MILLISECONDS);
            if (finishedTask){
                if (connectionError.get() != null) {
                    throw connectionError.get();
                }
            } else {
                logger.error("Exceeded wait time to connect. Possible deadlock, please report!. Interrupting."); // should never happen!
            }
        } catch (InterruptedException e) {
            // Restore the interrupt flag before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Current thread is interrupted");
        }
        return this.connectedAddress;
    }
    // Opens a blocking connect, then switches the channel to non-blocking
    // mode and registers it with the shared selector for reads.
    private SocketChannel doConnect(InetSocketAddress addressToConnect) throws IOException {
        SocketChannel channel = SocketChannel.open();
        if (channel.connect(addressToConnect)) {
            channel.configureBlocking(false);
            channel.register(this.selector, SelectionKey.OP_READ);
        } else {
            throw new IllegalStateException("Failed to connect to Server at: " + addressToConnect);
        }
        return channel;
    }
    /**
     * Process the message that has arrived off the wire.
     *
     * A chunk whose last byte is not {@code endOfMessageByte} is flagged
     * partial and marks the key (attachment = 1); the next chunk that does end
     * with the delimiter is still delivered with {@code partialMessage=true}
     * (it completes the earlier split message) and resets the marker to 0.
     */
    @Override
    void processData(SelectionKey selectionKey, ByteBuffer messageBuffer) throws IOException {
        byte[] message = new byte[messageBuffer.limit()];
        logger.debug("Received message(size=" + message.length + ")");
        messageBuffer.get(message);
        byte lastByteValue = message[message.length - 1];
        boolean partialMessage = false;
        if (lastByteValue != this.endOfMessageByte) {
            partialMessage = true;
            selectionKey.attach(1);
        } else {
            Integer wasLastPartial = (Integer) selectionKey.attachment();
            if (wasLastPartial != null) {
                if (wasLastPartial.intValue() == 1) {
                    partialMessage = true;
                    selectionKey.attach(0);
                }
            }
        }
        if (this.messageHandler != null) {
            this.messageHandler.handle(this.connectedAddress, message, partialMessage);
        }
    }
}
372 | <gh_stars>100-1000
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.vault.v1.model;
/**
* Gmail and classic Hangouts-specific count metrics.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Google Vault API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
// NOTE: generated model class (see the generator notice above) — prefer
// regenerating from the API discovery document over hand-editing.
@SuppressWarnings("javadoc")
public final class MailCountResult extends com.google.api.client.json.GenericJson {
  /**
   * Errors occurred when querying these accounts.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<AccountCountError> accountCountErrors;
  static {
    // hack to force ProGuard to consider AccountCountError used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(AccountCountError.class);
  }
  /**
   * Subtotal count per matching account that have more than zero messages.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<AccountCount> accountCounts;
  static {
    // hack to force ProGuard to consider AccountCount used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(AccountCount.class);
  }
  /**
   * Total number of accounts that can be queried and have more than zero messages.
   * The value may be {@code null}.
   */
  // @JsonString: the wire value is a JSON string, parsed into a Long.
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long matchingAccountsCount;
  /**
   * When **DataScope** is **HELD_DATA**, the number of accounts in the request that are not queried
   * because they are not on hold. For other data scopes, this field is not set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> nonQueryableAccounts;
  /**
   * Total number of accounts involved in this count operation.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key @com.google.api.client.json.JsonString
  private java.lang.Long queriedAccountsCount;
  /**
   * Errors occurred when querying these accounts.
   * @return value or {@code null} for none
   */
  public java.util.List<AccountCountError> getAccountCountErrors() {
    return accountCountErrors;
  }
  /**
   * Errors occurred when querying these accounts.
   * @param accountCountErrors accountCountErrors or {@code null} for none
   */
  public MailCountResult setAccountCountErrors(java.util.List<AccountCountError> accountCountErrors) {
    this.accountCountErrors = accountCountErrors;
    return this;
  }
  /**
   * Subtotal count per matching account that have more than zero messages.
   * @return value or {@code null} for none
   */
  public java.util.List<AccountCount> getAccountCounts() {
    return accountCounts;
  }
  /**
   * Subtotal count per matching account that have more than zero messages.
   * @param accountCounts accountCounts or {@code null} for none
   */
  public MailCountResult setAccountCounts(java.util.List<AccountCount> accountCounts) {
    this.accountCounts = accountCounts;
    return this;
  }
  /**
   * Total number of accounts that can be queried and have more than zero messages.
   * @return value or {@code null} for none
   */
  public java.lang.Long getMatchingAccountsCount() {
    return matchingAccountsCount;
  }
  /**
   * Total number of accounts that can be queried and have more than zero messages.
   * @param matchingAccountsCount matchingAccountsCount or {@code null} for none
   */
  public MailCountResult setMatchingAccountsCount(java.lang.Long matchingAccountsCount) {
    this.matchingAccountsCount = matchingAccountsCount;
    return this;
  }
  /**
   * When **DataScope** is **HELD_DATA**, the number of accounts in the request that are not queried
   * because they are not on hold. For other data scopes, this field is not set.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getNonQueryableAccounts() {
    return nonQueryableAccounts;
  }
  /**
   * When **DataScope** is **HELD_DATA**, the number of accounts in the request that are not queried
   * because they are not on hold. For other data scopes, this field is not set.
   * @param nonQueryableAccounts nonQueryableAccounts or {@code null} for none
   */
  public MailCountResult setNonQueryableAccounts(java.util.List<java.lang.String> nonQueryableAccounts) {
    this.nonQueryableAccounts = nonQueryableAccounts;
    return this;
  }
  /**
   * Total number of accounts involved in this count operation.
   * @return value or {@code null} for none
   */
  public java.lang.Long getQueriedAccountsCount() {
    return queriedAccountsCount;
  }
  /**
   * Total number of accounts involved in this count operation.
   * @param queriedAccountsCount queriedAccountsCount or {@code null} for none
   */
  public MailCountResult setQueriedAccountsCount(java.lang.Long queriedAccountsCount) {
    this.queriedAccountsCount = queriedAccountsCount;
    return this;
  }
  /** Covariant override of the generic field setter from GenericJson. */
  @Override
  public MailCountResult set(String fieldName, Object value) {
    return (MailCountResult) super.set(fieldName, value);
  }
  /** Covariant override returning a deep-ish copy per GenericJson semantics. */
  @Override
  public MailCountResult clone() {
    return (MailCountResult) super.clone();
  }
}
| 1,926 |
902 | <filename>tests/cli/16_test_stack_services_restart.py
from wo.utils import test
from wo.cli.main import WOTestApp
class CliTestCaseStackRestart(test.WOTestCase):
    """CLI smoke tests for `wo stack restart` and its per-service flags."""

    def _run_restart(self, *flags):
        # Shared driver: invoke `wo stack restart` with the given extra flags.
        # (Replaces four copy-pasted with-blocks.)
        with WOTestApp(argv=['stack', 'restart'] + list(flags)) as app:
            app.run()

    def test_wo_cli_stack_services_restart_nginx(self):
        # Restart only the Nginx service.
        self._run_restart('--nginx')

    def test_wo_cli_stack_services_restart_php_fpm(self):
        # Restart only the PHP-FPM service.
        self._run_restart('--php')

    def test_wo_cli_stack_services_restart_mysql(self):
        # Restart only the MySQL service.
        self._run_restart('--mysql')

    def test_wo_cli_stack_services_restart_all(self):
        # No flags: restart every managed service.
        self._run_restart()
| 340 |
474 | /*
* Copyright (C) 2006 Sun Microsystems, Inc. All rights reserved.
* Use is subject to license terms.
*
* Redistribution and use in source and binary forms, with or without modification, are
* permitted provided that the following conditions are met: Redistributions of source code
* must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of
* conditions and the following disclaimer in the documentation and/or other materials
* provided with the distribution. Neither the name of the Sun Microsystems nor the names of
* is contributors may be used to endorse or promote products derived from this software
* without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS
* OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
* AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.gearvrf.script.javascript.util;
import java.util.*;
/**
* Entry set implementation for Bindings implementations
*
* @version 1.0
* @author <NAME>
* @since 1.6
*/
public class BindingsEntrySet extends AbstractSet<Map.Entry<String, Object>> {

    // Backing bindings the entries read from / remove through.
    private BindingsBase base;
    // Snapshot of the key names taken at construction time; size() and
    // iteration run over this snapshot, not the live bindings.
    private String[] keys;

    public BindingsEntrySet(BindingsBase base) {
        this.base = base;
        keys = base.getNames();
    }

    // Size of the snapshot taken at construction, not the live bindings.
    public int size() {
        return keys.length;
    }

    public Iterator<Map.Entry<String, Object>> iterator() {
        return new BindingsIterator();
    }

    /**
     * Read-mostly entry view: the key is fixed at construction, the value is
     * fetched lazily from the bindings; mutation is unsupported.
     */
    public class BindingsEntry implements Map.Entry<String, Object> {
        private String key;
        public BindingsEntry(String key) {
            this.key = key;
        }
        // Entries are read-only views; Map.Entry.setValue is optional.
        public Object setValue(Object value) {
            throw new UnsupportedOperationException();
        }
        public String getKey() {
            return key;
        }
        // Fetched live from the bindings on each call, not cached.
        public Object getValue() {
            return base.get(key);
        }
    }

    /**
     * Iterator over the key-name snapshot. remove() removes the last-returned
     * key from the underlying bindings but NOT from the snapshot — subsequent
     * iteration still visits the removed key's entry (getValue() then reflects
     * whatever base.get returns for it). NOTE(review): confirm this staleness
     * is intended by callers.
     */
    public class BindingsIterator implements Iterator<Map.Entry<String, Object>> {
        private int current = 0;
        // True once remove() has been called for the last-returned element,
        // guarding against a second remove() without an intervening next().
        private boolean stale = false;
        public boolean hasNext() {
            return (current < keys.length);
        }
        public BindingsEntry next() {
            stale = false;
            return new BindingsEntry(keys[current++]);
        }
        public void remove() {
            // Illegal before the first next() or twice for the same element,
            // per the java.util.Iterator contract.
            if (stale || current == 0) {
                throw new IllegalStateException();
            }
            stale = true;
            base.remove(keys[current - 1]);
        }
    }
}
| 1,254 |
7,409 | <filename>ee/api/test/test_team_memberships.py
from rest_framework import status
from ee.api.test.base import APILicensedTest
from ee.models.explicit_team_membership import ExplicitTeamMembership
from posthog.models import OrganizationMembership, Team, User
class TestTeamMembershipsAPI(APILicensedTest):
CLASS_DATA_LEVEL_SETUP = False
def setUp(self):
super().setUp()
self.team.access_control = True
self.team.save()
def test_add_member_as_org_owner_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.OWNER
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
self.assertEqual(self.team.explicit_memberships.count(), 0)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.team.explicit_memberships.count(), 1)
def test_add_member_as_org_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
self.assertEqual(self.team.explicit_memberships.count(), 0)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.team.explicit_memberships.count(), 1)
def test_add_member_as_org_member_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
self.assertEqual(self.team.explicit_memberships.count(), 0)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You don't have sufficient permissions in the project."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(self.team.explicit_memberships.count(), 0)
def test_add_yourself_as_org_member_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
self.assertEqual(self.team.explicit_memberships.count(), 0)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": self.user.uuid})
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You don't have sufficient permissions in the project."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(self.team.explicit_memberships.count(), 0)
def test_add_yourself_as_org_admin_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
self.assertEqual(self.team.explicit_memberships.count(), 0)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": self.user.uuid})
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You can't explicitly add yourself to projects."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(self.team.explicit_memberships.count(), 0)
def test_add_member_as_org_member_and_project_member_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER
)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
self.assertEqual(self.team.explicit_memberships.count(), 1)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You don't have sufficient permissions in the project."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
self.assertEqual(self.team.explicit_memberships.count(), 1)
def test_add_member_as_org_member_but_project_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN
)
self.assertEqual(self.team.explicit_memberships.count(), 1)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(self.team.explicit_memberships.count(), 2)
def test_add_member_as_org_admin_and_project_member_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER
)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_admin_as_org_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post(
"/api/projects/@current/explicit_members/",
{"user_uuid": new_user.uuid, "level": ExplicitTeamMembership.Level.ADMIN},
)
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_admin_as_project_member_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER
)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post(
"/api/projects/@current/explicit_members/",
{"user_uuid": new_user.uuid, "level": ExplicitTeamMembership.Level.ADMIN},
)
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You don't have sufficient permissions in the project."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_add_admin_as_project_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN
)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post(
"/api/projects/@current/explicit_members/",
{"user_uuid": new_user.uuid, "level": ExplicitTeamMembership.Level.ADMIN},
)
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_member_to_non_current_project_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
another_team = Team.objects.create(organization=self.organization, access_control=True)
new_user: User = User.objects.create_and_join(
self.organization, "<EMAIL>", None,
)
response = self.client.post(f"/api/projects/{another_team.id}/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.MEMBER, "level": ExplicitTeamMembership.Level.MEMBER,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
def test_add_member_to_project_in_outside_organization_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
_, new_team, new_user = User.objects.bootstrap(
"Acme", "<EMAIL>", None, team_fields={"access_control": True}
)
response = self.client.post(f"/api/projects/{new_team.id}/explicit_members/", {"user_uuid": new_user.uuid,})
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You don't have sufficient permissions in the project."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_add_member_to_project_that_is_not_organization_member_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
_, new_team, new_user = User.objects.bootstrap("Acme", "<EMAIL>", None)
response = self.client.post(f"/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid,})
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You both need to belong to the same organization."), response_data
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_add_member_to_nonexistent_project_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post(f"/api/projects/2137/explicit_members/", {"user_uuid": new_user.uuid,})
response_data = response.json()
self.assertDictEqual(self.not_found_response("Project not found."), response_data)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_set_level_of_member_to_admin_as_org_owner_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.OWNER
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
new_org_membership: OrganizationMembership = OrganizationMembership.objects.get(
user=new_user, organization=self.organization
)
new_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=new_org_membership
)
response = self.client.patch(
f"/api/projects/@current/explicit_members/{new_user.uuid}", {"level": ExplicitTeamMembership.Level.ADMIN}
)
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_set_level_of_member_to_admin_as_org_member_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
new_org_membership: OrganizationMembership = OrganizationMembership.objects.get(
user=new_user, organization=self.organization
)
new_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=new_org_membership
)
response = self.client.patch(
f"/api/projects/@current/explicit_members/{new_user.uuid}", {"level": ExplicitTeamMembership.Level.ADMIN}
)
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You don't have sufficient permissions in the project."), response_data,
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_demote_yourself_as_org_member_and_project_admin_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
self_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN
)
response = self.client.patch(
f"/api/projects/@current/explicit_members/{self.user.uuid}", {"level": ExplicitTeamMembership.Level.MEMBER}
)
response_data = response.json()
self.assertDictEqual(
self.permission_denied_response("You can't set your own access level."), response_data,
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_set_level_of_member_to_admin_as_org_member_but_project_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
self_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN
)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
new_org_membership: OrganizationMembership = OrganizationMembership.objects.get(
user=new_user, organization=self.organization
)
new_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=new_org_membership
)
response = self.client.patch(
f"/api/projects/@current/explicit_members/{new_user.uuid}", {"level": ExplicitTeamMembership.Level.ADMIN}
)
response_data = response.json()
self.assertDictContainsSubset(
{"effective_level": ExplicitTeamMembership.Level.ADMIN, "level": ExplicitTeamMembership.Level.ADMIN,},
response_data,
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_remove_member_as_org_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.ADMIN
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
new_org_membership: OrganizationMembership = OrganizationMembership.objects.get(
user=new_user, organization=self.organization
)
new_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=new_org_membership
)
response = self.client.delete(f"/api/projects/@current/explicit_members/{new_user.uuid}")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_remove_member_as_org_member_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
new_org_membership: OrganizationMembership = OrganizationMembership.objects.get(
user=new_user, organization=self.organization
)
new_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=new_org_membership
)
response = self.client.delete(f"/api/projects/@current/explicit_members/{new_user.uuid}")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_remove_member_as_org_member_but_project_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
self_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN
)
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
new_org_membership: OrganizationMembership = OrganizationMembership.objects.get(
user=new_user, organization=self.organization
)
new_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=new_org_membership
)
response = self.client.delete(f"/api/projects/@current/explicit_members/{new_user.uuid}")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_add_member_to_non_private_project_forbidden(self):
self.organization_membership.level = OrganizationMembership.Level.OWNER
self.organization_membership.save()
self.team.access_control = False
self.team.save()
new_user: User = User.objects.create_and_join(self.organization, "<EMAIL>", None)
response = self.client.post("/api/projects/@current/explicit_members/", {"user_uuid": new_user.uuid})
response_data = response.json()
self.assertDictEqual(
self.validation_error_response(
"Explicit members can only be accessed for projects with project-based permissioning enabled.",
),
response_data,
)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_leave_project_as_admin_allowed(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
explicit_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.ADMIN
)
response = self.client.delete(f"/api/projects/@current/explicit_members/{self.user.uuid}")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_leave_project_as_admin_member(self):
self.organization_membership.level = OrganizationMembership.Level.MEMBER
self.organization_membership.save()
explicit_team_membership = ExplicitTeamMembership.objects.create(
team=self.team, parent_membership=self.organization_membership, level=ExplicitTeamMembership.Level.MEMBER
)
response = self.client.delete(f"/api/projects/@current/explicit_members/{self.user.uuid}")
self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
def test_leave_project_as_project_outsider(self):
response = self.client.delete(f"/api/projects/@current/explicit_members/{self.user.uuid}")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_leave_project_as_organization_outsider(self):
self.organization_membership.delete()
response = self.client.delete(f"/api/projects/@current/explicit_members/{self.user.uuid}")
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| 8,626 |
319 | <filename>src/main/java/org/clapper/util/classutil/ClassLoaderBuilder.java
package org.clapper.util.classutil;
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.net.MalformedURLException;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.StringTokenizer;
import org.clapper.util.logging.Logger;
/**
* A <tt>ClassLoaderBuilder</tt> is used to build an alternate class loader
* that includes additional jar files, zip files and/or directories in its
* load path. It's basically a convenient wrapper around
* <tt>java.net.URLClassLoader</tt>.
*/
public class ClassLoaderBuilder
{
    /*----------------------------------------------------------------------*\
                            Private Data Items
    \*----------------------------------------------------------------------*/

    /**
     * URLs gathered so far. A LinkedHashSet preserves the order in which
     * entries were added while silently ignoring duplicates.
     */
    private final Collection<URL> urls = new LinkedHashSet<URL>();

    /**
     * For logging
     */
    private static final Logger log = new Logger (ClassLoaderBuilder.class);

    /*----------------------------------------------------------------------*\
                                Constructor
    \*----------------------------------------------------------------------*/

    /**
     * Create a new <tt>ClassLoaderBuilder</tt>.
     */
    public ClassLoaderBuilder()
    {
        // Nothing to do
    }

    /*----------------------------------------------------------------------*\
                              Public Methods
    \*----------------------------------------------------------------------*/

    /**
     * Add a jar file, zip file or directory to the list of places the
     * not-yet-constructed class loader will search. Anything that does not
     * exist, or is not a jar file, zip file or directory, is ignored.
     *
     * @param file the jar file, zip file or directory
     *
     * @return <tt>true</tt> if the entry was suitable and added;
     *         <tt>false</tt> if it was not a jar file, zip file, or
     *         directory.
     */
    public boolean add (File file)
    {
        String fileName = file.getPath();

        try
        {
            if (ClassUtil.fileCanContainClasses (file))
            {
                if (file.isDirectory() && (! fileName.endsWith ("/")))
                {
                    // URLClassLoader only treats URLs ending in "/" as
                    // directories, so normalize the path first.
                    fileName = fileName + "/";
                    file = new File (fileName);
                }

                urls.add (file.toURI().toURL());
                return true;
            }
        }
        catch (MalformedURLException ex)
        {
            log.error ("Unexpected exception", ex);
        }

        log.debug ("Skipping non-jar, non-zip, non-directory \"" +
                   fileName +
                   "\"");
        return false;
    }

    /**
     * Add an array of jar files, zip files or directories to the list of
     * places the not-yet-constructed class loader will search. Unsuitable
     * entries are skipped.
     *
     * @param files the array
     *
     * @return the number of entries from the array that were actually added
     */
    public int add (File[] files)
    {
        int added = 0;

        for (File candidate : files)
        {
            if (add (candidate))
                added++;
        }

        return added;
    }

    /**
     * Add a <tt>Collection</tt> of jar files, zip files or directories to
     * the list of places the not-yet-constructed class loader will search.
     * Unsuitable entries are skipped.
     *
     * @param files the collection
     *
     * @return the number of entries from the collection that were
     *         actually added
     */
    public int add (Collection<File> files)
    {
        int added = 0;

        for (File candidate : files)
        {
            if (add (candidate))
                added++;
        }

        return added;
    }

    /**
     * Add every entry of the JVM's current classpath
     * (<tt>java.class.path</tt>) to the search path.
     */
    public void addClassPath()
    {
        String path;

        try
        {
            path = System.getProperty ("java.class.path");
        }
        catch (Exception ex)
        {
            path = "";
            log.error ("Unable to get class path", ex);
        }

        if (path != null)
        {
            StringTokenizer tok = new StringTokenizer (path,
                                                       File.pathSeparator);
            while (tok.hasMoreTokens())
                add (new File (tok.nextToken()));
        }
    }

    /**
     * Clear the stored files in this object.
     */
    public void clear()
    {
        urls.clear();
    }

    /**
     * Create and return a class loader that will search the additional
     * places defined in this builder, using this class's own class loader
     * as the delegation parent.
     *
     * @return a new class loader
     *
     * @throws SecurityException if a security manager exists and its
     *                           <tt>checkCreateClassLoader()</tt> method
     *                           does not allow creation of a class loader
     */
    public ClassLoader createClassLoader()
        throws SecurityException
    {
        return createClassLoader (getClass().getClassLoader());
    }

    /**
     * Create and return a class loader that will search the additional
     * places defined in this builder, delegating to the specified parent
     * <tt>ClassLoader</tt>.
     *
     * @param parentLoader the desired parent class loader
     *
     * @return a new class loader
     *
     * @throws SecurityException if a security manager exists and its
     *                           <tt>checkCreateClassLoader()</tt> method
     *                           does not allow creation of a class loader
     */
    public ClassLoader createClassLoader (ClassLoader parentLoader)
        throws SecurityException
    {
        return new URLClassLoader (urls.toArray (new URL[0]),
                                   parentLoader);
    }
}
| 2,735 |
1,165 | /*******************************************************************************
* Copyright 2018 T Mobile, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy
* of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
/**
Copyright (C) 2017 T Mobile Inc - All Rights Reserve
Purpose:
Author :kkumar
Modified Date: Jul 16, 2018
**/
/*
*Copyright 2016-2017 T Mobile, Inc. or its affiliates. All Rights Reserved.
*
*Licensed under the Amazon Software License (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* or in the "license" file accompanying this file. This file is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
* implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tmobile.pacman.util;
import static org.junit.Assert.*;
import java.util.Map;
import javax.net.ssl.SSLContext;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.powermock.core.classloader.annotations.PowerMockIgnore;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import com.google.common.collect.Maps;
import com.tmobile.pacman.common.PacmanSdkConstants;
import com.tmobile.pacman.commons.rule.Annotation;
import com.tmobile.pacman.commons.rule.PacmanRule;
import com.tmobile.pacman.commons.rule.RuleResult;
import com.tmobile.pacman.commons.rule.Annotation.Type;
// TODO: Auto-generated Javadoc
/**
* The Class RuleExecutionUtilsTest.
*/
@PowerMockIgnore("org.apache.http.conn.ssl.*")
@RunWith(PowerMockRunner.class)
@PrepareForTest({ SSLContext.class })
public class RuleExecutionUtilsTest {
    /**
     * Post audit trail.
     *
     * <p>NOTE(review): despite its name, this test exercises
     * ifFilterMatchesTheCurrentResource rather than audit-trail posting —
     * presumably a copy/paste leftover; consider renaming.</p>
     *
     * @throws Exception the exception
     */
    @Test
    public void postAuditTrail() throws Exception {
        Map<String, String> param = Maps.newHashMap();
        param.put(PacmanSdkConstants.ACCOUNT_ID, "acc123");
        param.put(PacmanSdkConstants.REGION, "region123");
        param.put(PacmanSdkConstants.RESOURCE_ID, "resou123");
        // NOTE(review): this second put overwrites the REGION value set above
        // ("region123" -> "region"); the same duplicated put recurs below — confirm intent.
        param.put(PacmanSdkConstants.REGION, "region");
        // Filter and resource are the same map, so every filter entry trivially matches.
        boolean response = RuleExecutionUtils.ifFilterMatchesTheCurrentResource(param, param);
        assertTrue(response);
    }
    /**
     * Gets the local rule param.
     *
     * <p>Smoke test: only asserts that the merged parameter map is non-null.</p>
     *
     * @throws Exception the exception
     */
    @Test
    public void getLocalRuleParam() throws Exception {
        Map<String, String> param = Maps.newHashMap();
        param.put(PacmanSdkConstants.ACCOUNT_ID, "acc123");
        param.put(PacmanSdkConstants.REGION, "region123");
        param.put(PacmanSdkConstants.RESOURCE_ID, "resou123");
        // NOTE(review): duplicated REGION put, see postAuditTrail above.
        param.put(PacmanSdkConstants.REGION, "region");
        Map<String, String> response = RuleExecutionUtils.getLocalRuleParam(param, param);
        assertNotNull(response);
    }
    /**
     * Builds the annotation.
     *
     * <p>Smoke test: builds an INFO annotation from a fully-populated parameter
     * map and asserts the result is non-null.</p>
     *
     * @throws Exception the exception
     */
    @Test
    public void buildAnnotation() throws Exception {
        Type annotationType = Type.INFO;
        Map<String, String> param = Maps.newHashMap();
        param.put(PacmanSdkConstants.ACCOUNT_ID, "acc123");
        param.put(PacmanSdkConstants.APPLICATION_TAG_KEY, "acc123");
        param.put(PacmanSdkConstants.INVOCATION_ID, "acc123");
        param.put(PacmanSdkConstants.RULE_SEVERITY, "acc123");
        param.put(PacmanSdkConstants.REGION, "region123");
        param.put(PacmanSdkConstants.RESOURCE_ID, "resou123");
        // NOTE(review): duplicated REGION put, see postAuditTrail above.
        param.put(PacmanSdkConstants.REGION, "region");
        Map<String, String> response = RuleExecutionUtils.buildAnnotation(param, param, "executionId123",annotationType, getPacmanRule());
        assertNotNull(response);
    }
    /**
     * Gets the rule attribute.
     *
     * <p>Checks both branches: a resolvable attribute when rule params and
     * annotation are supplied, and a null result when neither is given.</p>
     *
     * @throws Exception the exception
     */
    @Test
    public void getRuleAttribute() throws Exception {
        Map<String, String> param = Maps.newHashMap();
        param.put(PacmanSdkConstants.ACCOUNT_ID, "acc123");
        param.put(PacmanSdkConstants.REGION, "region123");
        param.put(PacmanSdkConstants.RESOURCE_ID, "resou123");
        // NOTE(review): duplicated REGION put, see postAuditTrail above.
        param.put(PacmanSdkConstants.REGION, "region");
        Annotation annotation = new Annotation();
        annotation.put(PacmanSdkConstants.RULE_ID, "ruleId123");
        annotation.put(PacmanSdkConstants.DOC_ID, "docId123");
        annotation.put(PacmanSdkConstants.DATA_SOURCE_KEY, "sKey123");
        annotation.put(PacmanSdkConstants.TARGET_TYPE, "target123");
        RuleResult result = new RuleResult();
        result.setAnnotation(annotation);
        result.setDesc("desc");
        result.setResource(param);
        result.setStatus("status");
        PacmanRule ruleAnnotation = getPacmanRule();
        String response = RuleExecutionUtils.getRuleAttribute(result, param, ruleAnnotation, "attribute");
        assertNotNull(response);
        // With neither rule params nor a rule annotation, no attribute can be resolved.
        response = RuleExecutionUtils.getRuleAttribute(result, null, null, "attribute");
        assertNull(response);
    }
    /**
     * Gets the pacman rule.
     *
     * @return a stub PacmanRule annotation instance with fixed metadata
     *         (severity "high", key "key", desc "desc", category "category")
     */
    private PacmanRule getPacmanRule() {
        return new PacmanRule() {
            @Override
            public Class<? extends java.lang.annotation.Annotation> annotationType() {
                return null;
            }
            @Override
            public String severity() {
                return "high";
            }
            @Override
            public String key() {
                return "key";
            }
            @Override
            public String desc() {
                return "desc";
            }
            @Override
            public String category() {
                return "category";
            }
        };
    }
}
| 1,925 |
386 | <filename>bus-health/src/main/java/org/aoju/bus/health/unix/aix/drivers/Lssrad.java
/*********************************************************************************
* *
* The MIT License (MIT) *
* *
* Copyright (c) 2015-2021 aoju.org OSHI and other contributors. *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy *
* of this software and associated documentation files (the "Software"), to deal *
* in the Software without restriction, including without limitation the rights *
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell *
* copies of the Software, and to permit persons to whom the Software is *
* furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, *
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN *
* THE SOFTWARE. *
* *
********************************************************************************/
package org.aoju.bus.health.unix.aix.drivers;
import org.aoju.bus.core.annotation.ThreadSafe;
import org.aoju.bus.core.lang.Normal;
import org.aoju.bus.core.lang.RegEx;
import org.aoju.bus.core.lang.Symbol;
import org.aoju.bus.core.lang.tuple.Pair;
import org.aoju.bus.health.Builder;
import org.aoju.bus.health.Executor;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Utility to query lssrad
*
* @author <NAME>
* @version 6.3.2
* @since JDK 1.8+
*/
@ThreadSafe
public final class Lssrad {
    private Lssrad() {
    }
    /**
     * Query {@code lssrad} to get numa node and physical package info
     *
     * @return A map of processor number to a pair containing the ref (NUMA
     * equivalent) and srad (package)
     */
    public static Map<Integer, Pair<Integer, Integer>> queryNodesPackages() {
        /*-
        # lssrad -av
        REF1 SRAD MEM CPU
        0
        0 32749.12 0-63
        1 9462.00 64-67 72-75
        80-83 88-91
        1
        2 2471.19 92-95
        2
        3 1992.00
        4 249.00
        */
        int node = 0; // current REF (NUMA node): updated by lines that start with a digit in column 0
        int slot = 0; // current SRAD (package): updated by rows carrying a decimal MEM value
        Map<Integer, Pair<Integer, Integer>> nodeMap = new HashMap<>();
        List<String> lssrad = Executor.runNative("lssrad -av");
        // remove header
        if (!lssrad.isEmpty()) {
            lssrad.remove(0);
        }
        for (String s : lssrad) {
            String t = s.trim();
            if (!t.isEmpty()) {
                if (Character.isDigit(s.charAt(0))) {
                    // Unindented digit: a new REF section begins.
                    node = Builder.parseIntOrDefault(t, 0);
                } else {
                    if (t.contains(Symbol.DOT)) {
                        // "SRAD MEM [CPU...]" row: first token is the SRAD id, second is
                        // the memory figure (contains the dot); the rest is the CPU list.
                        String[] split = RegEx.SPACES.split(t, 3);
                        slot = Builder.parseIntOrDefault(split[0], 0);
                        t = split.length > 2 ? split[2] : Normal.EMPTY;
                    }
                    // Whatever remains (including continuation lines with no MEM value)
                    // is hyphenated CPU ranges belonging to the current node/slot.
                    for (Integer proc : Builder.parseHyphenatedIntList(t)) {
                        nodeMap.put(proc, Pair.of(node, slot));
                    }
                }
            }
        }
        return nodeMap;
    }
}
| 2,344 |
22,481 | <gh_stars>1000+
"""Support for OpenTherm Gateway binary sensors."""
import logging
from pprint import pformat
from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity
from homeassistant.const import CONF_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import DeviceInfo, async_generate_entity_id
from homeassistant.helpers.entity_registry import async_get_registry
from . import DOMAIN
from .const import (
BINARY_SENSOR_INFO,
DATA_GATEWAYS,
DATA_OPENTHERM_GW,
DEPRECATED_BINARY_SENSOR_SOURCE_LOOKUP,
TRANSLATE_SOURCE,
)
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the OpenTherm Gateway binary sensors."""
    gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]]
    ent_reg = await async_get_registry(hass)
    entities = []
    legacy_entities = []
    for var, info in BINARY_SENSOR_INFO.items():
        device_class = info[0]
        friendly_name_format = info[1]
        # One entity per status source that reports this variable.
        for source in info[2]:
            entities.append(
                OpenThermBinarySensor(
                    gw_dev,
                    var,
                    source,
                    device_class,
                    friendly_name_format,
                )
            )
        # Handle entities registered under the old, source-less entity id:
        # drop them if disabled, otherwise keep a deprecated stand-in.
        old_style_entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, f"{var}_{gw_dev.gw_id}", hass=gw_dev.hass
        )
        old_ent = ent_reg.async_get(old_style_entity_id)
        if old_ent and old_ent.config_entry_id == config_entry.entry_id:
            if old_ent.disabled:
                ent_reg.async_remove(old_style_entity_id)
            else:
                legacy_entities.append(
                    DeprecatedOpenThermBinarySensor(
                        gw_dev,
                        var,
                        device_class,
                        friendly_name_format,
                    )
                )
    entities.extend(legacy_entities)
    if legacy_entities:
        _LOGGER.warning(
            "The following binary_sensor entities are deprecated and may "
            "no longer behave as expected. They will be removed in a "
            "future version. You can force removal of these entities by "
            "disabling them and restarting Home Assistant.\n%s",
            pformat([s.entity_id for s in legacy_entities]),
        )
    async_add_entities(entities)
class OpenThermBinarySensor(BinarySensorEntity):
    """Represent an OpenTherm Gateway binary sensor."""
    def __init__(self, gw_dev, var, source, device_class, friendly_name_format):
        """Initialize the binary sensor.

        gw_dev: gateway hub object (provides gw_id, name, hass, update_signal).
        var: status variable this sensor reflects.
        source: key of the status dict the variable is read from.
        """
        # Entity id encodes variable, source and gateway id, so the same variable
        # may exist once per source.
        self.entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, f"{var}_{source}_{gw_dev.gw_id}", hass=gw_dev.hass
        )
        self._gateway = gw_dev
        self._var = var
        self._source = source
        self._state = None  # None until the first report arrives => unavailable
        self._device_class = device_class
        if TRANSLATE_SOURCE[source] is not None:
            # Disambiguate the friendly name when the source has a readable label.
            friendly_name_format = (
                f"{friendly_name_format} ({TRANSLATE_SOURCE[source]})"
            )
        self._friendly_name = friendly_name_format.format(gw_dev.name)
        self._unsub_updates = None  # dispatcher unsubscribe callback, set on add
    async def async_added_to_hass(self):
        """Subscribe to updates from the component."""
        _LOGGER.debug("Added OpenTherm Gateway binary sensor %s", self._friendly_name)
        self._unsub_updates = async_dispatcher_connect(
            self.hass, self._gateway.update_signal, self.receive_report
        )
    async def async_will_remove_from_hass(self):
        """Unsubscribe from updates from the component."""
        _LOGGER.debug(
            "Removing OpenTherm Gateway binary sensor %s", self._friendly_name
        )
        self._unsub_updates()
    @property
    def available(self):
        """Return availability of the sensor."""
        # Unavailable until the first status report has been received.
        return self._state is not None
    @property
    def entity_registry_enabled_default(self):
        """Disable binary_sensors by default."""
        return False
    @callback
    def receive_report(self, status):
        """Handle status updates from the component."""
        state = status[self._source].get(self._var)
        # Normalize to a bool, preserving None for "no data".
        self._state = None if state is None else bool(state)
        self.async_write_ha_state()
    @property
    def name(self):
        """Return the friendly name."""
        return self._friendly_name
    @property
    def device_info(self) -> DeviceInfo:
        """Return device info."""
        return DeviceInfo(
            identifiers={(DOMAIN, self._gateway.gw_id)},
            manufacturer="Schelte Bron",
            model="OpenTherm Gateway",
            name=self._gateway.name,
            sw_version=self._gateway.gw_version,
        )
    @property
    def unique_id(self):
        """Return a unique ID."""
        # Includes the source; the deprecated subclass uses a source-less format.
        return f"{self._gateway.gw_id}-{self._source}-{self._var}"
    @property
    def is_on(self):
        """Return true if the binary sensor is on."""
        return self._state
    @property
    def device_class(self):
        """Return the class of this device."""
        return self._device_class
    @property
    def should_poll(self):
        """Return False because entity pushes its state."""
        return False
class DeprecatedOpenThermBinarySensor(OpenThermBinarySensor):
    """Represent a deprecated OpenTherm Gateway Binary Sensor."""
    # pylint: disable=super-init-not-called
    def __init__(self, gw_dev, var, device_class, friendly_name_format):
        """Initialize the binary sensor.

        Deliberately does NOT call the parent __init__: the legacy entity id and
        unique id omit the source, which is instead resolved from a lookup table.
        """
        self.entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, f"{var}_{gw_dev.gw_id}", hass=gw_dev.hass
        )
        self._gateway = gw_dev
        self._var = var
        # Legacy sensors have one fixed source per variable.
        self._source = DEPRECATED_BINARY_SENSOR_SOURCE_LOOKUP[var]
        self._state = None
        self._device_class = device_class
        self._friendly_name = friendly_name_format.format(gw_dev.name)
        self._unsub_updates = None
    @property
    def unique_id(self):
        """Return a unique ID."""
        # Legacy format without the source component (cf. the parent class).
        return f"{self._gateway.gw_id}-{self._var}"
| 2,836 |
870 | <reponame>KikiManjaro/accumulo
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.accumulo.core.spi.compaction;
import java.util.Collection;
import org.apache.accumulo.core.client.admin.compaction.CompactableFile;
import org.apache.accumulo.core.spi.compaction.CompactionPlanner.PlanningParameters;
/**
* The return value of {@link CompactionPlanner#makePlan(PlanningParameters)} that is created using
* {@link PlanningParameters#createPlanBuilder()}
*
* @since 2.1.0
* @see CompactionPlanner
* @see org.apache.accumulo.core.spi.compaction
*/
public interface CompactionPlan {
  /**
   * Fluent builder used to assemble a {@link CompactionPlan}.
   *
   * @since 2.1.0
   * @see PlanningParameters#createPlanBuilder()
   */
  interface Builder {
    /**
     * @param priority
     *          This determines the order in which the job is taken off the execution queue. Larger
     *          numbers are taken off the queue first. If two jobs are on the queue, one with a
     *          priority of 4 and another with 5, then the one with 5 will be taken first.
     * @param executor
     *          Where the job should run.
     * @param group
     *          The files to compact.
     * @return this
     */
    Builder addJob(short priority, CompactionExecutorId executor,
        Collection<CompactableFile> group);
    /**
     * @return the finished plan containing every job added via
     *         {@code addJob}
     */
    CompactionPlan build();
  }
  /**
   * Return the set of jobs this plan will submit for compaction.
   */
  Collection<CompactionJob> getJobs();
}
| 688 |
5,169 | <reponame>Gantios/Specs
{
"name": "YHWallet",
"version": "0.3.0",
"summary": "A short description of YHWallet.",
"description": "TODO: Add long description of the pod here.",
"homepage": "https://bitbucket.org/lyjflyingdog/yhwallet",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"KamishiroAlice": "<EMAIL>"
},
"source": {
"git": "https://[email protected]/lyjflyingdog/yhwallet.git",
"tag": "0.3.0"
},
"platforms": {
"ios": "8.0"
},
"vendored_frameworks": [
"YHWallet/FDWallet.framework",
"YHWallet/thirdlib/baiduocr/AipBase.framework",
"YHWallet/thirdlib/baiduocr/AipOcrSdk.framework",
"YHWallet/thirdlib/DKNightVersion.framework"
],
"source_files": "YHWallet/**/*.h",
"dependencies": {
"AFNetworking": [
"~> 3.1.0"
],
"MJExtension": [
],
"YYWebImage": [
],
"ReactiveCocoa": [
"~> 2.5"
],
"Reachability": [
],
"SVProgressHUD": [
"~> 2.2.2"
],
"CYLTabBarController": [
],
"IQKeyboardManager": [
],
"UITableView+FDTemplateLayoutCell": [
],
"MJRefresh": [
]
},
"frameworks": "JavaScriptCore"
}
| 564 |
384 | <filename>jprotobuf-rpc-core-spring-base/src/main/java/com/baidu/jprotobuf/pbrpc/client/ha/lb/strategy/RoundRobinLoadBalanceStrategy.java
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.baidu.jprotobuf.pbrpc.client.ha.lb.strategy;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.baidu.jprotobuf.pbrpc.client.ha.NamingService;
import com.baidu.jprotobuf.pbrpc.registry.RegisterInfo;
/**
 * A weighted round robin strategy implementation for the {@link LoadBalanceStrategy} interface.
 * Each target's load factor determines how many slots it occupies in the rotation; the slot list
 * is rebuilt whenever targets fail, recover, or are re-resolved from the naming service.
 *
 * @author xiemalin
 * @see LoadBalanceStrategy
 * @see NamingServiceLoadBalanceStrategy
 * @since 2.16
 */
public class RoundRobinLoadBalanceStrategy implements NamingServiceLoadBalanceStrategy {

    /** Minimum legal load balance factor; smaller values are clamped up to this. */
    private static final int MIN_LB_FACTOR = 1;

    /** Expanded, weighted slot list that {@link #elect()} cycles through. */
    private List<String> targets;

    /** Cursor into {@link #targets} pointing at the next slot to elect. */
    private int currentPos;

    /** Currently usable targets mapped to their load balance factors. */
    private Map<String, Integer> currentTargets;

    /** Targets removed after a failure, kept with their factors for later recovery. */
    private Map<String, Integer> failedTargets;

    /** The naming service. */
    private NamingService namingService;

    /** Default load factor for targets discovered through the naming service. */
    private static final int DEFAULT_LOAD_FACTOR = 1;

    /** The service signature. */
    private String serviceSignature;

    /**
     * Gets the naming service.
     *
     * @return the naming service
     */
    public NamingService getNamingService() {
        return namingService;
    }

    /**
     * Constructor that resolves the initial target list from the naming service.
     *
     * @param serviceSignature the service signature
     * @param namingService the naming service
     */
    public RoundRobinLoadBalanceStrategy(String serviceSignature, NamingService namingService) {
        this.serviceSignature = serviceSignature;
        this.namingService = namingService;
        doReInit(this.serviceSignature, namingService);
    }

    /**
     * Instantiates a new round robin load balance strategy from explicit factors.
     *
     * @param lbFactors map of target address ("host:port") to load balance factor
     */
    public RoundRobinLoadBalanceStrategy(Map<String, Integer> lbFactors) {
        init(lbFactors);
    }

    /**
     * Initializes the strategy from a server list, assigning each server the default factor.
     *
     * @param servers the servers
     */
    protected void init(List<RegisterInfo> servers) {
        Map<String, Integer> lbFactors = parseLbFactors(servers);
        init(lbFactors);
    }

    /**
     * Builds a factor map ("host:port" -> {@link #DEFAULT_LOAD_FACTOR}) from a server list.
     *
     * @param servers the servers; may be null, yielding an empty map
     * @return the map
     */
    private Map<String, Integer> parseLbFactors(List<RegisterInfo> servers) {
        Map<String, Integer> lbFactors = new HashMap<String, Integer>();
        if (servers == null) {
            return lbFactors;
        }
        for (RegisterInfo address : servers) {
            String serviceUrl = address.getHost() + ":" + address.getPort();
            lbFactors.put(serviceUrl, DEFAULT_LOAD_FACTOR);
        }
        return lbFactors;
    }

    /**
     * Initializes internal state from the factor map and builds the weighted slot list.
     *
     * @param lbFactors the lb factors
     */
    protected synchronized void init(Map<String, Integer> lbFactors) {
        currentTargets = Collections.synchronizedMap(lbFactors);
        failedTargets = Collections.synchronizedMap(new HashMap<String, Integer>(currentTargets.size()));
        reInitTargets(currentTargets);
    }

    /**
     * Rebuilds the weighted slot list and resets the election cursor.
     *
     * @param lbFactors the lb factors
     */
    private void reInitTargets(Map<String, Integer> lbFactors) {
        targets = initTargets(lbFactors);
        if (targets != null) {
            Collections.shuffle(targets); // shuffle the list to random order.
        }
        currentPos = 0;
    }

    /**
     * Elects the next target in round-robin order, wrapping around at the end of the slot list.
     *
     * @return the elected target address
     * @throws RuntimeException if no target is available
     */
    public synchronized String elect() {
        if (targets == null || targets.isEmpty()) {
            throw new RuntimeException("no target is available");
        }
        if (currentPos >= targets.size()) {
            currentPos = 0;
        }
        return targets.get(currentPos++);
    }

    /**
     * Returns the distinct set of currently usable targets.
     *
     * @return the targets; empty if none
     */
    public synchronized Set<String> getTargets() {
        if (targets == null) {
            return new HashSet<String>(0);
        }
        return new HashSet<String>(targets);
    }

    /**
     * Initialize the targets and set weighted value for each target. Factors are divided by their
     * greatest common divisor so each target appears proportionally often with a minimal list size.
     *
     * @param lbFactors the lb factors
     * @return the weighted slot list, or null if no factors were supplied
     */
    public List<String> initTargets(Map<String, Integer> lbFactors) {
        if (lbFactors == null || lbFactors.size() == 0) {
            return null;
        }
        if (lbFactors.size() == 1) { // only one target
            return new ArrayList<String>(lbFactors.keySet());
        }
        fixFactor(lbFactors);
        Collection<Integer> factors = lbFactors.values();
        // get min factor
        int min = Collections.min(factors);
        if (min > MIN_LB_FACTOR) {
            List<Integer> divisors = getDivisors(min);
            int maxDivisor = getMaxDivisor(divisors, factors);
            return buildBalanceTargets(lbFactors, maxDivisor);
        }
        return buildBalanceTargets(lbFactors, MIN_LB_FACTOR);
    }

    /**
     * Gets the largest divisor (from the candidate list) that evenly divides every factor.
     *
     * @param divisors candidate divisors, ordered largest first
     * @param factors the factors
     * @return the max common divisor, or 1 if none divides all factors
     */
    private int getMaxDivisor(List<Integer> divisors, Collection<Integer> factors) {
        for (Integer divisor : divisors) {
            if (canModAll(divisor, factors)) {
                return divisor;
            }
        }
        return 1;
    }

    /**
     * Gets all divisors of the given value, the value itself first, then in descending order.
     *
     * @param value the value
     * @return the divisors; empty if value does not exceed {@link #MIN_LB_FACTOR}
     */
    private List<Integer> getDivisors(int value) {
        if (value <= MIN_LB_FACTOR) {
            return Collections.emptyList();
        }
        int count = value / 2;
        List<Integer> divisors = new ArrayList<Integer>(count + 1);
        divisors.add(value);
        for (; count > 0; count--) {
            if (value % count == 0) {
                divisors.add(count);
            }
        }
        return divisors;
    }

    /**
     * lb factor must be great than 0: clamps every factor below the minimum up to it.
     *
     * @param lbFactors the lb factors
     */
    private void fixFactor(Map<String, Integer> lbFactors) {
        Set<Map.Entry<String, Integer>> setEntries = lbFactors.entrySet();
        for (Map.Entry<String, Integer> entry : setEntries) {
            if (entry.getValue() < MIN_LB_FACTOR) {
                entry.setValue(MIN_LB_FACTOR);
            }
        }
    }

    /**
     * Can do modulo operation for all factors by the base value.
     *
     * @param base the base
     * @param factors the factors
     * @return true, if base divides every factor evenly
     */
    private boolean canModAll(int base, Collection<Integer> factors) {
        for (Integer integer : factors) {
            if (integer % base != 0) {
                return false;
            }
        }
        return true;
    }

    /**
     * Builds the balance targets: each target is repeated (factor / baseFactor) times.
     *
     * @param lbFactors the lb factors
     * @param baseFactor the common divisor used to scale the factors down
     * @return the weighted slot list
     */
    private List<String> buildBalanceTargets(Map<String, Integer> lbFactors, int baseFactor) {
        Set<Map.Entry<String, Integer>> setEntries = lbFactors.entrySet();
        int factor;
        List<String> targets = new LinkedList<String>();
        for (Map.Entry<String, Integer> entry : setEntries) {
            factor = entry.getValue() / baseFactor;
            for (int i = 0; i < factor; i++) {
                targets.add(entry.getKey());
            }
        }
        return targets;
    }

    /**
     * Moves a target to the failed set and rebuilds the slot list without it.
     *
     * @param key the target address to remove
     */
    public synchronized void removeTarget(String key) {
        if (currentTargets.containsKey(key)) {
            failedTargets.put(key, currentTargets.get(key));
            currentTargets.remove(key);
            reInitTargets(currentTargets);
        }
    }

    /**
     * Moves a previously failed target back into rotation and rebuilds the slot list.
     *
     * @param key the target address to recover
     */
    public synchronized void recoverTarget(String key) {
        if (failedTargets.containsKey(key)) {
            currentTargets.put(key, failedTargets.get(key));
            failedTargets.remove(key);
            reInitTargets(currentTargets);
        }
    }

    /**
     * @return true if at least one target is currently electable
     */
    public boolean hasTargets() {
        return (getTargets() != null && !getTargets().isEmpty());
    }

    /**
     * @return the addresses of all currently failed targets
     */
    public Set<String> getFailedTargets() {
        return failedTargets.keySet();
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * com.baidu.jprotobuf.pbrpc.client.ha.lb.strategy.NamingServiceLoadBalanceStrategy#doReInit(com.baidu.jprotobuf
     * .pbrpc.client.ha.NamingService)
     */
    @Override
    public void doReInit(String serviceSignagure, NamingService namingService) {
        // get server list from NamingService
        Set<String> serviceSignatures = new HashSet<String>();
        serviceSignatures.add(serviceSignagure);
        List<RegisterInfo> servers;
        try {
            servers = namingService.list(serviceSignatures).get(serviceSignagure);
        } catch (Exception e) {
            throw new RuntimeException(e.getMessage(), e);
        }
        init(servers);
    }
}
| 4,450 |
1,755 | /*=========================================================================
Program: Visualization Toolkit
Module: vtkRectilinearGridPartitioner.cxx
Copyright (c) <NAME>, <NAME>, <NAME>
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================*/
#include "vtkRectilinearGridPartitioner.h"
// VTK includes
#include "vtkDoubleArray.h"
#include "vtkExtentRCBPartitioner.h"
#include "vtkIndent.h"
#include "vtkInformation.h"
#include "vtkInformationVector.h"
#include "vtkMultiBlockDataSet.h"
#include "vtkObjectFactory.h"
#include "vtkRectilinearGrid.h"
#include "vtkStreamingDemandDrivenPipeline.h"
#include "vtkStructuredData.h"
#include "vtkStructuredExtent.h"
#include <cassert>
vtkStandardNewMacro(vtkRectilinearGridPartitioner);
//------------------------------------------------------------------------------
vtkRectilinearGridPartitioner::vtkRectilinearGridPartitioner()
{
  // Single rectilinear-grid input, single multi-block output.
  this->SetNumberOfInputPorts(1);
  this->SetNumberOfOutputPorts(1);

  // Partitioning defaults: split in two, no ghost layers, and duplicate the
  // nodes that lie on partition boundaries.
  this->NumberOfPartitions = 2;
  this->NumberOfGhostLayers = 0;
  this->DuplicateNodes = 1;
}
//------------------------------------------------------------------------------
// Defaulted: this filter holds no resources beyond what the superclass manages.
vtkRectilinearGridPartitioner::~vtkRectilinearGridPartitioner() = default;
//------------------------------------------------------------------------------
// Prints this filter's instance variables (VTK diagnostic convention).
void vtkRectilinearGridPartitioner::PrintSelf(std::ostream& oss, vtkIndent indent)
{
  this->Superclass::PrintSelf(oss, indent);
  oss << "NumberOfPartitions: " << this->NumberOfPartitions << std::endl;
  oss << "NumberOfGhostLayers: " << this->NumberOfGhostLayers << std::endl;
  // Fix: DuplicateNodes is the third ivar this filter owns (see the
  // constructor) but was previously omitted from the printed state.
  oss << "DuplicateNodes: " << this->DuplicateNodes << std::endl;
}
//------------------------------------------------------------------------------
// Declares that the single input port only accepts vtkRectilinearGrid data.
int vtkRectilinearGridPartitioner::FillInputPortInformation(
  int vtkNotUsed(port), vtkInformation* info)
{
  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkRectilinearGrid");
  return 1;
}
//------------------------------------------------------------------------------
// Declares that the single output port produces a vtkMultiBlockDataSet.
int vtkRectilinearGridPartitioner::FillOutputPortInformation(
  int vtkNotUsed(port), vtkInformation* info)
{
  info->Set(vtkDataObject::DATA_TYPE_NAME(), "vtkMultiBlockDataSet");
  return 1;
}
//------------------------------------------------------------------------------
// Copies the x/y/z coordinate sub-ranges covered by `subext` out of `grd`
// into the three supplied (pre-allocated, empty) coordinate arrays.
void vtkRectilinearGridPartitioner::ExtractGridCoordinates(vtkRectilinearGrid* grd, int subext[6],
  vtkDoubleArray* xcoords, vtkDoubleArray* ycoords, vtkDoubleArray* zcoords)
{
  assert("pre: nullptr rectilinear grid" && (grd != nullptr));
  assert("pre: nullptr xcoords" && (xcoords != nullptr));
  assert("pre: nullptr ycoords" && (ycoords != nullptr));
  assert("pre: nullptr zcoords" && (zcoords != nullptr));

  // Number of points along each axis of the sub-extent.
  int dataDescription = vtkStructuredData::GetDataDescriptionFromExtent(subext);
  int ndims[3];
  vtkStructuredData::GetDimensionsFromExtent(subext, ndims, dataDescription);

  // Pair each destination array with its source coordinate array.
  vtkDoubleArray* dest[3] = { xcoords, ycoords, zcoords };
  vtkDataArray* src[3] = { grd->GetXCoordinates(), grd->GetYCoordinates(),
    grd->GetZCoordinates() };

  for (int axis = 0; axis < 3; ++axis)
  {
    dest[axis]->SetNumberOfComponents(1);
    dest[axis]->SetNumberOfTuples(ndims[axis]);
    const int lo = subext[axis * 2];
    const int hi = subext[axis * 2 + 1];
    for (int i = lo; i <= hi; ++i)
    {
      // Shift from the global grid index to the sub-grid's local index space.
      dest[axis]->SetTuple1(i - lo, src[axis]->GetTuple1(i));
    }
  }
}
//------------------------------------------------------------------------------
// Partitions the input rectilinear grid into NumberOfPartitions sub-grids and
// stores them as blocks of the output vtkMultiBlockDataSet, recording each
// piece extent in the block metadata.
int vtkRectilinearGridPartitioner::RequestData(vtkInformation* vtkNotUsed(request),
  vtkInformationVector** inputVector, vtkInformationVector* outputVector)
{
  // STEP 0: Get input object
  vtkInformation* input = inputVector[0]->GetInformationObject(0);
  assert("pre: input information object is nullptr" && (input != nullptr));
  vtkRectilinearGrid* grd =
    vtkRectilinearGrid::SafeDownCast(input->Get(vtkDataObject::DATA_OBJECT()));
  if (grd == nullptr)
  {
    // Fix: SafeDownCast returns nullptr when the upstream data object is not
    // a vtkRectilinearGrid; fail gracefully instead of dereferencing below.
    vtkErrorMacro(<< "Input data object is not a vtkRectilinearGrid!");
    return 0;
  }

  // STEP 1: Get output object
  vtkInformation* output = outputVector->GetInformationObject(0);
  assert("pre: output information object is nullptr" && (output != nullptr));
  vtkMultiBlockDataSet* multiblock =
    vtkMultiBlockDataSet::SafeDownCast(output->Get(vtkDataObject::DATA_OBJECT()));
  assert("pre: multi-block grid is nullptr" && (multiblock != nullptr));

  // STEP 2: Get the global extent
  int extent[6];
  grd->GetExtent(extent);

  // STEP 3: Setup extent partitioner
  vtkExtentRCBPartitioner* extentPartitioner = vtkExtentRCBPartitioner::New();
  assert("pre: extent partitioner is nullptr" && (extentPartitioner != nullptr));
  extentPartitioner->SetGlobalExtent(extent);
  extentPartitioner->SetNumberOfPartitions(this->NumberOfPartitions);
  extentPartitioner->SetNumberOfGhostLayers(this->NumberOfGhostLayers);
  if (this->DuplicateNodes == 1)
  {
    extentPartitioner->DuplicateNodesOn();
  }
  else
  {
    extentPartitioner->DuplicateNodesOff();
  }

  // STEP 4: Partition
  extentPartitioner->Partition();

  // STEP 5: Extract each partition into a block of the multi-block output.
  multiblock->SetNumberOfBlocks(extentPartitioner->GetNumExtents());

  // Set the whole extent of the grid
  multiblock->GetInformation()->Set(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), extent, 6);

  int subext[6];
  for (unsigned int blockIdx = 0; blockIdx < multiblock->GetNumberOfBlocks(); ++blockIdx)
  {
    extentPartitioner->GetPartitionExtent(blockIdx, subext);
    vtkRectilinearGrid* subgrid = vtkRectilinearGrid::New();
    subgrid->SetExtent(subext);

    // Copy the coordinate sub-ranges for this partition; the sub-grid takes
    // its own reference, so release ours afterwards.
    vtkDoubleArray* xcoords = vtkDoubleArray::New();
    vtkDoubleArray* ycoords = vtkDoubleArray::New();
    vtkDoubleArray* zcoords = vtkDoubleArray::New();
    this->ExtractGridCoordinates(grd, subext, xcoords, ycoords, zcoords);
    subgrid->SetXCoordinates(xcoords);
    subgrid->SetYCoordinates(ycoords);
    subgrid->SetZCoordinates(zcoords);
    xcoords->Delete();
    ycoords->Delete();
    zcoords->Delete();

    // Record the piece extent in the block metadata for downstream filters.
    vtkInformation* metadata = multiblock->GetMetaData(blockIdx);
    assert("pre: metadata is nullptr" && (metadata != nullptr));
    metadata->Set(vtkDataObject::PIECE_EXTENT(), subext, 6);

    multiblock->SetBlock(blockIdx, subgrid);
    subgrid->Delete();
  } // END for all blocks

  extentPartitioner->Delete();
  return 1;
}
| 2,161 |
1,602 | //===-- NVPTXMCAsmInfo.cpp - NVPTX asm properties -------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains the declarations of the NVPTXMCAsmInfo properties.
//
//===----------------------------------------------------------------------===//
#include "NVPTXMCAsmInfo.h"
#include "llvm/ADT/Triple.h"
using namespace llvm;
// Out-of-line virtual method definition to pin the vtable to this file.
void NVPTXMCAsmInfo::anchor() {}
// Configures assembly-printing properties for the PTX virtual ISA.
NVPTXMCAsmInfo::NVPTXMCAsmInfo(const Triple &TheTriple,
                               const MCTargetOptions &Options) {
  // Pointers (and callee-save stack slots) are 8 bytes on the 64-bit target.
  if (TheTriple.getArch() == Triple::nvptx64)
    CodePointerSize = CalleeSaveStackSlotSize = 8;

  // PTX comment and inline-asm delimiters.
  CommentString = "//";
  InlineAsmStart = " begin inline asm";
  InlineAsmEnd = " end inline asm";

  SupportsDebugInformation = true;
  HasSingleParameterDotFile = false;

  // PTX does not allow .align on functions.
  HasFunctionAlignment = false;
  HasDotTypeDotSizeDirective = false;

  // PTX does not allow .hidden or .protected.
  HiddenDeclarationVisibilityAttr = HiddenVisibilityAttr = MCSA_Invalid;
  ProtectedVisibilityAttr = MCSA_Invalid;

  // Data emission directives; widths PTX cannot express are disabled.
  Data8bitsDirective = ".b8 ";
  Data16bitsDirective = nullptr; // not supported
  Data32bitsDirective = ".b32 ";
  Data64bitsDirective = ".b64 ";
  ZeroDirective = ".b8";
  AsciiDirective = nullptr; // not supported
  AscizDirective = nullptr; // not supported

  SupportsQuotedNames = false;
  SupportsExtendedDwarfLocDirective = false;

  // Emitted only as comments. @TODO: Can we just disable this?
  WeakDirective = "\t// .weak\t";
  GlobalDirective = "\t// .globl\t";

  UseIntegratedAssembler = false;
}
| 594 |
14,668 | <gh_stars>1000+
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef NET_LOG_NET_LOG_EVENT_TYPE_H_
#define NET_LOG_NET_LOG_EVENT_TYPE_H_
namespace net {
// Generated via the X-macro list in net_log_event_type_list.h: each
// EVENT_TYPE(label) entry there expands to one enumerator here.
enum class NetLogEventType {
#define EVENT_TYPE(label) label,
#include "net/log/net_log_event_type_list.h"
#undef EVENT_TYPE
  // Number of event types; must stay the last enumerator.
  COUNT
};
// The 'phase' of an event trace (whether it marks the beginning or end
// of an event.).
enum class NetLogEventPhase {
  NONE,   // Event has no begin/end pairing.
  BEGIN,  // Marks the beginning of an event.
  END,    // Marks the end of an event.
};
} // namespace net
#endif // NET_LOG_NET_LOG_EVENT_TYPE_H_
| 224 |
9,136 | <reponame>ptrbortolotti/WISDEM
/*
Copyright (c) 2011, Intel Corporation. All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Intel Corporation nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
********************************************************************************
* Content : Eigen bindings to Intel(R) MKL
* Include file with common MKL declarations
********************************************************************************
*/
#ifndef EIGEN_MKL_SUPPORT_H
#define EIGEN_MKL_SUPPORT_H
#ifdef EIGEN_USE_MKL_ALL
#ifndef EIGEN_USE_BLAS
#define EIGEN_USE_BLAS
#endif
#ifndef EIGEN_USE_LAPACKE
#define EIGEN_USE_LAPACKE
#endif
#ifndef EIGEN_USE_MKL_VML
#define EIGEN_USE_MKL_VML
#endif
#endif
#ifdef EIGEN_USE_LAPACKE_STRICT
#define EIGEN_USE_LAPACKE
#endif
#if defined(EIGEN_USE_MKL_VML) && !defined(EIGEN_USE_MKL)
#define EIGEN_USE_MKL
#endif
#if defined EIGEN_USE_MKL
# include <mkl.h>
/*Check IMKL version for compatibility: < 10.3 is not usable with Eigen*/
# ifndef INTEL_MKL_VERSION
# undef EIGEN_USE_MKL /* INTEL_MKL_VERSION is not even defined on older versions */
# elif INTEL_MKL_VERSION < 100305 /* the intel-mkl-103-release-notes say this was when the lapacke.h interface was added*/
# undef EIGEN_USE_MKL
# endif
# ifndef EIGEN_USE_MKL
/*If the MKL version is too old, undef everything*/
# undef EIGEN_USE_MKL_ALL
# undef EIGEN_USE_LAPACKE
# undef EIGEN_USE_MKL_VML
# undef EIGEN_USE_LAPACKE_STRICT
# undef EIGEN_USE_LAPACKE
# endif
#endif
#if defined EIGEN_USE_MKL
#define EIGEN_MKL_VML_THRESHOLD 128
/* MKL_DOMAIN_BLAS, etc are defined only in 10.3 update 7 */
/* MKL_BLAS, etc are not defined in 11.2 */
#ifdef MKL_DOMAIN_ALL
#define EIGEN_MKL_DOMAIN_ALL MKL_DOMAIN_ALL
#else
#define EIGEN_MKL_DOMAIN_ALL MKL_ALL
#endif
#ifdef MKL_DOMAIN_BLAS
#define EIGEN_MKL_DOMAIN_BLAS MKL_DOMAIN_BLAS
#else
#define EIGEN_MKL_DOMAIN_BLAS MKL_BLAS
#endif
#ifdef MKL_DOMAIN_FFT
#define EIGEN_MKL_DOMAIN_FFT MKL_DOMAIN_FFT
#else
#define EIGEN_MKL_DOMAIN_FFT MKL_FFT
#endif
#ifdef MKL_DOMAIN_VML
#define EIGEN_MKL_DOMAIN_VML MKL_DOMAIN_VML
#else
#define EIGEN_MKL_DOMAIN_VML MKL_VML
#endif
#ifdef MKL_DOMAIN_PARDISO
#define EIGEN_MKL_DOMAIN_PARDISO MKL_DOMAIN_PARDISO
#else
#define EIGEN_MKL_DOMAIN_PARDISO MKL_PARDISO
#endif
#endif
#if defined(EIGEN_USE_BLAS) && !defined(EIGEN_USE_MKL)
#include "../../misc/blas.h"
#endif
namespace Eigen {

// Complex scalar aliases following the BLAS/LAPACK naming convention
// (d = double precision, s = single precision).
typedef std::complex<double> dcomplex;
typedef std::complex<float> scomplex;

// Index type passed to BLAS routines: MKL_INT when MKL supplies the BLAS,
// plain int for the reference interface included from ../../misc/blas.h.
#if defined(EIGEN_USE_MKL)
typedef MKL_INT BlasIndex;
#else
typedef int BlasIndex;
#endif

} // end namespace Eigen
#endif // EIGEN_MKL_SUPPORT_H
| 1,527 |
892 | <reponame>westonsteimel/advisory-database-github
{
"schema_version": "1.2.0",
"id": "GHSA-x63c-6qqq-h574",
"modified": "2022-05-01T07:36:24Z",
"published": "2022-05-01T07:36:24Z",
"aliases": [
"CVE-2006-6266"
],
"details": "Teredo clients, when following item 6 of RFC4380 section 5.2.3, start direct IPv6 connectivity tests (aka ping tests) in response to packets from non-Teredo source addresses, which might allow remote attackers to induce Teredo clients to send packets to third parties.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2006-6266"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/452989/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/452996/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.symantec.com/avcenter/reference/Teredo_Security.pdf"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 492 |
1,723 | #!/usr/bin/env python3
from faker import Faker
from flask import Flask
from google.cloud import pubsub_v1
import datetime
import logging
import os
import random
import uuid
# Pub/Sub destination; replace the placeholders before deploying.
PUBSUB_TOPIC_NAME = 'YOUR_TOPIC_NAME'
PROJECT_ID = 'YOUR_PROJECT_ID'

# Upper bound (inclusive) for random device ids.
DEVICE_ID_VALUES = 50000000
# NOTE(review): not referenced in the visible code -- the '-30d' literals used
# with fake.unix_time() below appear to hard-code this value; confirm.
TIMESTAMP_RANGE_DAYS = 30

# Number of fields of type string.
RANDOM_STR_FIELDS = 200

# Set and subset size for calculating string permutations based on ascii
# characters from value 97 (character 'a') to value 122 (character 'z').
# PERMUTATION_SET_SIZE and PERMUTATION_SUBSET_SIZE should not be larger than 23
# PERMUTATION_SUBSET_SIZE should be smaller than PERMUTATION_SET_SIZE.
PERMUTATION_SET_SIZE = 7
PERMUTATION_SUBSET_SIZE = 5

# Number of fields of type integer and number of possible random values.
RANDOM_INT_FIELDS = 200
RANDOM_INT_VALUES = 30

# Number of fields of type string that will be nested and number of levels.
# The number of possible values will be taken from RANDOM_STRING_FIELDS.
NESTED_STR_FIELDS = 20
NESTED_STR_LEVELS = 6

# Number of fields of type integer that will be nested and number of levels.
# The number of possible values will be taken from RANDOM_INT_FIELDS.
NESTED_INT_FIELDS = 3
NESTED_INT_LEVELS = 2

# Create Pub/Sub publisher client. Messages are batched: sent after at most
# 1 second of latency or 10 buffered messages, whichever comes first.
batch_settings = pubsub_v1.types.BatchSettings(
    max_latency=1, max_messages=10
)
publisher = pubsub_v1.PublisherClient(batch_settings)
topic_name = 'projects/{project_id}/topics/{topic}'.format(
    project_id = PROJECT_ID,
    topic = PUBSUB_TOPIC_NAME
)

# Create Faker client. Seeding makes the generated stream reproducible.
fake = Faker()
Faker.seed(0)
# Auxiliary functions to generate random data.
def generate_string(set_size, subset_size):
    """Return a random string of length `subset_size` drawn from an alphabet
    of `set_size` lowercase ascii letters starting at 'a'.

    Bug fix: the original ignored `set_size` entirely and derived the
    alphabet bound from `subset_size` (`max=97 + subset_size`), so
    PERMUTATION_SET_SIZE had no effect on the generated values.
    """
    string = ''
    for _ in range(subset_size):
        # Inclusive range 'a' .. chr(96 + set_size), i.e. `set_size` letters.
        string += chr(fake.random_int(min=97, max=97 + set_size - 1, step=1))
    return string
def generate_int_field(field_number):
    """Return a one-entry dict: 'int_field_<n>' -> random int in [0, RANDOM_INT_VALUES]."""
    value = fake.random_int(min=0, max=RANDOM_INT_VALUES, step=1)
    return {f'int_field_{field_number}': value}
def generate_str_field(field_number):
    """Return a one-entry dict: 'str_field_<n>' -> random permutation string."""
    value = generate_string(PERMUTATION_SET_SIZE, PERMUTATION_SUBSET_SIZE)
    return {f'str_field_{field_number}': value}
def generate_nested_int_field(field_number):
    """Entry point: build the nested int field for `field_number`, starting at level 0."""
    return _generate_nested_int_field(field_number, 0)
def _generate_nested_int_field(field_number, level):
    """Recursive helper: wrap a random int under NESTED_INT_LEVELS dict levels."""
    key = f'nested_int_field_{field_number}_level_{level}'
    if level == NESTED_INT_LEVELS:
        # Innermost level holds the actual random value.
        value = fake.random_int(min=0, max=RANDOM_INT_VALUES, step=1)
    else:
        value = _generate_nested_int_field(field_number, level + 1)
    return {key: value}
def generate_nested_str_field(field_number):
    """Entry point: build the nested string field for `field_number`, starting at level 0."""
    return _generate_nested_str_field(field_number, 0)
def _generate_nested_str_field(field_number, level):
    """Recursive helper: wrap a random string under NESTED_STR_LEVELS dict levels."""
    key = f'nested_str_field_{field_number}_level_{level}'
    if level == NESTED_STR_LEVELS:
        # Innermost level holds the actual random string.
        value = generate_string(PERMUTATION_SET_SIZE, PERMUTATION_SUBSET_SIZE)
    else:
        value = _generate_nested_str_field(field_number, level + 1)
    return {key: value}
# Main function to create a record with random data.
def fake_record():
    """Build one randomized device record.

    Combines identity/metadata fields (device id, timestamps, ip, country,
    user name) with the configured number of flat and nested int/string
    fields.

    Fix: removed the unused `unix_time` and `millis` locals the original
    computed and discarded.
    """
    record = {
        'device_id': fake.random_int(min=0, max=DEVICE_ID_VALUES, step=1),
        'random_timestamp': fake.unix_time(start_datetime='-30d',
                                           end_datetime='now'),
        'current_timestamp': datetime.datetime.now().timestamp(),
        'row_id': str(uuid.uuid4()),
        'ip': fake.ipv4(),
        'country': fake.country(),
        'user_name': fake.name()
    }
    for field_number in range(0, RANDOM_INT_FIELDS):
        record.update(generate_int_field(field_number))
    for field_number in range(0, RANDOM_STR_FIELDS):
        record.update(generate_str_field(field_number))
    for field_number in range(0, NESTED_INT_FIELDS):
        record.update(generate_nested_int_field(field_number))
    for field_number in range(0, NESTED_STR_FIELDS):
        record.update(generate_nested_str_field(field_number))
    return record
# Publish a message to Pub/Sub.
def publish(message):
    """Publish `message` (stringified, utf-8 encoded) to the configured topic.

    NOTE(review): calling future.result() blocks until the message is sent,
    which effectively defeats the batching settings configured above --
    confirm this is intentional.
    """
    future = publisher.publish(topic_name, str.encode(str(message)))
    print('pubsub published')
    print(future.result())
# Create Flask client.
app = Flask(__name__)


@app.route('/publish', methods=['GET'])
def publish_route():
    """Generate one fake record, publish it, and echo it back to the caller."""
    record = fake_record()
    publish(record)
    return f'Message published to Pub/Sub {record}'
if __name__ == '__main__':
    # Development entry point; in production run under a WSGI server instead.
    app.run(host='0.0.0.0', port=8080, debug=True)
| 1,935 |
311 | #
# Copyright [2020] JD.com, Inc. TIG. ChubaoStream team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import subprocess
from shlex import split
class Shell:
    """Run bash scripts locally or on remote hosts via ssh/scp.

    Every executed script and its output are logged at DEBUG level to
    ``log.txt`` in the current working directory.
    """

    def __init__(self):
        """Set up the shared module logger.

        Fix: ``logging.getLogger(__name__)`` returns one shared logger, so the
        original attached an extra FileHandler per Shell instance, duplicating
        every log line. A handler is now attached only once.
        """
        self.__logger = logging.getLogger(__name__)
        self.__logger.setLevel(logging.DEBUG)
        if not self.__logger.handlers:
            handler = logging.FileHandler("log.txt")
            handler.setLevel(logging.DEBUG)
            formatter = logging.Formatter(
                '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
            handler.setFormatter(formatter)
            self.__logger.addHandler(handler)

    def __run_script(self, bash, script):
        """Pipe `script` into the `bash` command line (an argv list).

        Returns:
            tuple: (returncode, stdout_bytes, stderr) -- stderr is always
            None because it is merged into stdout.
        """
        self.__logger.debug('Script to execute:\n%s\n', script)
        with subprocess.Popen(
                bash,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT
        ) as proc:
            outs, errs = proc.communicate(script.encode('utf-8'))
            code = proc.returncode
            self.__logger.debug('Return code = %s', code)
            self.__logger.debug('The output is as following:\n%s',
                                str(outs, encoding='utf-8'))
        return code, outs, errs

    def run_local_script(self, script):
        """Run `script` under /bin/bash on the local machine."""
        return self.__run_script(['/bin/bash'], script)

    def run_remote_script(self, remote, script, port=22, user='root'):
        """Run `script` on `remote` through ssh; /etc/profile is sourced first."""
        ssh = 'ssh {}@{} -p {}'.format(user, remote, port)
        bash = split(ssh) + ['/bin/bash']
        script = """
source /etc/profile
""" + script
        return self.__run_script(bash, script)

    def local_scp(self, file, remote, target_dir, port=22, user='root'):
        """Copy the local `file` to `target_dir` on `remote` via scp."""
        scp = """ scp -P {} {} {}@{}:{} """.format(port, file, user, remote, target_dir)
        return self.run_local_script(scp)

    def remote_scp_dir(self, remote, remote_dir, file, local_dir, port=22, user='root'):
        """Fetch `remote_dir`/`file` from `remote` into the local `local_dir`."""
        scp = """
scp -P {port} {user}@{remote}:{remote_dir}/{file} {local_dir}
""".format(port=port,
           remote_dir=remote_dir,
           file=file,
           local_dir=local_dir,
           remote=remote, user=user,)
        return self.run_local_script(scp)
| 1,210 |
672 | <reponame>apple-open-source-mirror/libplatform
/*
* Copyright (c) 2008-2013 Apple Inc. All rights reserved.
*
* @APPLE_APACHE_LICENSE_HEADER_START@
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* @APPLE_APACHE_LICENSE_HEADER_END@
*/
#ifndef __OS_SEMAPHORE_PRIVATE__
#define __OS_SEMAPHORE_PRIVATE__
#include <Availability.h>
#include <stdint.h>
#include <os/base_private.h>
#include <os/tsd.h>
OS_ASSUME_NONNULL_BEGIN
__BEGIN_DECLS
/* Version stamp for this SPI surface; bump when the contract changes. */
#define OS_SEMAPHORE_SPI_VERSION 20130313
/* Opaque semaphore handle; pointer-sized integer, 0 means "no semaphore". */
typedef uintptr_t os_semaphore_t;
/* Create a new semaphore. Caller owns it and must eventually dispose it. */
__OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0)
OS_EXPORT OS_WARN_RESULT OS_NOTHROW
os_semaphore_t _os_semaphore_create(void);
/* Destroy a semaphore previously returned by _os_semaphore_create. */
__OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0)
OS_EXPORT OS_NOTHROW
void _os_semaphore_dispose(os_semaphore_t);
/* Block until the semaphore is signaled. */
__OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0)
OS_EXPORT OS_NOTHROW
void _os_semaphore_wait(os_semaphore_t);
/* Wake one waiter on the semaphore. */
__OSX_AVAILABLE_STARTING(__MAC_10_9,__IPHONE_7_0)
OS_EXPORT OS_NOTHROW
void _os_semaphore_signal(os_semaphore_t);
/*
 * Return a semaphore for the calling thread, reusing the one cached in
 * thread-specific data when available. The cache holds at most one
 * semaphore per thread; no locking is needed because TSD is thread-local.
 */
OS_WARN_RESULT OS_NOTHROW
__header_always_inline os_semaphore_t
os_get_cached_semaphore(void)
{
	os_semaphore_t sema;
	sema = (os_semaphore_t)_os_tsd_get_direct(__TSD_SEMAPHORE_CACHE);
	if (os_unlikely(!sema)) {
		/* Cache miss: fall back to creating a fresh semaphore. */
		return _os_semaphore_create();
	}
	/* Claim the cached semaphore by clearing the slot before returning. */
	_os_tsd_set_direct(__TSD_SEMAPHORE_CACHE, 0);
	return sema;
}
/*
 * Return a semaphore to the calling thread's one-slot cache.
 * If the slot was already occupied, the previously cached semaphore is
 * displaced and destroyed, keeping at most one cached semaphore per thread.
 */
OS_NOTHROW
__header_always_inline void
os_put_cached_semaphore(os_semaphore_t sema)
{
	os_semaphore_t old_sema;
	old_sema = (os_semaphore_t)_os_tsd_get_direct(__TSD_SEMAPHORE_CACHE);
	_os_tsd_set_direct(__TSD_SEMAPHORE_CACHE, (void*)sema);
	if (os_unlikely(old_sema)) {
		/* Slot was full: dispose the displaced semaphore. */
		return _os_semaphore_dispose(old_sema);
	}
}
/* Thin inline convenience wrapper over the exported wait SPI entry point. */
OS_NOTHROW
__header_always_inline void
os_semaphore_wait(os_semaphore_t sema)
{
	_os_semaphore_wait(sema);
}
/* Thin inline convenience wrapper over the exported signal SPI entry point. */
OS_NOTHROW
__header_always_inline void
os_semaphore_signal(os_semaphore_t sema)
{
	_os_semaphore_signal(sema);
}
__END_DECLS
OS_ASSUME_NONNULL_END
#endif // __OS_SEMAPHORE_PRIVATE__
| 1,063 |
16,461 | // Copyright 2017-present 650 Industries. All rights reserved.
#import <AVFoundation/AVFoundation.h>
#import <ABI42_0_0UMCore/ABI42_0_0UMModuleRegistryConsumer.h>
#import <ABI42_0_0UMCore/ABI42_0_0UMAppLifecycleListener.h>
#import <ABI42_0_0UMCore/ABI42_0_0UMExportedModule.h>
#import <ABI42_0_0UMCore/ABI42_0_0UMEventEmitter.h>
#import <ABI42_0_0EXAV/ABI42_0_0EXAVObject.h>
// Policy for how this app's audio coexists with audio from other apps.
// NOTE(review): the three values are mutually exclusive selections, not
// combinable bit flags, so NS_ENUM would express the intent more precisely —
// but this is versioned (ABI42_0_0) generated code; confirm before changing.
typedef NS_OPTIONS(NSUInteger, ABI42_0_0EXAudioInterruptionMode)
{
  ABI42_0_0EXAudioInterruptionModeMixWithOthers = 0,
  ABI42_0_0EXAudioInterruptionModeDoNotMix = 1,
  ABI42_0_0EXAudioInterruptionModeDuckOthers = 2
};
// Bit-rate encoding strategy for audio recording.
// NOTE(review): presumably mirrors the AVAudioRecorder bit-rate strategy
// constants (constant / long-term-average / variable-constrained / variable)
// — confirm the mapping in the implementation before relying on it.
// Like the enum above, these are exclusive values despite NS_OPTIONS.
typedef NS_OPTIONS(NSUInteger, ABI42_0_0EXAudioRecordingOptionBitRateStrategy)
{
  ABI42_0_0EXAudioRecordingOptionBitRateStrategyConstant = 0,
  ABI42_0_0EXAudioRecordingOptionBitRateStrategyLongTermAverage = 1,
  ABI42_0_0EXAudioRecordingOptionBitRateStrategyVariableConstrained = 2,
  ABI42_0_0EXAudioRecordingOptionBitRateStrategyVariable = 3
};
// Delegate that manages the shared audio session on behalf of scoped modules.
@protocol ABI42_0_0EXAVScopedModuleDelegate
// Call this when your module knows it won't use the audio session now
// but may do in the future (settings persist).
- (void)moduleDidBackground:(id)module;
// Call this when your module knows it will use the audio session now.
- (void)moduleDidForeground:(id)module;
// Call this when your module knows it won't use the audio session now
// or in the future (forget settings).
- (void)moduleWillDeallocate:(id)module;
// Whether the audio session is currently active on behalf of `module`.
- (BOOL)isActiveForModule:(id)module;
// The audio session category currently in effect.
- (NSString *)activeCategory;
// The options applied to the currently active category.
- (AVAudioSessionCategoryOptions)activeCategoryOptions;
// Activate/deactivate the session for `module`; returns an NSError on
// failure (presumably nil on success — confirm with the implementation).
- (NSError *)setActive:(BOOL)active forModule:(id)module;
// Set the session category and options for `module`; error semantics as above.
- (NSError *)setCategory:(NSString *)category withOptions:(AVAudioSessionCategoryOptions)options forModule:(id)module;
@end
// Audio-session lifecycle hooks exposed to AV objects (players/recorders).
@protocol ABI42_0_0EXAVInterface
// Ensure the audio session is active/configured before playback or recording;
// returns an NSError on failure.
- (NSError *)promoteAudioSessionIfNecessary;
// Release the audio session when no registered object needs it anymore;
// returns an NSError on failure.
- (NSError *)demoteAudioSessionIfPossible;
// Register/unregister a video object so its audio needs are tracked as part
// of the session lifecycle.
- (void)registerVideoForAudioLifecycle:(NSObject<ABI42_0_0EXAVObject> *)video;
- (void)unregisterVideoForAudioLifecycle:(NSObject<ABI42_0_0EXAVObject> *)video;
@end
// Exported module that centralizes audio/video session handling. It emits
// events, tracks app lifecycle, and reacts to system audio notifications.
@interface ABI42_0_0EXAV : ABI42_0_0UMExportedModule <ABI42_0_0UMEventEmitter, ABI42_0_0UMAppLifecycleListener, ABI42_0_0UMModuleRegistryConsumer, ABI42_0_0EXAVInterface>
// Handler for AVAudioSessionMediaServicesWereResetNotification.
- (void)handleMediaServicesReset:(NSNotification *)notification;
// Handler for AVAudioSessionInterruptionNotification.
- (void)handleAudioSessionInterruption:(NSNotification *)notification;
@end
| 888 |
362 | <gh_stars>100-1000
// Copyright (c) 2018-2020, <NAME>. For more information see 'LICENSE'
#include "VBuffer.h"
#include "VEnumCast.h"
#include "FGEnumCast.h"
#include "VDevice.h"
#include "VMemoryObj.h"
#include "VResourceManager.h"
namespace FG
{
/*
=================================================
destructor
=================================================
*/
VBuffer::~VBuffer ()
{
    // Destroy() must have been called by the resource manager before the
    // object dies; a live Vulkan handle or memory id here is a leak.
    ASSERT( _buffer == VK_NULL_HANDLE );
    ASSERT( not _memoryId );
}
/*
=================================================
GetAllBufferReadAccessMasks
=================================================
*/
// Collect every read-type access flag implied by the given buffer usage.
// Write-only usages (e.g. transfer dst, transform feedback writes) add
// nothing to the mask.
static VkAccessFlagBits GetAllBufferReadAccessMasks (VkBufferUsageFlags usage)
{
    VkAccessFlagBits result = Zero;

    // 't != 0' guards against an infinite loop: if 'usage' ever contained
    // the top bit, 't <<= 1' would wrap to zero and '0 <= usage' would stay
    // true forever.
    for (VkBufferUsageFlags t = 1; t != 0 and t <= usage; t <<= 1)
    {
        if ( not AllBits( usage, t ))
            continue;

        BEGIN_ENUM_CHECKS();
        switch ( VkBufferUsageFlagBits(t) )
        {
            case VK_BUFFER_USAGE_TRANSFER_SRC_BIT : result |= VK_ACCESS_TRANSFER_READ_BIT; break;
            case VK_BUFFER_USAGE_TRANSFER_DST_BIT : break;
            case VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT : result |= VK_ACCESS_SHADER_READ_BIT; break;
            case VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT : result |= VK_ACCESS_SHADER_READ_BIT; break;
            case VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT : result |= VK_ACCESS_UNIFORM_READ_BIT; break;
            case VK_BUFFER_USAGE_STORAGE_BUFFER_BIT : result |= VK_ACCESS_SHADER_READ_BIT; break;
            case VK_BUFFER_USAGE_INDEX_BUFFER_BIT : result |= VK_ACCESS_INDEX_READ_BIT; break;
            case VK_BUFFER_USAGE_VERTEX_BUFFER_BIT : result |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT; break;
            case VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT : result |= VK_ACCESS_INDIRECT_COMMAND_READ_BIT; break;
            #ifdef VK_NV_ray_tracing
            case VK_BUFFER_USAGE_RAY_TRACING_BIT_NV : result |= VK_ACCESS_SHADER_READ_BIT; break;
            #endif
            #ifdef VK_KHR_buffer_device_address
            case VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT : result |= VK_ACCESS_SHADER_READ_BIT; break;
            #endif
            #ifdef VK_EXT_transform_feedback
            case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT : break;
            case VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT: result |= VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT; break;
            #endif
            #ifdef VK_EXT_conditional_rendering
            case VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT : result |= VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT; break;
            #endif
            case VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM : break; // to shutup compiler warnings
        }
        END_ENUM_CHECKS();
    }
    return result;
}
/*
=================================================
Create
=================================================
*/
// Create an engine-owned Vulkan buffer from 'desc', bind it to the memory
// object, and record queue-family sharing info. Returns false (via CHECK_ERR)
// on any validation or Vulkan failure.
bool VBuffer::Create (VResourceManager &resMngr, const BufferDesc &desc, RawMemoryID memId, VMemoryObj &memObj,
                      EQueueFamilyMask queueFamilyMask, StringView dbgName)
{
    EXLOCK( _drCheck );
    // Must be called on a fresh, non-external object with a valid description.
    CHECK_ERR( _buffer == VK_NULL_HANDLE );
    CHECK_ERR( not _memoryId );
    CHECK_ERR( not desc.isExternal );
    CHECK_ERR( desc.size > 0 );
    CHECK_ERR( desc.usage != Default );
    auto& dev = resMngr.GetDevice();
    ASSERT( IsSupported( dev, desc, EMemoryType(memObj.MemoryType()) ));
    _desc = desc;
    _memoryId = MemoryID{ memId };
    // create buffer
    VkBufferCreateInfo info = {};
    info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    info.pNext = null;
    info.flags = 0;
    info.usage = VEnumCast( _desc.usage );
    info.size = VkDeviceSize( _desc.size );
    // At most 8 queue families can be listed for concurrent sharing.
    StaticArray<uint32_t, 8> queue_family_indices = {};
    // setup sharing mode
    if ( queueFamilyMask != Default )
    {
        info.sharingMode = VK_SHARING_MODE_CONCURRENT;
        info.pQueueFamilyIndices = queue_family_indices.data();
        // Expand the bit mask into an index list for VkBufferCreateInfo.
        for (uint i = 0, mask = (1u<<i);
             mask <= uint(queueFamilyMask) and info.queueFamilyIndexCount < queue_family_indices.size();
             ++i, mask = (1u<<i))
        {
            if ( AllBits( queueFamilyMask, mask ))
                queue_family_indices[ info.queueFamilyIndexCount++ ] = i;
        }
    }
    // reset to exclusive mode
    // (concurrent sharing is meaningless with fewer than two families)
    if ( info.queueFamilyIndexCount < 2 )
    {
        info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        info.pQueueFamilyIndices = null;
        info.queueFamilyIndexCount = 0;
    }
    VK_CHECK( dev.vkCreateBuffer( dev.GetVkDevice(), &info, null, OUT &_buffer ));
    CHECK_ERR( memObj.AllocateForBuffer( resMngr.GetMemoryManager(), _buffer ));
    if ( not dbgName.empty() )
    {
        dev.SetObjectName( BitCast<uint64_t>(_buffer), dbgName, VK_OBJECT_TYPE_BUFFER );
    }
    // Cache the read-access mask so barriers don't recompute it per use.
    _readAccessMask = GetAllBufferReadAccessMasks( info.usage );
    _queueFamilyMask = queueFamilyMask;
    _debugName = dbgName;
    return true;
}
/*
=================================================
Create
=================================================
*/
// Wrap an externally created Vulkan buffer. Ownership stays with the caller:
// 'onRelease' is invoked from Destroy() instead of vkDestroyBuffer.
bool VBuffer::Create (const VDevice &dev, const VulkanBufferDesc &desc, StringView dbgName, OnRelease_t &&onRelease)
{
    EXLOCK( _drCheck );
    CHECK_ERR( _buffer == VK_NULL_HANDLE );
    CHECK_ERR( desc.buffer != VK_NULL_HANDLE );
    _buffer = BitCast<VkBuffer>( desc.buffer );
    _desc.size = desc.size;
    _desc.usage = FGEnumCast( BitCast<VkBufferUsageFlagBits>( desc.usage ));
    _desc.isExternal = true;
    ASSERT( IsSupported( dev, _desc, EMemoryType::Default ));
    if ( not dbgName.empty() )
    {
        dev.SetObjectName( BitCast<uint64_t>(_buffer), dbgName, VK_OBJECT_TYPE_BUFFER );
    }
    CHECK( desc.queueFamily == VK_QUEUE_FAMILY_IGNORED ); // not supported yet
    CHECK( desc.queueFamilyIndices.empty() or desc.queueFamilyIndices.size() >= 2 );
    _queueFamilyMask = Default;
    // NOTE(review): this ORs the raw family *index* into the mask rather than
    // a (1 << idx) bit — looks suspicious next to the mask expansion in the
    // other Create(); confirm EQueueFamily's encoding before changing.
    for (auto idx : desc.queueFamilyIndices) {
        _queueFamilyMask |= BitCast<EQueueFamily>(idx);
    }
    _debugName = dbgName;
    _onRelease = std::move(onRelease);
    return true;
}
/*
=================================================
Destroy
=================================================
*/
// Release all Vulkan objects owned by this buffer and reset it to the
// default-constructed state. External buffers are handed back through the
// '_onRelease' callback instead of being destroyed here.
void VBuffer::Destroy (VResourceManager &resMngr)
{
    EXLOCK( _drCheck );
    auto& dev = resMngr.GetDevice();
    {
        // Buffer views are always owned by this object, external or not.
        SHAREDLOCK( _viewMapLock );
        for (auto& view : _viewMap) {
            dev.vkDestroyBufferView( dev.GetVkDevice(), view.second, null );
        }
        _viewMap.clear();
    }
    if ( _desc.isExternal and _onRelease ) {
        // Notify the external owner; we never destroy a borrowed handle.
        _onRelease( BitCast<BufferVk_t>(_buffer) );
    }
    if ( not _desc.isExternal and _buffer ) {
        dev.vkDestroyBuffer( dev.GetVkDevice(), _buffer, null );
    }
    if ( _memoryId ) {
        resMngr.ReleaseResource( _memoryId.Release() );
    }
    _buffer = VK_NULL_HANDLE;
    _memoryId = Default;
    _desc = Default;
    _queueFamilyMask = Default;
    _onRelease = {};
    _debugName.clear();
}
/*
=================================================
GetView
=================================================
*/
// Return a cached buffer view matching 'desc', creating it on first use.
// Thread-safe: optimistic shared-lock lookup, exclusive lock for insertion.
VkBufferView VBuffer::GetView (const VDevice &dev, const BufferViewDesc &desc) const
{
    SHAREDLOCK( _drCheck );
    // find already created image view
    {
        SHAREDLOCK( _viewMapLock );
        auto iter = _viewMap.find( desc );
        if ( iter != _viewMap.end() )
            return iter->second;
    }
    // create new image view
    EXLOCK( _viewMapLock );
    auto[iter, inserted] = _viewMap.insert({ desc, VK_NULL_HANDLE });
    if ( not inserted )
        return iter->second; // other thread create view before
    // NOTE(review): if _CreateView fails, the VK_NULL_HANDLE placeholder stays
    // cached and later calls return it without retrying — confirm intended.
    CHECK_ERR( _CreateView( dev, desc, OUT iter->second ));
    return iter->second;
}
/*
=================================================
_CreateView
=================================================
*/
bool VBuffer::_CreateView (const VDevice &dev, const BufferViewDesc &desc, OUT VkBufferView &outView) const
{
VkBufferViewCreateInfo info = {};
info.sType = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO;
info.flags = 0;
info.buffer = _buffer;
info.format = VEnumCast( desc.format );
info.offset = VkDeviceSize(desc.offset);
info.range = VkDeviceSize(desc.size);
VK_CHECK( dev.vkCreateBufferView( dev.GetVkDevice(), &info, null, OUT &outView ));
return true;
}
/*
=================================================
IsReadOnly
=================================================
*/
// A buffer is read-only when none of the write-capable usage flags are set.
bool VBuffer::IsReadOnly () const
{
    SHAREDLOCK( _drCheck );
    const auto writable_usage = EBufferUsage::TransferDst | EBufferUsage::StorageTexel |
                                EBufferUsage::Storage | EBufferUsage::RayTracing;
    return not AnyBits( _desc.usage, writable_usage );
}
/*
=================================================
GetApiSpecificDescription
=================================================
*/
// Export the native handle and flags for interop with external Vulkan code.
VulkanBufferDesc VBuffer::GetApiSpecificDescription () const
{
    VulkanBufferDesc result;
    result.buffer      = BitCast<BufferVk_t>( _buffer );
    result.size        = _desc.size;
    result.usage       = BitCast<BufferUsageFlagsVk_t>( VEnumCast( _desc.usage ));
    result.queueFamily = VK_QUEUE_FAMILY_IGNORED;
    //result.queueFamilyIndices // TODO
    return result;
}
/*
=================================================
IsSupported
=================================================
*/
// Check that the device supports every usage flag requested by 'desc'.
// 'memType' is currently unused but kept for interface stability.
bool VBuffer::IsSupported (const VDevice &dev, const BufferDesc &desc, EMemoryType memType)
{
    Unused( memType );

    // 'uint(t) != 0' guards against an infinite loop: if the top usage bit
    // were ever set, the shift would wrap 't' to zero and '0 <= usage' would
    // stay true forever.
    for (EBufferUsage t = EBufferUsage(1); uint(t) != 0 and t <= desc.usage; t = EBufferUsage(uint(t) << 1))
    {
        if ( not AllBits( desc.usage, t ))
            continue;

        BEGIN_ENUM_CHECKS();
        switch ( t )
        {
            case EBufferUsage::UniformTexel : break;
            case EBufferUsage::StorageTexel : break;
            case EBufferUsage::StorageTexelAtomic: break;
            case EBufferUsage::TransferSrc : break;
            case EBufferUsage::TransferDst : break;
            case EBufferUsage::Uniform : break;
            case EBufferUsage::Storage : break;
            case EBufferUsage::Index : break;
            case EBufferUsage::Vertex : break;
            case EBufferUsage::Indirect : break;
            case EBufferUsage::RayTracing : if ( not dev.GetFeatures().rayTracingNV ) return false; break;
            //case EBufferUsage::ShaderAddress : if ( not dev.GetFeatures().bufferAddress ) return false; break;
            case EBufferUsage::VertexPplnStore : if ( not dev.GetProperties().features.vertexPipelineStoresAndAtomics ) return false; break;
            case EBufferUsage::FragmentPplnStore : if ( not dev.GetProperties().features.fragmentStoresAndAtomics ) return false; break;
            case EBufferUsage::_Last :
            case EBufferUsage::All :
            case EBufferUsage::Transfer :
            case EBufferUsage::Unknown :
            default : ASSERT(false); break;
        }
        END_ENUM_CHECKS();
    }
    return true;
}
/*
=================================================
IsSupported
=================================================
*/
bool VBuffer::IsSupported (const VDevice &dev, const BufferViewDesc &desc) const
{
SHAREDLOCK( _drCheck );
VkFormatProperties props = {};
vkGetPhysicalDeviceFormatProperties( dev.GetVkPhysicalDevice(), VEnumCast( desc.format ), OUT &props );
const VkFormatFeatureFlags available_flags = props.bufferFeatures;
VkFormatFeatureFlags required_flags = 0;
for (EBufferUsage t = EBufferUsage(1); t <= _desc.usage; t = EBufferUsage(uint(t) << 1))
{
if ( not AllBits( _desc.usage, t ))
continue;
BEGIN_ENUM_CHECKS();
switch ( t )
{
case EBufferUsage::UniformTexel : required_flags |= VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT; break;
case EBufferUsage::StorageTexel : required_flags |= VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT; break;
case EBufferUsage::StorageTexelAtomic: required_flags |= VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT; break;
case EBufferUsage::TransferSrc : break;
case EBufferUsage::TransferDst : break;
case EBufferUsage::Uniform : break;
case EBufferUsage::Storage : break;
case EBufferUsage::Index : break;
case EBufferUsage::Vertex : break;
case EBufferUsage::Indirect : break;
case EBufferUsage::RayTracing : break;
//case EBufferUsage::ShaderAddress : break;
case EBufferUsage::VertexPplnStore : break;
case EBufferUsage::FragmentPplnStore : break;
case EBufferUsage::_Last :
case EBufferUsage::All :
case EBufferUsage::Transfer :
case EBufferUsage::Unknown :
default : ASSERT(false); break;
}
END_ENUM_CHECKS();
}
return AllBits( available_flags, required_flags );
}
} // FG
| 4,753 |
495 | from __future__ import absolute_import, print_function, division
from petl.test.helpers import ieq
from petl import join, leftjoin, rightjoin, outerjoin, crossjoin, antijoin, \
lookupjoin, hashjoin, hashleftjoin, hashrightjoin, hashantijoin, \
hashlookupjoin, unjoin, sort, cut
def _test_join_basic(join_impl):
    """Inner join: explicit key, natural key, and multi-row key matches."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    # normal inner join
    table3 = join_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (3, 'purple', 'square'))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = join_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
    ieq(expect4, table4)  # check twice
    # multiple rows for each key
    table5 = (('id', 'colour'),
              (1, 'blue'),
              (1, 'red'),
              (2, 'purple'))
    table6 = (('id', 'shape'),
              (1, 'circle'),
              (1, 'square'),
              (2, 'ellipse'))
    table7 = join_impl(table5, table6, key='id')
    expect7 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (1, 'blue', 'square'),
               (1, 'red', 'circle'),
               (1, 'red', 'square'),
               (2, 'purple', 'ellipse'))
    ieq(expect7, table7)
def _test_join_compound_keys(join_impl):
    """Inner join on a compound (multi-field) key, explicit and natural."""
    # compound keys
    table8 = (('id', 'time', 'height'),
              (1, 1, 12.3),
              (1, 2, 34.5),
              (2, 1, 56.7))
    table9 = (('id', 'time', 'weight'),
              (1, 2, 4.5),
              (2, 1, 6.7),
              (2, 2, 8.9))
    table10 = join_impl(table8, table9, key=['id', 'time'])
    expect10 = (('id', 'time', 'height', 'weight'),
                (1, 2, 34.5, 4.5),
                (2, 1, 56.7, 6.7))
    ieq(expect10, table10)
    # natural join on compound key
    table11 = join_impl(table8, table9)
    expect11 = expect10
    ieq(expect11, table11)
def _test_join_string_key(join_impl):
    """Inner join keyed on string values rather than integers."""
    table1 = (('id', 'colour'),
              ('aa', 'blue'),
              ('bb', 'red'),
              ('cc', 'purple'))
    table2 = (('id', 'shape'),
              ('aa', 'circle'),
              ('cc', 'square'),
              ('dd', 'ellipse'))
    # normal inner join
    table3 = join_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               ('aa', 'blue', 'circle'),
               ('cc', 'purple', 'square'))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
def _test_join_empty(join_impl):
    """Inner join where either side has a header but no data rows."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),)
    table3 = join_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),)
    ieq(expect3, table3)
    table1 = (('id', 'colour'),)
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = join_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),)
    ieq(expect3, table3)
def _test_join_novaluefield(join_impl):
    """Inner join when one or both tables carry only the key field."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    expect = (('id', 'colour', 'shape'),
              (1, 'blue', 'circle'),
              (3, 'purple', 'square'))
    actual = join_impl(table1, table2, key='id')
    ieq(expect, actual)
    actual = join_impl(cut(table1, 'id'), table2, key='id')
    ieq(cut(expect, 'id', 'shape'), actual)
    actual = join_impl(table1, cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id', 'colour'), actual)
    actual = join_impl(cut(table1, 'id'), cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id'), actual)
def _test_join_prefix(join_impl):
    """Inner join with lprefix/rprefix applied to output field names."""
    table1 = (('id', 'colour'),
              ('aa', 'blue'),
              ('bb', 'red'),
              ('cc', 'purple'))
    table2 = (('id', 'shape'),
              ('aa', 'circle'),
              ('cc', 'square'),
              ('dd', 'ellipse'))
    table3 = join_impl(table1, table2, key='id', lprefix='l_', rprefix='r_')
    expect3 = (('l_id', 'l_colour', 'r_shape'),
               ('aa', 'blue', 'circle'),
               ('cc', 'purple', 'square'))
    ieq(expect3, table3)
def _test_join_lrkey(join_impl):
    """Inner join with differently named key fields via lkey/rkey."""
    table1 = (('id', 'colour'),
              ('aa', 'blue'),
              ('bb', 'red'),
              ('cc', 'purple'))
    table2 = (('identifier', 'shape'),
              ('aa', 'circle'),
              ('cc', 'square'),
              ('dd', 'ellipse'))
    table3 = join_impl(table1, table2, lkey='id', rkey='identifier')
    expect3 = (('id', 'colour', 'shape'),
               ('aa', 'blue', 'circle'),
               ('cc', 'purple', 'square'))
    ieq(expect3, table3)
def _test_join_multiple(join_impl):
    """Inner join producing the cross-product of duplicate key matches."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (1, 'red', 8),
              (2, 'yellow', 15),
              (2, 'orange', 5),
              (3, 'purple', 4),
              (4, 'chartreuse', 42))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (2, 'square', 'big'),
              (3, 'ellipse', 'small'),
              (3, 'ellipse', 'tiny'),
              (5, 'didodecahedron', 3.14159265))
    actual = join_impl(table1, table2, key='id')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (1, 'red', 8, 'circle', 'big'),
              (2, 'yellow', 15, 'square', 'tiny'),
              (2, 'yellow', 15, 'square', 'big'),
              (2, 'orange', 5, 'square', 'tiny'),
              (2, 'orange', 5, 'square', 'big'),
              (3, 'purple', 4, 'ellipse', 'small'),
              (3, 'purple', 4, 'ellipse', 'tiny'))
    ieq(expect, actual)
def _test_join(join_impl):
    """Run the full inner-join suite against the given implementation."""
    _test_join_basic(join_impl)
    _test_join_compound_keys(join_impl)
    _test_join_string_key(join_impl)
    _test_join_empty(join_impl)
    _test_join_novaluefield(join_impl)
    _test_join_prefix(join_impl)
    _test_join_lrkey(join_impl)
    _test_join_multiple(join_impl)
def test_join():
    """Exercise the sort-based join implementation."""
    _test_join(join)
def _test_leftjoin_1(leftjoin_impl):
    """Left join: unmatched left rows are kept and padded with None."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'orange'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = leftjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'),
               (5, 'yellow', None,),
               (7, 'orange', None))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = leftjoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_leftjoin_2(leftjoin_impl):
    """Left join where the right table's keys are a subset of the left's."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'orange'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'))
    table3 = leftjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'),
               (5, 'yellow', None,),
               (7, 'orange', None))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = leftjoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_leftjoin_3(leftjoin_impl):
    """Left join where the right table has extra keys (they are dropped)."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'),
              (5, 'triangle'))
    table3 = leftjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = leftjoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_leftjoin_compound_keys(leftjoin_impl):
    """Left join on a compound key, padding all missing right fields."""
    # compound keys
    table5 = (('id', 'time', 'height'),
              (1, 1, 12.3),
              (1, 2, 34.5),
              (2, 1, 56.7))
    table6 = (('id', 'time', 'weight', 'bp'),
              (1, 2, 4.5, 120),
              (2, 1, 6.7, 110),
              (2, 2, 8.9, 100))
    table7 = leftjoin_impl(table5, table6, key=['id', 'time'])
    expect7 = (('id', 'time', 'height', 'weight', 'bp'),
               (1, 1, 12.3, None, None),
               (1, 2, 34.5, 4.5, 120),
               (2, 1, 56.7, 6.7, 110))
    ieq(expect7, table7)
def _test_leftjoin_empty(leftjoin_impl):
    """Left join against a right table with no data rows."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'orange'))
    table2 = (('id', 'shape'),)
    table3 = leftjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', None),
               (2, 'red', None),
               (3, 'purple', None),
               (5, 'yellow', None,),
               (7, 'orange', None))
    ieq(expect3, table3)
def _test_leftjoin_novaluefield(leftjoin_impl):
    """Left join when one or both tables carry only the key field."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'orange'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    expect = (('id', 'colour', 'shape'),
              (1, 'blue', 'circle'),
              (2, 'red', None),
              (3, 'purple', 'square'),
              (5, 'yellow', None,),
              (7, 'orange', None))
    actual = leftjoin_impl(table1, table2, key='id')
    ieq(expect, actual)
    actual = leftjoin_impl(cut(table1, 'id'), table2, key='id')
    ieq(cut(expect, 'id', 'shape'), actual)
    actual = leftjoin_impl(table1, cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id', 'colour'), actual)
    actual = leftjoin_impl(cut(table1, 'id'), cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id'), actual)
def _test_leftjoin_multiple(leftjoin_impl):
    """Left join with duplicate keys on both sides plus an unmatched row."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (1, 'red', 8),
              (2, 'yellow', 15),
              (2, 'orange', 5),
              (3, 'purple', 4),
              (4, 'chartreuse', 42))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (2, 'square', 'big'),
              (3, 'ellipse', 'small'),
              (3, 'ellipse', 'tiny'),
              (5, 'didodecahedron', 3.14159265))
    actual = leftjoin_impl(table1, table2, key='id')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (1, 'red', 8, 'circle', 'big'),
              (2, 'yellow', 15, 'square', 'tiny'),
              (2, 'yellow', 15, 'square', 'big'),
              (2, 'orange', 5, 'square', 'tiny'),
              (2, 'orange', 5, 'square', 'big'),
              (3, 'purple', 4, 'ellipse', 'small'),
              (3, 'purple', 4, 'ellipse', 'tiny'),
              (4, 'chartreuse', 42, None, None))
    ieq(expect, actual)
def _test_leftjoin_prefix(leftjoin_impl):
    """Left join with lprefix/rprefix applied to output field names."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'orange'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = leftjoin_impl(table1, table2, key='id', lprefix='l_', rprefix='r_')
    expect3 = (('l_id', 'l_colour', 'r_shape'),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'),
               (5, 'yellow', None,),
               (7, 'orange', None))
    ieq(expect3, table3)
def _test_leftjoin_lrkey(leftjoin_impl):
    """Left join with differently named key fields via lkey/rkey."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'orange'))
    table2 = (('identifier', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = leftjoin_impl(table1, table2, lkey='id', rkey='identifier')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'),
               (5, 'yellow', None,),
               (7, 'orange', None))
    ieq(expect3, table3)
def _test_leftjoin(leftjoin_impl):
    """Run the full left-join suite against the given implementation."""
    _test_leftjoin_1(leftjoin_impl)
    _test_leftjoin_2(leftjoin_impl)
    _test_leftjoin_3(leftjoin_impl)
    _test_leftjoin_compound_keys(leftjoin_impl)
    _test_leftjoin_empty(leftjoin_impl)
    _test_leftjoin_novaluefield(leftjoin_impl)
    _test_leftjoin_multiple(leftjoin_impl)
    _test_leftjoin_prefix(leftjoin_impl)
    _test_leftjoin_lrkey(leftjoin_impl)
def test_leftjoin():
    """Exercise the sort-based leftjoin implementation."""
    _test_leftjoin(leftjoin)
def _test_rightjoin_1(rightjoin_impl):
    """Right join: unmatched right rows are kept and padded with None."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),
              (0, 'triangle'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'),
              (5, 'pentagon'))
    table3 = rightjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (0, None, 'triangle'),
               (1, 'blue', 'circle'),
               (3, 'purple', 'square'),
               (4, None, 'ellipse'),
               (5, None, 'pentagon'))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = rightjoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_rightjoin_2(rightjoin_impl):
    """Right join where the left table has extra keys (they are dropped)."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'white'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = rightjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (1, 'blue', 'circle'),
               (3, 'purple', 'square'),
               (4, None, 'ellipse'))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = rightjoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_rightjoin_3(rightjoin_impl):
    """Right join with interleaved matched and unmatched keys."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (4, 'orange'))
    table2 = (('id', 'shape'),
              (0, 'triangle'),
              (1, 'circle'),
              (3, 'square'),
              (5, 'ellipse'),
              (7, 'pentagon'))
    table3 = rightjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (0, None, 'triangle'),
               (1, 'blue', 'circle'),
               (3, 'purple', 'square'),
               (5, None, 'ellipse'),
               (7, None, 'pentagon'))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice
    # natural join
    table4 = rightjoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_rightjoin_empty(rightjoin_impl):
    """Right join against a left table with no data rows."""
    table1 = (('id', 'colour'),)
    table2 = (('id', 'shape'),
              (0, 'triangle'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'),
              (5, 'pentagon'))
    table3 = rightjoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour', 'shape'),
               (0, None, 'triangle'),
               (1, None, 'circle'),
               (3, None, 'square'),
               (4, None, 'ellipse'),
               (5, None, 'pentagon'))
    ieq(expect3, table3)
def _test_rightjoin_novaluefield(rightjoin_impl):
    """Right join when one or both tables carry only the key field."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),
              (0, 'triangle'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'),
              (5, 'pentagon'))
    expect = (('id', 'colour', 'shape'),
              (0, None, 'triangle'),
              (1, 'blue', 'circle'),
              (3, 'purple', 'square'),
              (4, None, 'ellipse'),
              (5, None, 'pentagon'))
    actual = rightjoin_impl(table1, table2, key='id')
    ieq(expect, actual)
    actual = rightjoin_impl(cut(table1, 'id'), table2, key='id')
    ieq(cut(expect, 'id', 'shape'), actual)
    actual = rightjoin_impl(table1, cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id', 'colour'), actual)
    actual = rightjoin_impl(cut(table1, 'id'), cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id'), actual)
def _test_rightjoin_prefix(rightjoin_impl):
    """Right join with lprefix/rprefix applied to output field names."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('id', 'shape'),
              (0, 'triangle'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'),
              (5, 'pentagon'))
    table3 = rightjoin_impl(table1, table2, key='id', lprefix='l_',
                            rprefix='r_')
    expect3 = (('l_id', 'l_colour', 'r_shape'),
               (0, None, 'triangle'),
               (1, 'blue', 'circle'),
               (3, 'purple', 'square'),
               (4, None, 'ellipse'),
               (5, None, 'pentagon'))
    ieq(expect3, table3)
def _test_rightjoin_lrkey(rightjoin_impl):
    """Right join with differently named key fields via lkey/rkey."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'))
    table2 = (('identifier', 'shape'),
              (0, 'triangle'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'),
              (5, 'pentagon'))
    table3 = rightjoin_impl(table1, table2, lkey='id', rkey='identifier')
    expect3 = (('id', 'colour', 'shape'),
               (0, None, 'triangle'),
               (1, 'blue', 'circle'),
               (3, 'purple', 'square'),
               (4, None, 'ellipse'),
               (5, None, 'pentagon'))
    ieq(expect3, table3)
def _test_rightjoin_multiple(rightjoin_impl):
    """Right join with duplicate keys on both sides plus an unmatched row."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (1, 'red', 8),
              (2, 'yellow', 15),
              (2, 'orange', 5),
              (3, 'purple', 4),
              (4, 'chartreuse', 42))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (2, 'square', 'big'),
              (3, 'ellipse', 'small'),
              (3, 'ellipse', 'tiny'),
              (5, 'didodecahedron', 3.14159265))
    actual = rightjoin_impl(table1, table2, key='id')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (1, 'red', 8, 'circle', 'big'),
              (2, 'yellow', 15, 'square', 'tiny'),
              (2, 'yellow', 15, 'square', 'big'),
              (2, 'orange', 5, 'square', 'tiny'),
              (2, 'orange', 5, 'square', 'big'),
              (3, 'purple', 4, 'ellipse', 'small'),
              (3, 'purple', 4, 'ellipse', 'tiny'),
              (5, None, None, 'didodecahedron', 3.14159265))
    # N.B., need to sort because hash and sort implementations will return
    # rows in a different order
    ieq(sort(expect), sort(actual))
def _test_rightjoin(rightjoin_impl):
    """Run the full right-join suite against the given implementation."""
    _test_rightjoin_1(rightjoin_impl)
    _test_rightjoin_2(rightjoin_impl)
    _test_rightjoin_3(rightjoin_impl)
    _test_rightjoin_empty(rightjoin_impl)
    _test_rightjoin_novaluefield(rightjoin_impl)
    _test_rightjoin_prefix(rightjoin_impl)
    _test_rightjoin_lrkey(rightjoin_impl)
    _test_rightjoin_multiple(rightjoin_impl)
def test_rightjoin():
    """Exercise the sort-based rightjoin implementation."""
    _test_rightjoin(rightjoin)
def test_outerjoin():
table1 = (('id', 'colour'),
(0, 'black'),
(1, 'blue'),
(2, 'red'),
(3, 'purple'),
(5, 'yellow'),
(7, 'white'))
table2 = (('id', 'shape'),
(1, 'circle'),
(3, 'square'),
(4, 'ellipse'))
table3 = outerjoin(table1, table2, key='id')
expect3 = (('id', 'colour', 'shape'),
(0, 'black', None),
(1, 'blue', 'circle'),
(2, 'red', None),
(3, 'purple', 'square'),
(4, None, 'ellipse'),
(5, 'yellow', None),
(7, 'white', None))
ieq(expect3, table3)
ieq(expect3, table3) # check twice
# natural join
table4 = outerjoin(table1, table2)
expect4 = expect3
ieq(expect4, table4)
def test_outerjoin_2():
table1 = (('id', 'colour'),
(1, 'blue'),
(2, 'red'),
(3, 'purple'))
table2 = (('id', 'shape'),
(0, 'pentagon'),
(1, 'circle'),
(3, 'square'),
(4, 'ellipse'),
(5, 'triangle'))
table3 = outerjoin(table1, table2, key='id')
expect3 = (('id', 'colour', 'shape'),
(0, None, 'pentagon'),
(1, 'blue', 'circle'),
(2, 'red', None),
(3, 'purple', 'square'),
(4, None, 'ellipse'),
(5, None, 'triangle'))
ieq(expect3, table3)
ieq(expect3, table3) # check twice
# natural join
table4 = outerjoin(table1, table2)
expect4 = expect3
ieq(expect4, table4)
def test_outerjoin_fieldorder():
table1 = (('colour', 'id'),
('blue', 1),
('red', 2),
('purple', 3))
table2 = (('id', 'shape'),
(1, 'circle'),
(3, 'square'),
(4, 'ellipse'))
table3 = outerjoin(table1, table2, key='id')
expect3 = (('colour', 'id', 'shape'),
('blue', 1, 'circle'),
('red', 2, None),
('purple', 3, 'square'),
(None, 4, 'ellipse'))
ieq(expect3, table3)
ieq(expect3, table3) # check twice
def test_outerjoin_empty():
table1 = (('id', 'colour'),
(0, 'black'),
(1, 'blue'),
(2, 'red'),
(3, 'purple'),
(5, 'yellow'),
(7, 'white'))
table2 = (('id', 'shape'),)
table3 = outerjoin(table1, table2, key='id')
expect3 = (('id', 'colour', 'shape'),
(0, 'black', None),
(1, 'blue', None),
(2, 'red', None),
(3, 'purple', None),
(5, 'yellow', None),
(7, 'white', None))
ieq(expect3, table3)
def test_outerjoin_novaluefield():
table1 = (('id', 'colour'),
(0, 'black'),
(1, 'blue'),
(2, 'red'),
(3, 'purple'),
(5, 'yellow'),
(7, 'white'))
table2 = (('id', 'shape'),
(1, 'circle'),
(3, 'square'),
(4, 'ellipse'))
expect = (('id', 'colour', 'shape'),
(0, 'black', None),
(1, 'blue', 'circle'),
(2, 'red', None),
(3, 'purple', 'square'),
(4, None, 'ellipse'),
(5, 'yellow', None),
(7, 'white', None))
actual = outerjoin(table1, table2, key='id')
ieq(expect, actual)
actual = outerjoin(cut(table1, 'id'), table2, key='id')
ieq(cut(expect, 'id', 'shape'), actual)
actual = outerjoin(table1, cut(table2, 'id'), key='id')
ieq(cut(expect, 'id', 'colour'), actual)
actual = outerjoin(cut(table1, 'id'), cut(table2, 'id'), key='id')
ieq(cut(expect, 'id'), actual)
def test_outerjoin_prefix():
    """lprefix/rprefix are applied to the output header fields of an outer join."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'white'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = outerjoin(table1, table2, key='id', lprefix='l_', rprefix='r_')
    expect3 = (('l_id', 'l_colour', 'r_shape'),
               (0, 'black', None),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'),
               (4, None, 'ellipse'),
               (5, 'yellow', None),
               (7, 'white', None))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice (result must be re-iterable)
def test_outerjoin_lrkey():
    """Outer join with differently named key fields via lkey/rkey."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (3, 'purple'),
              (5, 'yellow'),
              (7, 'white'))
    table2 = (('identifier', 'shape'),
              (1, 'circle'),
              (3, 'square'),
              (4, 'ellipse'))
    table3 = outerjoin(table1, table2, lkey='id', rkey='identifier')
    expect3 = (('id', 'colour', 'shape'),
               (0, 'black', None),
               (1, 'blue', 'circle'),
               (2, 'red', None),
               (3, 'purple', 'square'),
               (4, None, 'ellipse'),
               (5, 'yellow', None),
               (7, 'white', None))
    ieq(expect3, table3)
    ieq(expect3, table3)  # check twice (result must be re-iterable)
def test_outerjoin_multiple():
    """Duplicate keys on both sides give the cross-product of matching rows,
    plus unmatched rows from either side padded with None."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (1, 'red', 8),
              (2, 'yellow', 15),
              (2, 'orange', 5),
              (3, 'purple', 4),
              (4, 'chartreuse', 42))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (2, 'square', 'big'),
              (3, 'ellipse', 'small'),
              (3, 'ellipse', 'tiny'),
              (5, 'didodecahedron', 3.14159265))
    actual = outerjoin(table1, table2, key='id')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (1, 'red', 8, 'circle', 'big'),
              (2, 'yellow', 15, 'square', 'tiny'),
              (2, 'yellow', 15, 'square', 'big'),
              (2, 'orange', 5, 'square', 'tiny'),
              (2, 'orange', 5, 'square', 'big'),
              (3, 'purple', 4, 'ellipse', 'small'),
              (3, 'purple', 4, 'ellipse', 'tiny'),
              (4, 'chartreuse', 42, None, None),
              (5, None, None, 'didodecahedron', 3.14159265))
    ieq(expect, actual)
def test_crossjoin():
    """Cross join produces the cartesian product of the two tables' rows."""
    lhs = (('id', 'colour'),
           (1, 'blue'),
           (2, 'red'))
    rhs = (('id', 'shape'),
           (1, 'circle'),
           (3, 'square'))
    actual = crossjoin(lhs, rhs)
    expected = (('id', 'colour', 'id', 'shape'),
                (1, 'blue', 1, 'circle'),
                (1, 'blue', 3, 'square'),
                (2, 'red', 1, 'circle'),
                (2, 'red', 3, 'square'))
    ieq(expected, actual)
def test_crossjoin_empty():
    """Cross join with a header-only table yields only the combined header."""
    lhs = (('id', 'colour'),
           (1, 'blue'),
           (2, 'red'))
    rhs = (('id', 'shape'),)
    actual = crossjoin(lhs, rhs)
    expected = (('id', 'colour', 'id', 'shape'),)
    ieq(expected, actual)
def test_crossjoin_novaluefield():
    """Cross join still works when one or both tables carry only the key field."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'))
    expect = (('id', 'colour', 'id', 'shape'),
              (1, 'blue', 1, 'circle'),
              (1, 'blue', 3, 'square'),
              (2, 'red', 1, 'circle'),
              (2, 'red', 3, 'square'))
    actual = crossjoin(table1, table2, key='id')
    ieq(expect, actual)
    # strip the value field from the left table
    actual = crossjoin(cut(table1, 'id'), table2, key='id')
    ieq(cut(expect, 0, 2, 'shape'), actual)
    # strip the value field from the right table
    actual = crossjoin(table1, cut(table2, 'id'), key='id')
    ieq(cut(expect, 0, 'colour', 2), actual)
    # strip the value field from both tables
    actual = crossjoin(cut(table1, 'id'), cut(table2, 'id'), key='id')
    ieq(cut(expect, 0, 2), actual)
def test_crossjoin_prefix():
    """prefix=True disambiguates duplicate field names with 1_/2_ prefixes."""
    table1 = (('id', 'colour'),
              (1, 'blue'),
              (2, 'red'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'))
    table3 = crossjoin(table1, table2, prefix=True)
    expect3 = (('1_id', '1_colour', '2_id', '2_shape'),
               (1, 'blue', 1, 'circle'),
               (1, 'blue', 3, 'square'),
               (2, 'red', 1, 'circle'),
               (2, 'red', 3, 'square'))
    ieq(expect3, table3)
def _test_antijoin_basics(antijoin_impl):
    """Anti join keeps only left rows whose key has no match in the right table;
    also checks the natural-join (implicit key) form."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (4, 'yellow'),
              (5, 'white'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'))
    table3 = antijoin_impl(table1, table2, key='id')
    expect3 = (('id', 'colour'),
               (0, 'black'),
               (2, 'red'),
               (4, 'yellow'),
               (5, 'white'))
    ieq(expect3, table3)
    # natural join (key inferred from the common field)
    table4 = antijoin_impl(table1, table2)
    expect4 = expect3
    ieq(expect4, table4)
def _test_antijoin_empty(antijoin_impl):
    """Anti join against a header-only right table keeps the left table intact."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (4, 'yellow'),
              (5, 'white'))
    table2 = (('id', 'shape'),)
    actual = antijoin_impl(table1, table2, key='id')
    expect = table1
    ieq(expect, actual)
def _test_antijoin_novaluefield(antijoin_impl):
    """Anti join still works when one or both tables carry only the key field."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (4, 'yellow'),
              (5, 'white'))
    table2 = (('id', 'shape'),
              (1, 'circle'),
              (3, 'square'))
    expect = (('id', 'colour'),
              (0, 'black'),
              (2, 'red'),
              (4, 'yellow'),
              (5, 'white'))
    actual = antijoin_impl(table1, table2, key='id')
    ieq(expect, actual)
    # strip the value field from the left table
    actual = antijoin_impl(cut(table1, 'id'), table2, key='id')
    ieq(cut(expect, 'id'), actual)
    # strip the value field from the right table
    actual = antijoin_impl(table1, cut(table2, 'id'), key='id')
    ieq(expect, actual)
    # strip the value field from both tables
    actual = antijoin_impl(cut(table1, 'id'), cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id'), actual)
def _test_antijoin_lrkey(antijoin_impl):
    """Anti join with differently named key fields via lkey/rkey."""
    table1 = (('id', 'colour'),
              (0, 'black'),
              (1, 'blue'),
              (2, 'red'),
              (4, 'yellow'),
              (5, 'white'))
    table2 = (('identifier', 'shape'),
              (1, 'circle'),
              (3, 'square'))
    table3 = antijoin_impl(table1, table2, lkey='id', rkey='identifier')
    expect3 = (('id', 'colour'),
               (0, 'black'),
               (2, 'red'),
               (4, 'yellow'),
               (5, 'white'))
    ieq(expect3, table3)
def _test_antijoin(antijoin_impl):
    """Run the full anti-join test suite against the given implementation."""
    for subtest in (_test_antijoin_basics,
                    _test_antijoin_empty,
                    _test_antijoin_novaluefield,
                    _test_antijoin_lrkey):
        subtest(antijoin_impl)
def test_antijoin():
    """Exercise the default (sort-based) antijoin implementation."""
    _test_antijoin(antijoin)
def _test_lookupjoin_1(lookupjoin_impl):
    """Lookup join with one-to-one keys, with explicit and natural key forms."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (2, 'red', 8),
              (3, 'purple', 4))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (3, 'ellipse', 'small'))
    actual = lookupjoin_impl(table1, table2, key='id')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (2, 'red', 8, 'square', 'tiny'),
              (3, 'purple', 4, 'ellipse', 'small'))
    ieq(expect, actual)
    ieq(expect, actual)  # check twice (result must be re-iterable)
    # natural join (key inferred from the common field)
    actual = lookupjoin_impl(table1, table2)
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (2, 'red', 8, 'square', 'tiny'),
              (3, 'purple', 4, 'ellipse', 'small'))
    ieq(expect, actual)
    ieq(expect, actual)  # check twice (result must be re-iterable)
def _test_lookupjoin_2(lookupjoin_impl):
    """Lookup join with duplicate right keys takes only the first match per key."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (2, 'red', 8),
              (3, 'purple', 4))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (1, 'circle', 'small'),
              (2, 'square', 'tiny'),
              (2, 'square', 'big'),
              (3, 'ellipse', 'small'),
              (3, 'ellipse', 'tiny'))
    actual = lookupjoin_impl(table1, table2, key='id')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (2, 'red', 8, 'square', 'tiny'),
              (3, 'purple', 4, 'ellipse', 'small'))
    ieq(expect, actual)
    ieq(expect, actual)  # check twice (result must be re-iterable)
def _test_lookupjoin_prefix(lookupjoin_impl):
    """lprefix/rprefix are applied to the output header fields of a lookup join."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (2, 'red', 8),
              (3, 'purple', 4))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (3, 'ellipse', 'small'))
    actual = lookupjoin_impl(table1, table2, key='id', lprefix='l_',
                             rprefix='r_')
    expect = (('l_id', 'l_color', 'l_cost', 'r_shape', 'r_size'),
              (1, 'blue', 12, 'circle', 'big'),
              (2, 'red', 8, 'square', 'tiny'),
              (3, 'purple', 4, 'ellipse', 'small'))
    ieq(expect, actual)
def _test_lookupjoin_lrkey(lookupjoin_impl):
    """Lookup join with differently named key fields via lkey/rkey."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (2, 'red', 8),
              (3, 'purple', 4))
    table2 = (('identifier', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (3, 'ellipse', 'small'))
    actual = lookupjoin_impl(table1, table2, lkey='id', rkey='identifier')
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (2, 'red', 8, 'square', 'tiny'),
              (3, 'purple', 4, 'ellipse', 'small'))
    ieq(expect, actual)
def _test_lookupjoin_novaluefield(lookupjoin_impl):
    """Lookup join still works when one or both tables carry only the key field."""
    table1 = (('id', 'color', 'cost'),
              (1, 'blue', 12),
              (2, 'red', 8),
              (3, 'purple', 4))
    table2 = (('id', 'shape', 'size'),
              (1, 'circle', 'big'),
              (2, 'square', 'tiny'),
              (3, 'ellipse', 'small'))
    expect = (('id', 'color', 'cost', 'shape', 'size'),
              (1, 'blue', 12, 'circle', 'big'),
              (2, 'red', 8, 'square', 'tiny'),
              (3, 'purple', 4, 'ellipse', 'small'))
    actual = lookupjoin_impl(table1, table2, key='id')
    ieq(expect, actual)
    # strip the value fields from the left table
    actual = lookupjoin_impl(cut(table1, 'id'), table2, key='id')
    ieq(cut(expect, 'id', 'shape', 'size'), actual)
    # strip the value fields from the right table
    actual = lookupjoin_impl(table1, cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id', 'color', 'cost'), actual)
    # strip the value fields from both tables
    actual = lookupjoin_impl(cut(table1, 'id'), cut(table2, 'id'), key='id')
    ieq(cut(expect, 'id'), actual)
def _test_lookupjoin(lookupjoin_impl):
    """Run the full lookup-join test suite against the given implementation."""
    for subtest in (_test_lookupjoin_1,
                    _test_lookupjoin_2,
                    _test_lookupjoin_prefix,
                    _test_lookupjoin_lrkey,
                    _test_lookupjoin_novaluefield):
        subtest(lookupjoin_impl)
def test_lookupjoin():
    """Exercise the default (sort-based) lookupjoin implementation."""
    _test_lookupjoin(lookupjoin)
def test_hashjoin():
    """Run the shared join test suite against the hash-based implementation."""
    _test_join(hashjoin)
def test_hashleftjoin():
    """Run the shared left-join test suite against the hash-based implementation."""
    _test_leftjoin(hashleftjoin)
def test_hashrightjoin():
    """Run the shared right-join test suite against the hash-based implementation."""
    _test_rightjoin(hashrightjoin)
def test_hashantijoin():
    """Run the shared anti-join test suite against the hash-based implementation."""
    _test_antijoin(hashantijoin)
def test_hashlookupjoin():
    """Run the shared lookup-join test suite against the hash-based implementation."""
    _test_lookupjoin(hashlookupjoin)
def test_unjoin_implicit_key():
    """Unjoin with no key: a surrogate id is generated for the extracted values."""
    # test the case where the join key needs to be reconstructed
    table1 = (('foo', 'bar'),
              (1, 'apple'),
              (2, 'apple'),
              (3, 'orange'))
    expect_left = (('foo', 'bar_id'),
                   (1, 1),
                   (2, 1),
                   (3, 2))
    expect_right = (('id', 'bar'),
                    (1, 'apple'),
                    (2, 'orange'))
    left, right = unjoin(table1, 'bar')
    ieq(expect_left, left)
    ieq(expect_left, left)  # check twice (result must be re-iterable)
    ieq(expect_right, right)
    ieq(expect_right, right)  # check twice (result must be re-iterable)
def test_unjoin_explicit_key():
    """Unjoin with an explicit key splits off the value column, deduplicating the left."""
    # test the case where the join key is still present
    table2 = (('Customer ID', 'First Name', 'Surname', 'Telephone Number'),
              (123, 'Robert', 'Ingram', '555-861-2025'),
              (456, 'Jane', 'Wright', '555-403-1659'),
              (456, 'Jane', 'Wright', '555-776-4100'),
              (789, 'Maria', 'Fernandez', '555-808-9633'))
    expect_left = (('Customer ID', 'First Name', 'Surname'),
                   (123, 'Robert', 'Ingram'),
                   (456, 'Jane', 'Wright'),
                   (789, 'Maria', 'Fernandez'))
    expect_right = (('Customer ID', 'Telephone Number'),
                    (123, '555-861-2025'),
                    (456, '555-403-1659'),
                    (456, '555-776-4100'),
                    (789, '555-808-9633'))
    left, right = unjoin(table2, 'Telephone Number', key='Customer ID')
    ieq(expect_left, left)
    ieq(expect_left, left)  # check twice (result must be re-iterable)
    ieq(expect_right, right)
    ieq(expect_right, right)  # check twice (result must be re-iterable)
def test_unjoin_explicit_key_2():
    """Unjoin with duplicate key rows: both output tables come back sorted."""
    table3 = (('Employee', 'Skill', 'Current Work Location'),
              ('Jones', 'Typing', '114 Main Street'),
              ('Jones', 'Shorthand', '114 Main Street'),
              ('Jones', 'Whittling', '114 Main Street'),
              ('Bravo', 'Light Cleaning', '73 Industrial Way'),
              ('Ellis', 'Alchemy', '73 Industrial Way'),
              ('Ellis', 'Flying', '73 Industrial Way'),
              ('Harrison', 'Light Cleaning', '73 Industrial Way'))
    # N.B., we do expect rows will get sorted
    expect_left = (('Employee', 'Current Work Location'),
                   ('Bravo', '73 Industrial Way'),
                   ('Ellis', '73 Industrial Way'),
                   ('Harrison', '73 Industrial Way'),
                   ('Jones', '114 Main Street'))
    expect_right = (('Employee', 'Skill'),
                    ('Bravo', 'Light Cleaning'),
                    ('Ellis', 'Alchemy'),
                    ('Ellis', 'Flying'),
                    ('Harrison', 'Light Cleaning'),
                    ('Jones', 'Shorthand'),
                    ('Jones', 'Typing'),
                    ('Jones', 'Whittling'))
    left, right = unjoin(table3, 'Skill', key='Employee')
    ieq(expect_left, left)
    ieq(expect_left, left)  # check twice (result must be re-iterable)
    ieq(expect_right, right)
    ieq(expect_right, right)  # check twice (result must be re-iterable)
def test_unjoin_explicit_key_3():
    """Unjoin on a non-leading key field ('Winner'); rows are returned sorted."""
    table4 = (('Tournament', 'Year', 'Winner', 'Date of Birth'),
              ('Indiana Invitational', 1998, '<NAME>', '21 July 1975'),
              ('Cleveland Open', 1999, '<NAME>', '28 September 1968'),
              ('Des Moines Masters', 1999, '<NAME>', '21 July 1975'),
              ('Indiana Invitational', 1999, '<NAME>', '14 March 1977'))
    # N.B., we do expect rows will get sorted
    expect_left = (('Tournament', 'Year', 'Winner'),
                   ('Cleveland Open', 1999, '<NAME>'),
                   ('Des Moines Masters', 1999, '<NAME>'),
                   ('Indiana Invitational', 1998, '<NAME>'),
                   ('Indiana Invitational', 1999, '<NAME>'))
    expect_right = (('Winner', 'Date of Birth'),
                    ('<NAME>', '21 July 1975'),
                    ('<NAME>', '28 September 1968'),
                    ('<NAME>', '14 March 1977'))
    left, right = unjoin(table4, 'Date of Birth', key='Winner')
    ieq(expect_left, left)
    ieq(expect_left, left)  # check twice (result must be re-iterable)
    ieq(expect_right, right)
    ieq(expect_right, right)  # check twice (result must be re-iterable)
def test_unjoin_explicit_key_4():
    """Unjoin deduplicates repeated (key, value) combinations in both outputs."""
    table5 = (('Restaurant', 'Pizza Variety', 'Delivery Area'),
              ('A1 Pizza', 'Thick Crust', 'Springfield'),
              ('A1 Pizza', 'Thick Crust', 'Shelbyville'),
              ('A1 Pizza', 'Thick Crust', 'Capital City'),
              ('A1 Pizza', 'Stuffed Crust', 'Springfield'),
              ('A1 Pizza', 'Stuffed Crust', 'Shelbyville'),
              ('A1 Pizza', 'Stuffed Crust', 'Capital City'),
              ('Elite Pizza', 'Thin Crust', 'Capital City'),
              ('Elite Pizza', 'Stuffed Crust', 'Capital City'),
              ("Vincenzo's Pizza", "Thick Crust", "Springfield"),
              ("Vincenzo's Pizza", "Thick Crust", "Shelbyville"),
              ("Vincenzo's Pizza", "Thin Crust", "Springfield"),
              ("Vincenzo's Pizza", "Thin Crust", "Shelbyville"))
    # N.B., we do expect rows will get sorted
    expect_left = (('Restaurant', 'Pizza Variety'),
                   ('A1 Pizza', 'Stuffed Crust'),
                   ('A1 Pizza', 'Thick Crust'),
                   ('Elite Pizza', 'Stuffed Crust'),
                   ('Elite Pizza', 'Thin Crust'),
                   ("Vincenzo's Pizza", "Thick Crust"),
                   ("Vincenzo's Pizza", "Thin Crust"))
    expect_right = (('Restaurant', 'Delivery Area'),
                    ('A1 Pizza', 'Capital City'),
                    ('A1 Pizza', 'Shelbyville'),
                    ('A1 Pizza', 'Springfield'),
                    ('Elite Pizza', 'Capital City'),
                    ("Vincenzo's Pizza", "Shelbyville"),
                    ("Vincenzo's Pizza", "Springfield"))
    left, right = unjoin(table5, 'Delivery Area', key='Restaurant')
    ieq(expect_left, left)
    ieq(expect_left, left)  # check twice (result must be re-iterable)
    ieq(expect_right, right)
    ieq(expect_right, right)  # check twice (result must be re-iterable)
def test_unjoin_explicit_key_5():
    """Unjoin where several left rows share the same key/value pair."""
    table6 = (('ColA', 'ColB', 'ColC'),
              ('A', 1, 'apple'),
              ('B', 1, 'apple'),
              ('C', 2, 'orange'),
              ('D', 3, 'lemon'),
              ('E', 3, 'lemon'))
    # N.B., we do expect rows will get sorted
    expect_left = (('ColA', 'ColB'),
                   ('A', 1),
                   ('B', 1),
                   ('C', 2),
                   ('D', 3),
                   ('E', 3))
    expect_right = (('ColB', 'ColC'),
                    (1, 'apple'),
                    (2, 'orange'),
                    (3, 'lemon'))
    left, right = unjoin(table6, 'ColC', key='ColB')
    ieq(expect_left, left)
    ieq(expect_left, left)  # check twice (result must be re-iterable)
    ieq(expect_right, right)
    ieq(expect_right, right)  # check twice (result must be re-iterable)
| 23,721 |
460 | <reponame>manovotn/thorntail
/**
* Copyright 2017 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wildfly.swarm.cdi.beanvalidation.test;
import javax.enterprise.context.ApplicationScoped;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.atomic.AtomicLong;
@ApplicationScoped
public class BooksService {

    // Source of unique, monotonically increasing book identifiers.
    private final AtomicLong nextId = new AtomicLong();

    // Backing store; copy-on-write list so iteration never blocks writers.
    private final List<Book> catalogue = new CopyOnWriteArrayList<>();

    /**
     * @return the live list of all books created so far
     */
    public List<Book> list() {
        return catalogue;
    }

    /**
     * Validates the arguments via bean validation, then stores and returns
     * a new {@link Book} with a freshly generated id.
     */
    @Valid
    public Book create(
            @Isbn(message = "ISBN must be set to a valid value")
            String isbn,
            @NotNull(message = "Title must be set")
            String title,
            @NotNull(message = "Author must be set")
            String author
    ) {
        final Book created = new Book(nextId.incrementAndGet(), isbn, title, author);
        catalogue.add(created);
        return created;
    }
}
| 549 |
737 | <filename>soa/service/zmq.hpp
/*
Copyright (c) 2007-2011 iMatix Corporation
Copyright (c) 2007-2011 Other contributors as noted in the AUTHORS file
This file is part of 0MQ.
0MQ is free software; you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
0MQ is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef __ZMQ_HPP_INCLUDED__
#define __ZMQ_HPP_INCLUDED__
#include "zmq.h"
#include <cassert>
#include <cstring>
#include <exception>
#include <mutex>
#include <string>
#include <iostream>
#include <thread>
#include <set>
#include "jml/utils/exc_assert.h"
namespace zmq
{
typedef zmq_free_fn free_fn;
typedef zmq_pollitem_t pollitem_t;
class socket_t;
class error_t : public std::exception
{
public:
error_t () : errnum (zmq_errno ()) {}
virtual const char *what () const throw ()
{
return zmq_strerror (errnum);
}
int num () const
{
return errnum;
}
private:
int errnum;
};
inline int poll (zmq_pollitem_t *items_, int nitems_, long timeout_ = -1)
{
int rc = zmq_poll (items_, nitems_, timeout_);
if (rc < 0)
throw error_t ();
return rc;
}
inline void device (int device_, void * insocket_, void* outsocket_)
{
int rc = zmq_device (device_, insocket_, outsocket_);
if (rc != 0)
throw error_t ();
}
class message_t : public zmq_msg_t
{
friend class socket_t;
public:
inline message_t ()
{
int rc = zmq_msg_init (this);
if (rc != 0)
throw error_t ();
}
inline message_t (size_t size_)
{
int rc = zmq_msg_init_size (this, size_);
if (rc != 0)
throw error_t ();
}
inline message_t (void *data_, size_t size_, free_fn *ffn_,
void *hint_ = NULL)
{
int rc = zmq_msg_init_data (this, data_, size_, ffn_, hint_);
if (rc != 0)
throw error_t ();
}
inline message_t (const std::string & s)
{
int rc = zmq_msg_init_size (this, s.size());
if (rc != 0)
throw error_t ();
std::copy(s.begin(), s.end(), (char *)data());
}
inline message_t (const message_t & other) noexcept
{
int rc = zmq_msg_init (this);
if (rc == 0)
rc = zmq_msg_copy(this, const_cast<message_t *>(&other));
if (rc != 0)
throw error_t ();
}
inline message_t (message_t && other) noexcept
{
int rc = zmq_msg_init (this);
if (rc == 0)
rc = zmq_msg_move(this, &other);
if (rc != 0)
throw error_t ();
}
inline message_t & operator = (const message_t & other) noexcept
{
int rc = zmq_msg_copy(this, const_cast<message_t *>(&other));
if (rc != 0)
throw error_t ();
return *this;
}
inline message_t & operator = (message_t && other) noexcept
{
int rc = zmq_msg_move(this, &other);
if (rc != 0)
throw error_t ();
return *this;
}
inline ~message_t ()
{
int rc = zmq_msg_close (this);
assert (rc == 0);
(void)rc;
}
inline void rebuild ()
{
int rc = zmq_msg_close (this);
if (rc != 0)
throw error_t ();
rc = zmq_msg_init (this);
if (rc != 0)
throw error_t ();
}
inline void rebuild (size_t size_)
{
int rc = zmq_msg_close (this);
if (rc != 0)
throw error_t ();
rc = zmq_msg_init_size (this, size_);
if (rc != 0)
throw error_t ();
}
inline void rebuild (void *data_, size_t size_, free_fn *ffn_,
void *hint_ = NULL)
{
int rc = zmq_msg_close (this);
if (rc != 0)
throw error_t ();
rc = zmq_msg_init_data (this, data_, size_, ffn_, hint_);
if (rc != 0)
throw error_t ();
}
inline void move (message_t *msg_)
{
int rc = zmq_msg_move (this, (zmq_msg_t*) msg_);
if (rc != 0)
throw error_t ();
}
inline void copy (message_t *msg_)
{
int rc = zmq_msg_copy (this, (zmq_msg_t*) msg_);
if (rc != 0)
throw error_t ();
}
inline char *data ()
{
return (char *)zmq_msg_data (this);
}
inline const char *data () const
{
return (const char *)zmq_msg_data (const_cast<message_t *>(this));
}
inline size_t size () const
{
return zmq_msg_size (const_cast<message_t *>(this));
}
std::string toString() const
{
return std::string(data(), data() + size());
}
private:
// Disable implicit message copying, so that users won't use shared
// messages (less efficient) without being aware of the fact.
//message_t (const message_t&);
//void operator = (const message_t&);
};
    // RAII wrapper around a 0MQ context. Tracks every socket_t created on
    // it so the destructor can assert they were all destroyed before
    // zmq_term is called (terminating with live sockets would block).
    class context_t
    {
        friend class socket_t;

    public:
        // Create the context with the given number of I/O threads;
        // throws error_t on failure.
        inline context_t (int io_threads_)
        {
            ptr = zmq_init (io_threads_);
            if (ptr == NULL)
                throw error_t ();
        }

        inline ~context_t ()
        {
            // If this throws, it means that you forgot to destroy a zmq_socket_t object
            // created with this context before you closed the context.
            ExcAssertEqual(sockets.size(), 0);
            int rc = zmq_term (ptr);
            assert (rc == 0);
            (void)rc;
        }

        // Be careful with this, it's probably only useful for
        // using the C api together with an existing C++ api.
        // Normally you should never need to use this.
        inline operator void* ()
        {
            return ptr;
        }

        // Record a socket created on this context (called by socket_t's
        // constructor). Asserts the socket is not already registered.
        void registerSocket(socket_t * sock)
        {
            std::unique_lock<std::mutex> guard(lock);
            ExcAssertEqual(sockets.count(sock), 0);
            sockets.insert(sock);
        }

        // Remove a socket from the registry (called by socket_t's
        // destructor). Asserts the socket was registered.
        void unregisterSocket(socket_t * sock)
        {
            std::unique_lock<std::mutex> guard(lock);
            ExcAssertEqual(sockets.count(sock), 1);
            sockets.erase(sock);
        }

    private:
        void *ptr;                      // underlying zmq context handle

        context_t (const context_t&);   // non-copyable
        void operator = (const context_t&);

        std::mutex lock;                // guards 'sockets'
        std::set<socket_t *> sockets;   // live sockets created on this context
    };
class socket_t
{
public:
context_t * context_;
inline socket_t (context_t &context_, int type_)
: context_(&context_)
{
ptr = zmq_socket (context_.ptr, type_);
if (ptr == NULL)
throw error_t ();
context_.registerSocket(this);
}
inline ~socket_t ()
{
int linger = 0;
setsockopt (ZMQ_LINGER, &linger, sizeof(linger));
int rc = zmq_close (ptr);
assert (rc == 0);
(void)rc;
context_->unregisterSocket(this);
}
inline operator void* ()
{
return ptr;
}
inline void setsockopt (int option_, const void *optval_,
size_t optvallen_)
{
int rc = zmq_setsockopt (ptr, option_, optval_, optvallen_);
if (rc != 0)
throw error_t ();
}
inline void getsockopt (int option_, void *optval_,
size_t *optvallen_)
{
int rc = zmq_getsockopt (ptr, option_, optval_, optvallen_);
if (rc != 0)
throw error_t ();
}
inline void bind (const std::string & addr_)
{
int rc = zmq_bind (ptr, addr_.c_str());
if (rc != 0)
throw error_t ();
}
inline void unbind (const std::string & addr_)
{
int rc = zmq_unbind (ptr, addr_.c_str());
if (rc != 0)
throw error_t ();
}
inline int tryUnbind (const std::string & addr_)
{
int rc = zmq_unbind (ptr, addr_.c_str());
return rc;
}
inline void connect (const std::string & addr_)
{
int rc = zmq_connect (ptr, addr_.c_str());
if (rc != 0)
throw error_t ();
}
inline void disconnect (const std::string & addr_)
{
int rc = zmq_disconnect (ptr, addr_.c_str());
if (rc != 0)
throw error_t ();
}
inline int tryDisconnect (const std::string & addr_)
{
return zmq_disconnect (ptr, addr_.c_str());
}
inline bool send (message_t &msg_, int flags_ = 0)
{
int rc = zmq_sendmsg (ptr, &msg_, flags_);
if (rc >= 0)
return true;
if (rc == -1 && zmq_errno () == EAGAIN)
return false;
throw error_t ();
}
inline bool send (message_t && msg_, int flags_ = 0)
{
int rc = zmq_sendmsg (ptr, &msg_, flags_);
if (rc >= 0)
return true;
if (rc == -1 && zmq_errno () == EAGAIN)
return false;
throw error_t ();
}
inline bool recv (message_t *msg_, int flags_ = 0)
{
int rc = zmq_recvmsg (ptr, msg_, flags_);
if (rc >= 0)
return true;
if (rc == -1 && zmq_errno () == EAGAIN)
return false;
throw error_t ();
}
private:
void *ptr;
socket_t (const socket_t&);
socket_t (socket_t&&);
void operator = (const socket_t&);
void operator = (socket_t&&);
};
}
#endif
| 5,723 |
342 | <reponame>None1637/osu-droid-1
package com.edlplan.framework.support.graphics.texture;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.os.Build;
import com.edlplan.andengine.TextureHelper;
import com.edlplan.framework.math.Vec2Int;
import com.edlplan.framework.support.graphics.BitmapUtil;
import com.edlplan.framework.utils.interfaces.Consumer;
import org.anddev.andengine.BuildConfig;
import org.anddev.andengine.opengl.texture.ITexture;
import org.anddev.andengine.opengl.texture.TextureOptions;
import org.anddev.andengine.opengl.texture.atlas.bitmap.BitmapTextureAtlas;
import org.anddev.andengine.opengl.texture.region.TextureRegion;
import org.anddev.andengine.opengl.util.GLHelper;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Set;
import ru.nsu.ccfit.zuev.osu.GlobalManager;
import ru.nsu.ccfit.zuev.osu.helper.QualityFileBitmapSource;
public class TexturePool {

    // Maximum texture dimension supported by the GL context (capped to 4096 in the ctor).
    int glMaxWidth;

    // Bitmap decode options; request premultiplied alpha where the API supports it.
    BitmapFactory.Options options = new BitmapFactory.Options() {{
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
            inPremultiplied = true;
        }
    }};

    // Directory the texture files are loaded from.
    private File dir;

    // Every texture created by this pool, so clear() can unload them all.
    private Set<ITexture> createdTextures = new HashSet<>();

    // Loaded regions keyed by texture name.
    private HashMap<String, TextureRegion> textures = new HashMap<>();

    // Atlas packing cursor: current page index, next insert position, and
    // the bottom edge of the tallest texture on the current row.
    private int currentPack = 0;

    private int currentX;

    private int currentY;

    private int lineMaxY;

    // Spacing between packed textures, to avoid sampling bleed.
    private int marginX = 2, marginY = 2;

    // Textures larger than this in either dimension are not atlas-packed.
    private int maxW, maxH;

    // Creates a pool over the given directory and queries the GL texture size limit.
    public TexturePool(File dir) {
        this.dir = dir;
        glMaxWidth = GLHelper.GlMaxTextureWidth;
        if (BuildConfig.DEBUG) System.out.println("GL_MAX_TEXTURE_SIZE = " + glMaxWidth);
        if (glMaxWidth == 0) {
            throw new RuntimeException("glMaxWidth not found");
        }
        glMaxWidth = Math.min(glMaxWidth, 4096);
        maxW = Math.min(400, glMaxWidth / 2);
        maxH = Math.min(400, glMaxWidth / 2);
    }

    // Unloads every created texture and resets all packing state.
    public void clear() {
        textures.clear();
        for (ITexture texture : createdTextures) {
            GlobalManager.getInstance().getEngine().getTextureManager().unloadTexture(texture);
        }
        createdTextures.clear();
        currentPack = 0;
        currentX = currentY = lineMaxY = 0;
    }

    // Loads a single named texture into its own (non-atlas) region.
    public void add(String name) {
        TextureInfo info = loadInfo(name);
        Bitmap bmp = loadBitmap(info);
        info.texture = TextureHelper.createRegion(bmp);
        createdTextures.add(info.texture.getTexture());
        directPut(info.name, info.texture);
        bmp.recycle();
    }

    // Packs all named textures into shared atlas pages (oversized ones get
    // their own texture). onPackDrawDone, if non-null, is called with each
    // finished atlas bitmap before it is uploaded.
    public void packAll(Iterator<String> collection, Consumer<Bitmap> onPackDrawDone) {
        clear();
        List<TextureInfo> infos = new ArrayList<>();
        for (String n : (Iterable<String>) () -> collection) {
            infos.add(loadInfo(n));
        }
        // Sort by height then width so rows pack tightly.
        Collections.sort(infos, (p1, p2) -> {
            if (p1.size.y == p2.size.y) {
                return Float.compare(p1.size.x, p2.size.x);
            } else {
                return Float.compare(p1.size.y, p2.size.y);
            }
        });
        for (TextureInfo t : infos) {
            testAddRaw(t);
        }
        // Group by assigned page; unpacked (pageIndex == -1) entries sort first.
        Collections.sort(infos, (p1, p2) -> Integer.compare(p1.pageIndex, p2.pageIndex));
        ListIterator<TextureInfo> iterator = infos.listIterator();
        // First handle the oversized textures that were not packed.
        while (iterator.hasNext()) {
            TextureInfo info = iterator.next();
            if (info.pageIndex != -1) {
                iterator.previous();
                break;
            }
            Bitmap bmp = loadBitmap(info);
            info.texture = TextureHelper.createRegion(bmp);
            createdTextures.add(info.texture.getTexture());
            directPut(info.name, info.texture);
            bmp.recycle();
        }
        Bitmap pack = null;
        if (iterator.hasNext()) {
            // A single page only needs to be as tall as its content.
            int width = glMaxWidth;
            int height = currentPack == 0 ? lineMaxY + 10 : glMaxWidth;
            pack = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        }
        if (pack == null) {
            return;
        }
        Canvas canvas = new Canvas(pack);
        Paint paint = new Paint();
        paint.setAntiAlias(true);
        // SRC mode: copy pixels verbatim, no blending with the cleared page.
        paint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.SRC));
        List<TextureInfo> toLoad = new ArrayList<>();
        Bitmap tmp;
        // Draw each page's textures into the shared bitmap, upload it as an
        // atlas, then hand out sub-regions of that atlas.
        while (iterator.hasNext()) {
            toLoad.clear();
            pack.eraseColor(Color.argb(0, 0, 0, 0));
            int currentPack = iterator.next().pageIndex;
            iterator.previous();
            while (iterator.hasNext()) {
                TextureInfo info = iterator.next();
                if (info.pageIndex != currentPack) {
                    break;
                }
                toLoad.add(info);
                canvas.drawBitmap(tmp = loadBitmap(info), info.pos.x, info.pos.y, paint);
                tmp.recycle();
            }
            if (onPackDrawDone != null) {
                onPackDrawDone.consume(pack);
            }
            final QualityFileBitmapSource source = new QualityFileBitmapSource(
                    TextureHelper.createFactoryFromBitmap(pack));
            final BitmapTextureAtlas tex = new BitmapTextureAtlas(glMaxWidth, glMaxWidth, TextureOptions.BILINEAR);
            tex.addTextureAtlasSource(source, 0, 0);
            GlobalManager.getInstance().getEngine().getTextureManager().loadTexture(tex);
            createdTextures.add(tex);
            for (TextureInfo info : toLoad) {
                info.texture = new TextureRegion(tex, info.pos.x, info.pos.y, info.size.x, info.size.y);
                info.texture.setTextureRegionBufferManaged(false);
            }
        }
        pack.recycle();
        for (TextureInfo info : infos) {
            directPut(info.name, info.texture);
        }
    }

    // Marks oversized textures as standalone; packs the rest into pages.
    private void testAddRaw(TextureInfo raw) {
        if (raw.size.x > maxW || raw.size.y > maxH) {
            raw.single = true;
            raw.pageIndex = -1;
        } else {
            tryAddToPack(raw);
        }
    }

    // Places the texture on the current row, or wraps to the next row.
    private void tryAddToPack(TextureInfo raw) {
        if (currentX + raw.size.x + marginX < glMaxWidth) {
            tryAddInLine(raw);
        } else {
            toNextLine();
            tryAddToPack(raw);
        }
    }

    // Places the texture at the cursor if it fits vertically on this page,
    // otherwise starts a new page and retries.
    private void tryAddInLine(TextureInfo raw) {
        if (currentY + raw.size.y + marginY < glMaxWidth) {
            raw.single = false;
            raw.pageIndex = currentPack;
            raw.pos = new Vec2Int(currentX, currentY);
            currentX += raw.size.x + marginX;
            lineMaxY = Math.round(Math.max(lineMaxY, currentY + raw.size.y + marginY));
        } else {
            toNewPack();
            tryAddToPack(raw);
        }
    }

    // Starts a fresh atlas page and resets the packing cursor.
    public void toNewPack() {
        currentPack++;
        currentX = 0;
        currentY = 0;
        lineMaxY = 0;
    }

    // Moves the packing cursor to the start of the next row.
    private void toNextLine() {
        currentX = 0;
        currentY = lineMaxY + marginY;
    }

    // Decodes the texture's bitmap; falls back to a 1x1 red pixel on error.
    private Bitmap loadBitmap(TextureInfo info) {
        Bitmap bmp;
        if (info.err) {
            bmp = Bitmap.createBitmap(1, 1, Bitmap.Config.ARGB_8888);
            bmp.setPixel(0, 0, Color.argb(255, 255, 0, 0));
        } else {
            try {
                bmp = BitmapFactory.decodeFile(info.file, options);
            } catch (Exception e) {
                bmp = Bitmap.createBitmap(1, 1, Bitmap.Config.ARGB_8888);
                bmp.setPixel(0, 0, Color.argb(255, 255, 0, 0));
            }
        }
        return bmp;
    }

    // Registers a region under the given name.
    protected void directPut(String name, TextureRegion region) {
        textures.put(name, region);
    }

    // Reads the file's dimensions without decoding the full bitmap; on
    // failure marks the entry as errored with a 1x1 placeholder size.
    private TextureInfo loadInfo(String name) {
        TextureInfo info = new TextureInfo();
        try {
            info.name = name;
            info.file = new File(dir, name).getAbsolutePath();
            Vec2Int size = BitmapUtil.parseBitmapSize(new File(info.file));
            info.pos = new Vec2Int(0, 0);
            info.size = size;
        } catch (Exception e) {
            e.printStackTrace();
            info.err = true;
            info.pos = new Vec2Int(0, 0);
            info.size = new Vec2Int(1, 1);
        }
        return info;
    }

    // Returns the named region, loading it on demand if missing.
    public TextureRegion get(String name) {
        TextureRegion region;
        if ((region = textures.get(name)) == null) {
            add(name);
            region = get(name);
        }
        return region;
    }

    // Per-texture bookkeeping used during packing.
    private static class TextureInfo {
        public TextureRegion texture;   // region handed out to callers
        public String name;             // lookup key
        public String file;             // absolute path on disk
        public Vec2Int size;            // pixel dimensions
        public Vec2Int pos;             // position inside the atlas page
        public boolean err = false;     // failed to read size/decode
        public boolean single = true;   // true if not atlas-packed
        public int pageIndex = -1;      // atlas page, or -1 if standalone
    }
}
| 4,216 |
6,215 | /*
* Copyright (c) 2021 Airbyte, Inc., all rights reserved.
*/
package io.airbyte.integrations.debezium.internals;
// States of the snapshot metadata flag on a change record. Presumably TRUE
// while a snapshot is in progress, LAST for the final snapshot record and
// FALSE once streaming has begun — confirm against the Debezium source-info
// contract before relying on this.
public enum SnapshotMetadata {
  TRUE,
  FALSE,
  LAST
}
| 61 |
826 | # -*- coding: utf-8 -*-
################################################################################
# Copyright 1998-2018 by authors (see AUTHORS.txt)
#
# This file is part of LuxCoreRender.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import unittest
import pyluxcore
from pyluxcoreunittests.tests.utils import *
from pyluxcoreunittests.tests.imagetest import *
# Note: keep in alphabetical order
################################################################################
# Environment camera test
################################################################################

def TestEnvironmentCamera(cls, params):
    """Run the standard scene test for the environment camera configuration."""
    StandardSceneTest(cls, params, "simple/camera-environment-area.cfg", "EnvironmentCamera")

class EnvironmentCamera(ImageTest):
    pass

EnvironmentCamera = AddTests(EnvironmentCamera, TestEnvironmentCamera, GetTestCases())
################################################################################
# Orthographic camera test
################################################################################

def TestOrthographicCamera(cls, params):
    """Run the standard scene test for the orthographic camera configuration."""
    StandardSceneTest(cls, params, "simple/camera-orthographic-area.cfg", "OrthographicCamera")

class OrthographicCamera(ImageTest):
    pass

OrthographicCamera = AddTests(OrthographicCamera, TestOrthographicCamera, GetTestCases())
################################################################################
# Orthographic depth-of-field camera test
################################################################################
def TestOrthographicDOFCamera(cls, params):
	# Renders the shared reference scene with an orthographic camera using
	# depth of field and compares the result against the reference image.
	StandardSceneTest(cls, params, "simple/camera-orthographicdof-area.cfg", "OrthographicDOFCamera")
class OrthographicDOFCamera(ImageTest):
	pass
OrthographicDOFCamera = AddTests(OrthographicDOFCamera, TestOrthographicDOFCamera, GetTestCases())
################################################################################
# Perspective camera test
################################################################################
def TestPerspectiveCamera(cls, params):
	# Renders the shared reference scene with a perspective camera and
	# compares the result against the stored reference image.
	StandardSceneTest(cls, params, "simple/camera-perspective-area.cfg", "PerspectiveCamera")
class PerspectiveCamera(ImageTest):
	pass
PerspectiveCamera = AddTests(PerspectiveCamera, TestPerspectiveCamera, GetTestCases())
################################################################################
# Perspective depth-of-field camera test
################################################################################
def TestPerspectiveDOFCamera(cls, params):
	# Renders the shared reference scene with a perspective camera using
	# depth of field and compares the result against the reference image.
	StandardSceneTest(cls, params, "simple/camera-perspectivedof-area.cfg", "PerspectiveDOFCamera")
class PerspectiveDOFCamera(ImageTest):
	pass
PerspectiveDOFCamera = AddTests(PerspectiveDOFCamera, TestPerspectiveDOFCamera, GetTestCases())
| 762 |
334 | /*
* This file is part of Sponge, licensed under the MIT License (MIT).
*
* Copyright (c) SpongePowered <https://www.spongepowered.org>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.spongepowered.forge.applaunch.loading.moddiscovery.locator;
import com.google.common.base.Predicates;
import net.minecraftforge.fml.Logging;
import net.minecraftforge.fml.loading.LibraryFinder;
import net.minecraftforge.fml.loading.moddiscovery.AbstractJarFileLocator;
import net.minecraftforge.forgespi.locating.IModFile;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.spongepowered.common.applaunch.plugin.PluginPlatformConstants;
import org.spongepowered.forge.applaunch.loading.moddiscovery.ModFileParsers;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Enumeration;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Predicate;
import java.util.stream.Collectors;
public final class ClasspathPluginLocator extends AbstractJarFileLocator {
private static final Logger LOGGER = LogManager.getLogger();
private static final String PLUGINS_JSON = "META-INF/sponge_plugins.json";
private Set<Path> modCoords;
@Override
public List<IModFile> scanMods() {
return this.modCoords.stream().
map(mc -> ModFileParsers.newPluginInstance(mc, this, PluginPlatformConstants.METADATA_FILE_NAME)).
peek(f -> this.modJars.compute(f, (mf, fs)->createFileSystem(mf))).
collect(Collectors.toList());
}
@Override
public String name() {
return "plugin classpath";
}
@Override
public void initArguments(final Map<String, ?> arguments) {
try {
this.modCoords = new LinkedHashSet<>();
this.locateMods(ClasspathPluginLocator.PLUGINS_JSON, "classpath_plugin", Predicates.alwaysTrue());
} catch (IOException e) {
ClasspathPluginLocator.LOGGER.fatal(Logging.CORE,"Error trying to find resources", e);
throw new RuntimeException("wha?", e);
}
}
private void locateMods(final String resource, final String name, final Predicate<Path> filter) throws IOException {
final Enumeration<URL> pluginJsons = ClassLoader.getSystemClassLoader().getResources(resource);
while (pluginJsons.hasMoreElements()) {
final URL url = pluginJsons.nextElement();
final Path path = LibraryFinder.findJarPathFor(resource, name, url);
if (Files.isDirectory(path))
continue;
if (filter.test(path)) {
ClasspathPluginLocator.LOGGER.debug(Logging.CORE, "Found classpath plugin: {}", path);
this.modCoords.add(path);
}
}
}
}
| 1,366 |
2,151 | /*
** 2011 March 18
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
**
*************************************************************************
**
** This file contains a VFS "shim" - a layer that sits in between the
** pager and the real VFS.
**
** This particular shim enforces a multiplex system on DB files.
** This shim shards/partitions a single DB file into smaller
** "chunks" such that the total DB file size may exceed the maximum
** file size of the underlying file system.
**
*/
#ifndef SQLITE_TEST_MULTIPLEX_H
#define SQLITE_TEST_MULTIPLEX_H
/*
** CAPI: File-control Operations Supported by Multiplex VFS
**
** Values interpreted by the xFileControl method of a Multiplex VFS db file-handle.
**
** MULTIPLEX_CTRL_ENABLE:
** This file control is used to enable or disable the multiplex
** shim.
**
** MULTIPLEX_CTRL_SET_CHUNK_SIZE:
** This file control is used to set the maximum allowed chunk
** size for a multiplex file set. The chunk size should be
** a multiple of SQLITE_MAX_PAGE_SIZE, and will be rounded up
** if not.
**
** MULTIPLEX_CTRL_SET_MAX_CHUNKS:
** This file control is used to set the maximum number of chunks
** allowed to be used for a multiplex file set.
*/
#define MULTIPLEX_CTRL_ENABLE 214014
#define MULTIPLEX_CTRL_SET_CHUNK_SIZE 214015
#define MULTIPLEX_CTRL_SET_MAX_CHUNKS 214016
#ifdef __cplusplus
extern "C" {
#endif
/*
** CAPI: Initialize the multiplex VFS shim - sqlite3_multiplex_initialize()
**
** Use the VFS named zOrigVfsName as the VFS that does the actual work.
** Use the default if zOrigVfsName==NULL.
**
** The multiplex VFS shim is named "multiplex". It will become the default
** VFS if makeDefault is non-zero.
**
** An auto-extension is registered which will make the function
** multiplex_control() available to database connections. This
** function gives access to the xFileControl interface of the
** multiplex VFS shim.
**
** SELECT multiplex_control(<op>,<val>);
**
** <op>=1 MULTIPLEX_CTRL_ENABLE
** <val>=0 disable
** <val>=1 enable
**
** <op>=2 MULTIPLEX_CTRL_SET_CHUNK_SIZE
** <val> int, chunk size
**
** <op>=3 MULTIPLEX_CTRL_SET_MAX_CHUNKS
** <val> int, max chunks
**
** THIS ROUTINE IS NOT THREADSAFE. Call this routine exactly once
** during start-up.
*/
extern int sqlite3_multiplex_initialize(const char *zOrigVfsName, int makeDefault);
/*
** CAPI: Shutdown the multiplex system - sqlite3_multiplex_shutdown()
**
** All SQLite database connections must be closed before calling this
** routine.
**
** THIS ROUTINE IS NOT THREADSAFE. Call this routine exactly once while
** shutting down in order to free all remaining multiplex groups.
*/
extern int sqlite3_multiplex_shutdown(int eForce);
#ifdef __cplusplus
} /* End of the 'extern "C"' block */
#endif
#endif /* SQLITE_TEST_MULTIPLEX_H */
| 1,018 |
696 | <gh_stars>100-1000
/*
* Copyright (C) 2020 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.strata.basics.date;
import java.io.Serializable;
import java.lang.invoke.MethodHandles;
import java.time.LocalDate;
import java.time.Period;
import java.time.YearMonth;
import java.util.Optional;
import org.joda.beans.ImmutableBean;
import org.joda.beans.JodaBeanUtils;
import org.joda.beans.MetaBean;
import org.joda.beans.TypedMetaBean;
import org.joda.beans.gen.BeanDefinition;
import org.joda.beans.gen.ImmutableConstructor;
import org.joda.beans.gen.PropertyDefinition;
import org.joda.beans.impl.light.LightMetaBean;
import com.opengamma.strata.collect.ArgChecker;
/**
 * Instructions to obtain a specific date from a sequence of dates.
 * <p>
 * A {@link DateSequence} can be complex, with interlinked sub-sequences.
 * This class allows the instructions for specifying a single date from the sequence to be expressed.
 * <p>
 * For example, the "base sequence" of a future is often March, June, September and December.
 * But additionally, the nearest two "serial" months are also listed.
 * Together these make the "full sequence".
 * <p>
 * This class can be setup to select from either the base or full sequence, and starting from a specific
 * year-month or from the input date plus a period.
 */
@BeanDefinition(style = "light")
public final class SequenceDate
    implements ImmutableBean, Serializable {

  /**
   * The base year-month.
   * <p>
   * The start of this month is used instead of the input date when starting to count the sequence.
   */
  @PropertyDefinition(get = "optional")
  private final YearMonth yearMonth;
  /**
   * The minimum period before using the sequence number.
   * <p>
   * This is added to the input date before starting to count the sequence.
   */
  @PropertyDefinition(get = "optional")
  private final Period minimumPeriod;
  /**
   * The 1-based sequence number.
   * <p>
   * A value of 1 obtains the first date in the sequence.
   */
  @PropertyDefinition(validate = "ArgChecker.notNegativeOrZero")
  private final int sequenceNumber;
  /**
   * Whether to use the full sequence (true) or base sequence (false).
   * <p>
   * Many date sequences have two interlinked sequences.
   * One is considered to be the base sequence, selected by setting this to false.
   * The other is considered to be the full sequence, selected by setting this to true.
   * <p>
   * For example, the "base sequence" of a future is often March, June, September and December.
   * But additionally, the nearest two "serial" months are also listed.
   * Together these make the "full sequence".
   */
  @PropertyDefinition
  private final boolean fullSequence;

  //-------------------------------------------------------------------------
  /**
   * Obtains an instance that selects the next base sequence date on or after the start of the specified month.
   *
   * @param yearMonth  the month to start from
   * @return the sequence date
   */
  public static SequenceDate base(YearMonth yearMonth) {
    return new SequenceDate(yearMonth, null, 1, false);
  }

  /**
   * Obtains an instance that selects the nth base sequence date on or after the start of the specified month.
   *
   * @param yearMonth  the month to start from
   * @param sequenceNumber  the 1-based sequence number of the futures, not zero or negative
   * @return the sequence date
   */
  public static SequenceDate base(YearMonth yearMonth, int sequenceNumber) {
    return new SequenceDate(yearMonth, null, sequenceNumber, false);
  }

  /**
   * Obtains an instance that selects the nth base sequence date on or after the input date.
   *
   * @param sequenceNumber  the 1-based sequence number of the futures
   * @return the sequence date
   */
  public static SequenceDate base(int sequenceNumber) {
    return new SequenceDate(null, null, sequenceNumber, false);
  }

  /**
   * Obtains an instance that selects the nth base sequence date on or after the input date
   * once the minimum period is added.
   *
   * @param minimumPeriod  minimum period between the input date and the first sequence date
   * @param sequenceNumber  the 1-based sequence number of the futures, not zero or negative
   * @return the sequence date
   */
  public static SequenceDate base(Period minimumPeriod, int sequenceNumber) {
    return new SequenceDate(null, minimumPeriod, sequenceNumber, false);
  }

  //-------------------------------------------------------------------------
  /**
   * Obtains an instance that selects the next full sequence date on or after the start of the specified month.
   *
   * @param yearMonth  the month to start from
   * @return the sequence date
   */
  public static SequenceDate full(YearMonth yearMonth) {
    return new SequenceDate(yearMonth, null, 1, true);
  }

  /**
   * Obtains an instance that selects the nth full sequence date on or after the start of the specified month.
   *
   * @param yearMonth  the month to start from
   * @param sequenceNumber  the 1-based sequence number of the futures, not zero or negative
   * @return the sequence date
   */
  public static SequenceDate full(YearMonth yearMonth, int sequenceNumber) {
    return new SequenceDate(yearMonth, null, sequenceNumber, true);
  }

  /**
   * Obtains an instance that selects the nth full sequence date on or after the input date.
   *
   * @param sequenceNumber  the 1-based sequence number of the futures
   * @return the sequence date
   */
  public static SequenceDate full(int sequenceNumber) {
    return new SequenceDate(null, null, sequenceNumber, true);
  }

  /**
   * Obtains an instance that selects the nth full sequence date on or after the input date
   * once the minimum period is added.
   *
   * @param minimumPeriod  minimum period between the input date and the first sequence date
   * @param sequenceNumber  the 1-based sequence number of the futures, not zero or negative
   * @return the sequence date
   */
  public static SequenceDate full(Period minimumPeriod, int sequenceNumber) {
    return new SequenceDate(null, minimumPeriod, sequenceNumber, true);
  }

  //-------------------------------------------------------------------------
  @ImmutableConstructor
  private SequenceDate(
      YearMonth yearMonth,
      Period minimumPeriod,
      int sequenceNumber,
      boolean fullSequence) {

    // year-month and minimum-period are mutually exclusive starting points
    if (yearMonth != null && minimumPeriod != null) {
      throw new IllegalArgumentException("Minimum period cannot be set when year-month is present");
    }
    if (minimumPeriod != null && minimumPeriod.isNegative()) {
      throw new IllegalArgumentException("Minimum period cannot be negative");
    }
    this.yearMonth = yearMonth;
    // normalize a zero period to null so equals/hashCode treat it as "absent"
    this.minimumPeriod = Period.ZERO.equals(minimumPeriod) ? null : minimumPeriod;
    this.sequenceNumber = ArgChecker.notNegativeOrZero(sequenceNumber, "sequenceNumber");
    this.fullSequence = fullSequence;
  }

  //-------------------------------------------------------------------------
  // finds the matching date in the sequence
  // - with a year-month: count from the first day of that month (same-day allowed)
  // - otherwise: count from the input date, shifted by the minimum period if set
  LocalDate selectDate(LocalDate inputDate, DateSequence sequence, boolean allowSame) {
    DateSequence seq = this.fullSequence ? sequence : sequence.baseSequence();
    if (yearMonth != null) {
      return seq.nthOrSame(yearMonth.atDay(1), sequenceNumber);
    }
    LocalDate startDate = minimumPeriod != null ? inputDate.plus(minimumPeriod) : inputDate;
    return allowSame ? seq.nthOrSame(startDate, sequenceNumber) : seq.nth(startDate, sequenceNumber);
  }

  //------------------------- AUTOGENERATED START -------------------------
  /**
   * The meta-bean for {@code SequenceDate}.
   */
  private static final TypedMetaBean<SequenceDate> META_BEAN =
      LightMetaBean.of(
          SequenceDate.class,
          MethodHandles.lookup(),
          new String[] {
              "yearMonth",
              "minimumPeriod",
              "sequenceNumber",
              "fullSequence"},
          new Object[0]);

  /**
   * The meta-bean for {@code SequenceDate}.
   * @return the meta-bean, not null
   */
  public static TypedMetaBean<SequenceDate> meta() {
    return META_BEAN;
  }

  static {
    MetaBean.register(META_BEAN);
  }

  /**
   * The serialization version id.
   */
  private static final long serialVersionUID = 1L;

  @Override
  public TypedMetaBean<SequenceDate> metaBean() {
    return META_BEAN;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the base year-month.
   * <p>
   * The start of this month is used instead of the input date when starting to count the sequence.
   * @return the optional value of the property, not null
   */
  public Optional<YearMonth> getYearMonth() {
    return Optional.ofNullable(yearMonth);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the minimum period before using the sequence number.
   * <p>
   * This is added to the input date before starting to count the sequence.
   * @return the optional value of the property, not null
   */
  public Optional<Period> getMinimumPeriod() {
    return Optional.ofNullable(minimumPeriod);
  }

  //-----------------------------------------------------------------------
  /**
   * Gets the 1-based sequence number.
   * <p>
   * A value of 1 obtains the first date in the sequence.
   * @return the value of the property
   */
  public int getSequenceNumber() {
    return sequenceNumber;
  }

  //-----------------------------------------------------------------------
  /**
   * Gets whether to use the full sequence (true) or base sequence (false).
   * <p>
   * Many date sequences have two interlinked sequences.
   * One is considered to be the base sequence, selected by setting this to false.
   * The other is considered to be the full sequence, selected by setting this to true.
   * <p>
   * For example, the "base sequence" of a future is often March, June, September and December.
   * But additionally, the nearest two "serial" months are also listed.
   * Together these make the "full sequence".
   * @return the value of the property
   */
  public boolean isFullSequence() {
    return fullSequence;
  }

  //-----------------------------------------------------------------------
  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (obj != null && obj.getClass() == this.getClass()) {
      SequenceDate other = (SequenceDate) obj;
      return JodaBeanUtils.equal(yearMonth, other.yearMonth) &&
          JodaBeanUtils.equal(minimumPeriod, other.minimumPeriod) &&
          (sequenceNumber == other.sequenceNumber) &&
          (fullSequence == other.fullSequence);
    }
    return false;
  }

  @Override
  public int hashCode() {
    int hash = getClass().hashCode();
    hash = hash * 31 + JodaBeanUtils.hashCode(yearMonth);
    hash = hash * 31 + JodaBeanUtils.hashCode(minimumPeriod);
    hash = hash * 31 + JodaBeanUtils.hashCode(sequenceNumber);
    hash = hash * 31 + JodaBeanUtils.hashCode(fullSequence);
    return hash;
  }

  @Override
  public String toString() {
    StringBuilder buf = new StringBuilder(160);
    buf.append("SequenceDate{");
    buf.append("yearMonth").append('=').append(JodaBeanUtils.toString(yearMonth)).append(',').append(' ');
    buf.append("minimumPeriod").append('=').append(JodaBeanUtils.toString(minimumPeriod)).append(',').append(' ');
    buf.append("sequenceNumber").append('=').append(JodaBeanUtils.toString(sequenceNumber)).append(',').append(' ');
    buf.append("fullSequence").append('=').append(JodaBeanUtils.toString(fullSequence));
    buf.append('}');
    return buf.toString();
  }

  //-------------------------- AUTOGENERATED END --------------------------
}
| 3,514 |
1,396 | package com.necer.ncalendar.activity;
import android.os.Bundle;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import android.view.View;
import com.necer.calendar.Miui10Calendar;
import com.necer.enumeration.CheckModel;
import com.necer.ncalendar.R;
import com.necer.ncalendar.painter.LigaturePainter;
import com.necer.ncalendar.painter.TicketPainter;
import org.joda.time.LocalDate;
import java.util.HashMap;
import java.util.Map;
/**
* Created by necer on 2019/1/4.
*/
/**
 * Demo activity showing how to swap custom {@code CalendarPainter}
 * implementations on a {@link Miui10Calendar} at runtime.
 */
public class CustomCalendarActivity extends AppCompatActivity {

    Miui10Calendar miui10Calendar;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_custom);
        miui10Calendar = findViewById(R.id.miui10Calendar);
        miui10Calendar.setCheckMode(CheckModel.MULTIPLE);
        // Start with the ligature painter; the buttons below can switch it.
        applyLigaturePainter();
    }

    /**
     * Button callback: switches the calendar to the ligature painter.
     *
     * @param view the clicked button (unused)
     */
    public void ligaturePainter(View view) {
        applyLigaturePainter();
    }

    // Creates and installs the ligature painter; shared by onCreate and the
    // button handler to avoid duplicating the setup.
    private void applyLigaturePainter() {
        LigaturePainter painter = new LigaturePainter(this);
        miui10Calendar.setCalendarPainter(painter);
    }

    /**
     * Button callback: switches the calendar to the ticket painter, showing
     * a demo price tag on a fixed set of dates.
     *
     * @param view the clicked button (unused)
     */
    public void ticketPainter(View view) {
        TicketPainter ticketPainter = new TicketPainter(this, miui10Calendar);
        // All demo dates carry the same price; build the map data-driven
        // instead of repeating the put(...) call per date.
        String[] priceDates = {
                "2019-06-07", "2019-07-07", "2019-06-30", "2019-07-03", "2019-07-04",
                "2019-07-10", "2019-07-15", "2019-07-30", "2019-08-04", "2019-08-29"};
        Map<LocalDate, String> priceMap = new HashMap<>();
        for (String date : priceDates) {
            priceMap.put(new LocalDate(date), "¥350");
        }
        ticketPainter.setPriceMap(priceMap);
        miui10Calendar.setCalendarPainter(ticketPainter);
    }
}
| 857 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.