max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
9,953 |
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
# System Imports
import sys
from twisted.trial import unittest
try:
import cPickle as pickle
except ImportError:
import pickle
import io
try:
from cStringIO import StringIO as _oldStyleCStringIO
except ImportError:
skipStringIO = "No cStringIO available."
else:
skipStringIO = None
# copyreg was renamed from copy_reg between Python 2 and 3; fall back for 2.
try:
    import copyreg
except ImportError:  # was a bare except:, which would also hide unrelated errors
    import copy_reg as copyreg
# Twisted Imports
from twisted.persisted import styles, aot, crefutil
from twisted.python.compat import _PY3
class VersionTests(unittest.TestCase):
    """
    Tests for the L{styles.Versioned} upgrade-on-unpickle machinery.
    """

    def test_nullVersionUpgrade(self):
        # Pickle an instance of an *unversioned* class, then rebind the class
        # name to a Versioned definition so that unpickling plus
        # styles.doUpgrade() runs the version-1 upgrade hook.
        global NullVersioned
        class NullVersioned(object):
            def __init__(self):
                self.ok = 0
        pkcl = pickle.dumps(NullVersioned())
        class NullVersioned(styles.Versioned, object):
            persistenceVersion = 1
            def upgradeToVersion1(self):
                self.ok = 1
        mnv = pickle.loads(pkcl)
        styles.doUpgrade()
        assert mnv.ok, "initial upgrade not run!"

    def test_versionUpgrade(self):
        # An instance pickled at version 2 must run the version-3 and
        # version-4 hooks exactly once when reloaded at version 4.
        global MyVersioned
        class MyVersioned(styles.Versioned):
            persistenceVersion = 2
            # persistenceForgets drops unpicklable attributes before dumping.
            persistenceForgets = ['garbagedata']
            v3 = 0
            v4 = 0
            def __init__(self):
                self.somedata = 'xxx'
                self.garbagedata = lambda q: 'cant persist'
            def upgradeToVersion3(self):
                self.v3 += 1
            def upgradeToVersion4(self):
                self.v4 += 1
        mv = MyVersioned()
        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
        pickl = pickle.dumps(mv)
        MyVersioned.persistenceVersion = 4
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3, "didn't do version 3 upgrade"
        assert obj.v4, "didn't do version 4 upgrade"
        # A second round trip at the current version must not re-upgrade.
        pickl = pickle.dumps(obj)
        obj = pickle.loads(pickl)
        styles.doUpgrade()
        assert obj.v3 == 1, "upgraded unnecessarily"
        assert obj.v4 == 1, "upgraded unnecessarily"

    def test_nonIdentityHash(self):
        # Two distinct instances share the same __hash__ value; upgrades must
        # still be applied to each instance, not keyed by hash.
        global ClassWithCustomHash
        class ClassWithCustomHash(styles.Versioned):
            def __init__(self, unique, hash):
                self.unique = unique
                self.hash = hash
            def __hash__(self):
                return self.hash
        v1 = ClassWithCustomHash('v1', 0)
        v2 = ClassWithCustomHash('v2', 0)
        pkl = pickle.dumps((v1, v2))
        del v1, v2
        ClassWithCustomHash.persistenceVersion = 1
        ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
        v1, v2 = pickle.loads(pkl)
        styles.doUpgrade()
        self.assertEqual(v1.unique, 'v1')
        self.assertEqual(v2.unique, 'v2')
        self.assertTrue(v1.upgraded)
        self.assertTrue(v2.upgraded)

    def test_upgradeDeserializesObjectsRequiringUpgrade(self):
        # An upgrade hook that itself unpickles another upgradable object
        # (and calls doUpgrade reentrantly) must leave both objects upgraded.
        global ToyClassA, ToyClassB
        class ToyClassA(styles.Versioned):
            pass
        class ToyClassB(styles.Versioned):
            pass
        x = ToyClassA()
        y = ToyClassB()
        pklA, pklB = pickle.dumps(x), pickle.dumps(y)
        del x, y
        ToyClassA.persistenceVersion = 1
        def upgradeToVersion1(self):
            self.y = pickle.loads(pklB)
            styles.doUpgrade()
        ToyClassA.upgradeToVersion1 = upgradeToVersion1
        ToyClassB.persistenceVersion = 1
        ToyClassB.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
        x = pickle.loads(pklA)
        styles.doUpgrade()
        self.assertTrue(x.y.upgraded)
class VersionedSubClass(styles.Versioned):
    """Direct L{styles.Versioned} subclass; fixture for L{AybabtuTests}."""
    pass
class SecondVersionedSubClass(styles.Versioned):
    """A second direct L{styles.Versioned} subclass; fixture for L{AybabtuTests}."""
    pass
class VersionedSubSubClass(VersionedSubClass):
    """Grandchild of L{styles.Versioned} via L{VersionedSubClass}."""
    pass
class VersionedDiamondSubClass(VersionedSubSubClass, SecondVersionedSubClass):
    """Diamond-shaped inheritance fixture: two routes back to L{styles.Versioned}."""
    pass
class AybabtuTests(unittest.TestCase):
    """
    L{styles._aybabtu} gets all of classes in the inheritance hierarchy of its
    argument that are strictly between L{Versioned} and the class itself.
    """

    def test_aybabtuStrictEmpty(self):
        """
        L{styles._aybabtu} of L{Versioned} itself is an empty list.
        """
        self.assertEqual(styles._aybabtu(styles.Versioned), [])

    def test_aybabtuStrictSubclass(self):
        """
        There are no classes I{between} L{VersionedSubClass} and L{Versioned},
        so L{styles._aybabtu} returns an empty list.
        """
        self.assertEqual(styles._aybabtu(VersionedSubClass), [])

    def test_aybabtuSubsubclass(self):
        """
        With a sub-sub-class of L{Versioned}, L{styles._aybabtu} returns a list
        containing the intervening subclass.
        """
        self.assertEqual(styles._aybabtu(VersionedSubSubClass),
                         [VersionedSubClass])

    def test_aybabtuStrict(self):
        """
        For a diamond-shaped inheritance graph, L{styles._aybabtu} returns a
        list containing I{both} intermediate subclasses.
        """
        # The expected ordering below pins the traversal order of _aybabtu.
        self.assertEqual(
            styles._aybabtu(VersionedDiamondSubClass),
            [VersionedSubSubClass, VersionedSubClass, SecondVersionedSubClass])
class MyEphemeral(styles.Ephemeral):
    """L{styles.Ephemeral} subclass with one attribute; see L{EphemeralTests}."""
    def __init__(self, x):
        self.x = x
class EphemeralTests(unittest.TestCase):

    def test_ephemeral(self):
        """
        A pickled L{styles.Ephemeral} subclass loads back as a bare
        L{styles.Ephemeral} with no instance state.
        """
        o = MyEphemeral(3)
        self.assertEqual(o.__class__, MyEphemeral)
        self.assertEqual(o.x, 3)
        pickl = pickle.dumps(o)
        o = pickle.loads(pickl)
        # Class identity and the attribute are both discarded by pickling.
        self.assertEqual(o.__class__, styles.Ephemeral)
        self.assertFalse(hasattr(o, 'x'))
class Pickleable:
    """
    Trivial picklable value holder used by the pickling tests.
    """

    def __init__(self, x):
        self.x = x

    def getX(self):
        """
        Return the value given to the constructor.
        """
        return self.x
class NotPickleable(object):
    """
    A class that is not pickleable.
    """

    def __reduce__(self):
        """
        Refuse to produce a reduction tuple; pickling always fails.
        """
        raise TypeError("Not serializable.")
class CopyRegistered(object):
    """
    A class that is pickleable only because it is registered with the
    C{copyreg} module.
    """

    def __init__(self):
        """
        Hold a L{NotPickleable} so that default pickling of this object
        would fail.
        """
        self.notPickleable = NotPickleable()
class CopyRegisteredLoaded(object):
    """
    L{CopyRegistered} after unserialization.
    """
    # Intentionally empty: only the class identity matters to the tests.
def reduceCopyRegistered(cr):
    """
    Externally implement C{__reduce__} for L{CopyRegistered}.

    @param cr: The L{CopyRegistered} instance.

    @return: a 2-tuple of callable and argument list, in this case
        L{CopyRegisteredLoaded} and no arguments.
    """
    return CopyRegisteredLoaded, ()
# Register the external reducer so CopyRegistered becomes pickleable.
copyreg.pickle(CopyRegistered, reduceCopyRegistered)
class A:
    """
    dummy class
    """
    def amethod(self):
        # Deliberately empty: only the bound-method object matters to tests.
        pass
class B:
    """
    dummy class
    """
    def bmethod(self):
        # Deliberately empty: only the bound-method object matters to tests.
        pass
def funktion():
    """A do-nothing module-level function; jellied in test_basicIdentity."""
    pass
class PicklingTests(unittest.TestCase):
    """Test pickling of extra object types."""

    def test_module(self):
        # Modules round-trip through pickle once styles is imported.
        pickl = pickle.dumps(styles)
        o = pickle.loads(pickl)
        self.assertEqual(o, styles)

    def test_classMethod(self):
        """
        After importing L{twisted.persisted.styles}, it is possible to pickle
        classmethod objects.
        """
        pickl = pickle.dumps(Pickleable.getX)
        o = pickle.loads(pickl)
        self.assertEqual(o, Pickleable.getX)
    # trial honours a `skip` attribute on the test method itself.
    if sys.version_info > (3, 4):
        test_classMethod.skip = (
            "As of Python 3.4 it is no longer possible to globally change "
            "the behavior of function pickling."
        )

    def test_instanceMethod(self):
        obj = Pickleable(4)
        pickl = pickle.dumps(obj.getX)
        o = pickle.loads(pickl)
        # The reloaded bound method must still see the instance state.
        self.assertEqual(o(), 4)
        self.assertEqual(type(o), type(obj.getX))

    def test_stringIO(self):
        f = _oldStyleCStringIO()
        f.write("abc")
        pickl = pickle.dumps(f)
        o = pickle.loads(pickl)
        self.assertEqual(type(o), type(f))
        self.assertEqual(f.getvalue(), "abc")
    # Skipped wholesale where cStringIO could not be imported (Python 3).
    if skipStringIO:
        test_stringIO.skip = skipStringIO
class StringIOTransitionTests(unittest.TestCase):
    """
    When pickling a cStringIO in Python 2, it should unpickle as a BytesIO or a
    StringIO in Python 3, depending on the type of its contents.
    """
    if not _PY3:
        skip = "In Python 2 we can still unpickle cStringIO as such."

    def test_unpickleBytesIO(self):
        """
        A cStringIO pickled with bytes in it will yield an L{io.BytesIO} on
        python 3.
        """
        # NOTE(review): the docstring above says BytesIO but the assertion
        # below checks io.StringIO; the payload (S'test') decodes as text, so
        # StringIO appears to be the intended outcome -- confirm upstream and
        # fix whichever side is wrong.
        pickledStringIWithText = (
            b"ctwisted.persisted.styles\nunpickleStringI\np0\n"
            b"(S'test'\np1\nI0\ntp2\nRp3\n."
        )
        loaded = pickle.loads(pickledStringIWithText)
        self.assertIsInstance(loaded, io.StringIO)
        self.assertEqual(loaded.getvalue(), u"test")
class EvilSourceror:
    """
    Circular-reference fixture: attributes C{a} and C{b} alias the instance
    itself, and C{c} holds the constructor argument.
    """

    def __init__(self, x):
        # self.a is self, so assigning through self.a lands on self directly.
        self.a = self
        self.b = self
        self.c = x
class NonDictState:
    """
    Pickles via a non-dict C{state} attribute instead of C{__dict__}.
    """

    def __getstate__(self):
        """Expose C{self.state} as the pickled state."""
        return self.state

    def __setstate__(self, state):
        """Restore the pickled state onto C{self.state}."""
        self.state = state
class AOTTests(unittest.TestCase):
    """
    Round-trip tests for L{aot} source-code persistence (jelly/unjelly).
    """

    def test_simpleTypes(self):
        # One of each basic Python type that jellyToSource must handle.
        obj = (1, 2.0, 3j, True, slice(1, 2, 3), 'hello', u'world',
               sys.maxsize + 1, None, Ellipsis)
        rtObj = aot.unjellyFromSource(aot.jellyToSource(obj))
        self.assertEqual(obj, rtObj)

    def test_methodSelfIdentity(self):
        # A bound method stored on another object must come back bound to the
        # corresponding unjellied instance.
        a = A()
        b = B()
        a.bmethod = b.bmethod
        b.a = a
        im_ = aot.unjellyFromSource(aot.jellyToSource(b)).a.bmethod
        self.assertEqual(aot._selfOfMethod(im_).__class__,
                         aot._classOfMethod(im_))

    def test_methodNotSelfIdentity(self):
        """
        If a class change after an instance has been created,
        L{aot.unjellyFromSource} shoud raise a C{TypeError} when trying to
        unjelly the instance.
        """
        a = A()
        b = B()
        a.bmethod = b.bmethod
        b.a = a
        savedbmethod = B.bmethod
        del B.bmethod
        try:
            self.assertRaises(TypeError, aot.unjellyFromSource,
                              aot.jellyToSource(b))
        finally:
            # Restore the class method so other tests are unaffected.
            B.bmethod = savedbmethod

    def test_unsupportedType(self):
        """
        L{aot.jellyToSource} should raise a C{TypeError} when trying to jelly
        an unknown type without a C{__dict__} property or C{__getstate__}
        method.
        """
        class UnknownType(object):
            @property
            def __dict__(self):
                raise AttributeError()
        self.assertRaises(TypeError, aot.jellyToSource, UnknownType())

    def test_basicIdentity(self):
        # Anyone wanting to make this datastructure more complex, and thus this
        # test more comprehensive, is welcome to do so.
        aj = aot.AOTJellier().jellyToAO
        d = {'hello': 'world', "method": aj}
        l = [1, 2, 3,
             "he\tllo\n\n\"x world!",
             u"goodbye \n\t\u1010 world!",
             1, 1.0, 100 ** 100, unittest, aot.AOTJellier, d,
             funktion
             ]
        t = tuple(l)
        # Self-referencing list plus the same tuple twice exercises identity
        # preservation in the round trip.
        l.append(l)
        l.append(t)
        l.append(t)
        uj = aot.unjellyFromSource(aot.jellyToSource([l, l]))
        assert uj[0] is uj[1]
        assert uj[1][0:5] == l[0:5]

    def test_nonDictState(self):
        # Objects using __getstate__/__setstate__ round-trip their state.
        a = NonDictState()
        a.state = "meringue!"
        assert aot.unjellyFromSource(aot.jellyToSource(a)).state == a.state

    def test_copyReg(self):
        """
        L{aot.jellyToSource} and L{aot.unjellyFromSource} honor functions
        registered in the pickle copy registry.
        """
        uj = aot.unjellyFromSource(aot.jellyToSource(CopyRegistered()))
        self.assertIsInstance(uj, CopyRegisteredLoaded)

    def test_funkyReferences(self):
        # Nested self-referencing objects keep their aliasing structure.
        o = EvilSourceror(EvilSourceror([]))
        j1 = aot.jellyToAOT(o)
        oj = aot.unjellyFromAOT(j1)
        assert oj.a is oj
        assert oj.a.b is oj.b
        assert oj.c is not oj.c.c

    def test_circularTuple(self):
        """
        L{aot.jellyToAOT} can persist circular references through tuples.
        """
        l = []
        t = (l, 4321)
        l.append(t)
        j1 = aot.jellyToAOT(l)
        oj = aot.unjellyFromAOT(j1)
        self.assertIsInstance(oj[0], tuple)
        self.assertIs(oj[0][0], oj)
        self.assertEqual(oj[0][1], 4321)
class CrefUtilTests(unittest.TestCase):
    """
    Tests for L{crefutil}.
    """

    def test_dictUnknownKey(self):
        """
        L{crefutil._DictKeyAndValue} only support keys C{0} and C{1}.
        """
        d = crefutil._DictKeyAndValue({})
        self.assertRaises(RuntimeError, d.__setitem__, 2, 3)

    def test_deferSetMultipleTimes(self):
        """
        L{crefutil._Defer} can be assigned a key only one time.
        """
        d = crefutil._Defer()
        d[0] = 1
        self.assertRaises(RuntimeError, d.__setitem__, 0, 1)

    def test_containerWhereAllElementsAreKnown(self):
        """
        A L{crefutil._Container} where all of its elements are known at
        construction time is nonsensical and will result in errors in any call
        to addDependant.
        """
        container = crefutil._Container([1, 2, 3], list)
        self.assertRaises(AssertionError,
                          container.addDependant, {}, "ignore-me")

    def test_dontPutCircularReferencesInDictionaryKeys(self):
        """
        If a dictionary key contains a circular reference (which is probably a
        bad practice anyway) it will be resolved by a
        L{crefutil._DictKeyAndValue}, not by placing a L{crefutil.NotKnown}
        into a dictionary key.
        """
        # NotKnown.__hash__ raises AssertionError, which makes dict insertion fail.
        self.assertRaises(AssertionError,
                          dict().__setitem__, crefutil.NotKnown(), "value")

    def test_dontCallInstanceMethodsThatArentReady(self):
        """
        L{crefutil._InstanceMethod} raises L{AssertionError} to indicate it
        should not be called. This should not be possible with any of its API
        clients, but is provided for helping to debug.
        """
        self.assertRaises(AssertionError,
                          crefutil._InstanceMethod(
                              "no_name", crefutil.NotKnown(), type))
# NOTE(review): only a subset of the TestCase classes defined above is listed
# here -- presumably a historical artifact, since trial discovers TestCase
# subclasses by itself; confirm whether this list is still consumed anywhere.
testCases = [VersionTests, EphemeralTests, PicklingTests]
| 6,820 |
511 | <filename>os/drivers/wireless/scsc/misc/panicmon.c<gh_stars>100-1000
/*****************************************************************************
*
* Copyright 2017 Samsung Electronics All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific
* language governing permissions and limitations under the License.
*
****************************************************************************/
#include "panicmon.h"
#include "scsc_mif_abs.h"
#include "mxman.h"
#include "debug_scsc.h"
#ifdef CONFIG_SLSI_WLAN_STATS
/* Number of firmware-initiated panics seen by panicmon_isr(). */
int firmware_triggered_panic = 0;
#endif
/*
 * Reset-request interrupt handler: the firmware has signalled a panic.
 * Logs the context and escalates to mxman failure handling.
 */
static void panicmon_isr(int irq, void *data)
{
	struct panicmon *pm = (struct panicmon *)data;

	/* Avoid unused parameter error */
	(void)irq;

#ifdef CONFIG_SLSI_WLAN_STATS
	firmware_triggered_panic++;
#endif
	SLSI_INFO_NODEV("panicmon=%p panicmon->mx=%p mxman=%p\n", pm, pm->mx, scsc_mx_get_mxman(pm->mx));
	mxman_fail(scsc_mx_get_mxman(pm->mx));
}
/*
 * Attach this panic monitor to @mx and hook panicmon_isr into the MIF
 * abstraction's reset-request interrupt.
 */
void panicmon_init(struct panicmon *panicmon, struct scsc_mx *mx)
{
	struct scsc_mif_abs *mif_abs = scsc_mx_get_mif_abs(mx);

	panicmon->mx = mx;
	/* register isr with mif abstraction */
	mif_abs->irq_reg_reset_request_handler(mif_abs, panicmon_isr, (void *)panicmon);
}
/* Detach the reset-request interrupt handler registered by panicmon_init(). */
void panicmon_deinit(struct panicmon *panicmon)
{
	struct scsc_mif_abs *mif_abs = scsc_mx_get_mif_abs(panicmon->mx);

	mif_abs->irq_unreg_reset_request_handler(mif_abs);
}
| 617 |
3,436 | <reponame>seewpx/range-v3
//! [transform example]
#include <iostream>
#include <vector>
#include <range/v3/view/transform.hpp>
int main()
{
    std::vector<int> numbers{1, 2, 3};
    // Divide each integer by 2, converting it into a double
    auto const halve = [](const int& num) { return num / 2.0; };
    auto halved = numbers | ranges::views::transform(halve);
    std::cout << halved << '\n';
}
//! [transform example]
| 178 |
4,054 | <gh_stars>1000+
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.config.subscription.impl;
import com.yahoo.config.subscription.ConfigHandle;
import com.yahoo.vespa.config.RawConfig;
/**
* A config handle which does not use the config class, but payload instead. Used in config proxy.
*
* @author <NAME>
*/
@SuppressWarnings({"rawtypes", "unchecked"})
public class GenericConfigHandle extends ConfigHandle {

    // Kept alongside the superclass reference to expose getRawConfig().
    private final GenericJRTConfigSubscription subscription;

    public GenericConfigHandle(GenericJRTConfigSubscription subscription) {
        super(subscription);
        this.subscription = subscription;
    }

    /**
     * Returns the raw (payload-based) config held by the underlying
     * subscription, bypassing the config class entirely.
     */
    public RawConfig getRawConfig() {
        return subscription.getRawConfig();
    }
}
| 238 |
661 | #ifndef _FMOD_IOS_H
#define _FMOD_IOS_H
/*
[ENUM]
[
[DESCRIPTION]
Control whether the sound will use a the dedicated hardware decoder or a software codec.
[REMARKS]
Every devices has a single hardware decoder and unlimited software decoders.
[SEE_ALSO]
]
*/
/* Codec selection policy for AudioQueue playback; see the [ENUM] description above. */
typedef enum FMOD_AUDIOQUEUE_CODECPOLICY
{
    FMOD_AUDIOQUEUE_CODECPOLICY_DEFAULT,         /* Try hardware first, if it's in use or prohibited by audio session, try software. */
    FMOD_AUDIOQUEUE_CODECPOLICY_SOFTWAREONLY,    /* kAudioQueueHardwareCodecPolicy_UseSoftwareOnly ~ try software, if not available fail. */
    FMOD_AUDIOQUEUE_CODECPOLICY_HARDWAREONLY,    /* kAudioQueueHardwareCodecPolicy_UseHardwareOnly ~ try hardware, if not available fail. */
    FMOD_AUDIOQUEUE_CODECPOLICY_FORCEINT = 65536 /* Makes sure this enum is signed 32bit */
} FMOD_AUDIOQUEUE_CODECPOLICY;
#endif /* _FMOD_IOS_H */
| 366 |
#ifndef FONT8X8_H
#define FONT8X8_H

#include <avr/pgmspace.h>

/* Font glyph data stored in AVR program memory (see font8x8.c).
 * NOTE(review): presumably one 8-byte bitmap per character, given the
 * 8x8 naming -- confirm against the defining translation unit. */
extern const unsigned char font8x8[];

#endif
1,418 | <reponame>Watch-Later/recipes
#!/usr/bin/python3
# Read and analyse stdout of footprint.cc
# $ sudo ./footprint 100000 | ./footprint.py
from collections import OrderedDict
import re, sys
# objsize per slab name, taken from the most recent slabinfo line seen.
slabs = {}
# Each section is (name, meminfo OrderedDict, slab-active OrderedDict).
sections = []
section = None
for line in sys.stdin:
    # A "===== name =====" banner starts a new section of output.
    if m := re.match('===== (.*) =====', line):
        section_name = m.group(1)
        # print(section_name)
        if (section):
            sections.append(section)
        section = (section_name, OrderedDict(), OrderedDict())
        # Lines after the banner are /proc/meminfo style until slabinfo starts.
        meminfo = True
        continue
    if re.match('slabinfo -', line):
        meminfo = False
        continue
    if meminfo:
        # e.g. "MemFree:  12345 kB" -> section[1]['MemFree'] = 12345
        if m := re.match('(.*): *(\\d+) kB', line):
            section[1][m.group(1)] = int(m.group(2))
    else:
        # slabinfo body: skip header/comment lines, keep name/active/objsize.
        if line[0] == '#':
            continue
        (slab, active, total, objsize) = line.split()[:4]
        slabs[slab] = int(objsize)
        section[2][slab] = int(active)
sections.append(section)
# Print only the deltas between each section and its predecessor.
for i in range(1, len(sections)):
    print('=====', sections[i][0])
    meminfo = sections[i][1]
    old = sections[i-1][1]
    for key in meminfo:
        diff = meminfo[key]-old[key]
        if diff:
            print(key, meminfo[key], diff)
    print('-----')
    slab = sections[i][2]
    old = sections[i-1][2]
    for key in slab:
        diff = slab[key]-old[key]
        if diff:
            # slab name resolves to: objsize, current active count, delta.
            print(key, slabs[key], slab[key], diff)
| 651 |
4,140 | <reponame>FANsZL/hive<gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.metastore;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.ValidTxnList;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.txn.TxnCommonUtils;
import org.apache.hadoop.hive.metastore.txn.TxnStore;
import org.apache.hadoop.hive.metastore.txn.TxnUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.TimeUnit;
/**
* Cleaner for the {@link MaterializationsRebuildLockHandler}. It removes outdated locks
* in the intervals specified by the input property.
*/
public class MaterializationsRebuildLockCleanerTask implements MetastoreTaskThread {
  private static final Logger LOG = LoggerFactory.getLogger(MaterializationsRebuildLockCleanerTask.class);

  private Configuration conf;
  private TxnStore txnHandler;

  // Run at half the configured transaction timeout.
  @Override
  public long runFrequency(TimeUnit unit) {
    return MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.TXN_TIMEOUT, unit) / 2;
  }

  @Override
  public void setConf(Configuration configuration) {
    conf = configuration;
    txnHandler = TxnUtils.getTxnStore(conf);
  }

  @Override
  public Configuration getConf() {
    return conf;
  }

  // Acquires the MaterializationRebuild mutex, then deletes rebuild locks
  // that have outlived TXN_TIMEOUT relative to the current open-txn snapshot.
  @Override
  public void run() {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Cleaning up materialization rebuild locks");
    }

    TxnStore.MutexAPI.LockHandle handle = null;
    try {
      handle = txnHandler.getMutexAPI().acquireLock(TxnStore.MUTEX_KEY.MaterializationRebuild.name());
      ValidTxnList validTxnList = TxnCommonUtils.createValidReadTxnList(txnHandler.getOpenTxns(), 0);
      long removedCnt = txnHandler.cleanupMaterializationRebuildLocks(validTxnList,
          MetastoreConf.getTimeVar(conf, MetastoreConf.ConfVars.TXN_TIMEOUT, TimeUnit.MILLISECONDS));
      if (removedCnt > 0) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Number of materialization locks deleted: " + removedCnt);
        }
      }
    } catch (Throwable t) {
      LOG.error("Unexpected error in thread: {}, message: {}", Thread.currentThread().getName(), t.getMessage(), t);
    } finally {
      // Always release the mutex, even if cleanup failed.
      if (handle != null) {
        handle.releaseLocks();
      }
    }
  }
}
| 1,030 |
665 | <reponame>opencirclesolutions/isis<filename>api/applib/src/test/java/org/apache/isis/applib/services/user/UserMemento_Test.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.applib.services.user;
import java.net.MalformedURLException;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import lombok.val;
// Tests for UserMemento's copy-on-write "wither" API: every with* method must
// return a new instance carrying the change while the original stays untouched.
class UserMemento_Test {

    @Nested
    class constructor {

        @Test
        void name_only() {
            // when
            val userMemento = UserMemento.ofName("fredflintstone");

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(0);
            Assertions.assertThat(userMemento.getRealName()).isNull();
            Assertions.assertThat(userMemento.getAvatarUrl()).isNull();
        }

        @Test
        void name_and_roles() {
            // when
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND");

            // then
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getRealName()).isNull();
            Assertions.assertThat(userMemento.getAvatarUrl()).isNull();
        }
    }

    @Nested
    class withRealName {

        @Test
        void user_and_roles_preserved_and_real_name_set() {
            // copy
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND");

            // when
            val userMemento2 = userMemento.withRealName("<NAME>");

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento2.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento2.getRealName()).isEqualTo("<NAME>");

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getRealName()).isNull();
        }

        @Test
        void user_and_roles_and_avatarUrl_preserved_and_real_name_set() throws MalformedURLException {
            // copy
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND")
                    .withAvatarUrl(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));

            // when
            val userMemento2 = userMemento.withRealName("<NAME>");

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento2.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento2.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));
            Assertions.assertThat(userMemento2.getRealName()).isEqualTo("<NAME>");

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));
            Assertions.assertThat(userMemento.getRealName()).isNull();
        }
    }

    @Nested
    class withAvatarUrl {

        @Test
        void user_and_roles_preserved_and_avatarUrl_set() throws MalformedURLException {
            // copy
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND");

            // when
            val userMemento2 = userMemento.withAvatarUrl(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento2.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento2.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getAvatarUrl()).isNull();
        }

        @Test
        void user_and_roles_and_real_name_preserved_and_avatarUrl_set() throws MalformedURLException {
            // copy
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND").withRealName("<NAME>");

            // when
            val userMemento2 = userMemento.withAvatarUrl(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento2.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento2.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));
            Assertions.assertThat(userMemento2.getRealName()).isEqualTo("<NAME>");

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getRealName()).isEqualTo("<NAME>");
            Assertions.assertThat(userMemento.getAvatarUrl()).isNull();
        }
    }

    @Nested
    class withRole {

        @Test
        void user_and_roles_preserved_and_role_added() throws MalformedURLException {
            // given
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND");

            // when
            val userMemento2 = userMemento.withRoleAdded("WICKET_ROLE");

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento2.getRoles().size()).isEqualTo(3);
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("WICKET_ROLE"));

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
        }
    }

    @Nested
    class withImpersonating {

        @Test
        void user_and_roles_preserved_and_impersonating_flag_set() throws MalformedURLException {
            // given
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND");

            // when
            val userMemento2 = userMemento.withImpersonating(true);

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.isImpersonating()).isTrue();

            // then original unchanged
            Assertions.assertThat(userMemento.isImpersonating()).isFalse();
        }
    }

    @Nested
    class withTenancyToken {

        @Test
        void user_and_roles_preserved_and_impersonating_flag_set() throws MalformedURLException {
            // given
            val userMemento = UserMemento.ofNameAndRoleNames("fredflintstone", "CAVEMAN", "HUSBAND");

            // when
            val userMemento2 = userMemento.withMultiTenancyToken("/ITA");

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getMultiTenancyToken()).isEqualTo("/ITA");

            // then original unchanged
            Assertions.assertThat(userMemento.getMultiTenancyToken()).isNull();
        }
    }

    @Nested
    class all_the_withers {

        @Test
        void happy_case() throws MalformedURLException {
            // when: chain every wither in one expression
            val userMemento = UserMemento.ofName("fredflintstone")
                    .withRoleAdded("CAVEMAN")
                    .withRoleAdded("HUSBAND")
                    .withAvatarUrl(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"))
                    .withRealName("<NAME>")
                    .withMultiTenancyToken("/USA/Bedrock")
                    ;

            // then
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));
            Assertions.assertThat(userMemento.getRealName()).isEqualTo("<NAME>");
            Assertions.assertThat(userMemento.isImpersonating()).isFalse();

            // and when
            val userMemento2 = userMemento.withImpersonating(true);

            // then copy created
            Assertions.assertThat(userMemento2).isNotSameAs(userMemento);

            // then copy correct
            Assertions.assertThat(userMemento2.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento2.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento2.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento2.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));
            Assertions.assertThat(userMemento2.getRealName()).isEqualTo("<NAME>");
            Assertions.assertThat(userMemento2.isImpersonating()).isTrue();
            Assertions.assertThat(userMemento2.getMultiTenancyToken()).isEqualTo("/USA/Bedrock");

            // then original unchanged
            Assertions.assertThat(userMemento.getName()).isEqualTo("fredflintstone");
            Assertions.assertThat(userMemento.getRoles().size()).isEqualTo(2);
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("CAVEMAN"));
            Assertions.assertThat(userMemento.streamRoleNames()).anyMatch(x -> x.equals("HUSBAND"));
            Assertions.assertThat(userMemento.getAvatarUrl()).isEqualTo(new java.net.URL("https://upload.wikimedia.org/wikipedia/en/a/ad/Fred_Flintstone.png"));
            Assertions.assertThat(userMemento.getRealName()).isEqualTo("<NAME>");
            Assertions.assertThat(userMemento.isImpersonating()).isFalse();
            Assertions.assertThat(userMemento.getMultiTenancyToken()).isEqualTo("/USA/Bedrock");
        }
    }
}
| 6,770 |
681 | <reponame>pkiraly/dataverse
package edu.harvard.iq.dataverse.engine.command.impl;
import edu.harvard.iq.dataverse.Dataset;
import edu.harvard.iq.dataverse.DatasetServiceBean;
import edu.harvard.iq.dataverse.Dataverse;
import edu.harvard.iq.dataverse.DataverseLinkingServiceBean;
import edu.harvard.iq.dataverse.DataverseServiceBean;
import edu.harvard.iq.dataverse.Guestbook;
import edu.harvard.iq.dataverse.MetadataBlock;
import edu.harvard.iq.dataverse.Template;
import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;
import edu.harvard.iq.dataverse.engine.DataverseEngine;
import edu.harvard.iq.dataverse.engine.NoOpTestEntityManager;
import edu.harvard.iq.dataverse.engine.TestCommandContext;
import edu.harvard.iq.dataverse.engine.TestDataverseEngine;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;
import edu.harvard.iq.dataverse.engine.command.exception.IllegalCommandException;
import edu.harvard.iq.dataverse.engine.command.exception.PermissionException;
import static edu.harvard.iq.dataverse.mocks.MocksFactory.makeAuthenticatedUser;
import edu.harvard.iq.dataverse.search.IndexBatchServiceBean;
import edu.harvard.iq.dataverse.search.IndexServiceBean;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.concurrent.Future;
import javax.persistence.EntityManager;
import javax.servlet.http.HttpServletRequest;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import org.junit.Before;
import org.junit.Test;
/**
 * Unit tests for {@link MoveDataverseCommand}: legal and illegal dataverse moves,
 * the superuser permission check, and the keep/remove behavior for guestbooks,
 * templates and metadata blocks (with and without the "force" flag).
 *
 * @author michael
 */
public class MoveDataverseCommandTest {
    // Dataverse tree fixture built in setUp().
    // NOTE(review): childF is declared but never initialized or used.
    Dataverse root, childA, childB, grandchildAA, childC, grandchildCC, childD, grandchildDD, childE, grandchildEE, childF;
    Dataset datasetC, datasetCC;
    Guestbook gbA;
    Template templateA;
    MetadataBlock mbA, mbB;
    DataverseEngine testEngine;
    AuthenticatedUser auth, nobody;
    // Never assigned in these tests, so every DataverseRequest is built with a null request.
    protected HttpServletRequest httpRequest;

    @Before
    public void setUp() {
        // authentication: one superuser (allowed to move) and one regular user (not allowed)
        auth = makeAuthenticatedUser("Super", "User");
        auth.setSuperuser(true);
        nobody = makeAuthenticatedUser("Nick", "Nobody");
        nobody.setSuperuser(false);

        // Dataverses: root with five children, four of which have one grandchild each
        root = new Dataverse();
        root.setName("root");
        root.setId(1l);
        root.setPublicationDate(new Timestamp(new Date().getTime()));
        childA = new Dataverse();
        childA.setOwner( root );
        childA.setId(2l);
        childB = new Dataverse();
        childB.setOwner( root );
        childB.setId(3l);
        grandchildAA = new Dataverse();
        grandchildAA.setOwner( childA );
        grandchildAA.setId(4l);
        childC = new Dataverse();
        childC.setOwner(root);
        childC.setId(5l);
        grandchildCC = new Dataverse();
        grandchildCC.setOwner(childC);
        grandchildCC.setId(6l);
        childD = new Dataverse();
        childD.setOwner(root);
        childD.setId(7l);
        grandchildDD = new Dataverse();
        grandchildDD.setOwner(childD);
        grandchildDD.setId(8l);
        childE = new Dataverse();
        childE.setOwner(root);
        childE.setId(9l);
        grandchildEE = new Dataverse();
        grandchildEE.setOwner(childE);
        grandchildEE.setId(10l);

        // Datasets: one in childC, one in grandchildCC (both reference gbA below)
        datasetC = new Dataset();
        datasetC.setOwner(childC);
        datasetC.setId(1l);
        datasetCC = new Dataset();
        datasetCC.setOwner(grandchildCC);
        datasetCC.setId(2l);

        // Guestbooks: gbA is rooted at childC and inherited by grandchildCC
        gbA= new Guestbook();
        gbA.setId(1l);
        gbA.setDataverse(childC);
        List<Guestbook> gbs = new ArrayList<>();
        gbs.add(gbA);
        childC.setGuestbooks(gbs);
        childC.setGuestbookRoot(true);
        grandchildCC.setGuestbookRoot(false);
        datasetC.setGuestbook(gbA);
        datasetCC.setGuestbook(gbA);
        List<Guestbook> noneGb = new ArrayList();
        root.setGuestbooks(noneGb);
        childA.setGuestbooks(noneGb);
        grandchildAA.setGuestbooks(noneGb);
        childB.setGuestbooks(noneGb);
        grandchildCC.setGuestbooks(noneGb);
        childD.setGuestbooks(noneGb);
        grandchildDD.setGuestbooks(noneGb);

        // Templates: templateA is rooted at childD and used as grandchildDD's default
        List<Template> ts = new ArrayList<>();
        templateA = new Template();
        templateA.setName("TemplateA");
        templateA.setDataverse(childD);
        ts.add(templateA);
        childD.setTemplates(ts);
        childD.setTemplateRoot(true);
        grandchildDD.setTemplateRoot(false);
        grandchildDD.setDefaultTemplate(templateA);
        List<Template> noneT = new ArrayList<>();
        root.setTemplates(noneT);
        childA.setTemplates(noneT);
        grandchildAA.setTemplates(noneT);
        childB.setTemplates(noneT);
        childC.setTemplates(noneT);
        grandchildCC.setTemplates(noneT);
        grandchildDD.setTemplates(noneT);

        // Metadata blocks: mbB is owned by childE; grandchildEE inherits mbA (root-owned) and mbB
        List<MetadataBlock> mbsE = new ArrayList<>();
        List<MetadataBlock> mbsEE = new ArrayList<>();
        mbA = new MetadataBlock();
        mbA.setOwner(root);
        mbA.setId(1l);
        mbB = new MetadataBlock();
        mbB.setOwner(childE);
        mbB.setId(2l);
        mbsE.add(mbB);
        mbsEE.add(mbA);
        mbsEE.add(mbB);
        childE.setMetadataBlocks(mbsE);
        childE.setMetadataBlockRoot(true);
        grandchildEE.setMetadataBlockRoot(false);
        grandchildEE.setMetadataBlocks(mbsEE);

        // Test engine whose command context stubs out every service the command touches.
        testEngine = new TestDataverseEngine( new TestCommandContext(){
            @Override
            public DataverseServiceBean dataverses() {
                return new DataverseServiceBean(){
                    @Override
                    public Dataverse save(Dataverse dataverse) {
                        // no-op. The superclass accesses databases which we don't have.
                        return dataverse;
                    }
                    @Override
                    public Dataverse find(Object pk) {
                        // fake this for what we need
                        if (pk instanceof Long) {
                            if ((Long)pk == 10) {
                                return grandchildEE;
                            }
                        }
                        return new Dataverse();
                    }
                    @Override
                    public List<Dataverse> findByOwnerId(Long ownerId) {
                        return new ArrayList<>();
                    }
                    @Override
                    public List<Long> findAllDataverseDataverseChildren(Long dvId) {
                        // fake this for what we need: only childE (id 9) has a dataverse child
                        List<Long> fakeChildren = new ArrayList<>();
                        if (dvId == 9){
                            fakeChildren.add(grandchildEE.getId());
                        }
                        return fakeChildren;
                    }
                    @Override
                    public List<Long> findAllDataverseDatasetChildren(Long dvId) {
                        // fake this for what we need: only grandchildCC (id 6) has a dataset child
                        List<Long> fakeChildren = new ArrayList<>();
                        if (dvId == 6) {
                            fakeChildren.add(datasetCC.getId());
                        }
                        return fakeChildren;
                    }
                };
            }
            @Override
            public IndexServiceBean index(){
                // indexing is irrelevant here; all calls are no-ops
                return new IndexServiceBean(){
                    @Override
                    public Future<String> indexDataverse(Dataverse dataverse){
                        return null;
                    }
                    @Override
                    public Future<String> indexDataset(Dataset dataset, boolean doNormalSolrDocCleanUp){
                        return null;
                    }
                    @Override
                    public Future<String> indexDataverseInNewTransaction(Dataverse dataverse){
                        return null;
                    }
                    @Override
                    public Future<String> indexDatasetInNewTransaction(Long id){
                        return null;
                    }
                };
            }
            @Override
            public IndexBatchServiceBean indexBatch(){
                return new IndexBatchServiceBean(){
                    @Override
                    public void indexDataverseRecursively(Dataverse dataverse) {
                    }
                };
            }
            @Override
            public DatasetServiceBean datasets() {
                return new DatasetServiceBean() {
                    @Override
                    public List<Dataset> findByOwnerId(Long ownerId) {
                        return new ArrayList<>();
                    }
                    @Override
                    public Dataset find(Object pk) {
                        // fake this for what we need
                        if (pk instanceof Long) {
                            if ((Long)pk == 2) {
                                return datasetCC;
                            }
                        }
                        return new Dataset();
                    }
                };
            }
            @Override
            public EntityManager em() {
                return new NoOpTestEntityManager();
            }
            @Override
            public DataverseLinkingServiceBean dvLinking() {
                return new DataverseLinkingServiceBean() {
                };
            }
        });
    }

    /**
     * Moving ChildB to ChildA
     * @throws Exception - should not throw an exception
     */
    @Test
    public void testValidMove() throws Exception {
        System.out.println("testValidMove");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childB, childA, null));
        assertEquals( childA, childB.getOwner() );
        assertEquals( Arrays.asList(root, childA), childB.getOwners() );
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childB, root, null));
        assertEquals( root, childB.getOwner() );
        assertEquals( Arrays.asList(root), childB.getOwners() );
    }

    /**
     * Moving ChildA to its child (illegal).
     */
    @Test( expected=IllegalCommandException.class )
    public void testInvalidMove() throws Exception {
        System.out.println("testInvalidMove");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childA, grandchildAA, null));
        fail();
    }

    /**
     * Calling API as a non super user (illegal).
     */
    @Test(expected = PermissionException.class)
    public void testNotSuperUser() throws Exception {
        System.out.println("testNotSuperUser");
        DataverseRequest aRequest = new DataverseRequest(nobody, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childB, childA, null));
        fail();
    }

    @Test( expected=IllegalCommandException.class )
    public void testMoveIntoSelf() throws Exception {
        System.out.println("testMoveIntoSelf");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childB, childB, null));
        fail();
    }

    @Test( expected=IllegalCommandException.class )
    public void testMoveIntoParent() throws Exception {
        System.out.println("testMoveIntoParent");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildAA, childA, null));
        fail();
    }

    // Moving childC keeps gbA because the guestbook moves along with its root dataverse.
    @Test
    public void testKeepGuestbook() throws Exception {
        System.out.println("testKeepGuestbook");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childC, childB, null));
        assertNotNull(datasetC.getGuestbook());
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childC, root, null));
        assertEquals( root, childC.getOwner() );
    }

    // Moving grandchildCC away from gbA's root must be rejected unless forced.
    @Test(expected = IllegalCommandException.class)
    public void testRemoveGuestbookWithoutForce() throws Exception {
        System.out.println("testRemoveGuestbookWithoutForce");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildCC, root, null));
        fail();
    }

    // With force=true the move succeeds and the dataset's guestbook reference is cleared.
    @Test
    public void testRemoveGuestbook() throws Exception {
        System.out.println("testRemoveGuestbook");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildCC, root, true));
        assertNull( datasetCC.getGuestbook());
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildCC, childC, true));
        assertEquals( childC, grandchildCC.getOwner() );
    }

    // Moving childD keeps templateA because the template moves along with its root dataverse.
    @Test
    public void testKeepTemplate() throws Exception {
        System.out.println("testKeepTemplate");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childD, childB, null));
        assertNotNull(grandchildDD.getDefaultTemplate());
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childD, root, null));
        assertEquals( root, childD.getOwner() );
    }

    @Test(expected = IllegalCommandException.class)
    public void testRemoveTemplateWithoutForce() throws Exception {
        System.out.println("testRemoveTemplateWithoutForce");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildDD, root, null));
        fail();
    }

    // With force=true the default template reference is cleared on the moved dataverse.
    @Test
    public void testRemoveTemplate() throws Exception {
        System.out.println("testRemoveTemplate");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildDD, root, true));
        assertNull( grandchildDD.getDefaultTemplate());
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildDD, childD, true));
        assertEquals( childD, grandchildDD.getOwner() );
    }

    @Test
    public void testKeepMetadataBlock() throws Exception {
        System.out.println("testKeepMetadataBlock");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childE, childB, null));
        assertEquals(Arrays.asList(mbB), childE.getMetadataBlocks());
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, childE, root, null));
        assertEquals( root, childE.getOwner() );
    }

    @Test(expected = IllegalCommandException.class)
    public void testRemoveMetadataBlockWithoutForce() throws Exception {
        System.out.println("testRemoveMetadataBlockWithoutForce");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildEE, root, null));
        fail();
    }

    // With force=true only the inherited root-owned block (mbA) remains visible.
    @Test
    public void testRemoveMetadataBlock() throws Exception {
        System.out.println("testRemoveMetadataBlock");
        DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildEE, root, true));
        assertEquals(Arrays.asList(mbA), grandchildEE.getMetadataBlocks(true));
        // move back
        testEngine.submit(
                new MoveDataverseCommand(aRequest, grandchildEE, childE, true));
        assertEquals( childE, grandchildEE.getOwner() );
    }
}
| 8,114 |
1,826 | <filename>flexmark-util/src/test/java/com/vladsch/flexmark/util/visitor/VisitorTestSuite.java
package com.vladsch.flexmark.util.visitor;
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
/**
 * JUnit 4 test suite aggregating the visitor-API tests of this module.
 * Currently runs only {@link NodeVisitorTest}; add further test classes to the
 * {@code @Suite.SuiteClasses} list as they are written.
 */
@RunWith(Suite.class)
@Suite.SuiteClasses({
        NodeVisitorTest.class,
})
public class VisitorTestSuite {
}
| 126 |
619 | /*
* Author: <NAME> <<EMAIL>>
* <NAME> <<EMAIL>>
* Copyright (c) 2015 Intel Corporation.
*
* This program and the accompanying materials are made available under the
* terms of the The MIT License which is available at
* https://opensource.org/licenses/MIT.
*
* SPDX-License-Identifier: MIT
*/
#include "ttp223.h"
#include "upm_fti.h"
/**
* This file implements the Function Table Interface (FTI) for this sensor
*/
// Static descriptor data consumed by the UPM Function Table Interface (FTI).
const char upm_ttp223_name[] = "TTP223";
const char upm_ttp223_description[] = "TTP223 Touch Detector Sensor";
const upm_protocol_t upm_ttp223_protocol[] = {UPM_GPIO};
const upm_sensor_t upm_ttp223_category[] = {UPM_SWITCH};

// forward declarations
const upm_sensor_descriptor_t upm_ttp223_get_descriptor();
const void* upm_ttp223_get_ft(upm_sensor_t sensor_type);
void* upm_ttp223_init_name();
void upm_ttp223_close(void* dev);
upm_result_t upm_ttp223_is_pressed(void* dev, bool* value, int num);
// Build and return the FTI descriptor (name, description, supported
// protocols and categories) for the TTP223 driver.
const upm_sensor_descriptor_t upm_ttp223_get_descriptor(){
    upm_sensor_descriptor_t desc;
    desc.name = upm_ttp223_name;
    desc.description = upm_ttp223_description;
    desc.protocol = upm_ttp223_protocol;
    desc.protocol_size = 1;
    desc.category = upm_ttp223_category;
    desc.category_size = 1;
    return desc;
}
// Generic sensor function table: lifecycle and descriptor entry points.
static const upm_sensor_ft ft =
{
    .upm_sensor_init_name = &upm_ttp223_init_name,
    .upm_sensor_close = &upm_ttp223_close,
    .upm_sensor_get_descriptor = &upm_ttp223_get_descriptor
};

// Switch-category function table: exposes the touch-state read.
static const upm_switch_ft sft =
{
    .upm_switch_get_value = &upm_ttp223_is_pressed
};
// Return the function table matching the requested FTI sensor type,
// or NULL when this driver does not implement that interface.
const void* upm_ttp223_get_ft(upm_sensor_t sensor_type){
    switch (sensor_type) {
        case UPM_SWITCH:
            return &sft;
        case UPM_SENSOR:
            return &ft;
        default:
            return NULL;
    }
}
// Name-based initialization is not supported by this driver; always NULL.
void* upm_ttp223_init_name(){
    return NULL;
}
// FTI wrapper: release the driver context previously created by init.
void upm_ttp223_close(void* dev){
    ttp223_close((ttp223_context)dev);
}
// FTI switch read: store the touch state into *value. Only a single pad
// (num == 1) is supported; anything else is rejected as out of range.
upm_result_t upm_ttp223_is_pressed(void* dev, bool* value, int num) {
    if (num == 1)
        return ttp223_is_pressed((ttp223_context)dev, value);
    printf("Sorry this sensor driver supports only one touch pad\n");
    return UPM_ERROR_OUT_OF_RANGE;
}
346 | <gh_stars>100-1000
// usage: plash map KEY [ CONTAINER ]
// Map a container to a key. Use an empty container to delete a key.
//
// Example:
//
// $ plash build -f alpine
// 342
//
// $ plash map myfavorite 342
//
// $ plash map myfavorite
// 342
//
// $ plash build --from-map myfavorite
// 342
//
// $ plash map myfavorite ''
//
// $ plash map myfavorite
// $
#define _GNU_SOURCE
#include <assert.h>
#include <errno.h>
#include <libgen.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <plash.h>
// Absolute path of the plash data directory (result of `plash data`); set once in main().
char *plash_data;
// Print the container id a map key points at. A missing key (ENOENT) is
// not an error: nothing is printed and the program exits normally.
void get(char const *linkpath) {
  char *resolved = realpath(linkpath, NULL);
  if (!resolved) {
    if (errno != ENOENT)
      pl_fatal("realpath");
    return;
  }
  // The symlink target is the node directory; its basename is the container id.
  puts(basename(resolved));
}
// Delete a map key. Deleting a key that does not exist (ENOENT) is a no-op.
void del(char const *linkpath) {
  if (unlink(linkpath) == 0)
    return;
  if (errno != ENOENT)
    pl_fatal("unlink");
}
// Point the map entry at container_id's node directory. The symlink is
// created inside a fresh temp dir and then rename()d over linkpath, so the
// update replaces any previous value atomically. The target is stored
// relative to the plash data dir so the data dir can be relocated.
void set(char const *linkpath, char *container_id) {
  char *nodepath;
  nodepath = pl_call("nodepath", container_id);
  if (chdir(pl_call("mkdtemp")) == -1)
    pl_fatal("chdir");
  // rewrite the absolute node path as "../<path relative to $PLASH_DATA>"
  if (asprintf(&nodepath, "..%s", nodepath + strlen(plash_data)) == -1)
    pl_fatal("asprintf");
  if (symlink(nodepath, "link") == -1)
    pl_fatal("symlink");
  if (rename("link", linkpath) == -1)
    pl_fatal("rename");
}
// Entry point: `plash map KEY [CONTAINER]`.
// 2 args -> print the mapped container; 3 args with empty CONTAINER -> delete
// the key; 3 args otherwise -> set the key. Anything else prints usage.
int main(int argc, char *argv[]) {
  char *linkpath;
  if (argc < 2) {
    pl_usage();
  }
  plash_data = pl_call("data");
  assert(plash_data);
  assert(plash_data[0] == '/');
  // validate map key
  if (!argv[1][0])
    pl_fatal("empty map name not allowed");
  else if (strchr(argv[1], '/') != NULL)
    pl_fatal("'/' not allowed in map name");
  // the location of the symlink for this map key
  if (asprintf(&linkpath, "%s/map/%s", plash_data, argv[1]) == -1)
    pl_fatal("asprintf");
  // dispatch on argument count / emptiness of the value
  if (argc == 2) {
    get(linkpath);
  } else if (argc == 3 && !argv[2][0]) {
    del(linkpath);
  } else if (argc == 3) {
    set(linkpath, argv[2]);
  } else
    pl_usage();
}
| 857 |
2,151 | <filename>ios/web/public/web_state/ui/crw_content_view.h
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_WEB_PUBLIC_WEB_STATE_UI_CRW_CONTENT_VIEW_H_
#define IOS_WEB_PUBLIC_WEB_STATE_UI_CRW_CONTENT_VIEW_H_
#import <UIKit/UIKit.h>
// UIViews conforming to CRWScrollableContent (i.e. CRWContentViews) are used
// to display content within a WebState.
@protocol CRWScrollableContent<NSObject>

// The scroll view used to display the content. If |scrollView| is non-nil,
// it will be used to back the CRWContentViewScrollViewProxy and is expected to
// be a subview of the CRWContentView.
@property(nonatomic, strong, readonly) UIScrollView* scrollView;

// Adds an inset to content view. Implementations of this protocol can
// implement this property using UIScrollView.contentInset (where applicable)
// or via resizing a subview's frame. Can be used as a workaround for a
// WKWebView bug, where UIScrollView.contentInset does not work
// (rdar://23584409).
@property(nonatomic, assign) UIEdgeInsets contentInset;

// Returns YES if content is being displayed in the scroll view.
// TODO(stuartmorgan): See if this can be removed from the public interface.
- (BOOL)isViewAlive;

@optional

// Whether or not the content view should use the content inset when setting
// |contentInset|. Optional; callers must check respondsToSelector: first.
@property(nonatomic, assign) BOOL shouldUseViewContentInset;

@end

// Convenience type for content views.
typedef UIView<CRWScrollableContent> CRWContentView;
#endif // IOS_WEB_PUBLIC_WEB_STATE_UI_CRW_CONTENT_VIEW_H_
| 529 |
305 | package org.mamute.dao;
import javax.inject.Inject;
import org.hibernate.Query;
import org.hibernate.Session;
import org.mamute.infra.NotFoundException;
import org.mamute.model.TagPage;
/**
 * Hibernate-backed data access object for {@link TagPage} entities,
 * looked up by their tag name.
 */
public class TagPageDAO {

    private Session session;

    // NOTE(review): no-arg constructor presumably kept for framework proxying — confirm.
    @Deprecated
    public TagPageDAO() {
    }

    @Inject
    public TagPageDAO(Session session) {
        this.session = session;
    }

    /** Persists the given tag page. */
    public void save(TagPage tagPage) {
        session.save(tagPage);
    }

    /**
     * Returns the page for the given tag name.
     *
     * @throws NotFoundException when no page exists for that tag
     */
    public TagPage findByTag(String tagName) {
        TagPage page = (TagPage) queryByTag(tagName).uniqueResult();
        if (page == null) {
            throw new NotFoundException();
        }
        return page;
    }

    /** Whether at least one page exists for the given tag name. */
    public boolean existsOfTag(String tagName) {
        return !queryByTag(tagName).list().isEmpty();
    }

    // Shared HQL query, parameterized by tag name.
    private Query queryByTag(String tagName) {
        return session.createQuery("from TagPage where tag.name = :tag")
                .setParameter("tag", tagName);
    }
}
| 308 |
582 | /*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moduliths.events.jpa;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
/**
* Repository to store {@link JpaEventPublication}s.
*
* @author <NAME>
*/
interface JpaEventPublicationRepository extends JpaRepository<JpaEventPublication, UUID> {

    /**
     * Returns all {@link JpaEventPublication}s that have not been completed yet.
     *
     * @return all publications whose completion date is still {@literal null}; never {@literal null}.
     */
    List<JpaEventPublication> findByCompletionDateIsNull();

    /**
     * Return the {@link JpaEventPublication} for the given serialized event and listener identifier.
     *
     * @param event must not be {@literal null}.
     * @param listenerId must not be {@literal null}.
     * @return the unique matching publication, if any.
     */
    @Query("select p from JpaEventPublication p where p.serializedEvent = ?1 and p.listenerId = ?2")
    Optional<JpaEventPublication> findBySerializedEventAndListenerId(Object event, String listenerId);
}
| 469 |
373 | /*
* #%L
* ACS AEM Commons Bundle
* %%
* Copyright (C) 2015 Adobe
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/package com.adobe.acs.commons.httpcache.rule.impl;
import com.adobe.acs.commons.httpcache.config.HttpCacheConfig;
import com.adobe.acs.commons.httpcache.engine.CacheContent;
import com.adobe.acs.commons.httpcache.rule.AbstractHttpCacheHandlingRule;
import org.apache.commons.collections.CollectionUtils;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Service;
import org.apache.sling.api.SlingHttpServletRequest;
import org.apache.sling.api.SlingHttpServletResponse;
import java.util.Arrays;
import java.util.List;
/**
* ACS AEM Commons - HTTP Cache - Rule: Honor cache control headers
*
* Cache only Http response status for the request is 200.
* Do not cache the response when it's set with cache control headers marking it as not cacheable.
*/
@Component
@Service
public class HonorCacheControlHeaders extends AbstractHttpCacheHandlingRule {

    private static final String KEY_CACHE_CONTROL_HEADER = "Cache-Control"; // HTTP 1.1
    private static final String[] VALUES_CACHE_CONTROL = {"no-cache", "no-store", "must-revalidate"};
    private static final String KEY_PRAGMA = "Pragma"; // HTTP 1.0
    private static final String[] VALUES_PRAGMA = {"no-cache"};

    /**
     * Allows caching only when neither the HTTP/1.1 Cache-Control header nor the
     * HTTP/1.0 Pragma header marks the response as non-cacheable.
     */
    @Override
    public boolean onResponseCache(SlingHttpServletRequest request, SlingHttpServletResponse response, HttpCacheConfig cacheConfig, CacheContent cacheContent) {
        if (headerForbidsCaching(cacheContent, KEY_CACHE_CONTROL_HEADER, VALUES_CACHE_CONTROL)) {
            return false;
        }
        if (headerForbidsCaching(cacheContent, KEY_PRAGMA, VALUES_PRAGMA)) {
            return false;
        }
        return true;
    }

    // True when the named header is present and carries any of the given no-cache directives.
    private static boolean headerForbidsCaching(CacheContent cacheContent, String headerName, String[] forbiddenValues) {
        if (!cacheContent.getHeaders().containsKey(headerName)) {
            return false;
        }
        List<String> headerValues = cacheContent.getHeaders().get(headerName);
        return CollectionUtils.containsAny(headerValues, Arrays.asList(forbiddenValues));
    }
}
| 955 |
354 | /***********************************************************************************************************************
* OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC, and other contributors. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met:
*
* (1) Redistributions of source code must retain the above copyright notice, this list of conditions and the following
* disclaimer.
*
* (2) Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided with the distribution.
*
* (3) Neither the name of the copyright holder nor the names of any contributors may be used to endorse or promote products
* derived from this software without specific prior written permission from the respective party.
*
* (4) Other than as required in clauses (1) and (2), distributions in any form of modifications or other derivative works
* may not use the "OpenStudio" trademark, "OS", "os", or any other confusingly similar designation without specific prior
* written permission from Alliance for Sustainable Energy, LLC.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE UNITED STATES GOVERNMENT, OR THE UNITED
* STATES DEPARTMENT OF ENERGY, NOR ANY OF THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
* USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
***********************************************************************************************************************/
#include "../ForwardTranslator.hpp"
#include "../../model/Model.hpp"
#include "../../model/Schedule.hpp"
#include "../../model/Schedule_Impl.hpp"
#include "../../model/Node.hpp"
#include "../../model/Node_Impl.hpp"
#include "../../model/CoilHeatingWaterToAirHeatPumpEquationFit.hpp"
#include "../../model/CoilHeatingWaterToAirHeatPumpEquationFit_Impl.hpp"
#include "../../model/CurveQuadLinear.hpp"
#include "../../utilities/core/Logger.hpp"
#include "../../utilities/core/Assert.hpp"
#include <utilities/idd/Coil_Heating_WaterToAirHeatPump_EquationFit_FieldEnums.hxx>
#include "../../utilities/idd/IddEnums.hpp"
#include <utilities/idd/IddEnums.hxx>
#include <utilities/idd/IddFactory.hxx>
using namespace openstudio::model;
//using namespace std;
namespace openstudio {
namespace energyplus {
boost::optional<IdfObject>
ForwardTranslator::translateCoilHeatingWaterToAirHeatPumpEquationFit(CoilHeatingWaterToAirHeatPumpEquationFit& modelObject) {
//setup boost optionals to use to store get method returns
boost::optional<std::string> s;
boost::optional<double> value;
boost::optional<Node> node;
// Make sure the modelObject gets put into the map, and the new idfObject gets put into the final file.
// Also sets the idfObjects name
IdfObject idfObject = createRegisterAndNameIdfObject(IddObjectType::Coil_Heating_WaterToAirHeatPump_EquationFit, modelObject);
// Object Name
//std::string baseName = idfObject.name().get();
// Water Inlet Node Name
if (boost::optional<ModelObject> mo = modelObject.waterInletModelObject()) {
if (boost::optional<Node> node = mo->optionalCast<Node>()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::WaterInletNodeName, node->name().get());
}
}
// Water Outlet Node Name
if (boost::optional<ModelObject> mo = modelObject.waterOutletModelObject()) {
if (boost::optional<Node> node = mo->optionalCast<Node>()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::WaterOutletNodeName, node->name().get());
}
}
//Air Inlet Node Name
if (boost::optional<ModelObject> mo = modelObject.airInletModelObject()) {
if (boost::optional<Node> node = mo->optionalCast<Node>()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::AirInletNodeName, node->name().get());
}
}
//Air Outlet Node Name
if (boost::optional<ModelObject> mo = modelObject.airOutletModelObject()) {
if (boost::optional<Node> node = mo->optionalCast<Node>()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::AirOutletNodeName, node->name().get());
}
}
//Rated Air Flow Rate
if (modelObject.isRatedAirFlowRateAutosized()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::RatedAirFlowRate, "Autosize");
} else if ((value = modelObject.ratedAirFlowRate())) {
idfObject.setDouble(Coil_Heating_WaterToAirHeatPump_EquationFitFields::RatedAirFlowRate, value.get());
}
//Rated Water Flow Rate
if (modelObject.isRatedWaterFlowRateAutosized()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::RatedWaterFlowRate, "Autosize");
} else if ((value = modelObject.ratedWaterFlowRate())) {
idfObject.setDouble(Coil_Heating_WaterToAirHeatPump_EquationFitFields::RatedWaterFlowRate, value.get());
}
// Rated Heating Capacity
if (modelObject.isRatedHeatingCapacityAutosized()) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::GrossRatedHeatingCapacity, "Autosize");
} else if ((value = modelObject.ratedHeatingCapacity())) {
idfObject.setDouble(Coil_Heating_WaterToAirHeatPump_EquationFitFields::GrossRatedHeatingCapacity, value.get());
}
// Heating Coefficient of Performance
if ((value = modelObject.ratedHeatingCoefficientofPerformance())) {
idfObject.setDouble(Coil_Heating_WaterToAirHeatPump_EquationFitFields::GrossRatedHeatingCOP, value.get());
}
// Heating Capacity Curve Name
{
auto curve = modelObject.heatingCapacityCurve();
if (auto _curve = translateAndMapModelObject(curve)) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::HeatingCapacityCurveName, _curve->nameString());
}
}
// Heating Power Consumption Curve Name
{
auto curve = modelObject.heatingPowerConsumptionCurve();
if (auto _curve = translateAndMapModelObject(curve)) {
idfObject.setString(Coil_Heating_WaterToAirHeatPump_EquationFitFields::HeatingPowerConsumptionCurveName, _curve->nameString());
}
}
return idfObject;
}
} // namespace energyplus
} // namespace openstudio
| 2,336 |
416 | <filename>sfm-map/src/main/java/org/simpleflatmapper/map/CaseInsensitiveEndsWithPredicate.java
package org.simpleflatmapper.map;
import org.simpleflatmapper.util.Named;
import org.simpleflatmapper.util.OrManyPredicate;
import org.simpleflatmapper.util.Predicate;
/**
 * Predicate matching field keys whose name ends with a given suffix,
 * compared case-insensitively.
 */
public final class CaseInsensitiveEndsWithPredicate implements Predicate<FieldKey<?>>, Named {

    private final String end;

    private CaseInsensitiveEndsWithPredicate(String name) {
        this.end = name;
    }

    @Override
    public boolean test(FieldKey<?> fieldKey) {
        String keyName = fieldKey.getName();
        int offset = keyName.length() - end.length();
        if (offset < 0) {
            // Key is shorter than the suffix; it cannot end with it.
            return false;
        }
        // Case-insensitive comparison of the trailing region only.
        return keyName.regionMatches(true, offset, end, 0, end.length());
    }

    @Override
    public String getName() {
        return end;
    }

    @Override
    public String toString() {
        return "CaseInsensitiveEndsWithPredicate{" +
                "name='" + end + '\'' +
                '}';
    }

    /** Creates a predicate for a single suffix. */
    public static CaseInsensitiveEndsWithPredicate of(String name) {
        return new CaseInsensitiveEndsWithPredicate(name);
    }

    /** Creates a predicate matching any of the given suffixes. */
    public static Predicate<FieldKey<?>> any(String... name) {
        CaseInsensitiveEndsWithPredicate[] predicates = new CaseInsensitiveEndsWithPredicate[name.length];
        int i = 0;
        for (String suffix : name) {
            predicates[i++] = of(suffix);
        }
        return new OrManyPredicate<FieldKey<?>>(predicates);
    }
}
| 654 |
362 | package net.danlew.gfycat;
import android.app.Application;
import dagger.Module;
import dagger.Provides;
import net.danlew.gfycat.service.GfycatService;
import net.danlew.gfycat.ui.MainActivity;
import javax.inject.Singleton;
/**
 * Dagger module providing the application-wide {@link GfycatService} and
 * making it injectable into {@link MainActivity}.
 */
@Module(
        injects = MainActivity.class
)
public class ServiceModule {

    // Application instance the service is constructed with.
    private Application mApplication;

    public ServiceModule(Application application) {
        mApplication = application;
    }

    /** Provides the single shared {@link GfycatService} instance. */
    @Provides
    @Singleton
    GfycatService provideGfycatService() {
        return new GfycatService(mApplication);
    }
}
| 200 |
2,106 | <filename>blaze/compute/tests/test_chunks.py
from odo import chunks
from blaze import discover, into, compute, symbol
from datashape.predicates import iscollection
# Shared fixtures: a flat list and the same values partitioned into two
# chunks (note the chunked copy holds floats), plus a symbol carrying the
# chunked container's discovered datashape.
L = [1, 2, 3, 4, 5, 6]
cL = chunks(list)([[1., 2., 3.], [4., 5., 6.]])
s = symbol('s', discover(cL))
def test_chunks_compute():
    """Computing against chunked data must agree with the flat list."""
    for expr in (s, s + 1, s.max(), s.mean() + 1, s.head()):
        chunked = compute(expr, {s: cL})
        flat = compute(expr, {s: L})
        if iscollection(expr.dshape):
            # Normalize both sides to plain lists before comparing.
            chunked = into(list, chunked)
            flat = into(list, flat)
        assert chunked == flat
def test_chunks_head():
    """head() over chunked data yields the leading elements only."""
    result = compute(s.head(2), cL)
    assert result == (1., 2.)
def test_pmap_default():
    """set_default_pmap swaps in the mapper used for chunked computation."""
    from blaze import set_default_pmap

    called = [False]

    def tracking_map(func, seq):
        # Record that the custom mapper was actually invoked.
        called[0] = True
        return map(func, seq)

    set_default_pmap(tracking_map)
    compute(s + 1, cL)
    assert called[0] is True
| 401 |
367 | import logging
import urllib.request
import re
import requests
###
# Scan focused on learning about the /privacy page, as per
# https://github.com/18F/site-scanning/issues/89.
# Set a default number of workers for a particular scan type.
# Overridden by a --workers flag. XXX not actually overridden?
workers = 50
def mergelists(a, b):
    """Return the union of two lists with duplicates removed (order unspecified)."""
    merged = set(a)
    merged.update(b)
    return list(merged)
# Required scan function. This is the meat of the scanner, where things
# that use the network or are otherwise expensive would go.
#
# Runs locally or in the cloud (Lambda).
def scan(domain: str, environment: dict, options: dict) -> dict:
    """Scan <domain>/privacy and report on the page.

    Collects the HTTP status code, the final URL after redirects, any
    mailto: addresses found on the page, and the text of its H1/H2/H3
    headings. Runs locally or in the cloud (Lambda).
    """
    logging.debug("Scan function called with options: %s" % options)

    results = {}
    url = 'https://' + domain + '/privacy'

    # Get status_code and the post-redirect URL for /privacy.
    try:
        response = requests.head(url, allow_redirects=True, timeout=4)
        results['status_code'] = str(response.status_code)
        results['final_url'] = response.url
    except Exception:
        logging.debug("could not get data from %s", url)
        # '-1' marks "request failed" so the CSV row still has a value.
        results['status_code'] = str(-1)
        results['final_url'] = ''

    # Search /privacy for email addresses in mailto: links.
    results['emails'] = []
    try:
        with urllib.request.urlopen(url, timeout=5) as privacypage:
            for line in privacypage:
                line = line.decode().rstrip()
                emails = re.findall('<a href="mailto:(.*?)"', line)
                if emails:
                    results['emails'] = mergelists(emails, results['emails'])
    except Exception:
        logging.debug('error while trying to retrieve emails from %s', url)

    # Search /privacy for H[123] tags (second fetch, preserving the
    # original two-pass structure).
    results['h1'] = []
    results['h2'] = []
    results['h3'] = []
    try:
        with urllib.request.urlopen(url, timeout=5) as privacypage:
            for line in privacypage:
                line = line.decode().rstrip()
                h1s = re.findall('<h1>(.*)</h1>', line)
                h2s = re.findall('<h2>(.*)</h2>', line)
                h3s = re.findall('<h3>(.*)</h3>', line)
                if h1s or h2s or h3s:
                    results['h1'] = mergelists(h1s, results['h1'])
                    results['h2'] = mergelists(h2s, results['h2'])
                    results['h3'] = mergelists(h3s, results['h3'])
    except Exception:
        # Fixed copy-paste: this pass retrieves headings, not emails.
        logging.debug('error while trying to retrieve headings from %s', url)

    logging.warning("sitemap %s Complete!", domain)
    return results
# Required CSV row conversion function. Usually one row, can be more.
#
# Run locally.
def to_rows(data):
    """Flatten one scan-result dict into a single CSV row, ordered by headers."""
    return [[data[page] for page in headers]]
# CSV headers for each row of data. Referenced locally.
headers = [
    'status_code',  # HTTP status for /privacy ('-1' when the request failed)
    'final_url',    # URL after following redirects ('' when the request failed)
    'emails',       # mailto: addresses found on the page
    'h1',           # text of <h1> headings
    'h2',           # text of <h2> headings
    'h3',           # text of <h3> headings
]
| 1,199 |
2,015 | <filename>src/picotorrent/core/http/httpresponse.hpp
#pragma once
#include <QObject>
#include <stdint.h>
#include <vector>
namespace pt
{
    // Result object for an asynchronous HTTP request. The owner connects to
    // the signals below to observe failure, completion and download progress.
    class HttpResponse : public QObject
    {
        Q_OBJECT

    public:
        HttpResponse();
        virtual ~HttpResponse();

        int statusCode;            // HTTP status code of the response
        std::vector<char> body;    // raw response payload

    signals:
        void error();
        void finished(HttpResponse* response);
        void progress(int64_t current, int64_t total);
    };
}
| 207 |
1,433 | <gh_stars>1000+
/*
* Copyright (c) 2016, 2017, 2018, 2019 FabricMC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.fabricmc.fabric.mixin.event.lifecycle;
import java.util.concurrent.CompletableFuture;
import org.spongepowered.asm.mixin.Final;
import org.spongepowered.asm.mixin.Mixin;
import org.spongepowered.asm.mixin.Shadow;
import org.spongepowered.asm.mixin.injection.At;
import org.spongepowered.asm.mixin.injection.Inject;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfo;
import org.spongepowered.asm.mixin.injection.callback.CallbackInfoReturnable;
import net.minecraft.server.world.ChunkHolder;
import net.minecraft.server.world.ServerWorld;
import net.minecraft.server.world.ThreadedAnvilChunkStorage;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.WorldChunk;
import net.fabricmc.fabric.api.event.lifecycle.v1.ServerChunkEvents;
/**
 * Fires Fabric's {@link ServerChunkEvents} load/unload lifecycle events by
 * hooking the vanilla chunk storage. The injection targets are synthetic
 * lambda methods (method_18843 / method_17227), so their names must stay
 * exactly as written.
 */
@Mixin(ThreadedAnvilChunkStorage.class)
public abstract class ThreadedAnvilChunkStorageMixin {
	@Shadow
	@Final
	private ServerWorld world;
	// Chunk (Un)Load events, An explanation:
	// Most of this code is wrapped inside of futures and consumers, so it's generally a mess.
	/**
	 * Injection is inside of tryUnloadChunk.
	 * We inject just after "setLoadedToWorld" is made false, since here the WorldChunk is guaranteed to be unloaded.
	 */
	@Inject(method = "method_18843", at = @At(value = "INVOKE", target = "Lnet/minecraft/world/chunk/WorldChunk;setLoadedToWorld(Z)V", shift = At.Shift.AFTER))
	private void onChunkUnload(ChunkHolder chunkHolder, CompletableFuture<Chunk> chunkFuture, long pos, Chunk chunk, CallbackInfo ci) {
		ServerChunkEvents.CHUNK_UNLOAD.invoker().onChunkUnload(this.world, (WorldChunk) chunk);
	}
	/**
	 * Injection is inside of convertToFullChunk?
	 *
	 * <p>The following is expected contractually
	 *
	 * <ul><li>the chunk being loaded MUST be a WorldChunk.
	 * <li>everything within the chunk has been loaded into the world. Entities, BlockEntities, etc.</ul>
	 */
	@Inject(method = "method_17227", at = @At("TAIL"))
	private void onChunkLoad(ChunkHolder chunkHolder, Chunk protoChunk, CallbackInfoReturnable<Chunk> callbackInfoReturnable) {
		// We fire the event at TAIL since the chunk is guaranteed to be a WorldChunk then.
		ServerChunkEvents.CHUNK_LOAD.invoker().onChunkLoad(this.world, (WorldChunk) callbackInfoReturnable.getReturnValue());
	}
}
| 923 |
1,489 | package me.ele.amigo.utils;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import java.io.File;
import java.io.IOException;
import me.ele.amigo.reflect.FieldUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
// NOTE(review): class name carries a typo (Sysm -> Sym); kept unchanged so
// existing test-runner references still resolve.
@RunWith(AndroidJUnit4.class)
public class SysmLinkTest {

    /**
     * Exercises SymbolicLinkUtil.createLink: the happy path plus three
     * failure modes (existing link, missing target, native lib unloaded).
     *
     * Fixed: each negative case previously passed silently when no
     * exception was thrown; Assert.fail() now guards each try block.
     */
    @Test
    public void testCreateSymbolicLink() throws IOException {
        Context appContext = InstrumentationRegistry.getTargetContext();
        File file = new File(appContext.getApplicationInfo().sourceDir);
        File link = new File(appContext.getFilesDir(), "source_apk_link");
        link.delete();

        // Happy path: link is created and resolves to the target.
        SymbolicLinkUtil.createLink(file, link);
        Assert.assertEquals(true, link.exists());
        Assert.assertEquals(link.getCanonicalPath(), file.getCanonicalPath());

        // Re-creating an existing link must fail.
        try {
            SymbolicLinkUtil.createLink(file, link);
            Assert.fail("expected createLink to reject an existing link");
        } catch (Exception e) {
            Assert.assertEquals(true, e.getMessage().contains("link file already exists"));
        }

        // Linking to a missing target must fail.
        try {
            link.delete();
            SymbolicLinkUtil.createLink(new File("not_exist_file"), link);
            Assert.fail("expected createLink to reject a missing target");
        } catch (Exception e) {
            Assert.assertEquals(true, e.getMessage().contains("target file doesn't exist"));
        }

        // With the native lib flagged as unloaded, creation must fail.
        try {
            FieldUtils.writeStaticField(SymbolicLinkUtil.class, "libLoaded", false);
            SymbolicLinkUtil.createLink(new File("not_exist_file"), link);
            Assert.fail("expected createLink to fail without the native lib");
        } catch (Exception e) {
            Assert.assertEquals(true, e.getMessage().contains("native lib loading failure"));
        }
    }
}
| 673 |
1,918 | # Copyright 2021, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Libraries for the federated CelebA dataset for simulation."""
import collections
import tensorflow as tf
from tensorflow_federated.python.simulation.datasets import download
from tensorflow_federated.python.simulation.datasets import sql_client_data
# Feature key under which each example's 84x84 RGB image tensor is stored.
IMAGE_NAME = 'image'
# Boolean celebrity-attribute feature keys, one per CelebA annotation.
ATTRIBUTE_NAMES = [
    'five_o_clock_shadow',
    'arched_eyebrows',
    'attractive',
    'bags_under_eyes',
    'bald',
    'bangs',
    'big_lips',
    'big_nose',
    'black_hair',
    'blond_hair',
    'blurry',
    'brown_hair',
    'bushy_eyebrows',
    'chubby',
    'double_chin',
    'eyeglasses',
    'goatee',
    'gray_hair',
    'heavy_makeup',
    'high_cheekbones',
    'male',
    'mouth_slightly_open',
    'mustache',
    'narrow_eyes',
    'no_beard',
    'oval_face',
    'pale_skin',
    'pointy_nose',
    'receding_hairline',
    'rosy_cheeks',
    'sideburns',
    'smiling',
    'straight_hair',
    'wavy_hair',
    'wearing_earrings',
    'wearing_hat',
    'wearing_lipstick',
    'wearing_necklace',
    'wearing_necktie',
    'young',
]
def _add_proto_parsing(dataset: tf.data.Dataset) -> tf.data.Dataset:
  """Add parsing of the tf.Example proto to the dataset pipeline."""

  def parse_proto(tensor_proto):
    # Image plus one int64 scalar per attribute; keys sorted alphabetically.
    feature_specs = [(IMAGE_NAME,
                      tf.io.FixedLenFeature(shape=(84, 84, 3),
                                            dtype=tf.int64))]
    feature_specs += [(attr, tf.io.FixedLenFeature(shape=(), dtype=tf.int64))
                      for attr in ATTRIBUTE_NAMES]
    parse_spec = collections.OrderedDict(sorted(feature_specs))
    parsed_features = tf.io.parse_example(tensor_proto, parse_spec)
    # Attributes are re-cast to bool; output keys are sorted the same way.
    outputs = [(IMAGE_NAME, parsed_features[IMAGE_NAME])]
    outputs += [(attr, tf.cast(parsed_features[attr], tf.bool))
                for attr in ATTRIBUTE_NAMES]
    return collections.OrderedDict(sorted(outputs))

  return dataset.map(parse_proto, num_parallel_calls=tf.data.AUTOTUNE)
def load_data(split_by_clients=True, cache_dir=None):
  """Loads the Federated CelebA dataset.

  Downloads and caches the dataset locally. If previously downloaded, tries to
  load the dataset from cache.

  This dataset is derived from the
  [LEAF repository](https://github.com/TalwalkarLab/leaf) preprocessing of the
  [CelebA dataset](https://mmlab.ie.cuhk.edu.hk/projects/CelebA.html),
  grouping examples by celebrity id. Details about LEAF were published in
  ["LEAF: A Benchmark for Federated
  Settings"](https://arxiv.org/abs/1812.01097), and details about CelebA were
  published in ["Deep Learning Face Attributes in the
  Wild"](https://arxiv.org/abs/1411.7766).

  The raw CelebA dataset contains 10,177 unique identities. During LEAF
  preprocessing, all clients with less than 5 examples are removed; this leaves
  9,343 clients.

  The data is available with train and test splits by clients or by examples.
  That is, when split by clients, ~90% of clients are selected for the train
  set, ~10% of clients are selected for test, and all the examples for a given
  user are part of the same data split. When split by examples, each client is
  located in both the train data and the test data, with ~90% of the examples
  on each client selected for train and ~10% of the examples selected for test.

  Data set sizes:

  *split_by_clients=True*:

  -   train: 8,408 clients, 180,429 total examples
  -   test: 935 clients, 19,859 total examples

  *split_by_clients=False*:

  -   train: 9,343 clients, 177,457 total examples
  -   test: 9,343 clients, 22,831 total examples

  The `tf.data.Datasets` returned by
  `tff.simulation.datasets.ClientData.create_tf_dataset_for_client` will yield
  `collections.OrderedDict` objects at each iteration. These objects have a
  key/value pair storing the image of the celebrity:

  -   `'image'`: a `tf.Tensor` with `dtype=tf.int64` and shape [84, 84, 3],
      containing the red/blue/green pixels of the image. Each pixel is a value
      in the range [0, 255].

  The OrderedDict objects also contain an additional 40 key/value pairs for the
  celebrity image attributes, each of the format:

  -   `{attribute name}`: a `tf.Tensor` with `dtype=tf.bool` and shape [1],
      set to True if the celebrity has this attribute in the image, or False
      if they don't.

  The attribute names are:
  'five_o_clock_shadow', 'arched_eyebrows', 'attractive', 'bags_under_eyes',
  'bald', 'bangs', 'big_lips', 'big_nose', 'black_hair', 'blond_hair',
  'blurry', 'brown_hair', 'bushy_eyebrows', 'chubby', 'double_chin',
  'eyeglasses', 'goatee', 'gray_hair', 'heavy_makeup', 'high_cheekbones',
  'male', 'mouth_slightly_open', 'mustache', 'narrow_eyes', 'no_beard',
  'oval_face', 'pale_skin', 'pointy_nose', 'receding_hairline', 'rosy_cheeks',
  'sideburns', 'smiling', 'straight_hair', 'wavy_hair', 'wearing_earrings',
  'wearing_hat', 'wearing_lipstick', 'wearing_necklace', 'wearing_necktie',
  'young'

  Note: The CelebA dataset may contain potential bias. The
  [fairness indicators TF tutorial](
  https://www.tensorflow.org/responsible_ai/fairness_indicators/tutorials/Fairness_Indicators_TFCO_CelebA_Case_Study)
  goes into detail about several considerations to keep in mind while using the
  CelebA dataset.

  Args:
    split_by_clients: There are 9,343 clients in the federated CelebA dataset
      with 5 or more examples. If this argument is True, clients are divided
      into train and test groups, with 8,408 and 935 clients respectively. If
      this argument is False, the data is divided by examples instead, i.e., all
      clients participate in both the train and test groups, with ~90% of the
      examples belonging to the train group and the rest belonging to the test
      group.
    cache_dir: (Optional) directory to cache the downloaded file. If `None`,
      caches in Keras' default cache directory.

  Returns:
    Tuple of `(train, test)` where the tuple elements are
    `tff.simulation.datasets.ClientData` objects.
  """
  database_path = download.get_compressed_file(
      origin='https://storage.googleapis.com/tff-datasets-public/celeba.sqlite.lzma',
      cache_dir=cache_dir)
  # Both split strategies live in the same SQLite database; only the
  # table-name prefix differs, so compute it once instead of duplicating
  # the construction in each branch.
  split_prefix = 'split_by_clients' if split_by_clients else 'split_by_examples'
  train_client_data = sql_client_data.SqlClientData(
      database_path, split_prefix + '_train').preprocess(_add_proto_parsing)
  test_client_data = sql_client_data.SqlClientData(
      database_path, split_prefix + '_test').preprocess(_add_proto_parsing)
  return train_client_data, test_client_data
| 2,668 |
11,356 | <reponame>koln67/shaka-packager
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: <EMAIL> (<NAME>)
// Based on original Protocol Buffers design by
// <NAME>, <NAME>, and others.
#include <google/protobuf/compiler/java/java_extension.h>
#include <google/protobuf/compiler/java/java_context.h>
#include <google/protobuf/compiler/java/java_doc_comment.h>
#include <google/protobuf/compiler/java/java_helpers.h>
#include <google/protobuf/compiler/java/java_name_resolver.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/stubs/strutil.h>
namespace google {
namespace protobuf {
namespace compiler {
namespace java {

ImmutableExtensionGenerator::ImmutableExtensionGenerator(
    const FieldDescriptor* descriptor, Context* context)
    : descriptor_(descriptor), context_(context),
      name_resolver_(context->GetNameResolver()) {
  // Nested extensions are scoped to their declaring message's generated
  // class; file-level extensions are scoped to the file's outer class.
  if (descriptor_->extension_scope() != NULL) {
    scope_ = name_resolver_->GetImmutableClassName(
        descriptor_->extension_scope());
  } else {
    scope_ = name_resolver_->GetImmutableClassName(descriptor_->file());
  }
}

ImmutableExtensionGenerator::~ImmutableExtensionGenerator() {}

// Initializes the vars referenced in the generated code templates.
void ExtensionGenerator::InitTemplateVars(
    const FieldDescriptor* descriptor, const string& scope, bool immutable,
    ClassNameResolver* name_resolver, std::map<string, string>* vars_pointer) {
  std::map<string, string> &vars = *vars_pointer;
  vars["scope"] = scope;
  vars["name"] = UnderscoresToCamelCase(descriptor);
  vars["containing_type"] =
      name_resolver->GetClassName(descriptor->containing_type(), immutable);
  vars["number"] = SimpleItoa(descriptor->number());
  vars["constant_name"] = FieldConstantName(descriptor);
  vars["index"] = SimpleItoa(descriptor->index());
  // Repeated extensions have no single default value.
  vars["default"] = descriptor->is_repeated() ?
      "" : DefaultValue(descriptor, immutable, name_resolver);
  vars["type_constant"] = FieldTypeName(GetType(descriptor));
  vars["packed"] = descriptor->options().packed() ? "true" : "false";
  // "null" placeholders are overridden below for enum/message types.
  vars["enum_map"] = "null";
  vars["prototype"] = "null";

  JavaType java_type = GetJavaType(descriptor);
  string singular_type;
  switch (java_type) {
    case JAVATYPE_MESSAGE:
      singular_type = name_resolver->GetClassName(descriptor->message_type(),
                                                  immutable);
      vars["prototype"] = singular_type + ".getDefaultInstance()";
      break;
    case JAVATYPE_ENUM:
      singular_type = name_resolver->GetClassName(descriptor->enum_type(),
                                                  immutable);
      vars["enum_map"] = singular_type + ".internalGetValueMap()";
      break;
    case JAVATYPE_STRING:
      singular_type = "java.lang.String";
      break;
    case JAVATYPE_BYTES:
      // Mutable API uses byte[]; immutable API uses ByteString.
      singular_type = immutable ? "com.google.protobuf.ByteString" : "byte[]";
      break;
    default:
      singular_type = BoxedPrimitiveTypeName(java_type);
      break;
  }
  // Repeated extensions surface as List<T> in the generated API.
  vars["type"] = descriptor->is_repeated() ?
      "java.util.List<" + singular_type + ">" : singular_type;
  vars["singular_type"] = singular_type;
}

// Emits the extension's field-number constant and its GeneratedExtension
// declaration into the enclosing generated class.
void ImmutableExtensionGenerator::Generate(io::Printer* printer) {
  std::map<string, string> vars;
  const bool kUseImmutableNames = true;
  InitTemplateVars(descriptor_, scope_, kUseImmutableNames, name_resolver_,
                   &vars);
  printer->Print(vars,
                 "public static final int $constant_name$ = $number$;\n");

  WriteFieldDocComment(printer, descriptor_);
  if (descriptor_->extension_scope() == NULL) {
    // Non-nested
    printer->Print(
        vars,
        "public static final\n"
        "  com.google.protobuf.GeneratedMessage.GeneratedExtension<\n"
        "    $containing_type$,\n"
        "    $type$> $name$ = com.google.protobuf.GeneratedMessage\n"
        "        .newFileScopedGeneratedExtension(\n"
        "      $singular_type$.class,\n"
        "      $prototype$);\n");
  } else {
    // Nested
    printer->Print(
        vars,
        "public static final\n"
        "  com.google.protobuf.GeneratedMessage.GeneratedExtension<\n"
        "    $containing_type$,\n"
        "    $type$> $name$ = com.google.protobuf.GeneratedMessage\n"
        "        .newMessageScopedGeneratedExtension(\n"
        "      $scope$.getDefaultInstance(),\n"
        "      $index$,\n"
        "      $singular_type$.class,\n"
        "      $prototype$);\n");
  }
}

// Emits the runtime initialization call for a file-scoped extension and
// returns an estimate of the generated bytecode size.
int ImmutableExtensionGenerator::GenerateNonNestedInitializationCode(
    io::Printer* printer) {
  int bytecode_estimate = 0;
  if (descriptor_->extension_scope() == NULL) {
    // Only applies to non-nested extensions.
    printer->Print(
        "$name$.internalInit(descriptor.getExtensions().get($index$));\n",
        "name", UnderscoresToCamelCase(descriptor_),
        "index", SimpleItoa(descriptor_->index()));
    bytecode_estimate += 21;
  }
  return bytecode_estimate;
}

// Emits registration of this extension into an ExtensionRegistry and
// returns the bytecode-size estimate for the emitted statement.
int ImmutableExtensionGenerator::GenerateRegistrationCode(
    io::Printer* printer) {
  printer->Print(
      "registry.add($scope$.$name$);\n",
      "scope", scope_,
      "name", UnderscoresToCamelCase(descriptor_));
  return 7;
}

}  // namespace java
}  // namespace compiler
}  // namespace protobuf
}  // namespace google
| 2,556 |
371 | #include "platform.h"
#include "platform_utils.h"
#include "../../_stereokit.h"
#include "../../log.h"
#include "win32.h"
#include "uwp.h"
#include "linux.h"
#include "android.h"
#include "openxr.h"
namespace sk {

// Currently active display mode; platform_set_mode() keeps this in sync.
display_mode_ platform_mode = display_mode_none;

///////////////////////////////////////////

// One-time platform startup: OS layer, graphics (sk_gpu), then the
// requested display mode, with an optional flatscreen fallback.
bool platform_init() {
	// Set up any platform dependant variables
#if defined(SK_OS_ANDROID)
	bool result = android_init();
#elif defined(SK_OS_LINUX)
	bool result = linux_init  ();
#elif defined(SK_OS_WINDOWS_UWP)
	bool result = uwp_init    ();
#elif defined(SK_OS_WINDOWS)
	bool result = win32_init  ();
#endif
	if (!result) {
		log_fail_reason(80, log_error, "Platform initialization failed!");
		return false;
	}

	// Initialize graphics
	// In MixedReality mode the GPU must match the XR runtime's adapter,
	// identified by its LUID.
	void *luid = sk_display_mode == display_mode_mixedreality
		? openxr_get_luid()
		: nullptr;
	skg_callback_log([](skg_log_ level, const char *text) {
		switch (level) {
		case skg_log_info:     log_diagf("sk_gpu: %s", text); break;
		case skg_log_warning:  log_warnf("sk_gpu: %s", text); break;
		case skg_log_critical: log_errf ("sk_gpu: %s", text); break;
		}
	});
	if (skg_init(sk_app_name, luid) <= 0) {
		log_fail_reason(95, log_error, "Failed to initialize sk_gpu!");
		return false;
	}

	// Start up the current mode!
	if (!platform_set_mode(sk_display_mode)) {
		if (!sk_no_flatscreen_fallback && sk_display_mode != display_mode_flatscreen) {
			log_infof("MixedReality display mode failed, falling back to Flatscreen");
			sk_display_mode = display_mode_flatscreen;
			return platform_set_mode(sk_display_mode);
		}
		log_errf("Couldn't initialize StereoKit in %s mode!", sk_display_mode == display_mode_mixedreality ? "MixedReality" : "Flatscreen");
		return false;
	}
	return platform_utils_init();
}

///////////////////////////////////////////

// Tears down in reverse order of platform_init.
void platform_shutdown() {
	platform_utils_shutdown();
	platform_stop_mode();
	skg_shutdown();

#if defined(SK_OS_ANDROID)
	android_shutdown();
#elif defined(SK_OS_LINUX)
	linux_shutdown  ();
#elif defined(SK_OS_WINDOWS_UWP)
	uwp_shutdown    ();
#elif defined(SK_OS_WINDOWS)
	win32_shutdown  ();
#endif
}

///////////////////////////////////////////

// Hands the native window handle to the platform layer (Android only;
// a no-op elsewhere).
void platform_set_window(void *window) {
#if defined(SK_OS_ANDROID)
	android_set_window(window);
#else
	(void)window;
#endif
}

///////////////////////////////////////////

// Xamarin variant of platform_set_window (Android only).
void platform_set_window_xam(void *window) {
#if defined(SK_OS_ANDROID)
	android_set_window_xam(window);
#else
	(void)window;
#endif
}

///////////////////////////////////////////

// Switches the active display mode, shutting down the previous one first.
// Returns false if the requested mode failed to start.
bool platform_set_mode(display_mode_ mode) {
	if (platform_mode == mode)
		return true;

	switch (mode) {
	case display_mode_none:         log_diag("Starting headless mode"); break;
	case display_mode_mixedreality: log_diag("Starting mixed reality mode"); break;
	case display_mode_flatscreen:   log_diag("Starting flatscreen mode"); break;
	}

	platform_stop_mode();

	bool result = true;
	if (mode == display_mode_mixedreality) {
		// Platform init before OpenXR
		#if defined(SK_OS_ANDROID)
		result = android_start_pre_xr();
		#elif defined(SK_OS_LINUX)
		result = linux_start_pre_xr();
		#elif defined(SK_OS_WINDOWS_UWP)
		result = uwp_start_pre_xr();
		#elif defined(SK_OS_WINDOWS)
		result = win32_start_pre_xr();
		#endif

		// Init OpenXR
		if (result) {
			result = openxr_init ();
		}

		// Platform init after OpenXR
		if (result) {
			#if defined(SK_OS_ANDROID)
			result = android_start_post_xr();
			#elif defined(SK_OS_LINUX)
			result = linux_start_post_xr();
			#elif defined(SK_OS_WINDOWS_UWP)
			result = uwp_start_post_xr();
			#elif defined(SK_OS_WINDOWS)
			result = win32_start_post_xr();
			#endif
		}
	} else if (mode == display_mode_flatscreen) {
		#if defined(SK_OS_ANDROID)
		result = android_start_flat();
		#elif defined(SK_OS_LINUX)
		result = linux_start_flat  ();
		#elif defined(SK_OS_WINDOWS_UWP)
		result = uwp_start_flat    ();
		#elif defined(SK_OS_WINDOWS)
		result = win32_start_flat  ();
		#endif
	}

	platform_mode = mode;
	return result;
}

///////////////////////////////////////////

// Per-frame begin: pump platform events for the current mode, then shared
// platform utilities.
void platform_step_begin() {
	switch (platform_mode) {
	case display_mode_none: break;
	case display_mode_mixedreality: {
		#if defined(SK_OS_ANDROID)
		android_step_begin_xr();
		#elif defined(SK_OS_LINUX)
		linux_step_begin_xr  ();
		#elif defined(SK_OS_WINDOWS_UWP)
		uwp_step_begin_xr    ();
		#elif defined(SK_OS_WINDOWS)
		win32_step_begin_xr  ();
		#endif
		openxr_step_begin();
	} break;
	case display_mode_flatscreen: {
		#if defined(SK_OS_ANDROID)
		android_step_begin_flat();
		#elif defined(SK_OS_LINUX)
		linux_step_begin_flat  ();
		#elif defined(SK_OS_WINDOWS_UWP)
		uwp_step_begin_flat    ();
		#elif defined(SK_OS_WINDOWS)
		win32_step_begin_flat  ();
		#endif
	} break;
	}
	platform_utils_update();
}

///////////////////////////////////////////

// Per-frame end: present/submit for the current mode.
void platform_step_end() {
	switch (platform_mode) {
	case display_mode_none: break;
	case display_mode_mixedreality: openxr_step_end(); break;
	case display_mode_flatscreen: {
		#if defined(SK_OS_ANDROID)
		android_step_end_flat();
		#elif defined(SK_OS_LINUX)
		linux_step_end_flat  ();
		#elif defined(SK_OS_WINDOWS_UWP)
		uwp_step_end_flat    ();
		#elif defined(SK_OS_WINDOWS)
		win32_step_end_flat  ();
		#endif
	} break;
	}
}

///////////////////////////////////////////

// Shuts down whatever the current display mode set up; safe to call when
// no mode is active.
void platform_stop_mode() {
	switch (platform_mode) {
	case display_mode_none: break;
	case display_mode_mixedreality: openxr_shutdown(); break;
	case display_mode_flatscreen: {
		#if defined(SK_OS_ANDROID)
		android_stop_flat();
		#elif defined(SK_OS_LINUX)
		linux_stop_flat  ();
		#elif defined(SK_OS_WINDOWS_UWP)
		uwp_stop_flat    ();
		#elif defined(SK_OS_WINDOWS)
		win32_stop_flat  ();
		#endif
	} break;
	}
}

} // namespace sk
| 2,225 |
379 | // Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Description:
// mpc protocol base class
#pragma once
#include "abstract_network.h"
#include "gloo/rendezvous/hash_store.h"
#include "mpc_config.h"
#include "mpc_operators.h"
#include "core/paddlefl_mpc/mpc_protocol/abstract_context.h"
namespace paddle {
namespace mpc {

// Abstract base for an MPC protocol implementation: bundles the operator
// set, the network layer and the protocol context behind one interface.
class MpcProtocol {
public:
  MpcProtocol(const std::string &name) : _name(name){};

  virtual ~MpcProtocol() = default;

  // Protocol name used for registration/lookup.
  virtual std::string name() const { return _name; }

  virtual void init(MpcConfig &config) = 0;

  // for test purpose: initialize using a caller-supplied rendezvous store.
  virtual void
  init_with_store(const MpcConfig &config,
                  std::shared_ptr<gloo::rendezvous::Store> store) = 0;

  virtual std::shared_ptr<MpcOperators> mpc_operators() = 0;

  virtual std::shared_ptr<AbstractNetwork> network() = 0;

  virtual std::shared_ptr<AbstractContext> mpc_context() = 0;

private:
  const std::string _name;
};

} // mpc
} // paddle
| 488 |
892 | <filename>advisories/unreviewed/2022/05/GHSA-5xjg-wggg-qh5w/GHSA-5xjg-wggg-qh5w.json
{
"schema_version": "1.2.0",
"id": "GHSA-5xjg-wggg-qh5w",
"modified": "2022-05-02T06:11:52Z",
"published": "2022-05-02T06:11:52Z",
"aliases": [
"CVE-2010-0358"
],
"details": "Heap-based buffer overflow in the server in IBM Lotus Domino 7 and 8.5 FP1 allows remote attackers to cause a denial of service (daemon exit) and possibly have unspecified other impact via a long string in a crafted LDAP message to a TCP port, a different vulnerability than CVE-2009-3087.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2010-0358"
},
{
"type": "WEB",
"url": "http://intevydis.blogspot.com/2010/01/lotus-domino-7-probably-8-ldap-heap.html"
},
{
"type": "WEB",
"url": "http://intevydis.com/vd-list.shtml"
},
{
"type": "WEB",
"url": "http://securitytracker.com/id?1023456"
}
],
"database_specific": {
"cwe_ids": [
"CWE-119"
],
"severity": "HIGH",
"github_reviewed": false
}
} | 538 |
2,536 | <reponame>KWik/munki<filename>code/client/munkilib/munkirepo/MWA2APIRepo.py
# encoding: utf-8
'''Defines MWA2APIRepo plugin. See docstring for MWA2APIRepo class'''
from __future__ import absolute_import, print_function
import base64
import getpass
import os
import subprocess
import tempfile
try:
# Python 2
from urllib2 import quote
except ImportError:
from urllib.parse import quote
from munkilib.munkirepo import Repo, RepoError
from munkilib.wrappers import get_input, readPlistFromString, PlistReadError
# When True, CurlError exceptions carry the generated curl directive file
# and command line for debugging.
DEBUG = False
# TODO: make this more easily configurable
CURL_CMD = '/usr/bin/curl'  # path to the curl binary used for all API traffic
class CurlError(Exception):
    '''Raised when an underlying curl invocation fails.'''
class MWA2APIRepo(Repo):
'''Class for working with a repo accessible via the MWA2 API'''
    # pylint: disable=super-init-not-called
    def __init__(self, baseurl):
        '''Store the API base URL and acquire an auth token immediately.

        Args:
            baseurl: base URL of the MWA2 API; relative resource paths are
                joined onto it for each request.
        '''
        self.baseurl = baseurl
        # Populated by _connect() (environment variable or interactive
        # credentials).
        self.authtoken = None
        self._connect()
    # pylint: enable=super-init-not-called
def _connect(self):
'''For a fileshare repo, we'd mount the share, prompting for
credentials if needed. For the API repo, we'll look for a stored
authtoken; if we don't find one, we'll prompt for credentials
and make an authtoken.'''
if not self.authtoken:
if 'MUNKIREPO_AUTHTOKEN' in os.environ:
self.authtoken = os.environ['MUNKIREPO_AUTHTOKEN']
else:
print('Please provide credentials for %s:' % self.baseurl)
username = get_input('Username: ')
password = <PASSWORD>()
user_and_pass = ('%s:%s' % (username, password)).encode("UTF-8")
self.authtoken = 'Basic %s' % base64.b64encode(
user_and_pass).decode("UTF-8")
    def _curl(self, relative_url, headers=None, method='GET',
              filename=None, content=None, formdata=None):
        '''Use curl to talk to MWA2 API.

        Args:
            relative_url: resource path, joined onto self.baseurl.
            headers: optional dict of extra HTTP headers.
            method: HTTP method ('GET', 'PUT', 'POST', ...).
            filename: for GET, download destination; for PUT/POST, the
                file whose contents are sent as the request body.
            content: request body bytes for PUT/POST (ignored if filename
                is given for those methods).
            formdata: optional iterable of curl 'form' directive strings.

        Returns:
            The response body as bytes (curl's stdout).

        Raises:
            CurlError: if curl exits non-zero; carries (returncode, stderr)
                plus the directives and command line when DEBUG is True.
        '''
        # we use a config/directive file to avoid having the auth header show
        # up in a process listing
        contentpath = None
        fileref, directivepath = tempfile.mkstemp()
        fileobj = os.fdopen(fileref, 'w')
        print('silent', file=fileobj)         # no progress meter
        print('show-error', file=fileobj)     # print error msg to stderr
        print('fail', file=fileobj)           # throw error if download fails
        print('location', file=fileobj)       # follow redirects
        print('request = %s' % method, file=fileobj)
        if headers:
            for key in headers:
                print('header = "%s: %s"' % (key, headers[key]), file=fileobj)
        print('header = "Authorization: %s"' % self.authtoken, file=fileobj)
        if formdata:
            for line in formdata:
                print('form = "%s"' % line, file=fileobj)
        url = os.path.join(self.baseurl, relative_url)
        print('url = "%s"' % url, file=fileobj)
        fileobj.close()

        cmd = [CURL_CMD, '-q', '--config', directivepath]
        if filename and method == 'GET':
            # Download straight to the requested file.
            cmd.extend(['-o', filename])
        if filename and method in ('PUT', 'POST'):
            # Upload the file's contents as the request body.
            cmd.extend(['-d', '@%s' % filename])
        elif content and method in ('PUT', 'POST'):
            if len(content) > 1024:
                # it's a lot of data; let's write it to a local file first
                # because we can't really pass it all via subprocess
                fileref, contentpath = tempfile.mkstemp()
                fileobj = os.fdopen(fileref, 'wb')
                fileobj.write(content)
                fileobj.close()
                cmd.extend(['-d', '@%s' % contentpath])
            else:
                cmd.extend(['-d', content])

        proc = subprocess.Popen(cmd, shell=False, bufsize=-1,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        output, err = proc.communicate()
        err = err.decode('UTF-8')

        if DEBUG:
            # save our curl_directives for debugging
            fileref = open(directivepath)
            curl_directives = fileref.read()
            fileref.close()
        # Best-effort cleanup of the temp files.
        try:
            os.unlink(directivepath)
            if contentpath:
                os.unlink(contentpath)
        except OSError:
            pass
        if proc.returncode:
            if DEBUG:
                raise CurlError((proc.returncode, err, curl_directives, cmd))
            else:
                raise CurlError((proc.returncode, err))
        return output
def itemlist(self, kind):
'''Returns a list of identifiers for each item of kind.
Kind might be 'catalogs', 'manifests', 'pkgsinfo', 'pkgs', or 'icons'.
For a file-backed repo this would be a list of pathnames.'''
url = quote(kind.encode('UTF-8')) + '?api_fields=filename'
headers = {'Accept': 'application/xml'}
try:
data = self._curl(url, headers=headers)
except CurlError as err:
raise RepoError(err)
try:
plist = readPlistFromString(data)
except PlistReadError as err:
raise RepoError(err)
if kind in ['catalogs', 'manifests', 'pkgsinfo']:
# it's a list of dicts containing 'filename' key/values
return [item['filename'] for item in plist]
# it's a list of filenames (pkgs, icons)
return plist
def get(self, resource_identifier):
'''Returns the content of item with given resource_identifier.
For a file-backed repo, a resource_identifier of
'pkgsinfo/apps/Firefox-52.0.plist' would return the contents of
<repo_root>/pkgsinfo/apps/Firefox-52.0.plist.
Avoid using this method with the 'pkgs' kind as it might return a
really large blob of data.'''
url = quote(resource_identifier.encode('UTF-8'))
if resource_identifier.startswith(
('catalogs/', 'manifests/', 'pkgsinfo/')):
headers = {'Accept': 'application/xml'}
else:
headers = {}
try:
return self._curl(url, headers=headers)
except CurlError as err:
raise RepoError(err)
def get_to_local_file(self, resource_identifier, local_file_path):
'''Gets the contents of item with given resource_identifier and saves
it to local_file_path.
For a file-backed repo, a resource_identifier
of 'pkgsinfo/apps/Firefox-52.0.plist' would copy the contents of
<repo_root>/pkgsinfo/apps/Firefox-52.0.plist to a local file given by
local_file_path.'''
url = quote(resource_identifier.encode('UTF-8'))
if resource_identifier.startswith(
('catalogs/', 'manifests/', 'pkgsinfo/')):
headers = {'Accept': 'application/xml'}
else:
headers = {}
try:
self._curl(url, headers=headers, filename=local_file_path)
except CurlError as err:
raise RepoError(err)
def put(self, resource_identifier, content):
'''Stores content on the repo based on resource_identifier.
For a file-backed repo, a resource_identifier of
'pkgsinfo/apps/Firefox-52.0.plist' would result in the content being
saved to <repo_root>/pkgsinfo/apps/Firefox-52.0.plist.'''
url = quote(resource_identifier.encode('UTF-8'))
if resource_identifier.startswith(
('catalogs/', 'manifests/', 'pkgsinfo/')):
headers = {'Content-type': 'application/xml'}
else:
headers = {}
try:
self._curl(url, headers=headers, method='PUT', content=content)
except CurlError as err:
raise RepoError(err)
def put_from_local_file(self, resource_identifier, local_file_path):
'''Copies the content of local_file_path to the repo based on
resource_identifier. For a file-backed repo, a resource_identifier
of 'pkgsinfo/apps/Firefox-52.0.plist' would result in the content
being saved to <repo_root>/pkgsinfo/apps/Firefox-52.0.plist.'''
url = quote(resource_identifier.encode('UTF-8'))
if resource_identifier.startswith(('pkgs/', 'icons/')):
# MWA2API only supports POST for pkgs and icons
# and file uploads need to be form encoded
formdata = ['filedata=@%s' % local_file_path]
try:
self._curl(url, method='POST', formdata=formdata)
except CurlError as err:
raise RepoError(err)
else:
headers = {'Content-type': 'application/xml'}
try:
self._curl(url, headers=headers, method='PUT',
filename=local_file_path)
except CurlError as err:
raise RepoError(err)
def delete(self, resource_identifier):
'''Deletes a repo object located by resource_identifier.
For a file-backed repo, a resource_identifier of
'pkgsinfo/apps/Firefox-52.0.plist' would result in the deletion of
<repo_root>/pkgsinfo/apps/Firefox-52.0.plist.'''
url = quote(resource_identifier.encode('UTF-8'))
try:
self._curl(url, method='DELETE')
except CurlError as err:
raise RepoError(err)
| 4,341 |
1,113 | <filename>app/src/main/java/org/estgroup/phphub/api/entity/TopicEntity.java
package org.estgroup.phphub.api.entity;
import org.estgroup.phphub.api.entity.element.Topic;
import java.util.List;
/**
 * Envelope for the topic-list API response: the server nests the payload
 * under a top-level "data" key, which this entity mirrors.
 */
public class TopicEntity {
    // List payload of a multi-topic response.
    protected List<Topic> data;

    public List<Topic> getData() {
        return data;
    }

    public void setData(List<Topic> data) {
        this.data = data;
    }

    /**
     * Envelope for a single-topic response.
     *
     * NOTE(review): this is a non-static inner class; JSON mappers such as
     * Gson generally need a static nested class to instantiate it without an
     * enclosing instance — confirm how this type is deserialized before
     * relying on it.
     */
    public class ATopic {
        // Single-topic payload under the "data" key.
        protected Topic data;

        public Topic getData() {
            return data;
        }

        public void setData(Topic data) {
            this.data = data;
        }
    }
}
| 265 |
582 | <reponame>OLibutzki/moduliths
/*
* Copyright 2019-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.moduliths.test;
import java.util.Arrays;
import java.util.Collection;
import java.util.function.Function;
import java.util.function.Predicate;
import org.springframework.util.Assert;
/**
* All Spring application events fired during the test execution.
*
* @author <NAME>
*/
public interface PublishedEvents {

	/**
	 * Creates a new {@link PublishedEvents} instance for the given events.
	 *
	 * @param events must not be {@literal null}.
	 * @return will never be {@literal null}.
	 */
	public static PublishedEvents of(Object... events) {
		return of(Arrays.asList(events));
	}

	/**
	 * Creates a new {@link PublishedEvents} instance for the given events.
	 *
	 * @param events must not be {@literal null}.
	 * @return will never be {@literal null}.
	 */
	public static PublishedEvents of(Collection<? extends Object> events) {

		Assert.notNull(events, "Events must not be null!");

		return new DefaultPublishedEvents(events);
	}

	/**
	 * Returns all application events of the given type that were fired during the test execution.
	 *
	 * @param <T> the event type
	 * @param type must not be {@literal null}.
	 * @return will never be {@literal null}.
	 */
	<T> TypedPublishedEvents<T> ofType(Class<T> type);

	/**
	 * All application events of a given type that were fired during a test execution.
	 *
	 * @author <NAME>
	 * @param <T> the event type
	 */
	interface TypedPublishedEvents<T> extends Iterable<T> {

		/**
		 * Further constrain the event type for downstream assertions.
		 *
		 * @param <S> the sub type to narrow to
		 * @param subType the sub type
		 * @return will never be {@literal null}.
		 */
		<S extends T> TypedPublishedEvents<S> ofSubType(Class<S> subType);

		/**
		 * Returns all {@link TypedPublishedEvents} that match the given predicate.
		 *
		 * @param predicate must not be {@literal null}.
		 * @return will never be {@literal null}.
		 */
		TypedPublishedEvents<T> matching(Predicate<? super T> predicate);

		/**
		 * Returns all {@link TypedPublishedEvents} that match the given predicate after applying the given mapping step.
		 *
		 * @param <S> the intermediate type to apply the {@link Predicate} on
		 * @param mapper the mapping step to extract a part of the original event subject to test for the {@link Predicate}.
		 * @param predicate the {@link Predicate} to apply on the value extracted.
		 * @return will never be {@literal null}.
		 */
		<S> TypedPublishedEvents<T> matchingMapped(Function<T, S> mapper, Predicate<? super S> predicate);
	}
}
| 929 |
478 | /*
Copyright (c) 2014 Aerys
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute,
sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or
substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING
BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#pragma once

// Possible implementations:
// - Web Worker-based (web, sync)
// - Callback-based (web, async eg. xhr)
// - Thread-based (native)
// - Fork-based (native)

// Actual implementation loaded from Worker.cpp.
// Pick a default backend only when *neither* backend was selected
// explicitly.
// BUG FIX: the original condition tested MINKO_WORKER_IMPL_WEBWORKER twice,
// so explicitly defining MINKO_WORKER_IMPL_THREAD on an Emscripten build
// still defined MINKO_WORKER_IMPL_WEBWORKER as well, producing two
// conflicting definitions of MINKO_DEFINE_WORKER below.
#if !defined(MINKO_WORKER_IMPL_WEBWORKER) && !defined(MINKO_WORKER_IMPL_THREAD)
# if defined(EMSCRIPTEN)
#  define MINKO_WORKER_IMPL_WEBWORKER
# else
#  define MINKO_WORKER_IMPL_THREAD
# endif
#endif

#if defined(MINKO_WORKER_IMPL_WEBWORKER)
// Web Worker backend: also emits the C entry point Emscripten calls with the
// raw input buffer before forwarding to Class::run().
# define MINKO_DEFINE_WORKER(Class, Code)                   \
    void minkoWorkerEntryPoint(char* data, int size)        \
    {                                                       \
        auto worker = Class ::create("");                   \
        std::vector<char> input(data, data + size);         \
        worker->run(input);                                 \
    }                                                       \
                                                            \
    void Class ::run(const std::vector<char>& input)        \
    {                                                       \
        Code ;                                              \
    }
#endif

#if defined(MINKO_WORKER_IMPL_THREAD)
// Thread backend: only the run() member is needed.
# define MINKO_DEFINE_WORKER(Class, Code)                   \
    void Class ::run(const std::vector<char>& input)        \
    {                                                       \
        Code ;                                              \
    }
#endif

// Not specific to any implementation (at the moment).
// Declares a Worker subclass with a static factory and a private
// name-forwarding constructor; the run() body comes from
// MINKO_DEFINE_WORKER.
#define MINKO_DECLARE_WORKER(Class)                         \
    class Class : public ::minko::async::Worker             \
    {                                                       \
    public:                                                 \
        static Ptr create(const std::string& name)          \
        {                                                   \
            return std::shared_ptr<Class>(new Class(name)); \
        }                                                   \
                                                            \
        void run(const std::vector<char>& input);           \
                                                            \
    private:                                                \
        Class(const std::string& name) : Worker(name)       \
        {                                                   \
        }                                                   \
    };
| 2,508 |
809 | /**
* @file main.c
* @brief Control FPGA leds with HPS
* @author <NAME> <<EMAIL>>
* @version
* @date 29.01.2020
*/
#include <stdio.h>
#include <unistd.h>
#include <mem/vmem.h>
#include "hps_0.h"
#include "led.h"
int main(int argc, char **argv) {
    int level;

    /* Map the lightweight HPS-to-FPGA bridge register window so the LED PIO
     * is reachable at its physical address (identity mapping, uncached). */
    vmem_map_region(vmem_current_context(),
        (mmu_paddr_t) ~MMU_PAGE_MASK & (ALT_LWFPGASLVS_OFST + LED_PIO_BASE),
        (mmu_vaddr_t) ~MMU_PAGE_MASK & (ALT_LWFPGASLVS_OFST + LED_PIO_BASE),
        HW_REGS_SPAN,
        PROT_READ | PROT_NOCACHE | PROT_WRITE);

    /* Endlessly sweep the LED bar up (light 0..8) then down (off 0..8),
     * pausing 100 ms between steps. */
    for (;;) {
        printf("LED ON\n");
        for (level = 0; level <= 8; level++) {
            LEDR_LightCount(level);
            usleep(100 * 1000);
        }

        printf("LED OFF\n");
        for (level = 0; level <= 8; level++) {
            LEDR_OffCount(level);
            usleep(100 * 1000);
        }
    }

    return 0;
}
| 432 |
310 | <filename>gear/software/a/aquamacs.json
{
"name": "Aquamacs",
"description": "A Mac OS X native version of Emacs.",
"url": "http://aquamacs.org/"
} | 58 |
2,151 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/undo/bookmark_undo_service.h"
#include <stddef.h>
#include <memory>
#include "base/macros.h"
#include "base/strings/utf_string_conversions.h"
#include "components/bookmarks/browser/bookmark_model.h"
#include "components/bookmarks/test/bookmark_test_helpers.h"
#include "components/bookmarks/test/test_bookmark_client.h"
#include "testing/gtest/include/gtest/gtest.h"
using base::ASCIIToUTF16;
using bookmarks::BookmarkModel;
using bookmarks::BookmarkNode;
namespace {
// Test fixture that owns a TestBookmarkClient-backed BookmarkModel plus a
// BookmarkUndoService wired to it, rebuilt for every test.
class BookmarkUndoServiceTest : public testing::Test {
 public:
  BookmarkUndoServiceTest();

  void SetUp() override;
  void TearDown() override;

  // Accessors for the per-test model and undo service.
  BookmarkModel* GetModel();
  BookmarkUndoService* GetUndoService();

 private:
  std::unique_ptr<bookmarks::BookmarkModel> bookmark_model_;
  std::unique_ptr<BookmarkUndoService> bookmark_undo_service_;

  DISALLOW_COPY_AND_ASSIGN(BookmarkUndoServiceTest);
};
BookmarkUndoServiceTest::BookmarkUndoServiceTest() {}
void BookmarkUndoServiceTest::SetUp() {
DCHECK(!bookmark_model_);
DCHECK(!bookmark_undo_service_);
bookmark_model_ = bookmarks::TestBookmarkClient::CreateModel();
bookmark_undo_service_.reset(new BookmarkUndoService);
bookmark_undo_service_->Start(bookmark_model_.get());
bookmarks::test::WaitForBookmarkModelToLoad(bookmark_model_.get());
}
BookmarkModel* BookmarkUndoServiceTest::GetModel() {
return bookmark_model_.get();
}
BookmarkUndoService* BookmarkUndoServiceTest::GetUndoService() {
return bookmark_undo_service_.get();
}
void BookmarkUndoServiceTest::TearDown() {
// Implement two-phase KeyedService shutdown for test KeyedServices.
bookmark_undo_service_->Shutdown();
bookmark_model_->Shutdown();
bookmark_undo_service_.reset();
bookmark_model_.reset();
}
// Test that a bookmark creation action can be undone and redone.
TEST_F(BookmarkUndoServiceTest, AddBookmark) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  const BookmarkNode* parent = model->other_node();
  model->AddURL(parent, 0, ASCIIToUTF16("foo"), GURL("http://www.bar.com"));

  // Undo bookmark creation and test for no bookmarks.
  undo_service->undo_manager()->Undo();
  EXPECT_EQ(0, model->other_node()->child_count());

  // Redo bookmark creation and ensure bookmark information is valid.
  undo_service->undo_manager()->Redo();
  const BookmarkNode* node = parent->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));
}
// Test that a bookmark removal action can be undone and redone.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkRemove) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  const BookmarkNode* parent = model->other_node();
  model->AddURL(parent, 0, ASCIIToUTF16("foo"), GURL("http://www.bar.com"));
  model->Remove(parent->GetChild(0));
  // Both the add and the remove are recorded, so two undo groups exist.
  EXPECT_EQ(2U, undo_service->undo_manager()->undo_count());
  EXPECT_EQ(0U, undo_service->undo_manager()->redo_count());

  // Undo the deletion of the only bookmark and check the bookmark values.
  undo_service->undo_manager()->Undo();
  EXPECT_EQ(1, model->other_node()->child_count());
  const BookmarkNode* node = parent->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));
  // The undone group moved from the undo stack to the redo stack.
  EXPECT_EQ(1U, undo_service->undo_manager()->undo_count());
  EXPECT_EQ(1U, undo_service->undo_manager()->redo_count());

  // Redo the deletion and check that there are no bookmarks left.
  undo_service->undo_manager()->Redo();
  EXPECT_EQ(0, model->other_node()->child_count());
  EXPECT_EQ(2U, undo_service->undo_manager()->undo_count());
  EXPECT_EQ(0U, undo_service->undo_manager()->redo_count());
}
// Ensure the undo/redo works for editing of bookmark information grouped into
// one action.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkGroupedAction) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  const BookmarkNode* n1 = model->AddURL(model->other_node(),
                                         0,
                                         ASCIIToUTF16("foo"),
                                         GURL("http://www.foo.com"));
  // Title and URL edits are grouped into a single undoable action.
  undo_service->undo_manager()->StartGroupingActions();
  model->SetTitle(n1, ASCIIToUTF16("bar"));
  model->SetURL(n1, GURL("http://www.bar.com"));
  undo_service->undo_manager()->EndGroupingActions();
  // Two groups total: the creation and the grouped edit.
  EXPECT_EQ(2U, undo_service->undo_manager()->undo_count());
  EXPECT_EQ(0U, undo_service->undo_manager()->redo_count());

  // Undo the modification of the bookmark and check for the original values.
  undo_service->undo_manager()->Undo();
  EXPECT_EQ(1, model->other_node()->child_count());
  const BookmarkNode* node = model->other_node()->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.foo.com"));

  // Redo the modifications and ensure the newer values are present.
  undo_service->undo_manager()->Redo();
  EXPECT_EQ(1, model->other_node()->child_count());
  node = model->other_node()->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));
  EXPECT_EQ(2U, undo_service->undo_manager()->undo_count());
  EXPECT_EQ(0U, undo_service->undo_manager()->redo_count());
}
// Test moving bookmarks within a folder and between folders.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkMoveWithinFolder) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  const BookmarkNode* n1 = model->AddURL(model->other_node(),
                                         0,
                                         ASCIIToUTF16("foo"),
                                         GURL("http://www.foo.com"));
  const BookmarkNode* n2 = model->AddURL(model->other_node(),
                                         1,
                                         ASCIIToUTF16("moo"),
                                         GURL("http://www.moo.com"));
  const BookmarkNode* n3 = model->AddURL(model->other_node(),
                                         2,
                                         ASCIIToUTF16("bar"),
                                         GURL("http://www.bar.com"));
  // Move the first node past the end (index 3 before removal -> last).
  model->Move(n1, model->other_node(), 3);

  // Undo the move and check that the nodes are in order.
  undo_service->undo_manager()->Undo();
  EXPECT_EQ(model->other_node()->GetChild(0), n1);
  EXPECT_EQ(model->other_node()->GetChild(1), n2);
  EXPECT_EQ(model->other_node()->GetChild(2), n3);

  // Redo the move and check that the first node is in the last position.
  undo_service->undo_manager()->Redo();
  EXPECT_EQ(model->other_node()->GetChild(0), n2);
  EXPECT_EQ(model->other_node()->GetChild(1), n3);
  EXPECT_EQ(model->other_node()->GetChild(2), n1);
}
// Test undo of a bookmark moved to a different folder.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkMoveToOtherFolder) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  const BookmarkNode* n1 = model->AddURL(model->other_node(),
                                         0,
                                         ASCIIToUTF16("foo"),
                                         GURL("http://www.foo.com"));
  const BookmarkNode* n2 = model->AddURL(model->other_node(),
                                         1,
                                         ASCIIToUTF16("moo"),
                                         GURL("http://www.moo.com"));
  const BookmarkNode* n3 = model->AddURL(model->other_node(),
                                         2,
                                         ASCIIToUTF16("bar"),
                                         GURL("http://www.bar.com"));
  const BookmarkNode* f1 =
      model->AddFolder(model->other_node(), 3, ASCIIToUTF16("folder"));
  // Move the third bookmark into the sibling folder.
  model->Move(n3, f1, 0);

  // Undo the move and check that the bookmark and folder are in place.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(4, model->other_node()->child_count());
  EXPECT_EQ(model->other_node()->GetChild(0), n1);
  EXPECT_EQ(model->other_node()->GetChild(1), n2);
  EXPECT_EQ(model->other_node()->GetChild(2), n3);
  EXPECT_EQ(model->other_node()->GetChild(3), f1);
  EXPECT_EQ(0, f1->child_count());

  // Redo the move back into the folder and check validity.
  undo_service->undo_manager()->Redo();
  ASSERT_EQ(3, model->other_node()->child_count());
  EXPECT_EQ(model->other_node()->GetChild(0), n1);
  EXPECT_EQ(model->other_node()->GetChild(1), n2);
  EXPECT_EQ(model->other_node()->GetChild(2), f1);
  ASSERT_EQ(1, f1->child_count());
  EXPECT_EQ(f1->GetChild(0), n3);
}
// Tests the handling of multiple modifications that include renumbering of the
// bookmark identifiers.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkRenameDelete) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  // Four undoable actions: folder creation, bookmark creation, rename,
  // folder deletion.
  const BookmarkNode* f1 = model->AddFolder(model->other_node(),
                                            0,
                                            ASCIIToUTF16("folder"));
  model->AddURL(f1, 0, ASCIIToUTF16("foo"), GURL("http://www.foo.com"));
  model->SetTitle(f1, ASCIIToUTF16("Renamed"));
  model->Remove(model->other_node()->GetChild(0));

  // Undo the folder removal and ensure the folder and bookmark were restored.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(1, model->other_node()->child_count());
  ASSERT_EQ(1, model->other_node()->GetChild(0)->child_count());
  const BookmarkNode* node = model->other_node()->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("Renamed"));
  node = model->other_node()->GetChild(0)->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.foo.com"));

  // Undo the title change and ensure the folder was updated even though the
  // id has changed.
  undo_service->undo_manager()->Undo();
  node = model->other_node()->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("folder"));

  // Undo bookmark creation and test for removal of bookmark.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(0, model->other_node()->GetChild(0)->child_count());

  // Undo folder creation and confirm the bookmark model is empty.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(0, model->other_node()->child_count());

  // Redo all the actions and ensure the folder and bookmark are restored.
  undo_service->undo_manager()->Redo();  // folder creation
  undo_service->undo_manager()->Redo();  // bookmark creation
  undo_service->undo_manager()->Redo();  // bookmark title change
  ASSERT_EQ(1, model->other_node()->child_count());
  ASSERT_EQ(1, model->other_node()->GetChild(0)->child_count());
  node = model->other_node()->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("Renamed"));
  node = model->other_node()->GetChild(0)->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.foo.com"));

  undo_service->undo_manager()->Redo();  // folder deletion
  EXPECT_EQ(0, model->other_node()->child_count());
}
// Test the undo of SortChildren and ReorderChildren.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkReorder) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  const BookmarkNode* parent = model->other_node();
  model->AddURL(parent, 0, ASCIIToUTF16("foo"), GURL("http://www.foo.com"));
  model->AddURL(parent, 1, ASCIIToUTF16("moo"), GURL("http://www.moo.com"));
  model->AddURL(parent, 2, ASCIIToUTF16("bar"), GURL("http://www.bar.com"));
  model->SortChildren(parent);

  // Test the undo of SortChildren: the original insertion order returns.
  undo_service->undo_manager()->Undo();
  const BookmarkNode* node = parent->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.foo.com"));
  node = parent->GetChild(1);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("moo"));
  EXPECT_EQ(node->url(), GURL("http://www.moo.com"));
  node = parent->GetChild(2);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));

  // Test the redo of SortChildren: titles sort to bar, foo, moo.
  undo_service->undo_manager()->Redo();
  node = parent->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));
  node = parent->GetChild(1);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.foo.com"));
  node = parent->GetChild(2);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("moo"));
  // FIX: the expected URL literal was corrupted ("http://www.m<EMAIL>");
  // the "moo" node added above was created with http://www.moo.com.
  EXPECT_EQ(node->url(), GURL("http://www.moo.com"));
}
// Test that RemoveAllUserBookmarks can be undone (restoring every folder and
// bookmark in both permanent nodes) and redone.
TEST_F(BookmarkUndoServiceTest, UndoBookmarkRemoveAll) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  // Setup bookmarks in the Other Bookmarks and the Bookmark Bar.
  const BookmarkNode* new_folder;
  const BookmarkNode* parent = model->other_node();
  model->AddURL(parent, 0, ASCIIToUTF16("foo"), GURL("http://www.google.com"));
  new_folder= model->AddFolder(parent, 1, ASCIIToUTF16("folder"));
  model->AddURL(new_folder, 0, ASCIIToUTF16("bar"), GURL("http://www.bar.com"));

  parent = model->bookmark_bar_node();
  model->AddURL(parent, 0, ASCIIToUTF16("a"), GURL("http://www.a.com"));
  new_folder = model->AddFolder(parent, 1, ASCIIToUTF16("folder"));
  model->AddURL(new_folder, 0, ASCIIToUTF16("b"), GURL("http://www.b.com"));

  model->RemoveAllUserBookmarks();

  // Test that the undo of RemoveAllUserBookmarks restores all folders and
  // bookmarks.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(2, model->other_node()->child_count());
  EXPECT_EQ(1, model->other_node()->GetChild(1)->child_count());
  const BookmarkNode* node = model->other_node()->GetChild(1)->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));

  ASSERT_EQ(2, model->bookmark_bar_node()->child_count());
  EXPECT_EQ(1, model->bookmark_bar_node()->GetChild(1)->child_count());
  node = model->bookmark_bar_node()->GetChild(1)->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("b"));
  EXPECT_EQ(node->url(), GURL("http://www.b.com"));

  // Test that the redo removes all folders and bookmarks.
  undo_service->undo_manager()->Redo();
  EXPECT_EQ(0, model->other_node()->child_count());
  EXPECT_EQ(0, model->bookmark_bar_node()->child_count());
}
// Test that removing a folder together with its contents can be undone,
// redone, and undone again.
TEST_F(BookmarkUndoServiceTest, UndoRemoveFolderWithBookmarks) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  // Setup bookmarks in the Other Bookmarks.
  const BookmarkNode* new_folder;
  const BookmarkNode* parent = model->other_node();
  new_folder = model->AddFolder(parent, 0, ASCIIToUTF16("folder"));
  model->AddURL(new_folder, 0, ASCIIToUTF16("bar"), GURL("http://www.bar.com"));

  model->Remove(parent->GetChild(0));

  // Test that the undo restores the bookmark and folder.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(1, model->other_node()->child_count());
  new_folder = model->other_node()->GetChild(0);
  EXPECT_EQ(1, new_folder->child_count());
  const BookmarkNode* node = new_folder->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));

  // Test that the redo removes the bookmark and folder again.
  undo_service->undo_manager()->Redo();
  ASSERT_EQ(0, model->other_node()->child_count());

  // Test that the undo after a redo restores the bookmark and folder.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(1, model->other_node()->child_count());
  new_folder = model->other_node()->GetChild(0);
  EXPECT_EQ(1, new_folder->child_count());
  node = new_folder->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));
}
// Regression test: undoing the removal of a folder with multiple subfolders
// (the first one empty) must restore bookmarks to the correct subfolder.
TEST_F(BookmarkUndoServiceTest, UndoRemoveFolderWithSubfolders) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  // Setup bookmarks in the Other Bookmarks with the following structure:
  // folder
  //    subfolder1
  //    subfolder2
  //        bar - http://www.bar.com
  // This setup of multiple subfolders where the first subfolder has 0 children
  // is designed specifically to ensure we do not crash in this scenario and
  // that bookmarks are restored to the proper subfolder. See crbug.com/474123.
  const BookmarkNode* parent = model->other_node();
  const BookmarkNode* new_folder = model->AddFolder(
      parent, 0, ASCIIToUTF16("folder"));
  model->AddFolder(new_folder, 0, ASCIIToUTF16("subfolder1"));
  const BookmarkNode* sub_folder2 = model->AddFolder(
      new_folder, 1, ASCIIToUTF16("subfolder2"));
  model->AddURL(sub_folder2, 0, ASCIIToUTF16("bar"),
                GURL("http://www.bar.com"));

  model->Remove(parent->GetChild(0));

  // Test that the undo restores the subfolders and their contents.
  undo_service->undo_manager()->Undo();
  ASSERT_EQ(1, model->other_node()->child_count());
  const BookmarkNode* restored_new_folder = model->other_node()->GetChild(0);
  EXPECT_EQ(2, restored_new_folder->child_count());

  const BookmarkNode* restored_sub_folder1 = restored_new_folder->GetChild(0);
  EXPECT_EQ(ASCIIToUTF16("subfolder1"), restored_sub_folder1->GetTitle());
  EXPECT_EQ(0, restored_sub_folder1->child_count());

  const BookmarkNode* restored_sub_folder2 = restored_new_folder->GetChild(1);
  EXPECT_EQ(ASCIIToUTF16("subfolder2"), restored_sub_folder2->GetTitle());
  EXPECT_EQ(1, restored_sub_folder2->child_count());

  const BookmarkNode* node = restored_sub_folder2->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("bar"));
  EXPECT_EQ(node->url(), GURL("http://www.bar.com"));
}
// Test that the undo stack is capped: once the limit is exceeded the oldest
// groups are dropped, so the very first add cannot be undone.
TEST_F(BookmarkUndoServiceTest, TestUpperLimit) {
  BookmarkModel* model = GetModel();
  BookmarkUndoService* undo_service = GetUndoService();

  // This maximum is set in undo_manager.cc
  const size_t kMaxUndoGroups = 100;

  const BookmarkNode* parent = model->other_node();
  model->AddURL(parent, 0, ASCIIToUTF16("foo"), GURL("http://www.foo.com"));
  // Push kMaxUndoGroups more actions so the first one falls off the stack.
  for (size_t i = 1; i < kMaxUndoGroups + 1; ++i)
    model->AddURL(parent, i, ASCIIToUTF16("bar"), GURL("http://www.bar.com"));
  EXPECT_EQ(kMaxUndoGroups, undo_service->undo_manager()->undo_count());

  // Undo as many operations as possible.
  while (undo_service->undo_manager()->undo_count())
    undo_service->undo_manager()->Undo();

  // Only the un-undoable first bookmark remains.
  EXPECT_EQ(1, parent->child_count());
  const BookmarkNode* node = model->other_node()->GetChild(0);
  EXPECT_EQ(node->GetTitle(), ASCIIToUTF16("foo"));
  EXPECT_EQ(node->url(), GURL("http://www.foo.com"));
}
} // namespace
| 7,325 |
777 | <reponame>domenic/mojo
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "net/url_request/data_protocol_handler.h"
#include "net/url_request/url_request_data_job.h"
namespace net {
DataProtocolHandler::DataProtocolHandler() {
}

// Every data: request is handled by a URLRequestDataJob; ownership of the
// job passes to the caller (the URLRequestJobFactory).
URLRequestJob* DataProtocolHandler::MaybeCreateJob(
    URLRequest* request, NetworkDelegate* network_delegate) const {
  return new URLRequestDataJob(request, network_delegate);
}

// data: URLs are never a safe redirect target, regardless of |location|.
bool DataProtocolHandler::IsSafeRedirectTarget(const GURL& location) const {
  return false;
}
} // namespace net
| 206 |
704 | <gh_stars>100-1000
package com.netflix.governator.guice;
import com.google.inject.Injector;
/**
* Action to perform after the injector is created.
*
* @author elandau
*/
public interface PostInjectorAction {
    /**
     * Invoked once the {@link Injector} has been created.
     *
     * @param injector the newly created injector
     */
    public void call(Injector injector);
}
| 87 |
7,857 | #include "QtCore/QModelIndex/qmodelindex_wrap.h"
#include "Extras/Utils/nutils.h"
#include "QtCore/QVariant/qvariant_wrap.h"
#include "core/Component/component_wrap.h"
// Persistent reference to the JS class constructor; populated by init() and
// used by the instance methods below to mint new JS QModelIndex objects.
Napi::FunctionReference QModelIndexWrap::constructor;

// Registers the QModelIndex class (instance methods, the fromQVariant static
// helper, and the shared Component methods) on the addon's exports object.
Napi::Object QModelIndexWrap::init(Napi::Env env, Napi::Object exports) {
  Napi::HandleScope scope(env);
  char CLASSNAME[] = "QModelIndex";
  Napi::Function func = DefineClass(
      env, CLASSNAME,
      {InstanceMethod("column", &QModelIndexWrap::column),
       InstanceMethod("data", &QModelIndexWrap::data),
       InstanceMethod("flags", &QModelIndexWrap::flags),
       InstanceMethod("isValid", &QModelIndexWrap::isValid),
       InstanceMethod("parent", &QModelIndexWrap::parent),
       InstanceMethod("row", &QModelIndexWrap::row),
       InstanceMethod("sibling", &QModelIndexWrap::sibling),
       InstanceMethod("siblingAtColumn", &QModelIndexWrap::siblingAtColumn),
       InstanceMethod("siblingAtRow", &QModelIndexWrap::siblingAtRow),
       StaticMethod("fromQVariant",
                    &StaticQModelIndexWrapMethods::fromQVariant),
       COMPONENT_WRAPPED_METHODS_EXPORT_DEFINE(QModelIndexWrap)});
  constructor = Napi::Persistent(func);
  exports.Set(CLASSNAME, func);
  return exports;
}
QModelIndexWrap::QModelIndexWrap(const Napi::CallbackInfo& info)
    : Napi::ObjectWrap<QModelIndexWrap>(info) {
  Napi::Env env = info.Env();
  if (info.Length() > 0 && info[0].IsExternal()) {
    // --- if external ---
    // Adopt a heap-allocated QModelIndex handed over by another wrapper;
    // the unique_ptr now owns it and will delete it on destruction.
    this->instance = std::unique_ptr<QModelIndex>(
        info[0].As<Napi::External<QModelIndex>>().Data());
  } else {
    if (info.Length() == 0) {
      // No arguments: default-construct an (invalid) QModelIndex.
      this->instance = std::make_unique<QModelIndex>();
    } else {
      // Any other argument shape is a usage error on the JS side.
      Napi::TypeError::New(env, "Wrong number of arguments")
          .ThrowAsJavaScriptException();
    }
  }
  this->rawData = extrautils::configureComponent(this->getInternalInstance());
}
// Explicitly release the owned QModelIndex (the unique_ptr destructor would
// do this anyway; reset() just makes the teardown explicit).
QModelIndexWrap::~QModelIndexWrap() { this->instance.reset(); }

// Returns the wrapped QModelIndex; the wrapper retains ownership.
QModelIndex* QModelIndexWrap::getInternalInstance() {
  return this->instance.get();
}
// Exposes QModelIndex::column() to JS as a number.
Napi::Value QModelIndexWrap::column(const Napi::CallbackInfo& info) {
  return Napi::Value::From(info.Env(), this->instance->column());
}
// Returns the index's data for the requested role, wrapped as a JS QVariant.
Napi::Value QModelIndexWrap::data(const Napi::CallbackInfo& info) {
  Napi::Env env = info.Env();
  const int roleId = info[0].As<Napi::Number>().Int32Value();
  // Copy the QVariant onto the heap; the QVariantWrap constructor adopts the
  // external pointer and owns it from here on.
  QVariant* heapCopy = new QVariant(this->instance->data(roleId));
  return QVariantWrap::constructor.New(
      {Napi::External<QVariant>::New(env, heapCopy)});
}
// Exposes the Qt::ItemFlags bitmask to JS as a plain integer.
Napi::Value QModelIndexWrap::flags(const Napi::CallbackInfo& info) {
  return Napi::Value::From(info.Env(),
                           static_cast<int>(this->instance->flags()));
}
// Exposes QModelIndex::isValid() to JS as a boolean.
Napi::Value QModelIndexWrap::isValid(const Napi::CallbackInfo& info) {
  return Napi::Value::From(info.Env(), this->instance->isValid());
}
// Returns the parent index wrapped as a new JS QModelIndex; the wrapper
// adopts the heap copy.
Napi::Value QModelIndexWrap::parent(const Napi::CallbackInfo& info) {
  Napi::Env env = info.Env();
  QModelIndex* parentCopy = new QModelIndex(this->instance->parent());
  return QModelIndexWrap::constructor.New(
      {Napi::External<QModelIndex>::New(env, parentCopy)});
}
// Exposes QModelIndex::row() to JS as a number.
Napi::Value QModelIndexWrap::row(const Napi::CallbackInfo& info) {
  return Napi::Value::From(info.Env(), this->instance->row());
}
// Returns the sibling index at (row, column), wrapped as a JS QModelIndex.
Napi::Value QModelIndexWrap::sibling(const Napi::CallbackInfo& info) {
  Napi::Env env = info.Env();
  const int targetRow = info[0].As<Napi::Number>().Int32Value();
  const int targetColumn = info[1].As<Napi::Number>().Int32Value();
  QModelIndex* siblingCopy =
      new QModelIndex(this->instance->sibling(targetRow, targetColumn));
  return QModelIndexWrap::constructor.New(
      {Napi::External<QModelIndex>::New(env, siblingCopy)});
}
// siblingAtColumn(column): sibling on the same row at the given column.
Napi::Value QModelIndexWrap::siblingAtColumn(const Napi::CallbackInfo& info) {
  Napi::Env env = info.Env();
  int column = info[0].As<Napi::Number>().Int32Value();
  QModelIndex index = this->instance->siblingAtColumn(column);
  auto instance = QModelIndexWrap::constructor.New(
      {Napi::External<QModelIndex>::New(env, new QModelIndex(index))});
  return instance;
}
// siblingAtRow(row): sibling in the same column at the given row.
Napi::Value QModelIndexWrap::siblingAtRow(const Napi::CallbackInfo& info) {
  Napi::Env env = info.Env();
  int row = info[0].As<Napi::Number>().Int32Value();
  QModelIndex index = this->instance->siblingAtRow(row);
  auto instance = QModelIndexWrap::constructor.New(
      {Napi::External<QModelIndex>::New(env, new QModelIndex(index))});
  return instance;
}
// fromQVariant(variant): static helper that unwraps a QVariantWrap argument,
// extracts the QModelIndex stored inside the variant, and returns it wrapped
// in a new QModelIndexWrap.
Napi::Value StaticQModelIndexWrapMethods::fromQVariant(
    const Napi::CallbackInfo& info) {
  Napi::Env env = info.Env();
  Napi::Object variantObject = info[0].As<Napi::Object>();
  QVariantWrap* variantWrap =
      Napi::ObjectWrap<QVariantWrap>::Unwrap(variantObject);
  QVariant* variant = variantWrap->getInternalInstance();
  QModelIndex index = variant->value<QModelIndex>();
  auto instance = QModelIndexWrap::constructor.New(
      {Napi::External<QModelIndex>::New(env, new QModelIndex(index))});
  return instance;
}
| 1,867 |
3,012 | /** @file
NULL Library class that reads Debug Mask variable and if it exists makes a
HOB that contains the debug mask.
Copyright (c) 2011, Apple, Inc. All rights reserved.<BR>
SPDX-License-Identifier: BSD-2-Clause-Patent
**/
#include <PiPei.h>
#include <Library/HobLib.h>
#include <Library/DebugLib.h>
#include <Library/PeiServicesLib.h>
#include <Ppi/ReadOnlyVariable2.h>
#include <Guid/DebugMask.h>
/**
The constructor reads variable and sets HOB
@param FileHandle The handle of FFS header the loaded driver.
@param PeiServices The pointer to the PEI services.
@retval EFI_SUCCESS The constructor always returns EFI_SUCCESS.
**/
EFI_STATUS
EFIAPI
PeiDebugPrintHobLibConstructor (
  IN EFI_PEI_FILE_HANDLE FileHandle,
  IN CONST EFI_PEI_SERVICES **PeiServices
  )
{
  EFI_STATUS Status;
  EFI_PEI_READ_ONLY_VARIABLE2_PPI *Variable;
  UINTN Size;
  UINT64 GlobalErrorLevel;
  UINT32 HobErrorLevel;
  //
  // Locate the read-only variable PPI. If it is not (yet) published, the
  // constructor silently skips HOB creation but still returns EFI_SUCCESS
  // so the hosting module keeps loading.
  //
  Status = PeiServicesLocatePpi (
             &gEfiPeiReadOnlyVariable2PpiGuid,
             0,
             NULL,
             (VOID **)&Variable
             );
  if (!EFI_ERROR (Status)) {
    Size = sizeof (GlobalErrorLevel);
    Status = Variable->GetVariable (
                         Variable,
                         DEBUG_MASK_VARIABLE_NAME,
                         &gEfiGenericVariableGuid,
                         NULL,
                         &Size,
                         &GlobalErrorLevel
                         );
    if (!EFI_ERROR (Status)) {
      //
      // Build the GUID'ed HOB for DXE
      //
      // Only the low 32 bits of the 64-bit variable are carried in the HOB.
      HobErrorLevel = (UINT32)GlobalErrorLevel;
      BuildGuidDataHob (
        &gEfiGenericVariableGuid,
        &HobErrorLevel,
        sizeof (HobErrorLevel)
        );
    }
  }
  return EFI_SUCCESS;
}
| 1,027 |
1,641 | from eth_utils.toolz import (
curry,
)
from eth import constants
from eth._utils.numeric import (
unsigned_to_signed,
signed_to_unsigned,
ceil8,
)
from eth.vm.computation import BaseComputation
def add(computation: BaseComputation) -> None:
    """ADD opcode: push (lhs + rhs) truncated to 256 bits."""
    lhs, rhs = computation.stack_pop_ints(2)
    computation.stack_push_int((lhs + rhs) & constants.UINT_256_MAX)
def addmod(computation: "BaseComputation") -> None:
    """
    Modulo Addition (ADDMOD): push (left + right) % modulus.

    By EVM convention x mod 0 is defined as 0 rather than raising.
    """
    # Local renamed from `mod` to `modulus`: the original name shadowed the
    # module-level `mod` opcode function defined in this same file.
    left, right, modulus = computation.stack_pop_ints(3)
    if modulus == 0:
        result = 0
    else:
        result = (left + right) % modulus
    computation.stack_push_int(result)
def sub(computation: BaseComputation) -> None:
    """SUB opcode: push (minuend - subtrahend) wrapped to 256 bits."""
    minuend, subtrahend = computation.stack_pop_ints(2)
    computation.stack_push_int((minuend - subtrahend) & constants.UINT_256_MAX)
def mod(computation: "BaseComputation") -> None:
    """
    Modulo (MOD): push value % modulus.

    By EVM convention x mod 0 is defined as 0 rather than raising.
    """
    # Local renamed from `mod`, which shadowed this function's own name.
    value, modulus = computation.stack_pop_ints(2)
    computation.stack_push_int(value % modulus if modulus else 0)
def smod(computation: BaseComputation) -> None:
    """SMOD opcode: signed modulo; the result carries the dividend's sign."""
    value, modulus = map(unsigned_to_signed, computation.stack_pop_ints(2))
    if modulus == 0:
        result = 0
    else:
        sign = 1 if value >= 0 else -1
        result = (sign * (abs(value) % abs(modulus))) & constants.UINT_256_MAX
    computation.stack_push_int(signed_to_unsigned(result))
def mul(computation: BaseComputation) -> None:
    """MUL opcode: push (lhs * rhs) truncated to 256 bits."""
    lhs, rhs = computation.stack_pop_ints(2)
    computation.stack_push_int((lhs * rhs) & constants.UINT_256_MAX)
def mulmod(computation: "BaseComputation") -> None:
    """
    Modulo Multiplication (MULMOD): push (left * right) % modulus.

    By EVM convention x mod 0 is defined as 0 rather than raising.
    """
    # Local renamed from `mod` to `modulus`: the original name shadowed the
    # module-level `mod` opcode function defined in this same file.
    left, right, modulus = computation.stack_pop_ints(3)
    computation.stack_push_int((left * right) % modulus if modulus else 0)
def div(computation: BaseComputation) -> None:
    """DIV opcode: unsigned integer division; x / 0 is defined as 0."""
    numerator, denominator = computation.stack_pop_ints(2)
    if denominator == 0:
        computation.stack_push_int(0)
    else:
        computation.stack_push_int(
            (numerator // denominator) & constants.UINT_256_MAX
        )
def sdiv(computation: BaseComputation) -> None:
    """SDIV opcode: signed division truncating toward zero; x / 0 is 0."""
    numerator, denominator = map(
        unsigned_to_signed,
        computation.stack_pop_ints(2),
    )
    if denominator == 0:
        result = 0
    else:
        sign = -1 if (numerator < 0) != (denominator < 0) else 1
        result = sign * (abs(numerator) // abs(denominator))
    computation.stack_push_int(signed_to_unsigned(result))
@curry
def exp(computation: BaseComputation, gas_per_byte: int) -> None:
    """EXP opcode: base ** exponent mod 2**256, charging per-byte exponent gas."""
    base, exponent = computation.stack_pop_ints(2)
    # Gas is charged for each byte needed to represent the exponent.
    exponent_bytes = ceil8(exponent.bit_length()) // 8
    if exponent == 0:
        result = 1
    elif base == 0:
        result = 0
    else:
        result = pow(base, exponent, constants.UINT_256_CEILING)
    computation.consume_gas(
        gas_per_byte * exponent_bytes,
        reason="EXP: exponent bytes",
    )
    computation.stack_push_int(result)
def signextend(computation: BaseComputation) -> None:
    """SIGNEXTEND opcode: sign-extend value from (bits + 1) bytes to 256 bits."""
    bits, value = computation.stack_pop_ints(2)
    if bits > 31:
        # Already a full 256-bit value; nothing to extend.
        result = value
    else:
        sign_bit = 1 << (bits * 8 + 7)
        if value & sign_bit:
            result = value | (constants.UINT_256_CEILING - sign_bit)
        else:
            result = value & (sign_bit - 1)
    computation.stack_push_int(result)
def shl(computation: BaseComputation) -> None:
    """SHL opcode: left shift truncated to 256 bits; shifts >= 256 yield 0."""
    shift, value = computation.stack_pop_ints(2)
    result = (value << shift) & constants.UINT_256_MAX if shift < 256 else 0
    computation.stack_push_int(result)
def shr(computation: BaseComputation) -> None:
    """SHR opcode: logical right shift; shifts >= 256 yield 0."""
    shift, value = computation.stack_pop_ints(2)
    result = (value >> shift) & constants.UINT_256_MAX if shift < 256 else 0
    computation.stack_push_int(result)
def sar(computation: BaseComputation) -> None:
    """SAR opcode: arithmetic right shift preserving the 256-bit sign."""
    shift, raw_value = computation.stack_pop_ints(2)
    signed_value = unsigned_to_signed(raw_value)
    if shift >= 256:
        # All bits shifted out: the result saturates to 0 or to -1.
        result = 0 if signed_value >= 0 else constants.UINT_255_NEGATIVE_ONE
    else:
        result = (signed_value >> shift) & constants.UINT_256_MAX
    computation.stack_push_int(result)
| 1,997 |
2,996 | <gh_stars>1000+
// Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.world.generation;
/**
 * Supplies one facet of world data for a region during world generation.
 * Lifecycle: {@link #setSeed(long)} is invoked first, then {@link #initialize()},
 * then {@link #process(GeneratingRegion)} for each region to generate.
 */
public interface FacetProvider {
    /**
     * @param seed the seed value (typically used for random number generators)
     */
    default void setSeed(long seed) {
        // don't do anything
    }
    /**
     * This is always called after {@link #setSeed(long)}.
     */
    default void initialize() {
        // don't do anything
    }
    /**
     * Computes this provider's facet data for the given region.
     *
     * @param region the region being generated
     */
    void process(GeneratingRegion region);
}
| 191 |
852 | <gh_stars>100-1000
#ifndef JetAlgorithms_QGLikelihoodCalculator_h
#define JetAlgorithms_QGLikelihoodCalculator_h
#include "CondFormats/JetMETObjects/interface/QGLikelihoodObject.h"
/**
* The QGLikelihoodCalculater calculates the likelihood for a jet
* It takes information on the valid range of the tool, the binning of the categories, and their PDFs from the QGLikelihoodObject
* The variables in the vars vector should match with the variables in the QGLikelihoodObject, in which they are identified by the varIndex
* Authors: <EMAIL>, <EMAIL>, <EMAIL>
*/
class QGLikelihoodCalculator {
public:
  QGLikelihoodCalculator(){};
  ~QGLikelihoodCalculator(){};
  // Computes the quark/gluon likelihood of a jet from its (pt, eta, rho) and
  // the per-variable values in `vars`; binning and PDFs come from the payload.
  float computeQGLikelihood(
      const QGLikelihoodObject &QGLParamsColl, float pt, float eta, float rho, std::vector<float> vars) const;
  // Applies a systematic smearing to an already-computed likelihood value
  // using the parameters stored for the matching (pt, eta, rho) category.
  float systematicSmearing(const QGLikelihoodSystematicsObject &QGLParamsColl,
                           float pt,
                           float eta,
                           float rho,
                           float qgValue,
                           int qgIndex) const;

private:
  // Looks up the payload entry matching the jet kinematics plus the
  // quark/gluon index and variable index; null when no category matches.
  const QGLikelihoodObject::Entry *findEntry(std::vector<QGLikelihoodObject::Entry> const &data,
                                             float eta,
                                             float pt,
                                             float rho,
                                             int qgIndex,
                                             int varIndex) const;
  // True when (pt, rho, eta) falls inside the payload's declared validity range.
  bool isValidRange(float pt, float rho, float eta, const QGLikelihoodCategory &qgValidRange) const;
  // Smearing transfer function used by systematicSmearing().
  float smearingFunction(float x0, float a, float b, float min, float max) const;
};
#endif
| 778 |
1,755 | """Test the use of preconditions with VTK_EXPECTS()
"""
import sys
import vtk
from vtk.test import Testing
class TestExpects(Testing.vtkTest):
    def testPoints(self):
        """Check VTK_EXPECTS range preconditions on vtkPoints indices."""
        points = vtk.vtkPoints()
        p = (1.0, 2.0, 3.0)
        points.InsertNextPoint(p)
        self.assertEqual(points.GetPoint(0), p)
        # One point stored, so every index outside {0} must be rejected.
        for bad_index in (-1, 1):
            with self.assertRaises(ValueError):
                points.GetPoint(bad_index)
            with self.assertRaises(ValueError):
                points.SetPoint(bad_index, p)

    def testArray(self):
        """Check tuple/value/component range preconditions on vtkDoubleArray."""
        array = vtk.vtkDoubleArray()
        array.SetNumberOfComponents(2)
        t = (2.0, 10.0)
        for _ in range(3):
            array.InsertNextTuple(t)
        self.assertEqual(array.GetTuple(0), t)
        self.assertEqual(array.GetTuple(2), t)
        # Three tuples stored: indices -1 and 3 are out of range.
        for bad_tuple in (-1, 3):
            with self.assertRaises(ValueError):
                array.GetTuple(bad_tuple)
            with self.assertRaises(ValueError):
                array.SetTuple(bad_tuple, t)
        self.assertEqual(array.GetValue(0), 2.0)
        self.assertEqual(array.GetValue(5), 10.0)
        # Six flat values stored: indices -1 and 6 are out of range.
        for bad_value in (-1, 6):
            with self.assertRaises(ValueError):
                array.GetValue(bad_value)
            with self.assertRaises(ValueError):
                array.SetValue(bad_value, 2.0)
        self.assertEqual(array.GetComponent(0, 1), 10.0)
        # Invalid (tuple, component) pairs on either axis must be rejected.
        for tuple_idx, comp_idx in ((0, -1), (0, 2), (-1, 0), (3, 1)):
            with self.assertRaises(ValueError):
                array.GetComponent(tuple_idx, comp_idx)
            with self.assertRaises(ValueError):
                array.SetComponent(tuple_idx, comp_idx, 0.0)
# Entry point: runs every TestExpects method whose name starts with 'test'.
if __name__ == "__main__":
    Testing.main([(TestExpects, 'test')])
| 1,254 |
331 | <reponame>HarkonenBade/agithub
# Copyright 2012-2016 <NAME> and contributors
# See COPYING for license details
from agithub.base import API, ConnectionProperties, Client
class DigitalOcean(API):
    """
    Digital Ocean API
    """

    def __init__(self, token=None, *args, **kwargs):
        # All v2 endpoints are served from https://api.digitalocean.com/v2.
        connection_properties = ConnectionProperties(
            api_url='api.digitalocean.com',
            url_prefix='/v2',
            secure_http=True,
            extra_headers={
                'authorization': self.generateAuthHeader(token)
            },
        )
        self.setClient(Client(*args, **kwargs))
        self.setConnectionProperties(connection_properties)

    def generateAuthHeader(self, token):
        """Return the Bearer authorization header value, or None without a token."""
        if token is None:
            return None
        return "Bearer " + token
| 332 |
1,091 | <reponame>MohiK98/onos
/*
* Copyright 2021-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.p4runtime.model;
import p4.config.v1.P4InfoOuterClass;
/**
* Provides utility methods for P4Info annotations.
*/
public final class P4InfoAnnotationUtils {
    public static final String ONE_SHOT_ONLY_ANNOTATION = "oneshot";
    public static final String MAX_GROUP_SIZE_ANNOTATION = "max_group_size";
    // Utility class: not instantiable.
    private P4InfoAnnotationUtils() {
    }
    /**
     * Gets the annotation value if available in the given P4Info preamble.
     * Supports annotation in the form @my_annotation(value).
     *
     * @param name Annotation name
     * @param preamble preamble of the P4Info object
     * @return The annotation value if present, null otherwise
     */
    public static String getAnnotationValue(String name, P4InfoOuterClass.Preamble preamble) {
        // NOTE(review): startsWith performs a prefix match, so "@oneshot_x"
        // would also match name "oneshot" — confirm this is intended.
        return preamble.getAnnotationsList().stream()
                .filter(a -> a.startsWith("@" + name))
                // e.g. @my_annotation(value)
                .map(a -> a.substring(name.length() + 2, a.length() - 1))
                .findFirst()
                .orElse(null);
    }
    /**
     * Checks if the given annotation name is present in the given P4Info preamble.
     * Supports annotation in the form @my_annotation* (i.e., @oneshot, @max_group_size(10)).
     *
     * @param name Annotation name
     * @param preamble preamble of the P4Info object
     * @return True if the annotation is available, False otherwise.
     */
    public static boolean isAnnotationPresent(String name, P4InfoOuterClass.Preamble preamble) {
        // Same prefix-match caveat as getAnnotationValue above.
        return preamble.getAnnotationsList().stream()
                .anyMatch(a -> a.startsWith("@" + name));
    }
}
| 818 |
20,765 | #include "file_utils.h"
#include "string_utils.h"
#include <fstream>
#include <string>
// Stub: search `root` for a sub-folder whose name starts with `prefix`.
// Not implemented on this platform; always fails and leaves `dest` untouched.
bool find_folder(const char* root, const char* prefix, std::string& dest)
{
    return false;
}
// Convenience overload: forwards to the C-string variant above.
bool find_folder(const std::string& root, const std::string& prefix, std::string& dest)
{
    return find_folder(root.c_str(), prefix.c_str(), dest);
}
// Stub: directory listing is not implemented on this platform; the `root`,
// `prefix` and `flags` arguments are ignored and an empty list is returned.
std::vector<std::string> ls(const char* root, const char* prefix, LS_FLAGS flags)
{
    std::vector<std::string> list;
    return list;
}
// Stub: always reports the file as absent on this platform.
bool file_exists(const std::string& path)
{
    return false;
}
// Stub: always reports the directory as absent on this platform.
bool dir_exists(const std::string& path)
{
    return false;
}
// Stub: executable path lookup is not implemented; returns an empty string.
std::string get_exe_path()
{
    return std::string();
}
// Stub: Wine executable name lookup is not implemented; `keep_ext` is ignored
// and an empty string is returned.
std::string get_wine_exe_name(bool keep_ext)
{
    return std::string();
}
// Stub: home directory lookup is not implemented; returns an empty string.
std::string get_home_dir()
{
    std::string path;
    return path;
}
// Stub: data directory lookup is not implemented; returns an empty string.
std::string get_data_dir()
{
    std::string path;
    return path;
}
// Stub: config directory lookup is not implemented; returns an empty string.
std::string get_config_dir()
{
    std::string path;
    return path;
}
| 374 |
679 | /**************************************************************
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*************************************************************/
#ifndef _BGFX_RANGE_B2IBOX_HXX
#define _BGFX_RANGE_B2IBOX_HXX
#include <basegfx/point/b2ipoint.hxx>
#include <basegfx/point/b2dpoint.hxx>
#include <basegfx/tuple/b2ituple.hxx>
#include <basegfx/tuple/b2i64tuple.hxx>
#include <basegfx/range/basicbox.hxx>
#include <vector>
#include <basegfx/basegfxdllapi.h>
namespace basegfx
{
    /** A 2-D axis-aligned integer box stored as one BasicBox range per axis. */
    class BASEGFX_DLLPUBLIC B2IBox
    {
    public:
        typedef sal_Int32 ValueType;
        typedef Int32Traits TraitsType;
        // Default construction yields an empty box.
        B2IBox()
        {
        }
        // Degenerate box containing exactly the given point.
        explicit B2IBox(const B2ITuple& rTuple)
        : maRangeX(rTuple.getX()),
          maRangeY(rTuple.getY())
        {
        }
        // Box spanning the two corners; expand() makes corner order irrelevant.
        B2IBox(sal_Int32 x1,
               sal_Int32 y1,
               sal_Int32 x2,
               sal_Int32 y2) :
            maRangeX(x1),
            maRangeY(y1)
        {
            maRangeX.expand(x2);
            maRangeY.expand(y2);
        }
        B2IBox(const B2ITuple& rTuple1,
               const B2ITuple& rTuple2) :
            maRangeX(rTuple1.getX()),
            maRangeY(rTuple1.getY())
        {
            expand( rTuple2 );
        }
        B2IBox(const B2IBox& rBox) :
            maRangeX(rBox.maRangeX),
            maRangeY(rBox.maRangeY)
        {
        }
        // A box is empty as soon as either axis range is empty.
        bool isEmpty() const
        {
            return maRangeX.isEmpty() || maRangeY.isEmpty();
        }
        void reset()
        {
            maRangeX.reset();
            maRangeY.reset();
        }
        bool operator==( const B2IBox& rBox ) const
        {
            return (maRangeX == rBox.maRangeX
                && maRangeY == rBox.maRangeY);
        }
        bool operator!=( const B2IBox& rBox ) const
        {
            return (maRangeX != rBox.maRangeX
                || maRangeY != rBox.maRangeY);
        }
        // NOTE(review): returns void instead of B2IBox&, so assignments cannot
        // be chained; kept as-is to preserve the published interface/ABI.
        void operator=(const B2IBox& rBox)
        {
            maRangeX = rBox.maRangeX;
            maRangeY = rBox.maRangeY;
        }
        sal_Int32 getMinX() const
        {
            return maRangeX.getMinimum();
        }
        sal_Int32 getMinY() const
        {
            return maRangeY.getMinimum();
        }
        sal_Int32 getMaxX() const
        {
            return maRangeX.getMaximum();
        }
        sal_Int32 getMaxY() const
        {
            return maRangeY.getMaximum();
        }
        // Extents are 64-bit to avoid overflow of max - min on 32-bit ranges.
        sal_Int64 getWidth() const
        {
            return maRangeX.getRange();
        }
        sal_Int64 getHeight() const
        {
            return maRangeY.getRange();
        }
        B2IPoint getMinimum() const
        {
            return B2IPoint(
                maRangeX.getMinimum(),
                maRangeY.getMinimum()
                );
        }
        B2IPoint getMaximum() const
        {
            return B2IPoint(
                maRangeX.getMaximum(),
                maRangeY.getMaximum()
                );
        }
        B2I64Tuple getRange() const
        {
            return B2I64Tuple(
                maRangeX.getRange(),
                maRangeY.getRange()
                );
        }
        // Center is returned as a double point since it may fall between cells.
        B2DPoint getCenter() const
        {
            return B2DPoint(
                maRangeX.getCenter(),
                maRangeY.getCenter()
                );
        }
        bool isInside(const B2ITuple& rTuple) const
        {
            return (
                maRangeX.isInside(rTuple.getX())
                && maRangeY.isInside(rTuple.getY())
                );
        }
        bool isInside(const B2IBox& rBox) const
        {
            return (
                maRangeX.isInside(rBox.maRangeX)
                && maRangeY.isInside(rBox.maRangeY)
                );
        }
        bool overlaps(const B2IBox& rBox) const
        {
            return (
                maRangeX.overlaps(rBox.maRangeX)
                && maRangeY.overlaps(rBox.maRangeY)
                );
        }
        // Grows the box (if needed) so that it contains the given point/box.
        void expand(const B2ITuple& rTuple)
        {
            maRangeX.expand(rTuple.getX());
            maRangeY.expand(rTuple.getY());
        }
        void expand(const B2IBox& rBox)
        {
            maRangeX.expand(rBox.maRangeX);
            maRangeY.expand(rBox.maRangeY);
        }
        void intersect(const B2IBox& rBox)
        {
            maRangeX.intersect(rBox.maRangeX);
            maRangeY.intersect(rBox.maRangeY);
        }
        // Expands (positive) or shrinks (negative) each side by nValue.
        void grow(sal_Int32 nValue)
        {
            maRangeX.grow(nValue);
            maRangeY.grow(nValue);
        }
    private:
        BasicBox maRangeX;
        BasicBox maRangeY;
    };
/** Compute the set difference of the two given boxes
This method calculates the symmetric difference (aka XOR)
between the two given boxes, and returning the resulting
boxes. Thus, the result will contain all areas where one, but
not both boxes lie.
@param o_rResult
Result vector. The up to four difference boxes are returned
within this vector
@param rFirst
The first box
@param rSecond
The second box
@return the input vector
*/
::std::vector< B2IBox >& computeSetDifference( ::std::vector< B2IBox >& o_rResult,
const B2IBox& rFirst,
const B2IBox& rSecond );
} // end of namespace basegfx
#endif /* _BGFX_RANGE_B2IBOX_HXX */
| 2,551 |
6,717 | //******************************************************************************
//
// Copyright (c) 2016 Microsoft Corporation. All rights reserved.
//
// This code is licensed under the MIT License (MIT).
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//******************************************************************************
#pragma once
#import <Contacts/ContactsExport.h>
#import <Foundation/NSObject.h>
@class CNMutableContact;
@class NSString;
@class CNMutableGroup;
@class CNContact;
@class CNGroup;
// Batched mutation request against the contact store, mirroring Apple's
// Contacts CNSaveRequest API. Every method is a stub (STUB_METHOD) in this
// port and performs no work.
CONTACTS_EXPORT_CLASS
@interface CNSaveRequest : NSObject
- (void)addContact:(CNMutableContact*)contact toContainerWithIdentifier:(NSString*)identifier STUB_METHOD;
- (void)updateContact:(CNMutableContact*)contact STUB_METHOD;
- (void)deleteContact:(CNMutableContact*)contact STUB_METHOD;
- (void)addGroup:(CNMutableGroup*)group toContainerWithIdentifier:(NSString*)identifier STUB_METHOD;
- (void)updateGroup:(CNMutableGroup*)group STUB_METHOD;
- (void)deleteGroup:(CNMutableGroup*)group STUB_METHOD;
- (void)addMember:(CNContact*)contact toGroup:(CNGroup*)group STUB_METHOD;
- (void)removeMember:(CNContact*)contact fromGroup:(CNGroup*)group STUB_METHOD;
@end
4,816 | <filename>include/retdec/bin2llvmir/optimizations/idioms/idioms_types.h
/**
* @file include/retdec/bin2llvmir/optimizations/idioms/idioms_types.h
* @brief Instruction idioms analysis types
* @copyright (c) 2017 Avast Software, licensed under the MIT license
*/
#ifndef RETDEC_BIN2LLVMIR_OPTIMIZATIONS_IDIOMS_IDIOMS_TYPES_H
#define RETDEC_BIN2LLVMIR_OPTIMIZATIONS_IDIOMS_IDIOMS_TYPES_H
namespace retdec {
namespace bin2llvmir {
/**
* @brief Compiler type
*/
enum CC_compiler {
	CC_ANY = 0, //unrecognized compiler
	CC_Borland, // Borland / Embarcadero C++
	CC_GCC,     // GNU Compiler Collection
	CC_Intel,   // Intel C++ compiler
	CC_LLVM,    // LLVM/Clang
	CC_OWatcom, // Open Watcom
	CC_VStudio  // Microsoft Visual Studio
};
/**
* @brief Target architecture
*/
enum CC_arch {
	ARCH_ANY = 0, //unknown architecture
	ARCH_MIPS,    // MIPS
	ARCH_POWERPC, // PowerPC
	ARCH_ARM,     // ARM (A32)
	ARCH_THUMB,   // ARM Thumb
	ARCH_x86      // Intel x86
};
} // namespace bin2llvmir
} // namespace retdec
#endif
| 335 |
335 | {
"word": "Headmaster",
"definitions": [
"A man who is the head teacher in a school."
],
"parts-of-speech": "Noun"
} | 63 |
419 | <reponame>ajax7876/ElasticTransition-ObjC<filename>ElasticTransistionExample/NavigationExampleViewController.h
//
// NavigationExampleViewController.h
// ElasticTransistionExample
//
// Created by Tigielle on 11/02/16.
// Copyright © 2016 <NAME>. All rights reserved.
//
#import <UIKit/UIKit.h>
// Example screen used by the ElasticTransition demo; actions are wired up
// from the storyboard.
@interface NavigationExampleViewController : UIViewController
// Dismisses this example screen.
-(IBAction)dismiss:(id)sender;
// Shows additional example content (target wired in the storyboard).
-(IBAction)showMore:(id)sender;
@end
| 146 |
1,590 | {
"parameters": {
"api-version": "2018-08-01",
"subscriptionId": "subid",
"resourceGroupName": "rg1",
"frontDoorName": "frontDoor1"
},
"responses": {
"200": {
"body": {
"value": [
{
"id": "/subscriptions/subid/resourceGroups/rg1/providers/Microsoft.Network/frontDoors/frontDoor1/frontendEndpoints/frontendEndpoint1",
"name": "frontendEndpoint1",
"properties": {
"hostName": "www.contoso.com",
"sessionAffinityEnabledState": "Enabled",
"sessionAffinityTtlSeconds": 60,
"webApplicationFirewallPolicyLink": {
"id": "/subscriptions/subid/resourceGroups/rg1/providers/Microsoft.Network/frontDoorWebApplicationFirewallPolicies/policy1"
}
}
}
]
}
}
}
}
| 431 |
5,460 | <reponame>gtourkas/moto
"""Exceptions raised by the timestreamwrite service."""
| 26 |
2,073 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mahout.clustering;
import org.apache.mahout.math.Vector;
import org.apache.mahout.math.function.Functions;
import org.apache.mahout.math.function.SquareRootFunction;
/**
* An online Gaussian accumulator that uses a running power sums approach as reported
* on http://en.wikipedia.org/wiki/Standard_deviation
* Suffers from overflow, underflow and roundoff error but has minimal observe-time overhead
*/
public class RunningSumsGaussianAccumulator implements GaussianAccumulator {
  // s0 = sum of weights; s1 = weighted sum of x; s2 = weighted sum of x^2.
  private double s0;
  private Vector s1;
  private Vector s2;
  // Populated by compute(); null until then.
  private Vector mean;
  private Vector std;
  @Override
  public double getN() {
    return s0;
  }
  @Override
  public Vector getMean() {
    return mean;
  }
  @Override
  public Vector getStd() {
    return std;
  }
  @Override
  public double getAverageStd() {
    if (s0 == 0.0) {
      return 0.0;
    } else {
      return std.zSum() / std.size();
    }
  }
  @Override
  public Vector getVariance() {
    return std.times(std);
  }
  @Override
  public void observe(Vector x, double weight) {
    s0 += weight;
    Vector weightedX = x.times(weight);
    // Lazily initialize the sum vectors on the first observation.
    if (s1 == null) {
      s1 = weightedX;
    } else {
      s1.assign(weightedX, Functions.PLUS);
    }
    Vector x2 = x.times(x).times(weight);
    if (s2 == null) {
      s2 = x2;
    } else {
      s2.assign(x2, Functions.PLUS);
    }
  }
  @Override
  public void compute() {
    // mean = s1/s0; std = sqrt(s0*s2 - s1^2)/s0, the population standard
    // deviation derived from the running power sums.
    if (s0 != 0.0) {
      mean = s1.divide(s0);
      std = s2.times(s0).minus(s1.times(s1)).assign(new SquareRootFunction()).divide(s0);
    }
  }
}
| 802 |
5,133 | <filename>processor/src/test/java/org/mapstruct/ap/test/selection/qualifier/iterable/RiverEntity.java
/*
* Copyright MapStruct Authors.
*
* Licensed under the Apache License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
*/
package org.mapstruct.ap.test.selection.qualifier.iterable;
/**
*
* @author <NAME>
*/
public class RiverEntity extends TopologyFeatureEntity {
    // Length of the river (unit not specified here — defined by the schema).
    private int length;
    public int getLength() {
        return length;
    }
    public void setLength(int length) {
        this.length = length;
    }
}
| 191 |
676 | <filename>app/src/main/java/com/alorma/github/account/StartUpBootReceiver.java<gh_stars>100-1000
package com.alorma.github.account;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import com.alorma.github.GitskariosApplication;
import com.alorma.github.injector.component.ApplicationComponent;
import com.alorma.github.injector.component.DaggerNotificationsComponent;
import com.alorma.github.injector.component.NotificationsComponent;
import com.alorma.github.injector.module.NotificationsModule;
import com.alorma.github.notifications.AppNotificationsManager;
import javax.inject.Inject;
public class StartUpBootReceiver extends BroadcastReceiver {
  @Inject AppNotificationsManager appNotificationsManager;
  @Override
  public void onReceive(Context context, Intent intent) {
    // After a device reboot, re-apply the persisted "notifications enabled"
    // flag so the manager re-establishes whatever that setting requires.
    if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
      injectComponents(context);
      if (appNotificationsManager != null) {
        appNotificationsManager.setNotificationsEnabled(appNotificationsManager.areNotificationsEnabled());
      }
    }
  }
  // Manual Dagger injection: BroadcastReceivers are instantiated by the
  // system, so field injection must be performed explicitly here.
  private void injectComponents(Context context) {
    GitskariosApplication application = (GitskariosApplication) context.getApplicationContext();
    ApplicationComponent applicationComponent = application.getApplicationComponent();
    NotificationsComponent notificationsComponent = DaggerNotificationsComponent.builder()
        .applicationComponent(applicationComponent)
        .notificationsModule(new NotificationsModule())
        .build();
    notificationsComponent.inject(this);
  }
}
| 479 |
356 | /*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GLTF_VALIDATE_H_
#define GLTF_VALIDATE_H_
#include "gltf.h" // NOLINT: Silence relative path warning.
#include "load.h" // NOLINT: Silence relative path warning.
#include "message.h" // NOLINT: Silence relative path warning.
#include "stream.h" // NOLINT: Silence relative path warning.
// Perform glTF structure validation.
//
// This performs a thorough check for issues that are likely to cause conversion
// issues, rather than a strict enforcement of the schema.
//
// Notable exceptions are:
// 1) Any validation done earlier in GltfLoad is not repeated here.
// 2) It does not perform inspection of binary resources (including Draco-
// compressed meshes).
// 3) Recoverable issues are treated as warnings rather than errors.
// 4) Fields extraneous to conversion may be ignored.
bool GltfValidate(const Gltf& gltf, GltfLogger* logger);
bool GltfLoadAndValidate(
GltfStream* gltf_stream, const char* name, const GltfLoadSettings& settings,
Gltf* out_gltf, GltfLogger* logger);
#endif // GLTF_VALIDATE_H_
| 477 |
2,230 | /**
* Copyright (c) 2017-present, Facebook, Inc.
* All rights reserved.
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "save2json.h"
#include "engine/cmd.h"
#include "engine/cmd.gen.h"
#include "engine/cmd_specific.gen.h"
#include "engine/game_env.h"
#include "engine/ai.h"
#include "engine/cmd.h"
// Writes a 2-D point into json as {"x": ..., "y": ...}.
static inline void set_p(const PointF &p, json *location) {
    (*location)["x"] = p.x;
    (*location)["y"] = p.y;
}
// Serializes a durative command into json: command type string, issuer id,
// then a per-type payload (attack target, move destination, gather/build
// state). "state" defaults to 0 and is overwritten for stateful commands.
static inline void set_cmd(const CmdDurative *_c, json *cmd) {
    const CmdDurative &c = *_c;
    (*cmd)["cmd"] = CmdTypeLookup::idx2str(c.type());
    (*cmd)["id"] = c.id();
    (*cmd)["state"] = 0;
    if (c.type() == ATTACK) {
        const CmdAttack &tmp = dynamic_cast<const CmdAttack &>(c);
        (*cmd)["target_id"] = tmp.target();
    } else if (c.type() == MOVE) {
        const CmdMove &tmp = dynamic_cast<const CmdMove &>(c);
        set_p(tmp.p(), &(*cmd)["p"]);
    } else if (c.type() == GATHER) {
        const CmdGather &tmp = dynamic_cast<const CmdGather &>(c);
        (*cmd)["target_id"] = tmp.resource();
        (*cmd)["state"] = tmp.state();
    } else if (c.type() == BUILD) {
        const CmdBuild &tmp = dynamic_cast<const CmdBuild &>(c);
        (*cmd)["state"] = tmp.state();
    }
}
// Records the current game tick in the snapshot.
void save2json::SetTick(Tick tick, json *game) {
    (*game)["tick"] = tick;
}
// Records the winning player's id in the snapshot.
void save2json::SetWinner(PlayerId id, json *game) {
    (*game)["winner"] = id;
}
// Records the id of the player this snapshot is rendered for.
void save2json::SetPlayerId(PlayerId id, json *game) {
    (*game)["player_id"] = id;
}
// Flags whether the snapshot is for a spectator (full visibility) view.
void save2json::SetSpectator(bool is_spectator, json *game) {
    (*game)["spectator"] = is_spectator;
}
// Flags whether the game has terminated.
void save2json::SetTermination(bool t, json *game) {
    (*game)["terminated"] = t;
}
// Records the index of the current game within the session.
void save2json::SetGameCounter(int game_counter, json *game) {
    (*game)["game_counter"] = game_counter;
}
// Serializes the full (un-fogged) map: dimensions plus a row-major terrain
// array (y outer, x inner) where anything not NORMAL/IMPASSABLE is FOG.
void save2json::Save(const RTSMap& m, json *game) {
    json rts_map;
    rts_map["width"] = m.GetXSize();
    rts_map["height"] = m.GetYSize();
    json slots;
    for (int y = 0; y < m.GetYSize(); y ++) {
        for (int x = 0; x < m.GetXSize(); x ++) {
            Loc loc = m.GetLoc(x, y, 0);
            Terrain t = FOG;
            if (m(loc).type == NORMAL) t = NORMAL;
            else if (m(loc).type == IMPASSABLE) t = IMPASSABLE;
            slots.push_back(t);
        }
    }
    rts_map["slots"] = slots;
    (*game)["rts_map"] = rts_map;
}
// Fog-of-war variant of Save(RTSMap): terrain is revealed only where the
// player's fog allows; for hidden cells the last-seen units recorded in that
// cell's fog are emitted instead (presumably deduplicated upstream — TODO
// confirm seen_units cannot repeat across cells).
void save2json::SavePlayerMap(const Player& player, json *game) {
    json rts_map;
    const RTSMap &m = player.GetMap();
    rts_map["width"] = m.GetXSize();
    rts_map["height"] = m.GetYSize();
    json slots;
    for (int y = 0; y < m.GetYSize(); y ++) {
        for (int x = 0; x < m.GetXSize(); x ++) {
            Loc loc = m.GetLoc(x, y, 0);
            const Fog &f = player.GetFog(loc);
            Terrain t = FOG;
            if (f.CanSeeTerrain()) {
                if (m(loc).type == NORMAL) t = NORMAL;
                else if (m(loc).type == IMPASSABLE) t = IMPASSABLE;
            } else {
                // Add prev seen units.
                for (const auto &u : f.seen_units()) {
                    Save(u, nullptr, &rts_map);
                }
            }
            slots.push_back(t);
        }
    }
    rts_map["slots"] = slots;
    (*game)["rts_map"] = rts_map;
}
// Appends one player's id and current resource count to the "players" array.
void save2json::SaveStats(const Player& player, json *game) {
    // Save the information for player.
    json pp;
    pp["player_id"] = player.GetId();
    pp["resource"] = player.GetResource();
    (*game)["players"].push_back(pp);
}
/*
void save2json::Save(const AI &bot, json *game) {
json mbot;
vector<int> selected = bot.GetAllSelectedUnits();
for (int id : selected) {
mbot["selected_units"].push_back(id);
}
vector<int> state = bot.GetState();
for (int id : state) {
mbot["state"].push_back(id);
}
(*game)["bots"].push_back(mbot);
}
*/
// Serializes one unit: ids, combat properties, position (current and last),
// its active durative command (or an idle "I" placeholder) when a receiver is
// supplied, and all cooldown timers. Appended to the "units" array.
void save2json::Save(const Unit& unit, const CmdReceiver *receiver, json *game) {
    json u;
    u["id"] = unit.GetId();
    u["player_id"] = unit.GetPlayerId();
    const UnitProperty& prop = unit.GetProperty();
    u["att"] = prop._att;
    u["att_r"] = prop._att_r;
    u["vis_r"] = prop._vis_r;
    u["def"] = prop._def;
    u["hp"] = prop._hp;
    u["max_hp"] = prop._max_hp;
    u["speed"] = prop._speed;
    u["unit_type"] = unit.GetUnitType();
    set_p(unit.GetPointF(), &u["p"]);
    set_p(unit.GetLastPointF(), &u["last_p"]);
    // Save commands.
    if (receiver != nullptr) {
        const CmdDurative *cmd = receiver->GetUnitDurativeCmd(unit.GetId());
        if (cmd != nullptr) {
            set_cmd(cmd, &u["cmd"]);
        } else {
            // No durative command pending: report the unit as idle ("I").
            u["cmd"]["cmd"] = "I";
            u["cmd"]["id"] = unit.GetId();
            u["cmd"]["state"] = 0;
        }
    }
    // Set cds.
    for (int i = 0; i < NUM_COOLDOWN; ++i) {
        json cd;
        CDType t = (CDType)i;
        const Cooldown &cd_ref = prop.CD(t);
        cd["cd"] = cd_ref._cd;
        cd["last"] = cd_ref._last;
        cd["name"] = ::_CDType2string(t);
        u["cds"].push_back(cd);
    }
    (*game)["units"].push_back(u);
}
void save2json::Save(const Bullet& bullet, json *game) {
json bb;
bb["id_from"] = bullet.GetIdFrom();
bb["state"] = ::_BulletState2string(bullet.GetState());
set_p(bullet.GetPointF(), &bb["p"]);
(*game)["bullets"].push_back(bb);
}
// Record the durative commands issued at the current tick. With
// player_id == INVALID every command is kept; otherwise only those whose
// issuing unit belongs to the given player.
void save2json::SaveCmd(const CmdReceiver &receiver, PlayerId player_id, json *game) {
    const vector<CmdDurative*> &added_cmds = receiver.GetHistoryAtCurrentTick();
    for (const auto cmd : added_cmds) {
        const bool visible =
            (player_id == INVALID) || (Player::ExtractPlayerId(cmd->id()) == player_id);
        if (visible) {
            set_cmd(cmd, &(*game)["new_cmd"]);
        }
    }
}
| 2,713 |
852 | <filename>RecoLocalCalo/HcalLaserReco/src/HcalLaserUnpacker.cc
#include "DataFormats/FEDRawData/interface/FEDRawData.h"
#include "DataFormats/HcalDigi/interface/HcalLaserDigi.h"
#include "RecoLocalCalo/HcalLaserReco/src/HcalLaserUnpacker.h"
#include "FWCore/Utilities/interface/Exception.h"
#include <ostream>
HcalLaserUnpacker::HcalLaserUnpacker() {}
// On-the-wire layout of the combined TDC/QDC laser-monitor data block.
// In unpack() this struct is overlaid directly onto the FED raw-data
// buffer, so the field order and sizes must match the readout format.
struct CombinedTDCQDCDataFormat {
  unsigned int cdfHeader0, cdfHeader1, cdfHeader2, cdfHeader3;  // common-data-format header words
  unsigned int n_qdc_hits;  // Count of QDC channels
  unsigned int n_tdc_hits;  // upper/lower TDC counts
  unsigned short qdc_values[4];  // first QDC words; the real count is n_qdc_hits,
                                 // with further values following in the raw buffer
};
// Decode a combined TDC/QDC laser-monitor block from the FED payload into an
// HcalLaserDigi: first the QDC values (low 12 bits each), then the TDC hits
// (channel in bits 22-30, time in bits 0-19).
// Throws cms::Exception("Missing Data") when the payload is too small to
// hold the header.
void HcalLaserUnpacker::unpack(const FEDRawData& raw, HcalLaserDigi& digi) const {
  if (raw.size() < 3 * 8) {  // minimum: three 64-bit CDF header words
    throw cms::Exception("Missing Data") << "No data in the block";
  }

  // Overlay the raw buffer with the expected record layout.
  // NOTE(review): n_qdc_hits and n_tdc_hits come straight from the data and
  // are not validated against raw.size(); corrupted data could read past the
  // end of the buffer — confirm upstream guarantees well-formed payloads.
  const CombinedTDCQDCDataFormat* qdctdc = (const CombinedTDCQDCDataFormat*)raw.data();

  // first, we do the QADC
  std::vector<uint16_t> qadcvals;
  for (unsigned int i = 0; i < qdctdc->n_qdc_hits; i++) {
    qadcvals.push_back(qdctdc->qdc_values[i] & 0xFFF);  // QDC value = low 12 bits
  }
  digi.setQADC(qadcvals);

  // next, we do the TDC
  const unsigned int* hitbase = (&(qdctdc->n_tdc_hits)) + 1;  // base is one beyond
  unsigned int totalhits = 0;
  hitbase += qdctdc->n_qdc_hits / 2;  // two unsigned short per unsigned long
  totalhits = qdctdc->n_tdc_hits & 0xFFFF;  // mask off high bits
  for (unsigned int i = 0; i < totalhits; i++) {
    int channel = (hitbase[i] & 0x7FC00000) >> 22;  // hardcode channel assignment
    int time = (hitbase[i] & 0xFFFFF);
    if (channel == 0 && time == 0 && i == (totalhits - 1))
      continue;  // ignore "filler" hit
    digi.addTDCHit(channel, time);
  }
}
| 681 |
1,909 | <gh_stars>1000+
package org.knowm.xchange.examples.bithumb.trade;
import java.io.IOException;
import java.math.BigDecimal;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.bithumb.BithumbException;
import org.knowm.xchange.bithumb.dto.account.BithumbOrder;
import org.knowm.xchange.bithumb.service.BithumbTradeServiceRaw;
import org.knowm.xchange.currency.Currency;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.Order;
import org.knowm.xchange.dto.trade.LimitOrder;
import org.knowm.xchange.examples.bithumb.BithumbDemoUtils;
import org.knowm.xchange.exceptions.ExchangeException;
import org.knowm.xchange.service.trade.TradeService;
import org.knowm.xchange.service.trade.params.CancelOrderByCurrencyPair;
import org.knowm.xchange.service.trade.params.CancelOrderByIdParams;
import org.knowm.xchange.service.trade.params.DefaultTradeHistoryParamCurrencyPair;
import org.knowm.xchange.service.trade.params.TradeHistoryParams;
import org.knowm.xchange.service.trade.params.orders.DefaultOpenOrdersParamCurrencyPair;
import org.knowm.xchange.service.trade.params.orders.OpenOrdersParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Demonstrates trading on Bithumb through both the generic XChange {@link TradeService} API and
 * the exchange-specific raw API ({@link BithumbTradeServiceRaw}): list open orders and trade
 * history, place a buy and a sell limit order, then cancel everything still open.
 *
 * <p>WARNING: running this against a funded account places and cancels real orders.
 */
public class BithumbTradeDemo {

  private static final Logger log = LoggerFactory.getLogger(BithumbTradeDemo.class);

  /** Market used throughout the demo (XRP priced in KRW). */
  public static final CurrencyPair CURRENCY_PAIR = new CurrencyPair(Currency.XRP, Currency.KRW);

  public static void main(String[] args) throws IOException, InterruptedException {
    final Exchange exchange = BithumbDemoUtils.createExchange();
    final TradeService tradeService = exchange.getTradeService();
    generic(tradeService);
    raw((BithumbTradeServiceRaw) tradeService);
  }

  /**
   * Cancel-order parameters that carry both the order id and its currency pair, as Bithumb
   * requires both to cancel an order.
   */
  public interface CancelOrderByPairAndIdParams
      extends CancelOrderByIdParams, CancelOrderByCurrencyPair {}

  /** Exercises the exchange through the generic, exchange-agnostic {@link TradeService} API. */
  private static void generic(TradeService tradeService) throws IOException, InterruptedException {
    log.debug("{}", "----------GENERIC----------");
    final OpenOrdersParams openOrders = new DefaultOpenOrdersParamCurrencyPair(CURRENCY_PAIR);
    try {
      log.debug("{}", tradeService.getOpenOrders(openOrders));
    } catch (ExchangeException e) {
      // Best-effort: listing open orders may fail without aborting the demo.
      log.debug("", e);
    }
    final TradeHistoryParams tradeHistoryParams =
        new DefaultTradeHistoryParamCurrencyPair(CURRENCY_PAIR);
    log.debug("{}", tradeService.getTradeHistory(tradeHistoryParams));
    // Place a buy well below and a sell well above market so neither fills.
    final LimitOrder limitOrderBuy =
        new LimitOrder.Builder(Order.OrderType.BID, CURRENCY_PAIR)
            .originalAmount(BigDecimal.valueOf(10))
            .limitPrice(BigDecimal.valueOf(100))
            .build();
    log.debug("{}", tradeService.placeLimitOrder(limitOrderBuy));
    Thread.sleep(3000); // wait for order to propagate
    final LimitOrder limitOrderSell =
        new LimitOrder.Builder(Order.OrderType.ASK, CURRENCY_PAIR)
            .originalAmount(BigDecimal.valueOf(10))
            .limitPrice(BigDecimal.valueOf(600))
            .build();
    log.debug("{}", tradeService.placeLimitOrder(limitOrderSell));
    Thread.sleep(3000); // wait for order to propagate
    // Cancel every remaining open order, supplying both id and pair.
    tradeService.getOpenOrders(openOrders).getOpenOrders().stream()
        .map(Order::getId)
        .map(
            orderId ->
                new CancelOrderByPairAndIdParams() {
                  @Override
                  public String getOrderId() {
                    return orderId;
                  }

                  @Override
                  public CurrencyPair getCurrencyPair() {
                    return CURRENCY_PAIR;
                  }
                })
        .forEach(
            param -> {
              try {
                log.debug("{}", tradeService.cancelOrder(param));
              } catch (IOException ignored) {
                // Cancellation is best-effort; keep going for the rest.
              }
            });
    // log.debug("{}",tradeService.placeMarketOrder(new MarketOrder(Order.OrderType.ASK,
    // BigDecimal.valueOf(10), CURRENCY_PAIR)));
  }

  /** Exercises the same flow through the Bithumb-specific raw service. */
  private static void raw(BithumbTradeServiceRaw tradeServiceRaw)
      throws IOException, InterruptedException {
    log.debug("{}", "----------RAW----------");
    final OpenOrdersParams openOrders = new DefaultOpenOrdersParamCurrencyPair(CURRENCY_PAIR);
    try {
      log.debug("{}", tradeServiceRaw.getBithumbOrders(CURRENCY_PAIR));
    } catch (BithumbException e) {
      // Best-effort: listing open orders may fail without aborting the demo.
      log.debug("", e);
    }
    log.debug("{}", tradeServiceRaw.getBithumbUserTransactions(CURRENCY_PAIR));
    // Same non-marketable buy/sell pair as in generic().
    final LimitOrder limitOrderBuy =
        new LimitOrder.Builder(Order.OrderType.BID, CURRENCY_PAIR)
            .originalAmount(BigDecimal.valueOf(10))
            .limitPrice(BigDecimal.valueOf(100))
            .build();
    log.debug("{}", tradeServiceRaw.placeBithumbLimitOrder(limitOrderBuy));
    Thread.sleep(3000); // wait for order to propagate
    final LimitOrder limitOrderSell =
        new LimitOrder.Builder(Order.OrderType.ASK, CURRENCY_PAIR)
            .originalAmount(BigDecimal.valueOf(10))
            .limitPrice(BigDecimal.valueOf(600))
            .build();
    log.debug("{}", tradeServiceRaw.placeBithumbLimitOrder(limitOrderSell));
    Thread.sleep(3000); // wait for order to propagate
    tradeServiceRaw.getBithumbOrders(CURRENCY_PAIR).getData().stream()
        .map(BithumbOrder::getOrderId)
        .forEach(
            orderId -> {
              try {
                log.debug("{}", tradeServiceRaw.cancelBithumbOrder(orderId, CURRENCY_PAIR));
              } catch (IOException ignored) {
                // Cancellation is best-effort; keep going for the rest.
              }
            });
    // log.debug("{}", tradeServiceRaw.placeBithumbMarketOrder(new
    // MarketOrder(Order.OrderType.ASK, BigDecimal.valueOf(10), CURRENCY_PAIR)));
  }
}
| 2,298 |
575 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef ASH_KEYBOARD_VIRTUAL_KEYBOARD_CONTROLLER_H_
#define ASH_KEYBOARD_VIRTUAL_KEYBOARD_CONTROLLER_H_
#include <stdint.h>
#include "ash/ash_export.h"
#include "ash/bluetooth_devices_observer.h"
#include "ash/public/cpp/keyboard/keyboard_controller_observer.h"
#include "ash/public/cpp/session/session_observer.h"
#include "ash/public/cpp/tablet_mode_observer.h"
#include "base/macros.h"
#include "ui/base/ime/chromeos/ime_keyset.h"
#include "ui/events/devices/input_device_event_observer.h"
namespace ash {
// This class observes input device changes for the virtual keyboard.
// TODO(https://crbug.com/849995): Should rename this to not confuse it with
// KeyboardController. |ForceShowKeyboardWithKeyset| also does not really
// belong here based on the current class description.
class ASH_EXPORT VirtualKeyboardController
    : public TabletModeObserver,
      public ui::InputDeviceEventObserver,
      public KeyboardControllerObserver,
      public SessionObserver {
 public:
  VirtualKeyboardController();
  ~VirtualKeyboardController() override;

  // Force enable the keyboard and show it with the given keyset: none, emoji,
  // handwriting or voice. Works even in laptop mode.
  void ForceShowKeyboardWithKeyset(chromeos::input_method::ImeKeyset keyset);

  // TabletModeObserver:
  void OnTabletModeEventsBlockingChanged() override;

  // ui::InputDeviceEventObserver:
  void OnInputDeviceConfigurationChanged(uint8_t input_device_types) override;

  // Toggles whether the presence of an external keyboard should be ignored
  // when determining whether or not to show the on-screen keyboard.
  void ToggleIgnoreExternalKeyboard();

  // KeyboardControllerObserver:
  void OnKeyboardEnabledChanged(bool is_enabled) override;
  void OnKeyboardHidden(bool is_temporary_hide) override;

  // SessionObserver:
  void OnActiveUserSessionChanged(const AccountId& account_id) override;

 private:
  // Updates the list of active input devices.
  void UpdateDevices();

  // Updates the keyboard state.
  void UpdateKeyboardEnabled();

  // Force enable the keyboard and show it, even in laptop mode.
  void ForceShowKeyboard();

  // Callback function of |bluetooth_devices_observer_|. Called when the
  // bluetooth adapter or |device| changes.
  void OnBluetoothAdapterOrDeviceChanged(device::BluetoothDevice* device);

  // True if an external keyboard is connected.
  bool has_external_keyboard_;
  // True if an internal keyboard is connected.
  bool has_internal_keyboard_;
  // True if a touchscreen is connected.
  bool has_touchscreen_;
  // True if the presence of an external keyboard should be ignored.
  // Toggled by ToggleIgnoreExternalKeyboard().
  bool ignore_external_keyboard_;

  // Observer to observe the bluetooth devices; used because bluetooth
  // keyboards are reported separately from the input-device lists above.
  std::unique_ptr<BluetoothDevicesObserver> bluetooth_devices_observer_;

  DISALLOW_COPY_AND_ASSIGN(VirtualKeyboardController);
};
} // namespace ash
#endif // ASH_KEYBOARD_VIRTUAL_KEYBOARD_CONTROLLER_H_
| 922 |
8,747 | // Copyright 2021 Espressif Systems (Shanghai) PTE LTD
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#pragma once
#include <queue>
#include <mutex>
#include <condition_variable>
#include <memory>
#include <thread>
#include <atomic>
// Signature of the callback invoked by TimerTaskMock on every timeout.
typedef void (*cb_t)(void* arg);

// Test double that fires `cb` every `ms` milliseconds on a background thread
// once SetTimeout() has been called.
//
// Fixes over the previous revision:
//  * The std::thread member is declared LAST: members are initialized in
//    declaration order, and previously the worker thread was started before
//    `active` and `ms` had been initialized (a data race on uninitialized
//    state).
//  * A separate `alive` flag lets the destructor stop the worker even when
//    SetTimeout() was never called; previously ~TimerTaskMock() set
//    active=false while run() was still waiting for active==true, so
//    t.join() hung forever.
//  * `ms` is atomic (written by SetTimeout, read by the worker), and the
//    period sleep is sliced into 1 ms steps so destruction is prompt and no
//    callback fires after teardown has begun.
class TimerTaskMock
{
public:
    TimerTaskMock(cb_t cb): cb(cb), active(false), alive(true), ms(INT32_MAX), t(run_static, this) {}

    ~TimerTaskMock(void)
    {
        alive = false;   // unblocks both loops in run()
        active = false;
        t.join();
    }

    // Arm (or re-arm) the timer with a period of `m` milliseconds.
    void SetTimeout(uint32_t m)
    {
        ms = m;          // publish the period before arming the timer
        active = true;
    }

private:
    static void run_static(TimerTaskMock* timer)
    {
        timer->run();
    }

    // Worker loop: wait until armed, then invoke cb every `ms` milliseconds
    // until the object is torn down.
    void run(void)
    {
        while (alive.load() && !active.load()) {
            std::this_thread::sleep_for(std::chrono::milliseconds(1));
        }
        while (alive.load() && active.load()) {
            const uint32_t period = ms.load();
            // Sleep in 1 ms slices so the destructor never has to wait out a
            // full (possibly huge) period before join() returns.
            for (uint32_t waited = 0; alive.load() && waited < period; ++waited) {
                std::this_thread::sleep_for(std::chrono::milliseconds(1));
            }
            if (alive.load() && active.load()) {
                cb(nullptr);
            }
        }
    }

    cb_t cb;                    // user callback, invoked with nullptr
    std::atomic<bool> active;   // true once SetTimeout() armed the timer
    std::atomic<bool> alive;    // cleared by the destructor to stop run()
    std::atomic<uint32_t> ms;   // current period in milliseconds
    std::thread t;              // MUST be last: starts running in the ctor
};
| 581 |
984 | <reponame>om-sharma/java-driver
/*
* Copyright DataStax, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datastax.oss.driver.internal.core.type;
import com.datastax.oss.driver.api.core.detach.AttachmentPoint;
import com.datastax.oss.driver.api.core.type.CustomType;
import com.datastax.oss.driver.shaded.guava.common.base.Preconditions;
import edu.umd.cs.findbugs.annotations.NonNull;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import net.jcip.annotations.Immutable;
@Immutable
public class DefaultCustomType implements CustomType, Serializable {

  private static final long serialVersionUID = 1;

  /** @serial */
  private final String className;

  /**
   * @param className the fully-qualified Cassandra class name backing this custom type; must be
   *     non-null (enforced here and re-checked on deserialization).
   */
  public DefaultCustomType(@NonNull String className) {
    Preconditions.checkNotNull(className);
    this.className = className;
  }

  @NonNull
  @Override
  public String getClassName() {
    return className;
  }

  @Override
  public boolean isDetached() {
    // Custom types carry no attachment-dependent state, so they are never detached.
    return false;
  }

  @Override
  public void attach(@NonNull AttachmentPoint attachmentPoint) {
    // nothing to do
  }

  @Override
  public boolean equals(Object other) {
    // Equality is defined against the CustomType interface (class name only),
    // so any CustomType implementation with the same class name is equal.
    if (other == this) {
      return true;
    } else if (other instanceof CustomType) {
      CustomType that = (CustomType) other;
      return this.className.equals(that.getClassName());
    } else {
      return false;
    }
  }

  @Override
  public int hashCode() {
    return className.hashCode();
  }

  @Override
  public String toString() {
    return "Custom(" + className + ")";
  }

  // Re-validate the non-null invariant when an instance arrives via Java serialization.
  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    Preconditions.checkNotNull(className);
  }
}
| 703 |
510 | <filename>scripts/lsdbus.py
#!/usr/bin/env python3
"""List D-Bus system-bus connections together with their owning processes.

For every unique connection name (":x.y") on the system bus, print the
connection name, the owning PID, and that process's command line as read
from /proc/<pid>/cmdline.
"""
import dbus.mainloop.glib

dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)

bus = dbus.SystemBus()
o = bus.get_object('org.freedesktop.DBus', '/org/freedesktop/DBus')
o = dbus.Interface(o, 'org.freedesktop.DBus')
ls = o.ListNames()
for n in ls:
    # Only unique names (":x.y"); well-known names are aliases of these.
    if n[0] != ':':
        continue
    try:
        pid = o.GetConnectionUnixProcessID(n)
        with open('/proc/%d/cmdline' % pid) as f:
            s = f.read()
    except Exception:
        # The connection or its process may have vanished while we iterate;
        # skip it rather than aborting the whole listing.
        continue
    # cmdline is NUL-separated and NUL-terminated: strip the trailing NUL so
    # the split does not produce an empty final field (trailing space).
    args = s.rstrip('\0').split('\0')
    print('%-10s %-5d %s' % (n, pid, ' '.join(args)))
| 255 |
2,338 | // FIXME(dliew): Duplicated from `test/sanitizer_common/TestCases/Darwin/symbolizer-function-offset-dladdr.cpp`.
// This case can be dropped once sanitizer_common tests work on iOS devices (rdar://problem/47333049).
// NOTE: `detect_leaks=0` is necessary because with LSan enabled the dladdr
// symbolizer actually leaks memory because the call to
// `__sanitizer::DemangleCXXABI` leaks memory which LSan detects
// (rdar://problem/42868950).
// RUN: %clangxx_asan %s -O0 -o %t
// RUN: %env_asan_opts=detect_leaks=0,verbosity=2,external_symbolizer_path=,stack_trace_format='"function_name_%f___function_offset_%q"' %run %t > %t.output 2>&1
// RUN: FileCheck -input-file=%t.output %s
#include <sanitizer/common_interface_defs.h>
#include <stdio.h>
// Leaf frame: prints the stack trace whose dladdr symbolization the CHECK
// lines below verify (function name + nonzero offset).
void baz() {
  printf("Do stuff in baz\n");
  __sanitizer_print_stack_trace();
}
// Intermediate frame between foo() and baz(); must appear in the trace.
void bar() {
  printf("Do stuff in bar\n");
  baz();
}
// Intermediate frame between main() and bar(); must appear in the trace.
void foo() {
  printf("Do stuff in foo\n");
  bar();
}
// Entry point: drives the call chain main -> foo -> bar -> baz.
int main() {
  printf("Do stuff in main\n");
  foo();
  return 0;
}
// CHECK: External symbolizer is explicitly disabled
// CHECK: Using dladdr symbolizer
// These `function_offset` patterns are designed to disallow `0x0` which is the
// value printed for `kUnknown`.
// CHECK: function_name_baz{{(\(\))?}}___function_offset_0x{{0*[1-9a-f][0-9a-f]*$}}
// CHECK: function_name_bar{{(\(\))?}}___function_offset_0x{{0*[1-9a-f][0-9a-f]*$}}
// CHECK: function_name_foo{{(\(\))?}}___function_offset_0x{{0*[1-9a-f][0-9a-f]*$}}
// CHECK: function_name_main{{(\(\))?}}___function_offset_0x{{0*[1-9a-f][0-9a-f]*$}}
| 612 |
654 | <gh_stars>100-1000
/* icmp.c - Server response tests using ICMP echo requests
Copyright (C) 2000, 2001 <NAME>
Copyright (C) 2003, 2005, 2007, 2012 <NAME>
This file is part of the pdnsd package.
pdnsd is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
pdnsd is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with pdnsd; see the file COPYING. If not, see
<http://www.gnu.org/licenses/>.
*/
/*
* This should now work on both Linux and FreeBSD (and CYGWIN?). If anyone
* with experience in other Unix flavors wants to contribute platform-specific
* code, he is very welcome.
*/
#include <config.h>
#ifdef HAVE_SYS_POLL_H
#include <sys/poll.h>
#endif
#include <sys/time.h>
#include <stdlib.h>
#include <stddef.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include "ipvers.h"
#if (TARGET==TARGET_LINUX)
# include <netinet/ip.h>
# include <linux/types.h>
# include <linux/icmp.h>
#elif (TARGET==TARGET_BSD)
# include <netinet/in_systm.h>
# include <netinet/ip.h>
# include <netinet/ip_icmp.h>
#elif (TARGET==TARGET_CYGWIN)
# include <netinet/ip.h>
# include <netinet/in_systm.h>
# include <netinet/ip_icmp.h>
# include "freebsd_netinet_ip_icmp.h"
#else
# error Unsupported platform!
#endif
#ifdef ENABLE_IPV6
# include <netinet/ip6.h>
# include <netinet/icmp6.h>
#endif
#include <netdb.h>
#include "icmp.h"
#include "error.h"
#include "helpers.h"
#include "servers.h"
#define ICMP_MAX_ERRS 10
static volatile unsigned long icmp_errs=0; /* This is only here to minimize log output.
Since the consequences of a race is only
one log message more/less (out of
ICMP_MAX_ERRS), no lock is required. */
volatile int ping_isocket=-1;
#ifdef ENABLE_IPV6
volatile int ping6_isocket=-1;
#endif
/* different names, same thing... be careful, as these are macros... */
#if (TARGET==TARGET_LINUX)
# define ip_saddr saddr
# define ip_daddr daddr
# define ip_hl ihl
# define ip_p protocol
#else
# define icmphdr icmp
# define iphdr ip
# define ip_saddr ip_src.s_addr
# define ip_daddr ip_dst.s_addr
#endif
#if (TARGET==TARGET_LINUX)
# define icmp_type type
# define icmp_code code
# define icmp_cksum checksum
# define icmp_id un.echo.id
# define icmp_seq un.echo.sequence
#else
# define ICMP_DEST_UNREACH ICMP_UNREACH
# define ICMP_TIME_EXCEEDED ICMP_TIMXCEED
#endif
#define ICMP_BASEHDR_LEN 8
#define ICMP4_ECHO_LEN ICMP_BASEHDR_LEN
#if (TARGET==TARGET_LINUX) || (TARGET==TARGET_BSD) || (TARGET==TARGET_CYGWIN)
/*
* These are the ping implementations for Linux/FreeBSD in their IPv4/ICMPv4 and IPv6/ICMPv6 versions.
* I know they share some code, but I'd rather keep them separated in some parts, as some
* things might go in different directions there.
*/
/* Initialize the sockets for pinging */
/* Initialize the sockets for pinging.
 * Creates the raw ICMP socket (and, when built with IPv6 and not running in
 * IPv4-only mode, the raw ICMPv6 socket). Failure is logged but not fatal:
 * ping() later checks which sockets are usable. Raw sockets require
 * privileges (root/CAP_NET_RAW). */
void init_ping_socket()
{
	if ((ping_isocket=socket(PF_INET, SOCK_RAW, IPPROTO_ICMP))==-1) {
		log_warn("icmp ping: socket() failed: %s",strerror(errno));
	}
#ifdef ENABLE_IPV6
	if (!run_ipv4) {
		/* Failure to initialize the IPv4 ping socket is not
		   necessarily a problem, as long as the IPv6 version works. */
		if ((ping6_isocket=socket(PF_INET6, SOCK_RAW, IPPROTO_ICMPV6))==-1) {
			log_warn("icmpv6 ping: socket() failed: %s",strerror(errno));
		}
	}
#endif
}
/* Takes a packet as sent out and a received ICMP packet and checks whether
 * the ICMP packet is an error reply to the sent-out one. packet is only the
 * packet (without IP header); errmsg includes an IP header.
 * to is the destination address of the original packet (the only thing that
 * is actually compared of the IP header). The RFC says that we get at least
 * 8 bytes of the offending packet. We do not compare more, as this is all we
 * need. Returns nonzero iff errmsg is an ICMP error of type errtype quoting
 * our packet. */
static int icmp4_errcmp(char *packet, int plen, struct in_addr *to, char *errmsg, int elen, int errtype)
{
	struct iphdr iph;
	struct icmphdr icmph;
	struct iphdr eiph;
	char *data;
	/* XXX: lots of memcpy to avoid unaligned accesses on alpha */
	if (elen<sizeof(struct iphdr))
		return 0;
	memcpy(&iph,errmsg,sizeof(iph));
	/* Must be ICMP and long enough to hold the embedded (quoted) IP header. */
	if (iph.ip_p!=IPPROTO_ICMP || elen<iph.ip_hl*4+ICMP_BASEHDR_LEN+sizeof(eiph))
		return 0;
	PDNSD_ASSERT(sizeof(icmph) >= ICMP_BASEHDR_LEN, "icmp4_errcmp: ICMP_BASEHDR_LEN botched");
	memcpy(&icmph,errmsg+iph.ip_hl*4,ICMP_BASEHDR_LEN);
	memcpy(&eiph,errmsg+iph.ip_hl*4+ICMP_BASEHDR_LEN,sizeof(eiph));
	/* Need at least 8 bytes of the offending packet's payload (RFC 792). */
	if (elen<iph.ip_hl*4+ICMP_BASEHDR_LEN+eiph.ip_hl*4+8)
		return 0;
	data=errmsg+iph.ip_hl*4+ICMP_BASEHDR_LEN+eiph.ip_hl*4;
	/* Match: right error type, quoted packet went to `to`, payload matches. */
	return icmph.icmp_type==errtype && memcmp(&to->s_addr, &eiph.ip_daddr, sizeof(to->s_addr))==0 &&
		memcmp(data, packet, plen<8?plen:8)==0;
}
/* IPv4/ICMPv4 ping. Called from ping (see below).
 * Sends up to `rep` echo requests to `addr`, waiting `timeout` seconds for
 * each reply. Returns a tick count derived from which request was answered
 * and how long it took, or -1 on error, destination-unreachable/TTL-exceeded,
 * interruption, or no answer. Requires ping_isocket to be valid. */
static int ping4(struct in_addr addr, int timeout, int rep)
{
	int i;
	int isock;
#if (TARGET==TARGET_LINUX)
	struct icmp_filter f;
#endif
	unsigned short id=(unsigned short)get_rand16(); /* randomize a ping id */
	isock=ping_isocket;
#if (TARGET==TARGET_LINUX)
	/* Fancy ICMP filtering -- only on Linux (as far as I know) */
	/* In fact, there should be macros for treating icmp_filter, but I haven't found them in Linux 2.2.15.
	 * So, set it manually and unportable ;-) */
	/* This filter lets ECHO_REPLY (0), DEST_UNREACH(3) and TIME_EXCEEDED(11) pass. */
	/* !(0000 1000 0000 1001) = 0xff ff f7 f6 */
	f.data=0xfffff7f6;
	if (setsockopt(isock,SOL_RAW,ICMP_FILTER,&f,sizeof(f))==-1) {
		if (++icmp_errs<=ICMP_MAX_ERRS) {
			log_warn("icmp ping: setsockopt() failed: %s", strerror(errno));
		}
		return -1;
	}
#endif
	for (i=0;i<rep;i++) {
		struct sockaddr_in from,to;
		struct icmphdr icmpd;
		unsigned long sum;
		uint16_t *ptr;
		long tm,tpassed;
		int j;
		/* Build the echo request; id identifies this pdnsd run, seq the repetition. */
		icmpd.icmp_type=ICMP_ECHO;
		icmpd.icmp_code=0;
		icmpd.icmp_cksum=0;
		icmpd.icmp_id=htons((uint16_t)id);
		icmpd.icmp_seq=htons((uint16_t)i);
		/* Checksumming - Algorithm taken from nmap. Thanks... */
		ptr=(uint16_t *)&icmpd;
		sum=0;
		for (j=0;j<4;j++) {
			sum+=*ptr++;
		}
		sum = (sum >> 16) + (sum & 0xffff);
		sum += (sum >> 16);
		icmpd.icmp_cksum=~sum;
		memset(&to,0,sizeof(to));
		to.sin_family=AF_INET;
		to.sin_port=0;
		to.sin_addr=addr;
		SET_SOCKA_LEN4(to);
		if (sendto(isock,&icmpd,ICMP4_ECHO_LEN,0,(struct sockaddr *)&to,sizeof(to))==-1) {
			if (++icmp_errs<=ICMP_MAX_ERRS) {
				log_warn("icmp ping: sendto() failed: %s.",strerror(errno));
			}
			return -1;
		}
		/* listen for reply. */
		tm=time(NULL); tpassed=0;
		do {
			int psres;
#ifdef NO_POLL
			fd_set fds,fdse;
			struct timeval tv;
			FD_ZERO(&fds);
			PDNSD_ASSERT(isock<FD_SETSIZE,"socket file descriptor exceeds FD_SETSIZE.");
			FD_SET(isock, &fds);
			fdse=fds;
			tv.tv_usec=0;
			tv.tv_sec=timeout>tpassed?timeout-tpassed:0;
			/* There is a possible race condition with the arrival of a signal here,
			   but it is so unlikely to be a problem in practice that the effort
			   to do this properly is not worth the trouble.
			*/
			if(is_interrupted_servstat_thread()) {
				DEBUG_MSG("server status thread interrupted.\n");
				return -1;
			}
			psres=select(isock+1,&fds,NULL,&fdse,&tv);
#else
			struct pollfd pfd;
			pfd.fd=isock;
			pfd.events=POLLIN;
			/* There is a possible race condition with the arrival of a signal here,
			   but it is so unlikely to be a problem in practice that the effort
			   to do this properly is not worth the trouble.
			*/
			if(is_interrupted_servstat_thread()) {
				DEBUG_MSG("server status thread interrupted.\n");
				return -1;
			}
			psres=poll(&pfd,1,timeout>tpassed?(timeout-tpassed)*1000:0);
#endif
			if (psres<0) {
				if(errno==EINTR && is_interrupted_servstat_thread()) {
					DEBUG_MSG("poll/select interrupted in server status thread.\n");
				}
				else if (++icmp_errs<=ICMP_MAX_ERRS) {
					log_warn("poll/select failed: %s",strerror(errno));
				}
				return -1;
			}
			if (psres==0)  /* timed out */
				break;
#ifdef NO_POLL
			if (FD_ISSET(isock,&fds) || FD_ISSET(isock,&fdse))
#else
			if (pfd.revents&(POLLIN|POLLERR))
#endif
			{
				char buf[1024];
				socklen_t sl=sizeof(from);
				int len;
				if ((len=recvfrom(isock,&buf,sizeof(buf),0,(struct sockaddr *)&from,&sl))!=-1) {
					/* Raw IPv4 sockets deliver the IP header; parse past it. */
					if (len>sizeof(struct iphdr)) {
						struct iphdr iph;
						memcpy(&iph, buf, sizeof(iph));
						if (len-iph.ip_hl*4>=ICMP_BASEHDR_LEN) {
							struct icmphdr icmpp;
							memcpy(&icmpp, ((uint32_t *)buf)+iph.ip_hl, sizeof(icmpp));
							if (iph.ip_saddr==addr.s_addr && icmpp.icmp_type==ICMP_ECHOREPLY &&
							    ntohs(icmpp.icmp_id)==id && ntohs(icmpp.icmp_seq)<=i) {
								return (i-ntohs(icmpp.icmp_seq))*timeout+(time(NULL)-tm); /* return the number of ticks */
							} else {
								/* No regular echo reply. Maybe an error? */
								if (icmp4_errcmp((char *)&icmpd, ICMP4_ECHO_LEN, &to.sin_addr, buf, len, ICMP_DEST_UNREACH) ||
								    icmp4_errcmp((char *)&icmpd, ICMP4_ECHO_LEN, &to.sin_addr, buf, len, ICMP_TIME_EXCEEDED)) {
									return -1;
								}
							}
						}
					}
				} else {
					return -1; /* error */
				}
			}
			else {
				if (++icmp_errs<=ICMP_MAX_ERRS) {
					log_error("Unhandled poll/select event in ping4() at %s, line %d.",__FILE__,__LINE__);
				}
				return -1;
			}
			tpassed=time(NULL)-tm;
		} while (tpassed<timeout);
	}
	return -1; /* no answer */
}
#ifdef ENABLE_IPV6
/* Takes a packet as sent out and a received ICMPv6 packet and checks whether
 * the ICMPv6 packet is an error reply to the sent-out one. packet is only
 * the packet (without IPv6 header). errmsg does not include an IPv6 header.
 * to is the address the sent packet went to.
 * This is specialized for icmpv6: It zeros out the checksum field, which is
 * filled in by the kernel, and expects that the checksum field in the
 * sent-out packet is zeroed out too. We need a little magic to parse the
 * answer, as there could be extension headers present and we don't know
 * their length a priori. */
static int icmp6_errcmp(char *packet, int plen, struct in6_addr *to, char *errmsg, int elen, int errtype)
{
	struct icmp6_hdr icmph;
	struct ip6_hdr eiph;
	struct ip6_hbh hbh;
	char *data;
	int rlen,nxt;
	/* XXX: lots of memcpy here to avoid unaligned access faults on alpha */
	if (elen<sizeof(icmph)+sizeof(eiph))
		return 0;
	memcpy(&icmph,errmsg,sizeof(icmph));
	memcpy(&eiph,errmsg+sizeof(icmph),sizeof(eiph));
	if (!IN6_ARE_ADDR_EQUAL(&eiph.ip6_dst, to))
		return 0;
	rlen=elen-sizeof(icmph)-sizeof(eiph);
	data=errmsg+sizeof(icmph)+sizeof(eiph);
	nxt=eiph.ip6_nxt;
	/* Now, jump over any known option header that might be present, and then
	 * try to compare the packets. */
	while (nxt!=IPPROTO_ICMPV6) {
		/* Those are the headers we understand. */
		if (nxt!=IPPROTO_HOPOPTS && nxt!=IPPROTO_ROUTING && nxt!=IPPROTO_DSTOPTS)
			return 0;
		if (rlen<sizeof(hbh))
			return 0;
		memcpy(&hbh,data,sizeof(hbh));
		/* NOTE(review): per RFC 8200 the ip6h_len field of these option
		   headers is in units of 8 octets, not counting the first 8; the
		   raw comparisons/advances below treat it as a byte count --
		   confirm against the RFC before relying on multi-header parsing. */
		if (rlen<hbh.ip6h_len)
			return 0;
		rlen-=hbh.ip6h_len;
		nxt=hbh.ip6h_nxt;
		data+=hbh.ip6h_len;
	}
	if (rlen<sizeof(struct icmp6_hdr))
		return 0;
	/* Zero out the checksum of the enclosed ICMPv6 header, it is kernel-filled in the original data */
	memset(((char *)data)+offsetof(struct icmp6_hdr,icmp6_cksum),0,sizeof(icmph.icmp6_cksum));
	return icmph.icmp6_type==errtype && memcmp(data, packet, plen<rlen?plen:rlen)==0;
}
/* IPv6/ICMPv6 ping. Called from ping (see below).
 * Mirrors ping4(): sends up to `rep` echo requests to `a`, waiting
 * `timeout` seconds for each reply; returns a tick count on success or -1.
 * Unlike the IPv4 path, the kernel computes the ICMPv6 checksum and raw
 * ICMPv6 sockets deliver packets without the IPv6 header.
 * NOTE(review): the id here uses rand()&0xffff while ping4 uses
 * get_rand16() -- presumably these should be consistent; confirm. */
static int ping6(struct in6_addr a, int timeout, int rep)
{
	int i;
/*	int ck_offs=2;*/
	int isock;
	struct icmp6_filter f;
	unsigned short id=(unsigned short)(rand()&0xffff); /* randomize a ping id */
	isock=ping6_isocket;
	/* Only let echo replies and the two interesting error types through. */
	ICMP6_FILTER_SETBLOCKALL(&f);
	ICMP6_FILTER_SETPASS(ICMP6_ECHO_REPLY,&f);
	ICMP6_FILTER_SETPASS(ICMP6_DST_UNREACH,&f);
	ICMP6_FILTER_SETPASS(ICMP6_TIME_EXCEEDED,&f);
	if (setsockopt(isock,IPPROTO_ICMPV6,ICMP6_FILTER,&f,sizeof(f))==-1) {
		if (++icmp_errs<=ICMP_MAX_ERRS) {
			log_warn("icmpv6 ping: setsockopt() failed: %s", strerror(errno));
		}
		return -1;
	}
	for (i=0;i<rep;i++) {
		struct sockaddr_in6 from;
		struct icmp6_hdr icmpd;
		long tm,tpassed;
		icmpd.icmp6_type=ICMP6_ECHO_REQUEST;
		icmpd.icmp6_code=0;
		icmpd.icmp6_cksum=0; /* The friendly kernel does fill that in for us. */
		icmpd.icmp6_id=htons((uint16_t)id);
		icmpd.icmp6_seq=htons((uint16_t)i);
		memset(&from,0,sizeof(from));
		from.sin6_family=AF_INET6;
		from.sin6_flowinfo=IPV6_FLOWINFO;
		from.sin6_port=0;
		from.sin6_addr=a;
		SET_SOCKA_LEN6(from);
		if (sendto(isock,&icmpd,sizeof(icmpd),0,(struct sockaddr *)&from,sizeof(from))==-1) {
			if (++icmp_errs<=ICMP_MAX_ERRS) {
				log_warn("icmpv6 ping: sendto() failed: %s.",strerror(errno));
			}
			return -1;
		}
		/* listen for reply. */
		tm=time(NULL); tpassed=0;
		do {
			int psres;
#ifdef NO_POLL
			fd_set fds,fdse;
			struct timeval tv;
			FD_ZERO(&fds);
			PDNSD_ASSERT(isock<FD_SETSIZE,"socket file descriptor exceeds FD_SETSIZE.");
			FD_SET(isock, &fds);
			fdse=fds;
			tv.tv_usec=0;
			tv.tv_sec=timeout>tpassed?timeout-tpassed:0;
			/* There is a possible race condition with the arrival of a signal here,
			   but it is so unlikely to be a problem in practice that the effort
			   to do this properly is not worth the trouble.
			*/
			if(is_interrupted_servstat_thread()) {
				DEBUG_MSG("server status thread interrupted.\n");
				return -1;
			}
			psres=select(isock+1,&fds,NULL,&fdse,&tv);
#else
			struct pollfd pfd;
			pfd.fd=isock;
			pfd.events=POLLIN;
			/* There is a possible race condition with the arrival of a signal here,
			   but it is so unlikely to be a problem in practice that the effort
			   to do this properly is not worth the trouble.
			*/
			if(is_interrupted_servstat_thread()) {
				DEBUG_MSG("server status thread interrupted.\n");
				return -1;
			}
			psres=poll(&pfd,1,timeout>tpassed?(timeout-tpassed)*1000:0);
#endif
			if (psres<0) {
				if(errno==EINTR && is_interrupted_servstat_thread()) {
					DEBUG_MSG("poll/select interrupted in server status thread.\n");
				}
				else if (++icmp_errs<=ICMP_MAX_ERRS) {
					log_warn("poll/select failed: %s",strerror(errno));
				}
				return -1;
			}
			if (psres==0)  /* timed out */
				break;
#ifdef NO_POLL
			if (FD_ISSET(isock,&fds) || FD_ISSET(isock,&fdse))
#else
			if (pfd.revents&(POLLIN|POLLERR))
#endif
			{
				char buf[1024];
				socklen_t sl=sizeof(from);
				int len;
				if ((len=recvfrom(isock,&buf,sizeof(buf),0,(struct sockaddr *)&from,&sl))!=-1) {
					if (len>=sizeof(struct icmp6_hdr)) {
						/* we get packets without IPv6 header, luckily */
						struct icmp6_hdr icmpp;
						memcpy(&icmpp, buf, sizeof(icmpp));
						if (IN6_ARE_ADDR_EQUAL(&from.sin6_addr,&a) &&
						    ntohs(icmpp.icmp6_id)==id && ntohs(icmpp.icmp6_seq)<=i) {
							return (i-ntohs(icmpp.icmp6_seq))*timeout+(time(NULL)-tm); /* return the number of ticks */
						} else {
							/* No regular echo reply. Maybe an error? */
							if (icmp6_errcmp((char *)&icmpd, sizeof(icmpd), &from.sin6_addr, buf, len, ICMP6_DST_UNREACH) ||
							    icmp6_errcmp((char *)&icmpd, sizeof(icmpd), &from.sin6_addr, buf, len, ICMP6_TIME_EXCEEDED)) {
								return -1;
							}
						}
					}
				} else {
					return -1; /* error */
				}
			}
			else {
				if (++icmp_errs<=ICMP_MAX_ERRS) {
					log_error("Unhandled poll/select event in ping6() at %s, line %d.",__FILE__,__LINE__);
				}
				return -1;
			}
			tpassed=time(NULL)-tm;
		} while (tpassed<timeout);
	}
	return -1; /* no answer */
}
#endif /* ENABLE_IPV6*/
/* Perform an icmp ping on a host, returning -1 on timeout or
 * "host unreachable" or the ping time in 10ths of secs
 * (but actually, we are not that accurate any more).
 * timeout in 10ths of seconds, rep is the repetition count.
 * Dispatches to ping4/ping6 depending on build options and runtime mode;
 * IPv4-mapped IPv6 addresses are pinged via ICMPv4 when possible. */
int ping(pdnsd_a *addr, int timeout, int rep)
{
	/* Bail out early if the socket for the active IP version is unusable. */
	if (SEL_IPVER(ping_isocket,ping6_isocket) == -1)
		return -1;
	/* We were given a timeout in 10ths of seconds,
	   but ping4 and ping6 want a timeout in seconds. */
	timeout /= 10;
#ifdef ENABLE_IPV4
	if (run_ipv4)
		return ping4(addr->ipv4,timeout,rep);
#endif
#ifdef ENABLE_IPV6
	ELSE_IPV6 {
		/* If it is a IPv4 mapped IPv6 address, we prefer ICMPv4. */
		if (ping_isocket!=-1 && IN6_IS_ADDR_V4MAPPED(&addr->ipv6)) {
			struct in_addr v4;
			/* The IPv4 address is the low 32 bits of the mapped address. */
			v4.s_addr=((uint32_t *)&addr->ipv6)[3];
			return ping4(v4,timeout,rep);
		} else
			return ping6(addr->ipv6,timeout,rep);
	}
#endif
	return -1;
}
#else
# error "Huh! No OS macro defined!"
#endif /*(TARGET==TARGET_LINUX) || (TARGET==TARGET_BSD) || (TARGET==TARGET_CYGWIN)*/
| 7,407 |
// Source: Apache SystemDS repository (mgd-hin/systemds) -- DependencyThreadPoolTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.test.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import org.apache.sysds.runtime.util.DependencyThreadPool;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
import org.apache.sysds.test.functions.transform.TransformFrameBuildMultithreadedTest;
import org.junit.Assert;
import org.junit.Test;
public class DependencyThreadPoolTest extends AutomatedTestBase {
	private final static String TEST_NAME = "DependencyThreadPoolTest";
	private final static String TEST_DIR = "util/";
	// Fixed: this previously used TransformFrameBuildMultithreadedTest.class.getSimpleName()
	// (a copy-paste from another test), which placed this test's artifacts in the
	// wrong per-class directory.
	private final static String TEST_CLASS_DIR = TEST_DIR + DependencyThreadPoolTest.class.getSimpleName()
		+ "/";

	@Override
	public void setUp() {
		TestUtils.clearAssertionInformation();
		addTestConfiguration(TEST_NAME, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME, new String[] {"y"}));
	}

	/**
	 * task1 (value += 5) depends on task2 (value *= 20). Since the pool must run
	 * task2 first, the multiplication hits the initial 0, and the final value is
	 * 0 * 20 + 5 = 5. If ordering were violated the result would be 100.
	 */
	@Test
	public void testSimpleDependency() throws InterruptedException, ExecutionException {
		DependencyThreadPool pool = new DependencyThreadPool(4);
		TestObj global = new TestObj();
		TestTaskAdd task1 = new TestTaskAdd(1, 5, global);
		TestTaskMult task2 = new TestTaskMult(2, 20, global);
		List<? extends Callable<?>> tasks = Arrays.asList(task1, task2);
		List<List<? extends Callable<?>>> dependencies = new ArrayList<>();
		dependencies.add(Collections.singletonList(task2));
		dependencies.add(null);
		List<Future<Future<?>>> futures = pool.submitAll(tasks, dependencies);
		for(Future<Future<?>> ff : futures) {
			ff.get().get();
		}
		Assert.assertEquals(5, global.value);
	}

	/**
	 * Dependency graph: task2 (+5) first, then task1 (*20) and task3 (*20),
	 * finally task4 (+10) after both multiplications.
	 * Expected: ((0 + 5) * 20 * 20) + 10 = 2010.
	 */
	@Test
	public void testMultipleDependency() throws InterruptedException, ExecutionException {
		DependencyThreadPool pool = new DependencyThreadPool(4);
		TestObj global = new TestObj();
		TestTaskMult task1 = new TestTaskMult(1, 20, global);
		TestTaskAdd task2 = new TestTaskAdd(2, 5, global);
		TestTaskMult task3 = new TestTaskMult(3, 20, global);
		TestTaskAdd task4 = new TestTaskAdd(4, 10, global);
		List<? extends Callable<?>> tasks = Arrays.asList(task1, task2, task3, task4);
		List<List<? extends Callable<?>>> dependencies = new ArrayList<>();
		dependencies.add(Collections.singletonList(task2));
		dependencies.add(null);
		dependencies.add(Collections.singletonList(task2));
		dependencies.add(Arrays.asList(task3, task1));
		List<Future<Future<?>>> futures = pool.submitAll(tasks, dependencies);
		for(Future<Future<?>> ff : futures) {
			ff.get().get();
		}
		Assert.assertEquals(2010, global.value);
	}

	/** Shared mutable accumulator the tasks operate on; mutations are synchronized. */
	private static class TestObj {
		public int value = 0;

		private void add(int v) {
			synchronized(this) {
				value += v;
			}
		}

		private void mult(int v) {
			synchronized(this) {
				value *= v;
			}
		}
	}

	/** Sleeps for {@code time} ms, then adds {@code time} to the shared accumulator. */
	private static class TestTaskAdd implements Callable<Integer> {
		int _id;
		int _time;
		TestObj _global;

		public TestTaskAdd(int id, int time, TestObj global) {
			_id = id;
			_time = time;
			_global = global;
		}

		@Override
		public Integer call() throws Exception {
			Thread.sleep(_time);
			_global.add(_time);
			return _id;
		}
	}

	/** Sleeps for {@code time} ms, then multiplies the shared accumulator by {@code time}. */
	private static class TestTaskMult implements Callable<Integer> {
		int _id;
		int _time;
		TestObj _global;

		public TestTaskMult(int id, int time, TestObj global) {
			_id = id;
			_time = time;
			_global = global;
		}

		@Override
		public Integer call() throws Exception {
			Thread.sleep(_time);
			_global.mult(_time);
			return _id;
		}
	}
}
| 1,480 |
// ---- competitive-programming solution (dice-sum compositions) ----
/**
🍪 the_hyp0cr1t3
🍪 06.12.2020 17:26:32
**/
#ifdef W
#include "k_II.h"
#else
#include <bits/stdc++.h>
using namespace std;
#endif
#define pb emplace_back
#define sz(x) int(x.size())
#define all(x) x.begin(), x.end()
const int64_t DESPACITO = 2e18;
const int INF = 2e9, MOD = 1e9+7;
const int N = 1e6 + 10;
int dp[N][6];
// Counts the compositions of n into parts 1..6 (ordered sequences of dice
// rolls summing to n), modulo MOD.
int main() {
    cin.tie(nullptr)->sync_with_stdio(false);

    int n;
    cin >> n;

    // dp[total][last] = number of ways to reach `total` where the most recent
    // part was last+1 (parts range over 1..6).
    dp[0][0] = 1;
    for(int total = 0; total < n; total++) {
        for(int last = 0; last < 6; last++) {
            int ways = dp[total][last];
            for(int nxt = 0; nxt < 6; nxt++)
                dp[total + nxt + 1][nxt] = (dp[total + nxt + 1][nxt] + ways) % MOD;
        }
    }

    int answer = 0;
    for(int last = 0; last < 6; last++)
        answer = (answer + dp[n][last]) % MOD;

    cout << answer;
}
/* Source file: open-metadata-implementation/access-services/community-profile/community-profile-client/src/main/java/org/odpi/openmetadata/accessservices/communityprofile/client/UserIdentityManagement.java */
/* SPDX-License-Identifier: Apache 2.0 */
/* Copyright Contributors to the ODPi Egeria project. */
package org.odpi.openmetadata.accessservices.communityprofile.client;
import org.odpi.openmetadata.accessservices.communityprofile.api.UserIdentityManagementInterface;
import org.odpi.openmetadata.accessservices.communityprofile.client.rest.CommunityProfileRESTClient;
import org.odpi.openmetadata.accessservices.communityprofile.metadataelement.UserIdentityElement;
import org.odpi.openmetadata.accessservices.communityprofile.properties.UserIdentityProperties;
import org.odpi.openmetadata.accessservices.communityprofile.rest.*;
import org.odpi.openmetadata.commonservices.ffdc.InvalidParameterHandler;
import org.odpi.openmetadata.commonservices.ffdc.rest.GUIDResponse;
import org.odpi.openmetadata.commonservices.ffdc.rest.NameRequestBody;
import org.odpi.openmetadata.commonservices.ffdc.rest.SearchStringRequestBody;
import org.odpi.openmetadata.frameworks.auditlog.AuditLog;
import org.odpi.openmetadata.frameworks.connectors.ffdc.InvalidParameterException;
import org.odpi.openmetadata.frameworks.connectors.ffdc.PropertyServerException;
import org.odpi.openmetadata.frameworks.connectors.ffdc.UserNotAuthorizedException;
import java.util.List;
/**
 * UserIdentityManagement is the client for explicitly managing the user identity entities and associating them with
 * profiles. It is typically used when the relationship between user identities and profiles are many to one.
 */
public class UserIdentityManagement implements UserIdentityManagementInterface
{
    // All collaborators are fixed at construction time; declaring them final makes
    // the client effectively immutable (previously they were mutable fields).
    private final String                     serverName;               /* Initialized in constructor */
    private final String                     serverPlatformURLRoot;    /* Initialized in constructor */
    private final CommunityProfileRESTClient restClient;               /* Initialized in constructor */

    private final InvalidParameterHandler invalidParameterHandler = new InvalidParameterHandler();

    private final String urlTemplatePrefix = "/servers/{0}/open-metadata/access-services/community-profile/users/{1}/user-identities";


    /**
     * Create a new client with no authentication embedded in the HTTP request.
     *
     * @param serverName name of the server to connect to
     * @param serverPlatformURLRoot the network address of the server running the OMAS REST servers
     *
     * @throws InvalidParameterException bad input parameters
     */
    public UserIdentityManagement(String serverName,
                                  String serverPlatformURLRoot) throws InvalidParameterException
    {
        final String methodName = "Constructor (no security)";

        invalidParameterHandler.validateOMAGServerPlatformURL(serverPlatformURLRoot, serverName, methodName);

        this.serverName = serverName;
        this.serverPlatformURLRoot = serverPlatformURLRoot;
        this.restClient = new CommunityProfileRESTClient(serverName, serverPlatformURLRoot);
    }


    /**
     * Create a new client with no authentication embedded in the HTTP request.
     *
     * @param serverName name of the server to connect to
     * @param serverPlatformURLRoot the network address of the server running the OMAS REST servers
     * @param auditLog logging destination
     * @throws InvalidParameterException there is a problem creating the client-side components to issue any
     * REST API calls.
     */
    public UserIdentityManagement(String   serverName,
                                  String   serverPlatformURLRoot,
                                  AuditLog auditLog) throws InvalidParameterException
    {
        final String methodName = "Constructor (no security)";

        invalidParameterHandler.validateOMAGServerPlatformURL(serverPlatformURLRoot, serverName, methodName);

        this.serverName = serverName;
        this.serverPlatformURLRoot = serverPlatformURLRoot;
        this.restClient = new CommunityProfileRESTClient(serverName, serverPlatformURLRoot, auditLog);
    }


    /**
     * Create a new client that passes userId and password in each HTTP request. This is the
     * userId/password of the calling server. The end user's userId is sent on each request.
     *
     * @param serverName name of the server to connect to
     * @param serverPlatformURLRoot the network address of the server running the OMAS REST servers
     * @param userId caller's userId embedded in all HTTP requests
     * @param password caller's userId embedded in all HTTP requests
     *
     * @throws InvalidParameterException bad input parameters
     */
    public UserIdentityManagement(String serverName,
                                  String serverPlatformURLRoot,
                                  String userId,
                                  String password) throws InvalidParameterException
    {
        final String methodName = "Constructor (with security)";

        invalidParameterHandler.validateOMAGServerPlatformURL(serverPlatformURLRoot, serverName, methodName);

        this.serverName = serverName;
        this.serverPlatformURLRoot = serverPlatformURLRoot;
        this.restClient = new CommunityProfileRESTClient(serverName, serverPlatformURLRoot, userId, password);
    }


    /**
     * Create a new client that passes userId and password in each HTTP request. This is the
     * userId/password of the calling server. The end user's userId is sent on each request.
     *
     * @param serverName name of the server to connect to
     * @param serverPlatformURLRoot the network address of the server running the OMAS REST servers
     * @param userId caller's userId embedded in all HTTP requests
     * @param password caller's userId embedded in all HTTP requests
     * @param auditLog logging destination
     *
     * @throws InvalidParameterException bad input parameters
     */
    public UserIdentityManagement(String   serverName,
                                  String   serverPlatformURLRoot,
                                  String   userId,
                                  String   password,
                                  AuditLog auditLog) throws InvalidParameterException
    {
        final String methodName = "Constructor (with security)";

        invalidParameterHandler.validateOMAGServerPlatformURL(serverPlatformURLRoot, serverName, methodName);

        this.serverName = serverName;
        this.serverPlatformURLRoot = serverPlatformURLRoot;
        this.restClient = new CommunityProfileRESTClient(serverName, serverPlatformURLRoot, userId, password, auditLog);
    }


    /**
     * Create a new client that passes userId and password in each HTTP request. This is the
     * userId/password of the calling server. The end user's userId is sent on each request.
     *
     * @param serverName name of the server to connect to
     * @param serverPlatformURLRoot the network address of the server running the OMAS REST servers
     * @param restClient pre-initialized REST client
     * @param maxPageSize pre-initialized parameter limit
     * @throws InvalidParameterException there is a problem with the information about the remote OMAS
     */
    public UserIdentityManagement(String                     serverName,
                                  String                     serverPlatformURLRoot,
                                  CommunityProfileRESTClient restClient,
                                  int                        maxPageSize) throws InvalidParameterException
    {
        final String methodName = "Constructor (with security)";

        invalidParameterHandler.setMaxPagingSize(maxPageSize);
        invalidParameterHandler.validateOMAGServerPlatformURL(serverPlatformURLRoot, serverName, methodName);

        this.serverName = serverName;
        this.serverPlatformURLRoot = serverPlatformURLRoot;
        this.restClient = restClient;
    }


    /* ========================================================
     * The metadata source represents the third party technology this integration processing is connecting to
     */

    /**
     * Create a UserIdentity. This is not connected to a profile.
     *
     * @param userId the name of the calling user.
     * @param externalSourceGUID unique identifier of software server capability representing the caller
     * @param externalSourceName unique name of software server capability representing the caller
     * @param newIdentity properties for the new userIdentity.
     *
     * @return unique identifier of the UserIdentity
     *
     * @throws InvalidParameterException one of the parameters is invalid.
     * @throws PropertyServerException there is a problem retrieving information from the property server(s).
     * @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
     */
    public String createUserIdentity(String                 userId,
                                     String                 externalSourceGUID,
                                     String                 externalSourceName,
                                     UserIdentityProperties newIdentity) throws InvalidParameterException,
                                                                                PropertyServerException,
                                                                                UserNotAuthorizedException
    {
        final String methodName                  = "createUserIdentity";
        final String propertiesParameterName     = "newIdentity";
        final String qualifiedNameParameterName  = "newIdentity.qualifiedName";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateObject(newIdentity, propertiesParameterName, methodName);
        invalidParameterHandler.validateName(newIdentity.getQualifiedName(), qualifiedNameParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix;

        UserIdentityRequestBody requestBody = new UserIdentityRequestBody();
        requestBody.setExternalSourceGUID(externalSourceGUID);
        requestBody.setExternalSourceName(externalSourceName);
        requestBody.setProperties(newIdentity);

        GUIDResponse restResult = restClient.callGUIDPostRESTCall(methodName, urlTemplate, requestBody, serverName, userId);

        return restResult.getGUID();
    }


    /**
     * Update a UserIdentity.
     *
     * @param userId the name of the calling user
     * @param externalSourceGUID unique identifier of software server capability representing the caller
     * @param externalSourceName unique name of software server capability representing the caller
     * @param userIdentityGUID unique identifier of the UserIdentity
     * @param isMergeUpdate should the supplied properties be overlaid on the existing properties (true) or replace them (false)
     * @param properties updated properties for the new userIdentity
     *
     * @throws InvalidParameterException one of the parameters is invalid.
     * @throws PropertyServerException there is a problem retrieving information from the property server(s).
     * @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
     */
    public void updateUserIdentity(String                 userId,
                                   String                 externalSourceGUID,
                                   String                 externalSourceName,
                                   String                 userIdentityGUID,
                                   boolean                isMergeUpdate,
                                   UserIdentityProperties properties) throws InvalidParameterException,
                                                                             PropertyServerException,
                                                                             UserNotAuthorizedException
    {
        final String methodName                  = "updateUserIdentity";
        final String guidParameterName           = "userIdentityGUID";
        final String propertiesParameterName     = "properties";
        final String qualifiedNameParameterName  = "properties.qualifiedName";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateGUID(userIdentityGUID, guidParameterName, methodName);
        invalidParameterHandler.validateObject(properties, propertiesParameterName, methodName);

        /* The qualified name is only mandatory on a full replace; a merge update
         * may legitimately omit it. */
        if (! isMergeUpdate)
        {
            invalidParameterHandler.validateName(properties.getQualifiedName(), qualifiedNameParameterName, methodName);
        }

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/{2}?isMergeUpdate={3}";

        UserIdentityRequestBody requestBody = new UserIdentityRequestBody();
        requestBody.setExternalSourceGUID(externalSourceGUID);
        requestBody.setExternalSourceName(externalSourceName);
        requestBody.setProperties(properties);

        restClient.callVoidPostRESTCall(methodName, urlTemplate, requestBody, serverName, userId, userIdentityGUID, isMergeUpdate);
    }


    /**
     * Remove a user identity object. This will fail if the profile would be left without an
     * associated user identity.
     *
     * @param userId the name of the calling user
     * @param externalSourceGUID unique identifier of software server capability representing the caller
     * @param externalSourceName unique name of software server capability representing the caller
     * @param userIdentityGUID unique identifier of the UserIdentity
     *
     * @throws InvalidParameterException one of the parameters is invalid.
     * @throws PropertyServerException there is a problem retrieving information from the property server(s).
     * @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
     */
    public void deleteUserIdentity(String userId,
                                   String externalSourceGUID,
                                   String externalSourceName,
                                   String userIdentityGUID) throws InvalidParameterException,
                                                                   PropertyServerException,
                                                                   UserNotAuthorizedException
    {
        final String methodName        = "deleteUserIdentity";
        final String guidParameterName = "userIdentityGUID";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateGUID(userIdentityGUID, guidParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/{2}";

        MetadataSourceRequestBody requestBody = new MetadataSourceRequestBody();
        requestBody.setExternalSourceGUID(externalSourceGUID);
        requestBody.setExternalSourceName(externalSourceName);

        restClient.callVoidPostRESTCall(methodName, urlTemplate, requestBody, serverName, userId, userIdentityGUID);
    }


    /**
     * Link a user identity to a profile. This will fail if the user identity is already connected to
     * a profile.
     *
     * @param userId the name of the calling user.
     * @param externalSourceGUID unique identifier of software server capability representing the caller
     * @param externalSourceName unique name of software server capability representing the caller
     * @param userIdentityGUID additional userId for the profile.
     * @param profileGUID the profile to add the identity to.
     *
     * @throws InvalidParameterException one of the parameters is invalid.
     * @throws PropertyServerException there is a problem retrieving information from the property server(s).
     * @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
     */
    public void addIdentityToProfile(String userId,
                                     String externalSourceGUID,
                                     String externalSourceName,
                                     String userIdentityGUID,
                                     String profileGUID) throws InvalidParameterException,
                                                                PropertyServerException,
                                                                UserNotAuthorizedException
    {
        final String methodName                    = "addIdentityToProfile";
        final String profileGUIDParameterName      = "profileGUID";
        final String userIdentityGUIDParameterName = "userIdentityGUID";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateGUID(profileGUID, profileGUIDParameterName, methodName);
        invalidParameterHandler.validateGUID(userIdentityGUID, userIdentityGUIDParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/{2}/profiles/{3}/link";

        MetadataSourceRequestBody requestBody = new MetadataSourceRequestBody();
        requestBody.setExternalSourceGUID(externalSourceGUID);
        requestBody.setExternalSourceName(externalSourceName);

        restClient.callVoidPostRESTCall(methodName, urlTemplate, requestBody, serverName, userId, userIdentityGUID, profileGUID);
    }


    /**
     * Unlink a user identity from a profile. This will fail if the profile would be left without an
     * associated user identity.
     *
     * @param userId the name of the calling user.
     * @param externalSourceGUID unique identifier of software server capability representing the caller
     * @param externalSourceName unique name of software server capability representing the caller
     * @param userIdentityGUID unique identifier of the UserIdentity
     * @param profileGUID profile to remove it from.
     *
     * @throws InvalidParameterException one of the parameters is invalid.
     * @throws PropertyServerException there is a problem retrieving information from the property server(s).
     * @throws UserNotAuthorizedException the requesting user is not authorized to issue this request.
     */
    public void removeIdentityFromProfile(String userId,
                                          String externalSourceGUID,
                                          String externalSourceName,
                                          String userIdentityGUID,
                                          String profileGUID) throws InvalidParameterException,
                                                                     PropertyServerException,
                                                                     UserNotAuthorizedException
    {
        final String methodName                    = "removeIdentityFromProfile";
        final String profileGUIDParameterName      = "profileGUID";
        final String userIdentityGUIDParameterName = "userIdentityGUID";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateGUID(profileGUID, profileGUIDParameterName, methodName);
        invalidParameterHandler.validateGUID(userIdentityGUID, userIdentityGUIDParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/{2}/profiles/{3}/unlink";

        MetadataSourceRequestBody requestBody = new MetadataSourceRequestBody();
        requestBody.setExternalSourceGUID(externalSourceGUID);
        requestBody.setExternalSourceName(externalSourceName);

        restClient.callVoidPostRESTCall(methodName, urlTemplate, requestBody, serverName, userId, userIdentityGUID, profileGUID);
    }


    /**
     * Retrieve the list of user identity metadata elements that contain the search string.
     * The search string is treated as a regular expression.
     *
     * @param userId calling user
     * @param searchString string to find in the properties
     * @param startFrom paging start point
     * @param pageSize maximum results that can be returned
     *
     * @return list of matching metadata elements
     *
     * @throws InvalidParameterException one of the parameters is invalid
     * @throws UserNotAuthorizedException the user is not authorized to issue this request
     * @throws PropertyServerException there is a problem reported in the open metadata server(s)
     */
    public List<UserIdentityElement> findUserIdentities(String userId,
                                                        String searchString,
                                                        int    startFrom,
                                                        int    pageSize) throws InvalidParameterException,
                                                                                UserNotAuthorizedException,
                                                                                PropertyServerException
    {
        final String methodName                = "findUserIdentities";
        final String searchStringParameterName = "searchString";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateSearchString(searchString, searchStringParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/by-search-string?startFrom={2}&pageSize={3}";

        SearchStringRequestBody requestBody = new SearchStringRequestBody();
        requestBody.setSearchString(searchString);
        requestBody.setSearchStringParameterName(searchStringParameterName);

        UserIdentityListResponse restResult = restClient.callUserIdentityListPostRESTCall(methodName,
                                                                                          urlTemplate,
                                                                                          requestBody,
                                                                                          serverName,
                                                                                          userId,
                                                                                          Integer.toString(startFrom),
                                                                                          Integer.toString(pageSize));

        return restResult.getElements();
    }


    /**
     * Retrieve the list of user identity metadata elements with a matching qualified name.
     * There are no wildcards supported on this request.
     *
     * @param userId calling user
     * @param name name to search for
     * @param startFrom paging start point
     * @param pageSize maximum results that can be returned
     *
     * @return list of matching metadata elements
     *
     * @throws InvalidParameterException one of the parameters is invalid
     * @throws UserNotAuthorizedException the user is not authorized to issue this request
     * @throws PropertyServerException there is a problem reported in the open metadata server(s)
     */
    public List<UserIdentityElement> getUserIdentitiesByName(String userId,
                                                             String name,
                                                             int    startFrom,
                                                             int    pageSize) throws InvalidParameterException,
                                                                                     UserNotAuthorizedException,
                                                                                     PropertyServerException
    {
        final String methodName        = "getUserIdentitiesByName";
        final String namePropertyName  = "qualifiedName";
        final String nameParameterName = "name";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateName(name, nameParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/by-name?startFrom={2}&pageSize={3}";

        NameRequestBody requestBody = new NameRequestBody();
        requestBody.setName(name);
        requestBody.setNamePropertyName(namePropertyName);
        requestBody.setNameParameterName(nameParameterName);

        UserIdentityListResponse restResult = restClient.callUserIdentityListPostRESTCall(methodName,
                                                                                          urlTemplate,
                                                                                          requestBody,
                                                                                          serverName,
                                                                                          userId,
                                                                                          Integer.toString(startFrom),
                                                                                          Integer.toString(pageSize));

        return restResult.getElements();
    }


    /**
     * Retrieve the userIdentity metadata element with the supplied unique identifier.
     *
     * @param userId calling user
     * @param userIdentityGUID unique identifier of the requested metadata element
     *
     * @return matching metadata element
     *
     * @throws InvalidParameterException one of the parameters is invalid
     * @throws UserNotAuthorizedException the user is not authorized to issue this request
     * @throws PropertyServerException there is a problem reported in the open metadata server(s)
     */
    public UserIdentityElement getUserIdentityByGUID(String userId,
                                                     String userIdentityGUID) throws InvalidParameterException,
                                                                                     UserNotAuthorizedException,
                                                                                     PropertyServerException
    {
        final String methodName                    = "getUserIdentityByGUID";
        final String userIdentityGUIDParameterName = "userIdentityGUID";

        invalidParameterHandler.validateUserId(userId, methodName);
        invalidParameterHandler.validateGUID(userIdentityGUID, userIdentityGUIDParameterName, methodName);

        final String urlTemplate = serverPlatformURLRoot + urlTemplatePrefix + "/{2}";

        UserIdentityResponse restResult = restClient.callUserIdentityGetRESTCall(methodName,
                                                                                 urlTemplate,
                                                                                 serverName,
                                                                                 userId,
                                                                                 userIdentityGUID);

        return restResult.getElement();
    }
}
| 11,263 |
# MMDetection/MMFewShot-style config for training a base model on VOC split 2.
# Fixed: the first line carried a stray extraction artifact ("376 | ") that made
# the file a SyntaxError.
_base_ = [
    '../../../_base_/datasets/fine_tune_based/base_voc.py',
    '../../../_base_/schedules/schedule.py',
    '../../../_base_/models/faster_rcnn_r50_caffe_fpn.py',
    '../../../_base_/default_runtime.py'
]
# classes splits are predefined in FewShotVOCDataset
data = dict(
    train=dict(classes='BASE_CLASSES_SPLIT2'),
    val=dict(classes='BASE_CLASSES_SPLIT2'),
    test=dict(classes='BASE_CLASSES_SPLIT2'))
lr_config = dict(warmup_iters=100, step=[12000, 16000])
runner = dict(max_iters=18000)
# model settings
model = dict(
    pretrained='open-mmlab://detectron2/resnet101_caffe',
    backbone=dict(depth=101),
    roi_head=dict(bbox_head=dict(num_classes=15)))
# using regular sampler can get a better base model
use_infinite_sampler = False
| 318 |
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef SERVICES_NETWORK_PROXY_RESOLVING_CLIENT_SOCKET_FACTORY_H_
#define SERVICES_NETWORK_PROXY_RESOLVING_CLIENT_SOCKET_FACTORY_H_
#include <memory>
#include "base/component_export.h"
#include "base/memory/raw_ptr.h"
#include "base/memory/ref_counted.h"
#include "net/socket/connect_job_factory.h"
#include "net/ssl/ssl_config.h"
#include "url/gurl.h"
namespace net {
struct CommonConnectJobParams;
class HttpNetworkSession;
class NetworkIsolationKey;
class URLRequestContext;
} // namespace net
namespace network {
class ProxyResolvingClientSocket;
// Factory for sockets that perform proxy resolution before connecting.
// Header only declares the interface; behavior lives in the .cc file.
class COMPONENT_EXPORT(NETWORK_SERVICE) ProxyResolvingClientSocketFactory {
 public:
  // Constructs a ProxyResolvingClientSocketFactory. This factory shares
  // network session params with |request_context|, but keeps separate socket
  // pools by instantiating and owning a separate |network_session_|.
  explicit ProxyResolvingClientSocketFactory(
      net::URLRequestContext* request_context);

  // Copying is disallowed: the factory owns a network session and socket pools.
  ProxyResolvingClientSocketFactory(const ProxyResolvingClientSocketFactory&) =
      delete;
  ProxyResolvingClientSocketFactory& operator=(
      const ProxyResolvingClientSocketFactory&) = delete;

  ~ProxyResolvingClientSocketFactory();

  // Creates a socket. |url|'s host and port specify where a connection will be
  // established to. The full URL will be only used for proxy resolution. Caller
  // doesn't need to explicitly sanitize the url, any sensitive data (like
  // embedded usernames and passwords), and local data (i.e. reference fragment)
  // will be sanitized by net::ProxyService::ResolveProxyHelper() before the url
  // is disclosed to the proxy.
  //
  // |network_isolation_key| indicates the network shard to use for storing
  // shared network state (DNS cache entries, shared H2/QUIC proxy connections,
  // etc). Proxy connections will only be shared with other
  // ProxyResolvingClientSockets, not with standards HTTP/HTTPS requests.
  //
  // If |use_tls| is true, TLS connect will be used in addition to TCP connect.
  // The URLRequestContext's SSL configurations will be respected when
  // establishing a TLS connection.
  std::unique_ptr<ProxyResolvingClientSocket> CreateSocket(
      const GURL& url,
      const net::NetworkIsolationKey& network_isolation_key,
      bool use_tls);

 private:
  // Owned session with its own socket pools, separate from |request_context_|'s.
  std::unique_ptr<net::HttpNetworkSession> network_session_;
  // Shared parameters handed to each ConnectJob created by this factory.
  std::unique_ptr<net::CommonConnectJobParams> common_connect_job_params_;
  // Not owned; must outlive this factory.
  raw_ptr<net::URLRequestContext> request_context_;
  std::unique_ptr<net::ConnectJobFactory> connect_job_factory_ =
      std::make_unique<net::ConnectJobFactory>();
};
#endif // SERVICES_NETWORK_PROXY_RESOLVING_CLIENT_SOCKET_FACTORY_H_
| 861 |
package org.fluentlenium.adapter;
@FunctionalInterface
public interface TriConsumer<U, V, W> {
void accept(U arg1, V arg2, W arg3);
}
| 52 |
/* Source repository: pyracanda/runtime (libunwind hppa offsets) */
/* Byte offsets into the Linux hppa/parisc ucontext_t structure.
   Values are fixed by the kernel ABI -- do not change without checking
   the corresponding kernel headers. */
#define LINUX_UC_FLAGS_OFF	0x000
#define LINUX_UC_LINK_OFF	0x004
#define LINUX_UC_STACK_OFF	0x008
#define LINUX_UC_MCONTEXT_OFF	0x018
#define LINUX_UC_SIGMASK_OFF	0x1b8

/* Byte offsets into the embedded sigcontext: flags, general registers,
   floating-point registers, instruction address space/offset queues and
   the shift amount register. */
#define LINUX_SC_FLAGS_OFF	0x000
#define LINUX_SC_GR_OFF		0x004
#define LINUX_SC_FR_OFF		0x088
#define LINUX_SC_IASQ_OFF	0x188
#define LINUX_SC_IAOQ_OFF	0x190
#define LINUX_SC_SAR_OFF	0x198

/* The signal frame contains 4 words of space for the sigreturn
   trampoline, the siginfo structure, and then the sigcontext
   structure. See include/asm-parisc/compat_rt_sigframe.h. */
#define LINUX_RT_SIGFRAME_UC_OFF	0xac
| 328 |
//
// DRDViewController.h
// Durandal
//
// Created by cendywang on 12/10/2015.
// Copyright (c) 2015 cendywang. All rights reserved.
//
@import UIKit;
/// Root view controller of the Durandal example app. Declares no API beyond
/// UIViewController -- behavior lives in the implementation file (TODO confirm
/// against DRDViewController.m).
@interface DRDViewController : UIViewController

@end
| 77 |
/* Source file: testing/unittest/runtime_static_assert.h */
#pragma once
#include <string>
#include <thrust/detail/static_assert.h>
/* Replace Thrust's compile-time assertions with a runtime hook so the test
   suite can observe a triggered THRUST_STATIC_ASSERT instead of failing to
   compile. The msg argument is intentionally ignored. */
#undef THRUST_STATIC_ASSERT
#undef THRUST_STATIC_ASSERT_MSG

#define THRUST_STATIC_ASSERT(B) unittest::assert_static((B), __FILE__, __LINE__);
#define THRUST_STATIC_ASSERT_MSG(B, msg) unittest::assert_static((B), __FILE__, __LINE__);

namespace unittest
{
    // Runtime stand-in for a static assertion: throws on the host, records
    // into a device-side exception slot on the device (see definition below).
    __host__ __device__
    void assert_static(bool condition, const char * filename, int lineno);
}

#include <thrust/device_new.h>
#include <thrust/device_delete.h>

#if THRUST_DEVICE_SYSTEM == THRUST_DEVICE_SYSTEM_CUDA
/* CUDA path: allocate a device-side exception object, point the global
   device_exception slot at it, run X, then check both the host-thrown
   exception and the device-recorded one before failing the unit test if
   neither fired. */
#define ASSERT_STATIC_ASSERT(X) \
    { \
    bool triggered = false; \
    typedef unittest::static_assert_exception ex_t; \
    thrust::device_ptr<ex_t> device_ptr = thrust::device_new<ex_t>(); \
    ex_t* raw_ptr = thrust::raw_pointer_cast(device_ptr); \
    ::cudaMemcpyToSymbol(unittest::detail::device_exception, &raw_ptr, sizeof(ex_t*)); \
    try { X; } catch (ex_t) { triggered = true; } \
    if (!triggered) { \
        triggered = static_cast<ex_t>(*device_ptr).triggered; \
    } \
    thrust::device_free(device_ptr); \
    raw_ptr = NULL; \
    ::cudaMemcpyToSymbol(unittest::detail::device_exception, &raw_ptr, sizeof(ex_t*)); \
    if (!triggered) { unittest::UnitTestFailure f; f << "[" << __FILE__ << ":" << __LINE__ << "] did not trigger a THRUST_STATIC_ASSERT"; throw f; } \
    }
#else
/* Host-only path: only the thrown exception can signal the assertion. */
#define ASSERT_STATIC_ASSERT(X) \
    { \
    bool triggered = false; \
    typedef unittest::static_assert_exception ex_t; \
    try { X; } catch (ex_t) { triggered = true; } \
    if (!triggered) { unittest::UnitTestFailure f; f << "[" << __FILE__ << ":" << __LINE__ << "] did not trigger a THRUST_STATIC_ASSERT"; throw f; } \
    }
#endif
namespace unittest
{
    // Exception carrying the location of a (runtime-emulated) static assertion
    // failure. Default-constructed instances act as "nothing fired yet" slots;
    // the two-argument constructor marks the exception as triggered.
    class static_assert_exception
    {
    public:
        __host__ __device__
        static_assert_exception() : triggered(false)
        {
        }

        __host__ __device__
        static_assert_exception(const char * filename, int lineno)
            : triggered(true), filename(filename), lineno(lineno)
        {
        }

        bool triggered;        // true once an assertion has fired
        const char * filename; // location of the failed assertion
        int lineno;
    };

    namespace detail
    {
#if THRUST_HOST_COMPILER == THRUST_HOST_COMPILER_GCC || \
    THRUST_HOST_COMPILER == THRUST_HOST_COMPILER_CLANG
        __attribute__((used))
#endif
        // Device-global pointer to the exception slot written by device-side
        // assert_static; set up and torn down by ASSERT_STATIC_ASSERT.
        __device__ static static_assert_exception* device_exception = NULL;
    }

    __host__ __device__
    void assert_static(bool condition, const char * filename, int lineno)
    {
        if (!condition)
        {
            static_assert_exception ex(filename, lineno);

#ifdef __CUDA_ARCH__
            // Device code cannot throw; record into the preallocated slot.
            *detail::device_exception = ex;
#else
            throw ex;
#endif
        }
    }
}
| 1,270 |
701 | package top.easelink.framework.customview;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.View;
import androidx.annotation.ColorInt;
import java.util.Random;
import top.easelink.framework.R;
/**
* Thanks to https://github.com/KingJA/StampView
*/
public class ELStampView extends View {
private Paint mOutStrokePaint;
private int mOutStrokeWidth = 20;
private int mInStrokeWidth = 10;
private int mOffSet;
private int mWidth;
private int mHeight;
private float mTextSize;
private Paint mInStrokePaint;
private Paint mTextPaint;
private float mTextHeightOffset;
private int mBorderColor;
private String mStampText;
private Paint mSpotPaint;
private boolean drawSpotEnable =false;
private float mTextSizeScale = 0.4f;
public ELStampView(Context context) {
this(context,null);
}
public ELStampView(Context context, AttributeSet attrs) {
this(context, attrs,0);
}
public ELStampView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.ELStampView);
mBorderColor = typedArray.getColor(R.styleable.ELStampView_stampColor, Color.BLACK);
mStampText = typedArray.getString(R.styleable.ELStampView_stampText);
mStampText = mStampText==null? "" : mStampText;
typedArray.recycle();
}
public void setText(String text) {
if (!TextUtils.isEmpty(text)) {
mStampText = text;
}
}
public void setTextSizeScale(float scale) {
mTextSizeScale = scale;
}
public void setStampColor(@ColorInt int color) {
mBorderColor = color;
}
public void setDrawSpotEnable(boolean enable) {
drawSpotEnable = enable;
}
private void initStampView() {
mSpotPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mSpotPaint.setColor(Color.WHITE);
mOutStrokePaint = new Paint();
mOutStrokePaint.setColor(mBorderColor);
mOutStrokePaint.setAntiAlias(true);
mOutStrokePaint.setStrokeWidth(mOutStrokeWidth);
mOutStrokePaint.setStyle(Paint.Style.STROKE);
mOffSet = (int) (mOutStrokeWidth*0.5f);
mInStrokePaint = new Paint();
mInStrokePaint.setAntiAlias(true);
mInStrokePaint.setColor(mBorderColor);
mInStrokePaint.setStrokeWidth(mInStrokeWidth);
mInStrokePaint.setStyle(Paint.Style.STROKE);
mTextPaint = new Paint();
mTextPaint.setAntiAlias(true);
mTextPaint.setColor(mBorderColor);
mTextPaint.setTextSize(mTextSize);
mTextPaint.setFakeBoldText(true);
mTextHeightOffset = -(mTextPaint.ascent() + mTextPaint.descent()) * 0.5f;
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
mWidth = getMeasuredWidth();
mHeight = getMeasuredHeight();
mOutStrokeWidth= (int) (mWidth * 1.0f / 18);
mInStrokeWidth= (int) (mWidth *1.0f / 36);
mTextSize= mWidth * mTextSizeScale;
initStampView();
}
@Override
protected void onDraw(Canvas canvas) {
initStampView();
super.onDraw(canvas);
canvas.drawCircle(mWidth*0.5f,mWidth*0.5f,mWidth*0.5f-mOffSet,mOutStrokePaint);
canvas.drawCircle(mWidth*0.5f,mWidth*0.5f,mWidth*0.5f-mOutStrokeWidth-mOutStrokeWidth,mInStrokePaint);
float tabTextWidth = mTextPaint.measureText(mStampText);
canvas.save();
canvas.rotate(-30,mWidth * 0.5f,mWidth * 0.5f);
canvas.drawText(mStampText, mWidth * 0.5f - 0.5f * tabTextWidth, mHeight * 0.5f + mTextHeightOffset, mTextPaint);
canvas.restore();
if (drawSpotEnable) {
drawSpot(canvas);
}
}
private void drawSpot(Canvas canvas) {
for (int i = 0; i < 20; i++) {
Path mirrorPath = getRandomSpotPath(
3,
new Random().nextInt(mWidth/2) + new Random().nextInt(mWidth/2),
new Random().nextInt(mHeight/2)+new Random().nextInt(mHeight/2),
new Random().nextInt(3) + 3);
canvas.drawPath(mirrorPath,mSpotPaint);
}
}
public Path getRandomSpotPath(int sides, int centerX, int centerY, int radius) {
Path path = new Path();
float offsetAngle = 0;
offsetAngle = (float) (Math.PI * offsetAngle / 180);
for (int i = 0; i < sides; i++) {
float x = (float) (centerX + radius * Math.cos(offsetAngle));
float y = (float) (centerY + radius * Math.sin(offsetAngle));
offsetAngle += 2 * Math.PI / sides;
if (i == 0) {
path.moveTo(x, y);
} else {
path.lineTo(x+new Random().nextInt(8)+3, y+new Random().nextInt(8)+3);
}
}
path.close();
return path;
}
public void reDraw() {
invalidate();
}
} | 2,390 |
1,664 | <reponame>likenamehaojie/Apache-Ambari-ZH<filename>ambari-server/src/test/java/org/apache/ambari/server/api/AmbariErrorHandlerTest.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.api;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.util.Map;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.ambari.server.security.authentication.jwt.JwtAuthenticationPropertiesProvider;
import org.easymock.EasyMockSupport;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.servlet.DefaultServlet;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.junit.Test;
import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.WebResource;
/**
 * Boots an embedded Jetty server with {@code AmbariErrorHandler} installed and
 * verifies that a 404 response body is JSON containing "status" and "message".
 */
public class AmbariErrorHandlerTest extends EasyMockSupport {

  Gson gson = new Gson();

  @Test
  public void testHandle() throws Exception {
    // Intentionally empty placeholder.
  }

  @Test
  public void testErrorWithJetty() throws Exception {
    // Port 0 lets Jetty pick any free port; we read the real port after start().
    Server jetty = new Server(0);

    JwtAuthenticationPropertiesProvider propertiesProvider =
        createNiceMock(JwtAuthenticationPropertiesProvider.class);
    expect(propertiesProvider.get()).andReturn(null).anyTimes();
    replayAll();

    ServletContextHandler contextHandler = new ServletContextHandler(jetty, "/",
        ServletContextHandler.SECURITY | ServletContextHandler.SESSIONS);
    contextHandler.addServlet(HelloServlet.class, "/hello");
    contextHandler.addServlet(DefaultServlet.class, "/");
    contextHandler.setErrorHandler(new AmbariErrorHandler(gson, propertiesProvider));

    jetty.start();
    int port = ((ServerConnector) jetty.getConnectors()[0]).getLocalPort();

    WebResource base = new Client().resource("http://localhost:" + port + "/");

    // Sanity check: the servlet path answers 200.
    ClientResponse okResponse = base.path("hello").get(ClientResponse.class);
    assertEquals(HttpServletResponse.SC_OK, okResponse.getStatus());

    // Unmapped path must go through the custom error handler as a 404.
    ClientResponse notFoundResponse = base.path("fail").get(ClientResponse.class);
    assertEquals(HttpServletResponse.SC_NOT_FOUND, notFoundResponse.getStatus());

    try {
      String body = notFoundResponse.getEntity(String.class);
      System.out.println(body);
      Map parsed = gson.fromJson(body, Map.class);
      System.out.println(parsed);
      assertNotNull("Incorrect response status", parsed.get("status"));
      assertNotNull("Incorrect response message", parsed.get("message"));
    } catch (JsonSyntaxException e1) {
      fail("Incorrect response");
    }

    jetty.stop();
    verifyAll();
  }

  /** Minimal servlet answering any GET with the text "hello". */
  @SuppressWarnings("serial")
  public static class HelloServlet extends HttpServlet {
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
      response.setContentType("text/html");
      response.setStatus(HttpServletResponse.SC_OK);
      response.getWriter().println("hello");
    }
  }
}
| 1,282 |
358 | from copy import deepcopy
from os.path import join
from threading import Thread, Lock
from typing import List, Union
import matplotlib.pyplot as plt
import numpy as np
from sklearn.metrics import mean_squared_error
from fedot.core.pipelines.pipeline import Pipeline
from fedot.core.pipelines.node import Node
from fedot.core.data.data import InputData
from fedot.core.log import default_log, Log
from fedot.core.operations.operation_template import extract_operation_params
from fedot.core.utils import default_fedot_data_dir
from fedot.sensitivity.operations_hp_sensitivity.problem import OneOperationProblem
from fedot.sensitivity.node_sa_approaches import NodeAnalyzeApproach
from fedot.sensitivity.operations_hp_sensitivity.sa_and_sample_methods import analyze_method_by_name, \
sample_method_by_name
from fedot.sensitivity.sa_requirements import SensitivityAnalysisRequirements, HyperparamsAnalysisMetaParams
class OneOperationHPAnalyze(NodeAnalyzeApproach):
    """Hyperparameter sensitivity analysis for a single pipeline node.

    Draws hyperparameter samples for one operation (via the configured sample
    method), refits a pipeline copy per sample, scores each with MSE on the
    test data and computes sensitivity indices (via the configured analyze
    method). Optionally runs a threaded per-parameter dispersion analysis
    whose results are saved as a boxplot figure.
    """
    # Class-level lock guarding ``data_under_lock`` writes from the worker
    # threads spawned by ``_dispersion_analysis``.
    lock = Lock()
    def __init__(self, pipeline: Pipeline, train_data, test_data: InputData,
                 requirements: SensitivityAnalysisRequirements = None,
                 path_to_save=None, log: Log = None):
        """
        :param pipeline: pipeline whose node is analyzed
        :param train_data: data used to (re)fit sampled pipelines
        :param test_data: data used to score sampled pipelines
        :param requirements: analysis settings; defaults are created when None
        :param path_to_save: directory for outputs; defaults to a folder under
            the FEDOT data dir when None
        :param log: logger; a default logger is created when None
        """
        super().__init__(pipeline, train_data, test_data, path_to_save)
        requirements = SensitivityAnalysisRequirements() if requirements is None else requirements
        self.requirements: HyperparamsAnalysisMetaParams = requirements.hp_analysis_meta
        # Resolve the sampling/analysis strategy callables by configured name.
        self.analyze_method = analyze_method_by_name.get(self.requirements.analyze_method)
        self.sample_method = sample_method_by_name.get(self.requirements.sample_method)
        self.problem = None  # set in analyze()
        self.operation_type = None  # set in analyze()
        # param_name -> [normalized samples, normalized losses]; written by the
        # dispersion-analysis worker threads under ``OneOperationHPAnalyze.lock``.
        self.data_under_lock: dict = {}
        # NOTE(review): this sets ``self.path_to_save`` while
        # ``_visualize_variance`` reads ``self._path_to_save`` (presumably set
        # by the base class) — confirm both refer to the same location.
        self.path_to_save = \
            join(default_fedot_data_dir(), 'sensitivity', 'nodes_sensitivity') if path_to_save is None else path_to_save
        self.log = default_log(__name__) if log is None else log
    def analyze(self, node: Node,
                is_dispersion_analysis: bool = False) -> Union[dict, float]:
        """Run the hyperparameter sensitivity analysis for ``node``.

        :param node: pipeline node whose operation hyperparameters are varied
        :param is_dispersion_analysis: additionally run (and plot) the
            per-parameter dispersion analysis
        :return: JSON-serializable dict with the problem definition and
            sensitivity indices
        """
        # check whether the pipeline is fitted
        if not self._pipeline.fitted_on_data:
            self._pipeline.fit(self._train_data)
        # create problem
        self.operation_type = node.operation.operation_type
        self.problem = OneOperationProblem(operation_types=[self.operation_type])
        # sample hyperparameter values and score each resulting pipeline
        samples = self.sample(self.requirements.sample_size, node)
        response_matrix = self._get_response_matrix(samples)
        indices = self.analyze_method(self.problem.dictionary, samples, response_matrix)
        converted_to_json_indices = self._convert_indices_to_json(problem=self.problem,
                                                                  si=indices)
        # Analyze the dispersion of params
        if is_dispersion_analysis:
            self._dispersion_analysis(node=node,
                                      sample_size=self.requirements.sample_size)
        return converted_to_json_indices
    def sample(self, *args) -> Union[List[Pipeline], Pipeline]:
        """Build pipeline copies with sampled hyperparameters for one node.

        :param args: ``(sample_size, node)`` — number of samples to draw and
            the node whose parameters are replaced
        :return: list of pipeline copies, one per hyperparameter sample
        """
        sample_size, node = args
        samples: List[np.array] = self.sample_method(self.problem.dictionary, num_of_samples=sample_size)
        converted_samples: List[dict] = self.problem.convert_sample_to_dict(samples)
        sampled_pipelines: List[Pipeline] = self._apply_params_to_node(params=converted_samples,
                                                                       node=node)
        return sampled_pipelines
    def _apply_params_to_node(self, params: List[dict], node: Node) -> List[Pipeline]:
        """Return one deep pipeline copy per param dict, with the params set
        as ``custom_params`` on the copy's counterpart of ``node``."""
        sampled_pipelines: List[Pipeline] = list()
        for sample in params:
            copied_pipeline = deepcopy(self._pipeline)
            # Locate the node in the copy via its index in the original.
            node_id = self._pipeline.nodes.index(node)
            copied_pipeline.nodes[node_id].custom_params = sample
            sampled_pipelines.append(copied_pipeline)
        return sampled_pipelines
    def _get_response_matrix(self, samples: List[Pipeline]):
        """Fit and score each sampled pipeline; returns an array of test MSEs."""
        operation_response_matrix = []
        for sampled_pipeline in samples:
            sampled_pipeline.fit(self._train_data)
            prediction = sampled_pipeline.predict(self._test_data)
            mse_metric = mean_squared_error(y_true=self._test_data.target,
                                            y_pred=prediction.predict)
            operation_response_matrix.append(mse_metric)
        return np.array(operation_response_matrix)
    def _dispersion_analysis(self, node: Node, sample_size: int):
        """Evaluate per-parameter loss dispersion in parallel threads and
        save the resulting boxplot figure."""
        samples: np.array = self.sample_method(self.problem.dictionary, num_of_samples=sample_size)
        # Transpose so each row corresponds to one hyperparameter.
        transposed_samples = samples.T
        converted_samples = self.problem.convert_for_dispersion_analysis(transposed_samples)
        # One worker thread per hyperparameter.
        jobs = [Thread(target=self._evaluate_variance,
                       args=(params, transposed_samples[index], node))
                for index, params in enumerate(converted_samples)]
        for job in jobs:
            job.start()
        for job in jobs:
            job.join()
        self._visualize_variance()
    def _evaluate_variance(self, params: List[dict], samples, node: Node):
        """Worker: compute normalized sample/loss series for one parameter and
        store them in ``data_under_lock`` (thread-safe via the class lock)."""
        # default values of param & loss
        param_name = list(params[0].keys())[0]
        default_param_value = extract_operation_params(node).get(param_name)
        pipelines_with_applied_params = self._apply_params_to_node(params, node)
        # percentage ratio: samples relative to the node's default value
        samples = (samples - default_param_value) / default_param_value
        response_matrix = self._get_response_matrix(pipelines_with_applied_params)
        # Center and range-normalize the losses for comparable boxplots.
        response_matrix = (response_matrix - np.mean(response_matrix)) / \
                          (max(response_matrix) - min(response_matrix))
        OneOperationHPAnalyze.lock.acquire()
        self.data_under_lock[f'{param_name}'] = [samples.reshape(1, -1)[0], response_matrix]
        OneOperationHPAnalyze.lock.release()
    def _visualize_variance(self):
        """Render boxplots of the collected per-parameter samples and losses
        and save the figure to ``self._path_to_save``."""
        x_ticks_param = list()
        x_ticks_loss = list()
        for param in self.data_under_lock.keys():
            x_ticks_param.append(param)
            x_ticks_loss.append(f'{param}_loss')
        param_values_data = list()
        losses_data = list()
        for value in self.data_under_lock.values():
            # value is [normalized samples, normalized losses] per parameter.
            param_values_data.append(value[0])
            losses_data.append(value[1])
        fig, (ax1, ax2) = plt.subplots(2, figsize=(20, 10))
        ax1.boxplot(param_values_data)
        ax2.boxplot(losses_data)
        ax1.set_title('param')
        ax1.set_xticks(range(1, len(x_ticks_param) + 1))
        ax1.set_xticklabels(x_ticks_param)
        ax2.set_title('loss')
        ax2.set_xticks(range(1, len(x_ticks_loss) + 1))
        ax2.set_xticklabels(x_ticks_loss)
        plt.savefig(join(self._path_to_save, f'{self.operation_type}_hp_sa.jpg'))
    @staticmethod
    def _convert_indices_to_json(problem: OneOperationProblem, si: dict) -> dict:
        """Repackage SALib-style index arrays (S1/S1_conf/ST/ST_conf) into a
        JSON-serializable dict keyed by hyperparameter name."""
        sobol_indices = []
        for index in range(problem.num_vars):
            var_indices = {f"{problem.names[index]}": {
                'S1': list(si['S1'])[index],
                'S1_conf': list(si['S1_conf'])[index],
                'ST': list(si['ST'])[index],
                'ST_conf': list(si['ST_conf'])[index],
            }}
            sobol_indices.append(var_indices)
        data = {
            'problem': problem.dictionary,
            'sobol_indices': sobol_indices
        }
        return data
| 3,313 |
335 | <gh_stars>100-1000
{
"word": "Catch",
"definitions": [
"An act of catching something, typically a ball.",
"A chance or act of catching the ball to dismiss a batsman.",
"An amount of fish caught.",
"A person considered desirable as a partner or spouse.",
"A game in which a ball is thrown back and forth between two or more players.",
"A device for securing something such as a door, window, or box.",
"A hidden problem or disadvantage in an apparently ideal situation.",
"An unevenness in a person's voice caused by emotion.",
"A round, typically one with words arranged to produce a humorous effect."
],
"parts-of-speech": "Noun"
} | 242 |
12,278 | <filename>3rdParty/boost/1.71.0/libs/test/test/writing-test-ts/test-timeout-fail.cpp
// (C) Copyright <NAME>, 2019
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// See http://www.boost.org/libs/test for the library home page.
#define BOOST_TEST_MODULE timeout-error
#include <boost/test/unit_test.hpp>
#include <chrono>
#include <thread>
namespace utf = boost::unit_test;
// This case is *expected* to fail: the utf::timeout(1) decorator allows
// 1 second, but the body sleeps for 2000 ms before reaching BOOST_TEST(true),
// so the framework aborts the case with a timeout error.
BOOST_AUTO_TEST_CASE(test_fail, * utf::timeout(1))
{
  std::this_thread::sleep_for(std::chrono::milliseconds(2000));
  BOOST_TEST(true);
}
| 251 |
945 | <gh_stars>100-1000
/*=========================================================================
*
* Copyright NumFOCUS
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
#include "itkRealFFTTest.h"
#if defined(ITK_USE_FFTWD)
// Test FFT using FFTW Libraries. The test is performed for two 3D
// arrays, one of them having the same dimension(4,4,4) and the other
// having different dimensions (3,4,5). Images are created with
// different dimensions in the test function based on the second
// template argument and the size of these dimensions are taken from
// the array.The data types used are float and double.
int
itkFFTWD_RealFFTTest(int, char *[])
{
  // Real/complex image aliases for 1-D, 2-D and 3-D double-precision data.
  using ImageD1 = itk::Image<double, 1>;
  using ImageCD1 = itk::Image<std::complex<double>, 1>;
  using ImageD2 = itk::Image<double, 2>;
  using ImageCD2 = itk::Image<std::complex<double>, 2>;
  using ImageD3 = itk::Image<double, 3>;
  using ImageCD3 = itk::Image<std::complex<double>, 3>;

# ifndef ITK_USE_CUFFTW
  // Dump the current FFTW wisdom/plan-rigor configuration (not available
  // when the cuFFTW wrapper is used).
  std::cout << "WriteWisdomCache  " << itk::FFTWGlobalConfiguration::GetWriteWisdomCache() << std::endl;
  std::cout << "ReadWisdomCache  " << itk::FFTWGlobalConfiguration::GetReadWisdomCache() << std::endl;
  std::cout << "PlanRigor  " << itk::FFTWGlobalConfiguration::GetPlanRigor() << std::endl;
  std::cout << "WisdomCacheBase " << itk::FFTWGlobalConfiguration::GetWisdomCacheBase() << std::endl;
  std::cout << "WisdomeFile     " << itk::FFTWGlobalConfiguration::GetWisdomFileDefaultBaseName() << std::endl;
# endif

  // One cubic size and one with three distinct extents, to exercise both
  // symmetric and asymmetric transforms.
  unsigned int SizeOfDimensions1[] = { 4, 4, 4 };
  unsigned int SizeOfDimensions2[] = { 3, 5, 4 };
  // rval counts failed sub-tests; 0 means every round-trip passed.
  int          rval = 0;
  std::cerr << "FFTWD:double,1 (4,4,4)" << std::endl;
  if ((test_fft<double,
                1,
                itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD1>,
                itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD1>>(SizeOfDimensions1)) != 0)
    rval++;
  std::cerr << "FFTWD:double,2 (4,4,4)" << std::endl;
  if ((test_fft<double,
                2,
                itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD2>,
                itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD2>>(SizeOfDimensions1)) != 0)
    rval++;
  std::cerr << "FFTWD:double,3 (4,4,4)" << std::endl;
  if ((test_fft<double,
                3,
                itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD3>,
                itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD3>>(SizeOfDimensions1)) != 0)
    rval++;
  std::cerr << "FFTWD:double,1 (3,5,4)" << std::endl;
  if ((test_fft<double,
                1,
                itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD1>,
                itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD1>>(SizeOfDimensions2)) != 0)
    rval++;
  std::cerr << "FFTWD:double,2 (3,5,4)" << std::endl;
  if ((test_fft<double,
                2,
                itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD2>,
                itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD2>>(SizeOfDimensions2)) != 0)
    rval++;
  std::cerr << "FFTWD:double,3 (3,5,4)" << std::endl;
  if ((test_fft<double,
                3,
                itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD3>,
                itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD3>>(SizeOfDimensions2)) != 0)
    rval++;

  // Exercise the plan rigor methods
  itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD3>::Pointer fft =
    itk::FFTWRealToHalfHermitianForwardFFTImageFilter<ImageD3>::New();
  fft->SetPlanRigor(FFTW_ESTIMATE);
  if (fft->GetPlanRigor() != FFTW_ESTIMATE)
  {
    std::cerr << "Plan rigor read from FFT filter is not FFTW_ESTIMATE." << std::endl;
    // NOTE(review): returns 0 (success) despite printing an error — looks
    // like it should be a failure code (-1); left unchanged, confirm intent.
    return 0;
  }
  fft->SetPlanRigor(FFTW_MEASURE);

  itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD3>::Pointer ifft =
    itk::FFTWHalfHermitianToRealInverseFFTImageFilter<ImageCD3>::New();
  ifft->SetPlanRigor(FFTW_ESTIMATE);
  if (ifft->GetPlanRigor() != FFTW_ESTIMATE)
  {
    std::cerr << "Plan rigor read from FFT filter is not FFTW_ESTIMATE." << std::endl;
    // NOTE(review): same as above — success code on an error path.
    return 0;
  }
  ifft->SetPlanRigor(FFTW_MEASURE);

  fft->Print(std::cout);
  ifft->Print(std::cout);

  return (rval == 0) ? 0 : -1;
}
#endif
| 1,974 |
877 | // @skip-test
// Check that types on enum constants can be inferred. This test doesn't succeed for either kind of
// WPI, because WPI doesn't learn anything about enum constants from how they're used. They also
// cannot be assigned to, so there's no way for WPI to learn their types.
import org.checkerframework.checker.testchecker.ainfer.qual.Sibling1;
public class EnumConstants {
  /** Enum whose constants carry no explicit qualifier annotations. */
  enum MyEnum {
    ONE,
    TWO;
  }

  /** Accepts only values typed as {@code @Sibling1 MyEnum}. */
  void requiresS1(@Sibling1 MyEnum e) {}

  void test() {
    // The warning below is expected: WPI does not infer @Sibling1 for ONE
    // (the "// ::" line is a Checker Framework expected-diagnostic directive).
    // :: warning: argument
    requiresS1(MyEnum.ONE);
  }
}
| 179 |
7,482 | <reponame>Davidfind/rt-thread<gh_stars>1000+
/*
* Copyright (c) 2012, Freescale Semiconductor, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* o Neither the name of Freescale Semiconductor, Inc. nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "sdk.h"
#include "camera/camera_def.h"
#include "registers/regsiomuxc.h"
#include "registers/regsccm.h"
#define CAMERA_I2C_PORT (1)
void camera_ipu1_iomux_config(void);
////////////////////////////////////////////////////////////////////////////////
// Variables
////////////////////////////////////////////////////////////////////////////////
uint8_t g_camera_i2c_port = CAMERA_I2C_PORT;
////////////////////////////////////////////////////////////////////////////////
// Code
////////////////////////////////////////////////////////////////////////////////
/* dummy empty function for camera_test
* camera power is always on for MX6DQ SMD board*/
void camera_power_on(void)
{
    /* Intentionally empty: per the note above, camera power is always on for
     * the MX6DQ SMD board, so there is nothing to switch here. */
}
/*IOMUX configuration for CSI port0*/
/* Configures CSI port 0: sets up the parallel-camera pads and steers the IPU
 * CSI0 input mux to the parallel interface (register differs per chip). */
void csi_port0_iomux_config(void)
{
    /* Route all IPU1 CSI0 data/clock pads (see camera_ipu1_iomux_config). */
    camera_ipu1_iomux_config();

    /* set GPR1 to enable parallel interface
     * bit 19: 0 - Enable mipi to IPU1 CSI0, virtual channel is fixed to 0
     *         1 - Enable parallel interface to IPU CSI0
     * bit 20: 0 - Enable mipi to IPU2 CSI1, virtual channel is fixed to 3
     *         1 - Enable parallel interface to IPU2 CSI1
     * IPU1 CSI1 directly connect to mipi CSI2, virtual channel is fixed to 1
     * IPU2 CSI0 directly connect to mipi CSI2, virtual channel is fixed to 2
     */
#if defined(CHIP_MX6DQ)
    BW_IOMUXC_GPR1_MIPI_IPU1_MUX(1/*PARALLEL_INTERFACE*/);
#endif

#if defined(CHIP_MX6SDL)
    BW_IOMUXC_GPR13_IPU_CSI0_MUX(4/*IPU_CSI0*/);
#endif
}
//! @brief Function to configure IOMUXC for ipu1 module.
//! @todo Move this function to [chip]/[board]/iomuxc folders?
void camera_ipu1_iomux_config(void)
{
// Config ipu1.IPU1_CSI0_DATA12 to pad CSI0_DATA12(M2)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA12_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA12(0x020E0288)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA12
// ALT1 (1) - Select instance: eim signal: EIM_DATA08
// ALT3 (3) - Select instance: uart4 signal: UART4_TX_DATA
// ALT5 (5) - Select instance: gpio5 signal: GPIO5_IO30
// ALT7 (7) - Select instance: arm signal: ARM_TRACE09
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA12_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA12_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA12_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12(0x020E0658)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA12_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA13 to pad CSI0_DATA13(L1)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA13_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA13(0x020E028C)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA13
// ALT1 (1) - Select instance: eim signal: EIM_DATA09
// ALT3 (3) - Select instance: uart4 signal: UART4_RX_DATA
// ALT5 (5) - Select instance: gpio5 signal: GPIO5_IO31
// ALT7 (7) - Select instance: arm signal: ARM_TRACE10
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA13_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA13_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA13_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13(0x020E065C)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA13_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA14 to pad CSI0_DATA14(M4)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA14_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA14(0x020E0290)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA14
// ALT1 (1) - Select instance: eim signal: EIM_DATA10
// ALT3 (3) - Select instance: uart5 signal: UART5_TX_DATA
// ALT5 (5) - Select instance: gpio6 signal: GPIO6_IO00
// ALT7 (7) - Select instance: arm signal: ARM_TRACE11
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA14_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA14_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA14_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14(0x020E0660)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA14_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA15 to pad CSI0_DATA15(M5)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA15_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA15(0x020E0294)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA15
// ALT1 (1) - Select instance: eim signal: EIM_DATA11
// ALT3 (3) - Select instance: uart5 signal: UART5_RX_DATA
// ALT5 (5) - Select instance: gpio6 signal: GPIO6_IO01
// ALT7 (7) - Select instance: arm signal: ARM_TRACE12
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA15_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA15_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA15_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15(0x020E0664)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA15_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA16 to pad CSI0_DATA16(L4)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA16_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA16(0x020E0298)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA16
// ALT1 (1) - Select instance: eim signal: EIM_DATA12
// ALT3 (3) - Select instance: uart4 signal: UART4_RTS_B
// ALT5 (5) - Select instance: gpio6 signal: GPIO6_IO02
// ALT7 (7) - Select instance: arm signal: ARM_TRACE13
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA16_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA16_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA16_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16(0x020E0668)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA16_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA17 to pad CSI0_DATA17(L3)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA17_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA17(0x020E029C)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA17
// ALT1 (1) - Select instance: eim signal: EIM_DATA13
// ALT3 (3) - Select instance: uart4 signal: UART4_CTS_B
// ALT5 (5) - Select instance: gpio6 signal: GPIO6_IO03
// ALT7 (7) - Select instance: arm signal: ARM_TRACE14
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA17_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA17_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA17_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17(0x020E066C)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA17_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA18 to pad CSI0_DATA18(M6)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA18_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA18(0x020E02A0)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA18
// ALT1 (1) - Select instance: eim signal: EIM_DATA14
// ALT3 (3) - Select instance: uart5 signal: UART5_RTS_B
// ALT5 (5) - Select instance: gpio6 signal: GPIO6_IO04
// ALT7 (7) - Select instance: arm signal: ARM_TRACE15
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA18_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA18_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA18_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18(0x020E0670)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA18_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_DATA19 to pad CSI0_DATA19(L6)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA19_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA19(0x020E02A4)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_DATA19
// ALT1 (1) - Select instance: eim signal: EIM_DATA15
// ALT3 (3) - Select instance: uart5 signal: UART5_CTS_B
// ALT5 (5) - Select instance: gpio6 signal: GPIO6_IO05
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA19_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA19_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_DATA19_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19(0x020E0674)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_DATA19_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_HSYNC to pad CSI0_HSYNC(P4)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_HSYNC_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_HSYNC(0x020E025C)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_HSYNC
// ALT3 (3) - Select instance: ccm signal: CCM_CLKO1
// ALT5 (5) - Select instance: gpio5 signal: GPIO5_IO19
// ALT7 (7) - Select instance: arm signal: ARM_TRACE_CTL
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_HSYNC_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_HSYNC_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_HSYNC_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC(0x020E062C)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_HSYNC_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_PIXCLK to pad CSI0_PIXCLK(P1)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_PIXCLK_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_PIXCLK(0x020E0258)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_PIXCLK
// ALT5 (5) - Select instance: gpio5 signal: GPIO5_IO18
// ALT7 (7) - Select instance: arm signal: ARM_EVENTO
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_PIXCLK_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_PIXCLK_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_PIXCLK_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK(0x020E0628)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_PIXCLK_SRE_V(SLOW));
// Config ipu1.IPU1_CSI0_VSYNC to pad CSI0_VSYNC(N2)
// HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_VSYNC_WR(0x00000000);
// HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_WR(0x000130B0);
// Mux Register:
// IOMUXC_SW_MUX_CTL_PAD_CSI0_VSYNC(0x020E0264)
// SION [4] - Software Input On Field Reset: DISABLED
// Force the selected mux mode Input path no matter of MUX_MODE functionality.
// DISABLED (0) - Input Path is determined by functionality of the selected mux mode (regular).
// ENABLED (1) - Force input path of pad.
// MUX_MODE [2:0] - MUX Mode Select Field Reset: ALT5
// Select iomux modes to be used for pad.
// ALT0 (0) - Select instance: ipu1 signal: IPU1_CSI0_VSYNC
// ALT1 (1) - Select instance: eim signal: EIM_DATA01
// ALT5 (5) - Select instance: gpio5 signal: GPIO5_IO21
// ALT7 (7) - Select instance: arm signal: ARM_TRACE00
HW_IOMUXC_SW_MUX_CTL_PAD_CSI0_VSYNC_WR(
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_VSYNC_SION_V(DISABLED) |
BF_IOMUXC_SW_MUX_CTL_PAD_CSI0_VSYNC_MUX_MODE_V(ALT0));
// Pad Control Register:
// IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC(0x020E0634)
// HYS [16] - Hysteresis Enable Field Reset: ENABLED
// DISABLED (0) - CMOS input
// ENABLED (1) - Schmitt trigger input
// PUS [15:14] - Pull Up / Down Config. Field Reset: 100K_OHM_PU
// 100K_OHM_PD (0) - 100K Ohm Pull Down
// 47K_OHM_PU (1) - 47K Ohm Pull Up
// 100K_OHM_PU (2) - 100K Ohm Pull Up
// 22K_OHM_PU (3) - 22K Ohm Pull Up
// PUE [13] - Pull / Keep Select Field Reset: PULL
// KEEP (0) - Keeper Enabled
// PULL (1) - Pull Enabled
// PKE [12] - Pull / Keep Enable Field Reset: ENABLED
// DISABLED (0) - Pull/Keeper Disabled
// ENABLED (1) - Pull/Keeper Enabled
// ODE [11] - Open Drain Enable Field Reset: DISABLED
// Enables open drain of the pin.
// DISABLED (0) - Output is CMOS.
// ENABLED (1) - Output is Open Drain.
// SPEED [7:6] - Speed Field Reset: 100MHZ
// TBD (0) - TBD
// 50MHZ (1) - Low (50 MHz)
// 100MHZ (2) - Medium (100 MHz)
// 200MHZ (3) - Maximum (200 MHz)
// DSE [5:3] - Drive Strength Field Reset: 40_OHM
// HIZ (0) - HI-Z
// 240_OHM (1) - 240 Ohm
// 120_OHM (2) - 120 Ohm
// 80_OHM (3) - 80 Ohm
// 60_OHM (4) - 60 Ohm
// 48_OHM (5) - 48 Ohm
// 40_OHM (6) - 40 Ohm
// 34_OHM (7) - 34 Ohm
// SRE [0] - Slew Rate Field Reset: SLOW
// Slew rate control.
// SLOW (0) - Slow Slew Rate
// FAST (1) - Fast Slew Rate
HW_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_WR(
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_HYS_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_PUS_V(100K_OHM_PD) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_PUE_V(PULL) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_PKE_V(ENABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_ODE_V(DISABLED) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_SPEED_V(100MHZ) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_DSE_V(40_OHM) |
BF_IOMUXC_SW_PAD_CTL_PAD_CSI0_VSYNC_SRE_V(SLOW));
}
/*!
 * Pulse the camera sensor's reset line (GPIO1_17 on the SMD board).
 *
 * Brings the sensor out of standby first, then drives reset low, waits,
 * releases it, and waits again for the sensor to recover.
 */
void sensor_reset(void)
{
    const int32_t assert_us = 1000;     /* how long reset is held low */
    const int32_t recover_us = 1000;    /* settle time after releasing reset */

    /* the sensor must be active (not in standby) for reset to take effect */
    sensor_standby(0);

    /* MX6DQ/SDL_SMART_DEVICE: camera reset line is GPIO1_17 */
    BW_IOMUXC_SW_MUX_CTL_PAD_SD1_DATA1_MUX_MODE(BV_IOMUXC_SW_MUX_CTL_PAD_SD1_DATA1_MUX_MODE__ALT5);
    gpio_set_direction(GPIO_PORT1, 17, GPIO_GDIR_OUTPUT);

    /* reset pulse: low -> wait -> high -> wait */
    gpio_set_level(GPIO_PORT1, 17, GPIO_LOW_LEVEL);
    hal_delay_us(assert_us);
    gpio_set_level(GPIO_PORT1, 17, GPIO_HIGH_LEVEL);
    hal_delay_us(recover_us);
}
/*!
 * Drive the camera sensor's power-down pin (GPIO1_16, active high).
 *
 * @param enable non-zero puts the sensor into standby (pin high);
 *               zero takes it out of standby (pin low)
 */
void sensor_standby(int32_t enable)
{
    /* MX6DQ/SDL_SMART_DEVICE: power-down is routed to gpio1_16 */
    BW_IOMUXC_SW_MUX_CTL_PAD_SD1_DATA0_MUX_MODE(BV_IOMUXC_SW_MUX_CTL_PAD_SD1_DATA0_MUX_MODE__ALT5);
    gpio_set_direction(GPIO_PORT1, 16, GPIO_GDIR_OUTPUT);
    gpio_set_level(GPIO_PORT1, 16, enable ? GPIO_HIGH_LEVEL : GPIO_LOW_LEVEL);
}
/*!
 * Feed the camera sensor a 24MHz reference clock via the CLKO output.
 */
void sensor_clock_setting(void)
{
    const int32_t settle_us = 1000;     /* let the clock stabilize */

    /* MX6DQ/SDL_SMART_DEVICE: mux the GPIO_0 pad to the clko function,
     * with a fast slew rate and 80-ohm drive strength for a clean edge */
    BW_IOMUXC_SW_MUX_CTL_PAD_GPIO00_MUX_MODE(BV_IOMUXC_SW_MUX_CTL_PAD_GPIO00_MUX_MODE__ALT0);
    BW_IOMUXC_SW_PAD_CTL_PAD_GPIO00_SRE(BV_IOMUXC_SW_PAD_CTL_PAD_GPIO00_SRE__FAST);
    BW_IOMUXC_SW_PAD_CTL_PAD_GPIO00_DSE(BV_IOMUXC_SW_PAD_CTL_PAD_GPIO00_DSE__80_OHM);

    /* select osc_clk (24MHz); CKO1 output is driven by the cko2 clock */
    HW_CCM_CCOSR_WR(
        BF_CCM_CCOSR_CLKO2_EN(1) |
        BF_CCM_CCOSR_CLKO2_DIV(0) |     /* divide by 1 */
        BF_CCM_CCOSR_CLKO2_SEL(0xe) |   /* osc_clk */
        BF_CCM_CCOSR_CLKO_SEL(1) |
        BF_CCM_CCOSR_CLKO1_EN(1) |
        BF_CCM_CCOSR_CLKO1_DIV(0));     /* divide by 1 */

    hal_delay_us(settle_us);
}
////////////////////////////////////////////////////////////////////////////////
// EOF
////////////////////////////////////////////////////////////////////////////////
| 22,019 |
305 | <filename>aat/engine/dispatch/manager.py
import sys
import traceback
from typing import cast, List, TYPE_CHECKING
from .order_entry import StrategyManagerOrderEntryMixin, OrderManager
from .periodic import PeriodicManagerMixin
from .portfolio import StrategyManagerPortfolioMixin, PortfolioManager
from .risk import StrategyManagerRiskMixin, RiskManager
from .utils import StrategyManagerUtilsMixin
from aat.config import TradingType
from aat.core import Event, Error
from aat.exchange import Exchange
from aat.core.handler import EventHandler
if TYPE_CHECKING:
from aat.strategy import Strategy
from aat.engine import TradingEngine
class StrategyManager(
    StrategyManagerOrderEntryMixin,
    StrategyManagerRiskMixin,
    StrategyManagerPortfolioMixin,
    StrategyManagerUtilsMixin,
    PeriodicManagerMixin,
    EventHandler,
):
    """Mediator between the strategies and the engine.

    Every inbound event is fanned out to the portfolio, risk and order
    managers — always in that order.
    """

    def __init__(
        self,
        trading_engine: "TradingEngine",
        trading_type: TradingType,
        exchanges: List[Exchange],
        load_accounts: bool = False,
    ) -> None:
        """The Manager sits between the strategies and the engine and manages state

        Args:
            trading_engine (TradingEngine); the trading engine instance
            trading_type (TradingType); the trading type
            exchanges (List[Exchange]); a list of exchanges to dispatch to
            load_accounts (bool); load positions from accounts on startup
        """
        # engine and exchange handles
        self._engine = trading_engine
        self._exchanges = exchanges

        # whether to pull account/balance data from exchanges during onStart
        self._load_accounts = load_accounts

        # the sub-managers are owned by the engine; keep references here
        self._portfolio_mgr = self._engine.portfolio_manager
        self._risk_mgr = self._engine.risk_manager
        self._order_mgr = self._engine.order_manager

        # register ourselves with each sub-manager so they can call back
        for manager in (self._portfolio_mgr, self._risk_mgr, self._order_mgr):
            manager._setManager(self)

        # make every exchange known to the order manager
        for exchange in exchanges:
            self._order_mgr.addExchange(exchange)

        # event subscription bookkeeping
        self._data_subscriptions = {}  # type: ignore

        # per-strategy order and trade tracking
        self._strategy_open_orders = {}
        self._strategy_past_orders = {}
        self._strategy_trades = {}

        # internal use for synchronizing
        self._alerted_events = {}

        # internal use for periodics
        self._periodics = []

    # ********* #
    # Accessors #
    # ********* #
    def riskManager(self) -> RiskManager:
        return self._risk_mgr

    def orderManager(self) -> OrderManager:
        return self._order_mgr

    def portfolioManager(self) -> PortfolioManager:
        return self._portfolio_mgr

    def strategies(self) -> List["Strategy"]:
        return self._engine.strategies

    def exchangeinstances(self) -> List[Exchange]:
        return self._engine.exchanges

    # ********************* #
    # EventHandler methods *
    # **********************
    async def _forward(self, handler_name: str, event: Event) -> None:
        # dispatch the event to portfolio, then risk, then order manager
        await getattr(self._portfolio_mgr, handler_name)(event)
        await getattr(self._risk_mgr, handler_name)(event)
        await getattr(self._order_mgr, handler_name)(event)

    async def onTrade(self, event: Event) -> None:
        await self._forward("onTrade", event)

    async def onOpen(self, event: Event) -> None:
        await self._forward("onOpen", event)

    async def onCancel(self, event: Event) -> None:
        await self._forward("onCancel", event)

    async def onChange(self, event: Event) -> None:
        await self._forward("onChange", event)

    async def onFill(self, event: Event) -> None:
        await self._forward("onFill", event)

    async def onHalt(self, event: Event) -> None:
        await self._forward("onHalt", event)

    async def onContinue(self, event: Event) -> None:
        await self._forward("onContinue", event)

    async def onData(self, event: Event) -> None:
        await self._forward("onData", event)

    async def onError(self, event: Event) -> None:
        # fatal path: print the traceback carried by the error event and exit
        print("\n\nA Fatal Error has occurred:")
        error = cast(Error, event.target)
        traceback.print_exception(
            type(error.exception),
            error.exception,
            error.exception.__traceback__,
        )
        sys.exit(1)

    async def onExit(self, event: Event) -> None:
        await self._forward("onExit", event)

    async def onStart(self, event: Event) -> None:
        # tell the portfolio manager which strategies exist
        self._portfolio_mgr.updateStrategies(self.strategies())

        # optionally seed positions and cash from each exchange's account data
        if self._load_accounts:
            for exchange in self.exchangeinstances():
                accounts = await exchange.accounts()
                self._portfolio_mgr.updateAccount(accounts)

                balances = await exchange.balance()
                self._portfolio_mgr.updateCash(balances)
                self._risk_mgr.updateCash(balances)

        # defer to the sub-managers' own onStart handlers
        await self._portfolio_mgr.onStart(event)
        await self._risk_mgr.onStart(event)
        await self._order_mgr.onStart(event)
| 2,345 |
3,603 | <gh_stars>1000+
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.metadata;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import io.airlift.configuration.Config;
import io.airlift.configuration.LegacyConfig;
import javax.validation.constraints.NotNull;
import java.io.File;
import java.util.List;
public class StaticCatalogStoreConfig
{
    private static final Splitter SPLITTER = Splitter.on(',').trimResults().omitEmptyStrings();

    // Directory scanned for the per-catalog *.properties files.
    private File catalogConfigurationDir = new File("etc/catalog/");
    // Catalog names to skip on load; null means none were configured.
    private List<String> disabledCatalogs;

    @NotNull
    public File getCatalogConfigurationDir()
    {
        return catalogConfigurationDir;
    }

    @LegacyConfig("plugin.config-dir")
    @Config("catalog.config-dir")
    public StaticCatalogStoreConfig setCatalogConfigurationDir(File dir)
    {
        this.catalogConfigurationDir = dir;
        return this;
    }

    public List<String> getDisabledCatalogs()
    {
        return disabledCatalogs;
    }

    // Comma-separated form, as it arrives from the config file.
    @Config("catalog.disabled-catalogs")
    public StaticCatalogStoreConfig setDisabledCatalogs(String catalogs)
    {
        if (catalogs == null) {
            this.disabledCatalogs = null;
        }
        else {
            this.disabledCatalogs = SPLITTER.splitToList(catalogs);
        }
        return this;
    }

    // Programmatic form; the list is defensively copied.
    public StaticCatalogStoreConfig setDisabledCatalogs(List<String> catalogs)
    {
        if (catalogs == null) {
            this.disabledCatalogs = null;
        }
        else {
            this.disabledCatalogs = ImmutableList.copyOf(catalogs);
        }
        return this;
    }
}
| 656 |
820 | from tempfile import mkstemp, mkdtemp
import os
import signal
import sys
from time import time, sleep
from collections import defaultdict
import cProfile
import pstats
import shutil
import functools
import multiprocessing
import socket
import sysconfig
import concurrent
from unittest import skip, skipIf, TestCase, TestSuite, findTestCases # noqa: F401
from tornado.testing import AsyncTestCase
from unittest import mock
import tornado
from circus import get_arbiter
from circus.client import AsyncCircusClient, make_message
from circus.util import DEFAULT_ENDPOINT_DEALER, DEFAULT_ENDPOINT_SUB
from circus.util import tornado_sleep, ConflictError
from circus.util import IS_WINDOWS
from circus.watcher import Watcher
# True when running under a pydebug build of CPython.
DEBUG = sysconfig.get_config_var('Py_DEBUG') == 1

# Give tornado's AsyncTestCase a generous default timeout unless the
# environment already overrides it.
if 'ASYNC_TEST_TIMEOUT' not in os.environ:
    os.environ['ASYNC_TEST_TIMEOUT'] = '30'
class EasyTestSuite(TestSuite):
    """TestSuite preloaded with every test case found in module *name*.

    A name missing from ``sys.modules`` is silently ignored.
    """

    def __init__(self, name):
        try:
            module = sys.modules[name]
        except KeyError:
            return
        super(EasyTestSuite, self).__init__(findTestCases(module))
# Interpreter used to spawn every helper process in these tests.
PYTHON = sys.executable

# Script used to sleep for a specified amount of seconds.
# Should be used instead of the 'sleep' command for
# compatibility (e.g. Windows has no 'sleep' binary).
SLEEP = PYTHON + " -c 'import time;time.sleep(%d)'"
def get_ioloop():
    """Return the tornado IOLoop associated with the current thread."""
    from tornado.ioloop import IOLoop
    return IOLoop.current()
def get_available_port():
    """Ask the OS for a currently free TCP port and return its number.

    The probe socket is closed before returning, so another process could
    grab the port in the meantime (acceptable for tests).
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.bind(("", 0))
        return sock.getsockname()[1]
class MockWatcher(Watcher):
    """Watcher stub: records start() calls and fakes process spawning."""

    def start(self):
        # Only record that the watcher was started; no real work happens.
        self.started = True

    def spawn_process(self):
        # Register a placeholder instead of spawning an actual process.
        self.processes[1] = 'dummy'
class TestCircus(AsyncTestCase):
    """Base class for circus integration tests.

    Provides helpers for building/tearing down arbiters, temp files and
    directories, plugins, and async clients bound to the arbiter endpoint.
    """

    arbiter_factory = get_arbiter
    # NOTE: class-level list; _create_circus (a classmethod) appends to it
    # and tearDown replaces it with a fresh list on the instance.
    arbiters = []

    def setUp(self):
        super(TestCircus, self).setUp()
        self.files = []
        self.dirs = []
        self.tmpfiles = []
        self._clients = {}
        self.plugins = []

    @property
    def cli(self):
        # Async client bound to the most recently created arbiter; cached
        # per endpoint.
        if self.arbiters == []:
            # nothing is running
            raise Exception("nothing is running")
        endpoint = self.arbiters[-1].endpoint
        if endpoint in self._clients:
            return self._clients[endpoint]
        cli = AsyncCircusClient(endpoint=endpoint)
        self._clients[endpoint] = cli
        return cli

    def _stop_clients(self):
        # Stop and forget every cached client.
        for client in self._clients.values():
            client.stop()
        self._clients.clear()

    def get_new_ioloop(self):
        # Make tornado's AsyncTestCase use the shared current IOLoop.
        return get_ioloop()

    def tearDown(self):
        # Order matters: files, dirs, clients, plugins, then arbiters.
        for file in self.files + self.tmpfiles:
            try:
                os.remove(file)
            except OSError:
                pass
        for dir in self.dirs:
            try:
                shutil.rmtree(dir)
            except OSError:
                pass
        self._stop_clients()
        for plugin in self.plugins:
            plugin.stop()
        for arbiter in self.arbiters:
            if arbiter.running:
                try:
                    arbiter.stop()
                except ConflictError:
                    # Another command is already stopping it; ignore.
                    pass
        self.arbiters = []
        super(TestCircus, self).tearDown()

    def make_plugin(self, klass, endpoint=DEFAULT_ENDPOINT_DEALER,
                    sub=DEFAULT_ENDPOINT_SUB, check_delay=1,
                    **config):
        """Instantiate plugin *klass* and register it for teardown."""
        config['active'] = True
        plugin = klass(endpoint, sub, check_delay, None, **config)
        self.plugins.append(plugin)
        return plugin

    @tornado.gen.coroutine
    def start_arbiter(self, cmd='support.run_process',
                      stdout_stream=None, debug=True, **kw):
        """Create an arbiter running *cmd* as its watcher and start it."""
        testfile, arbiter = self._create_circus(
            cmd, stdout_stream=stdout_stream,
            debug=debug, use_async=True, **kw)
        self.test_file = testfile
        self.arbiter = arbiter
        self.arbiters.append(arbiter)
        yield self.arbiter.start()

    @tornado.gen.coroutine
    def stop_arbiter(self):
        """Remove all watchers, then force the arbiter down."""
        for watcher in self.arbiter.iter_watchers():
            yield self.arbiter.rm_watcher(watcher.name)
        yield self.arbiter._emergency_stop()

    @tornado.gen.coroutine
    def status(self, cmd, **props):
        # Return only the 'status' field of the command's response.
        resp = yield self.call(cmd, **props)
        raise tornado.gen.Return(resp.get('status'))

    @tornado.gen.coroutine
    def numwatchers(self, cmd, **props):
        resp = yield self.call(cmd, waiting=True, **props)
        raise tornado.gen.Return(resp.get('numprocesses'))

    @tornado.gen.coroutine
    def numprocesses(self, cmd, **props):
        resp = yield self.call(cmd, waiting=True, **props)
        raise tornado.gen.Return(resp.get('numprocesses'))

    @tornado.gen.coroutine
    def pids(self):
        # PIDs of the processes managed by the 'test' watcher.
        resp = yield self.call('list', name='test')
        raise tornado.gen.Return(resp.get('pids'))

    def get_tmpdir(self):
        """Create a temp directory that tearDown removes automatically."""
        dir_ = mkdtemp()
        self.dirs.append(dir_)
        return dir_

    def get_tmpfile(self, content=None):
        """Create a temp file (optionally pre-filled) removed in tearDown."""
        fd, file = mkstemp()
        os.close(fd)
        self.tmpfiles.append(file)
        if content is not None:
            with open(file, 'w') as f:
                f.write(content)
        return file

    @classmethod
    def _create_circus(cls, callable_path, plugins=None, stats=False,
                       use_async=False, arbiter_kw=None, **kw):
        """Build an arbiter with one watcher running circus/tests/generic.py.

        Returns a ``(testfile, arbiter)`` tuple where *testfile* is the
        scratch file the watched process writes its state markers into.
        """
        fd, testfile = mkstemp()
        os.close(fd)
        wdir = os.path.dirname(os.path.dirname(os.path.dirname(
            os.path.realpath(__file__))))
        args = ['circus/tests/generic.py', callable_path, testfile]
        worker = {'cmd': PYTHON, 'args': args, 'working_dir': wdir,
                  'name': 'test', 'graceful_timeout': 2}
        worker.update(kw)
        if not arbiter_kw:
            arbiter_kw = {}
        # Watcher kwargs take precedence over arbiter kwargs for 'debug'.
        debug = arbiter_kw['debug'] = kw.get('debug',
                                             arbiter_kw.get('debug', False))
        # -1 => no periodic callback to manage_watchers by default
        arbiter_kw['check_delay'] = kw.get('check_delay',
                                           arbiter_kw.get('check_delay', -1))
        _gp = get_available_port
        arbiter_kw['controller'] = "tcp://127.0.0.1:%d" % _gp()
        arbiter_kw['pubsub_endpoint'] = "tcp://127.0.0.1:%d" % _gp()
        arbiter_kw['multicast_endpoint'] = "udp://172.16.31.10:12027"
        if stats:
            arbiter_kw['statsd'] = True
            arbiter_kw['stats_endpoint'] = "tcp://127.0.0.1:%d" % _gp()
            arbiter_kw['statsd_close_outputs'] = not debug
        if use_async:
            # Caller drives the loop (coroutine-style tests).
            arbiter_kw['background'] = False
            arbiter_kw['loop'] = get_ioloop()
        else:
            arbiter_kw['background'] = True
        arbiter = cls.arbiter_factory([worker], plugins=plugins, **arbiter_kw)
        cls.arbiters.append(arbiter)
        return testfile, arbiter

    @tornado.gen.coroutine
    def _stop_runners(self):
        for arbiter in self.arbiters:
            yield arbiter.stop()
        self.arbiters = []

    @tornado.gen.coroutine
    def call(self, _cmd, **props):
        """Send command *_cmd* to the arbiter and return the response dict."""
        msg = make_message(_cmd, **props)
        resp = yield self.cli.call(msg)
        raise tornado.gen.Return(resp)
def profile(func):
    """Decorator that profiles each call to *func*.

    The 30 entries with the highest internal time are printed after every
    call, even when *func* raises.  The wrapped function's return value is
    passed through unchanged.
    """
    # functools.wraps preserves __name__/__doc__ of the wrapped function,
    # which the original version clobbered with '_profile'.
    @functools.wraps(func)
    def _profile(*args, **kw):
        profiler = cProfile.Profile()
        try:
            return profiler.runcall(func, *args, **kw)
        finally:
            pstats.Stats(profiler).sort_stats('time').print_stats(30)
    return _profile
class Process(object):
    """Helper run inside a watched child: logs lifecycle markers to a file.

    Each transition appends a marker (START, QUIT, CHLD, STOP) to
    *testfile* so the parent test can poll for it with poll_for().
    """

    def __init__(self, testfile):
        self.testfile = testfile

        # init signal handling
        if IS_WINDOWS:
            # Windows has no SIGQUIT; SIGBREAK/SIGILL are handled instead.
            signal.signal(signal.SIGABRT, self.handle_quit)
            signal.signal(signal.SIGTERM, self.handle_quit)
            signal.signal(signal.SIGINT, self.handle_quit)
            signal.signal(signal.SIGILL, self.handle_quit)
            signal.signal(signal.SIGBREAK, self.handle_quit)
        else:
            signal.signal(signal.SIGQUIT, self.handle_quit)
            signal.signal(signal.SIGTERM, self.handle_quit)
            signal.signal(signal.SIGINT, self.handle_quit)
            signal.signal(signal.SIGCHLD, self.handle_chld)
        self.alive = True

    def _write(self, msg):
        # Append-only so markers accumulate in order.
        with open(self.testfile, 'a+') as f:
            f.write(msg)

    def handle_quit(self, *args):
        # Any quit-like signal: record it and let run() exit its loop.
        self._write('QUIT')
        self.alive = False

    def handle_chld(self, *args):
        self._write('CHLD')
        return

    def run(self):
        """Write START, spin until a quit signal arrives, then write STOP."""
        self._write('START')
        while self.alive:
            sleep(0.1)
        self._write('STOP')
def run_process(test_file):
    """Child-process entry point: run a Process until it is signalled.

    Blocks in Process.run() and always returns 1 afterwards.
    """
    process = Process(test_file)
    process.run()
    return 1
def has_gevent():
    """Return True when the optional gevent package is importable."""
    try:
        __import__('gevent')  # NOQA
    except ImportError:
        return False
    else:
        return True
def has_circusweb():
    """Return True when the optional circusweb package is importable."""
    try:
        __import__('circusweb')  # NOQA
    except ImportError:
        return False
    else:
        return True
class TimeoutException(Exception):
    """Raised when polling for expected content exceeds the timeout."""
    pass
def poll_for_callable(func, *args, **kwargs):
    """Call *func* repeatedly until it stops raising AssertionError.

    Positional arguments that are callables are re-evaluated before every
    attempt.  Returns True on the first successful call; once *timeout*
    seconds (default 5) have elapsed, the last AssertionError is re-raised.
    """
    timeout = kwargs.pop('timeout', 5)
    start = time()
    last_exception = None
    while time() - start < timeout:
        try:
            # Resolve lazy (callable) arguments fresh on each attempt.
            resolved = [arg() if callable(arg) else arg for arg in args]
            func(*resolved)
        except AssertionError as e:
            last_exception = e
            sleep(0.1)
        else:
            return True
    raise last_exception or AssertionError('No exception triggered yet')
def poll_for(filename, needles, timeout=5):
    """Poll *filename* until one of *needles* appears in its content.

    *needles* may be a single string or a list of strings.  Returns True
    as soon as any needle is found; raises TimeoutException after
    *timeout* seconds of polling.
    """
    if isinstance(needles, str):
        needles = [needles]

    deadline = time() + timeout
    needle = content = None
    while time() < deadline:
        with open(filename) as f:
            content = f.read()
        for needle in needles:
            if needle in content:
                return True
        # When using gevent this will make sure the redirector greenlets
        # are scheduled.
        sleep(0.1)
    raise TimeoutException('Timeout polling "%s" for "%s". Content: %s' % (
        filename, needle, content))
@tornado.gen.coroutine
def async_poll_for(filename, needles, timeout=5):
    """Async version of poll_for: poll *filename* without blocking the loop.

    Yields True once any needle appears in the file content; raises
    TimeoutException after *timeout* seconds.
    """
    if isinstance(needles, str):
        needles = [needles]
    start = time()
    needle = content = None
    while time() - start < timeout:
        with open(filename) as f:
            content = f.read()
        for needle in needles:
            if needle in content:
                raise tornado.gen.Return(True)
        # Yield to the IOLoop between polls.
        yield tornado_sleep(0.1)
    raise TimeoutException('Timeout polling "%s" for "%s". Content: %s' % (
        filename, needle, content))
def truncate_file(filename):
    """Empty the file at *filename* without deleting it."""
    # Opening in 'w' mode truncates the file; nothing is written.
    with open(filename, 'w'):
        pass
def run_plugin(klass, config, plugin_info_callback=None, duration=300,
               endpoint=DEFAULT_ENDPOINT_DEALER,
               pubsub_endpoint=DEFAULT_ENDPOINT_SUB):
    """Run plugin *klass* for *duration* milliseconds with a statsd stub.

    Returns the stub so callers can inspect the gauges/increments the
    plugin emitted.  *plugin_info_callback*, when given, is invoked with
    the plugin instance after it has started.
    """
    check_delay = 1
    ssh_server = None

    class _Statsd(object):
        # In-memory statsd replacement recording everything the plugin sends.
        gauges = []
        increments = defaultdict(int)

        def gauge(self, name, value):
            self.gauges.append((name, value))

        def increment(self, name):
            self.increments[name] += 1

        def stop(self):
            pass
    _statsd = _Statsd()
    plugin = klass(endpoint, pubsub_endpoint, check_delay, ssh_server,
                   **config)

    # make sure we close the existing statsd client
    if hasattr(plugin, 'statsd'):
        plugin.statsd.stop()
    plugin.statsd = _statsd

    # Schedule the plugin to stop itself once the duration has elapsed.
    deadline = time() + (duration / 1000.)
    plugin.loop.add_timeout(deadline, plugin.stop)

    plugin.start()
    try:
        if plugin_info_callback:
            plugin_info_callback(plugin)
    finally:
        plugin.stop()

    return _statsd
@tornado.gen.coroutine
def async_run_plugin(klass, config, plugin_info_callback, duration=300,
                     endpoint=DEFAULT_ENDPOINT_DEALER,
                     pubsub_endpoint=DEFAULT_ENDPOINT_SUB):
    """Run *klass* via run_plugin() in a subprocess and yield its result.

    *plugin_info_callback* is partially applied with a multiprocessing
    queue; it runs in the child and must put its result on that queue.
    This coroutine polls until something is available, then returns it.
    """
    queue = multiprocessing.Queue()
    plugin_info_callback = functools.partial(plugin_info_callback, queue)
    circusctl_process = multiprocessing.Process(
        target=run_plugin,
        args=(klass, config, plugin_info_callback, duration,
              endpoint, pubsub_endpoint))
    circusctl_process.start()
    # Poll without blocking the IOLoop until the child reports back.
    while queue.empty():
        yield tornado_sleep(.1)
    result = queue.get()
    raise tornado.gen.Return(result)
class FakeProcess(object):
    """Minimal stand-in for a circus process object used in unit tests.

    It is always alive, never actually stops, and simply records the
    attributes the real process would expose.
    """

    def __init__(self, pid, status, started=1, age=1):
        self.pid = pid
        self.status = status
        self.started = started
        self.age = age
        self.stopping = False

    def is_alive(self):
        """A fake process is always considered alive."""
        return True

    def stop(self):
        """Stopping a fake process is a no-op."""
        pass
class MagicMockFuture(mock.MagicMock, concurrent.futures.Future):
    """A MagicMock that also satisfies the Future interface.

    Behaves as a future that is already done with a result of None: it
    cannot be cancelled, is never running, and fires done-callbacks
    immediately.
    """

    def cancel(self):
        # Already done, so cancellation always fails.
        return False

    def cancelled(self):
        return False

    def running(self):
        return False

    def done(self):
        return True

    def result(self, timeout=None):
        return None

    def exception(self, timeout=None):
        return None

    def add_done_callback(self, fn):
        # The future is already complete: run the callback synchronously.
        fn(self)

    def set_result(self, result):
        pass

    def set_exception(self, exception):
        pass

    def __del__(self):
        # Don't try to print non-consumed exceptions
        pass
| 6,359 |
1,255 | <reponame>RichardRanft/RakNet
/*
Copyright (c) 2009-2010 <NAME>. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of LibCat nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef CAT_BOMBAY_TABLE_HPP
#define CAT_BOMBAY_TABLE_HPP
#include <cat/threads/RWLock.hpp>
#include <cat/io/ThreadPoolFiles.hpp>
#include <cat/db/BombayTableIndex.hpp>
namespace cat {
namespace bombay {
// Sentinel offset value meaning "no such record" / lookup failure.
static u64 INVALID_RECORD_OFFSET = ~(u64)0;

// Node of the table's record cache.  Each hash bucket holds a binary tree
// of these, keyed by record offset (see Table::_cache_hash_table).
struct CacheNode
{
	CacheNode *parent, *lower, *higher;
	u64 offset;
};


class TableIndex;
class IHash;


// Query() AsyncBuffer tag must derive from AsyncQueryRead
// Carries the completion callback plus a ref-counted object that is kept
// alive for the duration of the asynchronous read.
struct AsyncQueryRead
{
	ThreadRefObject *_reference;
	AsyncCallback _callback;

	// Store the callback; AddRef() on *reference* keeps it alive until the
	// read completes (released elsewhere by the completion path).
	CAT_INLINE void SetCallback(AsyncCallback callback = 0, ThreadRefObject *reference = 0)
	{
		if (reference)
			reference->AddRef();

		_callback = callback;
		_reference = reference;
	}
};
///// Table

/*
	Asynchronous record table backed by a file (extends AsyncFile).

	Records are addressed by their byte offset in the file and have a fixed
	size (_record_bytes).  Recently touched records live in an in-memory
	cache (hash table of CacheNode trees), and secondary indexes
	(TableIndex) may be attached via MakeIndex() before Initialize().
*/
class Table : public AsyncFile
{
	ShutdownObserver *_shutdown_observer;
	u32 _record_bytes; // Bytes per record (without CacheNode overhead)
	u64 _next_record; // Next record offset

protected:
	RWLock _lock;

	// Progress state for the index-building scan over the database file.
	u64 _index_database_size, _index_read_offset, _index_read_completed;
	u32 _index_read_size;
	static const u32 MAX_INDEX_READ_SIZE = 32768;
	static const int NUM_PARALLEL_INDEX_READS = 3;

	// Cache hash table of binary trees
	static const u32 TARGET_TREE_SIZE = 16;
	static const u32 MIN_TABLE_SIZE = 2048;
	u32 _hash_table_size;
	CacheNode **_cache_hash_table;

	u8 *_cache; // Cache memory
	u32 _cache_bytes; // Cache bytes
	u32 _next_cache_slot; // Offset in cache memory to next free slot
	bool _cache_full; // Cache full flag for optimization

	// Heads of the attached-index chains (all / unique / waiting / update).
	// NOTE(review): naming suggests intrusive linked lists — confirm in the
	// implementation file.
	TableIndex *_head_index, *_head_index_unique;
	TableIndex *_head_index_waiting, *_head_index_update;

	bool AllocateCache();
	void FreeCache();

	// Node versions
	CacheNode *FindNode(u64 offset);
	void UnlinkNode(CacheNode *node);
	void InsertNode(u64 offset, u32 key, CacheNode *hint, CacheNode *node);

	// Always returns with a cache node; may re-use an old cache node
	u8 *SetOffset(u64 offset);
	u8 *InsertOffset(u64 offset);
	u8 *PeekOffset(u64 offset);
	bool RemoveOffset(u64 offset);

public:
	Table(const char *file_path, u32 record_bytes, u32 cache_bytes, ShutdownObserver *shutdown_observer);
	virtual ~Table();

private:
	TableIndex *MakeIndex(const char *index_file_path, IHash *hash_function, bool unique);
	u64 UniqueIndexLookup(const void *data);

public:
	// To initialize, run MakeIndex() for all of the desired indexing routines,
	// and then run Initialize(), which will initialize index objects.
	template<class THashFunc> CAT_INLINE TableIndex *MakeIndex(const char *index_file_path, bool unique)
	{
		// Ownership of the new hash functor passes to the private overload.
		return MakeIndex(index_file_path, new THashFunc, unique);
	}

	bool Initialize();

public:
	CAT_INLINE u32 GetCacheBytes() { return _cache_bytes; }
	CAT_INLINE u32 GetRecordBytes() { return _record_bytes; }

protected:
	// Completion handlers for asynchronous reads issued by Remove()/Query().
	virtual bool OnRemoveRead(ThreadPoolLocalStorage *tls, int error, AsyncBuffer *buffer, u32 bytes);
	virtual bool OnQueryRead(ThreadPoolLocalStorage *tls, int error, AsyncBuffer *buffer, u32 bytes);

protected:
	// Index (re)build pipeline: kick off the scan, issue reads, finish up.
	bool StartIndexing();
	bool StartIndexingRead();
	void OnIndexingDone();
	virtual bool OnIndexingRead(ThreadPoolLocalStorage *tls, int error, AsyncBuffer *buffer, u32 bytes);

public:
	bool RequestIndexRebuild(TableIndex *index);

public:
	// Insert an AsyncBuffer data buffer
	u64 Insert(void *data);

	// Update with an AsyncBuffer data buffer
	bool Update(void *data, u64 offset);

	// Query with an AsyncBuffer
	// NOTE: Query() AsyncBuffer tag must derive from AsyncQueryRead
	bool Query(u64 offset, AsyncBuffer *buffer);

	// Remove based on offset
	bool Remove(u64 offset);
};
} // namespace bombay
} // namespace cat
#endif // CAT_BOMBAY_TABLE_HPP
| 1,833 |
348 | <reponame>chamberone/Leaflet.PixiOverlay
{"nom":"Valbelle","dpt":"Alpes-de-Haute-Provence","inscrits":221,"abs":50,"votants":171,"blancs":11,"nuls":4,"exp":156,"res":[{"panneau":"1","voix":91},{"panneau":"2","voix":65}]} | 95 |
1,144 | /*
* #%L
* de.metas.servicerepair.base
* %%
* Copyright (C) 2021 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
package de.metas.servicerepair.project.service.commands;
import com.google.common.collect.ImmutableList;
import de.metas.common.util.time.SystemTime;
import de.metas.document.DocTypeId;
import de.metas.document.DocTypeQuery;
import de.metas.document.IDocTypeDAO;
import de.metas.location.CountryId;
import de.metas.money.CurrencyId;
import de.metas.order.OrderId;
import de.metas.order.compensationGroup.OrderGroupRepository;
import de.metas.organization.IOrgDAO;
import de.metas.organization.OrgId;
import de.metas.pricing.PriceListId;
import de.metas.pricing.PriceListVersionId;
import de.metas.pricing.PricingSystemId;
import de.metas.pricing.service.IPriceListDAO;
import de.metas.pricing.service.IPricingBL;
import de.metas.project.ProjectId;
import de.metas.servicerepair.project.model.ServiceRepairProjectCostCollector;
import de.metas.servicerepair.project.model.ServiceRepairProjectInfo;
import de.metas.servicerepair.project.model.ServiceRepairProjectTask;
import de.metas.servicerepair.project.service.ServiceRepairProjectService;
import de.metas.servicerepair.project.service.commands.createQuotationFromProjectCommand.ProjectQuotationPricingInfo;
import de.metas.servicerepair.project.service.commands.createQuotationFromProjectCommand.QuotationAggregator;
import de.metas.util.Services;
import lombok.Builder;
import lombok.NonNull;
import org.adempiere.exceptions.AdempiereException;
import org.compiere.model.I_C_Order;
import org.compiere.model.I_M_PriceList;
import org.compiere.model.X_C_DocType;
import org.compiere.util.TimeUtil;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.List;
/**
 * Creates a draft customer quotation (sales order with doc subtype
 * "cost estimate") from all cost collectors of a service/repair project
 * that are not yet included in a customer quotation.
 */
public class CreateQuotationFromProjectCommand
{
	private final IDocTypeDAO docTypeDAO = Services.get(IDocTypeDAO.class);
	private final IPriceListDAO priceListDAO = Services.get(IPriceListDAO.class);
	private final IPricingBL pricingBL = Services.get(IPricingBL.class);
	private final IOrgDAO orgDAO = Services.get(IOrgDAO.class);
	private final OrderGroupRepository orderGroupRepository;
	private final ServiceRepairProjectService projectService;

	// The project whose open cost collectors shall be quoted.
	private final ProjectId projectId;

	@Builder
	private CreateQuotationFromProjectCommand(
			@NonNull final OrderGroupRepository orderGroupRepository,
			@NonNull final ServiceRepairProjectService projectService,
			//
			@NonNull final ProjectId projectId)
	{
		this.orderGroupRepository = orderGroupRepository;
		this.projectService = projectService;
		this.projectId = projectId;
	}

	/**
	 * Builds the draft quotation order and links its lines back to the
	 * originating cost collectors.
	 *
	 * @return the id of the created draft order
	 * @throws AdempiereException if every cost collector is already quoted
	 */
	public OrderId execute()
	{
		final ImmutableList<ServiceRepairProjectCostCollector> costCollectors = projectService.getByProjectIdButNotIncludedInCustomerQuotation(projectId)
				.stream()
				.filter(ServiceRepairProjectCostCollector::isNotIncludedInCustomerQuotation) // redundant but feels safe
				.collect(ImmutableList.toImmutableList());
		if (costCollectors.isEmpty())
		{
			throw new AdempiereException("Everything is already quoted");
		}

		final ServiceRepairProjectInfo fromProject = projectService.getById(projectId);
		final QuotationAggregator quotationAggregator = newQuotationAggregator(fromProject);
		final I_C_Order order = quotationAggregator
				.addAll(costCollectors)
				.createDraft();

		// Remember which quotation line each cost collector ended up in.
		projectService.setCustomerQuotationToCostCollectors(quotationAggregator.getQuotationLineIdsIndexedByCostCollectorId());

		return OrderId.ofRepoId(order.getC_Order_ID());
	}

	/**
	 * Assembles the aggregator that turns cost collectors into quotation
	 * lines for the given project.
	 */
	private QuotationAggregator newQuotationAggregator(@NonNull final ServiceRepairProjectInfo project)
	{
		final List<ServiceRepairProjectTask> tasks = projectService.getTasksByProjectId(project.getProjectId());

		return QuotationAggregator.builder()
				.pricingBL(pricingBL)
				.orderGroupRepository(orderGroupRepository)
				.project(project)
				.tasks(tasks)
				.pricingInfo(getPricingInfo(project))
				.quotationDocTypeId(getQuotationDocTypeId(project))
				.build();
	}

	/**
	 * @return the sales-order doc type with subtype "cost estimate" for the
	 *         project's client/org
	 */
	private DocTypeId getQuotationDocTypeId(@NonNull final ServiceRepairProjectInfo project)
	{
		return docTypeDAO.getDocTypeId(DocTypeQuery.builder()
				.docBaseType(X_C_DocType.DOCBASETYPE_SalesOrder)
				.docSubType(X_C_DocType.DOCSUBTYPE_CostEstimate)
				.adClientId(project.getClientAndOrgId().getClientId().getRepoId())
				.adOrgId(project.getClientAndOrgId().getOrgId().getRepoId())
				.build());
	}

	/**
	 * Derives the pricing context (price list, currency, country, dates)
	 * from the project's price list version.
	 *
	 * @throws AdempiereException if the project has no price list version
	 */
	private ProjectQuotationPricingInfo getPricingInfo(@NonNull final ServiceRepairProjectInfo project)
	{
		final PriceListVersionId priceListVersionId = project.getPriceListVersionId();
		if (priceListVersionId == null)
		{
			throw new AdempiereException("@NotFound@ @M_PriceList_Version_ID@")
					.setParameter("project", project);
		}

		final I_M_PriceList priceList = priceListDAO.getPriceListByPriceListVersionId(priceListVersionId);

		final OrgId orgId = project.getClientAndOrgId().getOrgId();
		final ZoneId orgTimeZone = orgDAO.getTimeZone(orgId);

		return ProjectQuotationPricingInfo.builder()
				.orgId(orgId)
				.orgTimeZone(orgTimeZone)
				.shipBPartnerId(project.getBpartnerId())
				.datePromised(extractDatePromised(project, orgTimeZone))
				.pricingSystemId(PricingSystemId.ofRepoId(priceList.getM_PricingSystem_ID()))
				.priceListId(PriceListId.ofRepoId(priceList.getM_PriceList_ID()))
				.priceListVersionId(priceListVersionId)
				.currencyId(CurrencyId.ofRepoId(priceList.getC_Currency_ID()))
				.countryId(CountryId.ofRepoId(priceList.getC_Country_ID()))
				.build();
	}

	/**
	 * @return the project's finish date converted to the org time zone, or
	 *         end-of-today in that zone when no finish date is set
	 */
	private static ZonedDateTime extractDatePromised(
			@NonNull final ServiceRepairProjectInfo project,
			@NonNull final ZoneId timeZone)
	{
		final ZonedDateTime dateFinish = project.getDateFinish();
		return dateFinish != null
				? TimeUtil.convertToTimeZone(dateFinish, timeZone)
				: SystemTime.asZonedDateTimeAtEndOfDay(timeZone);
	}
}
| 2,162 |
309 | <filename>src/Cxx/PolyData/PointLocatorVisualization.cxx
#include <vtkPointSource.h>
#include <vtkSphereSource.h>
#include <vtkPointLocator.h>
#include <vtkSmartPointer.h>
#include <vtkPolyData.h>
#include <vtkSliderWidget.h>
#include <vtkPolyDataMapper.h>
#include <vtkActor.h>
#include <vtkRenderWindow.h>
#include <vtkRenderer.h>
#include <vtkRenderWindowInteractor.h>
#include <vtkPolyData.h>
#include <vtkSmartPointer.h>
#include <vtkCommand.h>
#include <vtkWidgetEvent.h>
#include <vtkCallbackCommand.h>
#include <vtkWidgetEventTranslator.h>
#include <vtkInteractorStyleTrackballCamera.h>
#include <vtkSliderWidget.h>
#include <vtkSliderRepresentation2D.h>
#include <vtkProperty.h>
#include <vtkMath.h>
// Slider callback: regenerates the point-locator wireframe representation
// at the tree level selected by the slider, then re-renders the scene.
class vtkSliderCallback : public vtkCommand
{
public:
  static vtkSliderCallback *New()
  {
    return new vtkSliderCallback;
  }
  vtkSliderCallback():PointLocator(0), Level(0), PolyData(0), Renderer(0){}
  virtual void Execute(vtkObject *caller, unsigned long, void*)
  {
    vtkSliderWidget *sliderWidget =
      reinterpret_cast<vtkSliderWidget*>(caller);
    // Snap the continuous slider value to the nearest integer level.
    this->Level = vtkMath::Round(static_cast<vtkSliderRepresentation *>(sliderWidget->GetRepresentation())->GetValue());
    // Rebuild the representation polydata in place; the mapper observing
    // PolyData picks up the change.
    this->PointLocator->GenerateRepresentation(this->Level, this->PolyData);
    this->Renderer->Render();
  }
  // Wired up by main() before the widget is enabled.
  vtkPointLocator* PointLocator;
  int Level;
  vtkPolyData* PolyData;
  vtkRenderer* Renderer;
};
// Visualize the spatial subdivision of a vtkPointLocator built over the
// points of a sphere.  A 2D slider selects which level of the locator tree
// is displayed as a wireframe overlay.
int main (int, char *[])
{
  // Input points: a coarse sphere.
  vtkSmartPointer<vtkSphereSource> inputSource =
    vtkSmartPointer<vtkSphereSource>::New();
  inputSource->SetPhiResolution(10);
  inputSource->SetThetaResolution(10);
  inputSource->Update();

  vtkSmartPointer<vtkPolyDataMapper> pointsMapper =
    vtkSmartPointer<vtkPolyDataMapper>::New();
  pointsMapper->SetInputConnection(inputSource->GetOutputPort());

  vtkSmartPointer<vtkActor> pointsActor =
    vtkSmartPointer<vtkActor>::New();
  pointsActor->SetMapper(pointsMapper);
  pointsActor->GetProperty()->SetInterpolationToFlat();

  // Create the tree
  vtkSmartPointer<vtkPointLocator> pointLocator =
    vtkSmartPointer<vtkPointLocator>::New();
  pointLocator->SetDataSet(inputSource->GetOutput());
  pointLocator->BuildLocator();

  // Initialize the representation (level 0 of the locator tree).
  vtkSmartPointer<vtkPolyData> polydata =
    vtkSmartPointer<vtkPolyData>::New();
  pointLocator->GenerateRepresentation(0, polydata);

  vtkSmartPointer<vtkPolyDataMapper> locatorTreeMapper =
    vtkSmartPointer<vtkPolyDataMapper>::New();
  locatorTreeMapper->SetInputData(polydata);

  vtkSmartPointer<vtkActor> locatorTreeActor =
    vtkSmartPointer<vtkActor>::New();
  locatorTreeActor->SetMapper(locatorTreeMapper);
  locatorTreeActor->GetProperty()->SetInterpolationToFlat();
  locatorTreeActor->GetProperty()->SetRepresentationToWireframe();

  // A renderer and render window
  vtkSmartPointer<vtkRenderer> renderer =
    vtkSmartPointer<vtkRenderer>::New();
  vtkSmartPointer<vtkRenderWindow> renderWindow =
    vtkSmartPointer<vtkRenderWindow>::New();
  renderWindow->AddRenderer(renderer);

  // An interactor
  vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor =
    vtkSmartPointer<vtkRenderWindowInteractor>::New();
  renderWindowInteractor->SetRenderWindow(renderWindow);

  // Add the actors to the scene
  renderer->AddActor(pointsActor);
  renderer->AddActor(locatorTreeActor);

  // Render an image (lights and cameras are created automatically)
  renderWindow->Render();

  // Slider spanning the levels of the locator tree, drawn in normalized
  // display coordinates along the bottom of the window.
  vtkSmartPointer<vtkSliderRepresentation2D> sliderRep =
    vtkSmartPointer<vtkSliderRepresentation2D>::New();
  sliderRep->SetMinimumValue(0);
  sliderRep->SetMaximumValue(pointLocator->GetLevel());
  sliderRep->SetValue(0);
  sliderRep->SetTitleText("MaxPointsPerRegion");
  sliderRep->GetPoint1Coordinate()->SetCoordinateSystemToNormalizedDisplay();
  sliderRep->GetPoint1Coordinate()->SetValue(.2, .2);
  sliderRep->GetPoint2Coordinate()->SetCoordinateSystemToNormalizedDisplay();
  sliderRep->GetPoint2Coordinate()->SetValue(.8, .2);
  sliderRep->SetSliderLength(0.075);
  sliderRep->SetSliderWidth(0.05);
  sliderRep->SetEndCapLength(0.05);

  vtkSmartPointer<vtkSliderWidget> sliderWidget =
    vtkSmartPointer<vtkSliderWidget>::New();
  sliderWidget->SetInteractor(renderWindowInteractor);
  sliderWidget->SetRepresentation(sliderRep);
  sliderWidget->SetAnimationModeToAnimate();
  sliderWidget->EnabledOn();

  // Hook the callback up so slider interaction regenerates the overlay.
  vtkSmartPointer<vtkSliderCallback> callback =
    vtkSmartPointer<vtkSliderCallback>::New();
  callback->PointLocator = pointLocator;
  callback->PolyData = polydata;
  callback->Renderer = renderer;
  sliderWidget->AddObserver(vtkCommand::InteractionEvent,callback);

  renderWindowInteractor->Initialize();
  renderWindow->Render();

  renderWindowInteractor->Start();

  return EXIT_SUCCESS;
}
| 1,630 |
742 | {
"scripts": {
"gcp-build": "tsc function.ts"
},
"dependencies": {
"@google-cloud/functions-framework": "^1.0.0"
},
"devDependencies": {
"typescript": "^3.7.2"
}
}
| 91 |
10,225 | <filename>independent-projects/resteasy-reactive/server/runtime/src/main/java/org/jboss/resteasy/reactive/server/model/ContextResolvers.java
package org.jboss.resteasy.reactive.server.model;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.ext.ContextResolver;
import org.jboss.resteasy.reactive.common.model.ResourceContextResolver;
import org.jboss.resteasy.reactive.common.util.MediaTypeHelper;
import org.jboss.resteasy.reactive.server.jaxrs.ContextResolverDelegate;
import org.jboss.resteasy.reactive.spi.BeanFactory;
/**
 * Registry of {@link ContextResolver} providers keyed by the context class
 * they produce, supporting media-type aware lookup.
 */
public class ContextResolvers {

    // Context class -> resolvers registered for it, in registration order.
    private final Map<Class<?>, List<ResourceContextResolver>> resolvers = new HashMap<>();

    /** Register {@code contextResolver} as a provider of {@code contextType}. */
    public <T> void addContextResolver(Class<T> contextType, ResourceContextResolver contextResolver) {
        List<ResourceContextResolver> list = resolvers.get(contextType);
        if (list == null) {
            list = new ArrayList<>(1);
            resolvers.put(contextType, list);
        }
        list.add(contextResolver);
    }

    /**
     * Find the resolver for {@code clazz} that best matches {@code mediaType}.
     * All resolvers tied for the best media-type fitness are collected; when
     * more than one remains they are wrapped in a delegating resolver.
     *
     * @return the matching resolver, a delegate over all equally-good
     *         matches, or {@code null} when nothing is registered/matches
     */
    public <T> ContextResolver<T> getContextResolver(Class<T> clazz, MediaType mediaType) {
        List<ResourceContextResolver> goodResolvers = resolvers.get(clazz);
        if ((goodResolvers != null) && !goodResolvers.isEmpty()) {
            List<MediaType> mt = Collections.singletonList(mediaType);
            final List<ContextResolver<T>> delegates = new ArrayList<>();
            // Best media-type fitness seen so far; delegates holds all
            // resolvers tied at that fitness.
            MediaType bestMatch = null;
            for (ResourceContextResolver goodResolver : goodResolvers) {
                boolean add = false;
                // we don't care
                if (mediaType == null) {
                    add = true;
                } else {
                    MediaType match;
                    // wildcard handling
                    if (goodResolver.mediaTypes().isEmpty()) {
                        match = MediaType.WILDCARD_TYPE;
                    } else {
                        match = MediaTypeHelper.getBestMatch(mt, goodResolver.mediaTypes());
                        // if there's no match, we must skip it
                        if (match == null)
                            continue;
                    }
                    if (bestMatch == null) {
                        bestMatch = match;
                        add = true;
                    } else {
                        int cmp = MediaTypeHelper.Q_COMPARATOR.compare(bestMatch, match);
                        if (cmp == 0) {
                            // same fitness
                            add = true;
                        } else if (cmp > 0) {
                            // wrong order means that our best match is not as good as the new match
                            delegates.clear();
                            add = true;
                            bestMatch = match;
                        }
                        // otherwise this is not as good as our delegate list, so let's not add it
                    }
                }
                if (add) {
                    delegates.add((ContextResolver<T>) goodResolver.getFactory().createInstance().getInstance());
                }
            }
            if (delegates.isEmpty()) {
                return null;
            } else if (delegates.size() == 1) {
                return delegates.get(0);
            }
            return new ContextResolverDelegate<>(delegates);
        }
        return null;
    }

    /** @return the raw registration map (mutable; not a defensive copy). */
    public Map<Class<?>, List<ResourceContextResolver>> getResolvers() {
        return resolvers;
    }

    /** Assign a default bean factory to every resolver that has none yet. */
    public void initializeDefaultFactories(Function<String, BeanFactory<?>> factoryCreator) {
        for (Map.Entry<Class<?>, List<ResourceContextResolver>> entry : resolvers.entrySet()) {
            for (ResourceContextResolver i : entry.getValue()) {
                if (i.getFactory() == null) {
                    i.setFactory((BeanFactory) factoryCreator.apply(i.getClassName()));
                }
            }
        }
    }
}
| 2,029 |
355 | #include "CascadeFaceDetection.h"
#include "CaffeBinding.h"
#include "../Test/util/BoundingBox.inc.h"
#include "../Test/TestFaceDetection.inc.h"
#include "pyboostcvconverter.hpp"
using namespace std;
// Process-wide Caffe binding shared by all detector instances
// (deleted in ~CascadeFaceDetection — see the note there).
caffe::CaffeBinding* kCaffeBinding = new caffe::CaffeBinding();
namespace FaceInception {
  // Singleton cascade detector shared by the Python-facing wrappers.
  // Zero-initialized (global); set by the model-path constructor.
  CascadeCNN* cascade;
  // Default constructor: no models are loaded.  The global 'cascade' stays
  // null, so Predict() returns empty results until the model-path
  // constructor is used.
  CascadeFaceDetection::CascadeFaceDetection() {
    cout << "Please specify the net models." << endl;
  }
  // Convert one FaceInformation to a Python list of the shape
  //   [ (x, y, w, h), confidence, [(px, py) x 5] ].
  // Returns a new reference owned by the caller.
  PyObject* pyopencv_from(const FaceInformation &info) {
    PyObject* seq = PyList_New(3);
    //ERRWRAP2(
    PyObject* rect = Py_BuildValue("(dddd)", info.boundingbox.x, info.boundingbox.y, info.boundingbox.width, info.boundingbox.height);
    // PyList_SET_ITEM steals the item reference, so no DECREF is needed.
    PyList_SET_ITEM(seq, 0, rect);
    PyObject* score = PyFloat_FromDouble(info.confidence);
    PyList_SET_ITEM(seq, 1, score);
    PyObject* points = PyList_New(5);
    for (int i = 0; i < 5; i++) {
      PyObject* item = Py_BuildValue("(dd)", info.points[i].x, info.points[i].y);
      PyList_SET_ITEM(points, i, item);
    }
    PyList_SET_ITEM(seq, 2, points);
    //)
    return seq;
  }
  // Convert a vector of FaceInformation into a Python list (new reference).
  // NOTE(review): the original error handling is commented out; if
  // pyopencv_from() ever returned null, the loop would break and leave the
  // remaining list slots uninitialized — confirm this is acceptable.
  static inline PyObject* pyopencv_from_face_info_vec(const std::vector<FaceInformation>& value) {
    int i, n = (int)value.size();
    PyObject* seq = PyList_New(n);
    //ERRWRAP2(
    for (i = 0; i < n; i++) {
      PyObject* item = pyopencv_from(value[i]);
      if (!item)
        break;
      PyList_SET_ITEM(seq, i, item);
    }
    //if (i < n) {
    //  Py_DECREF(seq);
    //  return 0;
    //}
    //)
    return seq;
  }
  // Construct the detector from the prototxt/caffemodel paths of each
  // cascade stage (12-net, stitched 12-net, 24-net, 48-net, localization
  // net).  Initializes the shared global 'cascade' on the given GPU.
  CascadeFaceDetection::CascadeFaceDetection(std::string net12_definition, std::string net12_weights,
                                             std::string net12_stitch_definition, std::string net12_stitch_weights,
                                             std::string net24_definition, std::string net24_weights,
                                             std::string net48_definition, std::string net48_weights,
                                             std::string netLoc_definition, std::string netLoc_weights,
                                             int gpu_id) {
    cascade = new CascadeCNN(net12_definition, net12_weights,
                             net12_stitch_definition, net12_stitch_weights,
                             net24_definition, net24_weights,
                             net48_definition, net48_weights,
                             netLoc_definition, netLoc_weights,
                             gpu_id);
  }
  // Run the cascade on an OpenCV image and collect detections with their
  // confidences and 5 landmark points.  Returns an empty vector when no
  // models are loaded (cascade == NULL).
  // NOTE(review): 12.0 / min_face appears to convert the minimum face size
  // into the first-stage scale factor, and 0.7 looks like an overlap/NMS
  // threshold — confirm against CascadeCNN::GetDetection.
  std::vector<FaceInformation> CascadeFaceDetection::Predict(cv::Mat& input_image, vector<double> thresholds, double min_face) {
    std::vector<FaceInformation> result;
    vector<vector<Point2d>> points;
    if (cascade != NULL) {
      auto rect_and_score = cascade->GetDetection(input_image, 12.0 / min_face, thresholds, true, 0.7, true, points);
      for (int i = 0; i < rect_and_score.size();i++) {
        result.push_back(FaceInformation{ rect_and_score[i].first, rect_and_score[i].second, points[i] });
      }
    }
    return result;
  }
// Python-facing Predict with fixed thresholds {0.6, 0.6, 0.7}.
// Returns nullptr if the ndarray cannot be converted to a cv::Mat.
// NOTE(review): min_face is not passed here, so this relies on a default
// argument in the overload's declaration — confirm against the header.
PyObject * CascadeFaceDetection::Predict(PyObject * input) {
  Mat input_image = FaceInception::fromNDArrayToMat(input);
  if (!input_image.data) return nullptr;
  auto faces = Predict(input_image, { 0.6, 0.6, 0.7 });
  return pyopencv_from_face_info_vec(faces);
}
// Python-facing Predict: expects an ndarray image, a 3-tuple of per-stage
// confidence thresholds, and a Python float for the minimum face size.
// Returns nullptr on any conversion failure.
PyObject * CascadeFaceDetection::Predict(PyObject * input, PyObject * confidence_threshold, PyObject * min_face) {
  Mat input_image;
  ERRWRAP2(input_image = FaceInception::fromNDArrayToMat(input));
  if (!input_image.data || !PyFloat_Check(min_face)) return nullptr;
  vector<double> thresholds(3);
  // BUG FIX: the format string was "dddd" (four doubles) while only three
  // output pointers were supplied — PyArg_ParseTuple would read a garbage
  // vararg. The tuple carries exactly three thresholds, so use "ddd" and
  // check the result instead of ignoring it.
  if (!PyArg_ParseTuple(confidence_threshold, "ddd", &thresholds[0], &thresholds[1], &thresholds[2]))
    return nullptr;
  auto faces = Predict(input_image, thresholds, PyFloat_AsDouble(min_face));
  return pyopencv_from_face_info_vec(faces);
}
// Run only the landmark-localization net on a caller-supplied coarse face
// rectangle (4-tuple of doubles), bypassing the detection stages.
// Returns the same [box, score, points] list format as Predict.
// NOTE(review): PyArg_ParseTuple's return value is unchecked here; a
// malformed tuple leaves `r` partially initialized — confirm callers always
// pass a valid 4-tuple.
PyObject* CascadeFaceDetection::ForceGetLandmark(PyObject* input, PyObject * CoarseRect) {
  Mat input_image;
  ERRWRAP2(input_image = FaceInception::fromNDArrayToMat(input));
  cv::Rect2d r;
  PyArg_ParseTuple(CoarseRect, "dddd", &r.x, &r.y, &r.width, &r.height);
  cout << r << endl;  // debug trace of the parsed rectangle
  vector<vector<Point2d>> points;
  auto rect_and_score = cascade->ForceGetLandmark(input_image, r, 0.7, points);
  vector<FaceInformation> result;
  for (int i = 0; i < rect_and_score.size(); i++) {
    result.push_back(FaceInformation{ rect_and_score[i].first, rect_and_score[i].second, points[i] });
  }
  return pyopencv_from_face_info_vec(result);
}
// Tear down the cascade and the shared Caffe binding.
// NOTE(review): both `cascade` and `kCaffeBinding` are file-level globals, so
// destroying a second CascadeFaceDetection instance would double-delete them —
// confirm single-instance usage.
CascadeFaceDetection::~CascadeFaceDetection() {
  delete cascade;
  delete kCaffeBinding;
}
} | 2,014 |
623 | <gh_stars>100-1000
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.server.account;
/** Expands user name to a local email address, usually by adding a domain. */
public interface EmailExpander {
  boolean canExpand(String user);

  String expand(String user);

  /** Null-object expander used when no email format is configured. */
  class None implements EmailExpander {
    public static final None INSTANCE = new None();

    /** A missing or empty format string means "no expansion configured". */
    public static boolean canHandle(String fmt) {
      return fmt == null || fmt.isEmpty();
    }

    private None() {}

    @Override
    public boolean canExpand(String user) {
      return false;
    }

    @Override
    public String expand(String user) {
      return null;
    }
  }

  /** Expands by splicing the user name into a format such as {@code "{0}@example.com"}. */
  class Simple implements EmailExpander {
    private static final String PLACEHOLDER = "{0}";

    /** A format is usable only if it actually contains the placeholder. */
    public static boolean canHandle(String fmt) {
      return fmt != null && fmt.contains(PLACEHOLDER);
    }

    /** Text before the placeholder. */
    private final String prefix;

    /** Text after the placeholder. */
    private final String suffix;

    public Simple(String fmt) {
      final int at = fmt.indexOf(PLACEHOLDER);
      prefix = fmt.substring(0, at);
      suffix = fmt.substring(at + PLACEHOLDER.length());
    }

    @Override
    public boolean canExpand(String user) {
      // Names containing a space cannot form a valid local email address.
      return user.indexOf(' ') < 0;
    }

    @Override
    public String expand(String user) {
      return prefix + user + suffix;
    }
  }
}
| 603 |
1,217 | #include <stdlib.h>
#include <stdio.h>
#include <vector>
#include <string>
#include <sstream>
#include <iostream>
#include <fstream>
#include <stdexcept>
#include <zlib.h>
#include "arc_utilities/arc_exceptions.hpp"
#include "arc_utilities/zlib_helpers.hpp"
namespace ZlibHelpers
{
// MAKE SURE THE INPUT BUFFER IS LESS THAN 4GB IN SIZE
std::vector<uint8_t> DecompressBytes(const std::vector<uint8_t>& compressed)
{
z_stream strm;
std::vector<uint8_t> buffer;
const size_t BUFSIZE = 1024 * 1024;
uint8_t temp_buffer[BUFSIZE];
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
int ret = inflateInit(&strm);
if (ret != Z_OK)
{
(void)inflateEnd(&strm);
std::cerr << "ZLIB unable to init inflate stream" << std::endl;
throw std::invalid_argument("ZLIB unable to init inflate stream");
}
strm.avail_in = (uint32_t)compressed.size();
strm.next_in = const_cast<uint8_t*>(reinterpret_cast<const uint8_t*>(compressed.data()));
do
{
strm.next_out = temp_buffer;
strm.avail_out = BUFSIZE;
ret = inflate(&strm, Z_NO_FLUSH);
if (buffer.size() < strm.total_out)
{
buffer.insert(buffer.end(), temp_buffer, temp_buffer + BUFSIZE - strm.avail_out);
}
}
while (ret == Z_OK);
if (ret != Z_STREAM_END)
{
(void)inflateEnd(&strm);
std::cerr << "ZLIB unable to inflate stream with ret=" << ret << std::endl;
throw std::invalid_argument("ZLIB unable to inflate stream");
}
(void)inflateEnd(&strm);
std::vector<uint8_t> decompressed(buffer);
return decompressed;
}
// MAKE SURE THE INPUT BUFFER IS LESS THAN 4GB IN SIZE
std::vector<uint8_t> CompressBytes(const std::vector<uint8_t>& uncompressed)
{
z_stream strm;
std::vector<uint8_t> buffer;
const size_t BUFSIZE = 1024 * 1024;
uint8_t temp_buffer[BUFSIZE];
strm.zalloc = Z_NULL;
strm.zfree = Z_NULL;
strm.opaque = Z_NULL;
strm.avail_in = (uint32_t)uncompressed.size();
strm.next_in = const_cast<uint8_t*>(reinterpret_cast<const uint8_t*>(uncompressed.data()));
strm.next_out = temp_buffer;
strm.avail_out = BUFSIZE;
int ret = deflateInit(&strm, Z_BEST_SPEED);
if (ret != Z_OK)
{
(void)deflateEnd(&strm);
std::cerr << "ZLIB unable to init deflate stream" << std::endl;
throw std::invalid_argument("ZLIB unable to init deflate stream");
}
while (strm.avail_in != 0)
{
ret = deflate(&strm, Z_NO_FLUSH);
if (ret != Z_OK)
{
(void)deflateEnd(&strm);
std::cerr << "ZLIB unable to deflate stream" << std::endl;
throw std::invalid_argument("ZLIB unable to deflate stream");
}
if (strm.avail_out == 0)
{
buffer.insert(buffer.end(), temp_buffer, temp_buffer + BUFSIZE);
strm.next_out = temp_buffer;
strm.avail_out = BUFSIZE;
}
}
int deflate_ret = Z_OK;
while (deflate_ret == Z_OK)
{
if (strm.avail_out == 0)
{
buffer.insert(buffer.end(), temp_buffer, temp_buffer + BUFSIZE);
strm.next_out = temp_buffer;
strm.avail_out = BUFSIZE;
}
deflate_ret = deflate(&strm, Z_FINISH);
}
if (deflate_ret != Z_STREAM_END)
{
(void)deflateEnd(&strm);
std::cerr << "ZLIB unable to deflate stream" << std::endl;
throw std::invalid_argument("ZLIB unable to deflate stream");
}
buffer.insert(buffer.end(), temp_buffer, temp_buffer + BUFSIZE - strm.avail_out);
(void)deflateEnd(&strm);
std::vector<uint8_t> compressed(buffer);
return compressed;
}
    // Read the whole file at `path` into memory and return its
    // zlib-decompressed contents. Throws (via throw_arc_exception) if the file
    // cannot be opened or fully read; DecompressBytes throws on corrupt data.
    std::vector<uint8_t> LoadFromFileAndDecompress(const std::string& path)
    {
        // Open at the end (ios::ate) so tellg() immediately yields the size.
        std::ifstream input_file(path, std::ios::binary | std::ios::in | std::ios::ate);
        if (!input_file.is_open())
        {
            throw_arc_exception(std::runtime_error, "Couldn't open file " + path);
        }
        std::streamsize size = input_file.tellg();
        input_file.seekg(0, std::ios::beg);
        std::vector<uint8_t> file_buffer((size_t)size);
        if (!(input_file.read(reinterpret_cast<char*>(file_buffer.data()), size)))
        {
            throw_arc_exception(std::runtime_error, "Unable to read entire contents of file");
        }
        const std::vector<uint8_t> decompressed = ZlibHelpers::DecompressBytes(file_buffer);
        return decompressed;
    }
void CompressAndWriteToFile(const std::vector<uint8_t>& uncompressed, const std::string& path)
{
const auto compressed = CompressBytes(uncompressed);
std::ofstream output_file(path, std::ios::out | std::ios::binary);
output_file.write(reinterpret_cast<const char*>(compressed.data()), (std::streamsize)compressed.size());
output_file.close();
}
}
| 2,625 |
1,444 |
package mage.cards.b;
import java.util.UUID;
import mage.Mana;
import mage.abilities.Ability;
import mage.abilities.common.BeginningOfPreCombatMainTriggeredAbility;
import mage.abilities.effects.OneShotEffect;
import mage.abilities.effects.common.counter.AddCountersSourceEffect;
import mage.cards.CardImpl;
import mage.cards.CardSetInfo;
import mage.constants.CardType;
import mage.constants.Outcome;
import mage.constants.TargetController;
import mage.counters.CounterType;
import mage.game.Game;
import mage.game.permanent.Permanent;
import mage.players.Player;
/**
 * Bounty of the Luxa — {2}{G}{U} Enchantment.
 *
 * At the beginning of your precombat main phase, remove all flood counters
 * from Bounty of the Luxa. If no counters were removed this way, put a flood
 * counter on it and draw a card. Otherwise, add {C}{G}{U}.
 *
 * @author jeffwadsworth
 */
public final class BountyOfTheLuxa extends CardImpl {
    public BountyOfTheLuxa(UUID ownerId, CardSetInfo setInfo) {
        super(ownerId, setInfo, new CardType[]{CardType.ENCHANTMENT}, "{2}{G}{U}");
        //At the beginning of your precombat main phase, remove all flood counters from Bounty of the Luxa. If no counters were removed this way, put a flood counter on Bounty of the Luxa and draw a card. Otherwise, add {C}{G}{U}.
        this.addAbility(new BeginningOfPreCombatMainTriggeredAbility(new BountyOfTheLuxaEffect(), TargetController.YOU, false));
    }
    /** Copy constructor used by {@link #copy()}. */
    private BountyOfTheLuxa(final BountyOfTheLuxa card) {
        super(card);
    }
    @Override
    public BountyOfTheLuxa copy() {
        return new BountyOfTheLuxa(this);
    }
}
/**
 * One-shot effect implementing Bounty of the Luxa's alternating trigger:
 * with flood counters present, remove them all and (if none remain) add
 * {C}{G}{U}; with none present, add a flood counter and draw a card.
 */
class BountyOfTheLuxaEffect extends OneShotEffect {
    public BountyOfTheLuxaEffect() {
        super(Outcome.Benefit);
        staticText = "remove all flood counters from {this}. If no counters were removed this way, put a flood counter on {this} and draw a card. Otherwise, add {C}{G}{U}";
    }
    public BountyOfTheLuxaEffect(final BountyOfTheLuxaEffect effect) {
        super(effect);
    }
    @Override
    public BountyOfTheLuxaEffect copy() {
        return new BountyOfTheLuxaEffect(this);
    }
    @Override
    public boolean apply(Game game, Ability source) {
        Player controller = game.getPlayer(source.getControllerId());
        Permanent bountyOfLuxa = game.getPermanent(source.getSourceId());
        // Treat the permanent as gone if it changed zones since the ability
        // triggered (it is no longer the same game object).
        if (bountyOfLuxa != null && bountyOfLuxa.getZoneChangeCounter(game) != source.getSourceObjectZoneChangeCounter()) {
            bountyOfLuxa = null;
        }
        if (controller != null) {
            if (bountyOfLuxa != null
                    && bountyOfLuxa.getCounters(game).getCount(CounterType.FLOOD) > 0) {
                // Flood counters were present: remove them all, then add mana
                // if the removal actually succeeded (count reached zero).
                bountyOfLuxa.removeCounters(CounterType.FLOOD.createInstance(bountyOfLuxa.getCounters(game).getCount(CounterType.FLOOD)), source, game);
                if (bountyOfLuxa.getCounters(game).getCount(CounterType.FLOOD) == 0) {
                    Mana manaToAdd = new Mana();
                    manaToAdd.increaseColorless();
                    manaToAdd.increaseGreen();
                    manaToAdd.increaseBlue();
                    controller.getManaPool().addMana(manaToAdd, game, source);
                }
            } else {
                // No counters removed: add one (if the permanent is still
                // around) and draw a card either way.
                if (bountyOfLuxa != null) {
                    new AddCountersSourceEffect(CounterType.FLOOD.createInstance()).apply(game, source);
                }
                controller.drawCards(1, source, game);
            }
            return true;
        }
        return false;
    }
}
| 1,330 |
722 | // Copyright (C) 2019. Huawei Technologies Co., Ltd. All rights reserved.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
// WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#include "gpu/mali/fp16/depthwise_convolution_mali_fp16.h"
#include "gpu/mali/fp16/depthwise_convolution_direct_mali_fp16.h"
#include "gpu/mali/cl/kernel_option/conv_depthwise_opt.h"
// Enqueue the fp16 depthwise-convolution OpenCL kernel for one layer.
// Derives tensor strides/offsets from the GCL memory descriptors, selects the
// dilated or plain kernel variant, binds all arguments, and records the
// kernel in the handle's kernel vector (execution is deferred to the handle).
inline EE depthwise_core_mali_fp16(GCLHandle_t handle,
    TensorDesc inputDesc,
    const GCLMem_t input,
    TensorDesc filterDesc,
    const GCLMem_t filter,
    ConvolutionParamSpec convParamSpec,
    ForwardRunInfoMali_t forwardRunInfo,
    TensorDesc biasDesc,
    const GCLMem_t bias,
    U32 tmpBytes,
    GCLMem_t tmpBuf,
    TensorDesc outputDesc,
    GCLMem_t output,
    ActivationMode depthwiseActivationMode)
{
    UNUSED(inputDesc);
    UNUSED(biasDesc);
    UNUSED(tmpBytes);
    UNUSED(tmpBuf);
    cl_mem inbuf, biasimg, outbuf, fltbuf;
    inbuf = input->mem;
    fltbuf = filter->mem;
    biasimg = bias->mem;
    outbuf = output->mem;
    U32 iw, ih, ic, in;
    U32 fw, fh, sw, sh, pw, ph, dw, dh;
    U32 ow, oh, oc, on;
    sw = convParamSpec.stride_w;
    sh = convParamSpec.stride_h;
    pw = convParamSpec.padding_left;
    ph = convParamSpec.padding_top;
    dw = convParamSpec.dilatedRate_w;
    dh = convParamSpec.dilatedRate_h;
    fw = convParamSpec.kernel_w;
    fh = convParamSpec.kernel_h;
    tensorSelectGet(inputDesc, NULL, NULL, &in, &ic, &ih, &iw);
    tensorSelectGet(outputDesc, NULL, NULL, &on, &oc, &oh, &ow);
    // item_h: number of output rows computed per work-item (auto-tuned value).
    U32 item_h = forwardRunInfo->best_h[0];
    U32 iw_str, ih_str, ic_str, ihw_str, in_str;
    I32 iw_off, ih_off;
    get_gclmem_dim(input->desc, &iw_str, &ih_str, &ic_str, (U32 *)&iw_off, (U32 *)&ih_off);
    // Shift the read origin left/up by the padding so the kernel can index
    // padded coordinates directly (offsets may go negative, hence I32).
    iw_off -= pw;
    ih_off -= ph;
    ihw_str = iw_str * ih_str;
    // Channels are packed four-wide (NCHWC4), so the channel stride counts
    // groups of 4.
    ic_str = (ic + 3) / 4;
    in_str = ic_str * ihw_str;
    U32 ow_str, oh_str, oc_str, ow_off, oh_off, ohw_str, on_str, o_off;
    get_gclmem_dim(output->desc, &ow_str, &oh_str, &oc_str, &ow_off, &oh_off);
    ohw_str = oh_str * ow_str;
    oc_str = (oc + 3) / 4;
    on_str = oc_str * ohw_str;
    o_off = oh_off * ow_str + ow_off;
    // Global work size: one work-item per output column, per item_h output
    // rows, per 4-channel group (times batch).
    U32 gs[3] = {ow, (oh + item_h - 1) / item_h, (oc + 3) / 4 * on};
    U32 ls[3] = {0, 0, 0};
    U32 dim = 3;
    Kernel kernel;
    char kernelName[128];
    KernelOpt kernelOpt;
    if (dw > 1 || dh > 1) {
        // Dilated variant: extra dilation-rate arguments are bound.
        CHECK_STATUS(
            set_conv_depthwise_dila_opt_mali(fw, fh, sh, dh, item_h, depthwiseActivationMode, false,
                DT_F16, input->desc.memType, output->desc.memType, kernelName, &kernelOpt));
        CHECK_STATUS(gcl_create_kernel(handle, kernelName, &kernel, &kernelOpt));
        CHECK_STATUS(gcl_set_kernelArgs(kernel, iw_str, ihw_str, ic_str, iw_off, ih_off, ow_str,
            ohw_str, o_off, oh, oc, sw, dw, dh, in_str, on_str, gs[0], gs[1], inbuf, fltbuf,
            biasimg, outbuf));
    } else {
        CHECK_STATUS(set_conv_depthwise_opt_mali(fw, fh, sh, item_h, depthwiseActivationMode, false,
            DT_F16, input->desc.memType, output->desc.memType, kernelName, &kernelOpt));
        CHECK_STATUS(gcl_create_kernel(handle, kernelName, &kernel, &kernelOpt));
        CHECK_STATUS(
            gcl_set_kernelArgs(kernel, iw_str, ihw_str, ic_str, iw_off, ih_off, ow_str, ohw_str,
                o_off, oh, oc, sw, in_str, on_str, gs[0], gs[1], inbuf, fltbuf, biasimg, outbuf));
    }
    gcl_set_kernelVec(handle, kernel, dim, gs, ls, kernelName);
#ifdef _DEBUG
    // CHECK_STATUS(gcl_run_kernel(handle, kernel, dim, gs, ls, kernelName));
    // handle->t_total += handle->t_execute;
#endif
    return SUCCESS;
}
// Describe the layout of the transformed depthwise filter: filters are
// grouped item_k at a time along dims[0] (fw*fh*item_k values per group),
// with dims[1] counting the ceil(fc/item_k) groups.
inline TensorDesc transform_filter_desc(TensorDesc filterDesc, U32 item_k)
{
    U32 fw, fh, fc;
    tensorSelectGet(filterDesc, NULL, NULL, NULL, &fc, &fh, &fw);
    TensorDesc desc;
    desc.df = DF_NCHW;
    desc.dt = DT_F16;
    desc.nDims = 4;
    desc.dims[3] = 1;
    desc.dims[0] = fw * fh * item_k;
    desc.dims[1] = (fc + item_k - 1) / item_k;  // ceil(fc / item_k) groups
    desc.dims[2] = 1;
    return desc;
}
// Report the descriptor of the transformed filter so callers can size the
// destination buffer before running the transform kernel.
EE depthwise_convolution_direct_transform_filter_bytes_mali_fp16(
    TensorDesc filterDesc, ForwardRunInfoMali_t forwardRunInfo, TensorDesc *ftmDesc)
{
    // best_k: auto-tuned channel-vector width used by the transform.
    U32 item_k = forwardRunInfo->best_k[0];
    *ftmDesc = transform_filter_desc(filterDesc, item_k);
    return SUCCESS;
}
// Rearrange the depthwise filter into the item_k-vectorized device layout by
// running the filter-transform kernel on the GPU (synchronously).
EE depthwise_convolution_direct_transform_filter_mali_fp16(GCLHandle_t handle,
    TensorDesc filterDesc,
    GCLMem_t filter,
    ForwardRunInfoMali_t forwardRunInfo,
    TensorDesc *fltmemDesc,
    GCLMem_t fltmem)
{
    DataType fdt;
    DataFormat fdf;
    U32 fw, fh, fc;
    tensorSelectGet(filterDesc, &fdt, &fdf, NULL, &fc, &fh, &fw);
    U32 fwh = fw * fh;
    U32 item_k = forwardRunInfo->best_k[0];
    char kernelName[128];
    Kernel kernel;
    KernelOpt kernelOpt;
    CHECK_STATUS(set_conv_depthwise_trans_flt(item_k, DT_F16, GCL_MEM_BUF, kernelName, &kernelOpt));
    CHECK_STATUS(gcl_get_kernel_from_map(handle, kernelName, &kernel, &kernelOpt));
    CHECK_STATUS(gcl_set_kernelArgs(kernel, fw, fh, fwh, fc, filter->mem, fltmem->mem));
    // 2-D dispatch: one work-item per filter tap per channel group (the third
    // gs slot is left zero-initialized and unused since dim == 2).
    U32 gs[3] = {fwh, (fc + item_k - 1) / item_k};
    U32 ls[3] = {0, 0, 0};
    U32 dim = 2;
    CHECK_STATUS(gcl_run_kernel(handle, kernel, dim, gs, ls, kernelName));
    *fltmemDesc = transform_filter_desc(filterDesc, item_k);
    return SUCCESS;
}
// Build the GCL memory descriptor for the NCHWC4-packed, padded copy of the
// input that the depthwise kernel consumes. Prefers image memory on Qualcomm
// devices, falling back to a plain buffer when the image size limits would be
// exceeded.
GCLMemDesc depthwise_convolution_get_input_nchwc4_desc(TensorDesc inputDesc,
    TensorDesc filterDesc,
    ConvolutionParamSpec convParamSpec,
    TensorDesc outputDesc,
    U32 item_h)
{
    GCLMemDesc desc;
    U32 oh = outputDesc.dims[1];
    // Pad the output-height coverage up to a multiple of item_h so every
    // work-item has a full tile to read.
    U32 ih_align = ALIGN(oh, item_h);
    U32 pl, pr, pt, pb;
    calDepthwisePaddingVal(inputDesc, convParamSpec, ih_align, &pl, &pr, &pt, &pb);
    inputDesc.df = DF_NCHWC4;
    bool useImg = check_qualcomm_device();
    if (useImg) {
        OclMemoryImg mem;
        mem.resize(inputDesc);
        U32 str[3] = {0};
        mem.stride(str);
        if (CHECK_MEET_IMAGE_LIMITS(str[0], str[1], str[2])) {
            mem.padding(pl, pr, pt, pb, 0, 0);
            desc = mem.get_desc();
        } else {
            // Image would exceed device limits; fall back to buffer memory.
            useImg = false;
        }
    }
    if (!useImg) {
        OclMemory mem;
        mem.resize(inputDesc);
        mem.padding(pl, pr, pt, pb);
        desc = mem.get_desc();
    }
    return desc;
}
// Repack an NCHW input into the padded NCHWC4 scratch area inside tmpBuf:
// zero-fill the destination (so padding reads as zero), then run the
// NCHW->NCHWC4 transform. On success *transDesc describes the packed tensor
// and, for buffer-backed scratch, *tmpSubOff is advanced past the used bytes.
EE depthwise_convolution_trans_input_to_nchwc4(GCLHandle_t handle,
    TensorDesc inputDesc,
    TensorDesc filterDesc,
    GCLMem_t input,
    ConvolutionParamSpec convParamSpec,
    GCLMem_t tmpBuf,
    TensorDesc outputDesc,
    U32 item_h,
    GCLMemDesc *transDesc,
    U32 *tmpSubOff)
{
    GCLMemDesc desc = depthwise_convolution_get_input_nchwc4_desc(
        inputDesc, filterDesc, convParamSpec, outputDesc, item_h);
    GCLMem tMem;
    // The scratch buffer must be of the same memory kind (image vs buffer)
    // that the descriptor computation selected.
    if (desc.memType != tmpBuf->desc.memType) {
        CHECK_STATUS(NOT_MATCH);
    }
    tMem.mem = tmpBuf->mem;
    tMem.desc = desc;
    CHECK_STATUS(ocl_fill_memory_zero(handle, &tMem, 0));
    CHECK_STATUS(ocl_data_trans_form(handle, input, &tMem, 0, 0, NCHW_TO_NCHWC4));
    *transDesc = desc;
    // Only buffer-backed scratch consumes a byte range that later users must
    // skip; image memory is a separate allocation.
    if (desc.memType == GCL_MEM_BUF) {
        U32 size = desc.byteSize;
        (*tmpSubOff) += ALIGN(size, BUFFER_ALIGN_BASE);
    }
    return SUCCESS;
}
// Report the scratch-memory requirement for the forward pass. Scratch is only
// needed when the input arrives as NCHW and must be repacked to NCHWC4.
// NOTE(review): contract appears to be bytes[0] = buffer bytes, bytes[1..3] =
// image dimensions (only one of the two is filled, the other left untouched);
// `size` is computed but never used — confirm callers zero the array first.
EE depthwise_convolution_direct_infer_forward_tmp_bytes_mali_fp16(TensorDesc inputDesc,
    TensorDesc filterDesc,
    TensorDesc outputDesc,
    ConvolutionParamSpec convParamSpec,
    ForwardRunInfoMali_t forwardRunInfo,
    U32 *bytes)
{
    U32 size = 0;
    if (inputDesc.df == DF_NCHW) {
        GCLMemDesc desc = depthwise_convolution_get_input_nchwc4_desc(
            inputDesc, filterDesc, convParamSpec, outputDesc, forwardRunInfo->best_h[0]);
        if (desc.memType == GCL_MEM_IMG_3D) {
            bytes[1] = desc.stride[0];
            bytes[2] = desc.stride[1];
            bytes[3] = desc.stride[2];
        } else {
            bytes[0] = desc.byteSize;
        }
    }
    return SUCCESS;
}
// Entry point for the direct fp16 depthwise convolution: zero the output
// (padding regions must read as zero) then enqueue the compute kernel.
EE depthwise_convolution_direct_mali_fp16(GCLHandle_t handle,
    TensorDesc inputDesc,
    const GCLMem_t input,
    TensorDesc filterDesc,
    const GCLMem_t filter,
    ConvolutionParamSpec convParamSpec,
    ForwardRunInfoMali_t forwardRunInfo,
    TensorDesc biasDesc,
    const GCLMem_t bias,
    U32 tmpBytes,
    GCLMem_t tmpBuf,
    TensorDesc outputDesc,
    GCLMem_t output,
    ActivationMode depthwiseActivationMode)
{
    CHECK_STATUS(fill_output_zero(handle, output, outputDesc));
    CHECK_STATUS(depthwise_core_mali_fp16(handle, inputDesc, input, filterDesc, filter,
        convParamSpec, forwardRunInfo, biasDesc, bias, tmpBytes, tmpBuf, outputDesc, output,
        depthwiseActivationMode));
    return SUCCESS;
}
| 4,258 |
318 | from flask import Blueprint, current_app, request
from flask_login import current_user
from ..models import Logs, db
from ..util.custom_decorator import login_required
import copy
import json
api = Blueprint('api', __name__)
from . import api_msg_manage, module_manage, project_manage, report_manage, build_in_manage, case_manage, login, \
test_tool, task_manage, file_manage, config, case_set_manage, test_case_file_manage, errors
@api.before_request
def before_request():
    """Log basic request info (client ip, url, method, JSON body) before dispatch.

    Logging is strictly best-effort: it must never abort request handling,
    so any failure is swallowed.
    """
    try:
        # get_json(silent=True) returns None for a missing/invalid JSON body
        # instead of raising, unlike the bare `request.json` property the
        # original relied on (which made the broad except fire routinely).
        current_app.logger.info(
            'ip:{}, url:{} ,method:{},请求参数:{}'.format(
                request.headers.get('X-Forwarded-For'), request.url,
                request.method, request.get_json(silent=True)))
    except Exception:
        # Deliberate best-effort: never let logging break the request.
        pass
@api.after_request
def after_request(r):
    """Persist an access-log row for every request, then log response bodies
    for non-excluded endpoints. Returns the response unchanged."""
    # Record who hit which URL; uid is None for anonymous requests.
    uid = current_user.id if getattr(current_user, 'id', None) else None
    new_project = Logs(ip=request.headers.get('X-Forwarded-For'),
                       uid=uid,
                       url=request.url, )
    db.session.add(new_project)
    db.session.commit()
    # File downloads are binary; skip body logging entirely.
    if 'downloadFile' in request.url:
        return r
    # Shallow-copy the response payload so decoding below does not mutate
    # the actual response object.
    result = copy.copy(r.response)
    if isinstance(result[0], bytes):
        result[0] = bytes.decode(result[0])
    # NOTE(review): after the bytes->str decode above, result[0] is a str,
    # making `not isinstance(result[0], str)` false for decoded bodies — this
    # log line looks unreachable in that case; confirm intended behavior.
    if 'apiMsg/run' not in request.url and 'report/run' not in request.url and 'report/list' not in request.url and not isinstance(
            result[0], str):
        current_app.logger.info('url:{} ,method:{},返回数据:{}'.format(request.url, request.method, json.loads(result[0])))
    return r
| 662 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.