max_stars_count
int64 301
224k
| text
stringlengths 6
1.05M
| token_count
int64 3
727k
|
---|---|---|
8,805 |
// Copyright <NAME> 2014.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_COROUTINES2_COROUTINE_H
#define BOOST_COROUTINES2_COROUTINE_H
#include <exception>
#include <boost/assert.hpp>
#include <boost/config.hpp>
#include <boost/coroutine2/detail/coroutine.hpp>
#ifdef BOOST_HAS_ABI_HEADERS
# include BOOST_ABI_PREFIX
#endif
namespace boost {
namespace coroutines2 {
// Facade pairing the two endpoints of an asymmetric coroutine for value
// type T: pull_type consumes values the coroutine produces, push_type
// feeds values into it.
template< typename T >
struct coroutine {
    using pull_type = detail::pull_coroutine< T >;
    using push_type = detail::push_coroutine< T >;
};
// Alias matching the "asymmetric coroutine" terminology used elsewhere in
// Boost.Coroutine2 documentation.
template< typename T >
using asymmetric_coroutine = coroutine< T >;
}}
#ifdef BOOST_HAS_ABI_HEADERS
# include BOOST_ABI_SUFFIX
#endif
#endif // BOOST_COROUTINES2_COROUTINE_H
| 390 |
2,904 | <reponame>Sid-darthvader/dowhy
import string
from importlib import import_module
from dowhy.interpreter import Interpreter
def get_class_object(method_name, *args, **kwargs):
    """Resolve an interpreter class from its snake_case module name.

    Imports ``dowhy.interpreters.<method_name>`` and returns the class of
    the CamelCase-equivalent name from that module (e.g.
    ``propensity_balance_interpreter`` -> ``PropensityBalanceInterpreter``).
    The class must be a subclass of :class:`Interpreter`.

    Factory pattern from
    https://www.bnmetrics.com/blog/factory-pattern-in-python3-simple-version

    :param method_name: snake_case name of the interpreter module/class.
    :returns: the interpreter class object (not an instance).
    :raises ImportError: if the module or class does not exist, or the class
        is not an :class:`Interpreter` subclass.
    """
    try:
        module_name = method_name
        # snake_case -> CamelCase: capitalize each '_'-separated word, then
        # drop the underscores.
        class_name = string.capwords(method_name, "_").replace('_', '')
        interpreter_module = import_module('.' + module_name, package="dowhy.interpreters")
        interpreter_class = getattr(interpreter_module, class_name)
        assert issubclass(interpreter_class, Interpreter)
    except (AttributeError, AssertionError, ImportError) as err:
        # Chain the underlying error so the real cause (missing module,
        # missing attribute, wrong base class) stays visible in tracebacks.
        raise ImportError('{} is not an existing interpreter.'.format(method_name)) from err
    return interpreter_class
| 274 |
2,073 | <filename>activemq-unit-tests/src/test/java/org/apache/activemq/spring/ActiveMQConnectionFactoryFactoryBeanTest.java
/**
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.spring;
import java.util.Arrays;
import junit.framework.TestCase;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Tests that {@link ActiveMQConnectionFactoryFactoryBean} assembles the
 * expected failover broker URL from its individual properties (host/port
 * list, inactivity timeout, TCP properties, and failover properties), and
 * that the produced connection factory carries the same URL.
 */
public class ActiveMQConnectionFactoryFactoryBeanTest extends TestCase {

    private static final transient Logger LOG = LoggerFactory.getLogger(ActiveMQConnectionFactoryFactoryBeanTest.class);

    // Factory under test; recreated for each test in setUp().
    private ActiveMQConnectionFactoryFactoryBean factory;

    public void testSingleTcpURL() throws Exception {
        factory.setTcpHostAndPort("tcp://localhost:61616");
        assertCreatedURL("failover:(tcp://localhost:61616)");
    }

    public void testSingleTcpURLWithInactivityTimeout() throws Exception {
        factory.setTcpHostAndPort("tcp://localhost:61616");
        factory.setMaxInactivityDuration(60000L);
        assertCreatedURL("failover:(tcp://localhost:61616?wireFormat.maxInactivityDuration=60000)");
    }

    public void testSingleTcpURLWithInactivityTimeoutAndTcpNoDelay() throws Exception {
        factory.setTcpHostAndPort("tcp://localhost:61616");
        factory.setMaxInactivityDuration(50000L);
        factory.setTcpProperties("tcpNoDelayEnabled=true");
        assertCreatedURL("failover:(tcp://localhost:61616?wireFormat.maxInactivityDuration=50000&tcpNoDelayEnabled=true)");
    }

    public void testSingleTcpURLWithInactivityTimeoutAndMaxReconnectDelay() throws Exception {
        factory.setTcpHostAndPort("tcp://localhost:61616");
        factory.setMaxInactivityDuration(60000L);
        factory.setMaxReconnectDelay(50000L);
        assertCreatedURL("failover:(tcp://localhost:61616?wireFormat.maxInactivityDuration=60000)?maxReconnectDelay=50000");
    }

    public void testSingleTcpURLWithInactivityTimeoutAndMaxReconnectDelayAndFailoverProperty() throws Exception {
        factory.setTcpHostAndPort("tcp://localhost:61616");
        factory.setMaxInactivityDuration(40000L);
        factory.setMaxReconnectDelay(30000L);
        factory.setFailoverProperties("useExponentialBackOff=false");
        assertCreatedURL("failover:(tcp://localhost:61616?wireFormat.maxInactivityDuration=40000)?maxReconnectDelay=30000&useExponentialBackOff=false");
    }

    // The inactivity timeout must be applied to every member of a multi-host
    // list, while failover-level options appear only once after the list.
    public void testMultipleTcpURLsWithInactivityTimeoutAndMaxReconnectDelayAndFailoverProperty() throws Exception {
        factory.setTcpHostAndPorts(Arrays.asList(new String[] {"tcp://localhost:61618", "tcp://foo:61619"}));
        factory.setMaxInactivityDuration(40000L);
        factory.setMaxReconnectDelay(30000L);
        factory.setFailoverProperties("useExponentialBackOff=false");
        assertCreatedURL("failover:(tcp://localhost:61618?wireFormat.maxInactivityDuration=40000,tcp://foo:61619?wireFormat.maxInactivityDuration=40000)?maxReconnectDelay=30000&useExponentialBackOff=false");
    }

    /**
     * Asserts that the factory generates exactly {@code expectedURL} and that
     * the object it creates is an ActiveMQConnectionFactory configured with
     * that same broker URL.
     */
    protected void assertCreatedURL(String expectedURL) throws Exception {
        String url = factory.getBrokerURL();
        LOG.debug("Generated URL: " + url);
        assertEquals("URL", expectedURL, url);
        Object value = factory.getObject();
        assertTrue("Value should be an ActiveMQConnectionFactory", value instanceof ActiveMQConnectionFactory);
        ActiveMQConnectionFactory connectionFactory = (ActiveMQConnectionFactory) value;
        String brokerURL = connectionFactory.getBrokerURL();
        assertEquals("brokerURL", expectedURL, brokerURL);
    }

    @Override
    protected void setUp() throws Exception {
        factory = new ActiveMQConnectionFactoryFactoryBean();
    }
}
| 1,443 |
315 | #!/illumina/development/haplocompare/hc-virtualenv/bin/python
import sys
import re
GT_SPLITTER = re.compile(r'[\/\|]')  # split a genotype field by '/' or '|'


def _has_nonref_genotype(split_line, n_alt):
    """Return True if any sample genotype calls the last (<NON_REF>) allele.

    :param split_line: a VCF record already split on tabs.
    :param n_alt: number of ALT alleles; the <NON_REF> allele is the last
        one, so a genotype referencing it has allele index equal to n_alt.
    """
    for sample_column in range(9, len(split_line)):  # sample columns start at index 9
        gt_field = split_line[sample_column].split(':')[0]  # GT is the first FORMAT sub-field
        for gt in GT_SPLITTER.split(gt_field):
            if gt != '.' and int(gt) == n_alt:
                return True
    return False


def fast_nonref_remover(input_stream, output_stream):
    """
    Copies each line of :param:`input_stream` to :param:`output_stream`
    unless that line describes a variant where any of the sample genotypes
    include a <NON_REF> allele.

    Header lines (starting with '#') and blank lines are copied unchanged.
    (The original crashed with IndexError on blank lines.)
    """
    for line in input_stream:
        if line.strip() and not line.startswith('#'):
            split_line = line.split('\t')
            split_alt = split_line[4].split(',')  # ALT column
            # gVCF convention: <NON_REF>, when present, is the last ALT allele.
            if split_alt[-1] == '<NON_REF>' and _has_nonref_genotype(split_line, len(split_alt)):
                continue  # drop this variant
        output_stream.write(line)
if __name__ == '__main__':
    # Stream-filter stdin to stdout so the script can sit in a shell pipeline.
    fast_nonref_remover(sys.stdin, sys.stdout)
8,805 | /*-----------------------------------------------------------------------------+
Copyright (c) 2010-2011: <NAME>
+------------------------------------------------------------------------------+
Distributed under the Boost Software License, Version 1.0.
(See accompanying file LICENCE.txt or copy at
http://www.boost.org/LICENSE_1_0.txt)
+-----------------------------------------------------------------------------*/
#ifndef BOOST_ICL_TYPE_TRAITS_INFINITY_HPP_JOFA_100322
#define BOOST_ICL_TYPE_TRAITS_INFINITY_HPP_JOFA_100322
#include <limits>  // std::numeric_limits is used throughout this header
#include <string>
#include <boost/static_assert.hpp>
#include <boost/icl/type_traits/is_numeric.hpp>
#include <boost/icl/type_traits/rep_type_of.hpp>
#include <boost/icl/type_traits/size_type_of.hpp>
#include <boost/mpl/and.hpp>
#include <boost/mpl/if.hpp>
namespace boost{ namespace icl
{
// Trait: true iff Type is numeric and its std::numeric_limits specialization
// provides a genuine infinity representation (e.g. IEEE float/double).
template<class Type> struct has_std_infinity
{
    typedef has_std_infinity type;
    BOOST_STATIC_CONSTANT(bool,
        value = ( is_numeric<Type>::value
               && std::numeric_limits<Type>::has_infinity
                )
        );
};
// Trait: true iff Type is numeric but has no genuine infinity (e.g. the
// integral types); for these, numeric_limits<Type>::max() acts as the
// surrogate infinity in the dispatch below.
template<class Type> struct has_max_infinity
{
    typedef has_max_infinity type;
    BOOST_STATIC_CONSTANT(bool,
        value = ( is_numeric<Type>::value
               && ! std::numeric_limits<Type>::has_infinity
                )
        );
};
//------------------------------------------------------------------------------
// Compile-time dispatch on (has_std_inf, has_std_max) choosing how "infinity"
// is represented for a numeric Type.
template <class Type, bool has_std_inf=false, bool has_std_max=false>
struct get_numeric_infinity;

// Type has a real infinity (floating point): use numeric_limits::infinity().
template <class Type, bool has_std_max>
struct get_numeric_infinity<Type, true, has_std_max>
{
    typedef get_numeric_infinity type;
    static Type value()
    {
        return (std::numeric_limits<Type>::infinity)();
    }
};

// No real infinity but a numeric maximum: use numeric_limits::max() instead.
template <class Type>
struct get_numeric_infinity<Type, false, true>
{
    typedef get_numeric_infinity type;
    static Type value()
    {
        return (std::numeric_limits<Type>::max)();
    }
};

// Neither available: fall back to a default-constructed value.
template <class Type>
struct get_numeric_infinity<Type, false, false>
{
    typedef get_numeric_infinity type;
    static Type value()
    {
        return Type();
    }
};
// Front end: computes the two dispatch flags from the traits above and
// forwards to the matching get_numeric_infinity specialization.
template <class Type>
struct numeric_infinity
{
    typedef numeric_infinity type;
    static Type value()
    {
        return get_numeric_infinity< Type
                                   , has_std_infinity<Type>::value
                                   , has_max_infinity<Type>::value >::value();
    }
};
//------------------------------------------------------------------------------
// Dispatch on (is_numeric, has rep type, has size_type, has difference_type),
// tried in that order, selecting an "infinity" value for arbitrary (possibly
// non-numeric) types.
template<class Type, bool has_numeric_inf, bool has_repr_inf, bool has_size, bool has_diff>
struct get_infinity;

// Numeric type: delegate to numeric_infinity above.
template<class Type, bool has_repr_inf, bool has_size, bool has_diff>
struct get_infinity<Type, true, has_repr_inf, has_size, has_diff>
{
    typedef get_infinity type;
    static Type value()
    {
        return numeric_infinity<Type>::value();
    }
};

// Non-numeric with a representation type: construct from rep's infinity.
template<class Type, bool has_size, bool has_diff>
struct get_infinity<Type, false, true, has_size, has_diff>
{
    typedef get_infinity type;
    static Type value()
    {
        return Type(numeric_infinity<typename Type::rep>::value());
    }
};

// Has a nested size_type: construct from size_type's infinity.
template<class Type, bool has_diff>
struct get_infinity<Type, false, false, true, has_diff>
{
    typedef get_infinity type;
    typedef typename Type::size_type size_type;
    static Type value()
    {
        return identity_value_helper_comment_free(); // (see note below)
    }
};

// NOTE(review): the helper call above is a placeholder in this comment-only
// pass and must remain the original expression; kept verbatim:
// Public trait: the value the ICL uses as "infinity" for Type, chosen by the
// get_infinity dispatch above.
template <class Type> struct infinity
{
    typedef infinity type;
    static Type value()
    {
        return
            get_infinity< Type
                        , is_numeric<Type>::value
                        , has_rep_type<Type>::value
                        , has_size_type<Type>::value
                        , has_difference_type<Type>::value
                        >::value();
    }
};
// Specialization: strings have no meaningful infinity; the empty string is
// used as the designated value.
template <>
struct infinity<std::string>
{
    typedef infinity type;
    static std::string value()
    {
        return std::string();
    }
};
}} // namespace boost icl
#endif
| 2,022 |
743 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.guacamole.net.auth.credentials;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.guacamole.form.Field;
/**
 * A fully-valid set of credentials and associated values. Each instance of
 * this object should describe a full set of parameter name/value pairs which
 * can be used to authenticate successfully, even if that success depends on
 * factors not described by this object.
 */
public class UserCredentials extends CredentialsInfo {

    /**
     * All fields required for valid credentials, as a map of field name to
     * the corresponding correct value.
     */
    private Map<String, String> values;

    /**
     * Creates a new UserCredentials object which requires the given fields and
     * values.
     *
     * @param fields
     *     The fields to require.
     *
     * @param values
     *     The values required for each field, as a map of field name to
     *     correct value.
     */
    public UserCredentials(Collection<Field> fields, Map<String, String> values) {
        super(fields);
        this.values = values;
    }

    /**
     * Creates a new UserCredentials object which requires fields described by
     * the given CredentialsInfo. The value required for each field in the
     * CredentialsInfo is defined in the given Map.
     *
     * @param info
     *     The CredentialsInfo object describing the fields to require.
     *
     * @param values
     *     The values required for each field, as a map of field name to
     *     correct value.
     */
    public UserCredentials(CredentialsInfo info, Map<String, String> values) {
        this(info.getFields(), values);
    }

    /**
     * Creates a new UserCredentials object which requires fields described by
     * the given CredentialsInfo but does not yet have any defined values.
     *
     * @param info
     *     The CredentialsInfo object describing the fields to require.
     */
    public UserCredentials(CredentialsInfo info) {
        this(info, new HashMap<String, String>());
    }

    /**
     * Creates a new UserCredentials object which requires the given fields but
     * does not yet have any defined values.
     *
     * @param fields
     *     The fields to require.
     */
    public UserCredentials(Collection<Field> fields) {
        this(fields, new HashMap<String, String>());
    }

    /**
     * Returns a map of field names to values which backs this UserCredentials
     * object. Modifications to the returned map will directly affect the
     * associated name/value pairs.
     *
     * @return
     *     A map of field names to their corresponding values which backs this
     *     UserCredentials object.
     */
    public Map<String, String> getValues() {
        return values;
    }

    /**
     * Replaces the map backing this UserCredentials object with the given map.
     * All field name/value pairs described by the original map are replaced by
     * the name/value pairs in the given map.
     *
     * @param values
     *     The map of field names to their corresponding values which should be
     *     used to back this UserCredentials object.
     */
    public void setValues(Map<String, String> values) {
        this.values = values;
    }

    /**
     * Returns the value defined by this UserCredentials object for the field
     * having the given name.
     *
     * @param name
     *     The name of the field whose value should be returned.
     *
     * @return
     *     The value of the field having the given name, or null if no value is
     *     defined for that field.
     */
    public String getValue(String name) {
        return values.get(name);
    }

    /**
     * Returns the value defined by this UserCredentials object for the given
     * field.
     *
     * @param field
     *     The field whose value should be returned.
     *
     * @return
     *     The value of the given field, or null if no value is defined for
     *     that field.
     */
    public String getValue(Field field) {
        return getValue(field.getName());
    }

    /**
     * Sets the value of the field having the given name. Any existing value
     * for that field is replaced.
     *
     * @param name
     *     The name of the field whose value should be assigned.
     *
     * @param value
     *     The value to assign to the field having the given name.
     *
     * @return
     *     The previous value of the field, or null if the value of the field
     *     was not previously defined.
     */
    public String setValue(String name, String value) {
        return values.put(name, value);
    }

    /**
     * Sets the value of the given field. Any existing value for that field is
     * replaced.
     *
     * @param field
     *     The field whose value should be assigned.
     *
     * @param value
     *     The value to assign to the given field.
     *
     * @return
     *     The previous value of the field, or null if the value of the field
     *     was not previously defined.
     */
    public String setValue(Field field, String value) {
        return setValue(field.getName(), value);
    }

    /**
     * Removes (undefines) the value of the field having the given name,
     * returning its previous value. If the field value was not defined, this
     * function has no effect, and null is returned.
     *
     * @param name
     *     The name of the field whose value should be removed.
     *
     * @return
     *     The previous value of the field, or null if the value of the field
     *     was not previously defined.
     */
    public String removeValue(String name) {
        return values.remove(name);
    }

    /**
     * Removes (undefines) the value of the given field returning its previous
     * value. If the field value was not defined, this function has no effect,
     * and null is returned.
     *
     * @param field
     *     The field whose value should be removed.
     *
     * @return
     *     The previous value of the field, or null if the value of the field
     *     was not previously defined.
     */
    public String removeValue(Field field) {
        return removeValue(field.getName());
    }

}
| 2,430 |
764 | {"data":{"id":16,"name":"<NAME>","publish_datetime":"23/10/2020 12:53 PM","content":"<p>Et est, perferendis .</p>","meta_title":"Hic vero eius expedi","cannonical_link":"https://www.google.com","meta_keywords":"Qui aspernatur velit","meta_description":"<p>Quae lorem rem in ea.</p>","status":2,"display_status":"Draft","categories":[{"id":2,"name":"quia ut optio","status":true,"display_status":"Active","created_at":"2020-10-15 10:35:08","created_by":"<NAME>","updated_at":"2020-10-15 10:35:08","updated_by":null},{"id":6,"name":"fugit impedit quia","status":true,"display_status":"Active","created_at":"2020-10-15 10:35:08","created_by":"<NAME>","updated_at":"2020-10-15 10:35:08","updated_by":null}],"tags":[{"id":2,"name":"sed","status":true,"display_status":"Active","created_at":"2020-10-15 10:35:08","created_by":"<NAME>","updated_at":"2020-10-15 10:35:08","updated_by":null}],"created_at":"2020-10-16","created_by":"<NAME>","updated_at":"2020-10-16 07:42:41","updated_by":"<NAME>"}} | 361 |
369 | /*
* Copyright © 2017 <NAME>, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package io.cdap.cdap.api;
import io.cdap.cdap.api.schedule.TriggeringScheduleInfo;
import javax.annotation.Nullable;
/**
 * This interface provides the context for accessing scheduling information of
 * this program. Implemented by program contexts for programs that may be
 * launched by a schedule.
 */
public interface SchedulableProgramContext {

  /**
   * @return The information of the schedule that launches this program.
   *         Return {@code null} if the program is not launched by a schedule.
   */
  @Nullable
  TriggeringScheduleInfo getTriggeringScheduleInfo();
}
| 301 |
5,116 | /*
* Copyright 2018 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef REALM_JNI_UTIL_JNI_UTILS_HPP
#define REALM_JNI_UTIL_JNI_UTILS_HPP
#include <jni.h>
#include <vector>
namespace realm {
namespace jni_util {
// Util functions for JNI.
// Static holder that caches the JavaVM pointer and JNI version so native code
// can obtain a JNIEnv for the current thread. Construction is private;
// lifetime is managed through initialize()/release().
class JniUtils {
public:
    ~JniUtils()
    {
    }

    // Call this only once in JNI_OnLoad.
    static void initialize(JavaVM* vm, jint vm_version) noexcept;

    // Call this in JNI_OnUnload.
    static void release();

    // When attach_if_needed is false, returns the JNIEnv if there is one attached to this thread. Assert if there is
    // none. When attach_if_needed is true, try to attach and return a JNIEnv if necessary.
    static JNIEnv* get_env(bool attach_if_needed = false);

    // Detach the current thread from the JVM. Only required for C++ threads that were attached in the first place.
    // Failing to do so is a resource leak.
    static void detach_current_thread();

private:
    JniUtils(JavaVM* vm, jint vm_version) noexcept
        : m_vm(vm)
        , m_vm_version(vm_version)
    {
    }

    JavaVM* m_vm;      // cached JavaVM pointer, set at initialization
    jint m_vm_version; // JNI version supplied at initialization
};
} // namespace realm
} // namespace jni_util
#endif // REALM_JNI_UTIL_JNI_UTILS_HPP
| 597 |
2,338 | <gh_stars>1000+
# Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
# See https://llvm.org/LICENSE.txt for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
"""Models DAGs of scalar math expressions.
Used for generating region bodies at the "math" level where they are still type
polymorphic. This is modeled to be polymorphic by attribute name for interop
with serialization schemes that are just plain-old-dicts.
These classes are typically not user accessed and are created as a by-product
of interpreting a comprehension DSL and model the operations to perform in the
op body. The class hierarchy is laid out to map well to a form of YAML that
can be easily consumed from the C++ side, not necessarily for ergonomics.
"""
from typing import Optional, Sequence
from .yaml_helper import *
from .types import *
__all__ = [
"ScalarAssign",
"ScalarApplyFn",
"ScalarArg",
"ScalarConst",
"ScalarIndex",
"ScalarExpression",
"ScalarSymbolicCast",
]
class ScalarApplyFn:
  """A type of ScalarExpression that applies a named function to operands."""

  def __init__(self, fn_name: str, *operands: "ScalarExpression"):
    self.fn_name = fn_name
    self.operands = operands

  def expr(self) -> "ScalarExpression":
    """Wrap this node in a ScalarExpression union."""
    return ScalarExpression(scalar_apply=self)

  def __repr__(self):
    # str() each operand: the original joined operands directly, which raised
    # TypeError whenever operands were (as documented) ScalarExpression
    # instances rather than strings.
    return f"ScalarApplyFn<{self.fn_name}>({', '.join(map(str, self.operands))})"
class ScalarArg:
  """A type of ScalarExpression that references a named argument."""

  def __init__(self, arg: str):
    self.arg = arg

  def expr(self) -> "ScalarExpression":
    """Wrap this node in a ScalarExpression union."""
    return ScalarExpression(scalar_arg=self)

  def __repr__(self):
    # The original emitted "(ScalarArg(...)" with unbalanced parentheses.
    return f"ScalarArg({self.arg})"
class ScalarConst:
  """A type of ScalarExpression representing a constant."""

  def __init__(self, value: str):
    self.value = value

  def expr(self) -> "ScalarExpression":
    """Wrap this node in a ScalarExpression union."""
    return ScalarExpression(scalar_const=self)

  def __repr__(self):
    # The original emitted "(ScalarConst(...)" with unbalanced parentheses.
    return f"ScalarConst({self.value})"
class ScalarIndex:
  """A type of ScalarExpression accessing an iteration index."""

  def __init__(self, dim: int):
    self.dim = dim

  def expr(self) -> "ScalarExpression":
    """Wrap this node in a ScalarExpression union."""
    return ScalarExpression(scalar_index=self)

  def __repr__(self):
    # The original emitted "(ScalarIndex(...)" with unbalanced parentheses.
    return f"ScalarIndex({self.dim})"
class ScalarSymbolicCast:
  """A type of ScalarExpression that symbolically casts an operand to a TypeVar."""

  def __init__(self, to_type: TypeVar, operand: "ScalarExpression"):
    # Target symbolic type and the single expression being cast.
    self.to_type = to_type
    self.operand = operand

  def expr(self) -> "ScalarExpression":
    """Wrap this cast in a ScalarExpression union."""
    wrapped = ScalarExpression(symbolic_cast=self)
    return wrapped

  def __repr__(self):
    return "ScalarSymbolicCast({}, {})".format(self.to_type, self.operand)
class ScalarExpression(YAMLObject):
  """An expression on scalar values.

  Tagged union over the node types; exactly one member is set. Can be one of:
  - ScalarApplyFn
  - ScalarArg
  - ScalarConst
  - ScalarIndex
  - ScalarSymbolicCast
  """
  yaml_tag = "!ScalarExpression"

  def __init__(self,
               scalar_apply: Optional[ScalarApplyFn] = None,
               scalar_arg: Optional[ScalarArg] = None,
               scalar_const: Optional[ScalarConst] = None,
               scalar_index: Optional[ScalarIndex] = None,
               symbolic_cast: Optional[ScalarSymbolicCast] = None):
    # Enforce the union invariant by counting truthy arguments; object
    # instances are always truthy, so the sum equals the number supplied.
    if (bool(scalar_apply) + bool(scalar_arg) + bool(scalar_const) +
        bool(scalar_index) + bool(symbolic_cast)) != 1:
      raise ValueError("One of 'scalar_apply', 'scalar_arg', 'scalar_const', "
                       "'scalar_index', 'symbolic_cast' must be specified")
    self.scalar_apply = scalar_apply
    self.scalar_arg = scalar_arg
    self.scalar_const = scalar_const
    self.scalar_index = scalar_index
    self.symbolic_cast = symbolic_cast

  def to_yaml_custom_dict(self):
    # Serialize whichever member is set, mirroring the YAML schema consumed
    # on the C++ side.
    if self.scalar_apply:
      return dict(
          scalar_apply=dict(
              fn_name=self.scalar_apply.fn_name,
              operands=list(self.scalar_apply.operands),
          ))
    elif self.scalar_arg:
      return dict(scalar_arg=self.scalar_arg.arg)
    elif self.scalar_const:
      return dict(scalar_const=self.scalar_const.value)
    elif self.scalar_index:
      return dict(scalar_index=self.scalar_index.dim)
    elif self.symbolic_cast:
      # Note that even though operands must be arity 1, we write it the
      # same way as for apply because it allows handling code to be more
      # generic vs having a special form.
      return dict(
          symbolic_cast=dict(
              type_var=self.symbolic_cast.to_type.name,
              operands=[self.symbolic_cast.operand]))
    else:
      raise ValueError(f"Unexpected ScalarExpression type: {self}")
class ScalarAssign(YAMLObject):
  """An assignment to a named argument (LHS of a comprehension)."""
  yaml_tag = "!ScalarAssign"

  def __init__(self, arg: str, value: ScalarExpression):
    # Name of the output argument and the ScalarExpression assigned to it.
    self.arg = arg
    self.value = value

  def to_yaml_custom_dict(self):
    return dict(arg=self.arg, value=self.value)

  def __repr__(self):
    return f"ScalarAssign({self.arg}, {self.value})"
| 1,995 |
575 | <reponame>Ron423c/chromium<filename>third_party/blink/renderer/controller/memory_usage_monitor.cc
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/controller/memory_usage_monitor.h"
#include "third_party/blink/renderer/platform/bindings/v8_per_isolate_data.h"
#include "third_party/blink/renderer/platform/scheduler/public/thread.h"
#include "third_party/blink/renderer/platform/scheduler/public/thread_scheduler.h"
#include "third_party/blink/renderer/platform/wtf/allocator/partitions.h"
namespace blink {
namespace {
constexpr base::TimeDelta kPingInterval = base::TimeDelta::FromSeconds(1);
}
MemoryUsageMonitor::MemoryUsageMonitor() {
  // Use the non-waking task runner so the periodic ping never wakes the
  // system from idle just to sample memory.
  timer_.SetTaskRunner(
      Thread::MainThread()->Scheduler()->NonWakingTaskRunner());
}

// Test-only constructor: injects a task runner and tick clock so tests can
// drive the timer deterministically.
MemoryUsageMonitor::MemoryUsageMonitor(
    scoped_refptr<base::SingleThreadTaskRunner> task_runner_for_testing,
    const base::TickClock* clock_for_testing)
    : timer_(clock_for_testing) {
  timer_.SetTaskRunner(task_runner_for_testing);
}
void MemoryUsageMonitor::AddObserver(Observer* observer) {
  // Monitoring starts lazily when the first observer registers.
  StartMonitoringIfNeeded();
  observers_.AddObserver(observer);
}

void MemoryUsageMonitor::RemoveObserver(Observer* observer) {
  // The timer is not stopped here; TimerFired() stops it once it sees the
  // observer list is empty.
  observers_.RemoveObserver(observer);
}

bool MemoryUsageMonitor::HasObserver(Observer* observer) {
  return observers_.HasObserver(observer);
}
void MemoryUsageMonitor::StartMonitoringIfNeeded() {
  // Idempotent: calling while the timer already runs is a no-op.
  if (timer_.IsRunning())
    return;
  // Repeating ping at kPingInterval; Unretained is safe only while this
  // object outlives the timer it owns.
  timer_.Start(FROM_HERE, kPingInterval,
               WTF::BindRepeating(&MemoryUsageMonitor::TimerFired,
                                  WTF::Unretained(this)));
}

void MemoryUsageMonitor::StopMonitoring() {
  timer_.Stop();
}
// Aggregates a one-shot snapshot from V8, Blink and the process.
// GetProcessMemoryUsage() is not defined in this file — presumably
// platform-specific; confirm in the per-OS implementation.
MemoryUsage MemoryUsageMonitor::GetCurrentMemoryUsage() {
  MemoryUsage usage;
  GetV8MemoryUsage(usage);
  GetBlinkMemoryUsage(usage);
  GetProcessMemoryUsage(usage);
  return usage;
}

void MemoryUsageMonitor::GetV8MemoryUsage(MemoryUsage& usage) {
  v8::Isolate* isolate = V8PerIsolateData::MainThreadIsolate();
  DCHECK(isolate);
  v8::HeapStatistics heap_statistics;
  isolate->GetHeapStatistics(&heap_statistics);
  // TODO: Add memory usage for worker threads.
  // Main-thread isolate only: managed heap plus V8's malloc'd memory.
  usage.v8_bytes =
      heap_statistics.total_heap_size() + heap_statistics.malloced_memory();
}

void MemoryUsageMonitor::GetBlinkMemoryUsage(MemoryUsage& usage) {
  // Blink GC (Oilpan) heap plus PartitionAlloc committed pages.
  usage.blink_gc_bytes = ProcessHeap::TotalAllocatedObjectSize();
  usage.partition_alloc_bytes = WTF::Partitions::TotalSizeOfCommittedPages();
}
void MemoryUsageMonitor::TimerFired() {
  // Sample once per tick and fan the snapshot out to every observer.
  MemoryUsage usage = GetCurrentMemoryUsage();
  for (auto& observer : observers_)
    observer.OnMemoryPing(usage);
  // Shut the timer down once the last observer has gone away.
  if (observers_.empty())
    StopMonitoring();
}
} // namespace blink
| 948 |
474 | <reponame>bryancall/nghttp3
/*
* nghttp3
*
* Copyright (c) 2019 nghttp3 contributors
* Copyright (c) 2017 ngtcp2 contributors
* Copyright (c) 2012 nghttp2 contributors
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
#include "nghttp3_str.h"
#include <string.h>
#include <assert.h>
/* Copies |n| bytes from |src| into |dest| and returns a pointer just past
   the last byte written, which is convenient for chained buffer writes. */
uint8_t *nghttp3_cpymem(uint8_t *dest, const uint8_t *src, size_t n) {
  memcpy(dest, src, n);
  dest += n;
  return dest;
}
/* Generated by gendowncasetbl.py */
/* 256-entry lookup table mapping each byte to its ASCII-lowercased value:
   'A'..'Z' (0x41..0x5a) map to 'a'..'z'; every other byte, including values
   >= 0x80, maps to itself. Table lookup keeps the mapping branch-free and
   locale-independent. */
static const uint8_t DOWNCASE_TBL[] = {
    0 /* NUL */,   1 /* SOH */,   2 /* STX */,   3 /* ETX */,
    4 /* EOT */,   5 /* ENQ */,   6 /* ACK */,   7 /* BEL */,
    8 /* BS */,    9 /* HT */,    10 /* LF */,   11 /* VT */,
    12 /* FF */,   13 /* CR */,   14 /* SO */,   15 /* SI */,
    16 /* DLE */,  17 /* DC1 */,  18 /* DC2 */,  19 /* DC3 */,
    20 /* DC4 */,  21 /* NAK */,  22 /* SYN */,  23 /* ETB */,
    24 /* CAN */,  25 /* EM */,   26 /* SUB */,  27 /* ESC */,
    28 /* FS */,   29 /* GS */,   30 /* RS */,   31 /* US */,
    32 /* SPC */,  33 /* ! */,    34 /* " */,    35 /* # */,
    36 /* $ */,    37 /* % */,    38 /* & */,    39 /* ' */,
    40 /* ( */,    41 /* ) */,    42 /* * */,    43 /* + */,
    44 /* , */,    45 /* - */,    46 /* . */,    47 /* / */,
    48 /* 0 */,    49 /* 1 */,    50 /* 2 */,    51 /* 3 */,
    52 /* 4 */,    53 /* 5 */,    54 /* 6 */,    55 /* 7 */,
    56 /* 8 */,    57 /* 9 */,    58 /* : */,    59 /* ; */,
    60 /* < */,    61 /* = */,    62 /* > */,    63 /* ? */,
    64 /* @ */,    97 /* A */,    98 /* B */,    99 /* C */,
    100 /* D */,   101 /* E */,   102 /* F */,   103 /* G */,
    104 /* H */,   105 /* I */,   106 /* J */,   107 /* K */,
    108 /* L */,   109 /* M */,   110 /* N */,   111 /* O */,
    112 /* P */,   113 /* Q */,   114 /* R */,   115 /* S */,
    116 /* T */,   117 /* U */,   118 /* V */,   119 /* W */,
    120 /* X */,   121 /* Y */,   122 /* Z */,   91 /* [ */,
    92 /* \ */,    93 /* ] */,    94 /* ^ */,    95 /* _ */,
    96 /* ` */,    97 /* a */,    98 /* b */,    99 /* c */,
    100 /* d */,   101 /* e */,   102 /* f */,   103 /* g */,
    104 /* h */,   105 /* i */,   106 /* j */,   107 /* k */,
    108 /* l */,   109 /* m */,   110 /* n */,   111 /* o */,
    112 /* p */,   113 /* q */,   114 /* r */,   115 /* s */,
    116 /* t */,   117 /* u */,   118 /* v */,   119 /* w */,
    120 /* x */,   121 /* y */,   122 /* z */,   123 /* { */,
    124 /* | */,   125 /* } */,   126 /* ~ */,   127 /* DEL */,
    128 /* 0x80 */, 129 /* 0x81 */, 130 /* 0x82 */, 131 /* 0x83 */,
    132 /* 0x84 */, 133 /* 0x85 */, 134 /* 0x86 */, 135 /* 0x87 */,
    136 /* 0x88 */, 137 /* 0x89 */, 138 /* 0x8a */, 139 /* 0x8b */,
    140 /* 0x8c */, 141 /* 0x8d */, 142 /* 0x8e */, 143 /* 0x8f */,
    144 /* 0x90 */, 145 /* 0x91 */, 146 /* 0x92 */, 147 /* 0x93 */,
    148 /* 0x94 */, 149 /* 0x95 */, 150 /* 0x96 */, 151 /* 0x97 */,
    152 /* 0x98 */, 153 /* 0x99 */, 154 /* 0x9a */, 155 /* 0x9b */,
    156 /* 0x9c */, 157 /* 0x9d */, 158 /* 0x9e */, 159 /* 0x9f */,
    160 /* 0xa0 */, 161 /* 0xa1 */, 162 /* 0xa2 */, 163 /* 0xa3 */,
    164 /* 0xa4 */, 165 /* 0xa5 */, 166 /* 0xa6 */, 167 /* 0xa7 */,
    168 /* 0xa8 */, 169 /* 0xa9 */, 170 /* 0xaa */, 171 /* 0xab */,
    172 /* 0xac */, 173 /* 0xad */, 174 /* 0xae */, 175 /* 0xaf */,
    176 /* 0xb0 */, 177 /* 0xb1 */, 178 /* 0xb2 */, 179 /* 0xb3 */,
    180 /* 0xb4 */, 181 /* 0xb5 */, 182 /* 0xb6 */, 183 /* 0xb7 */,
    184 /* 0xb8 */, 185 /* 0xb9 */, 186 /* 0xba */, 187 /* 0xbb */,
    188 /* 0xbc */, 189 /* 0xbd */, 190 /* 0xbe */, 191 /* 0xbf */,
    192 /* 0xc0 */, 193 /* 0xc1 */, 194 /* 0xc2 */, 195 /* 0xc3 */,
    196 /* 0xc4 */, 197 /* 0xc5 */, 198 /* 0xc6 */, 199 /* 0xc7 */,
    200 /* 0xc8 */, 201 /* 0xc9 */, 202 /* 0xca */, 203 /* 0xcb */,
    204 /* 0xcc */, 205 /* 0xcd */, 206 /* 0xce */, 207 /* 0xcf */,
    208 /* 0xd0 */, 209 /* 0xd1 */, 210 /* 0xd2 */, 211 /* 0xd3 */,
    212 /* 0xd4 */, 213 /* 0xd5 */, 214 /* 0xd6 */, 215 /* 0xd7 */,
    216 /* 0xd8 */, 217 /* 0xd9 */, 218 /* 0xda */, 219 /* 0xdb */,
    220 /* 0xdc */, 221 /* 0xdd */, 222 /* 0xde */, 223 /* 0xdf */,
    224 /* 0xe0 */, 225 /* 0xe1 */, 226 /* 0xe2 */, 227 /* 0xe3 */,
    228 /* 0xe4 */, 229 /* 0xe5 */, 230 /* 0xe6 */, 231 /* 0xe7 */,
    232 /* 0xe8 */, 233 /* 0xe9 */, 234 /* 0xea */, 235 /* 0xeb */,
    236 /* 0xec */, 237 /* 0xed */, 238 /* 0xee */, 239 /* 0xef */,
    240 /* 0xf0 */, 241 /* 0xf1 */, 242 /* 0xf2 */, 243 /* 0xf3 */,
    244 /* 0xf4 */, 245 /* 0xf5 */, 246 /* 0xf6 */, 247 /* 0xf7 */,
    248 /* 0xf8 */, 249 /* 0xf9 */, 250 /* 0xfa */, 251 /* 0xfb */,
    252 /* 0xfc */, 253 /* 0xfd */, 254 /* 0xfe */, 255 /* 0xff */,
};
/* Lowercase |len| bytes of |s| in place via the precomputed DOWNCASE_TBL
   lookup table; non-alphabetic bytes map to themselves. */
void nghttp3_downcase(uint8_t *s, size_t len) {
  size_t remaining = len;

  /* Bytes are independent, so processing back-to-front is equivalent. */
  while (remaining > 0) {
    --remaining;
    s[remaining] = DOWNCASE_TBL[s[remaining]];
  }
}
| 2,746 |
2,112 | /*
The source code contained in this file is based on the original code by
<NAME> (https://github.com/no1msd/mstch). The original license by Daniel
Sipka can be read below:
The MIT License (MIT)
Copyright (c) 2015 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#pragma once
#include <sstream>
#include <boost/variant/static_visitor.hpp>
#include <thrift/compiler/detail/mustache/mstch.h>
#include <thrift/compiler/detail/mustache/render_context.h>
#include <thrift/compiler/detail/mustache/utils.h>
namespace apache {
namespace thrift {
namespace mstch {
/// Visitor that converts a single mustache node to its textual rendering.
class render_node : public boost::static_visitor<std::string> {
 public:
  render_node(render_context& ctx) : ctx_(ctx) {}

  /// Nodes with no printable representation render as the empty string.
  template <class T>
  std::string operator()(const T&) const {
    return "";
  }

  std::string operator()(const bool& flag) const {
    return flag ? "true" : "false";
  }

  std::string operator()(const int& number) const {
    return std::to_string(number);
  }

  std::string operator()(const double& number) const {
    // Stream insertion gives the default (shortest-reasonable) formatting.
    std::ostringstream out;
    out << number;
    return out.str();
  }

  std::string operator()(const std::string& text) const { return text; }

  /// A lambda is invoked with a nested renderer, and its output is treated
  /// as a template rendered in a freshly pushed context.
  std::string operator()(const lambda& fn) const {
    template_type expanded{
        fn([this](const node& n) { return visit(render_node(ctx_), n); })};
    return render_context::push(ctx_).render(expanded);
  }

 private:
  render_context& ctx_;
};
} // namespace mstch
} // namespace thrift
} // namespace apache
| 765 |
335 | // $Id$
# ifndef CPPAD_LOCAL_OPTIMIZE_CSUM_STACKS_HPP
# define CPPAD_LOCAL_OPTIMIZE_CSUM_STACKS_HPP
/* --------------------------------------------------------------------------
CppAD: C++ Algorithmic Differentiation: Copyright (C) 2003-16 <NAME>
CppAD is distributed under multiple licenses. This distribution is under
the terms of the
GNU General Public License Version 3.
A copy of this license is included in the COPYING file of this distribution.
Please visit http://www.coin-or.org/CppAD/ for information on other licenses.
-------------------------------------------------------------------------- */
# include <stack>
# include <cppad/local/optimize/csum_variable.hpp>
/*!
\file csum_stacks.hpp
Information about one cumulative summation operation.
*/
// BEGIN_CPPAD_LOCAL_OPTIMIZE_NAMESPACE
namespace CppAD { namespace local { namespace optimize {
/*!
Information about one cumulative summation operation.
*/
struct struct_csum_stacks {
	/// old operator indices for this cumulative summation
	std::stack<struct struct_csum_variable> op_stack;
	/// old variable indices to be added
	std::stack<size_t > add_stack;
	/// old variable indices to be subtracted
	std::stack<size_t > sub_stack;
};
} } } // END_CPPAD_LOCAL_OPTIMIZE_NAMESPACE
# endif
| 449 |
335 | {
"word": "Jacaranda",
"definitions": [
"A tropical American tree that has blue trumpet-shaped flowers, fernlike leaves, and fragrant timber."
],
"parts-of-speech": "Noun"
} | 75 |
1,652 | package com.ctrip.xpipe.redis.checker;
import com.ctrip.xpipe.api.email.EmailResponse;
import com.ctrip.xpipe.api.server.Server;
import com.ctrip.xpipe.redis.checker.alert.AlertMessageEntity;
import com.ctrip.xpipe.redis.checker.healthcheck.RedisHealthCheckInstance;
import com.ctrip.xpipe.redis.checker.model.CheckerStatus;
import com.ctrip.xpipe.redis.checker.model.HealthCheckResult;
import com.ctrip.xpipe.redis.checker.model.ProxyTunnelInfo;
import com.ctrip.xpipe.redis.core.entity.XpipeMeta;
import com.fasterxml.jackson.annotation.JsonIgnore;
import org.xml.sax.SAXException;
import java.io.IOException;
import java.util.*;
/**
* @author lishanglin
* date 2021/3/16
*/
public interface CheckerConsoleService {

    /** Fetches one partition of the XPipe meta from the given console. */
    XpipeMeta getXpipeMeta(String console, int clusterPartIndex) throws SAXException, IOException;

    /** Fetches the complete XPipe meta from the given console. */
    XpipeMeta getXpipeAllMeta(String console) throws SAXException, IOException;

    /** Lists the proxy tunnels currently known to the console. */
    List<ProxyTunnelInfo> getProxyTunnelInfos(String console);

    /** Reports this checker's liveness/status back to the console. */
    void ack(String console, CheckerStatus checkerStatus);

    /** Pushes a health-check result to the console. */
    void report(String console, HealthCheckResult result);

    boolean isClusterOnMigration(String console, String clusterId);

    /** Asks the console to record the observed role of a checked Redis instance. */
    void updateRedisRole(String console, RedisHealthCheckInstance instance, Server.SERVER_ROLE role);

    /** Clusters excluded from sentinel checks. */
    Set<String> sentinelCheckWhiteList(String console);

    /** Clusters excluded from alerting. */
    Set<String> clusterAlertWhiteList(String console);

    boolean isSentinelAutoProcess(String console);

    boolean isAlertSystemOn(String console);

    Date getClusterCreateTime(String console, String clusterId);

    Map<String, Date> loadAllClusterCreateTime(String console);

    /**
     * Value object pairing an alert message with the email-send response
     * properties, so the send outcome can be reconstructed later via
     * {@link #getEmailResponse()}.
     */
    public class AlertMessage {

        private AlertMessageEntity message;

        // Properties captured from the EmailResponse at construction time.
        private Properties properties;

        private String eventOperator;

        /** No-arg constructor for JSON deserialization. */
        public AlertMessage() {
        }

        public AlertMessage(String eventOperator, AlertMessageEntity message, EmailResponse response) {
            this.message = message;
            this.properties = response.getProperties();
            this.eventOperator = eventOperator;
        }

        public AlertMessageEntity getMessage() {
            return message;
        }

        /** Rebuilds an EmailResponse view over the stored properties; excluded from JSON. */
        @JsonIgnore
        public EmailResponse getEmailResponse() {
            return new EmailResponse() {
                @Override
                public Properties getProperties() {
                    return properties;
                }
            };
        }

        public void setMessage(AlertMessageEntity message) {
            this.message = message;
        }

        public void setProperties(Properties properties) {
            this.properties = properties;
        }

        public Properties getProperties() {
            return properties;
        }

        public void setEventOperator(String eventOperator) {
            this.eventOperator = eventOperator;
        }

        public String getEventOperator() {
            return eventOperator;
        }
    }

    /** Persists an alert event (message plus email-send outcome) on the console. */
    void recordAlert(String console, String eventOperator, AlertMessageEntity message, EmailResponse response);
}
| 1,159 |
681 | package com.fasterxml.sort.util;
import java.util.Iterator;
public class CastingIterator<T> implements Iterator<T> {
private final Iterator<Object> _it;
public CastingIterator(Iterator<Object> it) {
_it = it;
}
@Override
public boolean hasNext() {
return _it.hasNext();
}
@SuppressWarnings("unchecked")
@Override
public T next() {
return (T)_it.next();
}
@Override
public void remove() {
throw new UnsupportedOperationException();
}
} | 207 |
7,482 | <reponame>rockonedege/rt-thread
/*
* @ : Copyright (c) 2021 Phytium Information Technology, Inc.
*
* SPDX-License-Identifier: Apache-2.0.
*
* @Date: 2021-04-07 09:53:07
* @LastEditTime: 2021-04-07 13:42:30
* @Description: This files is for uart config
*
* @Modify History:
* Ver Who Date Changes
* ----- ------ -------- --------------------------------------
*/
#include "ft_uart.h"
#include "ft_parameters.h"
/* Static configuration for each on-chip UART controller: instance id, MMIO
   base address, input clock frequency, and a fourth numeric field (38..41,
   consecutive per controller — presumably the interrupt number; TODO confirm
   against the FUart_Config_t declaration in ft_uart.h). */
FUart_Config_t FUart_Config_tTable[FT_UART_NUM] = {
    {FT_UART0_ID,
     FT_UART0_BASE_ADDR,
     FT_UART0_CLK_FREQ_HZ,
     38},
    {FT_UART1_ID,
     FT_UART1_BASE_ADDR,
     FT_UART1_CLK_FREQ_HZ,
     39},
    {FT_UART2_ID,
     FT_UART2_BASE_ADDR,
     FT_UART2_CLK_FREQ_HZ,
     40},
    {FT_UART3_ID,
     FT_UART3_BASE_ADDR,
     FT_UART3_CLK_FREQ_HZ,
     41}};
| 435 |
507 | # terrascript/data/oneandone.py
import terrascript
class oneandone_instance_size(terrascript.Data):
    """Terrascript data source mapping for the 1&1 provider's
    ``oneandone_instance_size`` data source; behavior comes from
    :class:`terrascript.Data`."""
    pass

# Names exported by ``from ... import *``.
__all__ = [
    "oneandone_instance_size",
]
| 60 |
2,494 | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* pkix_pl_hashtable.h
*
* Hashtable Object Definition
*
*/
#ifndef _PKIX_PL_HASHTABLE_H
#define _PKIX_PL_HASHTABLE_H
#include "pkix_pl_common.h"
#ifdef __cplusplus
extern "C" {
#endif
/* see source file for function documentation */
PKIX_Error *
pkix_pl_HashTable_RegisterSelf(void *plContext);
#ifdef __cplusplus
}
#endif
#endif /* _PKIX_PL_HASHTABLE_H */
| 218 |
623 | package nb.barmie.net.proxy.thread;
import java.io.ByteArrayOutputStream;
import java.net.Socket;
/***********************************************************
* A proxy thread that writes received data straight out to
* the destination socket untouched.
*
* Written by <NAME> (@NickstaDB).
**********************************************************/
/**
 * Proxy thread that forwards all received data to the destination socket
 * without modification.
 *
 * Written by Nicky Bloor (@NickstaDB).
 */
public class PassThroughProxyThread extends ProxyThread {
	/**
	 * Create a pass-through proxy between the given sockets.
	 *
	 * @param clientSocket the source socket
	 * @param serverSocket the destination socket
	 */
	public PassThroughProxyThread(Socket clientSocket, Socket serverSocket) {
		super(clientSocket, serverSocket);
	}

	/**
	 * Forward received bytes unchanged.
	 *
	 * @param data the data received from the source socket
	 * @return the same buffer, untouched
	 */
	public ByteArrayOutputStream handleData(ByteArrayOutputStream data) {
		return data;
	}
}
| 258 |
4,857 | <reponame>gvprathyusha6/hbase<gh_stars>1000+
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.types;
import static org.junit.Assert.assertEquals;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.example.protobuf.generated.CellMessage;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedByteRange;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
@Category({ SmallTests.class, MiscTests.class })
public class TestPBCell {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestPBCell.class);

  private static final PBCell CODEC = new PBCell();

  /**
   * Encodes a protobuf Cell into a byte range and decodes it back, verifying
   * that {@link PBType} utility methods and protobuf delegation round-trip.
   */
  @Test
  public void testRoundTrip() {
    CellMessage.Cell original =
        CellMessage.Cell.newBuilder().setRow(ByteString.copyFromUtf8("row")).build();
    PositionedByteRange buffer = new SimplePositionedByteRange(original.getSerializedSize());
    buffer.setPosition(0);
    int bytesWritten = CODEC.encode(buffer, original);
    buffer.setPosition(0);
    CellMessage.Cell roundTripped = CODEC.decode(buffer);
    // Decoding must consume exactly what encoding produced.
    assertEquals(bytesWritten, buffer.getPosition());
    assertEquals("row", roundTripped.getRow().toStringUtf8());
  }
}
| 734 |
464 | # -*- coding: utf-8 -*-
"""
checks.py
Each function in here should
- Take a DataFrame as its first argument, maybe optional arguments
- Makes its assert on the result
- Return the original DataFrame
"""
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import six
from engarde import generic
from engarde.generic import verify, verify_all, verify_any
def none_missing(df, columns=None):
    """
    Asserts that there are no missing values (NaNs) in the DataFrame.

    Parameters
    ----------
    df : DataFrame
    columns : list
        list of columns to restrict the check to

    Returns
    -------
    df : DataFrame
        same as the original
    """
    cols = df.columns if columns is None else columns
    null_mask = df[cols].isnull()
    if null_mask.any().any():
        # Attach the offending cell locations to the raised error.
        raise AssertionError(*generic.bad_locations(null_mask))
    return df
def is_monotonic(df, items=None, increasing=None, strict=False):
    """
    Asserts that the DataFrame is monotonic.

    Parameters
    ==========
    df : Series or DataFrame
    items : dict
        mapping columns to conditions (increasing, strict)
    increasing : None or bool
        None is either increasing or decreasing.
    strict : whether the comparison should be strict

    Returns
    =======
    df : DataFrame
    """
    if items is None:
        # Apply the same (increasing, strict) condition to every column.
        items = {k: (increasing, strict) for k in df}
    for col, (increasing, strict) in items.items():
        s = pd.Index(df[col])
        # NOTE(review): Index.is_monotonic was deprecated and later removed in
        # pandas 2.0 (use is_monotonic_increasing) — confirm the supported
        # pandas range before relying on this check.
        if increasing:
            good = getattr(s, 'is_monotonic_increasing')
        elif increasing is None:
            # Either direction is acceptable; `|` is boolean OR here.
            good = getattr(s, 'is_monotonic') | getattr(s, 'is_monotonic_decreasing')
        else:
            good = getattr(s, 'is_monotonic_decreasing')
        if strict:
            # Strictness additionally requires consecutive differences to be
            # nonzero in the required direction.
            if increasing:
                good = good & (s.to_series().diff().dropna() > 0).all()
            elif increasing is None:
                good = good & ((s.to_series().diff().dropna() > 0).all() |
                               (s.to_series().diff().dropna() < 0).all())
            else:
                good = good & (s.to_series().diff().dropna() < 0).all()
        if not good:
            raise AssertionError
    return df
def is_shape(df, shape):
    """
    Asserts that the DataFrame is of a known shape.

    Parameters
    ==========
    df : DataFrame
    shape : tuple
        (n_rows, n_columns). Use None or -1 if you don't care
        about a dimension.

    Returns
    =======
    df : DataFrame
    """
    def _dim_ok(actual_dim, expected_dim):
        # None / -1 act as wildcards for that dimension.
        return expected_dim is None or expected_dim == -1 or actual_dim == expected_dim

    if not all(_dim_ok(a, e) for a, e in zip(df.shape, shape)):
        msg = ("Expected shape: {}\n"
               "\t\tActual shape: {}".format(shape, df.shape))
        raise AssertionError(msg)
    return df
def unique(df, columns=None):
    """
    Asserts that columns in the DataFrame only have unique values.

    Parameters
    ----------
    df : DataFrame
    columns : list
        list of columns to restrict the check to. If None, check all columns.

    Returns
    -------
    df : DataFrame
        same as the original
    """
    cols = df.columns if columns is None else columns
    # Fail on the first column containing duplicates, as the original did.
    duplicated = [c for c in cols if not df[c].is_unique]
    if duplicated:
        raise AssertionError("Column {!r} contains non-unique values".format(duplicated[0]))
    return df
def unique_index(df):
    """
    Assert that the index is unique.

    Parameters
    ==========
    df : DataFrame

    Returns
    =======
    df : DataFrame

    Raises
    ======
    AssertionError
        with the duplicated index labels as its args.
    """
    try:
        assert df.index.is_unique
    except AssertionError as e:
        # Index.get_duplicates() was deprecated in pandas 0.23 and removed in
        # 1.0; compute the duplicated labels directly instead.
        e.args = tuple(df.index[df.index.duplicated()].unique())
        raise
    return df
def within_set(df, items=None):
    """
    Assert that df is a subset of items.

    Parameters
    ==========
    df : DataFrame
    items : dict
        mapping of columns (k) to array-like of values (v) that
        ``df[k]`` is expected to be a subset of

    Returns
    =======
    df : DataFrame
    """
    for col, allowed in items.items():
        membership = df[col].isin(allowed)
        if not membership.all():
            # Report the offending values alongside a short tag.
            raise AssertionError('Not in set', df.loc[~membership, col])
    return df
def within_range(df, items=None):
    """
    Assert that a DataFrame is within a range.

    Parameters
    ==========
    df : DataFame
    items : dict
        mapping of columns (k) to a (low, high) tuple (v)
        that ``df[k]`` is expected to be between (inclusive).

    Returns
    =======
    df : DataFrame
    """
    for col, (low, high) in items.items():
        below = df[col] < low
        above = df[col] > high
        if below.any() or above.any():
            raise AssertionError("Outside range", below | above)
    return df
def within_n_std(df, n=3):
    """
    Assert that every value is within ``n`` standard
    deviations of its column's mean.

    Parameters
    ==========
    df : DataFame
    n : int
        number of standard deviations from the mean

    Returns
    =======
    df : DataFrame
    """
    col_means = df.mean()
    col_stds = df.std()
    deviations = (df[col_means.index] - col_means).abs()
    inliers = deviations < n * col_stds
    if not np.all(inliers):
        # Report the offending cell locations.
        raise AssertionError(generic.bad_locations(~inliers))
    return df
def has_dtypes(df, items):
    """
    Assert that a DataFrame has the expected ``dtypes``.

    Parameters
    ==========
    df: DataFrame
    items: dict
        mapping of columns to dtype.

    Returns
    =======
    df : DataFrame
    """
    actual = df.dtypes
    for col, expected in items.items():
        if actual[col] != expected:
            raise AssertionError("{} has the wrong dtype. Should be ({}), is ({})".format(col, expected, actual[col]))
    return df
def one_to_many(df, unitcol, manycol):
    """
    Assert that a many-to-one relationship is preserved between two
    columns. For example, a retail store will have have distinct
    departments, each with several employees. If each employee may
    only work in a single department, then the relationship of the
    department to the employees is one to many.

    Parameters
    ==========
    df : DataFrame
    unitcol : str
        The column that encapulates the groups in ``manycol``.
    manycol : str
        The column that must remain unique in the distict pairs
        between ``manycol`` and ``unitcol``

    Returns
    =======
    df : DataFrame
    """
    pairs = df[[manycol, unitcol]].drop_duplicates()
    # Fail on the first ``manycol`` value associated with more than one unit.
    for value in pairs[manycol].unique():
        if (pairs[manycol] == value).sum() > 1:
            raise AssertionError("{} in {} has multiple values for {}".format(value, manycol, unitcol))
    return df
def is_same_as(df, df_to_compare, **kwargs):
    """
    Assert that two pandas DataFrames are equal.

    Parameters
    ==========
    df : pandas DataFrame
    df_to_compare : pandas DataFrame
    **kwargs : dict
        keyword arguments passed through to pandas' ``assert_frame_equal``

    Returns
    =======
    df : DataFrame
    """
    try:
        # pandas.util.testing was removed in pandas 2.0; the public home of
        # assert_frame_equal is pandas.testing.
        pd.testing.assert_frame_equal(df, df_to_compare, **kwargs)
    except AssertionError as exc:
        # Plain `raise ... from` replaces six.raise_from on Python 3.
        raise AssertionError("DataFrames are not equal") from exc
    return df
__all__ = ['is_monotonic', 'is_same_as', 'is_shape', 'none_missing',
'unique_index', 'within_n_std', 'within_range', 'within_set',
'has_dtypes', 'verify', 'verify_all', 'verify_any',
'one_to_many','is_same_as',]
| 3,169 |
4,681 | import outcome
import pytest
import time
from .._core.tests.tutil import slow
from .. import _core
from ..testing import assert_checkpoints
from .._timeouts import *
async def check_takes_about(f, expected_dur):
    """Run async callable ``f``, assert its wall-clock duration is between
    ~``expected_dur`` and ``1.5 * expected_dur``, and return (or re-raise)
    its outcome."""
    start = time.perf_counter()
    result = await outcome.acapture(f)
    dur = time.perf_counter() - start
    print(dur / expected_dur)
    # 1.5 is an arbitrary fudge factor because there's always some delay
    # between when we become eligible to wake up and when we actually do. We
    # used to sleep for 0.05, and regularly observed overruns of 1.6x on
    # Appveyor, and then started seeing overruns of 2.3x on Travis's macOS, so
    # now we bumped up the sleep to 1 second, marked the tests as slow, and
    # hopefully now the proportional error will be less huge.
    #
    # We also also for durations that are a hair shorter than expected. For
    # example, here's a run on Windows where a 1.0 second sleep was measured
    # to take 0.9999999999999858 seconds:
    # https://ci.appveyor.com/project/njsmith/trio/build/1.0.768/job/3lbdyxl63q3h9s21
    # I believe that what happened here is that Windows's low clock resolution
    # meant that our calls to time.monotonic() returned exactly the same
    # values as the calls inside the actual run loop, but the two subtractions
    # returned slightly different values because the run loop's clock adds a
    # random floating point offset to both times, which should cancel out, but
    # lol floating point we got slightly different rounding errors. (That
    # value above is exactly 128 ULPs below 1.0, which would make sense if it
    # started as a 1 ULP error at a different dynamic range.)
    assert (1 - 1e-8) <= (dur / expected_dur) < 1.5
    return result.unwrap()
# How long to (attempt to) sleep for when testing. Smaller numbers make the
# test suite go faster.
TARGET = 1.0
@slow
async def test_sleep():
    """Exercise sleep_until/sleep timing, argument validation, checkpointing,
    and cancellation via an already-expired move_on_at scope."""
    async def sleep_until_target():
        await sleep_until(_core.current_time() + TARGET)

    await check_takes_about(sleep_until_target, TARGET)

    async def sleep_for_target():
        await sleep(TARGET)

    await check_takes_about(sleep_for_target, TARGET)

    with pytest.raises(ValueError):
        await sleep(-1)

    with assert_checkpoints():
        await sleep(0)

    # This also serves as a test of the trivial move_on_at: the deadline has
    # already passed, so sleep(0) must be cancelled immediately.
    with move_on_at(_core.current_time()):
        with pytest.raises(_core.Cancelled):
            await sleep(0)
@slow
async def test_move_on_after():
    """A negative timeout is rejected; a valid one caps the sleep at TARGET."""
    with pytest.raises(ValueError):
        with move_on_after(-1):
            pass  # pragma: no cover

    async def capped_sleep():
        with move_on_after(TARGET):
            await sleep(100)

    await check_takes_about(capped_sleep, TARGET)
@slow
async def test_fail():
    """fail_at/fail_after raise TooSlowError on overrun, pass through a fast
    sleep untouched, and reject negative timeouts."""
    async def overrun_fail_at():
        with fail_at(_core.current_time() + TARGET):
            await sleep(100)

    with pytest.raises(TooSlowError):
        await check_takes_about(overrun_fail_at, TARGET)

    with fail_at(_core.current_time() + 100):
        await sleep(0)

    async def overrun_fail_after():
        with fail_after(TARGET):
            await sleep(100)

    with pytest.raises(TooSlowError):
        await check_takes_about(overrun_fail_after, TARGET)

    with fail_after(100):
        await sleep(0)

    with pytest.raises(ValueError):
        with fail_after(-1):
            pass  # pragma: no cover
| 1,232 |
443 | """Index Source.patch_id
Revision ID: 5508859bed73
Revises: 26f665189ca0
Create Date: 2014-01-06 11:14:19.109932
"""
# revision identifiers, used by Alembic.
revision = '5508859bed73'
down_revision = '26f665189ca0'
from alembic import op
def upgrade():
    """Create an index on source.patch_id to speed lookups by patch."""
    op.create_index('idx_source_patch_id', 'source', ['patch_id'])
def downgrade():
    """Drop the index created by upgrade()."""
    op.drop_index('idx_source_patch_id', 'source')
| 155 |
8,772 | {
"casuser" : [ {
"signatureCount" : 84,
"userIdentity" : {
"name" : "casuser",
"displayName" : "casuser",
"id" : "<KEY>"
},
"credentialNickname" : "<PASSWORD>",
"credential" : {
"credentialId" : "<KEY>",
"userHandle" : "<KEY>",
"publicKeyCose" : "<KEY>",
"signatureCount" : 0
},
"attestationMetadata" : {
"trusted" : true,
"metadataIdentifier" : "2fb54029-7613-4f1d-94f1-fb876c14a6fe",
"vendorProperties" : {
"url" : "https://yubico.com",
"imageUrl" : "https://developers.yubico.com/U2F/Images/yubico.png",
"name" : "Yubico"
},
"deviceProperties" : {
"deviceId" : "1.3.6.1.4.1.41482.1.7",
"displayName" : "YubiKey 5 NFC",
"deviceUrl" : "https://support.yubico.com/support/solutions/articles/15000014174--yubikey-5-nfc",
"imageUrl" : "https://developers.yubico.com/U2F/Images/YK5.png"
},
"transports" : [ "USB", "NFC" ]
},
"username" : "casuser",
"registrationTime" : "2020-11-25T05:34:16.366768Z"
} ]
}
| 564 |
652 | /*
* Copyright (C) 2014 Stratio (http://stratio.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.cassandra.lucene.search;
import com.google.common.base.MoreObjects;
import com.stratio.cassandra.lucene.IndexPagingState;
import com.stratio.cassandra.lucene.schema.Schema;
import com.stratio.cassandra.lucene.search.condition.Condition;
import com.stratio.cassandra.lucene.search.sort.SortField;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
import static org.apache.lucene.search.BooleanClause.Occur.MUST;
/**
* Class representing an Lucene index search. It can be translated to a Lucene {@link Query} using a {@link Schema}.
*
* @author <NAME> {@literal <<EMAIL>>}
*/
public class Search {

    protected static final Logger logger = LoggerFactory.getLogger(Search.class);

    private static final boolean DEFAULT_FORCE_REFRESH = false;

    /** The mandatory conditions not participating in scoring. */
    public final List<Condition> filter;

    /** The mandatory conditions participating in scoring. */
    public final List<Condition> query;

    /** The sorting fields for the query. */
    private final List<SortField> sort;

    /** If this search must refresh the index before reading it. */
    private final Boolean refresh;

    /** The paging state. */
    private final IndexPagingState paging;

    /**
     * Constructor using the specified querying, filtering, sorting and refresh options.
     *
     * @param filter the filtering {@link Condition}s not involved in scoring
     * @param query the querying {@link Condition}s participating in scoring
     * @param sort the sort fields for the query
     * @param paging the paging state
     * @param refresh if this search must refresh the index before reading it
     */
    public Search(List<Condition> filter,
                  List<Condition> query,
                  List<SortField> sort,
                  IndexPagingState paging,
                  Boolean refresh) {
        // Collections.emptyList() replaces the raw-typed Collections.EMPTY_LIST,
        // avoiding unchecked-assignment warnings while staying immutable.
        this.filter = filter == null ? Collections.emptyList() : filter;
        this.query = query == null ? Collections.emptyList() : query;
        this.sort = sort == null ? Collections.emptyList() : sort;
        this.paging = paging;
        this.refresh = refresh == null ? DEFAULT_FORCE_REFRESH : refresh;
    }

    /**
     * Returns if this search requires post reconciliation agreement processing to preserve the order of its results.
     *
     * @return {@code true} if it requires post processing, {@code false} otherwise
     */
    public boolean requiresPostProcessing() {
        return usesRelevance() || usesSorting();
    }

    /**
     * Returns if this search requires full ranges scan.
     *
     * @return {@code true} if this search requires full ranges scan, {code null} otherwise
     */
    public boolean requiresFullScan() {
        // Parentheses make the &&-over-|| precedence explicit.
        return usesRelevance() || usesSorting() || (refresh && isEmpty());
    }

    /**
     * Returns if this search uses Lucene relevance formula.
     *
     * @return {@code true} if this search uses Lucene relevance formula, {@code false} otherwise
     */
    public boolean usesRelevance() {
        return !query.isEmpty();
    }

    /**
     * Returns if this search uses field sorting.
     *
     * @return {@code true} if this search uses field sorting, {@code false} otherwise
     */
    public boolean usesSorting() {
        return !sort.isEmpty();
    }

    /**
     * Returns if this search doesn't specify any filter, query or sort.
     *
     * @return {@code true} if this search doesn't specify any filter, query or sort, {@code false} otherwise
     */
    public boolean isEmpty() {
        return filter.isEmpty() && query.isEmpty() && sort.isEmpty();
    }

    /**
     * Returns the Lucene {@link Query} represented by this search, with the additional optional data range filter.
     *
     * @param schema the indexing schema
     * @param range the additional data range filter, maybe {@code null}
     * @return a Lucene {@link Query}
     */
    public Query query(Schema schema, Query range) {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        if (range != null) {
            builder.add(range, FILTER);
        }
        filter.forEach(condition -> builder.add(condition.query(schema), FILTER));
        query.forEach(condition -> builder.add(condition.query(schema), MUST));
        BooleanQuery booleanQuery = builder.build();
        // An empty boolean query matches nothing, so fall back to match-all.
        return booleanQuery.clauses().isEmpty() ? new MatchAllDocsQuery() : booleanQuery;
    }

    /**
     * Returns the Lucene {@link Query} used for post-processing scoring, built only from the scoring conditions.
     *
     * @param schema the indexing schema
     * @return a Lucene {@link Query}
     */
    public Query postProcessingQuery(Schema schema) {
        if (query.isEmpty()) {
            return new MatchAllDocsQuery();
        } else {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            query.forEach(condition -> builder.add(condition.query(schema), MUST));
            return builder.build();
        }
    }

    /**
     * Returns if this search needs to refresh the index before reading it.
     *
     * @return {@code true} if this search needs to refresh the index before reading it, {@code false} otherwise.
     */
    public boolean refresh() {
        return refresh;
    }

    /**
     * Returns the Lucene {@link org.apache.lucene.search.SortField}s represented by this using the specified schema.
     *
     * @param schema the indexing schema to be used
     * @return the Lucene sort fields represented by this using {@code schema}
     */
    public List<org.apache.lucene.search.SortField> sortFields(Schema schema) {
        return sort.stream().map(s -> s.sortField(schema)).collect(Collectors.toList());
    }

    /**
     * Returns the paging state, maybe {@code null}.
     *
     * @return the paging state
     */
    public IndexPagingState paging() {
        return paging;
    }

    /**
     * Returns the names of the involved fields when post processing.
     *
     * @return the names of the involved fields
     */
    public Set<String> postProcessingFields() {
        Set<String> fields = new LinkedHashSet<>();
        query.forEach(condition -> fields.addAll(condition.postProcessingFields()));
        sort.forEach(condition -> fields.addAll(condition.postProcessingFields()));
        return fields;
    }

    /**
     * Validates this {@link Search} against the specified {@link Schema}.
     *
     * @param schema a {@link Schema}
     * @return this
     */
    public Search validate(Schema schema) {
        filter.forEach(condition -> condition.query(schema));
        query.forEach(condition -> condition.query(schema));
        sort.forEach(field -> field.sortField(schema));
        return this;
    }

    /** {@inheritDoc} */
    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                .add("filter", filter)
                .add("query", query)
                .add("sort", sort)
                .add("refresh", refresh)
                .add("paging", paging)
                .toString();
    }
}
| 2,825 |
3,579 | package com.querydsl.apt.domain.p10;
import java.io.Serializable;
import javax.persistence.MappedSuperclass;
/**
 * JPA mapped superclass for persistable entities keyed by a serializable
 * primary key of type {@code PK}; adds no state or behavior of its own.
 */
@MappedSuperclass
public abstract class AbstractPersistable<PK extends Serializable> implements Persistable<PK> {

}
3,799 | /*
* Copyright 2019 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.camera.extensions.impl;
import android.hardware.camera2.TotalCaptureResult;
/**
* Processes a {@link TotalCaptureResult} to update a CaptureStage.
*
* @since 1.0
*/
public interface RequestUpdateProcessorImpl extends ProcessorImpl {
    /**
     * Process the {@link TotalCaptureResult} to update the {@link CaptureStageImpl}
     *
     * @param result The metadata associated with the image. Can be null if the image and meta have
     *               not been synced.
     * @return The updated parameters used for the repeating requests. If this is {@code null} then
     * the previous parameters will be used.
     */
    CaptureStageImpl process(TotalCaptureResult result);
}
| 375 |
360 | <reponame>Xarthisius/yt
from yt.testing import requires_file
from yt.utilities.answer_testing.framework import data_dir_load
from yt.visualization.plot_window import SlicePlot
@requires_file("amrvac/bw_3d0000.dat")
def test_code_units_xy_labels():
    # Load the AMRVAC dataset in code units and verify that both axis
    # labels of a density slice mention "code length".
    dataset = data_dir_load("amrvac/bw_3d0000.dat", kwargs=dict(unit_system="code"))
    plot = SlicePlot(dataset, "x", ("gas", "density"))
    axes = plot.plots[("gas", "density")].axes
    for label in (axes.get_xlabel(), axes.get_ylabel()):
        assert "code length" in label.replace("\\", "")
assert "code length" in ax.get_ylabel().replace("\\", "")
| 216 |
892 | <reponame>westonsteimel/advisory-database-github
{
"schema_version": "1.2.0",
"id": "GHSA-8jq6-w5cg-wm45",
"modified": "2020-11-11T21:38:07Z",
"published": "2020-11-11T21:38:18Z",
"aliases": [
],
"summary": "Exploitable inventory component chaining in PocketMine-MP",
"details": "### Impact\nSpecially crafted `InventoryTransactionPacket`s sent by malicious clients were able to exploit the behaviour of `InventoryTransaction->findResultItem()` and cause it to take an abnormally long time to execute (causing an apparent server freeze).\n\nThe affected code is intended to compact conflicting `InventoryActions` which are in the same `InventoryTransaction` by flattening them into a single action. When multiple pathways to a result existed, the complexity of this flattening became exponential.\n\nThe problem was fixed by bailing when ambiguities are detected.\n\n**At the time of writing, this exploit is being used in the wild by attackers to deny service to servers.**\n\n### Patches\nUpgrade to 3.15.4 or newer.\n\n### Workarounds\nNo practical workarounds are possible, short of backporting the fix or implementing checks in a plugin listening to `DataPacketReceiveEvent`.\n\n### References\nc368ebb5e74632bc622534b37cd1447b97281e20\n\n### For more information\nIf you have any questions or comments about this advisory:\n* Email us at [<EMAIL>](mailto:<EMAIL>)",
"severity": [
],
"affected": [
{
"package": {
"ecosystem": "Packagist",
"name": "pocketmine/pocketmine-mp"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "3.15.4"
}
]
}
]
}
],
"references": [
{
"type": "WEB",
"url": "https://github.com/pmmp/PocketMine-MP/security/advisories/GHSA-8jq6-w5cg-wm45"
}
],
"database_specific": {
"cwe_ids": [
"CWE-400"
],
"severity": "HIGH",
"github_reviewed": true
}
} | 818 |
441 | <gh_stars>100-1000
// Copyright 2009-2020 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
#pragma once
#include "linearspace2.h"
#include "linearspace3.h"
#include "quaternion.h"
#include "bbox.h"
#include "vec4.h"
namespace embree
{
#define VectorT typename L::Vector
#define ScalarT typename L::Vector::Scalar
////////////////////////////////////////////////////////////////////////////////
// Affine Space
////////////////////////////////////////////////////////////////////////////////
/*! Affine transformation x -> l*x + p: a linear part 'l' (of linear-space
    type L, which also fixes the vector/scalar types via the VectorT/ScalarT
    macros) plus a translation 'p'. */
template<typename L>
struct AffineSpaceT
{
  L l;       /*< linear part of affine space (rotation/scale/shear) */
  VectorT p; /*< affine part of affine space (translation) */

  ////////////////////////////////////////////////////////////////////////////////
  // Constructors, Assignment, Cast, Copy Operations
  ////////////////////////////////////////////////////////////////////////////////

  __forceinline AffineSpaceT ( ) { }
  __forceinline AffineSpaceT ( const AffineSpaceT& other ) { l = other.l; p = other.p; }
  // purely linear map: zero translation
  __forceinline AffineSpaceT ( const L & other ) { l = other ; p = VectorT(zero); }
  __forceinline AffineSpaceT& operator=( const AffineSpaceT& other ) { l = other.l; p = other.p; return *this; }

  __forceinline AffineSpaceT( const VectorT& vx, const VectorT& vy, const VectorT& vz, const VectorT& p ) : l(vx,vy,vz), p(p) {}
  __forceinline AffineSpaceT( const L& l, const VectorT& p ) : l(l), p(p) {}

  // conversion between affine spaces over different linear-space types
  template<typename L1> __forceinline AffineSpaceT( const AffineSpaceT<L1>& s ) : l(s.l), p(s.p) {}

  ////////////////////////////////////////////////////////////////////////////////
  // Constants
  ////////////////////////////////////////////////////////////////////////////////

  __forceinline AffineSpaceT( ZeroTy ) : l(zero), p(zero) {}
  __forceinline AffineSpaceT( OneTy ) : l(one), p(zero) {} // identity transform

  /*! return matrix for scaling */
  static __forceinline AffineSpaceT scale(const VectorT& s) { return L::scale(s); }

  /*! return matrix for translation */
  static __forceinline AffineSpaceT translate(const VectorT& p) { return AffineSpaceT(one,p); }

  /*! return matrix for rotation, only in 2D */
  static __forceinline AffineSpaceT rotate(const ScalarT& r) { return L::rotate(r); }

  /*! return matrix for rotation around arbitrary point (2D) or axis (3D) */
  static __forceinline AffineSpaceT rotate(const VectorT& u, const ScalarT& r) { return L::rotate(u,r); }

  /*! return matrix for rotation around arbitrary axis and point, only in 3D:
      translate the point to the origin, rotate, translate back */
  static __forceinline AffineSpaceT rotate(const VectorT& p, const VectorT& u, const ScalarT& r) { return translate(+p) * rotate(u,r) * translate(-p); }

  /*! return matrix for looking at given point, only in 3D; builds a frame
      with Z pointing from 'eye' towards 'point' and U/V derived from 'up' */
  static __forceinline AffineSpaceT lookat(const VectorT& eye, const VectorT& point, const VectorT& up) {
    VectorT Z = normalize(point-eye);
    VectorT U = normalize(cross(up,Z));
    VectorT V = normalize(cross(Z,U));
    return AffineSpaceT(L(U,V,Z),eye);
  }
};

// template specialization to get correct identity matrix for type AffineSpace3fa:
// the .w components of this layout carry a quaternion (see quaternionDecomposition
// further below), whose identity is (r,i,j,k) = (1,0,0,0), hence p.w = 1.
template<>
__forceinline AffineSpaceT<LinearSpace3ff>::AffineSpaceT( OneTy ) : l(one), p(0.f, 0.f, 0.f, 1.f) {}
////////////////////////////////////////////////////////////////////////////////
// Unary Operators
////////////////////////////////////////////////////////////////////////////////

template<typename L> __forceinline AffineSpaceT<L> operator -( const AffineSpaceT<L>& a ) { return AffineSpaceT<L>(-a.l,-a.p); }
template<typename L> __forceinline AffineSpaceT<L> operator +( const AffineSpaceT<L>& a ) { return AffineSpaceT<L>(+a.l,+a.p); }

/*! inverse transform: rcp(l,p) = (l^-1, -(l^-1 * p)) */
template<typename L> __forceinline AffineSpaceT<L> rcp( const AffineSpaceT<L>& a ) { L il = rcp(a.l); return AffineSpaceT<L>(il,-(il*a.p)); }

////////////////////////////////////////////////////////////////////////////////
// Binary Operators
////////////////////////////////////////////////////////////////////////////////

template<typename L> __forceinline const AffineSpaceT<L> operator +( const AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return AffineSpaceT<L>(a.l+b.l,a.p+b.p); }
template<typename L> __forceinline const AffineSpaceT<L> operator -( const AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return AffineSpaceT<L>(a.l-b.l,a.p-b.p); }

template<typename L> __forceinline const AffineSpaceT<L> operator *( const ScalarT & a, const AffineSpaceT<L>& b ) { return AffineSpaceT<L>(a*b.l,a*b.p); }
// composition of transforms: (a*b)(x) == a(b(x))
template<typename L> __forceinline const AffineSpaceT<L> operator *( const AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return AffineSpaceT<L>(a.l*b.l,a.l*b.p+a.p); }
template<typename L> __forceinline const AffineSpaceT<L> operator /( const AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return a * rcp(b); }
template<typename L> __forceinline const AffineSpaceT<L> operator /( const AffineSpaceT<L>& a, const ScalarT & b ) { return a * rcp(b); }

template<typename L> __forceinline AffineSpaceT<L>& operator *=( AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return a = a * b; }
template<typename L> __forceinline AffineSpaceT<L>& operator *=( AffineSpaceT<L>& a, const ScalarT & b ) { return a = a * b; }
template<typename L> __forceinline AffineSpaceT<L>& operator /=( AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return a = a / b; }
template<typename L> __forceinline AffineSpaceT<L>& operator /=( AffineSpaceT<L>& a, const ScalarT & b ) { return a = a / b; }

/*! transform a point: m.l * p + m.p (implemented with fused multiply-adds) */
template<typename L> __forceinline VectorT xfmPoint (const AffineSpaceT<L>& m, const VectorT& p) { return madd(VectorT(p.x),m.l.vx,madd(VectorT(p.y),m.l.vy,madd(VectorT(p.z),m.l.vz,m.p))); }
/*! transform a direction vector: the translation part does not apply */
template<typename L> __forceinline VectorT xfmVector(const AffineSpaceT<L>& m, const VectorT& v) { return xfmVector(m.l,v); }
/*! transform a normal; delegated to the linear space (translation does not apply) */
template<typename L> __forceinline VectorT xfmNormal(const AffineSpaceT<L>& m, const VectorT& n) { return xfmNormal(m.l,n); }

/*! transform an axis-aligned box by transforming all 8 corners and
    taking their bounds */
__forceinline const BBox<Vec3fa> xfmBounds(const AffineSpaceT<LinearSpace3<Vec3fa> >& m, const BBox<Vec3fa>& b)
{
  BBox3fa dst = empty;
  const Vec3fa p0(b.lower.x,b.lower.y,b.lower.z); dst.extend(xfmPoint(m,p0));
  const Vec3fa p1(b.lower.x,b.lower.y,b.upper.z); dst.extend(xfmPoint(m,p1));
  const Vec3fa p2(b.lower.x,b.upper.y,b.lower.z); dst.extend(xfmPoint(m,p2));
  const Vec3fa p3(b.lower.x,b.upper.y,b.upper.z); dst.extend(xfmPoint(m,p3));
  const Vec3fa p4(b.upper.x,b.lower.y,b.lower.z); dst.extend(xfmPoint(m,p4));
  const Vec3fa p5(b.upper.x,b.lower.y,b.upper.z); dst.extend(xfmPoint(m,p5));
  const Vec3fa p6(b.upper.x,b.upper.y,b.lower.z); dst.extend(xfmPoint(m,p6));
  const Vec3fa p7(b.upper.x,b.upper.y,b.upper.z); dst.extend(xfmPoint(m,p7));
  return dst;
}

////////////////////////////////////////////////////////////////////////////////
/// Comparison Operators
////////////////////////////////////////////////////////////////////////////////

template<typename L> __forceinline bool operator ==( const AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return a.l == b.l && a.p == b.p; }
template<typename L> __forceinline bool operator !=( const AffineSpaceT<L>& a, const AffineSpaceT<L>& b ) { return a.l != b.l || a.p != b.p; }

////////////////////////////////////////////////////////////////////////////////
/// Select
////////////////////////////////////////////////////////////////////////////////

/*! per-lane select between two affine spaces (SIMD mask 's') */
template<typename L> __forceinline AffineSpaceT<L> select ( const typename L::Vector::Scalar::Bool& s, const AffineSpaceT<L>& t, const AffineSpaceT<L>& f ) {
  return AffineSpaceT<L>(select(s,t.l,f.l),select(s,t.p,f.p));
}

////////////////////////////////////////////////////////////////////////////////
// Output Operators
////////////////////////////////////////////////////////////////////////////////

template<typename L> static embree_ostream operator<<(embree_ostream cout, const AffineSpaceT<L>& m) {
  return cout << "{ l = " << m.l << ", p = " << m.p << " }";
}
////////////////////////////////////////////////////////////////////////////////
// Template Instantiations
////////////////////////////////////////////////////////////////////////////////

typedef AffineSpaceT<LinearSpace2f> AffineSpace2f;
typedef AffineSpaceT<LinearSpace3f> AffineSpace3f;
typedef AffineSpaceT<LinearSpace3fa> AffineSpace3fa;
typedef AffineSpaceT<LinearSpace3fx> AffineSpace3fx;
typedef AffineSpaceT<LinearSpace3ff> AffineSpace3ff;
typedef AffineSpaceT<Quaternion3f > OrthonormalSpace3f;

// SIMD variants over N-wide float vectors
template<int N> using AffineSpace3vf = AffineSpaceT<LinearSpace3<Vec3<vfloat<N>>>>;
typedef AffineSpaceT<LinearSpace3<Vec3<vfloat<4>>>> AffineSpace3vf4;
typedef AffineSpaceT<LinearSpace3<Vec3<vfloat<8>>>> AffineSpace3vf8;
typedef AffineSpaceT<LinearSpace3<Vec3<vfloat<16>>>> AffineSpace3vf16;

// SIMD variants with 4-component vectors (extra .w lane, e.g. for quaternions)
template<int N> using AffineSpace3vff = AffineSpaceT<LinearSpace3<Vec4<vfloat<N>>>>;
typedef AffineSpaceT<LinearSpace3<Vec4<vfloat<4>>>> AffineSpace3vfa4;
typedef AffineSpaceT<LinearSpace3<Vec4<vfloat<8>>>> AffineSpace3vfa8;
typedef AffineSpaceT<LinearSpace3<Vec4<vfloat<16>>>> AffineSpace3vfa16;

//////////////////////////////////////////////////////////////////////////////
/// Interpolation
//////////////////////////////////////////////////////////////////////////////

/*! component-wise linear interpolation of two affine spaces */
template<typename T, typename R>
__forceinline AffineSpaceT<T> lerp(const AffineSpaceT<T>& M0,
                                   const AffineSpaceT<T>& M1,
                                   const R& t)
{
  return AffineSpaceT<T>(lerp(M0.l,M1.l,t),lerp(M0.p,M1.p,t));
}

// slerp interprets the 16 floats of the matrix M = D * R * S as components of
// three matrices (D, R, S) that are interpolated individually:
//   - the .w lanes hold a rotation quaternion R (slerped),
//   - (vx.y, vx.z, vy.z) hold the translation D (lerped),
//   - the remaining entries hold scale/skew S (lerped).
template<typename T> __forceinline AffineSpaceT<LinearSpace3<Vec3<T>>>
slerp(const AffineSpaceT<LinearSpace3<Vec4<T>>>& M0,
      const AffineSpaceT<LinearSpace3<Vec4<T>>>& M1,
      const T& t)
{
  QuaternionT<T> q0(M0.p.w, M0.l.vx.w, M0.l.vy.w, M0.l.vz.w);
  QuaternionT<T> q1(M1.p.w, M1.l.vx.w, M1.l.vy.w, M1.l.vz.w);
  QuaternionT<T> q = slerp(q0, q1, t);

  AffineSpaceT<LinearSpace3<Vec3<T>>> S = lerp(M0, M1, t);
  AffineSpaceT<LinearSpace3<Vec3<T>>> D(one);
  // extract the interpolated translation out of S into D ...
  D.p.x = S.l.vx.y;
  D.p.y = S.l.vx.z;
  D.p.z = S.l.vy.z;
  // ... and clear those slots so S is a pure scale/skew transform
  S.l.vx.y = 0;
  S.l.vx.z = 0;
  S.l.vy.z = 0;

  AffineSpaceT<LinearSpace3<Vec3<T>>> R = LinearSpace3<Vec3<T>>(q);
  return D * R * S;
}

// this is a specialized version for Vec3fa because that does
// not play along nicely with the other templated Vec3/Vec4 types
__forceinline AffineSpace3fa slerp(const AffineSpace3ff& M0,
                                   const AffineSpace3ff& M1,
                                   const float& t)
{
  Quaternion3f q0(M0.p.w, M0.l.vx.w, M0.l.vy.w, M0.l.vz.w);
  Quaternion3f q1(M1.p.w, M1.l.vx.w, M1.l.vy.w, M1.l.vz.w);
  Quaternion3f q = slerp(q0, q1, t);

  AffineSpace3fa S = lerp(M0, M1, t);
  AffineSpace3fa D(one);
  // same D/R/S split as the templated slerp above
  D.p.x = S.l.vx.y;
  D.p.y = S.l.vx.z;
  D.p.z = S.l.vy.z;
  S.l.vx.y = 0;
  S.l.vx.z = 0;
  S.l.vy.z = 0;

  AffineSpace3fa R = LinearSpace3fa(q);
  return D * R * S;
}
/*! Expand a packed quaternion-decomposition transform (translation in
    (vx.y, vx.z, vy.z), quaternion in the .w lanes, scale/skew in the
    remaining entries) into the plain affine space D * R * M. */
__forceinline AffineSpace3fa quaternionDecompositionToAffineSpace(const AffineSpace3ff& qd)
{
  // compute affine transform from quaternion decomposition
  Quaternion3f q(qd.p.w, qd.l.vx.w, qd.l.vy.w, qd.l.vz.w);
  AffineSpace3fa M = qd;
  AffineSpace3fa D(one);
  D.p.x = M.l.vx.y;
  D.p.y = M.l.vx.z;
  D.p.z = M.l.vy.z;
  M.l.vx.y = 0;
  M.l.vx.z = 0;
  M.l.vy.z = 0;
  AffineSpace3fa R = LinearSpace3fa(q);
  return D * R * M;
}

/*! Split a packed transform 'qd' into its translation T, rotation q and
    scale/skew S components (inverse of the packing overload below). */
__forceinline void quaternionDecomposition(const AffineSpace3ff& qd, Vec3fa& T, Quaternion3f& q, AffineSpace3fa& S)
{
  q = Quaternion3f(qd.p.w, qd.l.vx.w, qd.l.vy.w, qd.l.vz.w);
  S = qd;
  T.x = qd.l.vx.y;
  T.y = qd.l.vx.z;
  T.z = qd.l.vy.z;
  S.l.vx.y = 0;
  S.l.vx.z = 0;
  S.l.vy.z = 0;
}

/*! Pack translation T, rotation q and scale/skew S into a single
    AffineSpace3ff: quaternion goes into the .w lanes, translation into
    (vx.y, vx.z, vy.z). */
__forceinline AffineSpace3fx quaternionDecomposition(Vec3fa const& T, Quaternion3f const& q, AffineSpace3fa const& S)
{
  AffineSpace3ff M = S;
  M.l.vx.w = q.i;
  M.l.vy.w = q.j;
  M.l.vz.w = q.k;
  M.p.w = q.r;
  M.l.vx.y = T.x;
  M.l.vx.z = T.y;
  M.l.vy.z = T.z;
  return M;
}

/*! Flat, named view of the 16 floats of a packed quaternion
    decomposition; defaults describe the identity transform. */
struct __aligned(16) QuaternionDecomposition
{
  float scale_x = 1.f;
  float scale_y = 1.f;
  float scale_z = 1.f;
  float skew_xy = 0.f;
  float skew_xz = 0.f;
  float skew_yz = 0.f;
  float shift_x = 0.f;
  float shift_y = 0.f;
  float shift_z = 0.f;
  float quaternion_r = 1.f; // identity rotation
  float quaternion_i = 0.f;
  float quaternion_j = 0.f;
  float quaternion_k = 0.f;
  float translation_x = 0.f;
  float translation_y = 0.f;
  float translation_z = 0.f;
};

/*! Unpack an AffineSpace3ff into the named-field representation above. */
__forceinline QuaternionDecomposition quaternionDecomposition(AffineSpace3ff const& M)
{
  QuaternionDecomposition qd;
  qd.scale_x = M.l.vx.x;
  qd.scale_y = M.l.vy.y;
  qd.scale_z = M.l.vz.z;
  qd.shift_x = M.p.x;
  qd.shift_y = M.p.y;
  qd.shift_z = M.p.z;
  qd.translation_x = M.l.vx.y;
  qd.translation_y = M.l.vx.z;
  qd.translation_z = M.l.vy.z;
  qd.skew_xy = M.l.vy.x;
  qd.skew_xz = M.l.vz.x;
  qd.skew_yz = M.l.vz.y;
  qd.quaternion_r = M.p.w;
  qd.quaternion_i = M.l.vx.w;
  qd.quaternion_j = M.l.vy.w;
  qd.quaternion_k = M.l.vz.w;
  return qd;
}
////////////////////////////////////////////////////////////////////////////////

/*
 * ! Template Specialization for 2D: return matrix for rotation around a point
 * (rotation around an arbitrary vector is not meaningful in 2D)
 */
template<> __forceinline
AffineSpace2f AffineSpace2f::rotate(const Vec2f& p, const float& r) {
  return translate(+p)*AffineSpace2f(LinearSpace2f::rotate(r))*translate(-p);
}

////////////////////////////////////////////////////////////////////////////////
// Similarity Transform
//
// Checks if M is a similarity transformation, i.e. if there exists a factor D
// such that for all x,y: distance(Mx, My) = D * distance(x, y)
////////////////////////////////////////////////////////////////////////////////

__forceinline bool similarityTransform(const AffineSpace3fa& M, float* D)
{
  if (D) *D = 0.f;
  // columns must be pairwise orthogonal ...
  if (abs(dot(M.l.vx, M.l.vy)) > 1e-5f) return false;
  if (abs(dot(M.l.vx, M.l.vz)) > 1e-5f) return false;
  if (abs(dot(M.l.vy, M.l.vz)) > 1e-5f) return false;

  // ... and of (approximately) equal squared length
  const float D_x = dot(M.l.vx, M.l.vx);
  const float D_y = dot(M.l.vy, M.l.vy);
  const float D_z = dot(M.l.vz, M.l.vz);

  if (abs(D_x - D_y) > 1e-5f ||
      abs(D_x - D_z) > 1e-5f ||
      abs(D_y - D_z) > 1e-5f)
    return false;

  // the scale factor is the common column length
  if (D) *D = sqrtf(D_x);
  return true;
}

/*! store an AffineSpace3fa to possibly unaligned memory */
__forceinline void AffineSpace3fa_store_unaligned(const AffineSpace3fa &source, AffineSpace3fa* ptr)
{
  Vec3fa::storeu(&ptr->l.vx, source.l.vx);
  Vec3fa::storeu(&ptr->l.vy, source.l.vy);
  Vec3fa::storeu(&ptr->l.vz, source.l.vz);
  Vec3fa::storeu(&ptr->p, source.p);
}

/*! load an AffineSpace3fa from possibly unaligned memory */
__forceinline AffineSpace3fa AffineSpace3fa_load_unaligned(AffineSpace3fa* ptr)
{
  AffineSpace3fa space;
  space.l.vx = Vec3fa::loadu(&ptr->l.vx);
  space.l.vy = Vec3fa::loadu(&ptr->l.vy);
  space.l.vz = Vec3fa::loadu(&ptr->l.vz);
  space.p = Vec3fa::loadu(&ptr->p);
  return space;
}
#undef VectorT
#undef ScalarT
}
| 6,662 |
565 | <filename>ksp_plugin/equator_relevance_threshold.cpp
#include "ksp_plugin/equator_relevance_threshold.hpp"
#include <algorithm>
#include "physics/geopotential.hpp"
#include "physics/oblate_body.hpp"
#include "quantities/elementary_functions.hpp"
namespace principia {
namespace ksp_plugin {
namespace internal_equator_relevance_threshold {
using physics::Geopotential;
using physics::OblateBody;
using quantities::Cbrt;
using quantities::Pow;
using quantities::si::Metre;
using quantities::si::Radian;
// Returns the distance from |body| below which its equator matters, as the
// larger of two thresholds: the geopotential J2 inner threshold (oblate
// bodies only) and the supersynchronous orbit radius from Kepler's third law.
Length EquatorRelevanceThreshold(RotatingBody<Barycentric> const& body) {
  // Only oblate bodies contribute a J2 threshold; otherwise it is 0 m.
  auto const* const oblate_body =
      dynamic_cast<OblateBody<Barycentric> const*>(&body);
  Length j2_threshold = 0 * Metre;
  if (oblate_body != nullptr) {
    j2_threshold = Geopotential<Barycentric>(oblate_body,
                                             /*tolerance=*/0x1p-24)
                       .degree_damping()[2].inner_threshold();
  }
  // r³ = μ / n², with n derived from the body's rotation rate.
  Length const supersynchronous_threshold =
      Cbrt(body.gravitational_parameter() /
           Pow<2>(body.angular_frequency() / (2 * Radian)));
  return std::max(j2_threshold, supersynchronous_threshold);
}
} // namespace internal_equator_relevance_threshold
} // namespace ksp_plugin
} // namespace principia
| 475 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-9w49-m7xh-5r39",
"modified": "2021-04-09T21:59:25Z",
"published": "2021-04-20T16:37:56Z",
"aliases": [
"CVE-2020-29456"
],
"summary": "Cross-site scripting in papermerge",
"details": "Multiple cross-site scripting (XSS) vulnerabilities in Papermerge before 1.5.2 allow remote attackers to inject arbitrary web script or HTML via the rename, tag, upload, or create folder function. The payload can be in a folder, a tag, or a document's filename. If email consumption is configured in Papermerge, a malicious document can be sent by email and is automatically uploaded into the Papermerge web application. Therefore, no authentication is required to exploit XSS if email consumption is configured. Otherwise authentication is required.",
"severity": [
{
"type": "CVSS_V3",
"score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:C/C:L/I:L/A:N"
}
],
"affected": [
{
"package": {
"ecosystem": "PyPI",
"name": "papermerge"
},
"ranges": [
{
"type": "ECOSYSTEM",
"events": [
{
"introduced": "0"
},
{
"fixed": "1.5.2"
}
]
}
]
}
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2020-29456"
},
{
"type": "WEB",
"url": "https://github.com/ciur/papermerge/issues/228"
},
{
"type": "WEB",
"url": "https://github.com/ciur/papermerge/releases/tag/v1.5.2"
},
{
"type": "WEB",
"url": "https://www.papermerge.com/"
}
],
"database_specific": {
"cwe_ids": [
"CWE-79"
],
"severity": "MODERATE",
"github_reviewed": true
}
} | 845 |
346 | <reponame>ikkisoft/serialkiller
package org.nibblesec.tools;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.FileTime;
import org.junit.Test;
import org.nibblesec.tools.SerialKiller.Configuration;
/**
* ConfigurationTest
*/
public class ConfigurationTest {
    // A null config path must be rejected.
    @Test(expected = IllegalStateException.class)
    public void testCreateNull() {
        new Configuration(null);
    }

    // A path that does not exist must be rejected.
    @Test(expected = IllegalStateException.class)
    public void testCreateNonExistant() {
        new Configuration("/i/am/pretty-sure/this-file/does-not-exist");
    }

    // An existing file that is not a valid config must be rejected.
    @Test(expected = IllegalStateException.class)
    public void testCreateNonConfig() throws IOException {
        Path tempFile = Files.createTempFile("sk-", ".tmp");
        new Configuration(tempFile.toAbsolutePath().toString());
    }

    // A config containing a malformed regex pattern must be rejected.
    @Test(expected = IllegalStateException.class)
    public void testCreateBadPattern() {
        new Configuration("src/test/resources/broken-pattern.conf");
    }

    // A well-formed config exposes its profiling flag and black/white lists.
    @Test
    public void testCreateGood() {
        Configuration configuration = new Configuration("src/test/resources/blacklist-all.conf");
        assertFalse(configuration.isProfiling());
        //@TODO after fixing logging
        //assertEquals("/tmp/serialkiller.log", configuration.logFile());
        assertEquals(".*", configuration.blacklist().iterator().next().pattern());
        assertEquals("java\\.lang\\..*", configuration.whitelist().iterator().next().pattern());
    }

    // Overwriting the config file on disk and bumping its mtime causes
    // reloadIfNeeded() to pick up the new black/white lists.
    @Test
    public void testReload() throws Exception {
        Path tempFile = Files.createTempFile("sk-", ".conf");
        Files.copy(new File("src/test/resources/blacklist-all-refresh-10-ms.conf").toPath(), tempFile, REPLACE_EXISTING);
        Configuration configuration = new Configuration(tempFile.toAbsolutePath().toString());
        assertFalse(configuration.isProfiling());
        assertEquals(".*", configuration.blacklist().iterator().next().pattern());
        assertEquals("java\\.lang\\..*", configuration.whitelist().iterator().next().pattern());
        Files.copy(new File("src/test/resources/whitelist-all.conf").toPath(), tempFile, REPLACE_EXISTING);
        Thread.sleep(1000L); // Wait to ensure the file is fully copied
        Files.setLastModifiedTime(tempFile, FileTime.fromMillis(System.currentTimeMillis())); // Commons configuration watches file modified time
        Thread.sleep(1000L); // Wait to ensure a reload happens
        configuration.reloadIfNeeded(); // Trigger reload
        assertFalse(configuration.blacklist().iterator().hasNext());
        assertEquals(".*", configuration.whitelist().iterator().next().pattern());
    }
} | 976 |
432 | <reponame>lambdaxymox/DragonFlyBSD
/* $NetBSD: filter-persistent.h,v 1.1.1.1 2008/12/22 00:18:47 haad Exp $ */
/*
* Copyright (C) 2001-2004 Sistina Software, Inc. All rights reserved.
* Copyright (C) 2004-2006 Red Hat, Inc. All rights reserved.
*
* This file is part of LVM2.
*
* This copyrighted material is made available to anyone wishing to use,
* modify, copy, or redistribute it subject to the terms and conditions
* of the GNU Lesser General Public License v.2.1.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#ifndef _LVM_FILTER_PERSISTENT_H
#define _LVM_FILTER_PERSISTENT_H
#include "dev-cache.h"
/* Wraps filter 'f' with a persistent cache backed by 'file'; returns the
 * wrapping filter, or NULL on failure (declaration only here — confirm
 * details against filter-persistent.c). */
struct dev_filter *persistent_filter_create(struct dev_filter *f,
					    const char *file);

/* Discards the cached filter state for 'f'; returns nonzero on success
 * (LVM convention — verify in the implementation). */
int persistent_filter_wipe(struct dev_filter *f);

/* Loads persisted state for 'f'; on success *cft_out receives the parsed
 * config tree. */
int persistent_filter_load(struct dev_filter *f, struct config_tree **cft_out);

/* Writes the current filter state back to the backing file. */
int persistent_filter_dump(struct dev_filter *f);
#endif
| 358 |
359 | <filename>MatrixSDK/Aggregations/Data/Store/Realm/MXRealmAggregationsMapper.h<gh_stars>100-1000
/*
Copyright 2019 New Vector Ltd
Copyright 2019 The Matrix.org Foundation C.I.C
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#import <Foundation/Foundation.h>
#import "MXReactionCount.h"
#import "MXRealmReactionCount.h"
#import "MXReactionRelation.h"
#import "MXRealmReactionRelation.h"
NS_ASSUME_NONNULL_BEGIN
/**
`MXRealmAggregationsMapper` is used to convert `MXRealmReactionCount` into `MXReactionCount` and vice versa.
*/
@interface MXRealmAggregationsMapper : NSObject

/// Converts a Realm-persisted reaction count into its plain model object.
- (MXReactionCount*)reactionCountFromRealmReactionCount:(MXRealmReactionCount*)realmReactionCount;

/// Converts a reaction count into its Realm representation, attached to the
/// given event and room identifiers.
- (MXRealmReactionCount*)realmReactionCountFromReactionCount:(MXReactionCount*)reactionCount onEvent:(NSString*)eventId inRoomId:(NSString*)roomId;

/// Converts a Realm-persisted reaction relation into its plain model object.
- (MXReactionRelation*)reactionRelationFromRealmReactionRelation:(MXRealmReactionRelation*)realmReactionRelation;

/// Converts a reaction relation into its Realm representation for the given
/// room identifier.
- (MXRealmReactionRelation*)realmReactionRelationFromReactionRelation:(MXReactionRelation*)reactionReaction inRoomId:(NSString*)roomId;

@end
NS_ASSUME_NONNULL_END
| 489 |
1,909 | <reponame>grmkris/XChange
package org.knowm.xchange.koinim.service;
import java.io.IOException;
import org.knowm.xchange.Exchange;
import org.knowm.xchange.currency.CurrencyPair;
import org.knowm.xchange.dto.marketdata.OrderBook;
import org.knowm.xchange.dto.marketdata.Ticker;
import org.knowm.xchange.dto.marketdata.Trades;
import org.knowm.xchange.exceptions.NotAvailableFromExchangeException;
import org.knowm.xchange.koinim.KoinimAdapters;
import org.knowm.xchange.service.marketdata.MarketDataService;
/** @author ahmetoz */
public class KoinimMarketDataService extends KoinimMarketDataServiceRaw
    implements MarketDataService {

  public KoinimMarketDataService(Exchange exchange) {
    super(exchange);
  }

  /** Fetches the raw Koinim ticker and adapts it to the generic XChange model. */
  @Override
  public Ticker getTicker(CurrencyPair currencyPair, Object... args) throws IOException {
    return KoinimAdapters.adaptTicker(getKoinimTicker(), currencyPair);
  }

  /** Not available from this exchange; always throws. */
  @Override
  public OrderBook getOrderBook(CurrencyPair currencyPair, Object... args) throws IOException {
    throw new NotAvailableFromExchangeException();
  }

  /** Not available from this exchange; always throws. */
  @Override
  public Trades getTrades(CurrencyPair currencyPair, Object... args) throws IOException {
    throw new NotAvailableFromExchangeException();
  }
}
| 396 |
5,813 | <filename>extensions-contrib/redis-cache/src/main/java/org/apache/druid/client/cache/RedisCacheConfig.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.client.cache;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.annotations.VisibleForTesting;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.metadata.PasswordProvider;
import org.joda.time.Period;
import redis.clients.jedis.Protocol;
import javax.validation.constraints.Max;
import javax.validation.constraints.Min;
/**
 * Jackson-bound configuration for the Druid Redis cache extension. Supports
 * either a standalone Redis (host/port) or cluster mode (see {@link RedisClusterConfig}).
 */
public class RedisCacheConfig
{
  /** Connection settings used when Redis runs in cluster mode. */
  public static class RedisClusterConfig
  {
    // cluster node addresses
    @JsonProperty
    private String nodes;

    // cluster
    @JsonProperty
    private int maxRedirection = 5; // maximum redirections followed for cluster operations

    public String getNodes()
    {
      return nodes;
    }

    public int getMaxRedirection()
    {
      return maxRedirection;
    }
  }

  /**
   * Support for long-format and Period style format
   */
  public static class DurationConfig
  {
    // duration normalized to milliseconds regardless of input format
    private long milliseconds;

    public DurationConfig(String time)
    {
      try {
        // before 0.19.0, only the long-format was supported,
        // try to parse it as long
        this.milliseconds = Long.parseLong(time);
      }
      catch (NumberFormatException e) {
        // try to parse it as a Period string (e.g. "P1D", "PT2S")
        this.milliseconds = Period.parse(time).toStandardDuration().getMillis();
      }
    }

    /**
     * kept for test cases only
     */
    @VisibleForTesting
    DurationConfig(long milliseconds)
    {
      this.milliseconds = milliseconds;
    }

    public long getMilliseconds()
    {
      return milliseconds;
    }

    /** Returns the duration in milliseconds, failing fast on int overflow. */
    public int getMillisecondsAsInt()
    {
      if (milliseconds > Integer.MAX_VALUE) {
        throw new ISE("Milliseconds %d is out of range of int", milliseconds);
      }
      return (int) milliseconds;
    }

    public long getSeconds()
    {
      return milliseconds / 1000;
    }
  }

  /**
   * host of a standalone mode redis
   */
  @JsonProperty
  private String host;

  /**
   * port of a standalone mode redis
   */
  @JsonProperty
  @Min(0)
  @Max(65535)
  private int port;

  // cache entry time-to-live; defaults to one day
  @JsonProperty
  private DurationConfig expiration = new DurationConfig("P1D");

  // operation/connection timeout; defaults to two seconds
  @JsonProperty
  private DurationConfig timeout = new DurationConfig("PT2S");

  /**
   * max connections of redis connection pool
   */
  @JsonProperty
  private int maxTotalConnections = 8;

  /**
   * max idle connections of redis connection pool
   */
  @JsonProperty
  private int maxIdleConnections = 8;

  /**
   * min idle connections of redis connection pool
   */
  @JsonProperty
  private int minIdleConnections = 0;

  // optional auth; resolved lazily through Druid's PasswordProvider
  @JsonProperty
  private PasswordProvider password;

  // Redis logical database index (standalone mode)
  @JsonProperty
  @Min(0)
  private int database = Protocol.DEFAULT_DATABASE;

  // when set, cluster mode is configured instead of host/port
  @JsonProperty
  private RedisClusterConfig cluster;

  public String getHost()
  {
    return host;
  }

  public int getPort()
  {
    return port;
  }

  public DurationConfig getExpiration()
  {
    return expiration;
  }

  public DurationConfig getTimeout()
  {
    return timeout;
  }

  public int getMaxTotalConnections()
  {
    return maxTotalConnections;
  }

  public int getMaxIdleConnections()
  {
    return maxIdleConnections;
  }

  public int getMinIdleConnections()
  {
    return minIdleConnections;
  }

  public RedisClusterConfig getCluster()
  {
    return cluster;
  }

  public PasswordProvider getPassword()
  {
    return password;
  }

  public int getDatabase()
  {
    return database;
  }
}
| 1,438 |
575 | // Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "base/callback_helpers.h"
#include "base/test/metrics/histogram_tester.h"
#include "base/test/metrics/user_action_tester.h"
#include "components/feed/core/v2/api_test/feed_api_test.h"
#include "components/feed/core/v2/config.h"
#include "components/feed/core/v2/feed_stream.h"
#include "components/feed/core/v2/test/callback_receiver.h"
#include "components/offline_pages/core/client_namespace_constants.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace feed {
namespace test {
namespace {
// Loading the feed from the network should surface the loaded articles as
// prefetch suggestions and notify the prefetch service.
TEST_F(FeedApiTest, ProvidesPrefetchSuggestionsWhenModelLoaded) {
  // Setup by triggering a model load.
  response_translator_.InjectResponse(MakeTypicalInitialModelState());
  TestForYouSurface surface(stream_.get());
  WaitForIdleTaskQueue();
  // Because we loaded from the network,
  // PrefetchService::NewSuggestionsAvailable() should have been called.
  EXPECT_EQ(1, prefetch_service_.NewSuggestionsAvailableCallCount());
  CallbackReceiver<std::vector<offline_pages::PrefetchSuggestion>> callback;
  prefetch_service_.suggestions_provider()->GetCurrentArticleSuggestions(
      callback.Bind());
  WaitForIdleTaskQueue();
  ASSERT_TRUE(callback.GetResult());
  const std::vector<offline_pages::PrefetchSuggestion>& suggestions =
      callback.GetResult().value();
  // All metadata fields of the first suggestion should be populated from the
  // injected model state.
  ASSERT_EQ(2UL, suggestions.size());
  EXPECT_EQ("http://content0/", suggestions[0].article_url);
  EXPECT_EQ("title0", suggestions[0].article_title);
  EXPECT_EQ("publisher0", suggestions[0].article_attribution);
  EXPECT_EQ("snippet0", suggestions[0].article_snippet);
  EXPECT_EQ("http://image0/", suggestions[0].thumbnail_url);
  EXPECT_EQ("http://favicon0/", suggestions[0].favicon_url);
  EXPECT_EQ("http://content1/", suggestions[1].article_url);
}

// Suggestions can be served straight from the store without loading the
// model, and doing so must not notify the prefetch service.
TEST_F(FeedApiTest, ProvidesPrefetchSuggestionsWhenModelNotLoaded) {
  store_->OverwriteStream(kForYouStream, MakeTypicalInitialModelState(),
                          base::DoNothing());
  CallbackReceiver<std::vector<offline_pages::PrefetchSuggestion>> callback;
  prefetch_service_.suggestions_provider()->GetCurrentArticleSuggestions(
      callback.Bind());
  WaitForIdleTaskQueue();
  // The model must not have been loaded as a side effect.
  ASSERT_FALSE(stream_->GetModel(kForYouStream));
  ASSERT_TRUE(callback.GetResult());
  const std::vector<offline_pages::PrefetchSuggestion>& suggestions =
      callback.GetResult().value();
  ASSERT_EQ(2UL, suggestions.size());
  EXPECT_EQ("http://content0/", suggestions[0].article_url);
  EXPECT_EQ("http://content1/", suggestions[1].article_url);
  EXPECT_EQ(0, prefetch_service_.NewSuggestionsAvailableCallCount());
}

// Invalid or missing URLs in the stored metadata must come out as empty
// GURLs rather than leaking malformed strings.
TEST_F(FeedApiTest, ScrubsUrlsInProvidedPrefetchSuggestions) {
  {
    auto initial_state = MakeTypicalInitialModelState();
    initial_state->content[0].mutable_prefetch_metadata(0)->set_uri(
        "?notavalidurl?");
    initial_state->content[0].mutable_prefetch_metadata(0)->set_image_url(
        "?asdf?");
    initial_state->content[0].mutable_prefetch_metadata(0)->set_favicon_url(
        "?hi?");
    // Note: this clears the uri set above, so the first suggestion carries
    // no article URL at all.
    initial_state->content[0].mutable_prefetch_metadata(0)->clear_uri();
    store_->OverwriteStream(kForYouStream, std::move(initial_state),
                            base::DoNothing());
  }
  CallbackReceiver<std::vector<offline_pages::PrefetchSuggestion>> callback;
  prefetch_service_.suggestions_provider()->GetCurrentArticleSuggestions(
      callback.Bind());
  WaitForIdleTaskQueue();
  ASSERT_TRUE(callback.GetResult());
  const std::vector<offline_pages::PrefetchSuggestion>& suggestions =
      callback.GetResult().value();
  ASSERT_EQ(2UL, suggestions.size());
  EXPECT_EQ("", suggestions[0].article_url.possibly_invalid_spec());
  EXPECT_EQ("", suggestions[0].thumbnail_url.possibly_invalid_spec());
  EXPECT_EQ("", suggestions[0].favicon_url.possibly_invalid_spec());
}

// Offline badges should appear for content with a matching offline page,
// excluding tab-bound (last-N namespace) pages.
TEST_F(FeedApiTest, OfflineBadgesArePopulatedInitially) {
  // Add two offline pages. We exclude tab-bound pages, so only the first is
  // used.
  offline_page_model_.AddTestPage(GURL("http://content0/"));
  offline_page_model_.AddTestPage(GURL("http://content1/"));
  offline_page_model_.items()[1].client_id.name_space =
      offline_pages::kLastNNamespace;
  response_translator_.InjectResponse(MakeTypicalInitialModelState());
  TestForYouSurface surface(stream_.get());
  WaitForIdleTaskQueue();
  EXPECT_EQ((std::map<std::string, std::string>(
                {{"app/badge0", SerializedOfflineBadgeContent()}})),
            surface.GetDataStoreEntries());
}
TEST_F(FeedApiTest, OfflineBadgesArePopulatedOnNewOfflineItemAdded) {
response_translator_.InjectResponse(MakeTypicalInitialModelState());
TestForYouSurface surface(stream_.get());
WaitForIdleTaskQueue();
ASSERT_EQ((std::map<std::string, std::string>({})),
surface.GetDataStoreEntries());
// Add an offline page.
offline_page_model_.AddTestPage(GURL("http://content1/"));
offline_page_model_.CallObserverOfflinePageAdded(
offline_page_model_.items()[0]);
task_environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(1));
EXPECT_EQ((std::map<std::string, std::string>(
{{"app/badge1", SerializedOfflineBadgeContent()}})),
surface.GetDataStoreEntries());
}
TEST_F(FeedApiTest, OfflineBadgesAreRemovedWhenOfflineItemRemoved) {
offline_page_model_.AddTestPage(GURL("http://content0/"));
response_translator_.InjectResponse(MakeTypicalInitialModelState());
TestForYouSurface surface(stream_.get());
WaitForIdleTaskQueue();
ASSERT_EQ((std::map<std::string, std::string>(
{{"app/badge0", SerializedOfflineBadgeContent()}})),
surface.GetDataStoreEntries());
// Remove the offline page.
offline_page_model_.CallObserverOfflinePageDeleted(
offline_page_model_.items()[0]);
task_environment_.FastForwardBy(base::TimeDelta::FromMilliseconds(1));
EXPECT_EQ((std::map<std::string, std::string>()),
surface.GetDataStoreEntries());
}
TEST_F(FeedApiTest, OfflineBadgesAreProvidedToNewSurfaces) {
offline_page_model_.AddTestPage(GURL("http://content0/"));
response_translator_.InjectResponse(MakeTypicalInitialModelState());
TestForYouSurface surface(stream_.get());
WaitForIdleTaskQueue();
TestForYouSurface surface2(stream_.get());
WaitForIdleTaskQueue();
EXPECT_EQ((std::map<std::string, std::string>(
{{"app/badge0", SerializedOfflineBadgeContent()}})),
surface2.GetDataStoreEntries());
}
TEST_F(FeedApiTest, OfflineBadgesAreRemovedWhenModelIsUnloaded) {
offline_page_model_.AddTestPage(GURL("http://content0/"));
response_translator_.InjectResponse(MakeTypicalInitialModelState());
TestForYouSurface surface(stream_.get());
WaitForIdleTaskQueue();
stream_->UnloadModel(surface.GetStreamType());
// Offline badge no longer present.
EXPECT_EQ((std::map<std::string, std::string>()),
surface.GetDataStoreEntries());
}
TEST_F(FeedApiTest, MultipleOfflineBadgesWithSameUrl) {
{
std::unique_ptr<StreamModelUpdateRequest> state =
MakeTypicalInitialModelState();
const feedwire::PrefetchMetadata& prefetch_metadata1 =
state->content[0].prefetch_metadata(0);
feedwire::PrefetchMetadata& prefetch_metadata2 =
*state->content[0].add_prefetch_metadata();
prefetch_metadata2 = prefetch_metadata1;
prefetch_metadata2.set_badge_id("app/badge0b");
response_translator_.InjectResponse(std::move(state));
}
offline_page_model_.AddTestPage(GURL("http://content0/"));
TestForYouSurface surface(stream_.get());
WaitForIdleTaskQueue();
EXPECT_EQ((std::map<std::string, std::string>(
{{"app/badge0", SerializedOfflineBadgeContent()},
{"app/badge0b", SerializedOfflineBadgeContent()}})),
surface.GetDataStoreEntries());
}
} // namespace
} // namespace test
} // namespace feed
| 2,896 |
561 | #pragma once
#include <unordered_map>
#include "device/device.h"
#include "device/device_listener.h"
#include "common/common.h"
namespace minerva {
class DeviceManager {
public:
DeviceManager();
~DeviceManager();
uint64_t CreateCpuDevice();
uint64_t CreateGpuDevice(int gid);
int GetGpuDeviceCount();
Device* GetDevice(uint64_t id);
void FreeData(uint64_t id);
void RegisterListener(DeviceListener* l) { listener_ = l; }
private:
uint64_t GenerateDeviceId();
DeviceListener* listener_;
std::unordered_map<uint64_t, Device*> device_storage_;
DISALLOW_COPY_AND_ASSIGN(DeviceManager);
};
} // namespace minerva
| 225 |
1,980 | from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser
import os
import textattack
from textattack import Attacker, CommandLineAttackArgs, DatasetArgs, ModelArgs
from textattack.commands import TextAttackCommand
class AttackResumeCommand(TextAttackCommand):
"""The TextAttack attack resume recipe module:
A command line parser to resume a checkpointed attack from user
specifications.
"""
def run(self, args):
checkpoint = self._parse_checkpoint_from_args(args)
assert isinstance(checkpoint.attack_args, CommandLineAttackArgs), (
f"Expect `attack_args` to be of type `textattack.args.CommandLineAttackArgs`, but got type `{type(checkpoint.attack_args)}`. "
f"If saved `attack_args` is not of type `textattack.args.CommandLineAttackArgs`, cannot resume attack from command line."
)
# merge/update arguments
checkpoint.attack_args.parallel = args.parallel
if args.checkpoint_dir:
checkpoint.attack_args.checkpoint_dir = args.checkpoint_dir
if args.checkpoint_interval:
checkpoint.attack_args.checkpoint_interval = args.checkpoint_interval
model_wrapper = ModelArgs._create_model_from_args(
checkpoint.attack_args.attack_args
)
attack = CommandLineAttackArgs._create_attack_from_args(
checkpoint.attack_args, model_wrapper
)
dataset = DatasetArgs.parse_dataset_from_args(checkpoint.attack_args)
attacker = Attacker.from_checkpoint(attack, dataset, checkpoint)
attacker.attack_dataset()
def _parse_checkpoint_from_args(self, args):
file_name = os.path.basename(args.checkpoint_file)
if file_name.lower() == "latest":
dir_path = os.path.dirname(args.checkpoint_file)
dir_path = dir_path if dir_path else "."
chkpt_file_names = [
f for f in os.listdir(dir_path) if f.endswith(".ta.chkpt")
]
assert chkpt_file_names, "AttackCheckpoint directory is empty"
timestamps = [int(f.replace(".ta.chkpt", "")) for f in chkpt_file_names]
latest_file = str(max(timestamps)) + ".ta.chkpt"
checkpoint_path = os.path.join(dir_path, latest_file)
else:
checkpoint_path = args.checkpoint_file
checkpoint = textattack.shared.AttackCheckpoint.load(checkpoint_path)
return checkpoint
@staticmethod
def register_subcommand(main_parser: ArgumentParser):
resume_parser = main_parser.add_parser(
"attack-resume",
help="resume a checkpointed attack",
formatter_class=ArgumentDefaultsHelpFormatter,
)
# Parser for parsing args for resume
resume_parser.add_argument(
"--checkpoint-file",
"-f",
type=str,
required=True,
help='Path of checkpoint file to resume attack from. If "latest" (or "{directory path}/latest") is entered,'
"recover latest checkpoint from either current path or specified directory.",
)
resume_parser.add_argument(
"--checkpoint-dir",
"-d",
required=False,
type=str,
default=None,
help="The directory to save checkpoint files. If not set, use directory from recovered arguments.",
)
resume_parser.add_argument(
"--checkpoint-interval",
"-i",
required=False,
type=int,
help="If set, checkpoint will be saved after attacking every N examples. If not set, no checkpoints will be saved.",
)
resume_parser.add_argument(
"--parallel",
action="store_true",
default=False,
help="Run attack using multiple GPUs.",
)
resume_parser.set_defaults(func=AttackResumeCommand())
| 1,647 |
14,668 | <filename>chrome/browser/metrics_settings/android/java/src/org/chromium/chrome/browser/metrics_settings/RadioButtonGroupMetricsPreference.java
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.metrics_settings;
import android.content.Context;
import android.util.AttributeSet;
import android.widget.RadioGroup;
import androidx.preference.Preference;
/**
* A radio button group used for Metrics settings. Currently, it has 3 options: Extended metrics,
* Basic metrics, and No metrics.
*/
public class RadioButtonGroupMetricsPreference
extends Preference implements RadioGroup.OnCheckedChangeListener {
public RadioButtonGroupMetricsPreference(Context context, AttributeSet attrs) {
super(context, attrs);
setLayoutResource(R.layout.radio_button_group_metrics_preference);
}
@Override
public void onCheckedChanged(RadioGroup group, int checkedId) {}
}
| 305 |
703 | """
@generated
cargo-raze generated Bazel file.
DO NOT EDIT! Replaced on runs of cargo-raze
"""
load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive") # buildifier: disable=load
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe") # buildifier: disable=load
def raze_fetch_remote_crates():
"""This function defines a collection of repos and should be called in a WORKSPACE file"""
maybe(
http_archive,
name = "raze__adler__1_0_2",
url = "https://crates.io/api/v1/crates/adler/1.0.2/download",
type = "tar.gz",
sha256 = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe",
strip_prefix = "adler-1.0.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.adler-1.0.2.bazel"),
)
maybe(
http_archive,
name = "raze__aho_corasick__0_7_18",
url = "https://crates.io/api/v1/crates/aho-corasick/0.7.18/download",
type = "tar.gz",
sha256 = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f",
strip_prefix = "aho-corasick-0.7.18",
build_file = Label("//kythe/rust/cargo/remote:BUILD.aho-corasick-0.7.18.bazel"),
)
maybe(
http_archive,
name = "raze__ansi_term__0_11_0",
url = "https://crates.io/api/v1/crates/ansi_term/0.11.0/download",
type = "tar.gz",
sha256 = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b",
strip_prefix = "ansi_term-0.11.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.ansi_term-0.11.0.bazel"),
)
maybe(
http_archive,
name = "raze__anyhow__1_0_43",
url = "https://crates.io/api/v1/crates/anyhow/1.0.43/download",
type = "tar.gz",
sha256 = "28ae2b3dec75a406790005a200b1bd89785afc02517a00ca99ecfe093ee9e6cf",
strip_prefix = "anyhow-1.0.43",
build_file = Label("//kythe/rust/cargo/remote:BUILD.anyhow-1.0.43.bazel"),
)
maybe(
http_archive,
name = "raze__atty__0_2_14",
url = "https://crates.io/api/v1/crates/atty/0.2.14/download",
type = "tar.gz",
sha256 = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8",
strip_prefix = "atty-0.2.14",
build_file = Label("//kythe/rust/cargo/remote:BUILD.atty-0.2.14.bazel"),
)
maybe(
http_archive,
name = "raze__autocfg__1_0_1",
url = "https://crates.io/api/v1/crates/autocfg/1.0.1/download",
type = "tar.gz",
sha256 = "cdb031dd78e28731d87d56cc8ffef4a8f36ca26c38fe2de700543e627f8a464a",
strip_prefix = "autocfg-1.0.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.autocfg-1.0.1.bazel"),
)
maybe(
http_archive,
name = "raze__base64__0_13_0",
url = "https://crates.io/api/v1/crates/base64/0.13.0/download",
type = "tar.gz",
sha256 = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd",
strip_prefix = "base64-0.13.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.base64-0.13.0.bazel"),
)
maybe(
http_archive,
name = "raze__bitflags__1_3_2",
url = "https://crates.io/api/v1/crates/bitflags/1.3.2/download",
type = "tar.gz",
sha256 = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a",
strip_prefix = "bitflags-1.3.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.bitflags-1.3.2.bazel"),
)
maybe(
http_archive,
name = "raze__block_buffer__0_9_0",
url = "https://crates.io/api/v1/crates/block-buffer/0.9.0/download",
type = "tar.gz",
sha256 = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4",
strip_prefix = "block-buffer-0.9.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.block-buffer-0.9.0.bazel"),
)
maybe(
http_archive,
name = "raze__byteorder__1_4_3",
url = "https://crates.io/api/v1/crates/byteorder/1.4.3/download",
type = "tar.gz",
sha256 = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610",
strip_prefix = "byteorder-1.4.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.byteorder-1.4.3.bazel"),
)
maybe(
http_archive,
name = "raze__bzip2__0_4_3",
url = "https://crates.io/api/v1/crates/bzip2/0.4.3/download",
type = "tar.gz",
sha256 = "6afcd980b5f3a45017c57e57a2fcccbb351cc43a356ce117ef760ef8052b89b0",
strip_prefix = "bzip2-0.4.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.bzip2-0.4.3.bazel"),
)
maybe(
http_archive,
name = "raze__bzip2_sys__0_1_11_1_0_8",
url = "https://crates.io/api/v1/crates/bzip2-sys/0.1.11+1.0.8/download",
type = "tar.gz",
sha256 = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc",
strip_prefix = "bzip2-sys-0.1.11+1.0.8",
build_file = Label("//kythe/rust/cargo/remote:BUILD.bzip2-sys-0.1.11+1.0.8.bazel"),
)
maybe(
http_archive,
name = "raze__cc__1_0_70",
url = "https://crates.io/api/v1/crates/cc/1.0.70/download",
type = "tar.gz",
sha256 = "d26a6ce4b6a484fa3edb70f7efa6fc430fd2b87285fe8b84304fd0936faa0dc0",
strip_prefix = "cc-1.0.70",
build_file = Label("//kythe/rust/cargo/remote:BUILD.cc-1.0.70.bazel"),
)
maybe(
http_archive,
name = "raze__cfg_if__0_1_10",
url = "https://crates.io/api/v1/crates/cfg-if/0.1.10/download",
type = "tar.gz",
sha256 = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822",
strip_prefix = "cfg-if-0.1.10",
build_file = Label("//kythe/rust/cargo/remote:BUILD.cfg-if-0.1.10.bazel"),
)
maybe(
http_archive,
name = "raze__cfg_if__1_0_0",
url = "https://crates.io/api/v1/crates/cfg-if/1.0.0/download",
type = "tar.gz",
sha256 = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd",
strip_prefix = "cfg-if-1.0.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.cfg-if-1.0.0.bazel"),
)
maybe(
http_archive,
name = "raze__clap__2_33_3",
url = "https://crates.io/api/v1/crates/clap/2.33.3/download",
type = "tar.gz",
sha256 = "37e58ac78573c40708d45522f0d80fa2f01cc4f9b4e2bf749807255454312002",
strip_prefix = "clap-2.33.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.clap-2.33.3.bazel"),
)
maybe(
http_archive,
name = "raze__cloudabi__0_0_3",
url = "https://crates.io/api/v1/crates/cloudabi/0.0.3/download",
type = "tar.gz",
sha256 = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f",
strip_prefix = "cloudabi-0.0.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.cloudabi-0.0.3.bazel"),
)
maybe(
http_archive,
name = "raze__colored__2_0_0",
url = "https://crates.io/api/v1/crates/colored/2.0.0/download",
type = "tar.gz",
sha256 = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd",
strip_prefix = "colored-2.0.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.colored-2.0.0.bazel"),
)
maybe(
http_archive,
name = "raze__cpufeatures__0_2_1",
url = "https://crates.io/api/v1/crates/cpufeatures/0.2.1/download",
type = "tar.gz",
sha256 = "95059428f66df56b63431fdb4e1947ed2190586af5c5a8a8b71122bdf5a7f469",
strip_prefix = "cpufeatures-0.2.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.cpufeatures-0.2.1.bazel"),
)
maybe(
http_archive,
name = "raze__crc32fast__1_2_1",
url = "https://crates.io/api/v1/crates/crc32fast/1.2.1/download",
type = "tar.gz",
sha256 = "81156fece84ab6a9f2afdb109ce3ae577e42b1228441eded99bd77f627953b1a",
strip_prefix = "crc32fast-1.2.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.crc32fast-1.2.1.bazel"),
)
maybe(
http_archive,
name = "raze__crossbeam_channel__0_5_1",
url = "https://crates.io/api/v1/crates/crossbeam-channel/0.5.1/download",
type = "tar.gz",
sha256 = "06ed27e177f16d65f0f0c22a213e17c696ace5dd64b14258b52f9417ccb52db4",
strip_prefix = "crossbeam-channel-0.5.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.crossbeam-channel-0.5.1.bazel"),
)
maybe(
http_archive,
name = "raze__crossbeam_deque__0_8_1",
url = "https://crates.io/api/v1/crates/crossbeam-deque/0.8.1/download",
type = "tar.gz",
sha256 = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e",
strip_prefix = "crossbeam-deque-0.8.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.crossbeam-deque-0.8.1.bazel"),
)
maybe(
http_archive,
name = "raze__crossbeam_epoch__0_9_5",
url = "https://crates.io/api/v1/crates/crossbeam-epoch/0.9.5/download",
type = "tar.gz",
sha256 = "4ec02e091aa634e2c3ada4a392989e7c3116673ef0ac5b72232439094d73b7fd",
strip_prefix = "crossbeam-epoch-0.9.5",
build_file = Label("//kythe/rust/cargo/remote:BUILD.crossbeam-epoch-0.9.5.bazel"),
)
maybe(
http_archive,
name = "raze__crossbeam_utils__0_8_5",
url = "https://crates.io/api/v1/crates/crossbeam-utils/0.8.5/download",
type = "tar.gz",
sha256 = "d82cfc11ce7f2c3faef78d8a684447b40d503d9681acebed6cb728d45940c4db",
strip_prefix = "crossbeam-utils-0.8.5",
build_file = Label("//kythe/rust/cargo/remote:BUILD.crossbeam-utils-0.8.5.bazel"),
)
maybe(
http_archive,
name = "raze__derive_new__0_5_9",
url = "https://crates.io/api/v1/crates/derive-new/0.5.9/download",
type = "tar.gz",
sha256 = "3418329ca0ad70234b9735dc4ceed10af4df60eff9c8e7b06cb5e520d92c3535",
strip_prefix = "derive-new-0.5.9",
build_file = Label("//kythe/rust/cargo/remote:BUILD.derive-new-0.5.9.bazel"),
)
maybe(
http_archive,
name = "raze__digest__0_9_0",
url = "https://crates.io/api/v1/crates/digest/0.9.0/download",
type = "tar.gz",
sha256 = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066",
strip_prefix = "digest-0.9.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.digest-0.9.0.bazel"),
)
maybe(
http_archive,
name = "raze__either__1_6_1",
url = "https://crates.io/api/v1/crates/either/1.6.1/download",
type = "tar.gz",
sha256 = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457",
strip_prefix = "either-1.6.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.either-1.6.1.bazel"),
)
maybe(
http_archive,
name = "raze__flate2__1_0_21",
url = "https://crates.io/api/v1/crates/flate2/1.0.21/download",
type = "tar.gz",
sha256 = "80edafed416a46fb378521624fab1cfa2eb514784fd8921adbe8a8d8321da811",
strip_prefix = "flate2-1.0.21",
build_file = Label("//kythe/rust/cargo/remote:BUILD.flate2-1.0.21.bazel"),
)
maybe(
http_archive,
name = "raze__fst__0_4_7",
url = "https://crates.io/api/v1/crates/fst/0.4.7/download",
type = "tar.gz",
sha256 = "7ab85b9b05e3978cc9a9cf8fea7f01b494e1a09ed3037e16ba39edc7a29eb61a",
strip_prefix = "fst-0.4.7",
build_file = Label("//kythe/rust/cargo/remote:BUILD.fst-0.4.7.bazel"),
)
maybe(
http_archive,
name = "raze__fuchsia_cprng__0_1_1",
url = "https://crates.io/api/v1/crates/fuchsia-cprng/0.1.1/download",
type = "tar.gz",
sha256 = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba",
strip_prefix = "fuchsia-cprng-0.1.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.fuchsia-cprng-0.1.1.bazel"),
)
maybe(
http_archive,
name = "raze__generic_array__0_14_4",
url = "https://crates.io/api/v1/crates/generic-array/0.14.4/download",
type = "tar.gz",
sha256 = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817",
strip_prefix = "generic-array-0.14.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.generic-array-0.14.4.bazel"),
)
maybe(
http_archive,
name = "raze__hermit_abi__0_1_19",
url = "https://crates.io/api/v1/crates/hermit-abi/0.1.19/download",
type = "tar.gz",
sha256 = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33",
strip_prefix = "hermit-abi-0.1.19",
build_file = Label("//kythe/rust/cargo/remote:BUILD.hermit-abi-0.1.19.bazel"),
)
maybe(
http_archive,
name = "raze__hex__0_4_3",
url = "https://crates.io/api/v1/crates/hex/0.4.3/download",
type = "tar.gz",
sha256 = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70",
strip_prefix = "hex-0.4.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.hex-0.4.3.bazel"),
)
maybe(
http_archive,
name = "raze__itertools__0_9_0",
url = "https://crates.io/api/v1/crates/itertools/0.9.0/download",
type = "tar.gz",
sha256 = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b",
strip_prefix = "itertools-0.9.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.itertools-0.9.0.bazel"),
)
maybe(
http_archive,
name = "raze__itoa__0_4_8",
url = "https://crates.io/api/v1/crates/itoa/0.4.8/download",
type = "tar.gz",
sha256 = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4",
strip_prefix = "itoa-0.4.8",
build_file = Label("//kythe/rust/cargo/remote:BUILD.itoa-0.4.8.bazel"),
)
maybe(
http_archive,
name = "raze__json__0_12_4",
url = "https://crates.io/api/v1/crates/json/0.12.4/download",
type = "tar.gz",
sha256 = "078e285eafdfb6c4b434e0d31e8cfcb5115b651496faca5749b88fafd4f23bfd",
strip_prefix = "json-0.12.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.json-0.12.4.bazel"),
)
maybe(
http_archive,
name = "raze__lazy_static__1_4_0",
url = "https://crates.io/api/v1/crates/lazy_static/1.4.0/download",
type = "tar.gz",
sha256 = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646",
strip_prefix = "lazy_static-1.4.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.lazy_static-1.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__libc__0_2_101",
url = "https://crates.io/api/v1/crates/libc/0.2.101/download",
type = "tar.gz",
sha256 = "3cb00336871be5ed2c8ed44b60ae9959dc5b9f08539422ed43f09e34ecaeba21",
strip_prefix = "libc-0.2.101",
build_file = Label("//kythe/rust/cargo/remote:BUILD.libc-0.2.101.bazel"),
)
maybe(
http_archive,
name = "raze__lock_api__0_3_4",
url = "https://crates.io/api/v1/crates/lock_api/0.3.4/download",
type = "tar.gz",
sha256 = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75",
strip_prefix = "lock_api-0.3.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.lock_api-0.3.4.bazel"),
)
maybe(
http_archive,
name = "raze__log__0_4_14",
url = "https://crates.io/api/v1/crates/log/0.4.14/download",
type = "tar.gz",
sha256 = "51b9bbe6c47d51fc3e1a9b945965946b4c44142ab8792c50835a980d362c2710",
strip_prefix = "log-0.4.14",
build_file = Label("//kythe/rust/cargo/remote:BUILD.log-0.4.14.bazel"),
)
maybe(
http_archive,
name = "raze__memchr__2_4_1",
url = "https://crates.io/api/v1/crates/memchr/2.4.1/download",
type = "tar.gz",
sha256 = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a",
strip_prefix = "memchr-2.4.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.memchr-2.4.1.bazel"),
)
maybe(
http_archive,
name = "raze__memoffset__0_6_4",
url = "https://crates.io/api/v1/crates/memoffset/0.6.4/download",
type = "tar.gz",
sha256 = "59accc507f1338036a0477ef61afdae33cde60840f4dfe481319ce3ad116ddf9",
strip_prefix = "memoffset-0.6.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.memoffset-0.6.4.bazel"),
)
maybe(
http_archive,
name = "raze__miniz_oxide__0_4_4",
url = "https://crates.io/api/v1/crates/miniz_oxide/0.4.4/download",
type = "tar.gz",
sha256 = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b",
strip_prefix = "miniz_oxide-0.4.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.miniz_oxide-0.4.4.bazel"),
)
maybe(
http_archive,
name = "raze__num_cpus__1_13_0",
url = "https://crates.io/api/v1/crates/num_cpus/1.13.0/download",
type = "tar.gz",
sha256 = "05499f3756671c15885fee9034446956fff3f243d6077b91e5767df161f766b3",
strip_prefix = "num_cpus-1.13.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.num_cpus-1.13.0.bazel"),
)
maybe(
http_archive,
name = "raze__opaque_debug__0_3_0",
url = "https://crates.io/api/v1/crates/opaque-debug/0.3.0/download",
type = "tar.gz",
sha256 = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5",
strip_prefix = "opaque-debug-0.3.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.opaque-debug-0.3.0.bazel"),
)
maybe(
http_archive,
name = "raze__parking_lot__0_10_2",
url = "https://crates.io/api/v1/crates/parking_lot/0.10.2/download",
type = "tar.gz",
sha256 = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e",
strip_prefix = "parking_lot-0.10.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.parking_lot-0.10.2.bazel"),
)
maybe(
http_archive,
name = "raze__parking_lot_core__0_7_2",
url = "https://crates.io/api/v1/crates/parking_lot_core/0.7.2/download",
type = "tar.gz",
sha256 = "d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3",
strip_prefix = "parking_lot_core-0.7.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.parking_lot_core-0.7.2.bazel"),
)
maybe(
http_archive,
name = "raze__pkg_config__0_3_19",
url = "https://crates.io/api/v1/crates/pkg-config/0.3.19/download",
type = "tar.gz",
sha256 = "3831453b3449ceb48b6d9c7ad7c96d5ea673e9b470a1dc578c2ce6521230884c",
strip_prefix = "pkg-config-0.3.19",
build_file = Label("//kythe/rust/cargo/remote:BUILD.pkg-config-0.3.19.bazel"),
)
maybe(
http_archive,
name = "raze__proc_macro2__1_0_29",
url = "https://crates.io/api/v1/crates/proc-macro2/1.0.29/download",
type = "tar.gz",
sha256 = "b9f5105d4fdaab20335ca9565e106a5d9b82b6219b5ba735731124ac6711d23d",
strip_prefix = "proc-macro2-1.0.29",
build_file = Label("//kythe/rust/cargo/remote:BUILD.proc-macro2-1.0.29.bazel"),
)
maybe(
http_archive,
name = "raze__quick_error__2_0_1",
url = "https://crates.io/api/v1/crates/quick-error/2.0.1/download",
type = "tar.gz",
sha256 = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3",
strip_prefix = "quick-error-2.0.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.quick-error-2.0.1.bazel"),
)
maybe(
http_archive,
name = "raze__quote__1_0_9",
url = "https://crates.io/api/v1/crates/quote/1.0.9/download",
type = "tar.gz",
sha256 = "c3d0b9745dc2debf507c8422de05d7226cc1f0644216dfdfead988f9b1ab32a7",
strip_prefix = "quote-1.0.9",
build_file = Label("//kythe/rust/cargo/remote:BUILD.quote-1.0.9.bazel"),
)
maybe(
http_archive,
name = "raze__rand__0_4_6",
url = "https://crates.io/api/v1/crates/rand/0.4.6/download",
type = "tar.gz",
sha256 = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293",
strip_prefix = "rand-0.4.6",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rand-0.4.6.bazel"),
)
maybe(
http_archive,
name = "raze__rand_core__0_3_1",
url = "https://crates.io/api/v1/crates/rand_core/0.3.1/download",
type = "tar.gz",
sha256 = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b",
strip_prefix = "rand_core-0.3.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rand_core-0.3.1.bazel"),
)
maybe(
http_archive,
name = "raze__rand_core__0_4_2",
url = "https://crates.io/api/v1/crates/rand_core/0.4.2/download",
type = "tar.gz",
sha256 = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc",
strip_prefix = "rand_core-0.4.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rand_core-0.4.2.bazel"),
)
maybe(
http_archive,
name = "raze__rayon__1_5_1",
url = "https://crates.io/api/v1/crates/rayon/1.5.1/download",
type = "tar.gz",
sha256 = "c06aca804d41dbc8ba42dfd964f0d01334eceb64314b9ecf7c5fad5188a06d90",
strip_prefix = "rayon-1.5.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rayon-1.5.1.bazel"),
)
maybe(
http_archive,
name = "raze__rayon_core__1_9_1",
url = "https://crates.io/api/v1/crates/rayon-core/1.9.1/download",
type = "tar.gz",
sha256 = "d78120e2c850279833f1dd3582f730c4ab53ed95aeaaaa862a2a5c71b1656d8e",
strip_prefix = "rayon-core-1.9.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rayon-core-1.9.1.bazel"),
)
maybe(
http_archive,
name = "raze__rdrand__0_4_0",
url = "https://crates.io/api/v1/crates/rdrand/0.4.0/download",
type = "tar.gz",
sha256 = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2",
strip_prefix = "rdrand-0.4.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rdrand-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__redox_syscall__0_1_57",
url = "https://crates.io/api/v1/crates/redox_syscall/0.1.57/download",
type = "tar.gz",
sha256 = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce",
strip_prefix = "redox_syscall-0.1.57",
build_file = Label("//kythe/rust/cargo/remote:BUILD.redox_syscall-0.1.57.bazel"),
)
maybe(
http_archive,
name = "raze__regex__1_5_4",
url = "https://crates.io/api/v1/crates/regex/1.5.4/download",
type = "tar.gz",
sha256 = "d07a8629359eb56f1e2fb1652bb04212c072a87ba68546a04065d525673ac461",
strip_prefix = "regex-1.5.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.regex-1.5.4.bazel"),
)
maybe(
http_archive,
name = "raze__regex_syntax__0_6_25",
url = "https://crates.io/api/v1/crates/regex-syntax/0.6.25/download",
type = "tar.gz",
sha256 = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b",
strip_prefix = "regex-syntax-0.6.25",
build_file = Label("//kythe/rust/cargo/remote:BUILD.regex-syntax-0.6.25.bazel"),
)
maybe(
http_archive,
name = "raze__remove_dir_all__0_5_3",
url = "https://crates.io/api/v1/crates/remove_dir_all/0.5.3/download",
type = "tar.gz",
sha256 = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7",
strip_prefix = "remove_dir_all-0.5.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.remove_dir_all-0.5.3.bazel"),
)
maybe(
http_archive,
name = "raze__rls_analysis__0_18_2",
url = "https://crates.io/api/v1/crates/rls-analysis/0.18.2/download",
type = "tar.gz",
sha256 = "3c3f485c06c41fd839ea396dd1491615a1286916ade1e3d3cc0b1e7146f5c424",
strip_prefix = "rls-analysis-0.18.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rls-analysis-0.18.2.bazel"),
)
maybe(
http_archive,
name = "raze__rls_data__0_19_1",
url = "https://crates.io/api/v1/crates/rls-data/0.19.1/download",
type = "tar.gz",
sha256 = "a58135eb039f3a3279a33779192f0ee78b56f57ae636e25cec83530e41debb99",
strip_prefix = "rls-data-0.19.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rls-data-0.19.1.bazel"),
)
maybe(
http_archive,
name = "raze__rls_span__0_5_4",
url = "https://crates.io/api/v1/crates/rls-span/0.5.4/download",
type = "tar.gz",
sha256 = "b6e80f614ad4b37910bfe9b029af19c6f92612bb8e1af66e37d35829bf4ef6d1",
strip_prefix = "rls-span-0.5.4",
build_file = Label("//kythe/rust/cargo/remote:BUILD.rls-span-0.5.4.bazel"),
)
maybe(
http_archive,
name = "raze__ryu__1_0_5",
url = "https://crates.io/api/v1/crates/ryu/1.0.5/download",
type = "tar.gz",
sha256 = "71d301d4193d031abdd79ff7e3dd721168a9572ef3fe51a1517aba235bd8f86e",
strip_prefix = "ryu-1.0.5",
build_file = Label("//kythe/rust/cargo/remote:BUILD.ryu-1.0.5.bazel"),
)
maybe(
http_archive,
name = "raze__scopeguard__1_1_0",
url = "https://crates.io/api/v1/crates/scopeguard/1.1.0/download",
type = "tar.gz",
sha256 = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd",
strip_prefix = "scopeguard-1.1.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.scopeguard-1.1.0.bazel"),
)
maybe(
http_archive,
name = "raze__serde__1_0_130",
url = "https://crates.io/api/v1/crates/serde/1.0.130/download",
type = "tar.gz",
sha256 = "f12d06de37cf59146fbdecab66aa99f9fe4f78722e3607577a5375d66bd0c913",
strip_prefix = "serde-1.0.130",
build_file = Label("//kythe/rust/cargo/remote:BUILD.serde-1.0.130.bazel"),
)
maybe(
http_archive,
name = "raze__serde_derive__1_0_130",
url = "https://crates.io/api/v1/crates/serde_derive/1.0.130/download",
type = "tar.gz",
sha256 = "d7bc1a1ab1961464eae040d96713baa5a724a8152c1222492465b54322ec508b",
strip_prefix = "serde_derive-1.0.130",
build_file = Label("//kythe/rust/cargo/remote:BUILD.serde_derive-1.0.130.bazel"),
)
maybe(
http_archive,
name = "raze__serde_json__1_0_67",
url = "https://crates.io/api/v1/crates/serde_json/1.0.67/download",
type = "tar.gz",
sha256 = "a7f9e390c27c3c0ce8bc5d725f6e4d30a29d26659494aa4b17535f7522c5c950",
strip_prefix = "serde_json-1.0.67",
build_file = Label("//kythe/rust/cargo/remote:BUILD.serde_json-1.0.67.bazel"),
)
maybe(
http_archive,
name = "raze__serial_test__0_4_0",
url = "https://crates.io/api/v1/crates/serial_test/0.4.0/download",
type = "tar.gz",
sha256 = "fef5f7c7434b2f2c598adc6f9494648a1e41274a75c0ba4056f680ae0c117fd6",
strip_prefix = "serial_test-0.4.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.serial_test-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__serial_test_derive__0_4_0",
url = "https://crates.io/api/v1/crates/serial_test_derive/0.4.0/download",
type = "tar.gz",
sha256 = "d08338d8024b227c62bd68a12c7c9883f5c66780abaef15c550dc56f46ee6515",
strip_prefix = "serial_test_derive-0.4.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.serial_test_derive-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__sha2__0_9_6",
url = "https://crates.io/api/v1/crates/sha2/0.9.6/download",
type = "tar.gz",
sha256 = "9204c41a1597a8c5af23c82d1c921cb01ec0a4c59e07a9c7306062829a3903f3",
strip_prefix = "sha2-0.9.6",
build_file = Label("//kythe/rust/cargo/remote:BUILD.sha2-0.9.6.bazel"),
)
maybe(
http_archive,
name = "raze__smallvec__1_6_1",
url = "https://crates.io/api/v1/crates/smallvec/1.6.1/download",
type = "tar.gz",
sha256 = "fe0f37c9e8f3c5a4a66ad655a93c74daac4ad00c441533bf5c6e7990bb42604e",
strip_prefix = "smallvec-1.6.1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.smallvec-1.6.1.bazel"),
)
maybe(
http_archive,
name = "raze__strsim__0_8_0",
url = "https://crates.io/api/v1/crates/strsim/0.8.0/download",
type = "tar.gz",
sha256 = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a",
strip_prefix = "strsim-0.8.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.strsim-0.8.0.bazel"),
)
maybe(
http_archive,
name = "raze__syn__1_0_75",
url = "https://crates.io/api/v1/crates/syn/1.0.75/download",
type = "tar.gz",
sha256 = "b7f58f7e8eaa0009c5fec437aabf511bd9933e4b2d7407bd05273c01a8906ea7",
strip_prefix = "syn-1.0.75",
build_file = Label("//kythe/rust/cargo/remote:BUILD.syn-1.0.75.bazel"),
)
maybe(
http_archive,
name = "raze__tempdir__0_3_7",
url = "https://crates.io/api/v1/crates/tempdir/0.3.7/download",
type = "tar.gz",
sha256 = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8",
strip_prefix = "tempdir-0.3.7",
build_file = Label("//kythe/rust/cargo/remote:BUILD.tempdir-0.3.7.bazel"),
)
maybe(
http_archive,
name = "raze__textwrap__0_11_0",
url = "https://crates.io/api/v1/crates/textwrap/0.11.0/download",
type = "tar.gz",
sha256 = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060",
strip_prefix = "textwrap-0.11.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.textwrap-0.11.0.bazel"),
)
maybe(
http_archive,
name = "raze__thiserror__1_0_28",
url = "https://crates.io/api/v1/crates/thiserror/1.0.28/download",
type = "tar.gz",
sha256 = "283d5230e63df9608ac7d9691adc1dfb6e701225436eb64d0b9a7f0a5a04f6ec",
strip_prefix = "thiserror-1.0.28",
build_file = Label("//kythe/rust/cargo/remote:BUILD.thiserror-1.0.28.bazel"),
)
maybe(
http_archive,
name = "raze__thiserror_impl__1_0_28",
url = "https://crates.io/api/v1/crates/thiserror-impl/1.0.28/download",
type = "tar.gz",
sha256 = "fa3884228611f5cd3608e2d409bf7dce832e4eb3135e3f11addbd7e41bd68e71",
strip_prefix = "thiserror-impl-1.0.28",
build_file = Label("//kythe/rust/cargo/remote:BUILD.thiserror-impl-1.0.28.bazel"),
)
maybe(
http_archive,
name = "raze__time__0_1_44",
url = "https://crates.io/api/v1/crates/time/0.1.44/download",
type = "tar.gz",
sha256 = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255",
strip_prefix = "time-0.1.44",
build_file = Label("//kythe/rust/cargo/remote:BUILD.time-0.1.44.bazel"),
)
maybe(
http_archive,
name = "raze__typenum__1_14_0",
url = "https://crates.io/api/v1/crates/typenum/1.14.0/download",
type = "tar.gz",
sha256 = "b63708a265f51345575b27fe43f9500ad611579e764c79edbc2037b1121959ec",
strip_prefix = "typenum-1.14.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.typenum-1.14.0.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_width__0_1_8",
url = "https://crates.io/api/v1/crates/unicode-width/0.1.8/download",
type = "tar.gz",
sha256 = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3",
strip_prefix = "unicode-width-0.1.8",
build_file = Label("//kythe/rust/cargo/remote:BUILD.unicode-width-0.1.8.bazel"),
)
maybe(
http_archive,
name = "raze__unicode_xid__0_2_2",
url = "https://crates.io/api/v1/crates/unicode-xid/0.2.2/download",
type = "tar.gz",
sha256 = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3",
strip_prefix = "unicode-xid-0.2.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.unicode-xid-0.2.2.bazel"),
)
maybe(
http_archive,
name = "raze__vec_map__0_8_2",
url = "https://crates.io/api/v1/crates/vec_map/0.8.2/download",
type = "tar.gz",
sha256 = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191",
strip_prefix = "vec_map-0.8.2",
build_file = Label("//kythe/rust/cargo/remote:BUILD.vec_map-0.8.2.bazel"),
)
maybe(
http_archive,
name = "raze__version_check__0_9_3",
url = "https://crates.io/api/v1/crates/version_check/0.9.3/download",
type = "tar.gz",
sha256 = "5fecdca9a5291cc2b8dcf7dc02453fee791a280f3743cb0905f8822ae463b3fe",
strip_prefix = "version_check-0.9.3",
build_file = Label("//kythe/rust/cargo/remote:BUILD.version_check-0.9.3.bazel"),
)
maybe(
http_archive,
name = "raze__wasi__0_10_0_wasi_snapshot_preview1",
url = "https://crates.io/api/v1/crates/wasi/0.10.0+wasi-snapshot-preview1/download",
type = "tar.gz",
sha256 = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f",
strip_prefix = "wasi-0.10.0+wasi-snapshot-preview1",
build_file = Label("//kythe/rust/cargo/remote:BUILD.wasi-0.10.0+wasi-snapshot-preview1.bazel"),
)
maybe(
http_archive,
name = "raze__winapi__0_3_9",
url = "https://crates.io/api/v1/crates/winapi/0.3.9/download",
type = "tar.gz",
sha256 = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419",
strip_prefix = "winapi-0.3.9",
build_file = Label("//kythe/rust/cargo/remote:BUILD.winapi-0.3.9.bazel"),
)
maybe(
http_archive,
name = "raze__winapi_i686_pc_windows_gnu__0_4_0",
url = "https://crates.io/api/v1/crates/winapi-i686-pc-windows-gnu/0.4.0/download",
type = "tar.gz",
sha256 = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6",
strip_prefix = "winapi-i686-pc-windows-gnu-0.4.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.winapi-i686-pc-windows-gnu-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__winapi_x86_64_pc_windows_gnu__0_4_0",
url = "https://crates.io/api/v1/crates/winapi-x86_64-pc-windows-gnu/0.4.0/download",
type = "tar.gz",
sha256 = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f",
strip_prefix = "winapi-x86_64-pc-windows-gnu-0.4.0",
build_file = Label("//kythe/rust/cargo/remote:BUILD.winapi-x86_64-pc-windows-gnu-0.4.0.bazel"),
)
maybe(
http_archive,
name = "raze__zip__0_5_13",
url = "https://crates.io/api/v1/crates/zip/0.5.13/download",
type = "tar.gz",
sha256 = "93ab48844d61251bb3835145c521d88aa4031d7139e8485990f60ca911fa0815",
strip_prefix = "zip-0.5.13",
build_file = Label("//kythe/rust/cargo/remote:BUILD.zip-0.5.13.bazel"),
)
| 19,817 |
478 | <gh_stars>100-1000
/*
Q Light Controller Plus
showfunction.h
Copyright (c) <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0.txt
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#ifndef SHOWFUNCTION_H
#define SHOWFUNCTION_H
#include <QColor>
#include "function.h"
class QXmlStreamReader;
class Doc;
/** @addtogroup engine_functions Functions
* @{
*/
#define KXMLShowFunction "ShowFunction"
/**
 * Wrapper for a Function placed on a Show timeline.
 *
 * It binds a QLC+ Function ID to its position on the timeline (start time),
 * an item-specific duration, a display color, a lock flag and an intensity
 * override attribute ID. Instances are (de)serialized via loadXML/saveXML.
 */
class ShowFunction: public QObject
{
    Q_OBJECT
    Q_DISABLE_COPY(ShowFunction)

    Q_PROPERTY(int functionID READ functionID WRITE setFunctionID NOTIFY functionIDChanged)
    Q_PROPERTY(int startTime READ startTime WRITE setStartTime NOTIFY startTimeChanged)
    Q_PROPERTY(int duration READ duration WRITE setDuration NOTIFY durationChanged)
    Q_PROPERTY(QColor color READ color WRITE setColor NOTIFY colorChanged)
    Q_PROPERTY(bool locked READ isLocked WRITE setLocked NOTIFY lockedChanged)

public:
    ShowFunction(QObject *parent = 0);
    virtual ~ShowFunction() {}

    /** Get/Set the Function ID this class represents */
    void setFunctionID(quint32 id);
    quint32 functionID() const;

    /** Get/Set the Function start time over a Show timeline */
    void setStartTime(quint32 time);
    quint32 startTime() const;

    /** Get/Set this item duration, not necessarily corresponding
     *  to the original Function duration */
    void setDuration(quint32 duration);
    quint32 duration() const;
    quint32 duration(const Doc *doc) const;

    /** Get/Set the color of the item when rendered in the Show Manager */
    void setColor(QColor color);
    QColor color() const;

    /** Default rendering color for a Function of the given type */
    static QColor defaultColor(Function::Type type);

    /** Get/Set the lock state of this ShowFunction */
    void setLocked(bool locked);
    bool isLocked() const;

    /** Get/Set the intensity attribute override ID to
     *  control a Function intensity */
    int intensityOverrideId() const;
    void setIntensityOverrideId(int id);

signals:
    /* Property change notifications (one per Q_PROPERTY above) */
    void functionIDChanged();
    void startTimeChanged();
    void durationChanged();
    void colorChanged();
    void lockedChanged();

private:
    /** ID of the QLC+ Function this class represents */
    quint32 m_id;

    /** Start time of the Function in milliseconds */
    quint32 m_startTime;

    /** Duration of the Function in milliseconds */
    quint32 m_duration;

    /** Background color to be used when displaying the Function in
     *  the Show Manager */
    QColor m_color;

    /** Flag to indicate if this function is locked in the Show Manager timeline */
    bool m_locked;

    /** Intensity attribute override ID */
    int m_intensityOverrideId;

    /************************************************************************
     * Load & Save
     ***********************************************************************/
public:
    /** Load ShowFunction contents from $root */
    bool loadXML(QXmlStreamReader &root);

    /** Save ShowFunction contents to $doc */
    bool saveXML(QXmlStreamWriter *doc) const;
};
/** @} */
#endif // SHOWFUNCTION_H
| 1,060 |
521 | //
// AdjustTrackingHelper.h
// AdjustExample-iWatch
//
// Created by <NAME> on 29/04/15.
// Copyright (c) 2015 adjust GmbH. All rights reserved.
//
#import <Foundation/Foundation.h>
@class AdjustDelegate;
// Helper that centralizes Adjust SDK tracking calls for the iWatch example.
// sharedInstance suggests a singleton -- confirm against the implementation.
@interface AdjustTrackingHelper : NSObject

// Returns the shared helper instance.
+ (id)sharedInstance;

// Sets up Adjust tracking, forwarding SDK callbacks to 'delegate'.
- (void)initialize:(NSObject<AdjustDelegate> *)delegate;

// Track the example event variants (simple / revenue / callback / partner).
- (void)trackSimpleEvent;
- (void)trackRevenueEvent;
- (void)trackCallbackEvent;
- (void)trackPartnerEvent;

@end
| 159 |
318 | /**
* @file evdev.c Input event device UI module
*
* Copyright (C) 2010 <NAME>
*/
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <linux/input.h>
#include <string.h>
#include <re.h>
#include <baresip.h>
#include "print.h"
/**
* @defgroup evdev evdev
*
* User-Interface (UI) module using the Linux input subsystem.
*
* The following options can be configured:
*
\verbatim
evdev_device /dev/input/event0 # Name of the input device to use
\endverbatim
*/
struct ui_st {
int fd;
};
static struct ui_st *evdev;
static char evdev_device[64] = "/dev/input/event0";
/**
 * Stop listening on the event device and close its file descriptor.
 * Safe to call more than once; a negative fd means already closed.
 *
 * @param st UI state
 */
static void evdev_close(struct ui_st *st)
{
	const int fd = st->fd;

	if (fd < 0)
		return;

	st->fd = -1;

	fd_close(fd);
	(void)close(fd);
}
/* mem_deref destructor: ensure the device is closed when the state is freed */
static void evdev_destructor(void *arg)
{
	struct ui_st *st = arg;

	evdev_close(st);
}
/**
 * Map a Linux input-event key code to its ASCII equivalent.
 *
 * @param modifier Modifier key code seen before this key; only
 *                 KEY_LEFTSHIFT is significant (turns '3' into '#')
 * @param code     Key code from the input event (KEY_*)
 *
 * @return ASCII character, or -1 if the code is not handled
 */
static int code2ascii(uint16_t modifier, uint16_t code)
{
	switch (code) {

	case KEY_0: return '0';
	case KEY_1: return '1';
	case KEY_2: return '2';
	/* shifted '3' is '#' on common keypad layouts */
	case KEY_3: return KEY_LEFTSHIFT==modifier ? '#' : '3';
	case KEY_4: return '4';
	case KEY_5: return '5';
	case KEY_6: return '6';
	case KEY_7: return '7';
	case KEY_8: return '8';
	case KEY_9: return '9';
	case KEY_BACKSPACE: return '\b';
	case KEY_ENTER: return '\n';
	case KEY_ESC: return 0x1b;
	case KEY_KPASTERISK: return '*';

	/* The KEY_NUMERIC_*, KEY_KP* codes below are guarded because they
	   are not defined by every kernel header version. */
#ifdef KEY_NUMERIC_0
	case KEY_NUMERIC_0: return '0';
#endif
#ifdef KEY_NUMERIC_1
	case KEY_NUMERIC_1: return '1';
#endif
#ifdef KEY_NUMERIC_2
	case KEY_NUMERIC_2: return '2';
#endif
#ifdef KEY_NUMERIC_3
	case KEY_NUMERIC_3: return '3';
#endif
#ifdef KEY_NUMERIC_4
	case KEY_NUMERIC_4: return '4';
#endif
#ifdef KEY_NUMERIC_5
	case KEY_NUMERIC_5: return '5';
#endif
#ifdef KEY_NUMERIC_6
	case KEY_NUMERIC_6: return '6';
#endif
#ifdef KEY_NUMERIC_7
	case KEY_NUMERIC_7: return '7';
#endif
#ifdef KEY_NUMERIC_8
	case KEY_NUMERIC_8: return '8';
#endif
#ifdef KEY_NUMERIC_9
	case KEY_NUMERIC_9: return '9';
#endif
#ifdef KEY_NUMERIC_STAR
	case KEY_NUMERIC_STAR: return '*';
#endif
#ifdef KEY_NUMERIC_POUND
	case KEY_NUMERIC_POUND: return '#';
#endif
#ifdef KEY_KP0
	case KEY_KP0: return '0';
#endif
#ifdef KEY_KP1
	case KEY_KP1: return '1';
#endif
#ifdef KEY_KP2
	case KEY_KP2: return '2';
#endif
#ifdef KEY_KP3
	case KEY_KP3: return '3';
#endif
#ifdef KEY_KP4
	case KEY_KP4: return '4';
#endif
#ifdef KEY_KP5
	case KEY_KP5: return '5';
#endif
#ifdef KEY_KP6
	case KEY_KP6: return '6';
#endif
#ifdef KEY_KP7
	case KEY_KP7: return '7';
#endif
#ifdef KEY_KP8
	case KEY_KP8: return '8';
#endif
#ifdef KEY_KP9
	case KEY_KP9: return '9';
#endif
#ifdef KEY_KPDOT
	case KEY_KPDOT: return 0x1b;
#endif
#ifdef KEY_KPENTER
	case KEY_KPENTER: return '\n';
#endif
	default: return -1;
	}
}
/**
 * re_printf print handler that writes the formatted buffer to stderr.
 *
 * @param p   Buffer to write
 * @param sz  Number of bytes to write
 * @param arg Handler argument (unused)
 *
 * @return 0 on success, otherwise errno from the failed write
 */
static int stderr_handler(const char *p, size_t sz, void *arg)
{
	ssize_t n;
	(void)arg;

	n = write(STDERR_FILENO, p, sz);

	return (n < 0) ? errno : 0;
}
/* Forward a decoded key to the baresip UI subsystem; feedback from the
 * key handler is printed to stderr via the handler above */
static void reportkey(struct ui_st *st, int ascii)
{
	static struct re_printf pf_stderr = {stderr_handler, NULL};
	(void)st;

	ui_input_key(baresip_uis(), ascii, &pf_stderr);
}
static void evdev_fd_handler(int flags, void *arg)
{
struct ui_st *st = arg;
struct input_event evv[64]; /* the events (up to 64 at once) */
uint16_t modifier = 0;
size_t n;
int i;
/* This might happen if you unplug a USB device */
if (flags & FD_EXCEPT) {
warning("evdev: fd handler: FD_EXCEPT - device unplugged?\n");
evdev_close(st);
return;
}
n = read(st->fd, evv, sizeof(evv));
if (n < (int) sizeof(struct input_event)) {
warning("evdev: event: short read (%m)\n", errno);
return;
}
for (i = 0; i < (int) (n / sizeof(struct input_event)); i++) {
const struct input_event *ev = &evv[i];
if (EV_KEY != ev->type)
continue;
if (KEY_LEFTSHIFT == ev->code) {
modifier = KEY_LEFTSHIFT;
continue;
}
if (1 == ev->value) {
const int ascii = code2ascii(modifier, ev->code);
if (-1 == ascii) {
warning("evdev: unhandled key code %u\n",
ev->code);
}
else
reportkey(st, ascii);
modifier = 0;
}
else if (0 == ev->value) {
reportkey(st, KEYCODE_REL);
}
}
}
/**
 * Allocate UI state and start listening on the given input event device.
 *
 * @param stp Pointer to allocated state, set on success
 * @param dev Path to the input event device node (e.g. /dev/input/event0)
 *
 * @return 0 if success, otherwise errorcode
 */
static int evdev_alloc(struct ui_st **stp, const char *dev)
{
	struct ui_st *st;
	int err = 0;

	if (!stp)
		return EINVAL;

	st = mem_zalloc(sizeof(*st), evdev_destructor);
	if (!st)
		return ENOMEM;

	st->fd = open(dev, O_RDWR);
	if (st->fd < 0) {
		err = errno;
		warning("evdev: failed to open device '%s' (%m)\n", dev, err);
		goto out;
	}

#if 0
	/* grab the event device to prevent it from propagating
	   its events to the regular keyboard driver */
	if (-1 == ioctl(st->fd, EVIOCGRAB, (void *)1)) {
		warning("evdev: ioctl EVIOCGRAB on %s (%m)\n", dev, errno);
	}
#endif

	/* dump device capabilities for diagnostics */
	print_name(st->fd);
	print_events(st->fd);
	print_keys(st->fd);
	print_leds(st->fd);

	/* register for poll events on the device */
	err = fd_listen(st->fd, FD_READ, evdev_fd_handler, st);
	if (err)
		goto out;

 out:
	if (err)
		mem_deref(st);
	else
		*stp = st;

	return err;
}
/**
 * Emit (or stop) the bell sound on the event device.
 *
 * @param st    UI state
 * @param value 1 to start the bell, 0 to stop it
 *
 * @return 0 if success, otherwise errno from the failed write
 */
static int buzz(const struct ui_st *st, int value)
{
	struct input_event ev;
	ssize_t n;

	memset(&ev, 0, sizeof(ev));
	ev.type  = EV_SND;
	ev.code  = SND_BELL;
	ev.value = value;

	n = write(st->fd, &ev, sizeof(ev));
	if (n < 0) {
		warning("evdev: output: write fd=%d (%m)\n", st->fd, errno);
		return errno;
	}

	/* Bug fix: the previous code returned errno unconditionally, so a
	   successful write could still report a stale, non-zero errno. */
	return 0;
}
/**
 * UI output handler: ring the device bell for each BEL character in the
 * string, and silence it for every other character.
 *
 * @param str String to "display"
 *
 * @return 0 if success, otherwise errorcode
 */
static int evdev_output(const char *str)
{
	struct ui_st *st = evdev;
	const char *p;
	int err = 0;

	if (!st || !str)
		return EINVAL;

	for (p = str; *p; ++p)
		err |= buzz(st, ('\a' == *p) ? 1 : 0);

	return err;
}
/* UI registration record: only the output handler is set here; key input
 * is reported asynchronously from the fd handler */
static struct ui ui_evdev = {
	.name = "evdev",
	.outputh = evdev_output
};
/* Module entry point: read configuration, open the device, register the UI */
static int module_init(void)
{
	int err;

	/* optional config override of the device node */
	conf_get_str(conf_cur(), "evdev_device",
		     evdev_device, sizeof(evdev_device));

	err = evdev_alloc(&evdev, evdev_device);
	if (err)
		return err;

	ui_register(baresip_uis(), &ui_evdev);

	return 0;
}
/* Module exit point: unregister the UI and release the device state */
static int module_close(void)
{
	ui_unregister(&ui_evdev);

	evdev = mem_deref(evdev);

	return 0;
}
/* Module descriptor exported to the baresip module loader */
const struct mod_export DECL_EXPORTS(evdev) = {
	"evdev",
	"ui",
	module_init,
	module_close
};
| 2,955 |
715 | #include <bits/stdc++.h>
#define MAX 1000007
using namespace std;
// Suffix automaton state: 'len' is the length of the longest substring in
// this state's endpos class, 'link' is the suffix link (-1 for the root),
// and 'next' holds the outgoing transitions by character.
struct node{
    int link, len;
    map<char, int> next;
} st[MAX];

// Number of states currently in use, and the state representing the whole
// string processed so far.
int sz, last;
// Reset the automaton to a single initial (root) state at index 0.
void init(){
    last = 0;
    sz = 1;

    st[0].link = -1;
    st[0].len = 0;
    st[0].next.clear();
}
// Extend the automaton with one character (standard online construction).
// Note: st[last].next[c] uses map::operator[], so a missing transition reads
// as 0 -- valid only because no transition ever targets the root state.
void add(char c){
    int cur = sz++;
    st[cur].len = st[last].len + 1;
    // Walk up the suffix links, pointing missing transitions on c at the
    // new state, until a state that already has a transition on c.
    for(; last != -1 && !st[last].next[c]; last = st[last].link) st[last].next[c] = cur;
    if(last == -1) st[cur].link = 0;  // fell off the root: link to root
    else{
        int q = st[last].next[c];
        if(st[q].len == st[last].len + 1) st[cur].link = q;  // continuous transition
        else{
            // Split case: clone q with the shorter length so suffix-link
            // invariants hold, then redirect transitions and links.
            int clone = sz++;
            st[clone].len = st[last].len + 1;
            st[clone].link = st[q].link;
            st[clone].next = st[q].next;
            for(; last != -1 && st[last].next[c] == q; last = st[last].link) st[last].next[c] = clone;
            st[q].link = st[cur].link = clone;
        }
    }
    last = cur;  // the new state now represents the whole string
}
// Read a word from stdin and build its suffix automaton.
int main(){
    string palavra; cin >> palavra;

    init();
    // Range-for avoids the signed/unsigned comparison of the previous
    // index-based loop (int i vs palavra.size()).
    for (char c : palavra) add(c);

    return 0;
}
4,538 | /*
* Copyright (C) 2015-2020 Alibaba Group Holding Limited
*/
#ifndef __A2DP_CODEC_OPUS_H__
#define __A2DP_CODEC_OPUS_H__

#if defined(__cplusplus)
extern "C" {
#endif

#include "avdtp_api.h"
#include "btapp.h"

#if defined(MASTER_USE_OPUS) || defined(ALL_USE_OPUS)
/* Register the OPUS codec for the A2DP profile instance 'index'.
 * Presumably returns a bt_status_t success/error code -- confirm against
 * the implementation; only compiled when an *_USE_OPUS build flag is set. */
bt_status_t a2dp_codec_opus_init(int index);
#endif

#if defined(__cplusplus)
}
#endif

#endif /* __A2DP_CODEC_OPUS_H__ */
790 | import os
from django.conf.urls import patterns
# Directory of this module; used to resolve the template directory below.
here = os.path.dirname(__file__)

# Legacy-style URLconf (django.conf.urls.patterns was removed in Django 1.10):
# statically serves files under ./custom_templates, e.g. for tests.
urlpatterns = patterns('',
    (r'^custom_templates/(?P<path>.*)$', 'django.views.static.serve', {
        'document_root': os.path.join(here, 'custom_templates'),
    }),
)
| 107 |
348 | {"nom":"Saclay","circ":"5ème circonscription","dpt":"Essonne","inscrits":2708,"abs":1034,"votants":1674,"blancs":17,"nuls":4,"exp":1653,"res":[{"nuance":"REM","nom":"<NAME>","voix":829},{"nuance":"LR","nom":"Mme <NAME>","voix":291},{"nuance":"FI","nom":"M. <NAME>","voix":136},{"nuance":"SOC","nom":"Mme <NAME>","voix":124},{"nuance":"FN","nom":"Mme <NAME>","voix":104},{"nuance":"ECO","nom":"<NAME>","voix":60},{"nuance":"DLF","nom":"Mme <NAME>","voix":37},{"nuance":"DIV","nom":"<NAME>","voix":26},{"nuance":"ECO","nom":"M. <NAME>","voix":15},{"nuance":"COM","nom":"Mme <NAME>","voix":14},{"nuance":"DIV","nom":"M. <NAME>","voix":9},{"nuance":"EXG","nom":"M. <NAME>","voix":5},{"nuance":"DIV","nom":"M. <NAME>","voix":2},{"nuance":"DIV","nom":"M. <NAME>","voix":1},{"nuance":"DVD","nom":"Mme <NAME>","voix":0}]} | 330 |
828 | <filename>hasor-rsf/rsf-registry/src/main/java/net/hasor/registry/client/support/RegistryClientReceiver.java
/*
* Copyright 2008-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.hasor.registry.client.support;
import net.hasor.core.EventContext;
import net.hasor.core.Inject;
import net.hasor.registry.client.RsfCenterListener;
import net.hasor.registry.common.RegistryConstants;
import net.hasor.rsf.RsfBindInfo;
import net.hasor.rsf.RsfContext;
import net.hasor.rsf.RsfUpdater;
import net.hasor.rsf.domain.RsfCenterException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Objects;
/**
 * Receiver for data pushed by the registry center: applies the configuration
 * updates (provider addresses, routing scripts, flow-control rules) that the
 * center publishes for locally registered services.
 * @version : 2016-02-18
 * @author 赵永春 (<EMAIL>)
 */
public class RegistryClientReceiver implements RsfCenterListener {
    protected Logger logger = LoggerFactory.getLogger(RegistryConstants.LoggerName_CenterReceiver);
    @Inject
    private RsfContext rsfContext;
    @Inject
    private EventContext eventContext;

    /** Event types that only make sense for a service registered locally. */
    private static final String[] checkServiceEventArrays;

    static {
        checkServiceEventArrays = new String[] { //
                "AppendAddressEvent", //
                "RefreshAddressEvent",//
                "RemoveAddressEvent", //
                "UpdateServiceRouteEvent", //
                "UpdateMethodRouteEvent", //
                "UpdateArgsRouteEvent", //
                "UpdateFlowControlEvent" };
    }

    /**
     * Handle one pushed event for the service identified by group/name/version.
     *
     * @param group     service group
     * @param name      service name
     * @param version   service version
     * @param eventType event type name, resolved via EventProcessMapping
     * @param eventBody event payload
     * @return result of the event processor, or true if the service is not
     *         registered locally (nothing to update)
     * @throws Throwable if the event type is unknown, or if an address/route
     *         event targets an undefined service
     */
    @Override
    public boolean onEvent(String group, String name, String version, String eventType, String eventBody) throws Throwable {
        RsfUpdater rsfUpdater = Objects.requireNonNull(this.rsfContext, " rsfContext is null.").getUpdater();
        EventProcess process = EventProcessMapping.findEventProcess(eventType);
        if (process == null) {
            throw new RsfCenterException(eventType + " eventType is undefined.");
        }
        // Some event types require the target service to be registered locally
        for (String checkItem : checkServiceEventArrays) {
            if (checkItem.equals(eventType)) {
                if (this.rsfContext.getServiceInfo(group, name, version) == null) {
                    throw new RsfCenterException(String.format("group =%s ,name =%s version =%s ->service is undefined. ", group, name, version));
                }
            }
        }
        //
        RsfBindInfo<?> serviceInfo = rsfContext.getServiceInfo(group, name, version);
        if (serviceInfo == null) {
            // Service not registered here: acknowledge the event as handled.
            return true;
        }
        boolean result = process.processEvent(rsfUpdater, serviceInfo.getBindID(), eventBody);
        logger.info("centerEvent event ={} ,g ={} ,n ={} ,v ={} , result ={}, body ={}.", //
                eventType, group, name, version, result, eventBody);
        return result;
    }
}
1,253 | <reponame>avi-pal/al-go-rithms
# include<stdio.h>
/* Return the sum of the decimal digits of n (0 for n <= 0). */
int getSum(int n)
{
    int sum = 0;

    while (n > 0) {
        sum += n % 10;
        n /= 10;
    }

    return sum;
}
/* Demo driver: print the digit sum of a sample value (687 -> 21). */
int main()
{
    int n = 687;
    printf(" %d ", getSum(n));
    return 0;
}
| 132 |
2,496 | <gh_stars>1000+
/******************************************************************************
* Qwt Widget Library
* Copyright (C) 1997 <NAME>
* Copyright (C) 2002 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the Qwt License, Version 1.0
*****************************************************************************/
#include "qwt_spline_parametrization.h"
/*!
   Constructor

   The parametrization type is fixed for the lifetime of the object.

   \param type Parametrization type
   \sa type()
 */
QwtSplineParametrization::QwtSplineParametrization( int type )
    : m_type( type )
{
}
//! Destructor (no resources to release)
QwtSplineParametrization::~QwtSplineParametrization()
{
}
/*!
   \brief Calculate the parameter value increment for 2 points

   Dispatches to the calculation that matches the parametrization type
   passed to the constructor. Unknown types yield a uniform increment of 1.

   \param point1 First point
   \param point2 Second point

   \return Value increment
 */
double QwtSplineParametrization::valueIncrement(
    const QPointF& point1, const QPointF& point2 ) const
{
    double increment = 1.0;

    switch( m_type )
    {
        case QwtSplineParametrization::ParameterX:
            increment = valueIncrementX( point1, point2 );
            break;

        case QwtSplineParametrization::ParameterY:
            increment = valueIncrementY( point1, point2 );
            break;

        case QwtSplineParametrization::ParameterCentripetal:
            increment = valueIncrementCentripetal( point1, point2 );
            break;

        case QwtSplineParametrization::ParameterChordal:
            increment = valueIncrementChordal( point1, point2 );
            break;

        case QwtSplineParametrization::ParameterManhattan:
            increment = valueIncrementManhattan( point1, point2 );
            break;

        case QwtSplineParametrization::ParameterUniform:
            increment = valueIncrementUniform( point1, point2 );
            break;

        default:
            break;
    }

    return increment;
}
//! \return Parametrization type, as passed to the constructor
int QwtSplineParametrization::type() const
{
    return m_type;
}
| 779 |
544 | from office365.runtime.paths.resource_path import ResourcePath
from office365.sharepoint.base_entity import BaseEntity
class VideoServiceDiscoverer(BaseEntity):
    """Client entity bound to the fixed SharePoint Publishing resource path
    ``SP.Publishing.VideoServiceDiscoverer``."""

    def __init__(self, context):
        # The server-side resource path is constant for this entity type.
        super(VideoServiceDiscoverer, self).__init__(context, ResourcePath("SP.Publishing.VideoServiceDiscoverer"))
| 93 |
471 | from django.core.management.base import BaseCommand
from corehq.blobs import get_blob_db
from corehq.form_processor.interfaces.dbaccessors import FormAccessors
class Command(BaseCommand):
    # Shown in `manage.py help`.
    help = "Delete form attachments matching filter, for use upon client request."

    def add_arguments(self, parser):
        """Register the domain/xmlns/app-id filters and optional flags."""
        parser.add_argument('domain')
        parser.add_argument('--xmlns', required=True)
        parser.add_argument('--app-id', required=True)
        parser.add_argument('--xform-ids', help='Comma-separated xform ids')
        parser.add_argument('--dry-run', action='store_true')

    def handle(self, domain, xmlns, app_id, dry_run, xform_ids, **options):
        """List matching form attachments and, unless --dry-run, delete their
        blobs after an interactive confirmation prompt."""
        accessor = FormAccessors(domain)
        if xform_ids:
            # Explicit list of forms takes precedence over the xmlns scan.
            form_ids = xform_ids.split(',')
        else:
            form_ids = accessor.iter_form_ids_by_xmlns(xmlns)
        attachments_to_delete = []
        for form_id in form_ids:
            form = accessor.get_with_attachments(form_id)
            # Re-check all filters; the id source alone does not guarantee
            # the form belongs to this domain/xmlns/app combination.
            if form.domain != domain or form.xmlns != xmlns or form.app_id != app_id:
                continue
            print(f'{form_id}\t{",".join(form.attachments) or "No attachments to delete"}')
            for name, blob_meta in form.attachments.items():
                attachments_to_delete.append((form_id, name, blob_meta))
        if not dry_run:
            if input("Delete all the above attachments? [y/n]").lower() in ('y', 'yes'):
                for form_id, name, blob_meta in attachments_to_delete:
                    print(f'Deleting {form_id}/{name} ({blob_meta.key})')
                    # todo: if this is ever too slow, we can bulk delete instead
                    # https://github.com/dimagi/commcare-hq/pull/26672#discussion_r380522955
                    get_blob_db().delete(blob_meta.key)
| 801 |
617 | // Copyright (c) Open Enclave SDK contributors.
// Licensed under the MIT License.
/**
* @file mallinfo.h
*
* This file defines an interface that pluggable allocators can implement.
* See
* https://github.com/openenclave/openenclave/blob/master/docs/DesignDocs/Mallinfo.md
*
*/
#ifndef OE_ADVANCED_MALLINFO_H
#define OE_ADVANCED_MALLINFO_H
#include <openenclave/bits/result.h>
/**
* @cond IGNORE
*/
OE_EXTERNC_BEGIN
/**
* @endcond
*/
typedef struct _oe_mallinfo
{
/// Maximum number of bytes that can be allocated in total
size_t max_total_heap_size;
/// Number of bytes allocated at the moment.
size_t current_allocated_heap_size;
/// Highest value reached by `current_allocated_heap_size` during execution.
size_t peak_allocated_heap_size;
} oe_mallinfo_t;
/**
* Obtain current memory usage statistics.
*
* Users may make this call on allocators that support it, to find out how much
* memory can be allocated in total, how much is allocated at the moment, and
* the high watermark of allocation so far.
*
* @param[out] info An oe_mallinfo_t struct, to be populated by the allocator.
*
* @retval OE_OK Allocation information was set successfully.
* @retval OE_UNSUPPORTED The allocator does not support this interface.
* @retval OE_FAILURE Other failure.
*/
oe_result_t oe_allocator_mallinfo(oe_mallinfo_t* info);
OE_EXTERNC_END
#endif // OE_ADVANCED_MALLINFO_H
| 492 |
388 | <reponame>andreikop/botbuilder-python
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
from botframework.connector.auth import BotFrameworkAuthentication, ClaimsIdentity
from .channel_service_handler import ChannelServiceHandler
class CloudChannelServiceHandler(ChannelServiceHandler):
    """ChannelServiceHandler that delegates channel authentication to a
    BotFrameworkAuthentication instance."""

    def __init__(  # pylint: disable=super-init-not-called
        self, auth: BotFrameworkAuthentication
    ):
        # super().__init__ is deliberately skipped -- presumably because the
        # base initializer requires credential providers this handler
        # replaces with `auth`; verify against ChannelServiceHandler.
        if not auth:
            raise TypeError("Auth can't be None")
        self._auth = auth

    async def _authenticate(self, auth_header: str) -> ClaimsIdentity:
        # Validate the incoming channel request via the injected auth object.
        return await self._auth.authenticate_channel_request(auth_header)
| 221 |
5,079 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
import time
from desktop.lib.exceptions import StructuredThriftTransportException
from desktop.lib.exceptions_renderable import PopupException
from libsentry.client2 import SentryClient
from libsentry.sentry_site import get_sentry_server
if sys.version_info[0] > 2:
from django.utils.translation import gettext as _
else:
from django.utils.translation import ugettext as _
LOG = logging.getLogger(__name__)
def create_client(client_class, username, server, component=None):
    """Instantiate a Sentry client of `client_class` for the given server.

    `server` is a dict with 'hostname' and 'port'; `component` is only
    forwarded for the v2 SentryClient. Raises PopupException when no server
    is given.
    """
    if server is None:
        raise PopupException(_('Cannot create a Sentry client without server hostname and port.'))

    kwargs = {
        'host': server['hostname'],
        'port': server['port'],
        'username': username,
    }
    if client_class == SentryClient:
        kwargs['component'] = component
    return client_class(**kwargs)
def get_next_available_server(client_class, username, failed_host=None, component=None, create_client_fn=create_client):
    '''
    Given a failed host, attempts to find the next available host and returns a Sentry server if found, as well as a list
    of all Sentry hosts attempted.

    Each candidate returned by get_sentry_server() is verified with a
    lightweight list_sentry_roles_by_group() call; rotation stops once it
    wraps back around to the originally failed host.
    '''
    current_host = failed_host
    has_next = True
    attempted_hosts = []

    while has_next:
        LOG.warning('Could not connect to Sentry server %s, attempting to fetch next available client.' % current_host)
        next_server = get_sentry_server(current_host=current_host)
        # Track the candidate's hostname directly: the previous code read
        # client.host in the except-branch, which raised UnboundLocalError
        # whenever create_client_fn itself threw before 'client' was bound.
        next_host = next_server['hostname'] if next_server is not None else None
        time.sleep(1)

        try:
            client = create_client_fn(client_class, username, next_server, component)
            client.list_sentry_roles_by_group(groupName='*')
            # If above operation succeeds, return client
            LOG.info('Successfully connected to Sentry server %s, after attempting [%s], returning client.' % (client.host, ', '.join(attempted_hosts)))
            return next_server, attempted_hosts
        except StructuredThriftTransportException:
            # If we have come back around to the original failed host, exit
            if next_host is None or next_host == failed_host:
                has_next = False
            else:
                current_host = next_host
                attempted_hosts.append(current_host)
        except Exception as e:
            raise PopupException(_('Encountered unexpected error while trying to find available Sentry client: %s' % e))

    return None, attempted_hosts
| 950 |
14,668 | {"schema":"pedal_concepts_runtime","schema_version":3,"data_version":16163558,"tokenize_each_character":false,"dictionary":["ajalugu","aken","alusta","andmed","ava","brauser","chrome","eemalda","google","halda","haldamine","haldur","inkognito","jaoks","kaardi","kaardid","kaarditeave","krediitkaardi","krediitkaardid","krediitkaarditeave","krediitkaart","kuidas","kuni","kustuta","kuva","käivita","küpsised","küpsisefailid","loo","makse","maksed","makseviisid","mandaat","minu","muuda","muutmine","näita","paroolid","peal","privaatne","režiim","salvesta","sees","sirvimine","sisene","teave","tee","tühjenda","vaheleht","vahemikus","vahemälu","värskenda","一時的踏板placeholder"],"pedals":[{"id":1,"groups":[{"required":false,"single":true,"synonyms":[[8,6],[43],[5],[6]]},{"required":true,"single":true,"synonyms":[[47],[7],[23]]},{"required":true,"single":true,"synonyms":[[27],[26],[50],[0],[3],[45]]}],"omnibox_ui_strings":null,"url":""},{"id":2,"groups":[{"required":false,"single":true,"synonyms":[[8,6],[5],[6]]},{"required":true,"single":true,"synonyms":[[51],[10],[35],[41],[11],[36],[34],[24]]},{"required":true,"single":true,"synonyms":[[37],[32]]}],"omnibox_ui_strings":null,"url":""},{"id":3,"groups":[{"required":false,"single":true,"synonyms":[[8,6],[5],[6]]},{"required":true,"single":true,"synonyms":[[51],[35],[41],[9],[34]]},{"required":true,"single":true,"synonyms":[[17,45],[14,45],[19],[18],[20],[16],[31],[15],[30],[29]]}],"omnibox_ui_strings":null,"url":""},{"id":4,"groups":[{"required":false,"single":true,"synonyms":[[8,6],[5],[6]]},{"required":false,"single":true,"synonyms":[[25],[2],[44],[4],[28]]},{"required":true,"single":true,"synonyms":[[12,48],[39,48],[12,40],[39,40],[12,1],[39,1],[12]]}],"omnibox_ui_strings":null,"url":""},{"id":27,"groups":[{"required":true,"single":true,"synonyms":[[52]]}],"omnibox_ui_strings":null,"url":"chrome://dino"},{"id":31,"groups":[{"required":true,"single":true,"synonyms":[[52]]}],"omnibox_ui_strings":null,"url":"chrome://sett
ings"},{"id":33,"groups":[{"required":true,"single":true,"synonyms":[[52]]}],"omnibox_ui_strings":null,"url":"chrome://history"},{"id":35,"groups":[{"required":true,"single":true,"synonyms":[[52]]}],"omnibox_ui_strings":null,"url":"chrome://settings/accessibility"}],"ignore_group":{"required":false,"single":false,"synonyms":[[49],[21],[13],[42],[33],[38],[22],[46]]},"max_tokens":32} | 853 |
14,668 | <filename>third_party/blink/public/common/web_preferences/web_preferences.h<gh_stars>1000+
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef THIRD_PARTY_BLINK_PUBLIC_COMMON_WEB_PREFERENCES_WEB_PREFERENCES_H_
#define THIRD_PARTY_BLINK_PUBLIC_COMMON_WEB_PREFERENCES_WEB_PREFERENCES_H_
#include <map>
#include <string>
#include <vector>
#include "build/build_config.h"
#include "net/nqe/effective_connection_type.h"
#include "third_party/blink/public/common/common_export.h"
#include "third_party/blink/public/mojom/css/preferred_color_scheme.mojom-shared.h"
#include "third_party/blink/public/mojom/css/preferred_contrast.mojom-shared.h"
#include "third_party/blink/public/mojom/v8_cache_options.mojom-forward.h"
#include "third_party/blink/public/mojom/webpreferences/web_preferences.mojom-shared.h"
#include "url/gurl.h"
#include "url/origin.h"
namespace blink {
class WebView;
namespace web_pref {
using blink::mojom::EffectiveConnectionType;
// Map of ISO 15924 four-letter script code to font family. For example,
// "Arab" to "My Arabic Font".
typedef std::map<std::string, std::u16string> ScriptFontFamilyMap;
// The ISO 15924 script code for undetermined script aka Common. It's the
// default used on WebKit's side to get/set a font setting when no script is
// specified.
BLINK_COMMON_EXPORT extern const char kCommonScript[];
// A struct for managing blink's settings.
//
// Adding new values to this class probably involves updating
// blink::WebSettings,
// browser/profiles/profile.cc, and
// content/public/common/common_param_traits_macros.h
struct BLINK_COMMON_EXPORT WebPreferences {
  // Per-script font family preferences for each generic CSS family,
  // keyed by ISO 15924 script code (see ScriptFontFamilyMap above).
  ScriptFontFamilyMap standard_font_family_map;
  ScriptFontFamilyMap fixed_font_family_map;
  ScriptFontFamilyMap serif_font_family_map;
  ScriptFontFamilyMap sans_serif_font_family_map;
  ScriptFontFamilyMap cursive_font_family_map;
  ScriptFontFamilyMap fantasy_font_family_map;
  // Default/minimum font sizes applied when pages do not specify their own.
  int default_font_size;
  int default_fixed_font_size;
  int minimum_font_size;
  int minimum_logical_font_size;
  std::string default_encoding;
  // Feature toggles mirrored into Blink's Settings object.
  bool context_menu_on_mouse_up;
  bool javascript_enabled;
  bool web_security_enabled;
  bool loads_images_automatically;
  bool images_enabled;
  bool plugins_enabled;
  bool dom_paste_enabled;
  bool shrinks_standalone_images_to_fit;
  bool text_areas_are_resizable;
  bool allow_scripts_to_close_windows;
  bool remote_fonts_enabled;
  bool javascript_can_access_clipboard;
  bool xslt_enabled;
  // We don't use dns_prefetching_enabled to disable DNS prefetching. Instead,
  // we disable the feature at a lower layer so that we catch non-WebKit uses
  // of DNS prefetch as well.
  bool dns_prefetching_enabled;
  // Preference to save data. When enabled, requests will contain the header
  // 'Save-Data: on'.
  bool data_saver_enabled;
  // Whether data saver holdback for Web APIs is enabled. If enabled, data saver
  // appears as disabled to the web consumers even if it has been actually
  // enabled by the user.
  bool data_saver_holdback_web_api_enabled;
  bool local_storage_enabled;
  bool databases_enabled;
  bool tabs_to_links;
  bool disable_ipc_flooding_protection;
  bool hyperlink_auditing_enabled;
  bool allow_universal_access_from_file_urls;
  bool allow_file_access_from_file_urls;
  // Graphics / WebGL / legacy Flash 3D toggles.
  bool webgl1_enabled;
  bool webgl2_enabled;
  bool pepper_3d_enabled;
  bool flash_3d_enabled;
  bool flash_stage3d_enabled;
  bool flash_stage3d_baseline_enabled;
  bool privileged_webgl_extensions_enabled;
  bool webgl_errors_to_console_enabled;
  bool hide_scrollbars;
  // 2D canvas acceleration and rendering behavior.
  bool accelerated_2d_canvas_enabled;
  bool canvas_2d_layers_enabled = false;
  bool canvas_context_lost_in_background_enabled = false;
  bool new_canvas_2d_api_enabled;
  bool antialiased_2d_canvas_disabled;
  bool antialiased_clips_2d_canvas_enabled;
  bool accelerated_filters_enabled;
  bool deferred_filters_enabled;
  bool container_culling_enabled;
  bool allow_running_insecure_content;
  // If true, taints all <canvas> elements, regardless of origin.
  bool disable_reading_from_canvas;
  // Strict mixed content checking disables both displaying and running insecure
  // mixed content, and disables embedder notifications that such content was
  // requested (thereby preventing user override).
  bool strict_mixed_content_checking;
  // Strict powerful feature restrictions block insecure usage of powerful
  // features (like device orientation) that we haven't yet disabled for the web
  // at large.
  bool strict_powerful_feature_restrictions;
  // TODO(jww): Remove when WebView no longer needs this exception.
  bool allow_geolocation_on_insecure_origins;
  // Disallow user opt-in for blockable mixed content.
  bool strictly_block_blockable_mixed_content;
  bool block_mixed_plugin_content;
  bool password_echo_enabled;
  bool should_print_backgrounds;
  bool should_clear_document_background;
  bool enable_scroll_animator;
  bool threaded_scrolling_enabled;
  bool prefers_reduced_motion;
  // Touch/pointer/hover capability settings.
  bool touch_event_feature_detection_enabled;
  int pointer_events_max_touch_points;
  int available_pointer_types;
  blink::mojom::PointerType primary_pointer_type;
  int available_hover_types;
  blink::mojom::HoverType primary_hover_type;
  bool dont_send_key_events_to_javascript;
  bool barrel_button_for_drag_enabled = false;
  bool sync_xhr_in_documents_enabled;
  // TODO(https://crbug.com/1163644): Remove once Chrome Apps are deprecated.
  bool target_blank_implies_no_opener_enabled_will_be_removed = true;
  // TODO(https://crbug.com/1172495): Remove once Chrome Apps are deprecated.
  bool allow_non_empty_navigator_plugins = false;
  int number_of_cpu_cores;
  blink::mojom::EditingBehavior editing_behavior;
  bool supports_multiple_windows;
  // Viewport meta-tag handling and page-scale behavior.
  bool viewport_enabled;
  bool viewport_meta_enabled;
  // If true - Blink will clamp the minimum scale factor to the content width,
  // preventing zoom beyond the visible content. This is really only needed if
  // viewport_enabled is on.
  bool shrinks_viewport_contents_to_fit;
  blink::mojom::ViewportStyle viewport_style;
  bool always_show_context_menu_on_touch;
  bool smooth_scroll_for_find_enabled;
  bool main_frame_resizes_are_orientation_changes;
  bool initialize_at_minimum_page_scale;
  bool smart_insert_delete_enabled;
  bool spatial_navigation_enabled;
  bool navigate_on_drag_drop;
  bool fake_no_alloc_direct_call_for_testing_enabled;
  blink::mojom::V8CacheOptions v8_cache_options;
  bool record_whole_document;
  // This flags corresponds to a Page's Settings' setCookieEnabled state. It
  // only controls whether or not the "document.cookie" field is properly
  // connected to the backing store, for instance if you wanted to be able to
  // define custom getters and setters from within a unique security content
  // without raising a DOM security exception.
  bool cookie_enabled;
  // This flag indicates whether H/W accelerated video decode is enabled.
  // Defaults to false.
  bool accelerated_video_decode_enabled;
  blink::mojom::ImageAnimationPolicy animation_policy =
      blink::mojom::ImageAnimationPolicy::kImageAnimationPolicyAllowed;
  bool user_gesture_required_for_presentation;
  bool text_tracks_enabled;
  // These fields specify the foreground and background color for WebVTT text
  // tracks. Their values can be any legal CSS color descriptor.
  std::string text_track_background_color;
  std::string text_track_text_color;
  // These fields specify values for CSS properties used to style WebVTT text
  // tracks.
  // Specifies CSS font-size property in percentage.
  std::string text_track_text_size;
  std::string text_track_text_shadow;
  std::string text_track_font_family;
  std::string text_track_font_style;
  // Specifies the value for CSS font-variant property.
  std::string text_track_font_variant;
  // These fields specify values for CSS properties used to style the window
  // around WebVTT text tracks.
  // Window color can be any legal CSS color descriptor.
  std::string text_track_window_color;
  // Window padding is in em.
  std::string text_track_window_padding;
  // Window radius is in pixels.
  std::string text_track_window_radius;
  // Specifies the margin for WebVTT text tracks as a percentage of media
  // element height/width (for horizontal/vertical text respectively).
  // Cues will not be placed in this margin area.
  float text_track_margin_percentage;
  bool immersive_mode_enabled;
  bool double_tap_to_zoom_enabled;
  bool fullscreen_supported;
  bool text_autosizing_enabled;
  // Representation of the Web App Manifest scope if any.
  GURL web_app_scope;
#if defined(OS_ANDROID)
  // Android-only settings, mostly WebView legacy-compatibility quirks.
  float font_scale_factor;
  float device_scale_adjustment;
  bool force_enable_zoom;
  GURL default_video_poster_url;
  bool support_deprecated_target_density_dpi;
  bool use_legacy_background_size_shorthand_behavior;
  bool wide_viewport_quirk;
  bool use_wide_viewport;
  bool force_zero_layout_height;
  bool viewport_meta_merge_content_quirk;
  bool viewport_meta_non_user_scalable_quirk;
  bool viewport_meta_zero_values_quirk;
  bool clobber_user_agent_initial_scale_quirk;
  bool ignore_main_frame_overflow_hidden_quirk;
  bool report_screen_size_in_physical_pixels_quirk;
  // Used by Android_WebView only to support legacy apps that inject script into
  // a top-level initial empty document and expect it to persist on navigation.
  bool reuse_global_for_unowned_main_frame;
  // Specifies default setting for spellcheck when the spellcheck attribute is
  // not explicitly specified.
  bool spellcheck_enabled_by_default;
  // If enabled, when a video goes fullscreen, the orientation should be locked.
  bool video_fullscreen_orientation_lock_enabled;
  // If enabled, fullscreen should be entered/exited when the device is rotated
  // to/from the orientation of the video.
  bool video_rotate_to_fullscreen_enabled;
  bool embedded_media_experience_enabled;
  // Enable 8 (#RRGGBBAA) and 4 (#RGBA) value hex colors in CSS Android
  // WebView quirk (http://crbug.com/618472).
  bool css_hex_alpha_color_enabled;
  // Enable support for document.scrollingElement
  // WebView sets this to false to retain old documentElement behaviour
  // (http://crbug.com/761016).
  bool scroll_top_left_interop_enabled;
  // Don't accelerate small canvases to avoid crashes TODO(crbug.com/1004304)
  bool disable_accelerated_small_canvases;
#endif  // defined(OS_ANDROID)
  // Enable forcibly modifying content rendering to result in a light on dark
  // color scheme.
  bool force_dark_mode_enabled = false;
  // Default (used if the page or UA doesn't override these) values for page
  // scale limits. These are set directly on the WebView so there's no analogue
  // in WebSettings.
  float default_minimum_page_scale_factor;
  float default_maximum_page_scale_factor;
  // Whether download UI should be hidden on this page.
  bool hide_download_ui;
  // Whether it is a presentation receiver.
  bool presentation_receiver;
  // If disabled, media controls should never be used.
  bool media_controls_enabled;
  // Whether we want to disable updating selection on mutating selection range.
  // This is to work around Samsung's email app issue. See
  // https://crbug.com/699943 for details.
  // TODO(changwan): remove this once we no longer support Android N.
  bool do_not_update_selection_on_mutating_selection_range;
  // Defines the current autoplay policy.
  blink::mojom::AutoplayPolicy autoplay_policy =
      blink::mojom::AutoplayPolicy::kNoUserGestureRequired;
  // The preferred color scheme for the web content. The scheme is used to
  // evaluate the prefers-color-scheme media query and resolve UA color scheme
  // to be used based on the supported-color-schemes META tag and CSS property.
  blink::mojom::PreferredColorScheme preferred_color_scheme =
      blink::mojom::PreferredColorScheme::kLight;
  // The preferred contrast for the web content. The contrast is used to
  // evaluate the prefers-contrast media query.
  blink::mojom::PreferredContrast preferred_contrast =
      blink::mojom::PreferredContrast::kNoPreference;
  // Network quality threshold below which resources from iframes are assigned
  // kVeryLow Blink priority.
  EffectiveConnectionType low_priority_iframes_threshold;
  // Whether Picture-in-Picture is enabled.
  bool picture_in_picture_enabled;
  // Whether a translate service is available.
  // blink's hrefTranslate attribute existence relies on the result.
  // See https://github.com/dtapuska/html-translate
  bool translate_service_available;
  // A value other than
  // mojom::EffectiveConnectionType::kEffectiveConnectionUnknownType implies
  // that the network quality estimate related Web APIs are in the holdback
  // mode. When the holdback is enabled, the related Web APIs return network
  // quality estimate corresponding to |network_quality_estimator_web_holdback|
  // regardless of the actual quality.
  EffectiveConnectionType network_quality_estimator_web_holdback;
  // Whether lazy loading of frames and images is enabled.
  bool lazy_load_enabled = true;
  // Specifies how close a lazily loaded iframe or image should be from the
  // viewport before it should start being loaded in, depending on the effective
  // connection type of the current network. Blink will use the default distance
  // threshold for effective connection types that aren't specified here.
  std::map<EffectiveConnectionType, int>
      lazy_frame_loading_distance_thresholds_px;
  std::map<EffectiveConnectionType, int>
      lazy_image_loading_distance_thresholds_px;
  std::map<EffectiveConnectionType, int> lazy_image_first_k_fully_load;
  // Setting to false disables upgrades to HTTPS for HTTP resources in HTTPS
  // sites.
  bool allow_mixed_content_upgrades;
  // Whether the focused element should always be indicated (for example, by
  // forcing :focus-visible to match regardless of focus method).
  bool always_show_focus;
  // Whether touch input can trigger HTML drag-and-drop operations. The
  // default value depends on the platform.
  bool touch_drag_drop_enabled;
  // Whether the end of a drag fires a contextmenu event and possibly shows a
  // context-menu (depends on how the event is handled). Currently touch-drags
  // cannot show context menus, see crbug.com/1096189.
  bool touch_dragend_context_menu = false;
  // By default, WebXR's immersive-ar session creation is allowed, but this can
  // change depending on the enterprise policy if the platform supports it.
  bool webxr_immersive_ar_allowed = true;
  // LitePage origin the subresources such as images should be redirected to
  // when the kSubresourceRedirect feature is enabled.
  url::Origin litepage_subresource_redirect_origin;
  // We try to keep the default values the same as the default values in
  // chrome, except for the cases where it would require lots of extra work for
  // the embedder to use the same default value.
  WebPreferences();
  WebPreferences(const WebPreferences& other);
  WebPreferences(WebPreferences&& other);
  ~WebPreferences();
  WebPreferences& operator=(const WebPreferences& other);
  WebPreferences& operator=(WebPreferences&& other);
};
} // namespace web_pref
} // namespace blink
#endif // THIRD_PARTY_BLINK_PUBLIC_COMMON_WEB_PREFERENCES_WEB_PREFERENCES_H_
| 4,603 |
852 | <reponame>ckamtsikis/cmssw
#ifndef CastorSaturationCorrs_h
#define CastorSaturationCorrs_h
#include "CondFormats/Serialization/interface/Serializable.h"
#include "CondFormats/CastorObjects/interface/CastorSaturationCorr.h"
#include "CondFormats/CastorObjects/interface/CastorCondObjectContainer.h"
// Condition-DB container for per-channel Castor saturation corrections.
// Thin wrapper that only supplies the container's display name.
class CastorSaturationCorrs : public CastorCondObjectContainer<CastorSaturationCorr> {
public:
  CastorSaturationCorrs() : CastorCondObjectContainer<CastorSaturationCorr>() {}

  // Human-readable name of this container type. Direct std::string
  // construction replaces the previous C-style cast of a string literal.
  std::string myname() const { return std::string("CastorSaturationCorrs"); }

private:
  COND_SERIALIZABLE;
};
#endif
| 203 |
10,225 | <filename>extensions/reactive-mssql-client/deployment/src/main/java/io/quarkus/reactive/mssql/client/deployment/MSSQLPoolBuildItem.java<gh_stars>1000+
package io.quarkus.reactive.mssql.client.deployment;
import io.quarkus.builder.item.MultiBuildItem;
import io.quarkus.datasource.common.runtime.DataSourceUtil;
import io.quarkus.runtime.RuntimeValue;
import io.vertx.mssqlclient.MSSQLPool;
/**
 * Build item that publishes one reactive MS SQL {@link MSSQLPool} per
 * configured datasource, identified by the datasource name.
 */
public final class MSSQLPoolBuildItem extends MultiBuildItem {

    /** Name of the datasource this pool belongs to. */
    private final String dataSourceName;

    /** Runtime handle to the lazily initialized MS SQL pool. */
    private final RuntimeValue<MSSQLPool> mssqlPool;

    public MSSQLPoolBuildItem(String dataSourceName, RuntimeValue<MSSQLPool> mssqlPool) {
        this.dataSourceName = dataSourceName;
        this.mssqlPool = mssqlPool;
    }

    /** @return {@code true} if this item describes the default (unnamed) datasource */
    public boolean isDefault() {
        return DataSourceUtil.isDefault(dataSourceName);
    }

    public String getDataSourceName() {
        return dataSourceName;
    }

    public RuntimeValue<MSSQLPool> getMSSQLPool() {
        return mssqlPool;
    }
}
| 377 |
831 | <gh_stars>100-1000
/*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.common.scene;
import com.android.tools.idea.common.model.NlComponent;
import com.google.common.collect.ImmutableList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.jetbrains.annotations.NotNull;
/**
* Default {@link SceneManager.SceneComponentHierarchyProvider}. It will create one {@link SceneComponent} per
* every given {@link NlComponent}. It will move the existing components in the {@link SceneManager} to their correct
* position if they already existed.
*/
public class DefaultSceneManagerHierarchyProvider implements SceneManager.SceneComponentHierarchyProvider {
  @Override
  @NotNull
  public List<SceneComponent> createHierarchy(@NotNull SceneManager manager, @NotNull NlComponent component) {
    // Reuse the SceneComponent already registered in the Scene for this
    // NlComponent, if any, so component identity survives hierarchy rebuilds.
    SceneComponent sceneComponent = manager.getScene().getSceneComponent(component);
    if (sceneComponent == null) {
      sceneComponent = new SceneComponent(manager.getScene(), component, manager.getHitProvider(component));
    }
    sceneComponent.setToolLocked(SceneManager.isComponentLocked(component));
    // Snapshot the children present before this rebuild. Any child still in
    // this set after the loop below has no corresponding NlComponent child
    // anymore and must be detached.
    Set<SceneComponent> oldChildren = new HashSet<>(sceneComponent.getChildren());
    for (NlComponent nlChild : component.getChildren()) {
      // Recursively (re)build the subtree for each NlComponent child.
      List<SceneComponent> children = createHierarchy(manager, nlChild);
      oldChildren.removeAll(children);
      for (SceneComponent child : children) {
        // Even the parent of child is the same, re-add it to make the order same as NlComponent.
        child.removeFromParent();
        sceneComponent.addChild(child);
      }
    }
    // Detach stale children that are no longer backed by an NlComponent child.
    for (SceneComponent child : oldChildren) {
      if (child instanceof TemporarySceneComponent && child.getParent() == sceneComponent) {
        // ignore TemporarySceneComponent since its associated NlComponent has not been added to the hierarchy.
        continue;
      }
      // Only detach children we still own; a child re-parented elsewhere
      // during the recursion above is left alone.
      if (child.getParent() == sceneComponent) {
        child.removeFromParent();
      }
    }
    return ImmutableList.of(sceneComponent);
  }

  @Override
  public void syncFromNlComponent(@NotNull SceneComponent sceneComponent) {
    sceneComponent.setToolLocked(false); // the root is always unlocked.
  }
}
| 832 |
892 | <gh_stars>100-1000
{
"schema_version": "1.2.0",
"id": "GHSA-6699-wcr8-79cg",
"modified": "2022-05-01T07:38:58Z",
"published": "2022-05-01T07:38:58Z",
"aliases": [
"CVE-2006-6541"
],
"details": "** DISPUTED ** PHP remote file inclusion vulnerability in signer/final.php in warez distributions of Animated Smiley Generator allows remote attackers to execute arbitrary PHP code via a URL in the smiley parameter. NOTE: the vendor disputes this issue, stating that only Warez versions of Animated Smiley Generator were affected, not the developer-provided software: \"Legitimately purchased applications do not allow this exploit.\"",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2006-6541"
},
{
"type": "WEB",
"url": "https://exchange.xforce.ibmcloud.com/vulnerabilities/30794"
},
{
"type": "WEB",
"url": "http://securityreason.com/securityalert/2031"
},
{
"type": "WEB",
"url": "http://www.attrition.org/pipermail/vim/2006-December/001195.html"
},
{
"type": "WEB",
"url": "http://www.securityfocus.com/archive/1/453934/100/0/threaded"
},
{
"type": "WEB",
"url": "http://www.smileygenerator.us/sales/index.php"
},
{
"type": "WEB",
"url": "http://www.smileygenerator.us/sales/index.php?act=viewProd&productId=8"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 665 |
19,824 | <gh_stars>1000+
{
"main": "dist/keystone-ui-icons-icons-Navigation2Icon.cjs.js",
"module": "dist/keystone-ui-icons-icons-Navigation2Icon.esm.js"
}
| 65 |
550 | <gh_stars>100-1000
package com.qiniu.qvs.model;
import com.qiniu.common.Constants;
import com.qiniu.util.Md5;
import com.qiniu.util.UrlSafeBase64;
/**
 * Describes a static live-play route (domain plus domain type) and builds
 * signed playback URLs for Qiniu QVS streams.
 */
public class StaticLiveRoute {
    private String domain; // domain name
    private String domainType; // domain type: publishRtmp | liveRtmp | liveHls | liveHdl
    // URL lifetime in seconds, e.g. 100 means the URL expires 100 seconds from
    // now. Default 0 means the URL never expires.
    private int urlExpireSec;

    public StaticLiveRoute(String domain, String domainType) {
        this.domain = domain;
        this.domainType = domainType;
    }

    public StaticLiveRoute(String domain, String domainType, int urlExpireSec) {
        this.domain = domain;
        this.domainType = domainType;
        this.urlExpireSec = urlExpireSec;
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    public String getDomainType() {
        return domainType;
    }

    public void setDomainType(String domainType) {
        this.domainType = domainType;
    }

    public int getUrlExpireSec() {
        return urlExpireSec;
    }

    public void setUrlExpireSec(int urlExpireSec) {
        this.urlExpireSec = urlExpireSec;
    }

    /**
     * Builds a signed HLS (port 1370, .m3u8) or HTTP-FLV (port 1360, .flv)
     * playback URL, chosen by {@code domainType}.
     *
     * @param nsId     namespace id
     * @param streamId stream id
     * @param key      signing key
     * @param useHttps whether to emit an https scheme
     * @return signed playback URL with {@code e} (expiry) and {@code token} params
     */
    public String genStaticHLSFLVDomain(String nsId, String streamId, String key, boolean useHttps) {
        String path = "/" + nsId + "/" + streamId;
        String scheme = useHttps ? "https" : "http";
        String host;
        if ("liveHls".equals(domainType)) {
            host = domain + ":1370";
            path += ".m3u8";
        } else {
            host = domain + ":1360";
            path += ".flv";
        }
        long expireTime = expiryTimestamp();
        String token = signToken(key, path, expireTime);
        return String.format("%s://%s%s?e=%d&token=%s", scheme, host, path, expireTime, token);
    }

    /**
     * Builds a signed RTMP (port 2045) playback URL.
     *
     * @param nsId     namespace id
     * @param streamId stream id
     * @param key      signing key
     * @return signed playback URL with {@code e} (expiry) and {@code token} params
     */
    public String genStaticRtmpDomain(String nsId, String streamId, String key) {
        String path = "/" + nsId + "/" + streamId;
        String scheme = "rtmp";
        String host = domain + ":2045";
        long expireTime = expiryTimestamp();
        String token = signToken(key, path, expireTime);
        return String.format("%s://%s%s?e=%d&token=%s", scheme, host, path, expireTime, token);
    }

    // Expiry timestamp in milliseconds since the epoch. The multiplication is
    // done in long arithmetic: the previous `urlExpireSec * 1000` overflowed
    // int for expiries beyond ~24.8 days (Integer.MAX_VALUE ms).
    private long expiryTimestamp() {
        return System.currentTimeMillis() + urlExpireSec * 1000L;
    }

    // Token = md5(key + urlsafe_base64(path) + hex(expireTime)).
    private String signToken(String key, String path, long expireTime) {
        String encode_path = UrlSafeBase64.encodeToString(path);
        String tempS = key + encode_path + Long.toHexString(expireTime);
        return Md5.md5(tempS.getBytes(Constants.UTF_8));
    }
}
| 1,116 |
334 | #pragma once
// mapnik
#include <mapnik/datasource_cache.hpp>
#include <mapnik/version.hpp>
// stl
#include <vector>
#include <string>
#include "utils.hpp"
namespace node_mapnik {
/**
* Register all plugins available. This is not recommend in environments where high-performance is priority.
* Consider registering plugins on a per-need basis.
*
* @memberof mapnik
* @name register_default_input_plugins
* @example
* var mapnik = require('mapnik');
* mapnik.register_default_input_plugins();
*/
/**
* List all plugins that are currently available.
*
* @memberof mapnik
* @name datasources
* @returns {Array<String>} list of plugins available to use
*/
static inline Napi::Value available_input_plugins(Napi::CallbackInfo const& info)
{
    Napi::Env env = info.Env();
    Napi::EscapableHandleScope scope(env);
    // Copy every registered plugin name into a fresh JS array.
    std::vector<std::string> const names = mapnik::datasource_cache::instance().plugin_names();
    Napi::Array result = Napi::Array::New(env, names.size());
    std::size_t idx = 0;
    for (auto const& name : names)
    {
        result.Set(idx, name);
        ++idx;
    }
    return scope.Escape(result);
}
/**
* Register a single datasource input plugin. The available plugins are:
*
* * `'csv.input'`
* * `'gdal.input'`
* * `'geojson.input'`
* * `'ogr.input'`
* * `'pgraster.input'`
* * `'postgis.input'`
* * `'raster.input'`
* * `'shape.input'`
* * `'sqlite.input'`
* * `'topojson.input'`
*
* @memberof mapnik
* @name registerDatasource
* @param {String} path to a datasource to register.
* @example
* mapnik.registerDatasource(path.join(mapnik.settings.paths.input_plugins, 'geojson.input'));
*/
// Registers a single datasource plugin (.input file). Returns true iff the
// registration actually grew the set of known plugins.
static inline Napi::Value register_datasource(Napi::CallbackInfo const& info)
{
    Napi::Env env = info.Env();
    if (info.Length() != 1 || !info[0].IsString())
    {
        Napi::TypeError::New(env, "first argument must be a path to a mapnik input plugin (.input)").ThrowAsJavaScriptException();
        return env.Undefined();
    }
    // Compare plugin counts before/after instead of copying the full name
    // lists; growth means the plugin was newly registered.
    std::size_t const count_before = mapnik::datasource_cache::instance().plugin_names().size();
    std::string path = info[0].As<Napi::String>();
    mapnik::datasource_cache::instance().register_datasource(path);
    std::size_t const count_after = mapnik::datasource_cache::instance().plugin_names().size();
    return Napi::Boolean::New(env, count_after > count_before);
}
/**
* Register multiple datasources.
*
* @memberof mapnik
* @name registerDatasources
* @param {Array<String>} list of paths to their respective datasources
*/
// Registers every datasource plugin found in a directory. Returns true iff
// at least one new plugin was registered.
static inline Napi::Value register_datasources(Napi::CallbackInfo const& info)
{
    Napi::Env env = info.Env();
    if (info.Length() != 1 || !info[0].IsString())
    {
        Napi::TypeError::New(env, "first argument must be a path to a directory of mapnik input plugins").ThrowAsJavaScriptException();
        // NOTE(review): the single-plugin variant returns env.Undefined() on
        // this error path; Null is kept here for backward compatibility —
        // confirm with callers before unifying.
        return env.Null();
    }
    // Compare plugin counts before/after instead of copying the full name
    // lists; growth means at least one plugin was newly registered.
    std::size_t const count_before = mapnik::datasource_cache::instance().plugin_names().size();
    std::string path = info[0].As<Napi::String>();
    mapnik::datasource_cache::instance().register_datasources(path);
    std::size_t const count_after = mapnik::datasource_cache::instance().plugin_names().size();
    return Napi::Boolean::New(env, count_after > count_before);
}
} // namespace node_mapnik
| 1,192 |
16,989 | // Copyright 2021 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
package com.google.devtools.build.lib.bazel.bzlmod;
import static com.google.common.truth.Truth.assertThat;
import static com.google.devtools.build.lib.bazel.bzlmod.BzlmodTestUtil.createModuleKey;
import static com.google.devtools.build.lib.bazel.bzlmod.BzlmodTestUtil.createRepositoryMapping;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link Module}. */
@RunWith(JUnit4.class)
public class ModuleTest {
  @Test
  public void withDepKeysTransformed() throws Exception {
    // Transforming dep keys rewrites every dep's module key (here: appending
    // "_new" to the name and ".1" to the version) while keeping dep names.
    assertThat(
            Module.builder()
                .addDep("dep_foo", createModuleKey("foo", "1.0"))
                .addDep("dep_bar", createModuleKey("bar", "2.0"))
                .build()
                .withDepKeysTransformed(
                    key ->
                        createModuleKey(
                            key.getName() + "_new", key.getVersion().getOriginal() + ".1")))
        .isEqualTo(
            Module.builder()
                .addDep("dep_foo", createModuleKey("foo_new", "1.0.1"))
                .addDep("dep_bar", createModuleKey("bar_new", "2.0.1"))
                .build());
  }

  @Test
  public void getRepoMapping() throws Exception {
    // A non-root module maps its own name to its canonical repo
    // ("test_module.1.0"), each dep name to "<name>.<version>", and a dep on
    // the root module to the empty canonical repo name.
    ModuleKey key = createModuleKey("test_module", "1.0");
    Module module =
        Module.builder()
            .setName(key.getName())
            .setVersion(key.getVersion())
            .setKey(key)
            .addDep("my_foo", createModuleKey("foo", "1.0"))
            .addDep("my_bar", createModuleKey("bar", "2.0"))
            .addDep("my_root", ModuleKey.ROOT)
            .build();
    assertThat(module.getRepoMappingWithBazelDepsOnly())
        .isEqualTo(
            createRepositoryMapping(
                key,
                "test_module",
                "test_module.1.0",
                "my_foo",
                "foo.1.0",
                "my_bar",
                "bar.2.0",
                "my_root",
                ""));
  }

  @Test
  public void getRepoMapping_asMainModule() throws Exception {
    // The main (root) module maps both "" and its own name to the empty
    // canonical repo name; dep mappings are unchanged.
    Module module =
        Module.builder()
            .setName("test_module")
            .setVersion(Version.parse("1.0"))
            .setKey(ModuleKey.ROOT)
            .addDep("my_foo", createModuleKey("foo", "1.0"))
            .addDep("my_bar", createModuleKey("bar", "2.0"))
            .build();
    assertThat(module.getRepoMappingWithBazelDepsOnly())
        .isEqualTo(
            createRepositoryMapping(
                ModuleKey.ROOT,
                "",
                "",
                "test_module",
                "",
                "my_foo",
                "foo.1.0",
                "my_bar",
                "bar.2.0"));
  }
}
| 1,597 |
488 | <reponame>AliOsamaHassan/NLP-Cube
import json, os, yaml, uuid
import ntpath
from shutil import rmtree, copyfile
import logging
from tqdm.autonotebook import tqdm as tqdm
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
os.sys.path.insert(0, parent_dir)
from cube.io_utils.modelstore import ModelStore
if __name__ == "__main__":
    # Packs trained NLP-Cube models into sharded archives and maintains the
    # model catalog (catalog.json) that maps language names to download links.
    # set vars here
    VERSION = "1.0"
    FOLDER_WITH_YAMLS = os.path.abspath("scripts//train//2.7//language//")
    FOLDER_WITH_TRAINED_MODELS = "/media/echo/5CA436CBA436A802/work/models" # os.path.abspath("models")
    FOLDER_WHERE_TO_OUTPUT_EVERYTHING = "/media/echo/5CA436CBA436A802/work/nlp-cube-models" # os.path.abspath("nlp-cube-models")
    URL_ROOT_FOR_MODELS = "https://raw.githubusercontent.com/adobe/NLP-Cube-Models/3.0/models/" # !! make sure it ends with a /
    """
    0. Open existing catalog, create new key "version" if it does not exist
    1. Load all yaml files:
        lang_code_id : "lang_code" -> index in original yaml file
        lang_map : "name" -> "lang_code"
    2. For each lang_code, check to see if files are available to form a model. If not, report errors.
    3. Copy all files in temp folder
    4. Pack all files and split in shards
    5. Move in final model dir with name lang_code.version.###
    6. For all entries in lang_map, if lang_code is the current one, create entry in catalog if does not exist
        "name":["version","link","lang_code_index"]
    7. Write catalog
    """
    # Console logger used for progress output.
    logger = logging.getLogger("cube")
    log_handler = logging.StreamHandler()
    log_formatter = logging.Formatter(
        fmt="[%(levelname)8s | %(asctime)s | %(filename)-20s:%(lineno)3s | %(funcName)-26s] %(message)s",
        datefmt='%Y-%m-%d %H:%M:%S')
    log_handler.setFormatter(log_formatter)
    logger.addHandler(log_handler)
    logger.setLevel(logging.DEBUG)
    # STEP 0 - open current catalog or create new
    catalog_path = os.path.join(FOLDER_WHERE_TO_OUTPUT_EVERYTHING, "catalog.json")
    if os.path.exists(catalog_path):
        catalog = json.load(open(catalog_path, "r", encoding="utf8"))
    else:
        catalog = {}
    # STEP 1 - load all yamls
    # langcode_to_index: "lang_code" -> index within its yaml's language_codes list.
    # name_to_langcode: human-readable language name -> "lang_code".
    langcode_to_index = {}
    name_to_langcode = {}
    yamls = os.listdir(FOLDER_WITH_YAMLS) # this just lists the files without path
    yamls = [os.path.abspath(os.path.join(FOLDER_WITH_YAMLS, x)) for x in yamls] # fill full path in
    yamls = [x for x in yamls if os.path.isfile(x) and x.endswith(".yaml")] # filter out possible junk
    print("I see {} yamls in {}.".format(len(yamls), FOLDER_WITH_YAMLS))
    for yaml_file in yamls:
        y = yaml.safe_load(open(yaml_file, "r"))
        for index, lang_code in enumerate(y["language_codes"]):
            if lang_code in langcode_to_index:
                print(f"Warning! lang_code {lang_code} already found in langcode_to_index!")
            langcode_to_index[lang_code] = index
        for name in y["language_map"]:
            if name in name_to_langcode:
                print(f"Warning! name {name} already found in name_to_langcode!")
            name_to_langcode[name] = y["language_map"][name]
    # STEP 2 - for each lang_code
    for lang_code in langcode_to_index:
        print(f"Working on {lang_code} ...")
        # STEP 3 - copy all relevant files for this language_code in temp folder
        files = os.listdir(FOLDER_WITH_TRAINED_MODELS) # this just lists the files without path
        files = [os.path.abspath(os.path.join(FOLDER_WITH_TRAINED_MODELS, x)) for x in files] # fill full path in
        files = [x for x in files if os.path.isfile(x) and ".last" not in x]
        # get major language_code
        # The major code is the prefix before "-" of the first file whose name
        # contains this lang_code (e.g. "en" for "en_ewt").
        tt = [x for x in files if lang_code in x]
        if len(tt) == 0:
            print(f"\tCould not get major language code!")
            continue
        tt = tt[0]
        tt = ntpath.basename(tt)
        major_lang_code = tt.split("-")[0]
        print(f"\t major language code is [{major_lang_code}]")
        valid_files = []
        # copy encodings and config
        # Config/encodings are shared per major language code.
        for f in files:
            ff = ntpath.basename(f)
            if ff.startswith(major_lang_code+"-") and (".config" in ff or ".encodings" in ff):
                valid_files.append(f)
        # copy lang_codes with tok, best and las
        # Model weights are selected per specific lang_code.
        for f in files:
            if lang_code in f and (".tok" in f or ".best" in f or ".las" in f):
                valid_files.append(f)
        files = valid_files
        # check they are valid
        # A complete model needs all three components; otherwise skip and log.
        found_tokenizer, found_lemmatizer, found_parser = False, False, False
        for f in files:
            if "tokenizer" in f:
                found_tokenizer = True
            if "lemmatizer" in f:
                found_lemmatizer = True
            if "parser" in f:
                found_parser = True
        if not(found_tokenizer and found_lemmatizer and found_parser):
            print(f"\t {lang_code} does not have all files: tokenizer={found_tokenizer}, lemmatizer={found_lemmatizer}, parser={found_parser}, skipping")
            with open("log.txt", "a") as f:
                f.write(f"\t {lang_code} does not have all files: tokenizer={found_tokenizer}, lemmatizer={found_lemmatizer}, parser={found_parser}, skipping\n")
            continue
        # Random temp dir keeps concurrent/repeated runs from colliding.
        temp_folder = os.path.join(FOLDER_WHERE_TO_OUTPUT_EVERYTHING, str(uuid.uuid4().hex))
        os.mkdir(temp_folder)
        # copy files to temp folder
        print("\t copying files to temp folder ... ")
        for src_file in files:
            _, name = os.path.split(src_file)
            dst_file = os.path.join(temp_folder, name)
            # print((src_file, dst_file))
            copyfile(src_file, dst_file)
        # pack folder in zip file
        # NOTE(review): zip_file_path is never used below — candidate for removal.
        zip_file_path = os.path.join(temp_folder, lang_code + "-" + VERSION)
        # _pack_model writes "<lang_code>-<VERSION>" shards of at most 99 MB
        # into the output folder (private ModelStore API).
        split_count = ModelStore._pack_model(
            input_folder=temp_folder,
            output_folder=FOLDER_WHERE_TO_OUTPUT_EVERYTHING,
            model_name=lang_code + "-" + VERSION,
            split_size_in_mb=99)
        # delete temp folder
        print("\t deleting temp folder ...")
        if os.path.exists(temp_folder):
            rmtree(temp_folder, ignore_errors=True)
        # STEP 7 - make a catalog entry for all language names affected :
        # The same entry dict is appended under every matching name; it is
        # never mutated afterwards, so the aliasing is harmless.
        entry = {
            "version": VERSION,
            "link": URL_ROOT_FOR_MODELS + lang_code + "-" + VERSION,
            "langid": langcode_to_index[lang_code],
            "parts": split_count
        }
        for name in name_to_langcode:
            if name_to_langcode[name] == lang_code:
                print(f"\t making a catalog entry for [{name}] -> [{lang_code}], {split_count} parts, langid {langcode_to_index[lang_code]}")
                if name not in catalog:
                    catalog[name] = []
                catalog[name].append(entry)
    print("Finished processing all language codes, writing catalog ... ")
    json.dump(catalog, open(catalog_path, "w", encoding="utf8"), indent=4, sort_keys=True)
    print("Done.")
| 3,223 |
496 | <reponame>jda/lexbor<filename>source/lexbor/core/bst_map.h
/*
* Copyright (C) 2018 <NAME>
*
* Author: <NAME> <<EMAIL>>
*/
#ifndef LEXBOR_BST_MAP_H
#define LEXBOR_BST_MAP_H

#ifdef __cplusplus
extern "C" {
#endif

#include "lexbor/core/bst.h"
#include "lexbor/core/str.h"
#include "lexbor/core/mraw.h"
#include "lexbor/core/dobject.h"

/*
 * One key/value pair stored in the map: the key string (bytes owned by the
 * map's mraw allocator) plus an opaque user value.
 */
typedef struct {
    lexbor_str_t str;
    void         *value;
}
lexbor_bst_map_entry_t;

/*
 * A string-keyed map built on top of a binary search tree (lexbor_bst_t).
 * `mraw` holds copies of the key bytes and `entries` pools the
 * lexbor_bst_map_entry_t objects.
 */
typedef struct {
    lexbor_bst_t     *bst;
    lexbor_mraw_t    *mraw;
    lexbor_dobject_t *entries;
}
lexbor_bst_map_t;

/* Allocate an uninitialized map object; initialize with lexbor_bst_map_init. */
LXB_API lexbor_bst_map_t *
lexbor_bst_map_create(void);

/* Initialize internal structures; `size` presumably seeds the pools — confirm
 * against bst_map.c. */
LXB_API lxb_status_t
lexbor_bst_map_init(lexbor_bst_map_t *bst_map, size_t size);

/* Remove all entries while keeping allocated memory for reuse. */
LXB_API void
lexbor_bst_map_clean(lexbor_bst_map_t *bst_map);

/* Destroy internals; frees the map object itself when `self_destroy` is true. */
LXB_API lexbor_bst_map_t *
lexbor_bst_map_destroy(lexbor_bst_map_t *bst_map, bool self_destroy);

/* Find the entry for `key` within the subtree rooted at `scope`. */
LXB_API lexbor_bst_map_entry_t *
lexbor_bst_map_search(lexbor_bst_map_t *bst_map, lexbor_bst_entry_t *scope,
                      const lxb_char_t *key, size_t key_len);

/* Insert `key` -> `value`; `scope` is updated if the tree root changes. */
LXB_API lexbor_bst_map_entry_t *
lexbor_bst_map_insert(lexbor_bst_map_t *bst_map, lexbor_bst_entry_t **scope,
                      const lxb_char_t *key, size_t key_len, void *value);

/* Insert `key` only if absent; returns the (new or existing) entry. */
LXB_API lexbor_bst_map_entry_t *
lexbor_bst_map_insert_not_exists(lexbor_bst_map_t *bst_map,
                                 lexbor_bst_entry_t **scope,
                                 const lxb_char_t *key, size_t key_len);

/* Remove `key` from the subtree; returns the stored value. */
LXB_API void *
lexbor_bst_map_remove(lexbor_bst_map_t *bst_map, lexbor_bst_entry_t **scope,
                      const lxb_char_t *key, size_t key_len);

/*
 * Inline functions
 */

/* Accessor for the map's string allocator. */
lxb_inline lexbor_mraw_t *
lexbor_bst_map_mraw(lexbor_bst_map_t *bst_map)
{
    return bst_map->mraw;
}

/*
 * No inline functions for ABI.
 */
lexbor_mraw_t *
lexbor_bst_map_mraw_noi(lexbor_bst_map_t *bst_map);

#ifdef __cplusplus
} /* extern "C" */
#endif

#endif /* LEXBOR_BST_MAP_H */
| 1,024 |
407 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
/**
* Enum describing the durability guarantees for {@link Mutation}
* Note that the items must be sorted in order of increasing durability
*/
/**
 * Durability guarantees available for a {@link Mutation}.
 * Ordinal order is meaningful: constants must stay sorted from weakest to
 * strongest durability, and {@link #valueOf(int)} relies on that ordering.
 */
public enum Durability {
  /**
   * Defer to the column family's configured default durability.
   * Must remain the first constant (ordinal 0).
   */
  USE_DEFAULT,
  /**
   * Do not write the Mutation to the write-ahead log at all.
   */
  SKIP_WAL,
  /**
   * Write the Mutation to the WAL asynchronously.
   */
  ASYNC_WAL,
  /**
   * Write the Mutation to the WAL synchronously.  The data is flushed to the
   * filesystem implementation, but not necessarily to disk; for HDFS the edit
   * reaches the designated number of DataNodes.
   * See <a href="https://issues.apache.org/jira/browse/HADOOP-6313">HADOOP-6313<a/>
   */
  SYNC_WAL,
  /**
   * Write the Mutation to the WAL synchronously and force the entries to disk.
   * (Note: this is currently not supported and will behave identical to
   * {@link #SYNC_WAL}.)
   * See <a href="https://issues.apache.org/jira/browse/HADOOP-6313">HADOOP-6313<a/>
   */
  FSYNC_WAL;

  // Cached once so valueOf(int) avoids the defensive array copy that
  // Enum.values() performs on every invocation (too much garbage otherwise).
  private static final Durability[] BY_ORDINAL = values();

  /** Efficiently translate an ordinal back into its Durability constant. */
  public static Durability valueOf(int ordinal) {
    if (ordinal < 0 || ordinal >= BY_ORDINAL.length) {
      throw new IllegalArgumentException("Unknown Durability Ordinal:" + ordinal);
    }
    return BY_ORDINAL[ordinal];
  }
}
| 739 |
427 | //===-- ProcFileReader.cpp --------------------------------------*- C++ -*-===//
//
// The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
#include "Plugins/Process/Linux/ProcFileReader.h"
// C Headers
#include <fcntl.h>
#include <inttypes.h>
#include <limits.h>
#include <stdio.h>
#include <sys/stat.h>
// C++ Headers
#include <fstream>
// LLDB Headers
#include "lldb/Core/DataBufferHeap.h"
#include "lldb/Core/Error.h"
using namespace lldb_private;
using namespace lldb_private::process_linux;
// Slurp the whole of /proc/<pid>/<name> into a heap-backed data buffer.
//
// On success the returned buffer holds the complete file contents.  On any
// failure (snprintf error, open error, read error) the fallback 1-byte
// zero-filled buffer created below is returned instead, so callers can call
// GetBytes() on the result either way.
lldb::DataBufferSP ProcFileReader::ReadIntoDataBuffer(lldb::pid_t pid,
                                                      const char *name) {
  int fd;
  char path[PATH_MAX];

  // Make sure we've got a nil terminated buffer for all the folks calling
  // GetBytes() directly off our returned DataBufferSP if we hit an error.
  lldb::DataBufferSP buf_sp(new DataBufferHeap(1, 0));

  // Ideally, we would simply create a FileSpec and call ReadFileContents.
  // However, files in procfs have zero size (since they are, in general,
  // dynamically generated by the kernel) which is incompatible with the
  // current ReadFileContents implementation. Therefore we simply stream the
  // data into a DataBuffer ourselves.
  if (snprintf(path, PATH_MAX, "/proc/%" PRIu64 "/%s", pid, name) > 0) {
    if ((fd = open(path, O_RDONLY, 0)) >= 0) {
      size_t bytes_read = 0;
      std::unique_ptr<DataBufferHeap> buf_ap(new DataBufferHeap(1024, 0));
      for (;;) {
        // Read into whatever capacity remains past the bytes already read.
        size_t avail = buf_ap->GetByteSize() - bytes_read;
        ssize_t status = read(fd, buf_ap->GetBytes() + bytes_read, avail);
        if (status < 0)
          break; // Read error: fall through and return the 1-byte buffer.
        if (status == 0) {
          // EOF: shrink the buffer to the bytes actually read and transfer
          // ownership into the shared pointer we return.
          buf_ap->SetByteSize(bytes_read);
          buf_sp.reset(buf_ap.release());
          break;
        }
        bytes_read += status;
        // Buffer completely full?  Double its capacity and keep reading.
        if (avail - status == 0)
          buf_ap->SetByteSize(2 * buf_ap->GetByteSize());
      }
      close(fd);
    }
  }
  return buf_sp;
}
// Stream /proc/<pid>/<name> line by line into `line_parser`.  Iteration stops
// at end of file or as soon as the parser returns false.  The returned Error
// is set only when the file cannot be opened.
Error ProcFileReader::ProcessLineByLine(
    lldb::pid_t pid, const char *name,
    std::function<bool(const std::string &line)> line_parser) {
  Error error;

  // Build the path of the requested procfs entry for this process.
  char proc_path[PATH_MAX];
  snprintf(proc_path, sizeof(proc_path), "/proc/%" PRIu64 "/%s", pid, name);
  proc_path[sizeof(proc_path) - 1] = '\0';

  std::ifstream proc_stream(proc_path);
  if (proc_stream.fail()) {
    error.SetErrorStringWithFormat("failed to open file '%s'", proc_path);
    return error;
  }

  // Hand each line to the callback; a false return stops iteration early.
  for (std::string line; std::getline(proc_stream, line);) {
    if (!line_parser(line))
      break;
  }
  return error;
}
| 1,146 |
#!/usr/bin/env pypy3
"""Knight-attack closure: repeatedly add every empty square attacked by at
least four placed knights until the set stabilizes, then check the final
count is at least n*n//10."""
from sys import stdin

# All eight knight-move offsets (same deltas as the original inline list).
KNIGHT_MOVES = (
    (-1, 2), (-1, -2), (-2, -1), (-2, 1),
    (1, -2), (1, 2), (2, -1), (2, 1),
)

cells = set()
count = 0
for line in stdin:
    count += 1
    x, y = map(int, line.split())
    cells.add((x, y))

changed = True
while changed:
    changed = False
    # Count how many current knights attack each square.
    attackers = {}
    for cx, cy in cells:
        for dx, dy in KNIGHT_MOVES:
            square = (cx + dx, cy + dy)
            attackers[square] = attackers.get(square, 0) + 1
    # Any empty square attacked by four or more knights gets a knight.
    for square, hits in attackers.items():
        if hits >= 4 and square not in cells:
            cells.add(square)
            changed = True

print(count, len(cells), count * count // 10)
assert len(cells) >= count * count // 10
| 341 |
557 | //=================================================================================================
//
// MJP's DX11 Sample Framework
// http://mynameismjp.wordpress.com/
//
// All code licensed under the MIT license
//
//=================================================================================================
#pragma once
#include "PCH.h"
namespace SampleFramework11
{
// High-resolution stopwatch.  Call Update() once per frame/iteration; the
// accessors then report "Elapsed" (total time, presumably since the timer was
// constructed — confirm against Timer.cpp) and "Delta" (time between the two
// most recent Update() calls), each available as seconds, milliseconds and
// microseconds in int64, float and double flavors.
class Timer
{

public:

    Timer();
    ~Timer();

    // Samples the counter and refreshes the cached elapsed/delta values below.
    void Update();

    // Total elapsed time.
    int64 ElapsedSeconds() const;
    float ElapsedSecondsF() const;
    double ElapsedSecondsD() const;

    // Time between the last two Update() calls.
    int64 DeltaSeconds() const;
    float DeltaSecondsF() const;
    double DeltaSecondsD() const;

    int64 ElapsedMilliseconds() const;
    float ElapsedMillisecondsF() const;
    double ElapsedMillisecondsD() const;

    int64 DeltaMilliseconds() const;
    float DeltaMillisecondsF() const;
    double DeltaMillisecondsD() const;

    int64 ElapsedMicroseconds() const;
    float ElapsedMicrosecondsF() const;
    double ElapsedMicrosecondsD() const;

    int64 DeltaMicroseconds() const;
    float DeltaMicrosecondsF() const;
    double DeltaMicrosecondsD() const;

protected:

    // Raw counter state: start sample plus the counter frequency (ticks/sec).
    int64 startTime;
    int64 frequency;
    double frequencyD;

    // Raw elapsed/delta in counter ticks, plus float/double conversions.
    int64 elapsed;
    int64 delta;
    float elapsedF;
    float deltaF;
    double elapsedD;
    double deltaD;

    // Cached unit conversions refreshed by Update().
    int64 elapsedSeconds;
    int64 deltaSeconds;
    float elapsedSecondsF;
    float deltaSecondsF;
    double elapsedSecondsD;
    double deltaSecondsD;
    int64 elapsedMilliseconds;
    int64 deltaMilliseconds;
    float elapsedMillisecondsF;
    float deltaMillisecondsF;
    double elapsedMillisecondsD;
    double deltaMillisecondsD;
    int64 elapsedMicroseconds;
    int64 deltaMicroseconds;
    float elapsedMicrosecondsF;
    float deltaMicrosecondsF;
    double elapsedMicrosecondsD;
    double deltaMicrosecondsD;
};
} | 607 |
14,425 | <filename>hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.v2.app.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet2.Hamlet.TBODY;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import com.google.inject.Inject;
/**
 * Renders the "Active Jobs" table for the MapReduce Application Master web
 * UI: one row per job with its state, map/reduce progress bars, and task
 * counts.
 */
public class JobsBlock extends HtmlBlock {
  final AppContext appContext;

  @Inject JobsBlock(AppContext appCtx) {
    appContext = appCtx;
  }

  @Override protected void render(Block html) {
    // Emit the table header first; per-job rows are appended below.
    TBODY<TABLE<Hamlet>> tbody = html.
      h2("Active Jobs").
      table("#jobs").
        thead().
          tr().
            th(".id", "Job ID").
            th(".name", "Name").
            th(".state", "State").
            th("Map Progress").
            th("Maps Total").
            th("Maps Completed").
            th("Reduce Progress").
            th("Reduces Total").
            th("Reduces Completed").__().__().
        tbody();
    for (Job j : appContext.getAllJobs().values()) {
      JobInfo job = new JobInfo(j, false);
      tbody.
        tr().
          td().
            // Hidden span carries the raw id so the client-side table sort
            // orders rows correctly; the visible cell is the job link.
            span().$title(String.valueOf(job.getId())).__(). // for sorting
            a(url("job", job.getId()), job.getId()).__().
          td(job.getName()).
          td(job.getState()).
          td().
            span().$title(job.getMapProgressPercent()).__(). // for sorting
            div(_PROGRESSBAR).
              $title(join(job.getMapProgressPercent(), '%')). // tooltip
              div(_PROGRESSBAR_VALUE).
                $style(join("width:", job.getMapProgressPercent(), '%')).__().__().__().
          td(String.valueOf(job.getMapsTotal())).
          td(String.valueOf(job.getMapsCompleted())).
          td().
            span().$title(job.getReduceProgressPercent()).__(). // for sorting
            div(_PROGRESSBAR).
              $title(join(job.getReduceProgressPercent(), '%')). // tooltip
              div(_PROGRESSBAR_VALUE).
                $style(join("width:", job.getReduceProgressPercent(), '%')).__().__().__().
          td(String.valueOf(job.getReducesTotal())).
          td(String.valueOf(job.getReducesCompleted())).__();
    }
    tbody.__().__();
  }
}
| 1,466 |
# $Id$
#
# Copyright (C) 2005 <NAME> (<EMAIL>)
# Licensed to PSF under a Contributor Agreement.
"""Backwards-compatibility shim for the removed ``sha`` module.

Importing it emits a DeprecationWarning (stacklevel 2 attributes the
warning to the importer) and re-exports SHA-1 from ``hashlib`` under the
legacy names.
"""

import warnings

warnings.warn("the sha module is deprecated; use the hashlib module instead",
              DeprecationWarning, 2)

from hashlib import sha1 as sha

# Legacy aliases the old C module exposed.
new = sha

blocksize = 1  # legacy value (wrong in any useful sense)
digestsize = digest_size = 20  # SHA-1 digests are 160 bits == 20 bytes
| 137 |
571 | <gh_stars>100-1000
# Copyright (C) 2017 Dell Inc. or its subsidiaries.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from copy import deepcopy
from unittest import mock
import ddt
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.dell_emc import powerflex
# Fixed identifiers used to simulate a volume Cinder already manages.
VOLUME_ID = "abcdabcd-1234-abcd-1234-abcdabcdabcd"
PROVIDER_ID = "0000000000000001"

# Volume records as the fake PowerFlex REST API returns them.  The first
# entry reuses PROVIDER_ID so tests can mark it as already managed.
# sizeInKb 8388608 KiB == 8 GiB.
MANAGEABLE_FLEX_VOLS = [
    {
        "volumeType": "ThinProvisioned",
        "storagePoolId": "6c6dc54500000000",
        "sizeInKb": 8388608,
        "name": "volume1",
        "id": PROVIDER_ID,
        "mappedSdcInfo": [],
    },
    {
        "volumeType": "ThinProvisioned",
        "storagePoolId": "6c6dc54500000000",
        "sizeInKb": 8388608,
        "name": "volume2",
        "id": "0000000000000002",
        "mappedSdcInfo": [],
    },
    {
        "volumeType": "ThickProvisioned",
        "storagePoolId": "6c6dc54500000000",
        "sizeInKb": 8388608,
        "name": "volume3",
        "id": "0000000000000003",
        "mappedSdcInfo": [],
    }
]

# A snapshot record; the driver must exclude snapshots from the
# manageable-volume listing.
POWERFLEX_SNAPSHOT = {
    "volumeType": "Snapshot",
    "storagePoolId": "6c6dc54500000000",
    "sizeInKb": 8388608,
    "name": "snapshot1",
    "id": "1000000000000001",
    "mappedSdcInfo": [],
}

# The manageable-volume references the driver is expected to build from
# MANAGEABLE_FLEX_VOLS (size reported in GiB).
MANAGEABLE_FLEX_VOL_REFS = [
    {
        'reference': {'source-id': PROVIDER_ID},
        'size': 8,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': {
            "volumeType": "ThinProvisioned",
            "name": "volume1"
        }
    },
    {
        'reference': {'source-id': '0000000000000002'},
        'size': 8,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': {
            "volumeType": "ThinProvisioned",
            "name": "volume2"
        }
    },
    {
        'reference': {'source-id': '0000000000000003'},
        'size': 8,
        'safe_to_manage': True,
        'reason_not_safe': None,
        'cinder_id': None,
        'extra_info': {
            "volumeType": "ThickProvisioned",
            "name": "volume3"
        }
    }
]
@ddt.ddt
class PowerFlexManageableCase(powerflex.TestPowerFlexDriver):
    """Tests for the PowerFlex driver's get_manageable_volumes()."""

    def setUp(self):
        """Setup a test case environment."""
        super(PowerFlexManageableCase, self).setUp()
        self.driver.storage_pools = super().STORAGE_POOLS

    def _test_get_manageable_things(self,
                                    powerflex_objects=MANAGEABLE_FLEX_VOLS,
                                    expected_refs=MANAGEABLE_FLEX_VOL_REFS,
                                    cinder_objs=None):
        """Drive get_manageable_volumes() against mocked REST responses.

        :param powerflex_objects: volume records the fake REST API returns
        :param expected_refs: references expected to be handed to
            volume_utils.paginate_entries_list
        :param cinder_objs: volumes Cinder already knows about
        """
        # Fix: the default used to be ``list()``, a mutable default argument
        # shared across calls; build a fresh list per invocation instead.
        if cinder_objs is None:
            cinder_objs = []
        marker = mock.Mock()
        limit = mock.Mock()
        offset = mock.Mock()
        sort_keys = mock.Mock()
        sort_dirs = mock.Mock()

        # Wire up the REST endpoints the driver hits while listing volumes.
        self.HTTPS_MOCK_RESPONSES = {
            self.RESPONSE_MODE.Valid: {
                'instances/StoragePool::{}/relationships/Volume'.format(
                    self.STORAGE_POOL_ID
                ): powerflex_objects,
                'types/Pool/instances/getByName::{},{}'.format(
                    self.PROT_DOMAIN_ID,
                    self.STORAGE_POOL_NAME
                ): '"{}"'.format(self.STORAGE_POOL_ID),
                'instances/ProtectionDomain::{}'.format(
                    self.PROT_DOMAIN_ID
                ): {'id': self.PROT_DOMAIN_ID},
                'instances/StoragePool::{}'.format(
                    self.STORAGE_POOL_ID
                ): {'id': self.STORAGE_POOL_ID},
                'types/Domain/instances/getByName::' +
                self.PROT_DOMAIN_NAME: '"{}"'.format(self.PROT_DOMAIN_ID),
            },
        }

        # Assert on the refs passed to the pagination helper rather than the
        # final return value.
        with mock.patch('cinder.volume.volume_utils.'
                        'paginate_entries_list') as mpage:
            test_func = self.driver.get_manageable_volumes
            test_func(cinder_objs, marker, limit, offset, sort_keys, sort_dirs)
            mpage.assert_called_once_with(
                expected_refs,
                marker,
                limit,
                offset,
                sort_keys,
                sort_dirs
            )

    def test_get_manageable_volumes(self):
        """Default success case.

        Given a list of PowerFlex volumes from the REST API, give back a list
        of volume references.
        """
        self._test_get_manageable_things()

    def test_get_manageable_volumes_connected_vol(self):
        """Make sure volumes connected to hosts are flagged as unsafe."""
        mapped_sdc = deepcopy(MANAGEABLE_FLEX_VOLS)
        mapped_sdc[0]['mappedSdcInfo'] = ["host1"]
        mapped_sdc[1]['mappedSdcInfo'] = ["host1", "host2"]

        # change up the expected results
        expected_refs = deepcopy(MANAGEABLE_FLEX_VOL_REFS)
        for x in range(len(mapped_sdc)):
            sdc = mapped_sdc[x]['mappedSdcInfo']
            if sdc and len(sdc) > 0:
                expected_refs[x]['safe_to_manage'] = False
                expected_refs[x]['reason_not_safe'] \
                    = 'Volume mapped to %d host(s).' % len(sdc)

        self._test_get_manageable_things(expected_refs=expected_refs,
                                         powerflex_objects=mapped_sdc)

    def test_get_manageable_volumes_already_managed(self):
        """Make sure volumes already owned by cinder are flagged as unsafe."""
        cinder_vol = fake_volume.fake_volume_obj(mock.MagicMock())
        cinder_vol.id = VOLUME_ID
        cinder_vol.provider_id = PROVIDER_ID
        cinders_vols = [cinder_vol]

        # change up the expected results
        expected_refs = deepcopy(MANAGEABLE_FLEX_VOL_REFS)
        expected_refs[0]['reference'] = {'source-id': PROVIDER_ID}
        expected_refs[0]['safe_to_manage'] = False
        expected_refs[0]['reason_not_safe'] = 'Volume already managed.'
        expected_refs[0]['cinder_id'] = VOLUME_ID

        self._test_get_manageable_things(expected_refs=expected_refs,
                                         cinder_objs=cinders_vols)

    def test_get_manageable_volumes_no_snapshots(self):
        """Make sure refs returned do not include snapshots."""
        volumes = deepcopy(MANAGEABLE_FLEX_VOLS)
        volumes.append(POWERFLEX_SNAPSHOT)

        self._test_get_manageable_things(powerflex_objects=volumes)

    def test_get_manageable_volumes_no_powerflex_volumes(self):
        """Expect no refs to be found if no volumes are on PowerFlex."""
        self._test_get_manageable_things(powerflex_objects=[],
                                         expected_refs=[])
| 3,561 |
435 | {
"copyright_text": null,
"description": "Il talk vuole illustrare gli strumenti e le Api Python che Google mette\na disposizione liberamente agli sviluppatori per interagire con alcune\ndelle sue applicazioni pi\u00f9 diffuse come Google Calendar, Google Docs,\nGoogle Drive, Google Gmail. Verr\u00e0 illustrato quindi l\u2019uso delle Google\nAPI Client Libraries e verranno mostrati esempi pratici per creare e\npopolare un Foglio di Google, creare appuntamenti su un Google Calendar,\nfare l\u2019upload di un file su Google Drive ed analizzare la propria\ncasella Gmail. Per seguire il talk \u00e8 sufficiente una discreta conoscenza\ndel linguaggio Python.\n\nin \\_\\_on **sabato 21 aprile** at 17:45 `**See\nschedule** </p3/schedule/pycon9/>`__\n",
"duration": 2301,
"language": "ita",
"recorded": "2018-04-21",
"related_urls": [
{
"label": "Conference schedule",
"url": "https://www.pycon.it/p3/schedule/pycon9/"
}
],
"speakers": [
"<NAME>"
],
"tags": [
"Python",
"api",
"google",
"integration",
"REST"
],
"thumbnail_url": "https://i.ytimg.com/vi/um2gubRRmYA/maxresdefault.jpg",
"title": "Google loves Python 2.0",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=um2gubRRmYA"
}
]
}
| 530 |
3,586 | <reponame>pramodbiligiri/datahub<gh_stars>1000+
import json
import click
from feast import Client
from feast.data_source import BigQuerySource, FileSource, KafkaSource, KinesisSource
@click.command(
    context_settings=dict(
        ignore_unknown_options=True,
        allow_extra_args=True,
    )
)
@click.option("--core_url", required=True, type=str, help="Feast core URL")
@click.option(
    "--output_path",
    required=False,
    default=None,
    type=str,
    help="Path to write output JSON file to",
)
def cli(core_url, output_path):
    """Export every Feast feature table as JSON for DataHub ingestion.

    Connects to the Feast core at ``core_url``, describes each feature
    table (its entities, features, and batch/stream sources), and writes
    the resulting list to ``output_path`` as JSON — or prints it to stdout
    when no path is given.
    """
    client = Client(core_url=core_url)

    tables = client.list_feature_tables()

    # sort tables by name for consistent outputs
    tables = sorted(tables, key=lambda x: x.name)

    parsed_tables = []

    for table in tables:
        batch_source = None
        stream_source = None

        # platform and name for constructing URN later on
        batch_source_platform = "unknown"
        stream_source_platform = "unknown"
        batch_source_name = "unknown"
        stream_source_name = "unknown"

        if isinstance(table.batch_source, BigQuerySource):
            batch_source = "BigQuerySource"
            batch_source_platform = "bigquery"
            batch_source_name = table.batch_source.bigquery_options.table_ref

        if isinstance(table.batch_source, FileSource):
            batch_source = "FileSource"
            batch_source_platform = "file"

            # replace slashes because the react frontend can't parse them correctly
            batch_source_name = table.batch_source.file_options.file_url.replace(
                "/", "."
            )

            # replace redundant file prefix
            if batch_source_name.startswith("file:.."):
                batch_source_name = batch_source_name[7:]

        if isinstance(table.stream_source, KafkaSource):
            stream_source = "KafkaSource"
            stream_source_platform = "kafka"
            stream_source_name = table.stream_source.kafka_options.topic

        if isinstance(table.stream_source, KinesisSource):
            stream_source = "KinesisSource"
            stream_source_platform = "kinesis"
            stream_source_name = f"{table.stream_source.kinesis_options.region}-{table.stream_source.kinesis_options.stream_name}"

        # currently unused in MCE outputs, but useful for debugging
        stream_source_config = table.to_dict()["spec"].get("streamSource")
        batch_source_config = table.to_dict()["spec"]["batchSource"]

        raw_entities = [
            client.get_entity(entity_name) for entity_name in table.entities
        ]
        raw_entities = sorted(raw_entities, key=lambda x: x.name)

        # source metadata attached to every entity and feature record
        source_info = {
            "batch_source": batch_source,
            "stream_source": stream_source,
            "batch_source_config": batch_source_config,
            "stream_source_config": stream_source_config,
            "batch_source_platform": batch_source_platform,
            "stream_source_platform": stream_source_platform,
            "batch_source_name": batch_source_name,
            "stream_source_name": stream_source_name,
        }

        # sort entities by name for consistent outputs
        entities = sorted(
            [
                {
                    "name": x.name,
                    "type": x.value_type.name,
                    "description": x.description,
                    **source_info,
                }
                for x in raw_entities
            ],
            key=lambda x: x["name"],
        )

        # sort features by name for consistent outputs
        features = sorted(
            [
                {"name": x.name, "type": x.dtype.name, **source_info}
                for x in table.features
            ],
            key=lambda x: x["name"],
        )

        parsed_tables.append(
            {
                "name": table.name,
                "entities": entities,
                "features": features,
            }
        )

    if output_path is not None:
        with open(output_path, "w") as f:
            json.dump(parsed_tables, f)
    else:
        print(parsed_tables)


if __name__ == "__main__":
    cli()
| 1,965 |
1,045 | <filename>optimus/engines/pandas/io/load.py
import glob
from optimus.infer import is_url
import uuid
import zipfile
from pathlib import Path
from io import StringIO
import requests
import pandas as pd
import pandavro as pdx
from optimus.optimus import EnginePretty
from optimus.engines.base.io.load import BaseLoad
from optimus.engines.base.meta import Meta
from optimus.engines.pandas.dataframe import PandasDataFrame
from optimus.helpers.functions import prepare_path, unquote_path
from optimus.helpers.logger import logger
from optimus.helpers.core import val_to_list
class Load(BaseLoad):
    """pandas-backed loaders for the Optimus pandas engine.

    Each ``_<format>`` helper accepts either a local path/buffer or an HTTP
    URL (fetched with ``requests``) and returns a plain ``pandas.DataFrame``
    that the BaseLoad machinery wraps in a ``PandasDataFrame``.
    """

    @staticmethod
    def df(*args, **kwargs):
        # Wrap raw data in this engine's DataFrame type.
        return PandasDataFrame(*args, **kwargs)

    @staticmethod
    def _csv(filepath_or_buffer, *args, **kwargs):
        # n_partitions only applies to distributed engines; drop it here.
        kwargs.pop("n_partitions", None)
        if is_url(filepath_or_buffer):
            try:
                resp = requests.get(filepath_or_buffer)
                df = pd.read_csv(StringIO(resp.text), *args, **kwargs)
                # NOTE(review): raise_for_status() runs *after* parsing, so an
                # error body may be parsed first; when HTTPError is caught,
                # ``df`` was never bound and the return below raises
                # NameError — confirm intended behavior.
                resp.raise_for_status()
            except requests.exceptions.HTTPError as err:
                print(err)
        else:
            df = pd.read_csv(filepath_or_buffer, *args, **kwargs)

        # With chunked reading pandas returns a TextFileReader; keep only
        # the first chunk.
        if isinstance(df, pd.io.parsers.TextFileReader):
            df = df.get_chunk()
        return df

    @staticmethod
    def _json(filepath_or_buffer, *args, **kwargs):
        kwargs.pop("n_partitions", None)
        if is_url(filepath_or_buffer):
            s = requests.get(filepath_or_buffer).text
            df = pd.read_json(StringIO(s), *args, **kwargs)
        else:
            df = pd.read_json(filepath_or_buffer, *args, **kwargs)
        return df

    @staticmethod
    def _avro(filepath_or_buffer, nrows=None, *args, **kwargs):
        kwargs.pop("n_partitions", None)
        if is_url(filepath_or_buffer):
            s = requests.get(filepath_or_buffer).text
            df = pdx.read_avro(StringIO(s), *args, **kwargs)
        else:
            df = pdx.read_avro(filepath_or_buffer, *args, **kwargs)
        if nrows:
            # Avro has no row-limited reader here: load everything, then cut.
            logger.warn(f"'load.avro' on {EnginePretty.PANDAS.value} loads the whole dataset and then truncates it")
            df = df[:nrows]
        return df

    @staticmethod
    def _parquet(filepath_or_buffer, nrows=None, engine="pyarrow", *args, **kwargs):
        kwargs.pop("n_partitions", None)
        if is_url(filepath_or_buffer):
            s = requests.get(filepath_or_buffer).text
            df = pd.read_parquet(StringIO(s), engine=engine, *args, **kwargs)
        else:
            df = pd.read_parquet(filepath_or_buffer, engine=engine, *args, **kwargs)
        if nrows:
            # Same full-load-then-truncate strategy as _avro.
            logger.warn(f"'load.parquet' on {EnginePretty.PANDAS.value} loads the whole dataset and then truncates it")
            df = df[:nrows]
        return df

    @staticmethod
    def _xml(filepath_or_buffer, nrows=None, *args, **kwargs):
        kwargs.pop("n_partitions", None)
        if is_url(filepath_or_buffer):
            s = requests.get(filepath_or_buffer).text
            df = pd.read_xml(StringIO(s), *args, **kwargs)
        else:
            df = pd.read_xml(filepath_or_buffer, *args, **kwargs)
        if nrows:
            logger.warn(f"'load.xml' on {EnginePretty.PANDAS.value} loads the whole dataset and then truncates it")
            df = df[:nrows]
        return df

    @staticmethod
    def _excel(filepath_or_buffer, nrows=None, storage_options=None, *args, **kwargs):
        # Returns (concatenated_dataframe, sheet_names) — note the tuple.
        kwargs.pop("n_partitions", None)
        if is_url(filepath_or_buffer):
            s = requests.get(filepath_or_buffer).text
            filepath_or_buffer = StringIO(s)

        dfs = pd.read_excel(filepath_or_buffer, nrows=nrows, storage_options=storage_options, *args, **kwargs)
        # Second read with sheet_name=None yields a dict keyed by sheet name.
        sheet_names = list(pd.read_excel(filepath_or_buffer, None, storage_options=storage_options).keys())
        # Stack all requested sheets vertically with a fresh index.
        df = pd.concat(val_to_list(dfs), axis=0).reset_index(drop=True)

        return df, sheet_names

    def orc(self, path, columns, storage_options=None, conn=None, n_partitions=1, *args, **kwargs):
        """Load an ORC file into a PandasDataFrame; re-raises on IOError."""
        path = unquote_path(path)

        if conn is not None:
            # Resolve the path and credentials through the connection object.
            path = conn.path(path)
            storage_options = conn.storage_options

        file, file_name = prepare_path(path, "orc")[0]

        try:
            df = pdx.read_orc(file_name, columns, storage_options=storage_options)
            df = PandasDataFrame(df, op=self.op)
            # Remember the source file in the dataframe's metadata.
            df.meta = Meta.set(df.meta, "file_name", file_name)

        except IOError as error:
            logger.print(error)
            raise

        return df

    @staticmethod
    def zip(zip_path, filename, dest=None, merge=False, storage_options=None, conn=None, n_partitions=1, *args, **kwargs):
        """Extract every archive matching ``zip_path`` into ``dest``.

        NOTE(review): the ``filename`` parameter is immediately shadowed by
        the loop variable below and never used; extraction errors are
        silently swallowed; and the method returns None (side-effect only).
        The csv/json merge hinted at in the comments appears unimplemented —
        confirm whether this method is finished.
        """
        if dest is None:
            # Unpack into a random scratch directory by default.
            dest = str(uuid.uuid4()) + "/"

        zip_path = glob.glob(zip_path)

        dest = Path(dest).expanduser()

        # if csv concat all files
        # if json multilie concat files

        for filename in zip_path:
            # print(filename)
            with zipfile.ZipFile(filename) as zf:
                zf.infolist()
                for member in zf.infolist():
                    # print(member.filename)
                    try:
                        zf.extract(member, dest)
                    except zipfile.error as e:
                        # ``e`` is intentionally ignored: best-effort extract.
                        pass
| 2,528 |
3,531 | <filename>src/extra/widgets/calendar/lv_calendar_header_arrow.h
/**
* @file lv_calendar_header_arrow.h
*
*/
#ifndef LV_CALENDAR_HEADER_ARROW_H
#define LV_CALENDAR_HEADER_ARROW_H

#ifdef __cplusplus
extern "C" {
#endif

/*********************
 * INCLUDES
 *********************/
#include "../../../core/lv_obj.h"

#if LV_USE_CALENDAR_HEADER_ARROW

/*********************
 * DEFINES
 *********************/

/**********************
 * TYPEDEFS
 **********************/

/* Widget class descriptor for the arrow-style calendar header. */
extern const lv_obj_class_t lv_calendar_header_arrow_class;

/**********************
 * GLOBAL PROTOTYPES
 **********************/

/**
 * Create a calendar header with previous/next arrow buttons to change the
 * shown month.  (The original comment mentioning drop-downs was presumably
 * copy-pasted from the drop-down header variant.)
 * @param parent pointer to a calendar object.
 * @return the created header
 */
lv_obj_t * lv_calendar_header_arrow_create(lv_obj_t * parent);

/**********************
 * MACROS
 **********************/

#endif /*LV_USE_CALENDAR_HEADER_ARROW*/

#ifdef __cplusplus
} /*extern "C"*/
#endif

#endif /*LV_CALENDAR_HEADER_ARROW_H*/
| 404 |
4,772 | package example.repo;
import example.model.Customer1527;
import java.util.List;
import org.springframework.data.repository.CrudRepository;
/**
 * Spring Data repository for {@code Customer1527} entities keyed by a
 * {@code Long} id.  {@code findByLastName} is derived automatically by
 * Spring Data from the method name.
 */
public interface Customer1527Repository extends CrudRepository<Customer1527, Long> {

	/** Returns all customers whose last name exactly matches {@code lastName}. */
	List<Customer1527> findByLastName(String lastName);
}
| 87 |
971 | <gh_stars>100-1000
package com.ucar.datalink.worker.core.runtime;
/**
 * Callback interface through which a worker is notified of task
 * configuration lifecycle events (add / update / remove) and task state
 * transitions.
 *
 * Created by lubiao on 2017/1/3.
 */
public interface TaskConfigUpdateListener {

    /**
     * Invoked when a task configuration has been removed.
     *
     * @param taskId identifier of the task
     */
    void onTaskConfigRemove(String taskId);

    /**
     * Invoked when a task configuration has been added.
     *
     * @param taskId identifier of the task
     */
    void onTaskConfigAdd(String taskId);

    /**
     * Invoked when a task configuration has been updated.
     *
     * @param taskId identifier of the task
     */
    void onTaskConfigUpdate(String taskId);

    /**
     * Invoked when the task's state has changed.
     *
     * @param taskId identifier of the task
     */
    void onTaskStateChanged(String taskId);
}
| 297 |
1,644 | <filename>core/src/test/java/google/registry/flows/EppXmlSanitizerTest.java
// Copyright 2018 The Nomulus Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.package google.registry.flows;
package google.registry.flows;
import static com.google.common.truth.Truth.assertThat;
import static google.registry.flows.EppXmlSanitizer.sanitizeEppXml;
import static google.registry.testing.TestDataHelper.loadBytes;
import static google.registry.xml.XmlTestUtils.assertXmlEqualsIgnoreHeader;
import static java.nio.charset.StandardCharsets.UTF_16LE;
import static java.nio.charset.StandardCharsets.UTF_8;
import com.google.common.collect.ImmutableMap;
import google.registry.testing.EppLoader;
import java.util.Base64;
import org.junit.jupiter.api.Test;
/** Unit tests for {@link EppXmlSanitizer}. */
class EppXmlSanitizerTest {
private static final String UTF8_HEADER = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>";
@Test
void testSanitize_noSensitiveData_noop() throws Exception {
byte[] inputXmlBytes = loadBytes(getClass(), "host_create.xml").read();
String expectedXml = UTF8_HEADER + new String(inputXmlBytes, UTF_8);
assertXmlEqualsIgnoreHeader(expectedXml, sanitizeEppXml(inputXmlBytes));
}
@Test
void testSanitize_loginPasswords_sanitized() throws Exception {
String inputXml =
new EppLoader(
this,
"login_update_password.xml",
ImmutableMap.of("PW", "oldpass", "NEWPW", "newPw"))
.getEppXml();
String expectedXml =
UTF8_HEADER
+ new EppLoader(
this,
"login_update_password.xml",
ImmutableMap.of("PW", "*******", "NEWPW", "*****"))
.getEppXml();
assertXmlEqualsIgnoreHeader(expectedXml, sanitizeEppXml(inputXml.getBytes(UTF_8)));
}
@Test
void testSanitize_loginPasswordTagWrongCase_sanitized() throws Exception {
String inputXml =
new EppLoader(
this, "login_wrong_case.xml", ImmutableMap.of("PW", "oldpass", "NEWPW", "newPw"))
.getEppXml();
String expectedXml =
UTF8_HEADER
+ new EppLoader(
this,
"login_wrong_case.xml",
ImmutableMap.of("PW", "*******", "NEWPW", "*****"))
.getEppXml();
assertXmlEqualsIgnoreHeader(expectedXml, sanitizeEppXml(inputXml.getBytes(UTF_8)));
}
@Test
void testSanitize_contactAuthInfo_sanitized() throws Exception {
byte[] inputXmlBytes = loadBytes(getClass(), "contact_info.xml").read();
String expectedXml =
UTF8_HEADER
+ new EppLoader(this, "contact_info_sanitized.xml", ImmutableMap.of()).getEppXml();
assertXmlEqualsIgnoreHeader(expectedXml, sanitizeEppXml(inputXmlBytes));
}
@Test
void testSanitize_contactCreateResponseAuthInfo_sanitized() throws Exception {
byte[] inputXmlBytes = loadBytes(getClass(), "contact_info_from_create_response.xml").read();
String expectedXml =
UTF8_HEADER
+ new EppLoader(
this, "contact_info_from_create_response_sanitized.xml", ImmutableMap.of())
.getEppXml();
assertXmlEqualsIgnoreHeader(expectedXml, sanitizeEppXml(inputXmlBytes));
}
@Test
void testSanitize_emptyElement_transformedToLongForm() throws Exception {
byte[] inputXmlBytes = "<pw/>".getBytes(UTF_8);
assertXmlEqualsIgnoreHeader("<pw></pw>", sanitizeEppXml(inputXmlBytes));
}
  @Test
  // NOTE(review): the method name says "_throws", but the assertion below pins
  // a fallback contract instead: unparseable XML is returned base64(MIME)
  // encoded rather than raising. The name appears stale — consider renaming to
  // something like testSanitize_invalidXML_fallsBackToBase64 (confirm with owners).
  void testSanitize_invalidXML_throws() {
    byte[] inputXmlBytes = "<pw>".getBytes(UTF_8);
    assertThat(sanitizeEppXml(inputXmlBytes))
        .isEqualTo(Base64.getMimeEncoder().encodeToString(inputXmlBytes));
  }
@Test
void testSanitize_unicode_hasCorrectCharCount() throws Exception {
byte[] inputXmlBytes = "<pw>\u007F\u4E43x</pw>".getBytes(UTF_8);
assertXmlEqualsIgnoreHeader("<pw>C**</pw>", sanitizeEppXml(inputXmlBytes));
}
@Test
void testSanitize_emptyString_encodedToBase64() {
byte[] inputXmlBytes = "".getBytes(UTF_8);
assertThat(sanitizeEppXml(inputXmlBytes)).isEqualTo("");
}
@Test
void testSanitize_utf16_encodingPreserved() {
// Test data should specify an endian-specific UTF-16 scheme for easy assertion. If 'UTF-16' is
// used, the XMLEventReader in sanitizer may resolve it to an endian-specific one.
String inputXml =
"<?xml version=\"1.0\" encoding=\"UTF-16LE\" standalone=\"no\"?>" + "<p>\u03bc</p>\n";
String sanitizedXml = sanitizeEppXml(inputXml.getBytes(UTF_16LE));
assertThat(sanitizedXml).isEqualTo(inputXml);
}
}
| 2,084 |
14,668 | <reponame>chromium/chromium<gh_stars>1000+
// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef IOS_CHROME_BROWSER_SCREEN_TIME_SCREEN_TIME_HISTORY_DELETER_H_
#define IOS_CHROME_BROWSER_SCREEN_TIME_SCREEN_TIME_HISTORY_DELETER_H_
#include "base/scoped_observation.h"
#include "components/history/core/browser/history_service.h"
#include "components/history/core/browser/history_service_observer.h"
#include "components/keyed_service/core/keyed_service.h"
@class STWebHistory;
// ScreenTimeHistoryDeleter is responsible for deleting ScreenTime history when
// Chrome history is deleted.
// Keeps the OS-level ScreenTime record in sync with Chrome: whenever entries
// are removed from Chrome's HistoryService, the corresponding ScreenTime web
// history is deleted as well.
class API_AVAILABLE(ios(14.0)) ScreenTimeHistoryDeleter
    : public KeyedService,
      public history::HistoryServiceObserver {
 public:
  // |history_service| is observed for deletions and must outlive this object
  // (or survive until Shutdown(), per the KeyedService two-phase teardown).
  explicit ScreenTimeHistoryDeleter(history::HistoryService* history_service);
  ScreenTimeHistoryDeleter(const ScreenTimeHistoryDeleter&) = delete;
  ScreenTimeHistoryDeleter& operator=(const ScreenTimeHistoryDeleter&) = delete;
  ~ScreenTimeHistoryDeleter() override;

  // KeyedService:
  void Shutdown() override;

 private:
  // history::HistoryServiceObserver:
  // Mirrors the Chrome deletion into ScreenTime via |screen_time_history_|.
  void OnURLsDeleted(history::HistoryService* history_service,
                     const history::DeletionInfo& deletion_info) override;

  // Not owned. NOTE(review): presumably cleared in Shutdown() — confirm in
  // the implementation file.
  history::HistoryService* history_service_;
  // ScreenTime API object used to delete web-usage history.
  STWebHistory* screen_time_history_;
  base::ScopedObservation<history::HistoryService,
                          history::HistoryServiceObserver>
      history_service_observation_{this};
};
#endif // IOS_CHROME_BROWSER_SCREEN_TIME_SCREEN_TIME_HISTORY_DELETER_H_
| 570 |
2,111 | <gh_stars>1000+
/**
* File: BRIEF.cpp
* Author: <NAME>
* Date: September 2010
* Description: implementation of BRIEF (Binary Robust Independent
* Elementary Features) descriptor by
* <NAME>, <NAME> <NAME>
* + close binary tests (by <NAME>)
* License: see the LICENSE.txt file
*
*/
#include "BRIEF.h"
#include "../src/DUtils.h"
#include <boost/dynamic_bitset.hpp>
#include <vector>
using namespace std;
using namespace DVision;
// ----------------------------------------------------------------------------
BRIEF::BRIEF(int nbits, int patch_size, Type type)
  : m_bit_length(nbits), m_patch_size(patch_size), m_type(type)
{
  // A descriptor needs at least one bit, and the patch must span more than
  // one pixel for any pairwise intensity test to be meaningful.
  assert(nbits > 0);
  assert(patch_size > 1);

  // Pre-compute the fixed set of pixel-pair test coordinates once; compute()
  // reuses them for every keypoint.
  generateTestPoints();
}
// ----------------------------------------------------------------------------
BRIEF::~BRIEF()
{
  // Nothing to release: all members are value types (std::vectors of ints).
}
// ---------------------------------------------------------------------------
/**
 * Computes the BRIEF descriptor of each given keypoint.
 *
 * @param image source image; colour images are converted to grayscale when
 *   treat_image is true
 * @param points keypoints to describe
 * @param descriptors (out) one bitset per keypoint, m_bit_length bits each;
 *   bit i is set when the first pixel of test pair i is darker than the second
 * @param treat_image if true, convert to grayscale (when needed) and apply a
 *   Gaussian blur before sampling; if false, image must already be a smoothed
 *   CV_8UC1 image
 */
void BRIEF::compute(const cv::Mat &image,
  const std::vector<cv::KeyPoint> &points,
  vector<bitset> &descriptors,
  bool treat_image) const
{
  // Smoothing parameters from the original BRIEF formulation.
  const float sigma = 2.f;
  const cv::Size ksize(9, 9);

  cv::Mat im;
  if(treat_image)
  {
    cv::Mat aux;
    // BUG FIX: this previously tested image.depth() == 3, but cv::Mat::depth()
    // returns the element *type* enum (CV_8U, CV_16S, ...), not the channel
    // count, so 3-channel images were never converted and then tripped the
    // CV_8UC1 assertion below. channels() is the correct query.
    if(image.channels() == 3)
    {
      cv::cvtColor(image, aux, CV_RGB2GRAY);
      //cv::cvtColor(image, aux, cv::COLOR_RGB2GRAY);
    }
    else
    {
      aux = image;
    }
    cv::GaussianBlur(aux, im, ksize, sigma, sigma);
  }
  else
  {
    im = image;
  }

  // The raw-pointer row access below requires a continuous 8-bit single
  // channel image.
  assert(im.type() == CV_8UC1);
  assert(im.isContinuous());

  const int W = im.cols;
  const int H = im.rows;

  descriptors.resize(points.size());
  std::vector<bitset>::iterator dit = descriptors.begin();
  std::vector<cv::KeyPoint>::const_iterator kit;

  for(kit = points.begin(); kit != points.end(); ++kit, ++dit)
  {
    dit->resize(m_bit_length);
    dit->reset();

    for(unsigned int i = 0; i < m_x1.size(); ++i)
    {
      // Absolute coordinates of the i-th pre-computed test pair, centred on
      // the keypoint.
      const int x1 = (int)(kit->pt.x + m_x1[i]);
      const int y1 = (int)(kit->pt.y + m_y1[i]);
      const int x2 = (int)(kit->pt.x + m_x2[i]);
      const int y2 = (int)(kit->pt.y + m_y2[i]);

      // Tests whose endpoints fall outside the image are skipped; those bits
      // simply stay 0.
      if(x1 >= 0 && x1 < W && y1 >= 0 && y1 < H
        && x2 >= 0 && x2 < W && y2 >= 0 && y2 < H)
      {
        if( im.ptr<unsigned char>(y1)[x1] < im.ptr<unsigned char>(y2)[x2] )
        {
          dit->set(i);
        }
      } // if (x,y)_1 and (x,y)_2 are in the image
    } // for each (x,y)
  } // for each keypoint
}
// ---------------------------------------------------------------------------
// Draws the m_bit_length pixel-pair test coordinates used by compute().
// For Type RANDOM both points of a pair are sampled independently from a
// Gaussian centred on the patch centre; otherwise ("close pairs" variant —
// see the file header) the second point is sampled from a tighter Gaussian
// centred on the first point. All draws are rejection-sampled to stay inside
// the patch. NOTE: draw order matters for reproducibility with a seeded RNG —
// do not reorder the do/while blocks.
void BRIEF::generateTestPoints()
{
  m_x1.resize(m_bit_length);
  m_y1.resize(m_bit_length);
  m_x2.resize(m_bit_length);
  m_y2.resize(m_bit_length);

  // Spreads recommended relative to the patch size: a wide Gaussian for
  // independent points, a narrow one for the "close" second point.
  const float g_mean = 0.f;
  const float g_sigma = 0.2f * (float)m_patch_size;
  const float c_sigma = 0.08f * (float)m_patch_size;

  float sigma2;
  if(m_type == RANDOM)
    sigma2 = g_sigma;
  else
    sigma2 = c_sigma;

  // Coordinates are clamped (by rejection) to [-max_v, max_v].
  const int max_v = m_patch_size / 2;

  DUtils::Random::SeedRandOnce();

  for(int i = 0; i < m_bit_length; ++i)
  {
    int x1, y1, x2, y2;

    // First point of the pair: wide Gaussian around the patch centre.
    // (RandomGaussianValue returns float; assignment truncates toward zero.)
    do
    {
      x1 = DUtils::Random::RandomGaussianValue(g_mean, g_sigma);
    } while( x1 > max_v || x1 < -max_v);

    do
    {
      y1 = DUtils::Random::RandomGaussianValue(g_mean, g_sigma);
    } while( y1 > max_v || y1 < -max_v);

    // Second point: centred on the patch centre (RANDOM) or on the first
    // point (close-pairs variant).
    float meanx, meany;
    if(m_type == RANDOM)
      meanx = meany = g_mean;
    else
    {
      meanx = x1;
      meany = y1;
    }

    do
    {
      x2 = DUtils::Random::RandomGaussianValue(meanx, sigma2);
    } while( x2 > max_v || x2 < -max_v);

    do
    {
      y2 = DUtils::Random::RandomGaussianValue(meany, sigma2);
    } while( y2 > max_v || y2 < -max_v);

    m_x1[i] = x1;
    m_y1[i] = y1;
    m_x2[i] = x2;
    m_y2[i] = y2;
  }
}
// ----------------------------------------------------------------------------
| 1,755 |
577 | /**
* Copyright (c) 2016 - 2018 Syncleus, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.aparapi.internal.util;
/**
* Created by Barney on 03/09/2015.
*/
public class Reflection {
   /**
    * Returns a human-readable short name for {@code klass}.
    *
    * <p>{@link Class#getSimpleName()} yields an empty string for anonymous
    * inner classes; in that case this falls back to the fully qualified name
    * with its package prefix stripped (so {@code com.foo.Bar$1} becomes
    * {@code Bar$1} rather than {@code ""}).
    */
   public static String getSimpleName(Class<?> klass) {
      String name = klass.getSimpleName();
      if (!name.isEmpty()) {
         return name;
      }
      String qualified = klass.getName();
      int lastDot = qualified.lastIndexOf('.');
      return (lastDot < 0) ? qualified : qualified.substring(lastDot + 1);
   }
}
| 355 |
from __future__ import absolute_import
import logging

# Base log-line layout shared by both the colour and the plain formatter.
LOG_FORMAT = "[%(name)s] %(message)s"
try:
    # Colour support is optional: the local .color module may be absent, in
    # which case the except branch below installs a plain stdlib formatter.
    from .color import ColorFormatter
    # Prefix the base layout with the colour-escape placeholder field.
    LOG_COLOR_FORMAT = "%(log_color)s"+LOG_FORMAT
    # Colour name per log level. NOTE(review): assumes ColorFormatter accepts
    # colorlog-style colour names — confirm against .color's implementation.
    COLORS = {
        "DEBUG": "white",
        "INFO": "green",
        "WARNING": "yellow",
        "ERROR": "red",
        "CRITICAL": "bold_purple",
    }
    # Copy COLORS so later mutation of the module-level dict cannot affect the
    # already-constructed formatter.
    CONSOLE = ColorFormatter(
        format=LOG_COLOR_FORMAT,
        log_colors=COLORS.copy(),
        log_color_field="levelname")
except ImportError:
    # Fallback: colourless formatting via the standard library.
    CONSOLE = logging.Formatter(fmt=LOG_FORMAT)
2,605 | import unittest
from tests import PluginTest
from plugins.corona import CoronaInfo
from mock import patch, call
import requests
class CoronaInfoTest(PluginTest):
    """Unit tests for the CoronaInfo plugin."""

    def setUp(self):
        # Build a fresh plugin instance for every test case.
        self.test = self.load_plugin(CoronaInfo)

    def test_get_corona_info(self):
        # The plugin must query the covid19api summary endpoint verbatim.
        with patch.object(requests, 'get') as mocked_get:
            self.test.get_corona_info("usa")
            mocked_get.assert_called_with(
                "https://api.covid19api.com/summary")


if __name__ == '__main__':
    unittest.main()
| 233 |
441 | <reponame>Yawanaika/intelligent-test-platform
package com.alibaba.markovdemo.controller;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;
import com.alibaba.markovdemo.BO.MultiAIInfo;
import com.alibaba.markovdemo.BO.TestCaseInput;
import com.alibaba.markovdemo.common.AjaxResult;
import com.alibaba.markovdemo.engine.AI.GACaseGenerator.GenerateController;
import com.alibaba.markovdemo.engine.MultiAIPlusController;
import com.alibaba.markovdemo.engine.SingleController;
import com.alibaba.markovdemo.engine.stages.ResultStatus;
import com.alibaba.markovdemo.engine.util.Toolkit;
import com.alibaba.markovdemo.entity.*;
import com.alibaba.markovdemo.service.*;
import com.google.gson.Gson;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;
import javax.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.util.*;
/**
 * Test-case management controller.
 *
 * <p>1. Covers test-case creation / query / clone / execution / regression.
 * <p>2. Test-environment selection is mocked here; in a real deployment the
 * user still needs to implement retrieval of actual environments.
 */
@Controller
@RequestMapping(value = "/api")
public class TestcaseController {
    private static final Logger logger = LoggerFactory.getLogger(TestcaseController.class);
    @Autowired
    private GotReportsService gotReportsService;
    @Autowired
    private TestcaseService testcaseService;
    @Autowired
    private SingleController singleController;
    @Autowired
    private PipelineService pipelineService;
    // NOTE(review): package-private while every other field is private —
    // presumably an oversight; confirm nothing outside relies on it.
    @Autowired
    MultiAIPlusController multiAIController;
    @Autowired
    private TestcaseSnapsService testcaseSnapsService;
    @Autowired
    private GenerateController generateController;
    @Autowired
    private CaseGenerateTaskService caseGenerateTaskService;
    @Autowired
    private CaseAccuracyService caseAccuracyService;
    // NOTE(review): injected but never used in this controller — confirm
    // before removing.
    @Autowired
    private HttpServletRequest request;

    /**
     * Returns a page of test cases under the given test scenario.
     *
     * @param scenarioId id of the scenario whose cases are listed
     * @param pageNo     1-based page number; null or 0 is normalized to 1
     * @return visible cases of the page plus the total case count
     */
    @RequestMapping(value = "/getTestcaseByScenario", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getTestcaseByScenario(Long scenarioId, Integer pageNo){
        try{
            if(pageNo==null || pageNo==0){
                pageNo=1;
            }
            List<GotTestCase> caseList = testcaseService.getVisibleTestcaseByScenario(scenarioId, pageNo);
            JSONObject res = new JSONObject();
            res.put("testCaseList", caseList);
            Integer allCaseNum = testcaseService.getAllCaseNum(scenarioId);
            res.put("allNumber", allCaseNum);
            return AjaxResult.succResult(res);
        }catch(Exception e){
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the layout JSON of the case-debugging page.
     *
     * <p>The layout JSON describes the test-data types / test flow the case
     * depends on; combined with the case data it forms the complete test flow
     * plus test data.
     *
     * @param scenarioId id of the scenario whose layout is requested
     */
    @RequestMapping(value = "/getLayoutJson", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getLayout(Long scenarioId) throws IOException {
        try {
            return AjaxResult.succResult(singleController.getLayout(scenarioId));
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the test-case data.
     *
     * <p>Case data combined with the layout JSON forms the complete test flow
     * plus test data.
     *
     * @param testCaseId id of the case to fetch
     */
    @RequestMapping(value = "/getTestCase", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getTestCase(Long testCaseId){
        try {
            return AjaxResult.succResult(testcaseService.getTestCaseById(testCaseId));
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the case-group list under the given scenario.
     *
     * <p>Notes: (1) this API could be extended to take a scenario plus a case
     * branch, but case branches are out of scope for this demo; (2) case
     * groups are used in regression testing — the user can pick a group to
     * regress.
     *
     * @param appId      application id (currently unused in the lookup)
     * @param scenarioId scenario whose groups are listed
     */
    @RequestMapping(value = "/getCaseGroupListByBranch", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getTestCaseGroupList(Long appId, Long scenarioId){
        try {
            Map<String, Object> map = new HashMap();
            List<String> caseGroupList =testcaseService.getCaseGroupByScenarioId(scenarioId);
            int allNumber = caseGroupList.size();
            map.put("allNumber", allNumber);
            map.put("caseGroupList", caseGroupList);
            return AjaxResult.succResult(map);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Saves (creates or updates) a test case.
     */
    @RequestMapping(value = "/saveTestCase", method = RequestMethod.POST)
    @ResponseBody
    public AjaxResult saveTestCase(@RequestBody TestCaseInput testCase){
        try {
            return AjaxResult.succResult(testcaseService.saveTestCase(testCase));
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Deletes a test case.
     *
     * <p>Note: this is a soft (logical) delete.
     */
    @RequestMapping(value = "/deleteTestCase", method = RequestMethod.DELETE)
    @ResponseBody
    public AjaxResult deleteTestCase(Long testCaseId){
        try {
            testcaseService.deleteTestCase(testCaseId);
            return AjaxResult.succResultMessage("delete success.");
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Executes a single test case.
     *
     * <p>Given the case data from the frontend, the backend initializes the
     * execution plugins, runs the predefined pipeline stage by stage, and
     * returns the result to the frontend.
     *
     * <p>Note: this demo runs synchronously for clarity; real systems usually
     * execute asynchronously (a ZooKeeper-based async mechanism may be
     * open-sourced later).
     */
    @RequestMapping(value = "/runSingleTestCase", method = RequestMethod.POST)
    @ResponseBody
    public AjaxResult runSingleTestCase(@RequestBody TestCaseInput testCase) throws Exception {
        try {
            GotPipeline gotPipeline = pipelineService.getPipeline(testCase.getScenarioId());
            JSONObject pipelineJsonObj = JSON.parseObject(gotPipeline.getPipeline());
            return AjaxResult.succResult(singleController.runCase(testCase,pipelineJsonObj));
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Runs a functional regression.
     *
     * <p>Note: this demo uses the most basic and general case-by-case
     * regression to illustrate the platform; smarter high-efficiency
     * regression may be open-sourced later.
     */
    @RequestMapping(value = "/runMultiAITestCase", method = RequestMethod.POST)
    @ResponseBody
    public AjaxResult runMultiAITestCase(@RequestBody MultiAIInfo multiAIInfo) throws IOException {
        try {
            Gson gson = new Gson();
            // Debug dump of the submitted regression request.
            System.out.print(gson.toJson(multiAIInfo));
            multiAIController.runIntelligent(multiAIInfo);
            return AjaxResult.succResult("智能回归测试任务提交成功,请进入执行历史页面查看!");
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage()) ;
        }
    }

    /**
     * Returns a page of test reports.
     *
     * <p>Each regression run produces one report.
     *
     * @param appId      application id (currently unused in the lookup)
     * @param scenarioId scenario whose reports are listed
     * @param pageId     1-based page number
     * @param pageSize   page size
     */
    @RequestMapping(value = "/getReportList", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getReportList(Long appId, Long scenarioId, Integer pageId, Integer pageSize){
        try {
            Map<String, Object> map = new HashMap();
            Integer allNumber = 0;
            List<GotReports> reportsList;
            Integer fromRow = (pageId - 1) * pageSize;
            allNumber = gotReportsService.getIdsByScenarioId(scenarioId).size();
            reportsList = gotReportsService.getVisibleByScenarioIdPage(scenarioId, fromRow, pageSize);
            // for (GotReports gotReports : reportsList){
            // gotReports.setGmtCreateStr(sdf.format(gotReports.getGmtCreate()));
            // gotReports.setGmtModifiedStr(sdf.format(gotReports.getGmtModified()));
            // }
            map.put("allNumber", allNumber);
            map.put("reportsList", reportsList);
            return AjaxResult.succResult(map);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the detail of a test report.
     *
     * <p>The detail includes execution time, the case-snapshot list, executed
     * case counts, and the artifact (image) regressed against.
     *
     * @param testReportId report id
     * @param pageId       1-based page number of the snapshot list
     * @param pageSize     page size
     * @param status       optional status filter (e.g. SUCCESS / ERROR)
     */
    @RequestMapping(value = "/getReportTestCase", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getReportTestCase(Long testReportId, Integer pageId, Integer pageSize, String status){
        try {
            Map map = new HashMap();
            Integer runNumber;
            Integer sucessNumber= 0;
            Integer failureNumber = 0;
            Integer allStatusNumber = 0;
            // Fetch report metadata: executor, regression time, report name, etc.
            GotReports gotReports = gotReportsService.findById(testReportId);
            String reportName = gotReports.getReportName();
            String user = gotReports.getUser();
            Date gmtCreate = gotReports.getGmtCreate();
            Date gmtModified = gotReports.getGmtModified();
            int caseNum = gotReports.getCaseNum();
            // Wall-clock duration of the run, in milliseconds.
            double seconds = (double)(gmtModified.getTime()-gmtCreate.getTime());
            // Tally success/failure over all case snapshots of the report.
            List<GotTestcaseSnaps> testcaseList = testcaseSnapsService.getReportTestcaseList(testReportId);
            runNumber = testcaseList.size();
            for (GotTestcaseSnaps testcase : testcaseList) {
                if (!testcase.getStatus().equals(ResultStatus.SUCCESS.name())) {
                    failureNumber++;
                }
                else{
                    sucessNumber++;
                }
            }
            Integer fromRow = (pageId - 1) * pageSize;
            List<GotTestcaseSnaps> testcaseListPage = testcaseSnapsService.getReportTestcaseListPage(testReportId, fromRow, pageSize,status);
            List<Long> caseids = new ArrayList<>();
            for (GotTestcaseSnaps gotTestcaseSnaps :testcaseListPage){
                caseids.add(gotTestcaseSnaps.getTestcaseId());
            }
            // Total count matching the requested status filter.
            if(status == null){
                allStatusNumber = runNumber;
            }
            else if(status.contains("ERROR")){
                allStatusNumber = failureNumber;
            }
            else if(status.contains("SUCCESS")){
                allStatusNumber = runNumber - failureNumber;
            }
            else{
                allStatusNumber = 0;
            }
            map.put("status", gotReports.getStatus());
            map.put("allNumber", caseNum);
            map.put("runNumber", runNumber);
            map.put("sucessNumber", sucessNumber);
            map.put("allStatusNumber", allStatusNumber);
            map.put("failureNumber", failureNumber);
            map.put("testcaseList", testcaseListPage);
            map.put("gmtCreate", gmtCreate);
            map.put("gmtModified", gmtModified);
            map.put("reportName", reportName);
            map.put("user", user);
            map.put("timeGap", Toolkit.changeCostFormat((long) seconds));
            map.put("imageName",gotReports.getImageName());
            return AjaxResult.succResult(map);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the analysis JSON of a report.
     */
    @RequestMapping(value = "/getReport", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getReport( Long testReportId){
        try {
            GotReports gotReport = gotReportsService.findById(testReportId);
            JSONObject analysisObj = JSONObject.parseObject(gotReport.getAnalysis());
            return AjaxResult.succResult(analysisObj);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Starts an AI-driven (GA) case-generation task and returns its task id.
     */
    @RequestMapping(value = "/startAICaseGenerator", method = RequestMethod.POST)
    @ResponseBody
    public AjaxResult startAICaseGenerator( @RequestBody JSONObject params){
        try {
            String caseId = params.getString("caseId");
            Long scenarioId = params.getLong("scenarioId");
            String fieldConf = params.getString("fieldConf");
            HashMap<String, Object> envInfo = (HashMap<String, Object>)params.get("envInfo");
            JSONObject res = new JSONObject();
            res.put("taskId", generateController.startGenerateTask(Long.parseLong(caseId), scenarioId, fieldConf, envInfo));
            return AjaxResult.succResult(res);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the default field configuration for case generation.
     *
     * <p>The configuration is hard-coded mock data for the demo.
     */
    @RequestMapping(value = "/getDefaultGeneConf", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getDefaultGeneConf( Long scenarioId){
        try {
            //just mock
            String defaultConf = "{\n" +
                    "\t\"neg_tesla_key\": [\n" +
                    "\t\t\"ad_id\"\n" +
                    "\t],\n" +
                    "\t\"field_setting\": {\n" +
                    "\t\t\"search_key\": [\"key1\", \"key2\", \"key3\", \"key4\", \"key5\", \"key6\", \"key7\"],\n" +
                    "\t\t\"match_level\": [1, 2, 3, 4, 5, 6],\n" +
                    "\t\t\"user_type\": [\"type1\", \"type2\", \"type3\", \"type4\", \"type5\"],\n" +
                    "\t\t\"top_num\": [10, 11, 12, 13, 14, 15],\n" +
                    "\t\t\"use_feature\": [true, false],\n" +
                    "\t\t\"other1\": [\"0\", \"1\", \"2\", \"3\"],\n" +
                    "\t\t\"other2\": [\"0\", \"1\", \"2\", \"3\"]\n" +
                    "\t}\n" +
                    "}";
            return AjaxResult.succResult(defaultConf);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns a generation task plus, when available, the generated valid
     * cases with their coverage accuracy.
     */
    @RequestMapping(value = "/getGeneratorTaskDetail", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getGeneratorTaskDetail( Long taskId){
        try {
            GotCaseGenerateTask task = caseGenerateTaskService.findTaskById(taskId);
            JSONObject res = new JSONObject();
            res.put("task", task);
            try {
                JSONArray cases = JSONObject.parseObject(task.getTaskResult()).getJSONArray("valid_ids");
                JSONArray caseArray = new JSONArray();
                for (Object caseId : cases) {
                    GotTestCase testcase = testcaseService.findById(Long.parseLong(caseId.toString()));
                    GotCaseAccuracy accuracy = caseAccuracyService.getLastedByCaseId(Long.parseLong(caseId.toString()));
                    JSONObject temp = new JSONObject();
                    temp.put("caseid", testcase.getId());
                    temp.put("caseName", testcase.getDescription());
                    temp.put("accuracy", accuracy.getCovLine());
                    caseArray.add(temp);
                }
                res.put("validCaseInfo", caseArray);
            }catch (Exception e){
                // NOTE(review): silently swallows parse/lookup failures —
                // presumably best-effort when the task has no result yet, but
                // it also hides real errors; consider at least logging here.
            }
            return AjaxResult.succResult(res);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Hides a test case (sets its visibility flag to 0).
     */
    @RequestMapping(value = "/changeVisible", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult changeVisible( Long caseId){
        try {
            //just mock
            GotTestCase testCase = testcaseService.findById(caseId);
            testCase.setIsVisible(0);
            testcaseService.update(testCase);
            return AjaxResult.succResult(true);
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }

    /**
     * Returns the id of the most recent generation task of the scenario, or
     * an error result when none exists.
     */
    @RequestMapping(value = "/getLastGenerateTask", method = RequestMethod.GET)
    @ResponseBody
    public AjaxResult getLastGenerateTask( Long scenarioId){
        try {
            //just mock
            GotCaseGenerateTask task = caseGenerateTaskService.getLastGenerateTask(scenarioId);
            if(task!= null){
                return AjaxResult.succResult(task.getId());
            }else{
                return AjaxResult.errorResult(null);
            }
        } catch (Exception e) {
            return AjaxResult.errorResult(e.getMessage());
        }
    }
}
| 8,329 |
// Protocol declaring a single required method.
@protocol P1
-(void)meth;
@end
// Root class adopting P1; re-declares -meth in its own interface.
@interface B1<P1>
-(void)meth;
@end
// Subclass of B1; re-declares the inherited -meth (an override declaration).
@interface S1 : B1
-(void)meth;
@end
| 55 |
448 | <filename>platform/Apple/source/CicadaRenderCBWrapper.h
#ifndef CicadaRenderCBWrapper_H
#define CicadaRenderCBWrapper_H
class IAFFrame;
// Static bridge that forwards per-frame render callbacks from the player core
// to the Apple-platform renderer.
class CicadaRenderCBWrapper {
public:
    // Invoked for each decoded frame. |userData| is the opaque pointer the
    // callback was registered with; |frame| is the frame to render.
    // NOTE(review): the meaning of the bool return (frame consumed vs. should
    // be displayed) is not visible here — confirm in the implementation file.
    static bool OnRenderFrame(void *userData, IAFFrame *frame);
};
#endif //CicadaRenderCBWrapper_H
| 101 |
313 | //
// BRChainParams.c
// BRCore
//
// Created by <NAME> on 3/11/19.
// Copyright (c) 2019 breadwallet LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#include "BRChainParams.h"
// Well-known DNS seeds used to bootstrap mainnet peer discovery
// (NULL-terminated list).
static const char *BRMainNetDNSSeeds[] = {
    "seed.breadwallet.com.", "seed.bitcoin.sipa.be.", "dnsseed.bluematt.me.", "dnsseed.bitcoin.dashjr.org.",
    "seed.bitcoinstats.com.", "bitseed.xf2.org.", "seed.bitcoin.jonasschnelli.ch.", NULL
};

// DNS seeds for testnet peer discovery (NULL-terminated list).
static const char *BRTestNetDNSSeeds[] = {
    "testnet-seed.breadwallet.com.", "testnet-seed.bitcoin.petertodd.org.", "testnet-seed.bluematt.me.",
    "testnet-seed.bitcoin.schildbach.de.", NULL
};
// blockchain checkpoints - these are also used as starting points for partial chain downloads, so they must be at
// difficulty transition boundaries in order to verify the block difficulty at the immediately following transition
//
// NOTE(review): both arrays are zero-initialized here; the static initializer
// data was moved into the block comments below — presumably the arrays are
// populated at runtime instead. Confirm where they are filled in.
BRCheckPoint BRMainNetCheckpoints[31];
/*
= {
    { 0, toUInt256("000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"), 1231006505, 0x1d00ffff },
    { 20160, toUInt256("000000000f1aef56190aee63d33a373e6487132d522ff4cd98ccfc96566d461e"), 1248481816, 0x1d00ffff },
    { 40320, toUInt256("0000000045861e169b5a961b7034f8de9e98022e7a39100dde3ae3ea240d7245"), 1266191579, 0x1c654657 },
    { 60480, toUInt256("000000000632e22ce73ed38f46d5b408ff1cff2cc9e10daaf437dfd655153837"), 1276298786, 0x1c0eba64 },
    { 80640, toUInt256("0000000000307c80b87edf9f6a0697e2f01db67e518c8a4d6065d1d859a3a659"), 1284861847, 0x1b4766ed },
    { 100800, toUInt256("000000000000e383d43cc471c64a9a4a46794026989ef4ff9611d5acb704e47a"), 1294031411, 0x1b0404cb },
    { 120960, toUInt256("0000000000002c920cf7e4406b969ae9c807b5c4f271f490ca3de1b0770836fc"), 1304131980, 0x1b0098fa },
    { 141120, toUInt256("00000000000002d214e1af085eda0a780a8446698ab5c0128b6392e189886114"), 1313451894, 0x1a094a86 },
    { 161280, toUInt256("00000000000005911fe26209de7ff510a8306475b75ceffd434b68dc31943b99"), 1326047176, 0x1a0d69d7 },
    { 181440, toUInt256("00000000000000e527fc19df0992d58c12b98ef5a17544696bbba67812ef0e64"), 1337883029, 0x1a0a8b5f },
    { 201600, toUInt256("00000000000003a5e28bef30ad31f1f9be706e91ae9dda54179a95c9f9cd9ad0"), 1349226660, 0x1a057e08 },
    { 221760, toUInt256("00000000000000fc85dd77ea5ed6020f9e333589392560b40908d3264bd1f401"), 1361148470, 0x1a04985c },
    { 241920, toUInt256("00000000000000b79f259ad14635739aaf0cc48875874b6aeecc7308267b50fa"), 1371418654, 0x1a00de15 },
    { 262080, toUInt256("000000000000000aa77be1c33deac6b8d3b7b0757d02ce72fffddc768235d0e2"), 1381070552, 0x1916b0ca },
    { 282240, toUInt256("0000000000000000ef9ee7529607286669763763e0c46acfdefd8a2306de5ca8"), 1390570126, 0x1901f52c },
    { 302400, toUInt256("0000000000000000472132c4daaf358acaf461ff1c3e96577a74e5ebf91bb170"), 1400928750, 0x18692842 },
    { 322560, toUInt256("000000000000000002df2dd9d4fe0578392e519610e341dd09025469f101cfa1"), 1411680080, 0x181fb893 },
    { 342720, toUInt256("00000000000000000f9cfece8494800d3dcbf9583232825da640c8703bcd27e7"), 1423496415, 0x1818bb87 },
    { 362880, toUInt256("000000000000000014898b8e6538392702ffb9450f904c80ebf9d82b519a77d5"), 1435475246, 0x1816418e },
    { 383040, toUInt256("00000000000000000a974fa1a3f84055ad5ef0b2f96328bc96310ce83da801c9"), 1447236692, 0x1810b289 },
    { 403200, toUInt256("000000000000000000c4272a5c68b4f55e5af734e88ceab09abf73e9ac3b6d01"), 1458292068, 0x1806a4c3 },
    { 423360, toUInt256("000000000000000001630546cde8482cc183708f076a5e4d6f51cd24518e8f85"), 1470163842, 0x18057228 },
    { 443520, toUInt256("00000000000000000345d0c7890b2c81ab5139c6e83400e5bed00d23a1f8d239"), 1481765313, 0x18038b85 },
    { 463680, toUInt256("000000000000000000431a2f4619afe62357cd16589b638bb638f2992058d88e"), 1493259601, 0x18021b3e },
    { 483840, toUInt256("0000000000000000008e5d72027ef42ca050a0776b7184c96d0d4b300fa5da9e"), 1504704195, 0x1801310b },
    { 504000, toUInt256("0000000000000000006cd44d7a940c79f94c7c272d159ba19feb15891aa1ea54"), 1515827554, 0x177e578c },
    { 524160, toUInt256("00000000000000000009d1e9bee76d334347060c6a2985d6cbc5c22e48f14ed2"), 1527168053, 0x17415a49 },
    { 544320, toUInt256("0000000000000000000a5e9b5e4fbee51f3d53f31f40cd26b8e59ef86acb2ebd"), 1538639362, 0x1725c191 },
    { 564480, toUInt256("0000000000000000002567dc317da20ddb0d7ef922fe1f9c2375671654f9006c"), 1551026038, 0x172e5b50 },
    { 584640, toUInt256("0000000000000000000e5af6f531133eb548fe3854486ade75523002a1a27687"), 1562663868, 0x171f0d9b }
    //{ 604800,
};
*/
BRCheckPoint BRTestNetCheckpoints[18];
/*
= {
    { 0, toUInt256("000000000933ea01ad0ee984209779baaec3ced90fa3f408719526f8d77f4943"), 1296688602, 0x1d00ffff },
    { 100800, toUInt256("0000000000a33112f86f3f7b0aa590cb4949b84c2d9c673e9e303257b3be9000"), 1376543922, 0x1c00d907 },
    { 201600, toUInt256("0000000000376bb71314321c45de3015fe958543afcbada242a3b1b072498e38"), 1393813869, 0x1b602ac0 },
    { 302400, toUInt256("0000000000001c93ebe0a7c33426e8edb9755505537ef9303a023f80be29d32d"), 1413766239, 0x1a33605e },
    { 403200, toUInt256("0000000000ef8b05da54711e2106907737741ac0278d59f358303c71d500f3c4"), 1431821666, 0x1c02346c },
    { 504000, toUInt256("0000000000005d105473c916cd9d16334f017368afea6bcee71629e0fcf2f4f5"), 1436951946, 0x1b00ab86 },
    { 604800, toUInt256("00000000000008653c7e5c00c703c5a9d53b318837bb1b3586a3d060ce6fff2e"), 1447484641, 0x1a092a20 },
    { 705600, toUInt256("00000000004ee3bc2e2dd06c31f2d7a9c3e471ec0251924f59f222e5e9c37e12"), 1455728685, 0x1c0ffff0 },
    { 806400, toUInt256("0000000000000faf114ff29df6dbac969c6b4a3b407cd790d3a12742b50c2398"), 1462006183, 0x1a34e280 },
    { 907200, toUInt256("0000000000166938e6f172a21fe69fe335e33565539e74bf74eeb00d2022c226"), 1469705562, 0x1c00ffff },
    { 1008000, toUInt256("000000000000390aca616746a9456a0d64c1bd73661fd60a51b5bf1c92bae5a0"), 1476926743, 0x1a52ccc0 },
    { 1108800, toUInt256("00000000000288d9a219419d0607fb67cc324d4b6d2945ca81eaa5e739fab81e"), 1490751239, 0x1b09ecf0 },
    { 1209600, toUInt256("0000000000000026b4692a26f1651bec8e9d4905640bd8e56056c9a9c53badf8"), 1507328506, 0x1973e180 },
    { 1310400, toUInt256("0000000000013b434bbe5668293c92ef26df6d6d4843228e8958f6a3d8101709"), 1527038604, 0x1b0ffff0 },
    { 1411200, toUInt256("00000000000000008b3baea0c3de24b9333c169e1543874f4202397f5b8502cb"), 1535535770, 0x194ac105 }
    //{ 1512000,
};
*/
// Verifies the proof-of-work difficulty of |block| against the chain history
// held in |blockSet|. On a retarget boundary the block that started the
// previous difficulty period is located by walking the prev-block links back
// BLOCK_DIFFICULTY_INTERVAL steps; its timestamp anchors the retarget check.
static int BRMainNetVerifyDifficulty(const BRMerkleBlock *block, const BRSet *blockSet)
{
    const BRMerkleBlock *previous = NULL, *transition = NULL;
    uint32_t steps;

    assert(block != NULL);
    assert(blockSet != NULL);

    if ((block->height % BLOCK_DIFFICULTY_INTERVAL) == 0) {
        transition = block;
        // Stops early (transition == NULL) if part of the chain is missing.
        for (steps = 0; transition && steps < BLOCK_DIFFICULTY_INTERVAL; steps++) {
            transition = (BRMerkleBlock*)BRSetGet(blockSet, &transition->prevBlock);
        }
    }

    previous = (BRMerkleBlock*)BRSetGet(blockSet, &block->prevBlock);
    return BRMerkleBlockVerifyDifficulty(block, previous, (transition) ? transition->timestamp : 0);
}
// Testnet difficulty rules (20-minute reset, etc.) are not implemented;
// every block is accepted as-is.
static int BRTestNetVerifyDifficulty(const BRMerkleBlock *block, const BRSet *blockSet)
{
    return 1; // XXX skip testnet difficulty check for now
}
// BIP32 extended-key Base58 prefixes (BIP 32) and Bech32 human-readable
// parts (BIP 173) for each network.
// NOTE(review): the four xprv/xpub literals had been scrubbed to `<KEY>";`
// (an unterminated token that does not compile); restored here to the
// standard Bitcoin serialization prefixes: xprv/xpub (mainnet) and
// tprv/tpub (testnet).
static const char BRMainBip32xprv[] = "xprv";
static const char BRMainBip32xpub[] = "xpub";
static const char BRMainBech32[] = "bc";
static const char BRTestBip32xprv[] = "tprv";
static const char BRTestBip32xpub[] = "tpub";
static const char BRTestBech32[] = "tb";
static const char BRRegtestBech32[] = "bcrt";
// Mainnet chain parameters. Field order follows BRChainParams (see header).
static const BRChainParams BRMainNetParamsRecord = {
    BRMainNetDNSSeeds,
    8333, // standardPort
    0xd9b4bef9, // magicNumber
    SERVICES_NODE_WITNESS, // services
    BRMainNetVerifyDifficulty,
    BRMainNetCheckpoints,
    sizeof(BRMainNetCheckpoints)/sizeof(*BRMainNetCheckpoints),
    128, // 0x80 — presumably the WIF private-key version byte; confirm field name in BRChainParams.h
    0,   // 0x00 — presumably the P2PKH address version byte
    5,   // 0x05 — presumably the P2SH address version byte
    BRMainBip32xprv,
    BRMainBip32xpub,
    BRMainBech32
};
const BRChainParams *BRMainNetParams = &BRMainNetParamsRecord;
// Testnet chain parameters.
static const BRChainParams BRTestNetParamsRecord = {
    BRTestNetDNSSeeds,
    18333, // standardPort
    0x0709110b, // magicNumber
    SERVICES_NODE_WITNESS, // services
    BRTestNetVerifyDifficulty,
    BRTestNetCheckpoints,
    sizeof(BRTestNetCheckpoints)/sizeof(*BRTestNetCheckpoints),
    239, // 0xEF — presumably the testnet WIF private-key version byte
    111, // 0x6F — presumably the testnet P2PKH address version byte
    196, // 0xC4 — presumably the testnet P2SH address version byte
    BRTestBip32xprv,
    BRTestBip32xpub,
    BRTestBech32
};
const BRChainParams *BRTestNetParams = &BRTestNetParamsRecord;
// Regtest chain parameters. Reuses the testnet seeds/checkpoints/version
// bytes but with the regtest port, magic number, and an empty checkpoint
// list (regtest chains start from scratch).
static const BRChainParams BRRegtestParamsRecord = {
    BRTestNetDNSSeeds,
    18443, // standardPort
    0xdab5bffa, // magicNumber
    SERVICES_NODE_WITNESS, // services
    BRTestNetVerifyDifficulty,
    BRTestNetCheckpoints,
    0, // no checkpoints on regtest
    239,
    111,
    196,
    BRTestBip32xprv,
    BRTestBip32xpub,
    BRRegtestBech32
};
const BRChainParams *BRRegtestParams = &BRRegtestParamsRecord;

// Global network selector: nonzero = mainnet. NOTE(review): mutable global
// shared state — confirm it is only written during startup.
int spv_mainnet = 1;
| 4,403 |
1,792 | package org.hongxi.summer.exception;
/**
 * Business-level (biz) exception of the Summer RPC framework. All constructor
 * overloads delegate to {@link SummerAbstractException}, defaulting the error
 * descriptor to {@code SummerErrorMsgConstants.BIZ_DEFAULT_EXCEPTION} when no
 * explicit {@link SummerErrorMsg} is supplied.
 *
 * Created by shenhongxi on 2020/7/26.
 */
public class SummerBizException extends SummerAbstractException {
    private static final long serialVersionUID = -9030222846555573201L;

    /** Biz exception with the default error descriptor. */
    public SummerBizException() {
        super(SummerErrorMsgConstants.BIZ_DEFAULT_EXCEPTION);
    }

    /** Biz exception carrying an explicit error descriptor. */
    public SummerBizException(SummerErrorMsg summerErrorMsg) {
        super(summerErrorMsg);
    }

    /** Biz exception with a detail message and the default descriptor. */
    public SummerBizException(String message) {
        super(message, SummerErrorMsgConstants.BIZ_DEFAULT_EXCEPTION);
    }

    /** Biz exception with a detail message and an explicit descriptor. */
    public SummerBizException(String message, SummerErrorMsg summerErrorMsg) {
        super(message, summerErrorMsg);
    }

    /** Biz exception with a message, a cause, and the default descriptor. */
    public SummerBizException(String message, Throwable cause) {
        super(message, cause, SummerErrorMsgConstants.BIZ_DEFAULT_EXCEPTION);
    }

    /** Biz exception with a message, a cause, and an explicit descriptor. */
    public SummerBizException(String message, Throwable cause, SummerErrorMsg summerErrorMsg) {
        super(message, cause, summerErrorMsg);
    }

    /** Biz exception wrapping a cause with the default descriptor. */
    public SummerBizException(Throwable cause) {
        super(cause, SummerErrorMsgConstants.BIZ_DEFAULT_EXCEPTION);
    }

    /** Biz exception wrapping a cause with an explicit descriptor. */
    public SummerBizException(Throwable cause, SummerErrorMsg summerErrorMsg) {
        super(cause, summerErrorMsg);
    }
}
| 434 |
437 | package com.fasterxml.jackson.dataformat.xml.misc;
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIdentityInfo;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonSubTypes;
import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.annotation.ObjectIdGenerators;
import com.fasterxml.jackson.dataformat.xml.XmlMapper;
import com.fasterxml.jackson.dataformat.xml.XmlTestBase;
/**
 * Tests for XML serialization/deserialization of polymorphic types: class-name
 * type info as a property (attribute), as a wrapper object, combined with
 * object ids, and via type deduction.
 */
public class PolymorphicTypesTest extends XmlTestBase
{
    // Polymorphic base whose concrete class name is written as a property.
    @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, include=JsonTypeInfo.As.PROPERTY)
    static class BaseTypeWithClassProperty { }
    static class SubTypeWithClassProperty extends BaseTypeWithClassProperty {
        public String name;
        public SubTypeWithClassProperty() { }
        public SubTypeWithClassProperty(String s) { name = s; }
    }
    // Polymorphic base whose concrete class name is written as a wrapper object.
    @JsonTypeInfo(use=JsonTypeInfo.Id.CLASS, include=JsonTypeInfo.As.WRAPPER_OBJECT)
    protected static class BaseTypeWithClassObject { }
    protected static class SubTypeWithClassObject extends BaseTypeWithClassObject {
        public String name;
        public SubTypeWithClassObject() { }
        public SubTypeWithClassObject(String s) { name = s; }
    }
    // Class-property type info combined with an object id keyed on "id"
    // (exercised by testAsPropertyWithObjectId, for [dataformat-xml#81]).
    @JsonTypeInfo(use = JsonTypeInfo.Id.CLASS, include = JsonTypeInfo.As.PROPERTY)
    @JsonIdentityInfo(generator = ObjectIdGenerators.PropertyGenerator.class, property = "id")
    protected static class TypeWithClassPropertyAndObjectId {
        public String id;
        public TypeWithClassPropertyAndObjectId() {}
        public TypeWithClassPropertyAndObjectId(String id) { this.id = id; }
    }
    // Simple holder so the list above has a root element to serialize under.
    protected static class Wrapper {
        public List<TypeWithClassPropertyAndObjectId> data;
        public Wrapper(){}
        public Wrapper(List<TypeWithClassPropertyAndObjectId> data) { this.data = data; }
    }
    // [dataformat-xml#451]
    @JsonTypeInfo(use = JsonTypeInfo.Id.DEDUCTION)
    @JsonSubTypes(@JsonSubTypes.Type(Child451.class))
    public interface Value451 {}
    public static class Child451 implements Value451 {
        private final String property1;
        @JsonCreator(mode = JsonCreator.Mode.PROPERTIES)
        public Child451(@JsonProperty("property1") String property1) {
            this.property1 = property1;
        }
        public String getProperty1() {
            return property1;
        }
    }
    /*
    /**********************************************************************
    /* Test methods
    /**********************************************************************
     */
    private final XmlMapper MAPPER = newMapper();
    public void testAsClassProperty() throws Exception
    {
        String xml = MAPPER.writeValueAsString(new SubTypeWithClassProperty("Foobar"));
        // Type info should be written as an attribute, so:
        // NOTE(review): the ".." in the expected class name appears to be the
        // XML-safe mangling of the '$' in the inner-class binary name ('$' is
        // not a valid XML name character) — confirm against the XML module's
        // name-escaping rules.
        final String exp = 
            "<SubTypeWithClassProperty _class=\"com.fasterxml.jackson.dataformat.xml.misc.PolymorphicTypesTest..SubTypeWithClassProperty\">"
            //"<SubTypeWithClassProperty><_class>com.fasterxml.jackson.xml.types.TestPolymorphic..SubTypeWithClassProperty</_class>"
            +"<name>Foobar</name></SubTypeWithClassProperty>"
            ;
        assertEquals(exp, xml);
        // Round-trip: reading back must restore the concrete subtype.
        Object result = MAPPER.readValue(xml, BaseTypeWithClassProperty.class);
        assertNotNull(result);
        assertEquals(SubTypeWithClassProperty.class, result.getClass());
        assertEquals("Foobar", ((SubTypeWithClassProperty) result).name);
    }
    public void testAsClassObject() throws Exception
    {
        // No exact-XML assertion here; only the round-trip is verified.
        String xml = MAPPER.writeValueAsString(new SubTypeWithClassObject("Foobar"));
        Object result = MAPPER.readValue(xml, BaseTypeWithClassObject.class);
        assertNotNull(result);
        assertEquals(SubTypeWithClassObject.class, result.getClass());
        assertEquals("Foobar", ((SubTypeWithClassObject) result).name);
    }
    // Test for [dataformat-xml#81]
    public void testAsPropertyWithObjectId() throws Exception
    {
        List<TypeWithClassPropertyAndObjectId> data = new ArrayList<PolymorphicTypesTest.TypeWithClassPropertyAndObjectId>();
        TypeWithClassPropertyAndObjectId object = new TypeWithClassPropertyAndObjectId("Foobar");
        data.add(object);
        // This will be written as an id reference instead of object; as such, no type info will be written.
        data.add(object);
        String xml = MAPPER.writeValueAsString(new Wrapper(data));
        Wrapper result = MAPPER.readValue(xml, Wrapper.class);
        assertNotNull(result);
        // Both entries must resolve to the very same instance after reading.
        assertSame(result.data.get(0), result.data.get(1));
        assertEquals("Foobar", result.data.get(0).id);
    }
    // Test for [dataformat-xml#451]
    public void testDeduction() throws Exception
    {
        // With Id.DEDUCTION no explicit type marker is written; only the
        // subtype's own properties appear.
        String xml = MAPPER.writeValueAsString(new Child451("value1"));
        assertTrue(xml.contains("<property1>value1</property1>"));
        // and try reading back for funsies
        Value451 result = MAPPER.readValue(xml, Value451.class);
        assertNotNull(result);
        assertEquals(Child451.class, result.getClass());
    }
}
| 1,932 |
965 | // report-progress.cpp
// compile with: /EHsc
#include <agents.h>
#include <iostream>
using namespace concurrency;
using namespace std;
// Simulates a lengthy operation.
void perform_lengthy_operation()
{
// Yield the current context for one second.
wait(1000);
}
int wmain()
{
// Create a call object that prints a single character to the console.
call<wchar_t> report_progress([](wchar_t c) {
wcout << c;
});
// Create a timer object that sends the dot character to the
// call object every 100 milliseconds.
timer<wchar_t> progress_timer(100, L'.', &report_progress, true);
wcout << L"Performing a lengthy operation";
// Start the timer on a separate context.
progress_timer.start();
// Perform a lengthy operation on the main context.
perform_lengthy_operation();
// Stop the timer and print a message.
progress_timer.stop();
wcout << L"done.";
} | 297 |
360 | /*
* Copyright (c) 2020 Huawei Technologies Co.,Ltd.
*
* openGauss is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
* See the Mulan PSL v2 for more details.
* ---------------------------------------------------------------------------------------
*
* gtm_atomic.h
*
*
*
* IDENTIFICATION
* src/include/gtm/gtm_atomic.h
*
* ---------------------------------------------------------------------------------------
*/
#ifndef CODE_SRC_INCLUDE_GTM_GTM_ATOMIC_H_
#define CODE_SRC_INCLUDE_GTM_GTM_ATOMIC_H_
/*
 * Per-type stride for atomic arithmetic: 1 for ordinary (integral) types.
 * Presumably consumed by pointer-typed atomics elsewhere — unitSize is not
 * referenced in the visible part of this header; confirm at call sites.
 */
template <typename T>
struct GTM_AtomicStride {
    /* increment value when add it. */
    static const unsigned int unitSize = 1;
};
/* Specialization for pointers: step by the pointee size, matching C pointer
 * arithmetic. */
template <typename T>
struct GTM_AtomicStride<T *> {
    /* increment value when add it. */
    static const unsigned int unitSize = sizeof(T);
};
/**
* atomic operations used in gtm
*/
/*
 * NOTE: the read-modify-write operations below use GCC __sync builtins and
 * are fully atomic; load()/store() are plain volatile accesses and provide
 * no ordering by themselves (callers pair them with the GTM_*Fence helpers
 * declared later in this header).
 */
template <typename ValueType>
class GTM_Atomic {
public:
    /**
     * Construct an GTM_Atomic with an initial value (defaults to 0).
     */
    explicit GTM_Atomic(const ValueType value = 0) : m_value(value)
    {
    }
    ~GTM_Atomic() = default;
    /**
     * Atomically add 'increment' to the stored value
     * (__sync_fetch_and_add, full barrier).
     */
    void add(int64_t increment)
    {
        __sync_fetch_and_add(&m_value, increment);
    }
    /**
     * Atomic compare-and-swap: if the current value equals tmpValue,
     * replace it with newValue.
     * @return the value observed before the operation; it equals tmpValue
     *         exactly when the swap succeeded.
     */
    ValueType compareExchange(ValueType tmpValue, ValueType newValue)
    {
        tmpValue = __sync_val_compare_and_swap(&m_value, tmpValue, newValue);
        return tmpValue;
    }
    /**
     * Atomically replace the stored value with newValue.
     * @return the previous value.
     * NOTE(review): per the GCC docs __sync_lock_test_and_set is only an
     * acquire barrier, not a full fence — confirm callers do not rely on
     * stronger ordering here.
     */
    ValueType exchange(ValueType newValue)
    {
        newValue = __sync_lock_test_and_set(&m_value, newValue);
        return newValue;
    }
    /**
     * Atomically increment the value by one.
     */
    void inc()
    {
        add(1);
    }
    /**
     * Return the current value (plain volatile read, no fence).
     * const so that const-qualified atomics can still be read.
     */
    ValueType load() const
    {
        return m_value;
    }
    /**
     * Assign a new value (plain volatile write, no fence).
     */
    GTM_Atomic<ValueType>& operator=(ValueType newValue)
    {
        store(newValue);
        return *this;
    }
    /**
     * Implicit conversion returning the current value.
     */
    operator ValueType() const
    {
        return load();
    }
    /**
     * Pre-increment: atomically add one, return *this.
     */
    const GTM_Atomic<ValueType>& operator++()
    {
        inc();
        return *this;
    }
    /** Post-increment: returns a copy holding the value before the add. */
    const GTM_Atomic<ValueType> operator++(int)
    {
        GTM_Atomic<ValueType> tmp = *this;
        inc();
        return tmp;
    }
    /**
     * Pre-decrement: atomically subtract one, return *this.
     */
    const GTM_Atomic<ValueType>& operator--()
    {
        add(-1);
        return *this;
    }
    /** Post-decrement: returns a copy holding the value before the subtract. */
    const GTM_Atomic<ValueType> operator--(int)
    {
        GTM_Atomic<ValueType> tmp = *this;
        add(-1);
        return tmp;
    }
    /**
     * Set the value (plain volatile write, not a fenced atomic store).
     */
    void store(ValueType newValue)
    {
        m_value = newValue;
    }
protected:
    /* The value on which the atomic operations operate. */
    volatile ValueType m_value;
};
/**
* This method provides appropriate fencing for the end of a critical
* section. It guarantees the following:
* - Loads coming from code preceding this method will complete before the
* method returns, so they will not see any changes made to memory by other
* threads after the method is invoked.
* - Stores coming from code preceding this method will be reflected
* in memory before the method returns, so when the next thread enters
* the critical section it is guaranteed to see any changes made in the
* current critical section.
*/
static inline void GTM_loadStoreFence()
{
#ifdef __aarch64__
    /* Full inner-shareable data memory barrier on ARM64. */
    __asm__ __volatile__("DMB ish" ::: "memory");
#else
    /* NOTE(review): on x86, lfence+sfence orders loads with loads and stores
     * with stores, but does not prevent StoreLoad reordering; a full barrier
     * would be mfence. Confirm callers only need the acquire/release-style
     * guarantees described in the comment above. */
    __asm__ __volatile__("lfence" ::: "memory");
    __asm__ __volatile__("sfence" ::: "memory");
#endif
}
/* Load barrier: serializes preceding loads before subsequent loads
 * (x86 lfence plus a compiler barrier).
 * NOTE(review): unlike GTM_loadStoreFence there is no __aarch64__ branch
 * here, so this will not assemble on ARM — confirm it is x86-only. */
static inline void GTM_loadFence()
{
    __asm__ __volatile__("lfence" ::: "memory");
}
/* Store barrier: serializes preceding stores before subsequent stores
 * (x86 sfence plus a compiler barrier).
 * NOTE(review): no __aarch64__ branch here either — confirm x86-only use. */
static inline void GTM_StoreFence()
{
    __asm__ __volatile__("sfence" ::: "memory");
}
#endif /* CODE_SRC_INCLUDE_GTM_GTM_ATOMIC_H_ */
| 1,668 |
16,461 | // Copyright 2015-present 650 Industries. All rights reserved.
package host.exp.exponent.generated;
// Holds analytics API keys. The "${...}" values are template placeholders,
// presumably substituted by a code-generation/build step (the package is
// ...generated) — confirm against the build configuration.
public class ExponentKeys {
    public static final String AMPLITUDE_KEY = "${AMPLITUDE_KEY}";
    public static final String AMPLITUDE_DEV_KEY = "${AMPLITUDE_DEV_KEY}";
}
| 90 |
14,668 | <reponame>zealoussnow/chromium
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_PERFORMANCE_MANAGER_PUBLIC_GRAPH_SYSTEM_NODE_H_
#define COMPONENTS_PERFORMANCE_MANAGER_PUBLIC_GRAPH_SYSTEM_NODE_H_
#include "base/memory/memory_pressure_listener.h"
#include "components/performance_manager/public/graph/node.h"
namespace performance_manager {
class SystemNodeObserver;
// The SystemNode represents system-wide state. Each graph owns exactly one
// system node. This node has the same lifetime has the graph that owns it.
class SystemNode : public Node {
 public:
  // Alias so generic graph code can name this node's observer type.
  using Observer = SystemNodeObserver;
  using MemoryPressureLevel = base::MemoryPressureListener::MemoryPressureLevel;

  class ObserverDefaultImpl;

  SystemNode();
  // Not copyable: the graph owns exactly one system node (see class comment).
  SystemNode(const SystemNode&) = delete;
  SystemNode& operator=(const SystemNode&) = delete;
  ~SystemNode() override;
};
// Pure virtual observer interface. Derive from this if you want to be forced to
// implement the entire interface.
class SystemNodeObserver {
 public:
  SystemNodeObserver();
  // Observers are not copyable.
  SystemNodeObserver(const SystemNodeObserver&) = delete;
  SystemNodeObserver& operator=(const SystemNodeObserver&) = delete;
  // Virtual so implementations can be destroyed through this interface.
  virtual ~SystemNodeObserver();

  // Called when a new set of process memory metrics is available.
  //
  // Note: This is only valid if at least one component has expressed interest
  // for process memory metrics by calling
  // ProcessMetricsDecorator::RegisterInterestForProcessMetrics.
  virtual void OnProcessMemoryMetricsAvailable(
      const SystemNode* system_node) = 0;

  // Called before OnMemoryPressure(). This can be used to track state before
  // memory start being released in response to memory pressure.
  //
  // Note: This is guaranteed to be invoked before OnMemoryPressure(), but
  // will not necessarily be called before base::MemoryPressureListeners
  // are notified.
  virtual void OnBeforeMemoryPressure(
      base::MemoryPressureListener::MemoryPressureLevel new_level) = 0;

  // Called when the system is under memory pressure. Observers may start
  // releasing memory in response to memory pressure.
  //
  // NOTE: This isn't called for a transition to the MEMORY_PRESSURE_LEVEL_NONE
  // level. For this reason there's no corresponding property in this node and
  // the response to these notifications should be stateless.
  virtual void OnMemoryPressure(
      base::MemoryPressureListener::MemoryPressureLevel new_level) = 0;
};
// Default implementation of observer that provides dummy versions of each
// function. Derive from this if you only need to implement a few of the
// functions.
class SystemNode::ObserverDefaultImpl : public SystemNodeObserver {
 public:
  ObserverDefaultImpl();
  ObserverDefaultImpl(const ObserverDefaultImpl&) = delete;
  ObserverDefaultImpl& operator=(const ObserverDefaultImpl&) = delete;
  ~ObserverDefaultImpl() override;

  // SystemNodeObserver implementation:
  // Each override below is an intentional no-op; subclasses override only
  // the notifications they care about.
  void OnProcessMemoryMetricsAvailable(const SystemNode* system_node) override {
  }
  void OnBeforeMemoryPressure(
      base::MemoryPressureListener::MemoryPressureLevel new_level) override {}
  void OnMemoryPressure(
      base::MemoryPressureListener::MemoryPressureLevel new_level) override {}
};
} // namespace performance_manager
#endif // COMPONENTS_PERFORMANCE_MANAGER_PUBLIC_GRAPH_SYSTEM_NODE_H_
| 943 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.