Vue.component('dropdown', {
template: '#tmpl-dropdown',
props: {
id: {
type: String,
},
visible: {
type: Boolean,
default: false,
},
items: {
type: Array,
default: () => [],
},
classes: {
type: Array,
default: () => [],
},
},
methods: {
clicked: function(item) {
if (item.click && !item.disabled) {
item.click();
}
if (!item.preventClose) {
closeDropdown();
}
},
},
});
let openedDropdown;
let clickHndl = function(event) {
if (!openedDropdown) {
return;
}
let element = event.target;
while (element) {
if (element === openedDropdown) {
return;
}
element = element.parentElement;
}
// Click outside the dropdown, close it
closeDropdown();
};
function closeDropdown() {
if (!openedDropdown) {
return;
}
document.removeEventListener('click', clickHndl);
if (openedDropdown.className.indexOf('hidden') < 0) {
openedDropdown.className = (openedDropdown.className + ' hidden').trim();
}
openedDropdown = undefined;
}
function openDropdown(element) {
element = parseElement(element);
if (!element) {
console.error('Invalid dropdown element');
return;
}
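// Note: relies on the implicit global window.event from the triggering click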
event.stopPropagation();
closeDropdown();
if (getComputedStyle(element.parentElement).position === 'relative') {
// Position the dropdown relatively to the parent
element.style.left = (window.event.clientX - element.parentElement.offsetLeft + element.parentElement.scrollLeft) + 'px';
element.style.top = (window.event.clientY - element.parentElement.offsetTop + element.parentElement.scrollTop) + 'px';
} else {
// Position the dropdown absolutely on the window
element.style.left = (window.event.clientX + window.scrollX) + 'px';
element.style.top = (window.event.clientY + window.scrollY) + 'px';
}
document.addEventListener('click', clickHndl);
element.className = element.className.split(' ').filter(c => c !== 'hidden').join(' ');
openedDropdown = element;
const maxLeft = Math.min(window.innerWidth, element.parentElement.clientWidth) + element.parentElement.scrollLeft;
const maxTop = Math.min(window.innerHeight, element.parentElement.clientHeight) + element.parentElement.scrollTop;
if (element.parentElement.offsetLeft + element.offsetLeft + parseFloat(getComputedStyle(element).width) >= maxLeft) {
if (parseFloat(element.style.left) - parseFloat(getComputedStyle(element).width) >= 0) {
element.style.left = (parseFloat(element.style.left) - parseFloat(getComputedStyle(element).width)) + 'px';
}
}
if (element.parentElement.offsetTop + element.offsetTop + parseFloat(getComputedStyle(element).height) >= maxTop) {
if (parseFloat(element.style.top) - parseFloat(getComputedStyle(element).height) >= 0) {
element.style.top = (parseFloat(element.style.top) - parseFloat(getComputedStyle(element).height)) + 'px';
}
}
if (parseFloat(element.style.left) < 0) {
element.style.left = 0;
}
if (parseFloat(element.style.top) < 0) {
element.style.top = 0;
}
}
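// A minimal wiring sketch (element ids are illustrative; parseElement() is
// assumed to accept either an id string or a DOM node):
//
//   document.getElementById('menu-button').addEventListener('click', function () {
//     openDropdown(document.getElementById('menu-dropdown'));
//   });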
const express = require('express')
const router = express()
const { createWebAPIRequest } = require('../util/util')
router.get('/', (req, res) => {
const cookie = req.get('Cookie') ? req.get('Cookie') : ''
const data = {
mvid: req.query.mvid
}
createWebAPIRequest(
'music.163.com',
'/weapi/discovery/simiMV',
'POST',
data,
cookie,
music_req => res.send(music_req),
err => res.status(502).send('fetch error')
)
})
module.exports = router
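// A minimal mounting sketch (file and route paths are illustrative; this module
// exports an Express app, which Express 4 allows to be mounted as middleware):
//
//   const express = require('express')
//   const app = express()
//   app.use('/simi/mv', require('./router/simi_mv'))
//   app.listen(3000)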
///////////////////////////////////////////////////////////////////////////////////
/// OpenGL Mathematics (glm.g-truc.net)
///
/// Copyright (c) 2005 - 2015 G-Truc Creation (www.g-truc.net)
/// Permission is hereby granted, free of charge, to any person obtaining a copy
/// of this software and associated documentation files (the "Software"), to deal
/// in the Software without restriction, including without limitation the rights
/// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
/// copies of the Software, and to permit persons to whom the Software is
/// furnished to do so, subject to the following conditions:
///
/// The above copyright notice and this permission notice shall be included in
/// all copies or substantial portions of the Software.
///
/// Restrictions:
/// By making use of the Software for military purposes, you choose to make
/// a Bunny unhappy.
///
/// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
/// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
/// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
/// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
/// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
/// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
/// THE SOFTWARE.
///
/// @ref core
/// @file glm/mat2x3.hpp
/// @date 2013-12-24 / 2013-12-24
/// @author Christophe Riccio
///////////////////////////////////////////////////////////////////////////////////
#pragma once
#include "detail/type_mat2x3.hpp"
namespace glm
{
/// 2 columns of 3 components matrix of low precision floating-point numbers.
/// There is no guarantee on the actual precision.
///
/// @see <a href="http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf">GLSL 4.20.8 specification, section 4.1.6 Matrices</a>
/// @see <a href="http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf">GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier</a>
typedef tmat2x3<float, lowp> lowp_mat2x3;
/// 2 columns of 3 components matrix of medium precision floating-point numbers.
/// There is no guarantee on the actual precision.
///
/// @see <a href="http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf">GLSL 4.20.8 specification, section 4.1.6 Matrices</a>
/// @see <a href="http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf">GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier</a>
typedef tmat2x3<float, mediump> mediump_mat2x3;
/// 2 columns of 3 components matrix of high precision floating-point numbers.
/// There is no guarantee on the actual precision.
///
/// @see <a href="http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf">GLSL 4.20.8 specification, section 4.1.6 Matrices</a>
/// @see <a href="http://www.opengl.org/registry/doc/GLSLangSpec.4.20.8.pdf">GLSL 4.20.8 specification, section 4.7.2 Precision Qualifier</a>
typedef tmat2x3<float, highp> highp_mat2x3;
}//namespace glm
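// A minimal usage sketch (assumption: the GLM headers are on the include path):
//
//   #include <glm/mat2x3.hpp>
//
//   int main()
//   {
//       glm::mediump_mat2x3 m(1.0f); // scalar constructor initializes the diagonal
//       return 0;
//   }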
/**
* Copyright 1993-2013 NVIDIA Corporation. All rights reserved.
*
* Please refer to the NVIDIA end user license agreement (EULA) associated
* with this source code for terms and conditions that govern your use of
* this software. Any use, reproduction, disclosure, or distribution of
* this software and related documentation outside the terms of the EULA
* is strictly prohibited.
*
*/
// Helper functions for CUDA Driver API error handling (make sure that CUDA_H is included in your projects)
#ifndef HELPER_CUDA_DRVAPI_H
#define HELPER_CUDA_DRVAPI_H
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <helper_string.h>
#include <drvapi_error_string.h>
#ifndef MAX
#define MAX(a,b) (((a) > (b)) ? (a) : (b))
#endif
#ifndef EXIT_WAIVED
#define EXIT_WAIVED 2
#endif
////////////////////////////////////////////////////////////////////////////////
// These are CUDA Helper functions
// To add a level of protection to the CUDA SDK samples, force them to explicitly include cuda.h
#ifdef __cuda_cuda_h__
// This will output the proper CUDA error strings in the event that a CUDA host call returns an error
#ifndef checkCudaErrors
#define checkCudaErrors(err) __checkCudaErrors (err, __FILE__, __LINE__)
// These are the inline versions for all of the SDK helper functions
inline void __checkCudaErrors(CUresult err, const char *file, const int line)
{
if (CUDA_SUCCESS != err)
{
fprintf(stderr, "checkCudaErrors() Driver API error = %04d \"%s\" from file <%s>, line %i.\n",
err, getCudaDrvErrorString(err), file, line);
exit(EXIT_FAILURE);
}
}
#endif
#ifdef getLastCudaDrvErrorMsg
#undef getLastCudaDrvErrorMsg
#endif
#define getLastCudaDrvErrorMsg(msg) __getLastCudaDrvErrorMsg (msg, __FILE__, __LINE__)
inline void __getLastCudaDrvErrorMsg(const char *msg, const char *file, const int line)
{
CUresult err = cuCtxSynchronize();
if (CUDA_SUCCESS != err)
{
fprintf(stderr, "getLastCudaDrvErrorMsg -> %s", msg);
fprintf(stderr, "getLastCudaDrvErrorMsg -> cuCtxSynchronize API error = %04d \"%s\" in file <%s>, line %i.\n",
err, getCudaDrvErrorString(err), file, line);
exit(EXIT_FAILURE);
}
}
// This function wraps the CUDA Driver API into a template function
template <class T>
inline void getCudaAttribute(T *attribute, CUdevice_attribute device_attribute, int device)
{
CUresult error_result = cuDeviceGetAttribute(attribute, device_attribute, device);
if (error_result != CUDA_SUCCESS)
{
printf("cuDeviceGetAttribute returned %d\n-> %s\n", (int)error_result, getCudaDrvErrorString(error_result));
exit(EXIT_SUCCESS);
}
}
#endif
// Beginning of GPU Architecture definitions
inline int _ConvertSMVer2CoresDRV(int major, int minor)
{
// Defines for GPU Architecture types (using the SM version to determine the # of cores per SM)
typedef struct
{
int SM; // 0xMm (hexadecimal notation), M = SM Major version, and m = SM minor version
int Cores;
} sSMtoCores;
sSMtoCores nGpuArchCoresPerSM[] =
{
{ 0x10, 8 }, // Tesla Generation (SM 1.0) G80 class
{ 0x11, 8 }, // Tesla Generation (SM 1.1) G8x class
{ 0x12, 8 }, // Tesla Generation (SM 1.2) G9x class
{ 0x13, 8 }, // Tesla Generation (SM 1.3) GT200 class
{ 0x20, 32 }, // Fermi Generation (SM 2.0) GF100 class
{ 0x21, 48 }, // Fermi Generation (SM 2.1) GF10x class
{ 0x30, 192}, // Kepler Generation (SM 3.0) GK10x class
{ 0x35, 192}, // Kepler Generation (SM 3.5) GK11x class
{ -1, -1 }
};
int index = 0;
while (nGpuArchCoresPerSM[index].SM != -1)
{
if (nGpuArchCoresPerSM[index].SM == ((major << 4) + minor))
{
return nGpuArchCoresPerSM[index].Cores;
}
index++;
}
// If we don't find the values, we default to using the previous one to run properly
printf("MapSMtoCores for SM %d.%d is undefined. Default to use %d Cores/SM\n", major, minor, nGpuArchCoresPerSM[7].Cores);
return nGpuArchCoresPerSM[7].Cores;
}
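// For example, SM 3.0 is encoded as (3 << 4) + 0 = 0x30, so
// _ConvertSMVer2CoresDRV(3, 0) returns 192 cores per multiprocessor.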
// end of GPU Architecture definitions
#ifdef __cuda_cuda_h__
// General GPU Device CUDA Initialization
inline int gpuDeviceInitDRV(int ARGC, const char **ARGV)
{
int cuDevice = 0;
int deviceCount = 0;
CUresult err = cuInit(0);
if (CUDA_SUCCESS == err)
{
checkCudaErrors(cuDeviceGetCount(&deviceCount));
}
if (deviceCount == 0)
{
fprintf(stderr, "cudaDeviceInit error: no devices supporting CUDA\n");
exit(EXIT_FAILURE);
}
int dev = 0;
dev = getCmdLineArgumentInt(ARGC, (const char **) ARGV, "device=");
if (dev < 0)
{
dev = 0;
}
if (dev > deviceCount-1)
{
fprintf(stderr, "\n");
fprintf(stderr, ">> %d CUDA capable GPU device(s) detected. <<\n", deviceCount);
fprintf(stderr, ">> cudaDeviceInit (-device=%d) is not a valid GPU device. <<\n", dev);
fprintf(stderr, "\n");
return -dev;
}
checkCudaErrors(cuDeviceGet(&cuDevice, dev));
char name[100];
cuDeviceGetName(name, 100, cuDevice);
int computeMode;
getCudaAttribute<int>(&computeMode, CU_DEVICE_ATTRIBUTE_COMPUTE_MODE, dev);
if (computeMode == CU_COMPUTEMODE_PROHIBITED)
{
fprintf(stderr, "Error: device is running in <CU_COMPUTEMODE_PROHIBITED>, no threads can use this CUDA Device.\n");
return -1;
}
if (checkCmdLineFlag(ARGC, (const char **) ARGV, "quiet") == false)
{
printf("gpuDeviceInitDRV() Using CUDA Device [%d]: %s\n", dev, name);
}
return dev;
}
// This function returns the best GPU based on performance
inline int gpuGetMaxGflopsDeviceIdDRV()
{
CUdevice current_device = 0, max_perf_device = 0;
int device_count = 0, sm_per_multiproc = 0;
int max_compute_perf = 0, best_SM_arch = 0;
int major = 0, minor = 0, multiProcessorCount, clockRate;
cuInit(0);
checkCudaErrors(cuDeviceGetCount(&device_count));
if (device_count == 0)
{
fprintf(stderr, "gpuGetMaxGflopsDeviceIdDRV error: no devices supporting CUDA\n");
exit(EXIT_FAILURE);
}
// Find the best major SM Architecture GPU device
while (current_device < device_count)
{
checkCudaErrors(cuDeviceComputeCapability(&major, &minor, current_device));
if (major > 0 && major < 9999)
{
best_SM_arch = MAX(best_SM_arch, major);
}
current_device++;
}
// Find the best CUDA capable GPU device
current_device = 0;
while (current_device < device_count)
{
checkCudaErrors(cuDeviceGetAttribute(&multiProcessorCount,
CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT,
current_device));
checkCudaErrors(cuDeviceGetAttribute(&clockRate,
CU_DEVICE_ATTRIBUTE_CLOCK_RATE,
current_device));
checkCudaErrors(cuDeviceComputeCapability(&major, &minor, current_device));
int computeMode;
getCudaAttribute<int>(&computeMode, CU_DEVICE_ATTRIBUTE_COMPUTE_MODE, current_device);
if (computeMode != CU_COMPUTEMODE_PROHIBITED)
{
if (major == 9999 && minor == 9999)
{
sm_per_multiproc = 1;
}
else
{
sm_per_multiproc = _ConvertSMVer2CoresDRV(major, minor);
}
int compute_perf = multiProcessorCount * sm_per_multiproc * clockRate;
if (compute_perf > max_compute_perf)
{
// If we find GPU with SM major > 2, search only these
if (best_SM_arch > 2)
{
// If our device == best_SM_arch, choose this one; otherwise pass
if (major == best_SM_arch)
{
max_compute_perf = compute_perf;
max_perf_device = current_device;
}
}
else
{
max_compute_perf = compute_perf;
max_perf_device = current_device;
}
}
}
++current_device;
}
return max_perf_device;
}
// This function returns the best Graphics GPU based on performance
inline int gpuGetMaxGflopsGLDeviceIdDRV()
{
CUdevice current_device = 0, max_perf_device = 0;
int device_count = 0, sm_per_multiproc = 0;
int max_compute_perf = 0, best_SM_arch = 0;
int major = 0, minor = 0, multiProcessorCount, clockRate;
int bTCC = 0;
char deviceName[256];
cuInit(0);
checkCudaErrors(cuDeviceGetCount(&device_count));
if (device_count == 0)
{
fprintf(stderr, "gpuGetMaxGflopsGLDeviceIdDRV error: no devices supporting CUDA\n");
exit(EXIT_FAILURE);
}
// Find the best major SM Architecture GPU device that are graphics devices
while (current_device < device_count)
{
checkCudaErrors(cuDeviceGetName(deviceName, 256, current_device));
checkCudaErrors(cuDeviceComputeCapability(&major, &minor, current_device));
#if CUDA_VERSION >= 3020
checkCudaErrors(cuDeviceGetAttribute(&bTCC, CU_DEVICE_ATTRIBUTE_TCC_DRIVER, current_device));
#else
// Assume a Tesla GPU is running in TCC if we are running CUDA 3.1
if (deviceName[0] == 'T')
{
bTCC = 1;
}
#endif
int computeMode;
getCudaAttribute<int>(&computeMode, CU_DEVICE_ATTRIBUTE_COMPUTE_MODE, current_device);
if (computeMode != CU_COMPUTEMODE_PROHIBITED)
{
if (!bTCC)
{
if (major > 0 && major < 9999)
{
best_SM_arch = MAX(best_SM_arch, major);
}
}
}
current_device++;
}
// Find the best CUDA capable GPU device
current_device = 0;
while (current_device < device_count)
{
checkCudaErrors(cuDeviceGetAttribute(&multiProcessorCount,
CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT,
current_device));
checkCudaErrors(cuDeviceGetAttribute(&clockRate,
CU_DEVICE_ATTRIBUTE_CLOCK_RATE,
current_device));
checkCudaErrors(cuDeviceComputeCapability(&major, &minor, current_device));
#if CUDA_VERSION >= 3020
checkCudaErrors(cuDeviceGetAttribute(&bTCC, CU_DEVICE_ATTRIBUTE_TCC_DRIVER, current_device));
#else
// Assume a Tesla GPU is running in TCC if we are running CUDA 3.1
if (deviceName[0] == 'T')
{
bTCC = 1;
}
#endif
int computeMode;
getCudaAttribute<int>(&computeMode, CU_DEVICE_ATTRIBUTE_COMPUTE_MODE, current_device);
if (computeMode != CU_COMPUTEMODE_PROHIBITED)
{
if (major == 9999 && minor == 9999)
{
sm_per_multiproc = 1;
}
else
{
sm_per_multiproc = _ConvertSMVer2CoresDRV(major, minor);
}
// If this is a Tesla-based GPU and SM 2.0, and TCC is disabled, this is a contender
if (!bTCC) // Is this GPU running the TCC driver? If so we pass on this
{
int compute_perf = multiProcessorCount * sm_per_multiproc * clockRate;
if (compute_perf > max_compute_perf)
{
// If we find GPU with SM major > 2, search only these
if (best_SM_arch > 2)
{
// If our device == best_SM_arch, then we pick this one
if (major == best_SM_arch)
{
max_compute_perf = compute_perf;
max_perf_device = current_device;
}
}
else
{
max_compute_perf = compute_perf;
max_perf_device = current_device;
}
}
}
}
++current_device;
}
return max_perf_device;
}
// General initialization call to pick the best CUDA Device
inline CUdevice findCudaDeviceDRV(int argc, const char **argv)
{
CUdevice cuDevice;
int devID = 0;
// If the command-line has a device number specified, use it
if (checkCmdLineFlag(argc, (const char **)argv, "device"))
{
devID = gpuDeviceInitDRV(argc, argv);
if (devID < 0)
{
printf("exiting...\n");
exit(EXIT_SUCCESS);
}
}
else
{
// Otherwise pick the device with highest Gflops/s
char name[100];
devID = gpuGetMaxGflopsDeviceIdDRV();
checkCudaErrors(cuDeviceGet(&cuDevice, devID));
cuDeviceGetName(name, 100, cuDevice);
printf("> Using CUDA Device [%d]: %s\n", devID, name);
}
cuDeviceGet(&cuDevice, devID);
return cuDevice;
}
// This function will pick the best CUDA device available with OpenGL interop
inline CUdevice findCudaGLDeviceDRV(int argc, const char **argv)
{
CUdevice cuDevice;
int devID = 0;
// If the command-line has a device number specified, use it
if (checkCmdLineFlag(argc, (const char **)argv, "device"))
{
devID = gpuDeviceInitDRV(argc, (const char **)argv);
if (devID < 0)
{
printf("no CUDA capable devices found, exiting...\n");
exit(EXIT_SUCCESS);
}
}
else
{
char name[100];
// Otherwise pick the device with highest Gflops/s
devID = gpuGetMaxGflopsGLDeviceIdDRV();
checkCudaErrors(cuDeviceGet(&cuDevice, devID));
cuDeviceGetName(name, 100, cuDevice);
printf("> Using CUDA/GL Device [%d]: %s\n", devID, name);
}
return devID;
}
// General check for CUDA GPU SM Capabilities
inline bool checkCudaCapabilitiesDRV(int major_version, int minor_version, int devID)
{
CUdevice cuDevice;
char name[256];
int major = 0, minor = 0;
checkCudaErrors(cuDeviceGet(&cuDevice, devID));
checkCudaErrors(cuDeviceGetName(name, 100, cuDevice));
checkCudaErrors(cuDeviceComputeCapability(&major, &minor, devID));
if ((major > major_version) ||
(major == major_version && minor >= minor_version))
{
printf("> Device %d: <%16s >, Compute SM %d.%d detected\n", devID, name, major, minor);
return true;
}
else
{
printf("No GPU device was found that can support CUDA compute capability %d.%d.\n", major_version, minor_version);
return false;
}
}
#endif
// end of CUDA Helper Functions
#endif
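// A minimal usage sketch (assumptions: cuda.h is included before this header so
// that __cuda_cuda_h__ is defined, and the binary links against the CUDA Driver API):
//
//   #include <cuda.h>
//   #include "helper_cuda_drvapi.h"
//
//   int main(int argc, char **argv)
//   {
//       CUcontext ctx;
//       CUdevice dev = findCudaDeviceDRV(argc, (const char **)argv);
//       checkCudaErrors(cuCtxCreate(&ctx, 0, dev));
//       checkCudaErrors(cuCtxDestroy(ctx));
//       return 0;
//   }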
defmodule Absinthe.Execution.SubscriptionTest do
use Absinthe.Case
import ExUnit.CaptureLog
defmodule PubSub do
@behaviour Absinthe.Subscription.Pubsub
def start_link() do
Registry.start_link(keys: :unique, name: __MODULE__)
end
def node_name() do
node()
end
def subscribe(topic) do
Registry.register(__MODULE__, topic, [])
:ok
end
def publish_subscription(topic, data) do
message = %{
topic: topic,
event: "subscription:data",
result: data
}
Registry.dispatch(__MODULE__, topic, fn entries ->
for {pid, _} <- entries, do: send(pid, {:broadcast, message})
end)
end
def publish_mutation(_proxy_topic, _mutation_result, _subscribed_fields) do
# this pubsub is local and doesn't support clusters
:ok
end
end
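# A minimal sketch of how this pubsub is exercised (topic string is illustrative;
# the Registry must be started first, as done in setup_all below):
#
#   {:ok, _} = PubSub.start_link()
#   PubSub.subscribe("some-topic")
#   PubSub.publish_subscription("some-topic", %{data: %{"thing" => "foo"}})
#   receive do
#     {:broadcast, msg} -> msg.result
#   end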
defmodule Schema do
use Absinthe.Schema
query do
# Query type must exist
end
object :user do
field :id, :id
field :name, :string
field :group, :group do
resolve fn user, _, %{context: %{test_pid: pid}} ->
batch({__MODULE__, :batch_get_group, pid}, nil, fn _results ->
{:ok, user.group}
end)
end
end
end
object :group do
field :name, :string
end
def batch_get_group(test_pid, _) do
# send a message to the test process every time we access this function.
# if batching is working properly, it should only happen once.
send(test_pid, :batch_get_group)
%{}
end
subscription do
field :raises, :string do
config fn _, _ ->
{:ok, topic: "*"}
end
resolve fn _, _, _ ->
raise "boom"
end
end
field :user, :user do
arg :id, :id
config fn args, _ ->
{:ok, topic: args[:id] || "*"}
end
trigger :update_user,
topic: fn user ->
[user.id, "*"]
end
end
field :thing, :string do
arg :client_id, non_null(:id)
config fn
_args, %{context: %{authorized: false}} ->
{:error, "unauthorized"}
args, _ ->
{
:ok,
topic: args.client_id
}
end
end
field :multiple_topics, :string do
config fn _, _ ->
{:ok, topic: ["topic_1", "topic_2", "topic_3"]}
end
end
field :other_user, :user do
arg :id, :id
config fn
args, %{context: %{context_id: context_id, document_id: document_id}} ->
{:ok, topic: args[:id] || "*", context_id: context_id, document_id: document_id}
args, %{context: %{context_id: context_id}} ->
{:ok, topic: args[:id] || "*", context_id: context_id}
end
end
field :relies_on_document, :string do
config fn _, %{document: %Absinthe.Blueprint{} = document} ->
%{type: :subscription, name: op_name} = Absinthe.Blueprint.current_operation(document)
{:ok, topic: "*", context_id: "*", document_id: op_name}
end
end
end
mutation do
field :update_user, :user do
arg :id, non_null(:id)
resolve fn _, %{id: id}, _ ->
{:ok, %{id: id, name: "foo"}}
end
end
end
end
setup_all do
{:ok, _} = PubSub.start_link()
{:ok, _} = Absinthe.Subscription.start_link(PubSub)
:ok
end
@query """
subscription ($clientId: ID!) {
thing(clientId: $clientId)
}
"""
test "can subscribe the current process" do
client_id = "abc"
assert {:ok, %{"subscribed" => topic}} =
run_subscription(
@query,
Schema,
variables: %{"clientId" => client_id},
context: %{pubsub: PubSub}
)
Absinthe.Subscription.publish(PubSub, "foo", thing: client_id)
assert_receive({:broadcast, msg})
assert %{
event: "subscription:data",
result: %{data: %{"thing" => "foo"}},
topic: topic
} == msg
end
@query """
subscription ($clientId: ID!) {
thing(clientId: $clientId)
}
"""
test "can unsubscribe the current process" do
client_id = "abc"
assert {:ok, %{"subscribed" => topic}} =
run_subscription(
@query,
Schema,
variables: %{"clientId" => client_id},
context: %{pubsub: PubSub}
)
Absinthe.Subscription.unsubscribe(PubSub, topic)
Absinthe.Subscription.publish(PubSub, "foo", thing: client_id)
refute_receive({:broadcast, _})
end
@query """
subscription {
multipleTopics
}
"""
test "schema can provide multiple topics to subscribe to" do
assert {:ok, %{"subscribed" => topic}} =
run_subscription(
@query,
Schema,
variables: %{},
context: %{pubsub: PubSub}
)
msg = %{
event: "subscription:data",
result: %{data: %{"multipleTopics" => "foo"}},
topic: topic
}
Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_1")
assert_receive({:broadcast, ^msg})
Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_2")
assert_receive({:broadcast, ^msg})
Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_3")
assert_receive({:broadcast, ^msg})
end
@query """
subscription {
multipleTopics
}
"""
test "unsubscription works when multiple topics are provided" do
assert {:ok, %{"subscribed" => topic}} =
run_subscription(
@query,
Schema,
variables: %{},
context: %{pubsub: PubSub}
)
Absinthe.Subscription.unsubscribe(PubSub, topic)
Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_1")
refute_receive({:broadcast, _})
Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_2")
refute_receive({:broadcast, _})
Absinthe.Subscription.publish(PubSub, "foo", multiple_topics: "topic_3")
refute_receive({:broadcast, _})
end
@query """
subscription ($clientId: ID!) {
thing(clientId: $clientId, extra: 1)
}
"""
test "can return errors properly" do
assert {
:ok,
%{
errors: [
%{
locations: [%{column: 30, line: 2}],
message:
"Unknown argument \"extra\" on field \"thing\" of type \"RootSubscriptionType\"."
}
]
}
} ==
run_subscription(@query, Schema,
variables: %{"clientId" => "abc"},
context: %{pubsub: PubSub}
)
end
@query """
subscription ($userId: ID!) {
user(id: $userId) { id name }
}
"""
test "subscription triggers work" do
id = "1"
assert {:ok, %{"subscribed" => topic}} =
run_subscription(
@query,
Schema,
variables: %{"userId" => id},
context: %{pubsub: PubSub}
)
mutation = """
mutation ($userId: ID!) {
updateUser(id: $userId) { id name }
}
"""
assert {:ok, %{data: _}} =
run_subscription(mutation, Schema,
variables: %{"userId" => id},
context: %{pubsub: PubSub}
)
assert_receive({:broadcast, msg})
assert %{
event: "subscription:data",
result: %{data: %{"user" => %{"id" => "1", "name" => "foo"}}},
topic: topic
} == msg
end
@query """
subscription ($clientId: ID!) {
thing(clientId: $clientId)
}
"""
test "can return an error tuple from the topic function" do
assert {:ok, %{errors: [%{locations: [%{column: 3, line: 2}], message: "unauthorized"}]}} ==
run_subscription(
@query,
Schema,
variables: %{"clientId" => "abc"},
context: %{pubsub: PubSub, authorized: false}
)
end
@query """
subscription Example {
reliesOnDocument
}
"""
test "topic function receives a document" do
assert {:ok, %{"subscribed" => _topic}} =
run_subscription(@query, Schema, context: %{pubsub: PubSub})
end
@query """
subscription ($clientId: ID!) {
thing(clientId: $clientId)
}
"""
test "stringifies topics" do
assert {:ok, %{"subscribed" => topic}} =
run_subscription(@query, Schema,
variables: %{"clientId" => "1"},
context: %{pubsub: PubSub}
)
Absinthe.Subscription.publish(PubSub, "foo", thing: 1)
assert_receive({:broadcast, msg})
assert %{
event: "subscription:data",
result: %{data: %{"thing" => "foo"}},
topic: topic
} == msg
end
test "isn't tripped up if one of the subscription docs raises" do
assert {:ok, %{"subscribed" => _}} = run_subscription("subscription { raises }", Schema)
assert {:ok, %{"subscribed" => topic}} =
run_subscription("subscription { thing(clientId: \"*\")}", Schema)
error_log =
capture_log(fn ->
Absinthe.Subscription.publish(PubSub, "foo", raises: "*", thing: "*")
assert_receive({:broadcast, msg})
assert %{
event: "subscription:data",
result: %{data: %{"thing" => "foo"}},
topic: topic
} == msg
end)
assert String.contains?(error_log, "boom")
end
@tag :pending
test "different subscription docs are batched together" do
opts = [context: %{test_pid: self()}]
assert {:ok, %{"subscribed" => doc1}} =
run_subscription("subscription { user { group { name } id} }", Schema, opts)
# different docs required for test, otherwise they get deduplicated from the start
assert {:ok, %{"subscribed" => doc2}} =
run_subscription("subscription { user { group { name } id name} }", Schema, opts)
user = %{id: "1", name: "Alicia", group: %{name: "Elixir Users"}}
Absinthe.Subscription.publish(PubSub, user, user: ["*", user.id])
assert_receive({:broadcast, %{topic: ^doc1, result: %{data: _}}})
assert_receive({:broadcast, %{topic: ^doc2, result: %{data: %{"user" => user}}}})
assert user["group"]["name"] == "Elixir Users"
# we should get this just once due to batching
assert_receive(:batch_get_group)
refute_receive(:batch_get_group)
end
test "subscription docs with different contexts don't leak context" do
ctx1 = %{test_pid: self(), user: 1}
assert {:ok, %{"subscribed" => doc1}} =
run_subscription("subscription { user { group { name } id} }", Schema, context: ctx1)
ctx2 = %{test_pid: self(), user: 2}
# different docs required for test, otherwise they get deduplicated from the start
assert {:ok, %{"subscribed" => doc2}} =
run_subscription("subscription { user { group { name } id name} }", Schema,
context: ctx2
)
user = %{id: "1", name: "Alicia", group: %{name: "Elixir Users"}}
Absinthe.Subscription.publish(PubSub, user, user: ["*", user.id])
assert_receive({:broadcast, %{topic: ^doc1, result: %{data: _}}})
assert_receive({:broadcast, %{topic: ^doc2, result: %{data: %{"user" => user}}}})
assert user["group"]["name"] == "Elixir Users"
# we should get this twice since the different contexts prevent batching.
assert_receive(:batch_get_group)
assert_receive(:batch_get_group)
end
describe "subscription_ids" do
@query """
subscription {
otherUser { id }
}
"""
test "subscriptions with the same context_id and same source document have the same subscription_id" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription(@query, Schema, context: %{context_id: "logged-in"})
assert {:ok, %{"subscribed" => doc2}} =
run_subscription(@query, Schema, context: %{context_id: "logged-in"})
assert doc1 == doc2
end
@query """
subscription {
otherUser { id }
}
"""
test "subscriptions with different context_id but the same source document have different subscription_ids" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription(@query, Schema, context: %{context_id: "logged-in"})
assert {:ok, %{"subscribed" => doc2}} =
run_subscription(@query, Schema, context: %{context_id: "not-logged-in"})
assert doc1 != doc2
end
test "subscriptions with same context_id but different source document have different subscription_ids" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription("subscription { otherUser { id name } }", Schema,
context: %{context_id: "logged-in"}
)
assert {:ok, %{"subscribed" => doc2}} =
run_subscription("subscription { otherUser { id } }", Schema,
context: %{context_id: "logged-in"}
)
assert doc1 != doc2
end
test "subscriptions with different context_id and different source document have different subscription_ids" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription("subscription { otherUser { id name } }", Schema,
context: %{context_id: "logged-in"}
)
assert {:ok, %{"subscribed" => doc2}} =
run_subscription("subscription { otherUser { id } }", Schema,
context: %{context_id: "not-logged-in"}
)
assert doc1 != doc2
end
@query """
subscription($id: ID!) { otherUser(id: $id) { id } }
"""
test "subscriptions with the same variables & document have the same subscription_ids" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription(@query, Schema,
variables: %{"id" => "123"},
context: %{context_id: "logged-in"}
)
assert {:ok, %{"subscribed" => doc2}} =
run_subscription(@query, Schema,
variables: %{"id" => "123"},
context: %{context_id: "logged-in"}
)
assert doc1 == doc2
end
@query """
subscription($id: ID!) { otherUser(id: $id) { id } }
"""
test "subscriptions with different variables but same document have different subscription_ids" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription(@query, Schema,
variables: %{"id" => "123"},
context: %{context_id: "logged-in"}
)
assert {:ok, %{"subscribed" => doc2}} =
run_subscription(@query, Schema,
variables: %{"id" => "456"},
context: %{context_id: "logged-in"}
)
assert doc1 != doc2
end
test "document_id can be provided to override the default logic for deriving document_id" do
assert {:ok, %{"subscribed" => doc1}} =
run_subscription("subscription { otherUser { id name } }", Schema,
context: %{context_id: "logged-in", document_id: "abcdef"}
)
assert {:ok, %{"subscribed" => doc2}} =
run_subscription("subscription { otherUser { name id } }", Schema,
context: %{context_id: "logged-in", document_id: "abcdef"}
)
assert doc1 == doc2
end
end
@query """
subscription ($clientId: ID!) {
thing(clientId: $clientId)
}
"""
test "subscription executes telemetry events", context do
client_id = "abc"
:telemetry.attach_many(
context.test,
[
[:absinthe, :execute, :operation, :start],
[:absinthe, :execute, :operation, :stop],
[:absinthe, :subscription, :publish, :start],
[:absinthe, :subscription, :publish, :stop]
],
fn event, measurements, metadata, config ->
send(self(), {event, measurements, metadata, config})
end,
%{}
)
assert {:ok, %{"subscribed" => topic}} =
run_subscription(
@query,
Schema,
variables: %{"clientId" => client_id},
context: %{pubsub: PubSub}
)
assert_receive {[:absinthe, :execute, :operation, :start], measurements, %{id: id}, _config}
assert System.convert_time_unit(measurements[:system_time], :native, :millisecond)
assert_receive {[:absinthe, :execute, :operation, :stop], _, %{id: ^id}, _config}
Absinthe.Subscription.publish(PubSub, "foo", thing: client_id)
assert_receive({:broadcast, msg})
assert %{
event: "subscription:data",
result: %{data: %{"thing" => "foo"}},
topic: topic
} == msg
# Subscription events
assert_receive {[:absinthe, :subscription, :publish, :start], _, %{id: id}, _config}
assert_receive {[:absinthe, :subscription, :publish, :stop], _, %{id: ^id}, _config}
:telemetry.detach(context.test)
end
defp run_subscription(query, schema, opts \\ []) do
opts = Keyword.update(opts, :context, %{pubsub: PubSub}, &Map.put(&1, :pubsub, PubSub))
case run(query, schema, opts) do
{:ok, %{"subscribed" => topic}} = val ->
PubSub.subscribe(topic)
val
val ->
val
end
end
end
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
'targets': [
{
'target_name': 'test_aslr_default',
'type': 'executable',
'msvs_settings': {
},
'sources': ['hello.cc'],
},
{
'target_name': 'test_aslr_no',
'type': 'executable',
'msvs_settings': {
'VCLinkerTool': {
'RandomizedBaseAddress': '1',
},
},
'sources': ['hello.cc'],
},
{
'target_name': 'test_aslr_yes',
'type': 'executable',
'msvs_settings': {
'VCLinkerTool': {
'RandomizedBaseAddress': '2',
}
},
'sources': ['hello.cc'],
},
]
}
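# Note: in the MSVS toolchain, RandomizedBaseAddress '1' maps to /DYNAMICBASE:NO
# (ASLR disabled) and '2' maps to /DYNAMICBASE (ASLR enabled); test_aslr_default
# omits the setting to exercise the toolchain default.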
--- src/client.c.org Tue Nov 30 13:49:23 2004
+++ src/client.c Wed Dec 1 12:42:01 2004
@@ -144,6 +144,7 @@
static size_t try_read_from_server PROTO ((char *, size_t));
+static void proxy_connect PROTO ((cvsroot_t *, int));
static void auth_server PROTO ((cvsroot_t *, struct buffer *, struct buffer *,
int, int, struct hostent *));
@@ -3762,7 +3763,7 @@
int port_number;
struct sockaddr_in client_sai;
struct hostent *hostinfo;
- struct buffer *to_server, *from_server;
+ struct buffer *local_to_server, *local_from_server;
sock = socket (AF_INET, SOCK_STREAM, 0);
if (sock == -1)
@@ -3770,7 +3771,17 @@
error (1, 0, "cannot create socket: %s", SOCK_STRERROR (SOCK_ERRNO));
}
port_number = get_cvs_port_number (root);
+
+ /* if we have a proxy connect to that instead */
+ if(root->proxy)
+ {
+ hostinfo = init_sockaddr (&client_sai, root->proxy, root->proxy_port);
+ }
+ else
+ {
hostinfo = init_sockaddr (&client_sai, root->hostname, port_number);
+ }
+
if (trace)
{
fprintf (stderr, " -> Connecting to %s(%s):%d\n",
@@ -3780,29 +3791,41 @@
if (connect (sock, (struct sockaddr *) &client_sai, sizeof (client_sai))
< 0)
error (1, 0, "connect to %s(%s):%d failed: %s",
- root->hostname,
+ root->proxy ? root->proxy : root->hostname,
inet_ntoa (client_sai.sin_addr),
- port_number, SOCK_STRERROR (SOCK_ERRNO));
+ root->proxy ? root->proxy_port : port_number,
+ SOCK_STRERROR (SOCK_ERRNO));
- make_bufs_from_fds (sock, sock, 0, &to_server, &from_server, 1);
+ make_bufs_from_fds (sock, sock, 0, &local_to_server, &local_from_server, 1);
- auth_server (root, to_server, from_server, verify_only, do_gssapi, hostinfo);
+ if(root->proxy)
+ {
+ // REALLY ugly hack to allow proxy_connect() to use send_to_server().
+ // The proper fix would be to remove the global to_server & from_server
+ // variables, and instead let send_to_server() etc. take the target
+ // server struct as a parameter.
+ to_server = local_to_server;
+ from_server = local_from_server;
+ proxy_connect (root, port_number);
+ }
+
+ auth_server (root, local_to_server, local_from_server, verify_only, do_gssapi, hostinfo);
if (verify_only)
{
int status;
- status = buf_shutdown (to_server);
+ status = buf_shutdown (local_to_server);
if (status != 0)
error (0, status, "shutting down buffer to server");
- buf_free (to_server);
- to_server = NULL;
+ buf_free (local_to_server);
+ local_to_server = NULL;
- status = buf_shutdown (from_server);
+ status = buf_shutdown (local_from_server);
if (status != 0)
error (0, status, "shutting down buffer from server");
- buf_free (from_server);
- from_server = NULL;
+ buf_free (local_from_server);
+ local_from_server = NULL;
/* Don't need to set server_started = 0 since we don't set it to 1
* until returning from this call.
@@ -3810,11 +3833,53 @@
}
else
{
- *to_server_p = to_server;
- *from_server_p = from_server;
+ *to_server_p = local_to_server;
+ *from_server_p = local_from_server;
}
return;
+}
+
+
+
+static void
+proxy_connect (root, port_number)
+ cvsroot_t *root;
+ int port_number;
+{
+#define CONNECT_STRING "CONNECT %s:%d HTTP/1.0\r\n\r\n"
+ /* Send a "CONNECT" command to proxy: */
+ char* read_buf;
+ int codenum, count;
+
+ /* 4 characters for port covered by the length of %s & %d */
+ char* write_buf = xmalloc (strlen (CONNECT_STRING) + strlen (root->hostname
+) + 20 + 1);
+ int len = sprintf (write_buf, CONNECT_STRING, root->hostname, port_number);
+
+ send_to_server (write_buf, len);
+
+ /* Wait for HTTP status code, bail out if you don't get back a 2xx code.*/
+ count = read_line (&read_buf);
+ sscanf (read_buf, "%s %d", write_buf, &codenum);
+
+ if ((codenum / 100) != 2)
+ error (1, 0, "proxy server %s:%d does not support http tunnelling",
+ root->proxy, root->proxy_port);
+ free (read_buf);
+ free (write_buf);
+
+ /* Skip through remaining part of MIME header, read_line
+ consumes the trailing \n */
+ while(read_line (&read_buf) > 0)
+ {
+ if (read_buf[0] == '\r' || read_buf[0] == 0)
+ {
+ free (read_buf);
+ break;
+ }
+ free (read_buf);
+ }
}
--- src/client.h.org Wed Dec 1 07:36:43 2004
+++ src/client.h Wed Dec 1 07:38:06 2004
@@ -69,6 +69,9 @@
# ifndef CVS_AUTH_PORT
# define CVS_AUTH_PORT 2401
# endif /* CVS_AUTH_PORT */
+# ifndef CVS_PROXY_PORT
+# define CVS_PROXY_PORT 80
+# endif /* CVS_PROXY_PORT */
# endif /* (AUTH_CLIENT_SUPPORT) || defined (HAVE_GSSAPI) */
# if HAVE_KERBEROS
--- src/root.c.org Wed Dec 1 07:38:19 2004
+++ src/root.c Wed Dec 1 07:48:43 2004
@@ -303,6 +303,8 @@
newroot->proxy_port = 0;
newroot->isremote = 0;
#endif /* CLIENT_SUPPORT */
+ newroot->proxy = NULL;
+ newroot->proxy_port = CVS_PROXY_PORT;
return newroot;
}
@@ -332,6 +334,8 @@
if (root->proxy_hostname != NULL)
free (root->proxy_hostname);
#endif /* CLIENT_SUPPORT */
+ if (root->proxy != NULL)
+ free (root->proxy);
free (root);
}
@@ -375,6 +379,7 @@
#ifdef CLIENT_SUPPORT
int check_hostname, no_port, no_password;
#endif /* CLIENT_SUPPORT */
+ const char *env_var;
/* allocate some space */
newroot = new_cvsroot_t();
@@ -555,6 +560,29 @@
/* restore the '/' */
cvsroot_copy = firstslash;
*cvsroot_copy = '/';
+
+
+ /* Determine proxy */
+ env_var = getenv("CVS_PROXY");
+ if (!env_var)
+ env_var = getenv("HTTP_PROXY");
+ /* Check if a proxy was specified, and if it is a HTTP proxy */
+ if (env_var && !memcmp(env_var, "http://", 7))
+ {
+ char *port_str;
+
+ /* Try to parse the proxy data */
+ env_var += 7;
+ /* TODO - parse username/password data, too */
+ port_str = strchr(env_var, ':');
+ if (port_str)
+ {
+ *port_str++ = 0;
+ newroot->proxy_port = atoi(port_str);
+ }
+ newroot->proxy = xstrdup(env_var);
+ }
+
#endif /* CLIENT_SUPPORT */
}
--- src/root.h.org Wed Dec 1 07:48:49 2004
+++ src/root.h Wed Dec 1 12:26:12 2004
@@ -39,4 +39,5 @@
int proxy_port; /* The port of the proxy or zero, as above. */
unsigned char isremote; /* Nonzero if we are doing remote access. */
#endif /* CLIENT_SUPPORT */
+ char *proxy;
} cvsroot_t;
/* Presentation mode for SF HTML
*
* This file implements simple slide functionality for the SF HTML
* files. When loaded in a page, it will tag some of the page elements
* as slide boundaries, giving each an id of the form
* "#slide-XX". Pressing left or right should trigger "slide mode",
* focusing the screen on the current slide, and then navigate between
* slides. Pressing escape brings the page back to normal. */
/* Which DOM elements to mark as slide boundaries */
var slideSelector = 'h1.libtitle, h1.section, h2.section, h3.section, .quiz';
/* Whether or not we are in slide mode */
var slideMode = false;
/* Navigates between slides, using the current location hash to find
* the next slide to go to */
function slideNavigate(direction) {
function slideNumber(s) {
if (!s) return null;
var match = s.match(/slide-(.*)/);
if (match && match.length != 0) {
return parseInt(match[1]);
}
return null;
}
var curSlide = slideNumber(location.hash);
var lastSlide = slideNumber($('.slide').last().attr('id'));
var nextSlide;
/* We change the id of each slide element when the page loads, and
* then switch between slides based on the current hash. This is
* not entirely optimal, and can probably be made better.
* http://www.appelsiini.net/projects/viewport seems to be a nice choice.
*/
if (direction == 'left') {
if (curSlide != null) {
if (curSlide > 0) {
nextSlide = curSlide - 1;
} else {
nextSlide = lastSlide;
}
} else {
nextSlide = 0;
}
} else if (direction == 'right') {
if (curSlide != null && curSlide < lastSlide) {
nextSlide = curSlide + 1;
} else {
nextSlide = 0;
}
}
location.hash = '#slide-' + nextSlide;
return false;
};
/* Force the browser to scroll back to the hash location */
function refreshHash() {
var t = location.hash;
location.hash = '';
location.hash = t;
}
/* Activate slide mode. Inserts the right amount of spacing between
* slide boundaries, ensuring that only one slide appears on the
* screen at a time */
function slideActivate() {
$('.slide').each(function (i, elt) {
if (i > 0) $(elt).css('margin-top', $(window).height());
$(elt).css('height', '20px');
});
$('#main').css('padding-bottom', $(window).height());
slideMode = true;
if (location.hash) {
refreshHash();
} else {
location.hash = '#slide-0';
}
}
/* Deactivate slide mode. Removes the extra spacing between slides */
function slideDeactivate() {
$('.slide').each(function (i, elt) {
$(elt).css('margin-top', 0);
$(elt).css('height', 0);
});
$('#main').css('padding-bottom', 0);
refreshHash();
slideMode = false;
}
/* Set up appropriate input handlers */
$(document).keydown(function (event) {
if (slideMode) {
if (event.keyCode == 37) {
slideNavigate('left');
} else if (event.keyCode == 39) {
slideNavigate('right');
} else if (event.keyCode == 27) { // escape
slideDeactivate();
} else return true;
} else {
if (event.keyCode == 37 || event.keyCode == 39) {
slideActivate();
return false;
} else {
return true;
}
}
});
/* Find slide boundaries and tag them */
$(document).ready(function () {
$(slideSelector).each(function (i, elt) {
var mark = '<div class="slide" id="slide-' + i + '" />';
$(mark).insertBefore($(elt));
});
if (location.hash) {
slideActivate();
}
});
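/* A minimal page sketch (file names are illustrative; jQuery must be loaded
 * before this script, and slide boundaries come from slideSelector above):
 *
 *   <script src="jquery.js"></script>
 *   <script src="slides.js"></script>
 *   <h1 class="section">First slide</h1>
 *   <p>Intro material...</p>
 *   <h2 class="section">Second slide</h2>
 *   <p>More material...</p>
 */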
# The MIT License
#
# Copyright (c) 2004-2010, Sun Microsystems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
Description=Opis
rem -----------------------------------------------------------------------
rem URL: http://www.orafaq.com/scripts/plsql/hex2dec.txt
rem Filename: hex2dec.sql
rem Purpose: Functions to convert Hex to Decimal and vice versa
rem Author: Mark Malakanov, Feb-1999 + Anonymous
rem -----------------------------------------------------------------------
CREATE OR REPLACE FUNCTION hex2dec (hexnum in char) RETURN number IS
i number;
digits number;
result number := 0;
current_digit char(1);
current_digit_dec number;
BEGIN
digits := length(hexnum);
for i in 1..digits loop
current_digit := SUBSTR(hexnum, i, 1);
if current_digit in ('A','B','C','D','E','F') then
current_digit_dec := ascii(current_digit) - ascii('A') + 10;
else
current_digit_dec := to_number(current_digit);
end if;
result := (result * 16) + current_digit_dec;
end loop;
return result;
END hex2dec;
/
show errors
CREATE OR REPLACE FUNCTION num2hex (N in number) RETURN varchar2 IS
H varchar2(64) :='';
N2 integer := N;
BEGIN
loop
select rawtohex(chr(N2))||H
into H
from dual;
N2 := trunc(N2 / 256);
exit when N2=0;
end loop;
return H;
END num2hex;
/
show errors
-- Examples:
select hex2dec('FF') from dual;
select num2hex(10) from dual;
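-- Expected output (num2hex assumes a single-byte database character set):
--   hex2dec('FF') => 255
--   num2hex(10) => '0A'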
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0) on Thu Jan 05 01:03:22 EST 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>StaxServerConfiguration (appsensor-parent 2.3.1 API)</title>
<meta name="date" content="2017-01-05">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="StaxServerConfiguration (appsensor-parent 2.3.1 API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ======== START OF CLASS DATA ======== -->
<div class="header">
<div class="subTitle">org.owasp.appsensor.configuration.stax.server</div>
<h2 title="Class StaxServerConfiguration" class="title">Class StaxServerConfiguration</h2>
</div>
<div class="contentContainer">
<ul class="inheritance">
<li><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">java.lang.Object</a></li>
<li>
<ul class="inheritance">
<li><a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html" title="class in org.owasp.appsensor.core.configuration.server">org.owasp.appsensor.core.configuration.server.ServerConfiguration</a></li>
<li>
<ul class="inheritance">
<li>org.owasp.appsensor.configuration.stax.server.StaxServerConfiguration</li>
</ul>
</li>
</ul>
</li>
</ul>
<div class="description">
<ul class="blockList">
<li class="blockList">
<hr>
<br>
<pre>@Named
public class <span class="typeNameLabel">StaxServerConfiguration</span>
extends <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html" title="class in org.owasp.appsensor.core.configuration.server">ServerConfiguration</a></pre>
</li>
</ul>
</div>
<div class="summary">
<ul class="blockList">
<li class="blockList">
<!-- ======== CONSTRUCTOR SUMMARY ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.summary">
<!-- -->
</a>
<h3>Constructor Summary</h3>
<table class="memberSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation">
<caption><span>Constructors</span><span class="tabEnd"> </span></caption>
<tr>
<th class="colOne" scope="col">Constructor and Description</th>
</tr>
<tr class="altColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../../org/owasp/appsensor/configuration/stax/server/StaxServerConfiguration.html#StaxServerConfiguration--">StaxServerConfiguration</a></span>()</code></td>
</tr>
<tr class="rowColor">
<td class="colOne"><code><span class="memberNameLink"><a href="../../../../../../org/owasp/appsensor/configuration/stax/server/StaxServerConfiguration.html#StaxServerConfiguration-boolean-">StaxServerConfiguration</a></span>(boolean loadConfiguration)</code></td>
</tr>
</table>
</li>
</ul>
<!-- ========== METHOD SUMMARY =========== -->
<ul class="blockList">
<li class="blockList"><a name="method.summary">
<!-- -->
</a>
<h3>Method Summary</h3>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.org.owasp.appsensor.core.configuration.server.ServerConfiguration">
<!-- -->
</a>
<h3>Methods inherited from class org.owasp.appsensor.core.configuration.server.<a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html" title="class in org.owasp.appsensor.core.configuration.server">ServerConfiguration</a></h3>
<code><a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#equals-java.lang.Object-">equals</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#findClientApplication-java.lang.String-">findClientApplication</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#findDetectionPoints-org.owasp.appsensor.core.DetectionPoint-">findDetectionPoints</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#findDetectionPoints-org.owasp.appsensor.core.DetectionPoint-java.lang.String-">findDetectionPoints</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getClientApplicationIdentificationHeaderName--">getClientApplicationIdentificationHeaderName</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getClientApplications--">getClientApplications</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getConfigurationFile--">getConfigurationFile</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getCorrelationSets--">getCorrelationSets</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getCustomDetectionPoints--">getCustomDetectionPoints</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getDetectionPoints--">getDetectionPoints</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getGeolocationDatabasePath--">getGeolocationDatabasePath</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getRelatedDetectionSystems-org.owasp.appsensor.core.DetectionSystem-">getRelatedDetectionSystems</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getServerHostName--">getServerHostName</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getServerPort--">getServerPort</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#getServerSocketTimeout--">getServerSocketTimeout</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#hashCode--">hashCode</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#isGeolocateIpAddresses--">isGeolocateIpAddresses</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setClientApplicationIdentificationHeaderName-java.lang.String-">setClientApplicationIdentificationHeaderName</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setClientApplications-java.util.Collection-">setClientApplications</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setConfigurationFile-java.io.File-">setConfigurationFile</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setCorrelationSets-java.util.Collection-">setCorrelationSets</a>, <a 
href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setCustomDetectionPoints-java.util.HashMap-">setCustomDetectionPoints</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setDetectionPoints-java.util.Collection-">setDetectionPoints</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setGeolocateIpAddresses-boolean-">setGeolocateIpAddresses</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setGeolocationDatabasePath-java.lang.String-">setGeolocationDatabasePath</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setServerHostName-java.lang.String-">setServerHostName</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setServerPort-int-">setServerPort</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#setServerSocketTimeout-int-">setServerSocketTimeout</a>, <a href="../../../../../../org/owasp/appsensor/core/configuration/server/ServerConfiguration.html#toString--">toString</a></code></li>
</ul>
<ul class="blockList">
<li class="blockList"><a name="methods.inherited.from.class.java.lang.Object">
<!-- -->
</a>
<h3>Methods inherited from class java.lang.<a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true" title="class or interface in java.lang">Object</a></h3>
<code><a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#clone--" title="class or interface in java.lang">clone</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#finalize--" title="class or interface in java.lang">finalize</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#getClass--" title="class or interface in java.lang">getClass</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notify--" title="class or interface in java.lang">notify</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#notifyAll--" title="class or interface in java.lang">notifyAll</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait--" title="class or interface in java.lang">wait</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait-long-" title="class or interface in java.lang">wait</a>, <a href="http://docs.oracle.com/javase/7/docs/api/java/lang/Object.html?is-external=true#wait-long-int-" title="class or interface in java.lang">wait</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
<div class="details">
<ul class="blockList">
<li class="blockList">
<!-- ========= CONSTRUCTOR DETAIL ======== -->
<ul class="blockList">
<li class="blockList"><a name="constructor.detail">
<!-- -->
</a>
<h3>Constructor Detail</h3>
<a name="StaxServerConfiguration--">
<!-- -->
</a>
<ul class="blockList">
<li class="blockList">
<h4>StaxServerConfiguration</h4>
<pre>public StaxServerConfiguration()</pre>
</li>
</ul>
<a name="StaxServerConfiguration-boolean-">
<!-- -->
</a>
<ul class="blockListLast">
<li class="blockList">
<h4>StaxServerConfiguration</h4>
<pre>public StaxServerConfiguration(boolean loadConfiguration)</pre>
</li>
</ul>
</li>
</ul>
</li>
</ul>
</div>
</div>
<!-- ========= END OF CLASS DATA ========= -->
<p class="legalCopy"><small>Copyright © 2017 <a href="http://www.owasp.org">The Open Web Application Security Project (OWASP)</a>. All rights reserved.</small></p>
</body>
</html>
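<!-- A minimal usage sketch (hypothetical calling code, based on the constructor
     summary above):
       ServerConfiguration config = new StaxServerConfiguration(true);
-->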
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using Xunit;
namespace System.Runtime.InteropServices.Tests
{
public class TypeLibVarAttributeTests
{
[Theory]
[InlineData((TypeLibVarFlags)(-1))]
[InlineData((TypeLibVarFlags)0)]
[InlineData(TypeLibVarFlags.FBindable)]
public void Ctor_TypeLibVarFlags(TypeLibVarFlags flags)
{
var attribute = new TypeLibVarAttribute(flags);
Assert.Equal(flags, attribute.Value);
}
[Theory]
[InlineData(-1)]
[InlineData(0)]
[InlineData(4)]
public void Ctor_ShortFlags(short flags)
{
var attribute = new TypeLibVarAttribute(flags);
Assert.Equal((TypeLibVarFlags)flags, attribute.Value);
}
}
}
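// A minimal usage sketch of the attribute under test (type and field names are
// illustrative; TypeLibVarAttribute targets fields):
//
//   public struct ComVariable
//   {
//       [TypeLibVar(TypeLibVarFlags.FBindable)]
//       public int Value;
//   }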
/*
* Broadcom UniMAC MDIO bus controller driver
*
* Copyright (C) 2014-2017 Broadcom
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*/
#include <linux/kernel.h>
#include <linux/phy.h>
#include <linux/platform_device.h>
#include <linux/sched.h>
#include <linux/module.h>
#include <linux/io.h>
#include <linux/delay.h>
#include <linux/of.h>
#include <linux/of_platform.h>
#include <linux/of_mdio.h>
#include <linux/platform_data/mdio-bcm-unimac.h>
#define MDIO_CMD 0x00
#define MDIO_START_BUSY (1 << 29)
#define MDIO_READ_FAIL (1 << 28)
#define MDIO_RD (2 << 26)
#define MDIO_WR (1 << 26)
#define MDIO_PMD_SHIFT 21
#define MDIO_PMD_MASK 0x1F
#define MDIO_REG_SHIFT 16
#define MDIO_REG_MASK 0x1F
#define MDIO_CFG 0x04
#define MDIO_C22 (1 << 0)
#define MDIO_C45 0
#define MDIO_CLK_DIV_SHIFT 4
#define MDIO_CLK_DIV_MASK 0x3F
#define MDIO_SUPP_PREAMBLE (1 << 12)
struct unimac_mdio_priv {
struct mii_bus *mii_bus;
void __iomem *base;
int (*wait_func) (void *wait_func_data);
void *wait_func_data;
};
static inline u32 unimac_mdio_readl(struct unimac_mdio_priv *priv, u32 offset)
{
/* MIPS chips strapped for BE will automagically configure the
* peripheral registers for CPU-native byte order.
*/
if (IS_ENABLED(CONFIG_MIPS) && IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
return __raw_readl(priv->base + offset);
else
return readl_relaxed(priv->base + offset);
}
static inline void unimac_mdio_writel(struct unimac_mdio_priv *priv, u32 val,
u32 offset)
{
if (IS_ENABLED(CONFIG_MIPS) && IS_ENABLED(CONFIG_CPU_BIG_ENDIAN))
__raw_writel(val, priv->base + offset);
else
writel_relaxed(val, priv->base + offset);
}
static inline void unimac_mdio_start(struct unimac_mdio_priv *priv)
{
u32 reg;
reg = unimac_mdio_readl(priv, MDIO_CMD);
reg |= MDIO_START_BUSY;
unimac_mdio_writel(priv, reg, MDIO_CMD);
}
static inline unsigned int unimac_mdio_busy(struct unimac_mdio_priv *priv)
{
return unimac_mdio_readl(priv, MDIO_CMD) & MDIO_START_BUSY;
}
static int unimac_mdio_poll(void *wait_func_data)
{
struct unimac_mdio_priv *priv = wait_func_data;
unsigned int timeout = 1000;
do {
if (!unimac_mdio_busy(priv))
return 0;
usleep_range(1000, 2000);
} while (--timeout);
if (!timeout)
return -ETIMEDOUT;
return 0;
}
static int unimac_mdio_read(struct mii_bus *bus, int phy_id, int reg)
{
struct unimac_mdio_priv *priv = bus->priv;
int ret;
u32 cmd;
/* Prepare the read operation */
cmd = MDIO_RD | (phy_id << MDIO_PMD_SHIFT) | (reg << MDIO_REG_SHIFT);
unimac_mdio_writel(priv, cmd, MDIO_CMD);
/* Start MDIO transaction */
unimac_mdio_start(priv);
ret = priv->wait_func(priv->wait_func_data);
if (ret)
return ret;
cmd = unimac_mdio_readl(priv, MDIO_CMD);
/* Some broken devices are known not to release the line during
* turn-around, e.g: Broadcom BCM53125 external switches, so check for
* that condition here and ignore the MDIO controller read failure
* indication.
*/
if (!(bus->phy_ignore_ta_mask & 1 << phy_id) && (cmd & MDIO_READ_FAIL))
return -EIO;
return cmd & 0xffff;
}
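/* Illustrative sketch, not part of the original driver: using the masks and
 * shifts defined above, a read of register 0x01 (MII_BMSR) on the PHY at
 * address 0x10 would be encoded roughly as
 *
 *   cmd = MDIO_RD | (0x10 << MDIO_PMD_SHIFT) | (0x01 << MDIO_REG_SHIFT);
 *
 * and, once MDIO_START_BUSY clears, the 16-bit result is read back as
 * unimac_mdio_readl(priv, MDIO_CMD) & 0xffff, exactly as done in
 * unimac_mdio_read() above.
 */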
static int unimac_mdio_write(struct mii_bus *bus, int phy_id,
int reg, u16 val)
{
struct unimac_mdio_priv *priv = bus->priv;
u32 cmd;
/* Prepare the write operation */
cmd = MDIO_WR | (phy_id << MDIO_PMD_SHIFT) |
(reg << MDIO_REG_SHIFT) | (0xffff & val);
unimac_mdio_writel(priv, cmd, MDIO_CMD);
unimac_mdio_start(priv);
return priv->wait_func(priv->wait_func_data);
}
/* Workaround for integrated BCM7xxx Gigabit PHYs which have a problem with
* their internal MDIO management controller making them fail to successfully
* be read from or written to for the first transaction. We insert a dummy
* BMSR read here to make sure that phy_get_device() and get_phy_id() can
* correctly read the PHY MII_PHYSID1/2 registers and successfully register a
* PHY device for this peripheral.
*
* Once the PHY driver is registered, we can workaround subsequent reads from
* there (e.g: during system-wide power management).
*
* bus->reset is invoked before mdiobus_scan during mdiobus_register and is
* therefore the right location to stick that workaround. Since we do not want
* to read from non-existing PHYs, we either use bus->phy_mask or do a manual
* Device Tree scan to limit the search area.
*/
static int unimac_mdio_reset(struct mii_bus *bus)
{
struct device_node *np = bus->dev.of_node;
struct device_node *child;
u32 read_mask = 0;
int addr;
if (!np) {
read_mask = ~bus->phy_mask;
} else {
for_each_available_child_of_node(np, child) {
addr = of_mdio_parse_addr(&bus->dev, child);
if (addr < 0)
continue;
read_mask |= 1 << addr;
}
}
for (addr = 0; addr < PHY_MAX_ADDR; addr++) {
if (read_mask & 1 << addr) {
dev_dbg(&bus->dev, "Workaround for PHY @ %d\n", addr);
mdiobus_read(bus, addr, MII_BMSR);
}
}
return 0;
}
static int unimac_mdio_probe(struct platform_device *pdev)
{
struct unimac_mdio_pdata *pdata = pdev->dev.platform_data;
struct unimac_mdio_priv *priv;
struct device_node *np;
struct mii_bus *bus;
struct resource *r;
int ret;
np = pdev->dev.of_node;
priv = devm_kzalloc(&pdev->dev, sizeof(*priv), GFP_KERNEL);
if (!priv)
return -ENOMEM;
r = platform_get_resource(pdev, IORESOURCE_MEM, 0);
/* Just ioremap, as this MDIO block is usually integrated into an
* Ethernet MAC controller register range
*/
priv->base = devm_ioremap(&pdev->dev, r->start, resource_size(r));
if (!priv->base) {
dev_err(&pdev->dev, "failed to remap register\n");
return -ENOMEM;
}
priv->mii_bus = mdiobus_alloc();
if (!priv->mii_bus)
return -ENOMEM;
bus = priv->mii_bus;
bus->priv = priv;
if (pdata) {
bus->name = pdata->bus_name;
priv->wait_func = pdata->wait_func;
priv->wait_func_data = pdata->wait_func_data;
bus->phy_mask = ~pdata->phy_mask;
} else {
bus->name = "unimac MII bus";
priv->wait_func_data = priv;
priv->wait_func = unimac_mdio_poll;
}
bus->parent = &pdev->dev;
bus->read = unimac_mdio_read;
bus->write = unimac_mdio_write;
bus->reset = unimac_mdio_reset;
snprintf(bus->id, MII_BUS_ID_SIZE, "%s-%d", pdev->name, pdev->id);
ret = of_mdiobus_register(bus, np);
if (ret) {
dev_err(&pdev->dev, "MDIO bus registration failed\n");
goto out_mdio_free;
}
platform_set_drvdata(pdev, priv);
dev_info(&pdev->dev, "Broadcom UniMAC MDIO bus at 0x%p\n", priv->base);
return 0;
out_mdio_free:
mdiobus_free(bus);
return ret;
}
static int unimac_mdio_remove(struct platform_device *pdev)
{
struct unimac_mdio_priv *priv = platform_get_drvdata(pdev);
mdiobus_unregister(priv->mii_bus);
mdiobus_free(priv->mii_bus);
return 0;
}
static const struct of_device_id unimac_mdio_ids[] = {
{ .compatible = "brcm,genet-mdio-v5", },
{ .compatible = "brcm,genet-mdio-v4", },
{ .compatible = "brcm,genet-mdio-v3", },
{ .compatible = "brcm,genet-mdio-v2", },
{ .compatible = "brcm,genet-mdio-v1", },
{ .compatible = "brcm,unimac-mdio", },
{ /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, unimac_mdio_ids);
static struct platform_driver unimac_mdio_driver = {
.driver = {
.name = UNIMAC_MDIO_DRV_NAME,
.of_match_table = unimac_mdio_ids,
},
.probe = unimac_mdio_probe,
.remove = unimac_mdio_remove,
};
module_platform_driver(unimac_mdio_driver);
MODULE_AUTHOR("Broadcom Corporation");
MODULE_DESCRIPTION("Broadcom UniMAC MDIO bus controller");
MODULE_LICENSE("GPL");
MODULE_ALIAS("platform:" UNIMAC_MDIO_DRV_NAME);
| {
"pile_set_name": "Github"
} |
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_Name: Floor
m_Shader: {fileID: 4800000, guid: 933532a4fcc9baf4fa0491de14d08ed7, type: 3}
m_ShaderKeywords:
m_LightmapFlags: 4
m_EnableInstancingVariants: 0
m_DoubleSidedGI: 0
m_CustomRenderQueue: 2050
stringTagMap:
RenderType: Opaque
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _BaseMap:
m_Texture: {fileID: 2800000, guid: 7583d2822d7d74246ba5be4bfb4bfb77, type: 3}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _BumpMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailAlbedoMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailMask:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _DetailNormalMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _EmissionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MainTex:
m_Texture: {fileID: 2800000, guid: 7583d2822d7d74246ba5be4bfb4bfb77, type: 3}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _MetallicGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _OcclusionMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _ParallaxMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
- _SpecGlossMap:
m_Texture: {fileID: 0}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats:
- _AlphaClip: 0
- _Blend: 0
- _BumpScale: 1
- _Cull: 2
- _Cutoff: 0.5
- _DetailNormalMapScale: 1
- _DstBlend: 0
- _EnvironmentReflections: 1
- _GlossMapScale: 1
- _Glossiness: 0.15
- _GlossyReflections: 1
- _Metallic: 0
- _OcclusionStrength: 1
- _Parallax: 0.02
- _QueueOffset: 0
- _ReceiveShadows: 1
- _Smoothness: 0.15
- _SmoothnessTextureChannel: 0
- _SpecularHighlights: 1
- _SrcBlend: 1
- _Surface: 0
- _UVSec: 0
- _WorkflowMode: 1
- _ZWrite: 1
m_Colors:
- _BaseColor: {r: 1, g: 1, b: 1, a: 1}
- _Color: {r: 1, g: 1, b: 1, a: 1}
- _EmissionColor: {r: 0, g: 0, b: 0, a: 1}
- _SpecColor: {r: 0.19999996, g: 0.19999996, b: 0.19999996, a: 1}
--- !u!114 &3801298144813794507
MonoBehaviour:
m_ObjectHideFlags: 11
m_CorrespondingSourceObject: {fileID: 0}
m_PrefabInstance: {fileID: 0}
m_PrefabAsset: {fileID: 0}
m_GameObject: {fileID: 0}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: d0353a89b1f911e48b9e16bdc9f2e058, type: 3}
m_Name:
m_EditorClassIdentifier:
version: 1
| {
"pile_set_name": "Github"
} |
/*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, see <http://www.gnu.org/licenses/>.
*
* Copyright (C) 1999-2008 Novell, Inc. (www.novell.com)
*/
#include "evolution-config.h"
#include "e-composer-spell-header.h"
G_DEFINE_TYPE (
EComposerSpellHeader,
e_composer_spell_header,
E_TYPE_COMPOSER_TEXT_HEADER)
static void
e_composer_spell_header_class_init (EComposerSpellHeaderClass *class)
{
EComposerTextHeaderClass *composer_text_header_class;
composer_text_header_class = E_COMPOSER_TEXT_HEADER_CLASS (class);
composer_text_header_class->entry_type = E_TYPE_SPELL_ENTRY;
}
static void
e_composer_spell_header_init (EComposerSpellHeader *header)
{
}
EComposerHeader *
e_composer_spell_header_new_label (ESourceRegistry *registry,
const gchar *label)
{
g_return_val_if_fail (E_IS_SOURCE_REGISTRY (registry), NULL);
return g_object_new (
E_TYPE_COMPOSER_SPELL_HEADER,
"label", label, "button", FALSE,
"registry", registry, NULL);
}
EComposerHeader *
e_composer_spell_header_new_button (ESourceRegistry *registry,
const gchar *label)
{
g_return_val_if_fail (E_IS_SOURCE_REGISTRY (registry), NULL);
return g_object_new (
E_TYPE_COMPOSER_SPELL_HEADER,
"label", label, "button", TRUE,
"registry", registry, NULL);
}
| {
"pile_set_name": "Github"
} |
def euclideanDistance(p: List[Int], q: List[Int]): Double =
  math.sqrt(p.zip(q).map { case (u, v) => math.pow(u - v, 2) }.sum)
| {
"pile_set_name": "Github"
} |
--- a/sockd/sockd_io.c.old 2017-05-10 19:57:20.672000000 +0000
+++ b/sockd/sockd_io.c 2017-05-10 20:00:34.548000000 +0000
@@ -2912,7 +2912,7 @@ siginfo(sig, si, sc)
int src_so_rcvbuf, dst_so_rcvbuf;
#endif /* HAVE_RECVBUF_IOCTL */
-#if HAVE_SENDBUF_IOCTL
+#ifdef HAVE_SENDBUF_IOCTL
int src_so_sndbuf, dst_so_sndbuf;
#endif /* HAVE_SENDBUF_IOCTL */
@@ -2960,7 +2960,7 @@ siginfo(sig, si, sc)
*src_bufferinfo = NUL;
*dst_bufferinfo = NUL;
-#if HAVE_RECVBUF_IOCTL || HAVE_SENDBUF_IOCTL
+#if defined(HAVE_RECVBUF_IOCTL) || defined(HAVE_SENDBUF_IOCTL)
havesocketinfo = 1;
@@ -2987,7 +2987,7 @@ siginfo(sig, si, sc)
}
#endif /* HAVE_RECVBUF_IOCTL */
-#if HAVE_SENDBUF_IOCTL
+#ifdef HAVE_SENDBUF_IOCTL
if (havesocketinfo) {
if (ioctl(src->s, SENDBUF_IOCTLVAL, &src_so_sndbuf) != 0) {
swarn("%s: sendbuf size ioctl() on src-fd %d failed",
@@ -3005,7 +3005,7 @@ siginfo(sig, si, sc)
}
#endif /* HAVE_SENDBUF_IOCTL */
-#if HAVE_SENDBUF_IOCTL && HAVE_RECVBUF_IOCTL
+#if defined(HAVE_SENDBUF_IOCTL) && defined(HAVE_RECVBUF_IOCTL)
if (havesocketinfo) {
snprintf(src_bufferinfo, sizeof(src_bufferinfo),
"%lu buffered (%lu + %lu + %lu)",
| {
"pile_set_name": "Github"
} |
/* Copyright 2006-2015 Joaquin M Lopez Munoz.
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* See http://www.boost.org/libs/flyweight for library home page.
*/
#ifndef BOOST_FLYWEIGHT_SERIALIZE_HPP
#define BOOST_FLYWEIGHT_SERIALIZE_HPP
#if defined(_MSC_VER)&&(_MSC_VER>=1200)
#pragma once
#endif
#include <boost/config.hpp> /* keep it first to prevent nasty warns in MSVC */
#include <boost/flyweight/flyweight_fwd.hpp>
#include <boost/flyweight/detail/archive_constructed.hpp>
#include <boost/flyweight/detail/serialization_helper.hpp>
#include <boost/serialization/nvp.hpp>
#include <boost/serialization/split_free.hpp>
#include <boost/throw_exception.hpp>
#include <memory>
/* Serialization routines for flyweight<T>.
*/
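/* Usage sketch (illustrative, not part of this header). Assuming the caller
 * includes <boost/archive/text_oarchive.hpp> and <boost/flyweight.hpp>, a
 * flyweight value serializes through the save()/load() overloads below:
 *
 *   std::ostringstream os;
 *   boost::archive::text_oarchive oa(os);
 *   const boost::flyweights::flyweight<std::string> fw("hello");
 *   oa << fw;   // writes the helper index and, only once per key, the key itself
 */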
namespace boost{
namespace serialization{
template<
class Archive,
typename T,typename Arg1,typename Arg2,typename Arg3
>
inline void serialize(
Archive& ar,::boost::flyweights::flyweight<T,Arg1,Arg2,Arg3>& f,
const unsigned int version)
{
split_free(ar,f,version);
}
template<
class Archive,
typename T,typename Arg1,typename Arg2,typename Arg3
>
void save(
Archive& ar,const ::boost::flyweights::flyweight<T,Arg1,Arg2,Arg3>& f,
const unsigned int /*version*/)
{
typedef ::boost::flyweights::flyweight<T,Arg1,Arg2,Arg3> flyweight;
typedef ::boost::flyweights::detail::save_helper<flyweight> helper;
typedef typename helper::size_type size_type;
helper& hlp=ar.template get_helper<helper>();
size_type n=hlp.find(f);
ar<<make_nvp("item",n);
if(n==hlp.size()){
ar<<make_nvp("key",f.get_key());
hlp.push_back(f);
}
}
template<
class Archive,
typename T,typename Arg1,typename Arg2,typename Arg3
>
void load(
Archive& ar,::boost::flyweights::flyweight<T,Arg1,Arg2,Arg3>& f,
const unsigned int version)
{
typedef ::boost::flyweights::flyweight<T,Arg1,Arg2,Arg3> flyweight;
typedef typename flyweight::key_type key_type;
typedef ::boost::flyweights::detail::load_helper<flyweight> helper;
typedef typename helper::size_type size_type;
helper& hlp=ar.template get_helper<helper>();
size_type n=0;
ar>>make_nvp("item",n);
if(n>hlp.size()){
throw_exception(
archive::archive_exception(archive::archive_exception::other_exception));
}
else if(n==hlp.size()){
::boost::flyweights::detail::archive_constructed<key_type> k(
"key",ar,version);
hlp.push_back(flyweight(k.get()));
}
f=hlp[n];
}
} /* namespace serialization */
} /* namespace boost */
#endif
| {
"pile_set_name": "Github"
} |
/*****
* Tencent is pleased to support the open source community by making QMUI_iOS available.
* Copyright (C) 2016-2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
*****/
//
// QMUITableView.m
// qmui
//
// Created by QMUI Team on 14-7-2.
//
#import "QMUITableView.h"
#import "UITableView+QMUI.h"
#import "UIView+QMUI.h"
@implementation QMUITableView
@dynamic delegate;
@dynamic dataSource;
- (instancetype)initWithFrame:(CGRect)frame style:(UITableViewStyle)style {
if (self = [super initWithFrame:frame style:style]) {
[self didInitialize];
}
return self;
}
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
if (self = [super initWithCoder:aDecoder]) {
[self didInitialize];
}
return self;
}
- (void)didInitialize {
[self qmui_styledAsQMUITableView];
}
- (void)dealloc {
self.delegate = nil;
self.dataSource = nil;
}
// Always keep a non-nil tableFooterView so the trailing blank separator lines disappear when the list content does not fill one screen
- (void)setTableFooterView:(UIView *)tableFooterView {
if (!tableFooterView) {
tableFooterView = [[UIView alloc] init];
}
[super setTableFooterView:tableFooterView];
}
- (BOOL)touchesShouldCancelInContentView:(UIView *)view {
if ([self.delegate respondsToSelector:@selector(tableView:touchesShouldCancelInContentView:)]) {
return [self.delegate tableView:self touchesShouldCancelInContentView:view];
}
// By default YES is returned only when the view is not a UIControl; here we also return YES for UIButton.
// The reason is that the touch-event delay has been removed on this UITableView, and without this change
// a drag that starts with the finger on a UIControl could no longer scroll the table.
if ([view isKindOfClass:[UIControl class]]) {
if ([view isKindOfClass:[UIButton class]]) {
return YES;
} else {
return NO;
}
}
return YES;
}
@end
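// Usage sketch (illustrative, not part of this file). QMUITableView is meant as a
// drop-in replacement for UITableView, so a minimal setup looks like:
//
//   QMUITableView *tableView = [[QMUITableView alloc] initWithFrame:self.view.bounds
//                                                             style:UITableViewStylePlain];
//   tableView.delegate = self;   // optionally implement tableView:touchesShouldCancelInContentView:
//   [self.view addSubview:tableView];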
| {
"pile_set_name": "Github"
} |
import { db } from "../../lib/mongo"
import parse from "../../lib/parse"
class EmailSettingsService {
constructor() {
this.defaultSettings = {
host: "",
port: "",
user: "",
pass: "",
from_name: "",
from_address: "",
}
}
getEmailSettings() {
return db
.collection("emailSettings")
.findOne()
.then(settings => this.changeProperties(settings))
}
updateEmailSettings(data) {
const settings = this.getValidDocumentForUpdate(data)
return this.insertDefaultSettingsIfEmpty().then(() =>
db
.collection("emailSettings")
.updateOne(
{},
{
$set: settings,
},
{ upsert: true }
)
.then(res => this.getEmailSettings())
)
}
insertDefaultSettingsIfEmpty() {
return db
.collection("emailSettings")
.countDocuments({})
.then(count => {
if (count === 0) {
return db.collection("emailSettings").insertOne(this.defaultSettings)
}
})
}
getValidDocumentForUpdate(data) {
if (Object.keys(data).length === 0) {
return new Error("Required fields are missing")
}
const settings = {}
if (data.host !== undefined) {
settings.host = parse.getString(data.host).toLowerCase()
}
if (data.port !== undefined) {
settings.port = parse.getNumberIfPositive(data.port)
}
if (data.user !== undefined) {
settings.user = parse.getString(data.user)
}
if (data.pass !== undefined) {
settings.pass = parse.getString(data.pass)
}
if (data.from_name !== undefined) {
settings.from_name = parse.getString(data.from_name)
}
if (data.from_address !== undefined) {
settings.from_address = parse.getString(data.from_address)
}
return settings
}
changeProperties(settings) {
if (settings) {
delete settings._id
} else {
return this.defaultSettings
}
return settings
}
}
export default new EmailSettingsService()
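// Usage sketch (illustrative; the import path is an assumption, not taken from this file):
//
//   import EmailSettingsService from "./services/settings/email"
//   const settings = await EmailSettingsService.getEmailSettings()
//   await EmailSettingsService.updateEmailSettings({ host: "smtp.example.com", port: 587 })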
| {
"pile_set_name": "Github"
} |
package raft
import (
"log"
"os"
)
//------------------------------------------------------------------------------
//
// Variables
//
//------------------------------------------------------------------------------
const (
Debug = 1
Trace = 2
)
var logLevel int = 0
var logger *log.Logger
func init() {
logger = log.New(os.Stdout, "[raft]", log.Lmicroseconds)
}
//------------------------------------------------------------------------------
//
// Functions
//
//------------------------------------------------------------------------------
func LogLevel() int {
return logLevel
}
func SetLogLevel(level int) {
logLevel = level
}
//--------------------------------------
// Warnings
//--------------------------------------
// Prints to the standard logger. Arguments are handled in the manner of
// fmt.Print.
func warn(v ...interface{}) {
logger.Print(v...)
}
// Prints to the standard logger. Arguments are handled in the manner of
// fmt.Printf.
func warnf(format string, v ...interface{}) {
logger.Printf(format, v...)
}
// Prints to the standard logger. Arguments are handled in the manner of
// fmt.Println.
func warnln(v ...interface{}) {
logger.Println(v...)
}
//--------------------------------------
// Basic debugging
//--------------------------------------
// Prints to the standard logger if debug mode is enabled. Arguments
// are handled in the manner of fmt.Print.
func debug(v ...interface{}) {
if logLevel >= Debug {
logger.Print(v...)
}
}
// Prints to the standard logger if debug mode is enabled. Arguments
// are handled in the manner of fmt.Printf.
func debugf(format string, v ...interface{}) {
if logLevel >= Debug {
logger.Printf(format, v...)
}
}
// Prints to the standard logger if debug mode is enabled. Arguments
// are handled in the manner of fmt.Println.
func debugln(v ...interface{}) {
if logLevel >= Debug {
logger.Println(v...)
}
}
//--------------------------------------
// Trace-level debugging
//--------------------------------------
// Prints to the standard logger if trace debugging is enabled. Arguments
// are handled in the manner of fmt.Print.
func trace(v ...interface{}) {
if logLevel >= Trace {
logger.Print(v...)
}
}
// Prints to the standard logger if trace debugging is enabled. Arguments
// are handled in the manner of fmt.Printf.
func tracef(format string, v ...interface{}) {
if logLevel >= Trace {
logger.Printf(format, v...)
}
}
// Prints to the standard logger if trace debugging is enabled. Arguments
// are handled in the manner of fmt.Println.
func traceln(v ...interface{}) {
if logLevel >= Trace {
logger.Println(v...)
}
}
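// Usage sketch (illustrative, not part of the original file): code inside this
// package raises the level once and then calls the helpers directly, e.g.
//
//	SetLogLevel(Trace)
//	debugf("commit index advanced to %d", commitIndex) // commitIndex is a placeholder
//	traceln("append entries RPC sent")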
| {
"pile_set_name": "Github"
} |
/* ___DISCLAIMER___ */
/* clock.c: PLL, CCLK, PCLK controls */
#include <arm/NXP/LPC17xx/LPC17xx.h>
#include "config.h"
#include "clock.h"
#include "bits.h"
#include "uart.h"
void clock_disconnect() {
disconnectPLL0();
disablePLL0();
}
void clock_init() {
/* set flash access time to 5 clks (80<f<=100MHz) */
setFlashAccessTime(5);
/* setup PLL0 for 96MHz
First, disable and disconnect PLL0.
*/
clock_disconnect();
/* PLL is disabled and disconnected. setup PCLK NOW as it cannot be changed
reliably with PLL0 connected.
see:
http://ics.nxp.com/support/documents/microcontrollers/pdf/errata.lpc1754.pdf
*/
/* continue with PLL0 setup:
enable the xtal oscillator and wait for it to become stable
set the oscillator as clk source for PLL0
set PLL0 multiplier+predivider
enable PLL0
set CCLK divider
wait for PLL0 to lock
connect PLL0
done
*/
enableMainOsc();
setClkSrc(CLKSRC_MAINOSC);
setPLL0MultPrediv(CONFIG_CLK_MULT, CONFIG_CLK_PREDIV);
enablePLL0();
setCCLKDiv(CONFIG_CLK_CCLKDIV);
connectPLL0();
/* configure PLL1 for USB operation */
disconnectPLL1();
disablePLL1();
LPC_SC->PLL1CFG = 0x23;
enablePLL1();
connectPLL1();
}
void setFlashAccessTime(uint8_t clocks) {
LPC_SC->FLASHCFG=FLASHTIM(clocks);
}
void setPLL0MultPrediv(uint16_t mult, uint8_t prediv) {
LPC_SC->PLL0CFG=PLL_MULT(mult) | PLL_PREDIV(prediv);
PLL0feed();
}
void enablePLL0() {
LPC_SC->PLL0CON |= PLLE0;
PLL0feed();
}
void disablePLL0() {
LPC_SC->PLL0CON &= ~PLLE0;
PLL0feed();
}
void connectPLL0() {
while(!(LPC_SC->PLL0STAT & PLOCK0));
LPC_SC->PLL0CON |= PLLC0;
PLL0feed();
}
void disconnectPLL0() {
LPC_SC->PLL0CON &= ~PLLC0;
PLL0feed();
}
void setPLL1MultPrediv(uint16_t mult, uint8_t prediv) {
LPC_SC->PLL1CFG=PLL_MULT(mult) | PLL_PREDIV(prediv);
PLL1feed();
}
void enablePLL1() {
LPC_SC->PLL1CON |= PLLE1;
PLL1feed();
}
void disablePLL1() {
LPC_SC->PLL1CON &= ~PLLE1;
PLL1feed();
}
void connectPLL1() {
while(!(LPC_SC->PLL1STAT & PLOCK1));
LPC_SC->PLL1CON |= PLLC1;
PLL1feed();
}
void disconnectPLL1() {
LPC_SC->PLL1CON &= ~PLLC1;
PLL1feed();
}
void setCCLKDiv(uint8_t div) {
LPC_SC->CCLKCFG=CCLK_DIV(div);
}
void enableMainOsc() {
LPC_SC->SCS=OSCEN;
while(!(LPC_SC->SCS&OSCSTAT));
}
void disableMainOsc() {
LPC_SC->SCS=0;
}
void PLL0feed() {
LPC_SC->PLL0FEED=0xaa;
LPC_SC->PLL0FEED=0x55;
}
void PLL1feed() {
LPC_SC->PLL1FEED=0xaa;
LPC_SC->PLL1FEED=0x55;
}
void setClkSrc(uint8_t src) {
LPC_SC->CLKSRCSEL=src;
}
| {
"pile_set_name": "Github"
} |
// This module implements the QsciLexerVerilog class.
//
// Copyright (c) 2017 Riverbank Computing Limited <[email protected]>
//
// This file is part of QScintilla.
//
// This file may be used under the terms of the GNU General Public License
// version 3.0 as published by the Free Software Foundation and appearing in
// the file LICENSE included in the packaging of this file. Please review the
// following information to ensure the GNU General Public License version 3.0
// requirements will be met: http://www.gnu.org/copyleft/gpl.html.
//
// If you do not wish to use this file under the terms of the GPL version 3.0
// then you may purchase a commercial license. For more information contact
// [email protected].
//
// This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
// WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#include "Qsci/qscilexerverilog.h"
#include <qcolor.h>
#include <qfont.h>
#include <qsettings.h>
// The ctor.
QsciLexerVerilog::QsciLexerVerilog(QObject *parent)
: QsciLexer(parent),
fold_atelse(false), fold_comments(false), fold_compact(true),
fold_preproc(false), fold_atmodule(false)
{
}
// The dtor.
QsciLexerVerilog::~QsciLexerVerilog()
{
}
// Returns the language name.
const char *QsciLexerVerilog::language() const
{
return "Verilog";
}
// Returns the lexer name.
const char *QsciLexerVerilog::lexer() const
{
return "verilog";
}
// Return the style used for braces.
int QsciLexerVerilog::braceStyle() const
{
return Operator;
}
// Returns the set of keywords.
const char *QsciLexerVerilog::keywords(int set) const
{
if (set == 1)
return
"always and assign automatic begin buf bufif0 bufif1 case casex "
"casez cell cmos config deassign default defparam design disable "
"edge else end endcase endconfig endfunction endgenerate "
"endmodule endprimitiveendspecify endtable endtask event for "
"force forever fork function generate genvar highz0 highz1 if "
"ifnone incdir include initial inout input instance integer join "
"large liblist library localparam macromodule medium module nand "
"negedge nmos nor noshowcancelled not notif0 notif1 or output "
"parameter pmos posedge primitive pull0 pull1 pulldown pullup "
"pulsestyle_ondetect pulsestyle_onevent rcmos real realtime reg "
"release repeat rnmos rpmos rtran rtranif0 rtranif1 scalared "
"showcancelled signed small specify specparam strong0 strong1 "
"supply0 supply1 table task time tran tranif0 tranif1 tri tri0 "
"tri1 triand trior trireg unsigned use vectored wait wand weak0 "
"weak1 while wire wor xnor xor";
if (set == 3)
return
"$async$and$array $async$and$plane $async$nand$array "
"$async$nand$plane $async$nor$array $async$nor$plane "
"$async$or$array $async$or$plane $bitstoreal $countdrivers "
"$display $displayb $displayh $displayo $dist_chi_square "
"$dist_erlang $dist_exponential $dist_normal $dist_poisson "
"$dist_t $dist_uniform $dumpall $dumpfile $dumpflush $dumplimit "
"$dumpoff $dumpon $dumpportsall $dumpportsflush $dumpportslimit "
"$dumpportsoff $dumpportson $dumpvars $fclose $fdisplayh "
"$fdisplay $fdisplayf $fdisplayb $ferror $fflush $fgetc $fgets "
"$finish $fmonitorb $fmonitor $fmonitorf $fmonitorh $fopen "
"$fread $fscanf $fseek $fsscanf $fstrobe $fstrobebb $fstrobef "
"$fstrobeh $ftel $fullskew $fwriteb $fwritef $fwriteh $fwrite "
"$getpattern $history $hold $incsave $input $itor $key $list "
"$log $monitorb $monitorh $monitoroff $monitoron $monitor "
"$monitoro $nochange $nokey $nolog $period $printtimescale "
"$q_add $q_exam $q_full $q_initialize $q_remove $random "
"$readmemb $readmemh $readmemh $realtime $realtobits $recovery "
"$recrem $removal $reset_count $reset $reset_value $restart "
"$rewind $rtoi $save $scale $scope $sdf_annotate $setup "
"$setuphold $sformat $showscopes $showvariables $showvars "
"$signed $skew $sreadmemb $sreadmemh $stime $stop $strobeb "
"$strobe $strobeh $strobeo $swriteb $swriteh $swriteo $swrite "
"$sync$and$array $sync$and$plane $sync$nand$array "
"$sync$nand$plane $sync$nor$array $sync$nor$plane $sync$or$array "
"$sync$or$plane $test$plusargs $time $timeformat $timeskew "
"$ungetc $unsigned $value$plusargs $width $writeb $writeh $write "
"$writeo";
return 0;
}
// Return the string of characters that comprise a word.
const char *QsciLexerVerilog::wordCharacters() const
{
return "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_$";
}
// Returns the foreground colour of the text for a style.
QColor QsciLexerVerilog::defaultColor(int style) const
{
switch (style)
{
case Default:
case InactiveComment:
case InactiveCommentLine:
case InactiveCommentBang:
case InactiveNumber:
case InactiveKeyword:
case InactiveString:
case InactiveKeywordSet2:
case InactiveSystemTask:
case InactivePreprocessor:
case InactiveOperator:
case InactiveIdentifier:
case InactiveUnclosedString:
case InactiveUserKeywordSet:
case InactiveCommentKeyword:
case InactiveDeclareInputPort:
case InactiveDeclareOutputPort:
case InactiveDeclareInputOutputPort:
case InactivePortConnection:
return QColor(0x80, 0x80, 0x80);
case Comment:
case CommentLine:
return QColor(0x00, 0x7f, 0x00);
case CommentBang:
return QColor(0x3f, 0x7f, 0x3f);
case Number:
case KeywordSet2:
return QColor(0x00, 0x7f, 0x7f);
case Keyword:
case DeclareOutputPort:
return QColor(0x00, 0x00, 0x7f);
case String:
return QColor(0x7f, 0x00, 0x7f);
case SystemTask:
return QColor(0x80, 0x40, 0x20);
case Preprocessor:
return QColor(0x7f, 0x7f, 0x00);
case Operator:
return QColor(0x00, 0x70, 0x70);
case UnclosedString:
return QColor(0x00, 0x00, 0x00);
case UserKeywordSet:
case CommentKeyword:
return QColor(0x2a, 0x00, 0xff);
case DeclareInputPort:
return QColor(0x7f, 0x00, 0x00);
case DeclareInputOutputPort:
return QColor(0x00, 0x00, 0xff);
case PortConnection:
return QColor(0x00, 0x50, 0x32);
}
return QsciLexer::defaultColor(style);
}
// Returns the end-of-line fill for a style.
bool QsciLexerVerilog::defaultEolFill(int style) const
{
switch (style)
{
case CommentBang:
case UnclosedString:
case InactiveDefault:
case InactiveComment:
case InactiveCommentLine:
case InactiveCommentBang:
case InactiveNumber:
case InactiveKeyword:
case InactiveString:
case InactiveKeywordSet2:
case InactiveSystemTask:
case InactivePreprocessor:
case InactiveOperator:
case InactiveIdentifier:
case InactiveUnclosedString:
case InactiveUserKeywordSet:
case InactiveCommentKeyword:
case InactiveDeclareInputPort:
case InactiveDeclareOutputPort:
case InactiveDeclareInputOutputPort:
case InactivePortConnection:
return true;
}
return QsciLexer::defaultEolFill(style);
}
// Returns the font of the text for a style.
QFont QsciLexerVerilog::defaultFont(int style) const
{
QFont f;
switch (style)
{
case Comment:
case CommentLine:
case CommentBang:
case UserKeywordSet:
#if defined(Q_OS_WIN)
f = QFont("Comic Sans MS",9);
#elif defined(Q_OS_MAC)
f = QFont("Comic Sans MS", 12);
#else
f = QFont("Bitstream Vera Serif",9);
#endif
break;
case Keyword:
case PortConnection:
f = QsciLexer::defaultFont(style);
f.setBold(true);
break;
case InactiveDefault:
case InactiveComment:
case InactiveCommentLine:
case InactiveCommentBang:
case InactiveNumber:
case InactiveKeyword:
case InactiveString:
case InactiveKeywordSet2:
case InactiveSystemTask:
case InactivePreprocessor:
case InactiveOperator:
case InactiveIdentifier:
case InactiveUnclosedString:
case InactiveUserKeywordSet:
case InactiveCommentKeyword:
case InactiveDeclareInputPort:
case InactiveDeclareOutputPort:
case InactiveDeclareInputOutputPort:
case InactivePortConnection:
f = QsciLexer::defaultFont(style);
f.setItalic(true);
break;
default:
f = QsciLexer::defaultFont(style);
}
return f;
}
// Returns the user name of a style.
QString QsciLexerVerilog::description(int style) const
{
switch (style)
{
case Default:
return tr("Default");
case Comment:
return tr("Comment");
case CommentLine:
return tr("Line comment");
case CommentBang:
return tr("Bang comment");
case Number:
return tr("Number");
case Keyword:
return tr("Primary keywords and identifiers");
case String:
return tr("String");
case KeywordSet2:
return tr("Secondary keywords and identifiers");
case SystemTask:
return tr("System task");
case Preprocessor:
return tr("Preprocessor block");
case Operator:
return tr("Operator");
case Identifier:
return tr("Identifier");
case UnclosedString:
return tr("Unclosed string");
case UserKeywordSet:
return tr("User defined tasks and identifiers");
case CommentKeyword:
return tr("Keyword comment");
case InactiveCommentKeyword:
return tr("Inactive keyword comment");
case DeclareInputPort:
return tr("Input port declaration");
case InactiveDeclareInputPort:
return tr("Inactive input port declaration");
case DeclareOutputPort:
return tr("Output port declaration");
case InactiveDeclareOutputPort:
return tr("Inactive output port declaration");
case DeclareInputOutputPort:
return tr("Input/output port declaration");
case InactiveDeclareInputOutputPort:
return tr("Inactive input/output port declaration");
case PortConnection:
return tr("Port connection");
case InactivePortConnection:
return tr("Inactive port connection");
}
return QString();
}
// Returns the background colour of the text for a style.
QColor QsciLexerVerilog::defaultPaper(int style) const
{
switch (style)
{
case CommentBang:
return QColor(0xe0, 0xf0, 0xff);
case UnclosedString:
return QColor(0xe0, 0xc0, 0xe0);
case InactiveDefault:
case InactiveComment:
case InactiveCommentLine:
case InactiveCommentBang:
case InactiveNumber:
case InactiveKeyword:
case InactiveString:
case InactiveKeywordSet2:
case InactiveSystemTask:
case InactivePreprocessor:
case InactiveOperator:
case InactiveIdentifier:
case InactiveUnclosedString:
case InactiveUserKeywordSet:
case InactiveCommentKeyword:
case InactiveDeclareInputPort:
case InactiveDeclareOutputPort:
case InactiveDeclareInputOutputPort:
case InactivePortConnection:
return QColor(0xe0, 0xe0, 0xe0);
}
return QsciLexer::defaultPaper(style);
}
// Refresh all properties.
void QsciLexerVerilog::refreshProperties()
{
setAtElseProp();
setCommentProp();
setCompactProp();
setPreprocProp();
// We don't provide options for these as there doesn't seem much point in
// disabling them.
emit propertyChanged("lexer.verilog.track.preprocessor", "1");
emit propertyChanged("lexer.verilog.update.preprocessor", "1");
emit propertyChanged("lexer.verilog.portstyling", "1");
emit propertyChanged("lexer.verilog.allupperkeywords", "1");
}
// Read properties from the settings.
bool QsciLexerVerilog::readProperties(QSettings &qs,const QString &prefix)
{
fold_atelse = qs.value(prefix + "foldatelse", false).toBool();
fold_comments = qs.value(prefix + "foldcomments", false).toBool();
fold_compact = qs.value(prefix + "foldcompact", true).toBool();
fold_preproc = qs.value(prefix + "foldpreprocessor", false).toBool();
fold_atmodule = qs.value(prefix + "foldverilogflags", false).toBool();
return true;
}
// Write properties to the settings.
bool QsciLexerVerilog::writeProperties(QSettings &qs,const QString &prefix) const
{
qs.setValue(prefix + "foldatelse", fold_atelse);
qs.setValue(prefix + "foldcomments", fold_comments);
qs.setValue(prefix + "foldcompact", fold_compact);
qs.setValue(prefix + "foldpreprocessor", fold_preproc);
qs.setValue(prefix + "foldverilogflags", fold_atmodule);
return true;
}
// Set if else can be folded.
void QsciLexerVerilog::setFoldAtElse(bool fold)
{
fold_atelse = fold;
setAtElseProp();
}
// Set the "fold.at.else" property.
void QsciLexerVerilog::setAtElseProp()
{
emit propertyChanged("fold.at.else", (fold_atelse ? "1" : "0"));
}
// Set if comments can be folded.
void QsciLexerVerilog::setFoldComments(bool fold)
{
fold_comments = fold;
setCommentProp();
}
// Set the "fold.comment" property.
void QsciLexerVerilog::setCommentProp()
{
emit propertyChanged("fold.comment", (fold_comments ? "1" : "0"));
}
// Set if folds are compact
void QsciLexerVerilog::setFoldCompact(bool fold)
{
fold_compact = fold;
setCompactProp();
}
// Set the "fold.compact" property.
void QsciLexerVerilog::setCompactProp()
{
emit propertyChanged("fold.compact", (fold_compact ? "1" : "0"));
}
// Set if preprocessor blocks can be folded.
void QsciLexerVerilog::setFoldPreprocessor(bool fold)
{
fold_preproc = fold;
setPreprocProp();
}
// Set the "fold.preprocessor" property.
void QsciLexerVerilog::setPreprocProp()
{
emit propertyChanged("fold.preprocessor", (fold_preproc ? "1" : "0"));
}
// Set if modules can be folded.
void QsciLexerVerilog::setFoldAtModule(bool fold)
{
fold_atmodule = fold;
setAtModuleProp();
}
// Set the "fold.verilog.flags" property.
void QsciLexerVerilog::setAtModuleProp()
{
emit propertyChanged("fold.verilog.flags", (fold_atmodule ? "1" : "0"));
}
| {
"pile_set_name": "Github"
} |
/*
Package logrus is a structured logger for Go, completely API compatible with the standard library logger.
The simplest way to use Logrus is simply the package-level exported logger:
package main
import (
log "github.com/sirupsen/logrus"
)
func main() {
log.WithFields(log.Fields{
"animal": "walrus",
"number": 1,
"size": 10,
}).Info("A walrus appears")
}
Output:
time="2015-09-07T08:48:33Z" level=info msg="A walrus appears" animal=walrus number=1 size=10
For a full guide visit https://github.com/sirupsen/logrus
*/
package logrus
| {
"pile_set_name": "Github"
} |
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
var chars = require("./chars");
var CssTokenType;
(function (CssTokenType) {
CssTokenType[CssTokenType["EOF"] = 0] = "EOF";
CssTokenType[CssTokenType["String"] = 1] = "String";
CssTokenType[CssTokenType["Comment"] = 2] = "Comment";
CssTokenType[CssTokenType["Identifier"] = 3] = "Identifier";
CssTokenType[CssTokenType["Number"] = 4] = "Number";
CssTokenType[CssTokenType["IdentifierOrNumber"] = 5] = "IdentifierOrNumber";
CssTokenType[CssTokenType["AtKeyword"] = 6] = "AtKeyword";
CssTokenType[CssTokenType["Character"] = 7] = "Character";
CssTokenType[CssTokenType["Whitespace"] = 8] = "Whitespace";
CssTokenType[CssTokenType["Invalid"] = 9] = "Invalid";
})(CssTokenType = exports.CssTokenType || (exports.CssTokenType = {}));
var CssLexerMode;
(function (CssLexerMode) {
CssLexerMode[CssLexerMode["ALL"] = 0] = "ALL";
CssLexerMode[CssLexerMode["ALL_TRACK_WS"] = 1] = "ALL_TRACK_WS";
CssLexerMode[CssLexerMode["SELECTOR"] = 2] = "SELECTOR";
CssLexerMode[CssLexerMode["PSEUDO_SELECTOR"] = 3] = "PSEUDO_SELECTOR";
CssLexerMode[CssLexerMode["PSEUDO_SELECTOR_WITH_ARGUMENTS"] = 4] = "PSEUDO_SELECTOR_WITH_ARGUMENTS";
CssLexerMode[CssLexerMode["ATTRIBUTE_SELECTOR"] = 5] = "ATTRIBUTE_SELECTOR";
CssLexerMode[CssLexerMode["AT_RULE_QUERY"] = 6] = "AT_RULE_QUERY";
CssLexerMode[CssLexerMode["MEDIA_QUERY"] = 7] = "MEDIA_QUERY";
CssLexerMode[CssLexerMode["BLOCK"] = 8] = "BLOCK";
CssLexerMode[CssLexerMode["KEYFRAME_BLOCK"] = 9] = "KEYFRAME_BLOCK";
CssLexerMode[CssLexerMode["STYLE_BLOCK"] = 10] = "STYLE_BLOCK";
CssLexerMode[CssLexerMode["STYLE_VALUE"] = 11] = "STYLE_VALUE";
CssLexerMode[CssLexerMode["STYLE_VALUE_FUNCTION"] = 12] = "STYLE_VALUE_FUNCTION";
CssLexerMode[CssLexerMode["STYLE_CALC_FUNCTION"] = 13] = "STYLE_CALC_FUNCTION";
})(CssLexerMode = exports.CssLexerMode || (exports.CssLexerMode = {}));
var LexedCssResult = (function () {
function LexedCssResult(error, token) {
this.error = error;
this.token = token;
}
return LexedCssResult;
}());
exports.LexedCssResult = LexedCssResult;
function generateErrorMessage(input, message, errorValue, index, row, column) {
return message + " at column " + row + ":" + column + " in expression [" +
findProblemCode(input, errorValue, index, column) + ']';
}
exports.generateErrorMessage = generateErrorMessage;
function findProblemCode(input, errorValue, index, column) {
var endOfProblemLine = index;
var current = charCode(input, index);
while (current > 0 && !isNewline(current)) {
current = charCode(input, ++endOfProblemLine);
}
var choppedString = input.substring(0, endOfProblemLine);
var pointerPadding = '';
for (var i = 0; i < column; i++) {
pointerPadding += ' ';
}
var pointerString = '';
for (var i = 0; i < errorValue.length; i++) {
pointerString += '^';
}
return choppedString + '\n' + pointerPadding + pointerString + '\n';
}
exports.findProblemCode = findProblemCode;
var CssToken = (function () {
function CssToken(index, column, line, type, strValue) {
this.index = index;
this.column = column;
this.line = line;
this.type = type;
this.strValue = strValue;
this.numValue = charCode(strValue, 0);
}
return CssToken;
}());
exports.CssToken = CssToken;
var CssLexer = (function () {
function CssLexer() {
}
CssLexer.prototype.scan = function (text, trackComments) {
if (trackComments === void 0) { trackComments = false; }
return new CssScanner(text, trackComments);
};
return CssLexer;
}());
exports.CssLexer = CssLexer;
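// Usage sketch (the names come from this file; the CSS input string is illustrative):
//
//   var scanner = new CssLexer().scan('div { color: red; }');
//   scanner.setMode(CssLexerMode.SELECTOR);
//   var result = scanner.scan();   // LexedCssResult { error: null, token: CssToken }
//   // result.token.strValue === 'div', result.token.type === CssTokenType.Identifier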
var CssScannerError = (function () {
function CssScannerError(token, message) {
this.token = token;
this.rawMessage = message;
}
CssScannerError.prototype.toString = function () { return this.message; };
return CssScannerError;
}());
exports.CssScannerError = CssScannerError;
function _trackWhitespace(mode) {
switch (mode) {
case CssLexerMode.SELECTOR:
case CssLexerMode.PSEUDO_SELECTOR:
case CssLexerMode.ALL_TRACK_WS:
case CssLexerMode.STYLE_VALUE:
return true;
default:
return false;
}
}
var CssScanner = (function () {
function CssScanner(input, _trackComments) {
if (_trackComments === void 0) { _trackComments = false; }
this.input = input;
this._trackComments = _trackComments;
this.length = 0;
this.index = -1;
this.column = -1;
this.line = 0;
this._currentMode = CssLexerMode.BLOCK;
this._currentError = null;
this.length = this.input.length;
this.peekPeek = this.peekAt(0);
this.advance();
}
CssScanner.prototype.getMode = function () { return this._currentMode; };
CssScanner.prototype.setMode = function (mode) {
if (this._currentMode != mode) {
if (_trackWhitespace(this._currentMode) && !_trackWhitespace(mode)) {
this.consumeWhitespace();
}
this._currentMode = mode;
}
};
CssScanner.prototype.advance = function () {
if (isNewline(this.peek)) {
this.column = 0;
this.line++;
}
else {
this.column++;
}
this.index++;
this.peek = this.peekPeek;
this.peekPeek = this.peekAt(this.index + 1);
};
CssScanner.prototype.peekAt = function (index) {
return index >= this.length ? chars.$EOF : this.input.charCodeAt(index);
};
CssScanner.prototype.consumeEmptyStatements = function () {
this.consumeWhitespace();
while (this.peek == chars.$SEMICOLON) {
this.advance();
this.consumeWhitespace();
}
};
CssScanner.prototype.consumeWhitespace = function () {
while (chars.isWhitespace(this.peek) || isNewline(this.peek)) {
this.advance();
if (!this._trackComments && isCommentStart(this.peek, this.peekPeek)) {
this.advance();
this.advance();
while (!isCommentEnd(this.peek, this.peekPeek)) {
if (this.peek == chars.$EOF) {
this.error('Unterminated comment');
}
this.advance();
}
this.advance();
this.advance();
}
}
};
CssScanner.prototype.consume = function (type, value) {
if (value === void 0) { value = null; }
var mode = this._currentMode;
this.setMode(_trackWhitespace(mode) ? CssLexerMode.ALL_TRACK_WS : CssLexerMode.ALL);
var previousIndex = this.index;
var previousLine = this.line;
var previousColumn = this.column;
var next;
var output = this.scan();
if (output) {
if (output.error) {
this.setMode(mode);
return output;
}
next = output.token;
}
if (!next) {
next = new CssToken(this.index, this.column, this.line, CssTokenType.EOF, 'end of file');
}
var isMatchingType = false;
if (type == CssTokenType.IdentifierOrNumber) {
isMatchingType = next.type == CssTokenType.Number || next.type == CssTokenType.Identifier;
}
else {
isMatchingType = next.type == type;
}
this.setMode(mode);
var error = null;
if (!isMatchingType || (value && value != next.strValue)) {
var errorMessage = CssTokenType[next.type] + ' does not match expected ' +
CssTokenType[type] + ' value';
if (value) {
errorMessage += ' ("' + next.strValue + '" should match "' + value + '")';
}
error = new CssScannerError(next, generateErrorMessage(this.input, errorMessage, next.strValue, previousIndex, previousLine, previousColumn));
}
return new LexedCssResult(error, next);
};
CssScanner.prototype.scan = function () {
var trackWS = _trackWhitespace(this._currentMode);
if (this.index == 0 && !trackWS) {
this.consumeWhitespace();
}
var token = this._scan();
if (token == null)
return null;
var error = this._currentError;
this._currentError = null;
if (!trackWS) {
this.consumeWhitespace();
}
return new LexedCssResult(error, token);
};
CssScanner.prototype._scan = function () {
var peek = this.peek;
var peekPeek = this.peekPeek;
if (peek == chars.$EOF)
return null;
if (isCommentStart(peek, peekPeek)) {
var commentToken = this.scanComment();
if (this._trackComments) {
return commentToken;
}
}
if (_trackWhitespace(this._currentMode) && (chars.isWhitespace(peek) || isNewline(peek))) {
return this.scanWhitespace();
}
peek = this.peek;
peekPeek = this.peekPeek;
if (peek == chars.$EOF)
return null;
if (isStringStart(peek, peekPeek)) {
return this.scanString();
}
if (this._currentMode == CssLexerMode.STYLE_VALUE_FUNCTION) {
return this.scanCssValueFunction();
}
var isModifier = peek == chars.$PLUS || peek == chars.$MINUS;
var digitA = isModifier ? false : chars.isDigit(peek);
var digitB = chars.isDigit(peekPeek);
if (digitA || (isModifier && (peekPeek == chars.$PERIOD || digitB)) ||
(peek == chars.$PERIOD && digitB)) {
return this.scanNumber();
}
if (peek == chars.$AT) {
return this.scanAtExpression();
}
if (isIdentifierStart(peek, peekPeek)) {
return this.scanIdentifier();
}
if (isValidCssCharacter(peek, this._currentMode)) {
return this.scanCharacter();
}
return this.error("Unexpected character [" + String.fromCharCode(peek) + "]");
};
CssScanner.prototype.scanComment = function () {
if (this.assertCondition(isCommentStart(this.peek, this.peekPeek), 'Expected comment start value')) {
return null;
}
var start = this.index;
var startingColumn = this.column;
var startingLine = this.line;
this.advance();
this.advance();
while (!isCommentEnd(this.peek, this.peekPeek)) {
if (this.peek == chars.$EOF) {
this.error('Unterminated comment');
}
this.advance();
}
this.advance();
this.advance();
var str = this.input.substring(start, this.index);
return new CssToken(start, startingColumn, startingLine, CssTokenType.Comment, str);
};
CssScanner.prototype.scanWhitespace = function () {
var start = this.index;
var startingColumn = this.column;
var startingLine = this.line;
while (chars.isWhitespace(this.peek) && this.peek != chars.$EOF) {
this.advance();
}
var str = this.input.substring(start, this.index);
return new CssToken(start, startingColumn, startingLine, CssTokenType.Whitespace, str);
};
CssScanner.prototype.scanString = function () {
if (this.assertCondition(isStringStart(this.peek, this.peekPeek), 'Unexpected non-string starting value')) {
return null;
}
var target = this.peek;
var start = this.index;
var startingColumn = this.column;
var startingLine = this.line;
var previous = target;
this.advance();
while (!isCharMatch(target, previous, this.peek)) {
if (this.peek == chars.$EOF || isNewline(this.peek)) {
this.error('Unterminated quote');
}
previous = this.peek;
this.advance();
}
if (this.assertCondition(this.peek == target, 'Unterminated quote')) {
return null;
}
this.advance();
var str = this.input.substring(start, this.index);
return new CssToken(start, startingColumn, startingLine, CssTokenType.String, str);
};
CssScanner.prototype.scanNumber = function () {
var start = this.index;
var startingColumn = this.column;
if (this.peek == chars.$PLUS || this.peek == chars.$MINUS) {
this.advance();
}
var periodUsed = false;
while (chars.isDigit(this.peek) || this.peek == chars.$PERIOD) {
if (this.peek == chars.$PERIOD) {
if (periodUsed) {
this.error('Unexpected use of a second period value');
}
periodUsed = true;
}
this.advance();
}
var strValue = this.input.substring(start, this.index);
return new CssToken(start, startingColumn, this.line, CssTokenType.Number, strValue);
};
CssScanner.prototype.scanIdentifier = function () {
if (this.assertCondition(isIdentifierStart(this.peek, this.peekPeek), 'Expected identifier starting value')) {
return null;
}
var start = this.index;
var startingColumn = this.column;
while (isIdentifierPart(this.peek)) {
this.advance();
}
var strValue = this.input.substring(start, this.index);
return new CssToken(start, startingColumn, this.line, CssTokenType.Identifier, strValue);
};
CssScanner.prototype.scanCssValueFunction = function () {
var start = this.index;
var startingColumn = this.column;
var parenBalance = 1;
while (this.peek != chars.$EOF && parenBalance > 0) {
this.advance();
if (this.peek == chars.$LPAREN) {
parenBalance++;
}
else if (this.peek == chars.$RPAREN) {
parenBalance--;
}
}
var strValue = this.input.substring(start, this.index);
return new CssToken(start, startingColumn, this.line, CssTokenType.Identifier, strValue);
};
CssScanner.prototype.scanCharacter = function () {
var start = this.index;
var startingColumn = this.column;
if (this.assertCondition(isValidCssCharacter(this.peek, this._currentMode), charStr(this.peek) + ' is not a valid CSS character')) {
return null;
}
var c = this.input.substring(start, start + 1);
this.advance();
return new CssToken(start, startingColumn, this.line, CssTokenType.Character, c);
};
CssScanner.prototype.scanAtExpression = function () {
if (this.assertCondition(this.peek == chars.$AT, 'Expected @ value')) {
return null;
}
var start = this.index;
var startingColumn = this.column;
this.advance();
if (isIdentifierStart(this.peek, this.peekPeek)) {
var ident = this.scanIdentifier();
var strValue = '@' + ident.strValue;
return new CssToken(start, startingColumn, this.line, CssTokenType.AtKeyword, strValue);
}
else {
return this.scanCharacter();
}
};
CssScanner.prototype.assertCondition = function (status, errorMessage) {
if (!status) {
this.error(errorMessage);
return true;
}
return false;
};
CssScanner.prototype.error = function (message, errorTokenValue, doNotAdvance) {
if (errorTokenValue === void 0) { errorTokenValue = null; }
if (doNotAdvance === void 0) { doNotAdvance = false; }
var index = this.index;
var column = this.column;
var line = this.line;
errorTokenValue =
errorTokenValue ? errorTokenValue : String.fromCharCode(this.peek);
var invalidToken = new CssToken(index, column, line, CssTokenType.Invalid, errorTokenValue);
var errorMessage = generateErrorMessage(this.input, message, errorTokenValue, index, line, column);
if (!doNotAdvance) {
this.advance();
}
this._currentError = new CssScannerError(invalidToken, errorMessage);
return invalidToken;
};
return CssScanner;
}());
exports.CssScanner = CssScanner;
function isCharMatch(target, previous, code) {
return code == target && previous != chars.$BACKSLASH;
}
function isCommentStart(code, next) {
return code == chars.$SLASH && next == chars.$STAR;
}
function isCommentEnd(code, next) {
return code == chars.$STAR && next == chars.$SLASH;
}
function isStringStart(code, next) {
var target = code;
if (target == chars.$BACKSLASH) {
target = next;
}
return target == chars.$DQ || target == chars.$SQ;
}
function isIdentifierStart(code, next) {
var target = code;
if (target == chars.$MINUS) {
target = next;
}
return chars.isAsciiLetter(target) || target == chars.$BACKSLASH || target == chars.$MINUS ||
target == chars.$_;
}
function isIdentifierPart(target) {
return chars.isAsciiLetter(target) || target == chars.$BACKSLASH || target == chars.$MINUS ||
target == chars.$_ || chars.isDigit(target);
}
function isValidPseudoSelectorCharacter(code) {
switch (code) {
case chars.$LPAREN:
case chars.$RPAREN:
return true;
default:
return false;
}
}
function isValidKeyframeBlockCharacter(code) {
return code == chars.$PERCENT;
}
function isValidAttributeSelectorCharacter(code) {
switch (code) {
case chars.$$:
case chars.$PIPE:
case chars.$CARET:
case chars.$TILDA:
case chars.$STAR:
case chars.$EQ:
return true;
default:
return false;
}
}
function isValidSelectorCharacter(code) {
switch (code) {
case chars.$HASH:
case chars.$PERIOD:
case chars.$TILDA:
case chars.$STAR:
case chars.$PLUS:
case chars.$GT:
case chars.$COLON:
case chars.$PIPE:
case chars.$COMMA:
case chars.$LBRACKET:
case chars.$RBRACKET:
return true;
default:
return false;
}
}
function isValidStyleBlockCharacter(code) {
switch (code) {
case chars.$HASH:
case chars.$SEMICOLON:
case chars.$COLON:
case chars.$PERCENT:
case chars.$SLASH:
case chars.$BACKSLASH:
case chars.$BANG:
case chars.$PERIOD:
case chars.$LPAREN:
case chars.$RPAREN:
return true;
default:
return false;
}
}
function isValidMediaQueryRuleCharacter(code) {
switch (code) {
case chars.$LPAREN:
case chars.$RPAREN:
case chars.$COLON:
case chars.$PERCENT:
case chars.$PERIOD:
return true;
default:
return false;
}
}
function isValidAtRuleCharacter(code) {
switch (code) {
case chars.$LPAREN:
case chars.$RPAREN:
case chars.$COLON:
case chars.$PERCENT:
case chars.$PERIOD:
case chars.$SLASH:
case chars.$BACKSLASH:
case chars.$HASH:
case chars.$EQ:
case chars.$QUESTION:
case chars.$AMPERSAND:
case chars.$STAR:
case chars.$COMMA:
case chars.$MINUS:
case chars.$PLUS:
return true;
default:
return false;
}
}
function isValidStyleFunctionCharacter(code) {
switch (code) {
case chars.$PERIOD:
case chars.$MINUS:
case chars.$PLUS:
case chars.$STAR:
case chars.$SLASH:
case chars.$LPAREN:
case chars.$RPAREN:
case chars.$COMMA:
return true;
default:
return false;
}
}
function isValidBlockCharacter(code) {
return code == chars.$AT;
}
function isValidCssCharacter(code, mode) {
switch (mode) {
case CssLexerMode.ALL:
case CssLexerMode.ALL_TRACK_WS:
return true;
case CssLexerMode.SELECTOR:
return isValidSelectorCharacter(code);
case CssLexerMode.PSEUDO_SELECTOR_WITH_ARGUMENTS:
return isValidPseudoSelectorCharacter(code);
case CssLexerMode.ATTRIBUTE_SELECTOR:
return isValidAttributeSelectorCharacter(code);
case CssLexerMode.MEDIA_QUERY:
return isValidMediaQueryRuleCharacter(code);
case CssLexerMode.AT_RULE_QUERY:
return isValidAtRuleCharacter(code);
case CssLexerMode.KEYFRAME_BLOCK:
return isValidKeyframeBlockCharacter(code);
case CssLexerMode.STYLE_BLOCK:
case CssLexerMode.STYLE_VALUE:
return isValidStyleBlockCharacter(code);
case CssLexerMode.STYLE_CALC_FUNCTION:
return isValidStyleFunctionCharacter(code);
case CssLexerMode.BLOCK:
return isValidBlockCharacter(code);
default:
return false;
}
}
function charCode(input, index) {
return index >= input.length ? chars.$EOF : input.charCodeAt(index);
}
function charStr(code) {
return String.fromCharCode(code);
}
function isNewline(code) {
switch (code) {
case chars.$FF:
case chars.$CR:
case chars.$LF:
case chars.$VTAB:
return true;
default:
return false;
}
}
exports.isNewline = isNewline;
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 8b83134143223104c9bc3865a565cab3
PrefabImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
<?php defined('BX_DOL') or die('hack attempt');
/**
* Copyright (c) UNA, Inc - https://una.io
* MIT License - https://opensource.org/licenses/MIT
*
* @defgroup UnaCore UNA Core
* @{
*/
class BxDolModuleQuery extends BxDolDb implements iBxDolSingleton
{
protected function __construct()
{
if (isset($GLOBALS['bxDolClasses'][get_class($this)]))
trigger_error ('Multiple instances are not allowed for the class: ' . get_class($this), E_USER_ERROR);
parent::__construct();
}
/**
* Prevent cloning the instance
*/
public function __clone()
{
if (isset($GLOBALS['bxDolClasses'][get_class($this)]))
trigger_error('Clone is not allowed for the class: ' . get_class($this), E_USER_ERROR);
}
/**
* Get singleton instance of the class
*/
public static function getInstance()
{
$sClass = __CLASS__;
if(!isset($GLOBALS['bxDolClasses'][__CLASS__]))
$GLOBALS['bxDolClasses'][__CLASS__] = new $sClass();
return $GLOBALS['bxDolClasses'][__CLASS__];
}
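/* Usage sketch for the singleton accessor above (illustrative; 'bx_posts' is a
 * placeholder module name, not taken from this file):
 *   $oQuery = BxDolModuleQuery::getInstance();
 *   $aModule = $oQuery->getModuleByName('bx_posts');
 *   $bActive = $oQuery->isEnabledByName('bx_posts');
 */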
function getModuleById($iId, $bFromCache = true)
{
$sSql = $this->prepare("SELECT * FROM `sys_modules` WHERE `id`=? LIMIT 1", $iId);
return $bFromCache ? $this->fromMemory('sys_modules_' . $iId, 'getRow', $sSql) : $this->getRow($sSql);
}
function getModuleByName($sName, $bFromCache = true)
{
$sSql = $this->prepare("SELECT * FROM `sys_modules` WHERE `name`=? LIMIT 1", $sName);
return $bFromCache ? $this->fromMemory('sys_modules_' . $sName, 'getRow', $sSql) : $this->getRow($sSql);
}
function getModuleByUri($sUri, $bFromCache = true)
{
$sSql = $this->prepare("SELECT * FROM `sys_modules` WHERE `uri`=? LIMIT 1", $sUri);
return $bFromCache ? $this->fromMemory('sys_modules_' . $sUri, 'getRow', $sSql) : $this->getRow($sSql);
}
function enableModuleByUri($sUri)
{
$sSql = $this->prepare("UPDATE `sys_modules` SET `enabled`='1' WHERE `uri`=? LIMIT 1", $sUri);
return (int)$this->query($sSql) > 0;
}
function disableModuleByUri($sUri)
{
$sSql = $this->prepare("UPDATE `sys_modules` SET `enabled`='0' WHERE `uri`=? LIMIT 1", $sUri);
return (int)$this->query($sSql) > 0;
}
function setModulePendingUninstall($sUri, $bPendingUninstall)
{
$sSql = $this->prepare("UPDATE `sys_modules` SET `pending_uninstall` = ? WHERE `uri` = ? LIMIT 1", $bPendingUninstall ? 1 : 0, $sUri);
return $this->query($sSql);
}
function isModule($sUri)
{
$sSql = $this->prepare("SELECT `id` FROM `sys_modules` WHERE `uri`=? LIMIT 1", $sUri);
return (int)$this->getOne($sSql) > 0;
}
function isModuleByName($sName)
{
$sSql = $this->prepare("SELECT `id` FROM `sys_modules` WHERE `name`=? LIMIT 1", $sName);
return (int)$this->getOne($sSql) > 0;
}
function isModuleParamsUsed($sUri, $sPath, $sPrefixDb, $sPrefixClass)
{
$sSql = "SELECT `id` FROM `sys_modules` WHERE `uri`='" . $sUri . "' || `path`='" . $sPath . "' || `db_prefix`='" . $sPrefixDb . "' || `class_prefix`='" . $sPrefixClass . "' LIMIT 1";
return (int)$this->getOne($sSql) > 0;
}
function isEnabled($sUri)
{
$sSql = $this->prepare("SELECT `id` FROM `sys_modules` WHERE `uri`=? AND `enabled`='1' LIMIT 1", $sUri);
return (int)$this->getOne($sSql) > 0;
}
function isEnabledByName($sName)
{
$sSql = $this->prepare("SELECT `id` FROM `sys_modules` WHERE `name`=? AND `enabled`='1' LIMIT 1", $sName);
return (int)$this->getOne($sSql) > 0;
}
function getModules()
{
$sSql = "SELECT * FROM `sys_modules` ORDER BY `title`";
return $this->fromMemory('sys_modules', 'getAll', $sSql);
}
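    // e.g. getModulesBy(array('type' => 'modules', 'active' => 1)) returns all enabled modules.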
function getModulesBy($aParams = array(), $bFromCache = true)
{
$aMethod = array('name' => 'getAll', 'params' => array(0 => 'query'));
$sPostfix = $sWhereClause = $sOrderByClause = "";
$aBindings = array();
switch($aParams['type']) {
case 'type':
if(!is_array($aParams['value']))
$aParams['value'] = array($aParams['value']);
$sPostfix .= '_type_' . implode('_', $aParams['value']);
$sWhereClause .= " AND `type` IN (" . $this->implode_escape($aParams['value']) . ")";
break;
case 'modules':
$sPostfix .= '_modules';
$aBindings['type'] = BX_DOL_MODULE_TYPE_MODULE;
$sWhereClause .= " AND `type`=:type";
break;
case 'languages':
$sPostfix .= '_languages';
$aBindings['type'] = BX_DOL_MODULE_TYPE_LANGUAGE;
$sWhereClause .= " AND `type`=:type";
break;
case 'templates':
$sPostfix .= '_templates';
$aBindings['type'] = BX_DOL_MODULE_TYPE_TEMPLATE;
$sWhereClause .= " AND `type`=:type";
break;
case 'path_and_uri':
$sPostfix .= '_path_and_uri_' . $aParams['path'] . '_' . $aParams['uri'];
$aMethod['name'] = 'getRow';
$aBindings = array_merge($aBindings, array(
'path' => $aParams['path'],
'uri' => $aParams['uri']
));
$sWhereClause .= " AND `path`=:path AND `uri`=:uri";
break;
case 'all_pairs_name_uri':
$sPostfix .= 'all_pairs_name_uri';
$aMethod['name'] = 'getPairs';
$aMethod['params'][1] = 'name';
$aMethod['params'][2] = 'uri';
break;
case 'all':
break;
}
if(isset($aParams['active'])) {
$sPostfix .= "_active";
$aBindings['enabled'] = (int)$aParams['active'];
$sWhereClause .= " AND `enabled`=:enabled";
}
$sOrderByClause = " ORDER BY " . (isset($aParams['order_by']) ? $aParams['order_by'] : '`title`');
$aMethod['params'][0] = "SELECT
`id`,
`type`,
`name`,
`title`,
`vendor`,
`version`,
`help_url`,
`path`,
`uri`,
`class_prefix`,
`db_prefix`,
`lang_category`,
`date`,
`enabled`
FROM `sys_modules`
WHERE 1 " . $sWhereClause . $sOrderByClause;
$aMethod['params'][] = $aBindings;
if(!$bFromCache || empty($sPostfix))
return call_user_func_array(array($this, $aMethod['name']), $aMethod['params']);
return call_user_func_array(array($this, 'fromMemory'), array_merge(array('sys_modules' . $sPostfix, $aMethod['name']), $aMethod['params']));
}
function getModulesUri()
{
return $this->fromMemory('sys_modules_uri', 'getColumn', 'SELECT `uri` FROM `sys_modules` ORDER BY `uri`');
}
function getDependent($sName, $sUri)
{
return $this->getAll("SELECT `id`, `title`, `enabled` FROM `sys_modules` WHERE (`dependencies` LIKE " . $this->escape('%' . $sName . '%') . " OR `dependencies` LIKE " . $this->escape('%' . $sUri . '%') . ") AND `enabled`='1'");
}
public function updateModule($aParamsSet, $aParamsWhere = array())
{
if(empty($aParamsSet))
return false;
$sWhereClause = !empty($aParamsWhere) ? $this->arrayToSQL($aParamsWhere, " AND ") : "1";
$sSql = "UPDATE `sys_modules` SET " . $this->arrayToSQL($aParamsSet) . " WHERE " . $sWhereClause;
return $this->query($sSql);
}
}
/** @} */
| {
"pile_set_name": "Github"
} |
<header class="header-wrapper">
<nav class="inner">
<div class="title">
<a href="/">
<img class="logo" src="<%- url_for(theme.profile.logo) %>" />
</a>
</div>
<ul class="menu">
<% for (var i in theme.menu){ %>
<% if ( i == 'search' ){%>
<li class="item">
<a class="link" id="menu-search">
<i class="iconfont icon-<%-i %>" />
</a>
        </li>
<% } else {%>
<li class="item">
<a class="link" id="menu-<%-i%>" href="<%- url_for(theme.menu[i]) %>">
<i class="iconfont icon-<%-i %>" />
</a>
</li>
<% } %>
<% } %>
</ul>
</nav>
</header>
<header class="mobile-header-wrapper">
<i id="mobile-toggle" class="iconfont icon-menu mobile-toggle"></i>
</header> | {
"pile_set_name": "Github"
} |
# Exercise 1: The Text Editor, vim
> Original: [Exercise 1. Text Editor, The: vim](https://archive.fo/5vf0X)
> Translator: [飞龙](https://github.com/wizardforcel)
> License: [CC BY-NC-SA 4.0](http://creativecommons.org/licenses/by-nc-sa/4.0/)
> Proudly produced with [Google Translate](https://translate.google.cn/)
In Linux, as in any Unix-like operating system, everything is just a file. And the Unix philosophy states that configuration files must be human-readable and editable. In almost all cases they are plain text. So, first of all, you have to learn how to edit text files.
For that, I strongly recommend you learn the basics of vim, one of the most powerful tools for working with text in Linux. Vim is a reimplementation of [vi](http://en.wikipedia.org/wiki/Vi), which was written by Bill Joy in 1976. vi implemented a concept so successful that even Microsoft Visual Studio 2012 has a [plugin](http://visualstudiogallery.msdn.microsoft.com/59ca71b3-a4a3-46ca-8fe1-0e90e3f79329/) providing a compatibility mode with this editor, which is more than 35 years old. You can play with it here ([this is real Linux running in your browser](https://bellard.org/jslinux/vm.html?url=https://bellard.org/jslinux/buildroot-x86.cfg)). Once you are done, go get my virtual machine.
If I still haven't managed to convince you, you can learn [nano](http://www.howtogeek.com/howto/42980/the-beginners-guide-to-nano-the-linux-command-line-text-editor/) instead. But at least give vim a try.
Now, log into `vm1` and type:
```
vim hello.txt
```
You should see:
```
Hello, brave adventurer!
~
~
~
~
~
~
~
~
~
~
~
~
~
"hello.txt" [New File] 0,0-1 All
```
There is a joke that vim has two modes - "beep repeatedly" and "break everything". Well, that is quite true if you don't know how to use vim, because vim is a modal text editor. The modes are:
+ NORMAL mode: move the cursor and perform text operations such as delete, copy and paste.
+ INSERT mode: type text.
> Translator's note: there is also a command mode, used for generating truly random strings (just kidding).
This gives newcomers a real headache, because they try to avoid NORMAL mode as much as possible. That is wrong, so here is the correct outline for working in vim:
```
start vim
while editing is not finished, repeat
navigate to desired position in NORMAL mode
enter INSERT mode by pressing i
type text
exit INSERT mode by pressing <ESCAPE>
when editing is finished, type :wq
```
The most important point is to stay in NORMAL mode almost all the time, entering INSERT mode only briefly and leaving it right away. Used this way, vim effectively has just one mode, and that mode is NORMAL.
Now let's try it. Remember: press `i` to enter INSERT mode and `<ESCAPE>` to return to NORMAL mode. Type the following (press `<ENTER>` at the end of each line):
```
iRoses are red
Linux is scary
<ESCAPE>
```
This is what you should see:
```
Roses are red
Linux is scary
~
~
~
~
~
~
~
~
~
~
~
~
~
4,17 All
```
Now here is the list of commands for moving the cursor and editing in NORMAL mode:
+ `h` - move left
+ `j` - move down
+ `k` - move up
+ `l` - move right
+ `i` - enter INSERT mode
+ `o` - insert a new line below the cursor and enter INSERT mode
+ `<ESCAPE>` - leave INSERT mode
+ `x` - delete the character under the cursor
+ `dd` - delete a line
+ `:wq` - write the changes to the file and quit. Yes, that's right: a colon, followed by `wq` and `<ENTER>`.
+ `:q!` - quit without making any changes to the file.
That's enough. Now, place the cursor on the first line and type:
```
oViolets are blue<ESCAPE>
```
After that, place the cursor on the `Linux is scary` line and type:
```
oBut I'm scary too<ESCAPE>
```
You should see:
```
Roses are red
Violets are blue
Linux is scary
But I'm scary too
~
~
~
~
~
~
~
~
~
~
~
4,17 All
```
Now type `:wq` to save the file and quit. You should see:
```
Roses are red
Violets are blue
Linux is scary
But I'm scary too
~
~
~
~
~
~
~
~
~
~
~
"hello.txt" 4L, 64C written
user1@vm1:~$
```
Good, you did it. You have just edited a text file in vim - well done!
## Extra credit
+ Start vim again by typing `vim hello.txt` and try some of the commands I gave you.
+ Play this game, it will make you more familiar with vim: <http://vim-adventures.com/>
| {
"pile_set_name": "Github"
} |
/*
Copyright (C) 2011 Mark Chandler (Desura Net Pty Ltd)
Copyright (C) 2014 Bad Juju Games, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
Contact us at [email protected].
*/
#ifndef DESURA_SCRIPTCOREINTERNAL_H
#define DESURA_SCRIPTCOREINTERNAL_H
#ifdef _WIN32
#pragma once
#endif
#include "v8.h"
class ScriptCoreInternal
{
public:
void init();
void del();
void runScript(const char* file, const char* buff, uint32 size);
void runString(const char* string);
protected:
void doRunScript(v8::Handle<v8::Script> script);
gcString reportException(v8::TryCatch* try_catch);
private:
friend bool IsV8Init();
static std::mutex s_InitLock;
static bool s_IsInit;
static bool s_Disabled;
static void OnFatalError(const char* location, const char* message);
v8::Persistent<v8::Context> m_v8Context;
};
#endif //DESURA_SCRIPTCOREINTERNAL_H
| {
"pile_set_name": "Github"
} |
#ifndef _NRF_DELAY_H
#define _NRF_DELAY_H
// #include "nrf.h"
/*lint --e{438, 522} "Variable not used" "Function lacks side-effects" */
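/* Busy-wait microsecond delay implementations for each supported toolchain; the actual delay scales with the CPU clock frequency. */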
#if defined ( __CC_ARM )
static __ASM void __INLINE nrf_delay_us(uint32_t volatile number_of_us)
{
loop
SUBS R0, R0, #1
NOP
NOP
NOP
NOP
NOP
NOP
NOP
NOP
NOP
NOP
NOP
NOP
BNE loop
BX LR
}
#elif defined ( __ICCARM__ )
static void __INLINE nrf_delay_us(uint32_t volatile number_of_us)
{
__ASM (
"loop:\n\t"
" SUBS R0, R0, #1\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" NOP\n\t"
" BNE loop\n\t");
}
#elif defined ( __GNUC__ )
__INLINE static void nrf_delay_us(uint32_t volatile number_of_us)
{
do
{
__ASM volatile (
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
"NOP\n\t"
);
} while (--number_of_us);
}
#endif
void nrf_delay_ms(uint32_t volatile number_of_ms);
#endif
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:orientation="vertical">
<TextView
android:id="@+id/tv_item"
android:layout_width="wrap_content"
android:layout_height="60px"
android:layout_margin="18px"
android:gravity="center"
android:paddingLeft="20px"
android:paddingRight="20px"
android:text="热词"
android:textColor="@color/text1"
android:background="@drawable/shape_tag_nor"
/>
</LinearLayout> | {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 4f6a32ae4320c1d489759d620e71be9b
NativeFormatImporter:
externalObjects: {}
mainObjectFileID: 2100000
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
---------------
Custom Commands
---------------
``xse trip``
Show the last Trip value.
``xse recu up``
``xse recu down``
Set Rekuperation (regenerative braking) up or down.
This works only if Rekuperation is enabled in the car.
``xse charge``
Set the charge and departure time.
Example:
``xse charge 7 15 off``
sets the time to 7:15 with pre-climatization off
``xse charge 9 55 on``
sets the time to 9:55 with pre-climatization on
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2019 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.bugpatterns;
import static com.google.errorprone.BugPattern.SeverityLevel.ERROR;
import static com.google.errorprone.matchers.Description.NO_MATCH;
import static com.google.errorprone.matchers.JUnitMatchers.hasJUnit4TestRunner;
import static com.google.errorprone.util.ASTHelpers.getAnnotationWithSimpleName;
import static com.google.errorprone.util.ASTHelpers.hasAnnotation;
import com.google.errorprone.BugPattern;
import com.google.errorprone.VisitorState;
import com.google.errorprone.bugpatterns.BugChecker.ClassTreeMatcher;
import com.google.errorprone.fixes.SuggestedFix;
import com.google.errorprone.fixes.SuggestedFixes;
import com.google.errorprone.matchers.Description;
import com.sun.source.tree.AnnotationTree;
import com.sun.source.tree.ClassTree;
/** Flags uses of parameters in non-parameterized tests. */
@BugPattern(
name = "ParametersButNotParameterized",
summary =
"This test has @Parameters but is using the default JUnit4 runner. The parameters will"
+ " have no effect.",
severity = ERROR)
public final class ParametersButNotParameterized extends BugChecker implements ClassTreeMatcher {
private static final String PARAMETERIZED = "org.junit.runners.Parameterized";
private static final String PARAMETER = "org.junit.runners.Parameterized.Parameter";
private static final String PARAMETERS = "org.junit.runners.Parameterized.Parameters";
@Override
public Description matchClass(ClassTree tree, VisitorState state) {
if (!hasJUnit4TestRunner.matches(tree, state)) {
return NO_MATCH;
}
if (tree.getMembers().stream()
.noneMatch(
m -> hasAnnotation(m, PARAMETER, state) || hasAnnotation(m, PARAMETERS, state))) {
return NO_MATCH;
}
AnnotationTree annotation =
getAnnotationWithSimpleName(tree.getModifiers().getAnnotations(), "RunWith");
SuggestedFix.Builder fix = SuggestedFix.builder();
fix.replace(
annotation,
String.format("@RunWith(%s.class)", SuggestedFixes.qualifyType(state, fix, PARAMETERIZED)));
return describeMatch(tree, fix.build());
}
}
| {
"pile_set_name": "Github"
} |
import $ from 'jquery';
import env from '../core/env';
import lists from '../core/lists';
import dom from '../core/dom';
export default class TablePopover {
constructor(context) {
this.context = context;
this.ui = $.summernote.ui;
this.options = context.options;
this.events = {
'summernote.mousedown': (we, e) => {
this.update(e.target);
},
'summernote.keyup summernote.scroll summernote.change': () => {
this.update();
},
'summernote.disable': () => {
this.hide();
}
};
}
shouldInitialize() {
return !lists.isEmpty(this.options.popover.table);
}
initialize() {
this.$popover = this.ui.popover({
className: 'note-table-popover'
}).render().appendTo(this.options.container);
const $content = this.$popover.find('.popover-content,.note-popover-content');
this.context.invoke('buttons.build', $content, this.options.popover.table);
// [workaround] Disable Firefox's default table editor
if (env.isFF) {
document.execCommand('enableInlineTableEditing', false, false);
}
}
destroy() {
this.$popover.remove();
}
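  // Shows the popover next to the table cell under the given target element, or hides it when the target is not a cell.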
update(target) {
if (this.context.isDisabled()) {
return false;
}
const isCell = dom.isCell(target);
if (isCell) {
const pos = dom.posFromPlaceholder(target);
this.$popover.css({
display: 'block',
left: pos.left,
top: pos.top
});
} else {
this.hide();
}
return isCell;
}
hide() {
this.$popover.hide();
}
}
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2017-2019 Jonathan Müller <[email protected]>
// This file is subject to the license terms in the LICENSE file
// found in the top-level directory of this distribution.
#include <cppast/cpp_template_parameter.hpp>
#include <cppast/cpp_entity_kind.hpp>
using namespace cppast;
const char* cppast::to_string(cpp_template_keyword kw) noexcept
{
switch (kw)
{
case cpp_template_keyword::keyword_class:
return "class";
case cpp_template_keyword::keyword_typename:
return "typename";
}
return "should not get here";
}
std::unique_ptr<cpp_template_type_parameter> cpp_template_type_parameter::build(
const cpp_entity_index& idx, cpp_entity_id id, std::string name, cpp_template_keyword kw,
bool variadic, std::unique_ptr<cpp_type> default_type)
{
std::unique_ptr<cpp_template_type_parameter> result(
new cpp_template_type_parameter(std::move(name), kw, variadic, std::move(default_type)));
idx.register_definition(std::move(id), type_safe::cref(*result));
return result;
}
cpp_entity_kind cpp_template_type_parameter::kind() noexcept
{
return cpp_entity_kind::template_type_parameter_t;
}
cpp_entity_kind cpp_template_type_parameter::do_get_entity_kind() const noexcept
{
return kind();
}
bool detail::cpp_template_parameter_ref_predicate::operator()(const cpp_entity& e)
{
return e.kind() == cpp_entity_kind::template_type_parameter_t;
}
std::unique_ptr<cpp_non_type_template_parameter> cpp_non_type_template_parameter::build(
const cpp_entity_index& idx, cpp_entity_id id, std::string name, std::unique_ptr<cpp_type> type,
bool is_variadic, std::unique_ptr<cpp_expression> default_value)
{
std::unique_ptr<cpp_non_type_template_parameter> result(
new cpp_non_type_template_parameter(std::move(name), std::move(type), is_variadic,
std::move(default_value)));
idx.register_definition(std::move(id), type_safe::cref(*result));
return result;
}
cpp_entity_kind cpp_non_type_template_parameter::kind() noexcept
{
return cpp_entity_kind::non_type_template_parameter_t;
}
cpp_entity_kind cpp_non_type_template_parameter::do_get_entity_kind() const noexcept
{
return kind();
}
bool detail::cpp_template_ref_predicate::operator()(const cpp_entity& e)
{
return is_template(e.kind()) || e.kind() == cpp_entity_kind::template_template_parameter_t;
}
cpp_entity_kind cpp_template_template_parameter::kind() noexcept
{
return cpp_entity_kind::template_template_parameter_t;
}
cpp_entity_kind cpp_template_template_parameter::do_get_entity_kind() const noexcept
{
return kind();
}
| {
"pile_set_name": "Github"
} |
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
#import <AddressBookCore/ABCNPropertyDescription.h>
#import <AddressBookCore/ABCNAbstractPropertyDescription-Protocol.h>
@class NSString;
@interface ABCNIdentifierDescription : ABCNPropertyDescription <ABCNAbstractPropertyDescription>
{
}
- (void)decodeUsingCoder:(id)arg1 contact:(id)arg2;
- (void)encodeUsingCoder:(id)arg1 contact:(id)arg2;
- (void)copyFromContact:(id)arg1 to:(id)arg2;
- (BOOL)isEqualForContact:(id)arg1 other:(id)arg2;
- (id)init;
// Remaining properties
@property(readonly, copy) NSString *debugDescription;
@property(readonly, copy) NSString *description;
@property(readonly) unsigned long long hash;
@property(readonly) Class superclass;
@end
| {
"pile_set_name": "Github"
} |
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is part of dcm4che, an implementation of DICOM(TM) in
* Java(TM), hosted at https://github.com/dcm4che.
*
* The Initial Developer of the Original Code is
* Agfa Healthcare.
* Portions created by the Initial Developer are Copyright (C) 2012
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* See @authors listed below
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
package org.dcm4che3.net;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
/**
* @author Gunter Zeilinger <[email protected]>
*
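 * Typical lifecycle: init(device), then start() to bind the device connections and stop() to unbind them and shut down the executors.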
*/
public class DeviceService implements DeviceServiceInterface
{
protected Device device;
protected ExecutorService executor;
protected ScheduledExecutorService scheduledExecutor;
protected void init(Device device) {
setDevice(device);
}
public void setDevice(Device device) {
this.device = device;
}
public Device getDevice() {
return device;
}
public boolean isRunning() {
return executor != null;
}
public void start() throws Exception {
if (device == null)
throw new IllegalStateException("Not initialized");
if (executor != null)
throw new IllegalStateException("Already started");
executor = executerService();
scheduledExecutor = scheduledExecuterService();
try {
device.setExecutor(executor);
device.setScheduledExecutor(scheduledExecutor);
device.bindConnections();
} catch (Exception e) {
stop();
throw e;
}
}
public void stop() {
if (device != null)
device.unbindConnections();
if (scheduledExecutor != null)
scheduledExecutor.shutdown();
if (executor != null)
executor.shutdown();
executor = null;
scheduledExecutor = null;
}
protected ExecutorService executerService() {
return Executors.newCachedThreadPool();
}
protected ScheduledExecutorService scheduledExecuterService() {
return Executors.newSingleThreadScheduledExecutor();
}
}
| {
"pile_set_name": "Github"
} |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to [email protected] so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Pdf
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: Png.php 22655 2010-07-22 18:47:20Z mabe $
*/
/** Internally used classes */
require_once 'Zend/Pdf/Element/Array.php';
require_once 'Zend/Pdf/Element/Dictionary.php';
require_once 'Zend/Pdf/Element/Name.php';
require_once 'Zend/Pdf/Element/Numeric.php';
require_once 'Zend/Pdf/Element/String/Binary.php';
/** Zend_Pdf_Resource_Image */
require_once 'Zend/Pdf/Resource/Image.php';
/**
* PNG image
*
* @package Zend_Pdf
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
*/
class Zend_Pdf_Resource_Image_Png extends Zend_Pdf_Resource_Image
{
const PNG_COMPRESSION_DEFAULT_STRATEGY = 0;
const PNG_COMPRESSION_FILTERED = 1;
const PNG_COMPRESSION_HUFFMAN_ONLY = 2;
const PNG_COMPRESSION_RLE = 3;
const PNG_FILTER_NONE = 0;
const PNG_FILTER_SUB = 1;
const PNG_FILTER_UP = 2;
const PNG_FILTER_AVERAGE = 3;
const PNG_FILTER_PAETH = 4;
const PNG_INTERLACING_DISABLED = 0;
const PNG_INTERLACING_ENABLED = 1;
const PNG_CHANNEL_GRAY = 0;
const PNG_CHANNEL_RGB = 2;
const PNG_CHANNEL_INDEXED = 3;
const PNG_CHANNEL_GRAY_ALPHA = 4;
const PNG_CHANNEL_RGB_ALPHA = 6;
protected $_width;
protected $_height;
protected $_imageProperties;
/**
* Object constructor
*
* @param string $imageFileName
* @throws Zend_Pdf_Exception
     * @todo Add compression conversions to support compression strategies other than PNG_COMPRESSION_DEFAULT_STRATEGY.
* @todo Add pre-compression filtering.
* @todo Add interlaced image handling.
* @todo Add support for 16-bit images. Requires PDF version bump to 1.5 at least.
* @todo Add processing for all PNG chunks defined in the spec. gAMA etc.
* @todo Fix tRNS chunk support for Indexed Images to a SMask.
*/
public function __construct($imageFileName)
{
if (($imageFile = @fopen($imageFileName, 'rb')) === false ) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception( "Can not open '$imageFileName' file for reading." );
}
parent::__construct();
//Check if the file is a PNG
fseek($imageFile, 1, SEEK_CUR); //First signature byte (%)
if ('PNG' != fread($imageFile, 3)) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception('Image is not a PNG');
}
        fseek($imageFile, 12, SEEK_CUR); //Signature bytes (Includes the IHDR chunk) IHDR is processed linearly because it doesn't contain a variable chunk length
$wtmp = unpack('Ni',fread($imageFile, 4)); //Unpack a 4-Byte Long
$width = $wtmp['i'];
$htmp = unpack('Ni',fread($imageFile, 4));
$height = $htmp['i'];
$bits = ord(fread($imageFile, 1)); //Higher than 8 bit depths are only supported in later versions of PDF.
$color = ord(fread($imageFile, 1));
$compression = ord(fread($imageFile, 1));
$prefilter = ord(fread($imageFile,1));
if (($interlacing = ord(fread($imageFile,1))) != Zend_Pdf_Resource_Image_Png::PNG_INTERLACING_DISABLED) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception( "Only non-interlaced images are currently supported." );
}
$this->_width = $width;
$this->_height = $height;
$this->_imageProperties = array();
$this->_imageProperties['bitDepth'] = $bits;
$this->_imageProperties['pngColorType'] = $color;
$this->_imageProperties['pngFilterType'] = $prefilter;
$this->_imageProperties['pngCompressionType'] = $compression;
$this->_imageProperties['pngInterlacingType'] = $interlacing;
fseek($imageFile, 4, SEEK_CUR); //4 Byte Ending Sequence
$imageData = '';
/*
         * The following loop processes PNG chunks. A 4-byte long is unpacked first to give the chunk length,
         * followed by the chunk signature, a four-byte code. IDAT and IEND are mandatory in any PNG.
*/
while(($chunkLengthBytes = fread($imageFile, 4)) !== false) {
$chunkLengthtmp = unpack('Ni', $chunkLengthBytes);
$chunkLength = $chunkLengthtmp['i'];
$chunkType = fread($imageFile, 4);
switch($chunkType) {
case 'IDAT': //Image Data
/*
* Reads the actual image data from the PNG file. Since we know at this point that the compression
* strategy is the default strategy, we also know that this data is Zip compressed. We will either copy
* the data directly to the PDF and provide the correct FlateDecode predictor, or decompress the data
* decode the filters and output the data as a raw pixel map.
*/
$imageData .= fread($imageFile, $chunkLength);
fseek($imageFile, 4, SEEK_CUR);
break;
case 'PLTE': //Palette
$paletteData = fread($imageFile, $chunkLength);
fseek($imageFile, 4, SEEK_CUR);
break;
case 'tRNS': //Basic (non-alpha channel) transparency.
$trnsData = fread($imageFile, $chunkLength);
switch ($color) {
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_GRAY:
$baseColor = ord(substr($trnsData, 1, 1));
$transparencyData = array(new Zend_Pdf_Element_Numeric($baseColor),
new Zend_Pdf_Element_Numeric($baseColor));
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_RGB:
$red = ord(substr($trnsData,1,1));
$green = ord(substr($trnsData,3,1));
$blue = ord(substr($trnsData,5,1));
$transparencyData = array(new Zend_Pdf_Element_Numeric($red),
new Zend_Pdf_Element_Numeric($red),
new Zend_Pdf_Element_Numeric($green),
new Zend_Pdf_Element_Numeric($green),
new Zend_Pdf_Element_Numeric($blue),
new Zend_Pdf_Element_Numeric($blue));
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_INDEXED:
                        //Find the first transparent color in the index; we will mask that. (This is a bit of a hack. This should be an SMask masking all entries' values.)
if(($trnsIdx = strpos($trnsData, "\0")) !== false) {
$transparencyData = array(new Zend_Pdf_Element_Numeric($trnsIdx),
new Zend_Pdf_Element_Numeric($trnsIdx));
}
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_GRAY_ALPHA:
// Fall through to the next case
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_RGB_ALPHA:
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception( "tRNS chunk illegal for Alpha Channel Images" );
break;
}
fseek($imageFile, 4, SEEK_CUR); //4 Byte Ending Sequence
break;
case 'IEND';
break 2; //End the loop too
default:
fseek($imageFile, $chunkLength + 4, SEEK_CUR); //Skip the section
break;
}
}
fclose($imageFile);
$compressed = true;
$imageDataTmp = '';
$smaskData = '';
switch ($color) {
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_RGB:
$colorSpace = new Zend_Pdf_Element_Name('DeviceRGB');
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_GRAY:
$colorSpace = new Zend_Pdf_Element_Name('DeviceGray');
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_INDEXED:
if(empty($paletteData)) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception( "PNG Corruption: No palette data read for indexed type PNG." );
}
$colorSpace = new Zend_Pdf_Element_Array();
$colorSpace->items[] = new Zend_Pdf_Element_Name('Indexed');
$colorSpace->items[] = new Zend_Pdf_Element_Name('DeviceRGB');
$colorSpace->items[] = new Zend_Pdf_Element_Numeric((strlen($paletteData)/3-1));
$paletteObject = $this->_objectFactory->newObject(new Zend_Pdf_Element_String_Binary($paletteData));
$colorSpace->items[] = $paletteObject;
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_GRAY_ALPHA:
/*
* To decode PNG's with alpha data we must create two images from one. One image will contain the Gray data
* the other will contain the Gray transparency overlay data. The former will become the object data and the latter
* will become the Shadow Mask (SMask).
*/
if($bits > 8) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception("Alpha PNGs with bit depth > 8 are not yet supported");
}
$colorSpace = new Zend_Pdf_Element_Name('DeviceGray');
require_once 'Zend/Pdf/ElementFactory.php';
$decodingObjFactory = Zend_Pdf_ElementFactory::createFactory(1);
$decodingStream = $decodingObjFactory->newStreamObject($imageData);
$decodingStream->dictionary->Filter = new Zend_Pdf_Element_Name('FlateDecode');
$decodingStream->dictionary->DecodeParms = new Zend_Pdf_Element_Dictionary();
$decodingStream->dictionary->DecodeParms->Predictor = new Zend_Pdf_Element_Numeric(15);
$decodingStream->dictionary->DecodeParms->Columns = new Zend_Pdf_Element_Numeric($width);
$decodingStream->dictionary->DecodeParms->Colors = new Zend_Pdf_Element_Numeric(2); //GreyAlpha
$decodingStream->dictionary->DecodeParms->BitsPerComponent = new Zend_Pdf_Element_Numeric($bits);
$decodingStream->skipFilters();
$pngDataRawDecoded = $decodingStream->value;
//Iterate every pixel and copy out gray data and alpha channel (this will be slow)
for($pixel = 0, $pixelcount = ($width * $height); $pixel < $pixelcount; $pixel++) {
$imageDataTmp .= $pngDataRawDecoded[($pixel*2)];
$smaskData .= $pngDataRawDecoded[($pixel*2)+1];
}
$compressed = false;
$imageData = $imageDataTmp; //Overwrite image data with the gray channel without alpha
break;
case Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_RGB_ALPHA:
/*
* To decode PNG's with alpha data we must create two images from one. One image will contain the RGB data
* the other will contain the Gray transparency overlay data. The former will become the object data and the latter
* will become the Shadow Mask (SMask).
*/
if($bits > 8) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception("Alpha PNGs with bit depth > 8 are not yet supported");
}
$colorSpace = new Zend_Pdf_Element_Name('DeviceRGB');
require_once 'Zend/Pdf/ElementFactory.php';
$decodingObjFactory = Zend_Pdf_ElementFactory::createFactory(1);
$decodingStream = $decodingObjFactory->newStreamObject($imageData);
$decodingStream->dictionary->Filter = new Zend_Pdf_Element_Name('FlateDecode');
$decodingStream->dictionary->DecodeParms = new Zend_Pdf_Element_Dictionary();
$decodingStream->dictionary->DecodeParms->Predictor = new Zend_Pdf_Element_Numeric(15);
$decodingStream->dictionary->DecodeParms->Columns = new Zend_Pdf_Element_Numeric($width);
$decodingStream->dictionary->DecodeParms->Colors = new Zend_Pdf_Element_Numeric(4); //RGBA
$decodingStream->dictionary->DecodeParms->BitsPerComponent = new Zend_Pdf_Element_Numeric($bits);
$decodingStream->skipFilters();
$pngDataRawDecoded = $decodingStream->value;
//Iterate every pixel and copy out rgb data and alpha channel (this will be slow)
for($pixel = 0, $pixelcount = ($width * $height); $pixel < $pixelcount; $pixel++) {
$imageDataTmp .= $pngDataRawDecoded[($pixel*4)+0] . $pngDataRawDecoded[($pixel*4)+1] . $pngDataRawDecoded[($pixel*4)+2];
$smaskData .= $pngDataRawDecoded[($pixel*4)+3];
}
$compressed = false;
$imageData = $imageDataTmp; //Overwrite image data with the RGB channel without alpha
break;
default:
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception( "PNG Corruption: Invalid color space." );
}
if(empty($imageData)) {
require_once 'Zend/Pdf/Exception.php';
throw new Zend_Pdf_Exception( "Corrupt PNG Image. Mandatory IDAT chunk not found." );
}
$imageDictionary = $this->_resource->dictionary;
if(!empty($smaskData)) {
/*
* Includes the Alpha transparency data as a Gray Image, then assigns the image as the Shadow Mask for the main image data.
*/
$smaskStream = $this->_objectFactory->newStreamObject($smaskData);
$smaskStream->dictionary->Type = new Zend_Pdf_Element_Name('XObject');
$smaskStream->dictionary->Subtype = new Zend_Pdf_Element_Name('Image');
$smaskStream->dictionary->Width = new Zend_Pdf_Element_Numeric($width);
$smaskStream->dictionary->Height = new Zend_Pdf_Element_Numeric($height);
$smaskStream->dictionary->ColorSpace = new Zend_Pdf_Element_Name('DeviceGray');
$smaskStream->dictionary->BitsPerComponent = new Zend_Pdf_Element_Numeric($bits);
$imageDictionary->SMask = $smaskStream;
// Encode stream with FlateDecode filter
$smaskStreamDecodeParms = array();
$smaskStreamDecodeParms['Predictor'] = new Zend_Pdf_Element_Numeric(15);
$smaskStreamDecodeParms['Columns'] = new Zend_Pdf_Element_Numeric($width);
$smaskStreamDecodeParms['Colors'] = new Zend_Pdf_Element_Numeric(1);
$smaskStreamDecodeParms['BitsPerComponent'] = new Zend_Pdf_Element_Numeric(8);
$smaskStream->dictionary->DecodeParms = new Zend_Pdf_Element_Dictionary($smaskStreamDecodeParms);
$smaskStream->dictionary->Filter = new Zend_Pdf_Element_Name('FlateDecode');
}
if(!empty($transparencyData)) {
//This is experimental and not properly tested.
$imageDictionary->Mask = new Zend_Pdf_Element_Array($transparencyData);
}
$imageDictionary->Width = new Zend_Pdf_Element_Numeric($width);
$imageDictionary->Height = new Zend_Pdf_Element_Numeric($height);
$imageDictionary->ColorSpace = $colorSpace;
$imageDictionary->BitsPerComponent = new Zend_Pdf_Element_Numeric($bits);
$imageDictionary->Filter = new Zend_Pdf_Element_Name('FlateDecode');
$decodeParms = array();
$decodeParms['Predictor'] = new Zend_Pdf_Element_Numeric(15); // Optimal prediction
$decodeParms['Columns'] = new Zend_Pdf_Element_Numeric($width);
$decodeParms['Colors'] = new Zend_Pdf_Element_Numeric((($color==Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_RGB || $color==Zend_Pdf_Resource_Image_Png::PNG_CHANNEL_RGB_ALPHA)?(3):(1)));
$decodeParms['BitsPerComponent'] = new Zend_Pdf_Element_Numeric($bits);
$imageDictionary->DecodeParms = new Zend_Pdf_Element_Dictionary($decodeParms);
//Include only the image IDAT section data.
$this->_resource->value = $imageData;
//Skip double compression
if ($compressed) {
$this->_resource->skipFilters();
}
}
/**
* Image width
*/
public function getPixelWidth() {
return $this->_width;
}
/**
* Image height
*/
public function getPixelHeight() {
return $this->_height;
}
/**
* Image properties
*/
public function getProperties() {
return $this->_imageProperties;
}
}
| {
"pile_set_name": "Github"
} |
#include <rose.h>
class SgVarSubstCopy : public SgCopyHelp
{
SgVariableSymbol *sourceSymbol;
SgExpression *replacementExpression;
public:
SgVarSubstCopy(SgVariableSymbol *sourceSymbol, SgExpression *replacementExpression)
: sourceSymbol(sourceSymbol), replacementExpression(replacementExpression)
{}
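    // Replace any SgVarRefExp that refers to sourceSymbol with a copy of replacementExpression; copy all other nodes normally.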
SgNode *copyAst(const SgNode *n)
{
if (const SgVarRefExp *vr = isSgVarRefExp(const_cast<SgNode *>(n)))
{
if (vr->get_symbol() == sourceSymbol)
{
return replacementExpression->copy(*this);
}
}
return n->copy(*this);
}
};
#define SgNULL_FILE Sg_File_Info::generateDefaultFileInfoForTransformationNode()
int main(int argc, char **argv)
{
// Initialize and check compatibility. See Rose::initialize
ROSE_INITIALIZE;
// Build the AST used by ROSE
SgProject *project = frontend(argc, argv);
// Find the exampleFunction function
SgGlobal *global = project->get_file(0).get_globalScope();
SgFunctionSymbol *exampleFunctionSym = global->lookup_function_symbol("exampleFunction");
ROSE_ASSERT(exampleFunctionSym != NULL);
SgFunctionDeclaration *exampleFunctionDecl = exampleFunctionSym->get_declaration();
ROSE_ASSERT(exampleFunctionDecl != NULL);
SgFunctionDefinition *exampleFunctionDef = exampleFunctionDecl->get_definition();
ROSE_ASSERT(exampleFunctionDef != NULL);
// Find its first parameter
SgInitializedName *firstParamName = exampleFunctionDecl->get_args().front();
ROSE_ASSERT(firstParamName != NULL);
SgVariableSymbol *firstParamSym = exampleFunctionDef->lookup_var_symbol(firstParamName->get_name());
ROSE_ASSERT(firstParamSym != NULL);
// Construct the expression to substitute for
SgIntVal *twenty = new SgIntVal(SgNULL_FILE, 20);
// Create our copy help mechanism with the required parameters
SgVarSubstCopy ourCopyHelp(firstParamSym, twenty);
// Do the copy
SgNode *exampleFunctionDeclCopyNode = exampleFunctionDecl->copy(ourCopyHelp);
ROSE_ASSERT(exampleFunctionDeclCopyNode != NULL);
SgFunctionDeclaration *exampleFunctionDeclCopy = isSgFunctionDeclaration(exampleFunctionDeclCopyNode);
ROSE_ASSERT(exampleFunctionDeclCopy != NULL);
// Change the name of the new function
exampleFunctionDeclCopy->set_name("exampleFunction20");
global->get_symbol_table()->insert(exampleFunctionDeclCopy->get_name(), new SgFunctionSymbol(exampleFunctionDeclCopy));
// Add function to global scope
global->append_declaration(exampleFunctionDeclCopy);
// Generate source code from AST and call the vendor's compiler
return backend(project);
}
| {
"pile_set_name": "Github"
} |
/**
* Converts an ASCII `string` to an array.
*
* @private
* @param {string} string The string to convert.
* @returns {Array} Returns the converted array.
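 * @example
 *
 * asciiToArray('abc');
 * // => ['a', 'b', 'c']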
*/
function asciiToArray(string) {
return string.split('');
}
module.exports = asciiToArray;
| {
"pile_set_name": "Github"
} |
// This file is part of Substrate.
// Copyright (C) 2017-2020 Parity Technologies (UK) Ltd.
// SPDX-License-Identifier: GPL-3.0-or-later WITH Classpath-exception-2.0
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//! Communication streams for the polite-grandpa networking protocol.
//!
//! GRANDPA nodes communicate over a gossip network, where messages are not sent to
//! peers until they have reached a given round.
//!
//! Rather than expressing protocol rules,
//! polite-grandpa just carries a notion of impoliteness. Nodes which pass some arbitrary
//! threshold of impoliteness are removed. Messages are either costly, or beneficial.
//!
//! For instance, it is _impolite_ to send the same message more than once.
//! In the future, there will be a fallback for allowing sending the same message
//! under certain conditions that are used to un-stick the protocol.
use futures::{prelude::*, channel::mpsc};
use log::{debug, trace};
use parking_lot::Mutex;
use prometheus_endpoint::Registry;
use std::{pin::Pin, sync::Arc, task::{Context, Poll}};
use sp_core::traits::BareCryptoStorePtr;
use finality_grandpa::Message::{Prevote, Precommit, PrimaryPropose};
use finality_grandpa::{voter, voter_set::VoterSet};
use sc_network::{NetworkService, ReputationChange};
use sc_network_gossip::{GossipEngine, Network as GossipNetwork};
use parity_scale_codec::{Encode, Decode};
use sp_runtime::traits::{Block as BlockT, Hash as HashT, Header as HeaderT, NumberFor};
use sc_telemetry::{telemetry, CONSENSUS_DEBUG, CONSENSUS_INFO};
use crate::{
CatchUp, Commit, CommunicationIn, CommunicationOutH,
CompactCommit, Error, Message, SignedMessage,
};
use crate::environment::HasVoted;
use gossip::{
FullCatchUpMessage,
FullCommitMessage,
GossipMessage,
GossipValidator,
PeerReport,
VoteMessage,
};
use sp_finality_grandpa::{
AuthorityId, AuthoritySignature, SetId as SetIdNumber, RoundNumber,
};
use sp_utils::mpsc::TracingUnboundedReceiver;
pub mod gossip;
mod periodic;
#[cfg(test)]
pub(crate) mod tests;
pub use sp_finality_grandpa::GRANDPA_ENGINE_ID;
pub const GRANDPA_PROTOCOL_NAME: &'static str = "/paritytech/grandpa/1";
// cost scalars for reporting peers.
mod cost {
use sc_network::ReputationChange as Rep;
pub(super) const PAST_REJECTION: Rep = Rep::new(-50, "Grandpa: Past message");
pub(super) const BAD_SIGNATURE: Rep = Rep::new(-100, "Grandpa: Bad signature");
	pub(super) const MALFORMED_CATCH_UP: Rep = Rep::new(-1000, "Grandpa: Malformed catch-up");
pub(super) const MALFORMED_COMMIT: Rep = Rep::new(-1000, "Grandpa: Malformed commit");
pub(super) const FUTURE_MESSAGE: Rep = Rep::new(-500, "Grandpa: Future message");
pub(super) const UNKNOWN_VOTER: Rep = Rep::new(-150, "Grandpa: Unknown voter");
pub(super) const INVALID_VIEW_CHANGE: Rep = Rep::new(-500, "Grandpa: Invalid view change");
pub(super) const PER_UNDECODABLE_BYTE: i32 = -5;
pub(super) const PER_SIGNATURE_CHECKED: i32 = -25;
pub(super) const PER_BLOCK_LOADED: i32 = -10;
pub(super) const INVALID_CATCH_UP: Rep = Rep::new(-5000, "Grandpa: Invalid catch-up");
pub(super) const INVALID_COMMIT: Rep = Rep::new(-5000, "Grandpa: Invalid commit");
pub(super) const OUT_OF_SCOPE_MESSAGE: Rep = Rep::new(-500, "Grandpa: Out-of-scope message");
pub(super) const CATCH_UP_REQUEST_TIMEOUT: Rep = Rep::new(-200, "Grandpa: Catch-up request timeout");
// cost of answering a catch up request
pub(super) const CATCH_UP_REPLY: Rep = Rep::new(-200, "Grandpa: Catch-up reply");
pub(super) const HONEST_OUT_OF_SCOPE_CATCH_UP: Rep = Rep::new(-200, "Grandpa: Out-of-scope catch-up");
}
// benefit scalars for reporting peers.
mod benefit {
use sc_network::ReputationChange as Rep;
pub(super) const NEIGHBOR_MESSAGE: Rep = Rep::new(100, "Grandpa: Neighbor message");
pub(super) const ROUND_MESSAGE: Rep = Rep::new(100, "Grandpa: Round message");
pub(super) const BASIC_VALIDATED_CATCH_UP: Rep = Rep::new(200, "Grandpa: Catch-up message");
pub(super) const BASIC_VALIDATED_COMMIT: Rep = Rep::new(100, "Grandpa: Commit");
pub(super) const PER_EQUIVOCATION: i32 = 10;
}
/// A type that ties together our local authority id and a keystore where it is
/// available for signing.
pub struct LocalIdKeystore((AuthorityId, BareCryptoStorePtr));
impl LocalIdKeystore {
/// Returns a reference to our local authority id.
fn local_id(&self) -> &AuthorityId {
&(self.0).0
}
/// Returns a reference to the keystore.
fn keystore(&self) -> &BareCryptoStorePtr {
&(self.0).1
}
}
impl AsRef<BareCryptoStorePtr> for LocalIdKeystore {
fn as_ref(&self) -> &BareCryptoStorePtr {
self.keystore()
}
}
impl From<(AuthorityId, BareCryptoStorePtr)> for LocalIdKeystore {
fn from(inner: (AuthorityId, BareCryptoStorePtr)) -> LocalIdKeystore {
LocalIdKeystore(inner)
}
}
/// If the voter set is larger than this value some telemetry events are not
/// sent to avoid increasing usage resource on the node and flooding the
/// telemetry server (e.g. received votes, received commits.)
const TELEMETRY_VOTERS_LIMIT: usize = 10;
/// A handle to the network.
///
/// Something that provides both the capabilities needed for the `gossip_network::Network` trait
/// and the ability to set a fork sync request for a particular block.
pub trait Network<Block: BlockT>: GossipNetwork<Block> + Clone + Send + 'static {
/// Notifies the sync service to try and sync the given block from the given
/// peers.
///
/// If the given vector of peers is empty then the underlying implementation
/// should make a best effort to fetch the block from any peers it is
/// connected to (NOTE: this assumption will change in the future #3629).
fn set_sync_fork_request(&self, peers: Vec<sc_network::PeerId>, hash: Block::Hash, number: NumberFor<Block>);
}
impl<B, H> Network<B> for Arc<NetworkService<B, H>> where
B: BlockT,
H: sc_network::ExHashT,
{
fn set_sync_fork_request(&self, peers: Vec<sc_network::PeerId>, hash: B::Hash, number: NumberFor<B>) {
NetworkService::set_sync_fork_request(self, peers, hash, number)
}
}
/// Create a unique topic for a round and set-id combo.
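/// E.g. round 5 in set 0 hashes the bytes of the string "0-5".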
pub(crate) fn round_topic<B: BlockT>(round: RoundNumber, set_id: SetIdNumber) -> B::Hash {
<<B::Header as HeaderT>::Hashing as HashT>::hash(format!("{}-{}", set_id, round).as_bytes())
}
/// Create a unique topic for global messages on a set ID.
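/// E.g. set 0 hashes the bytes of the string "0-GLOBAL".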
pub(crate) fn global_topic<B: BlockT>(set_id: SetIdNumber) -> B::Hash {
<<B::Header as HeaderT>::Hashing as HashT>::hash(format!("{}-GLOBAL", set_id).as_bytes())
}
/// Bridge between the underlying network service, gossiping consensus messages and Grandpa
pub(crate) struct NetworkBridge<B: BlockT, N: Network<B>> {
service: N,
gossip_engine: Arc<Mutex<GossipEngine<B>>>,
validator: Arc<GossipValidator<B>>,
/// Sender side of the neighbor packet channel.
///
/// Packets sent into this channel are processed by the `NeighborPacketWorker` and passed on to
/// the underlying `GossipEngine`.
neighbor_sender: periodic::NeighborPacketSender<B>,
/// `NeighborPacketWorker` processing packets sent through the `NeighborPacketSender`.
//
// `NetworkBridge` is required to be cloneable, thus one needs to be able to clone its children,
// thus one has to wrap `neighbor_packet_worker` with an `Arc` `Mutex`.
neighbor_packet_worker: Arc<Mutex<periodic::NeighborPacketWorker<B>>>,
/// Receiver side of the peer report stream populated by the gossip validator, forwarded to the
/// gossip engine.
//
// `NetworkBridge` is required to be cloneable, thus one needs to be able to clone its children,
// thus one has to wrap gossip_validator_report_stream with an `Arc` `Mutex`. Given that it is
// just an `UnboundedReceiver`, one could also switch to a multi-producer-*multi*-consumer
// channel implementation.
gossip_validator_report_stream: Arc<Mutex<TracingUnboundedReceiver<PeerReport>>>,
}
impl<B: BlockT, N: Network<B>> Unpin for NetworkBridge<B, N> {}
impl<B: BlockT, N: Network<B>> NetworkBridge<B, N> {
/// Create a new NetworkBridge to the given NetworkService. Returns the service
/// handle.
/// On creation it will register previous rounds' votes with the gossip
/// service taken from the VoterSetState.
pub(crate) fn new(
service: N,
config: crate::Config,
set_state: crate::environment::SharedVoterSetState<B>,
prometheus_registry: Option<&Registry>,
) -> Self {
let (validator, report_stream) = GossipValidator::new(
config,
set_state.clone(),
prometheus_registry,
);
let validator = Arc::new(validator);
let gossip_engine = Arc::new(Mutex::new(GossipEngine::new(
service.clone(),
GRANDPA_ENGINE_ID,
GRANDPA_PROTOCOL_NAME,
validator.clone()
)));
{
// register all previous votes with the gossip service so that they're
// available to peers potentially stuck on a previous round.
let completed = set_state.read().completed_rounds();
let (set_id, voters) = completed.set_info();
validator.note_set(SetId(set_id), voters.to_vec(), |_, _| {});
for round in completed.iter() {
let topic = round_topic::<B>(round.number, set_id);
// we need to note the round with the gossip validator otherwise
// messages will be ignored.
validator.note_round(Round(round.number), |_, _| {});
for signed in round.votes.iter() {
let message = gossip::GossipMessage::Vote(
gossip::VoteMessage::<B> {
message: signed.clone(),
round: Round(round.number),
set_id: SetId(set_id),
}
);
gossip_engine.lock().register_gossip_message(
topic,
message.encode(),
);
}
trace!(target: "afg",
"Registered {} messages for topic {:?} (round: {}, set_id: {})",
round.votes.len(),
topic,
round.number,
set_id,
);
}
}
let (neighbor_packet_worker, neighbor_packet_sender) = periodic::NeighborPacketWorker::new();
NetworkBridge {
service,
gossip_engine,
validator,
neighbor_sender: neighbor_packet_sender,
neighbor_packet_worker: Arc::new(Mutex::new(neighbor_packet_worker)),
gossip_validator_report_stream: Arc::new(Mutex::new(report_stream)),
}
}
/// Note the beginning of a new round to the `GossipValidator`.
pub(crate) fn note_round(
&self,
round: Round,
set_id: SetId,
voters: &VoterSet<AuthorityId>,
) {
// is a no-op if currently in that set.
self.validator.note_set(
set_id,
voters.iter().map(|(v, _)| v.clone()).collect(),
|to, neighbor| self.neighbor_sender.send(to, neighbor),
);
self.validator.note_round(
round,
|to, neighbor| self.neighbor_sender.send(to, neighbor),
);
}
/// Get a stream of signature-checked round messages from the network as well as a sink for round messages to the
/// network all within the current set.
pub(crate) fn round_communication(
&self,
keystore: Option<LocalIdKeystore>,
round: Round,
set_id: SetId,
voters: Arc<VoterSet<AuthorityId>>,
has_voted: HasVoted<B>,
) -> (
impl Stream<Item = SignedMessage<B>> + Unpin,
OutgoingMessages<B>,
) {
self.note_round(
round,
set_id,
&*voters,
);
let keystore = keystore.and_then(|ks| {
let id = ks.local_id();
if voters.contains(id) {
Some(ks)
} else {
None
}
});
let topic = round_topic::<B>(round.0, set_id.0);
let incoming = self.gossip_engine.lock().messages_for(topic)
.filter_map(move |notification| {
let decoded = GossipMessage::<B>::decode(&mut ¬ification.message[..]);
match decoded {
Err(ref e) => {
debug!(target: "afg", "Skipping malformed message {:?}: {}", notification, e);
future::ready(None)
}
Ok(GossipMessage::Vote(msg)) => {
// check signature.
if !voters.contains(&msg.message.id) {
debug!(target: "afg", "Skipping message from unknown voter {}", msg.message.id);
return future::ready(None);
}
if voters.len().get() <= TELEMETRY_VOTERS_LIMIT {
match &msg.message.message {
PrimaryPropose(propose) => {
telemetry!(CONSENSUS_INFO; "afg.received_propose";
"voter" => ?format!("{}", msg.message.id),
"target_number" => ?propose.target_number,
"target_hash" => ?propose.target_hash,
);
},
Prevote(prevote) => {
telemetry!(CONSENSUS_INFO; "afg.received_prevote";
"voter" => ?format!("{}", msg.message.id),
"target_number" => ?prevote.target_number,
"target_hash" => ?prevote.target_hash,
);
},
Precommit(precommit) => {
telemetry!(CONSENSUS_INFO; "afg.received_precommit";
"voter" => ?format!("{}", msg.message.id),
"target_number" => ?precommit.target_number,
"target_hash" => ?precommit.target_hash,
);
},
};
}
future::ready(Some(msg.message))
}
_ => {
debug!(target: "afg", "Skipping unknown message type");
future::ready(None)
}
}
});
let (tx, out_rx) = mpsc::channel(0);
let outgoing = OutgoingMessages::<B> {
keystore,
round: round.0,
set_id: set_id.0,
network: self.gossip_engine.clone(),
sender: tx,
has_voted,
};
// Combine incoming votes from external GRANDPA nodes with outgoing
// votes from our own GRANDPA voter to have a single
// vote-import-pipeline.
let incoming = stream::select(incoming, out_rx);
(incoming, outgoing)
}
/// Set up the global communication streams.
pub(crate) fn global_communication(
&self,
set_id: SetId,
voters: Arc<VoterSet<AuthorityId>>,
is_voter: bool,
) -> (
impl Stream<Item = CommunicationIn<B>>,
impl Sink<CommunicationOutH<B, B::Hash>, Error = Error> + Unpin,
) {
self.validator.note_set(
set_id,
voters.iter().map(|(v, _)| v.clone()).collect(),
|to, neighbor| self.neighbor_sender.send(to, neighbor),
);
let topic = global_topic::<B>(set_id.0);
let incoming = incoming_global(
self.gossip_engine.clone(),
topic,
voters,
self.validator.clone(),
self.neighbor_sender.clone(),
);
let outgoing = CommitsOut::<B>::new(
self.gossip_engine.clone(),
set_id.0,
is_voter,
self.validator.clone(),
self.neighbor_sender.clone(),
);
let outgoing = outgoing.with(|out| {
let voter::CommunicationOut::Commit(round, commit) = out;
future::ok((round, commit))
});
(incoming, outgoing)
}
/// Notifies the sync service to try and sync the given block from the given
/// peers.
///
/// If the given vector of peers is empty then the underlying implementation
/// should make a best effort to fetch the block from any peers it is
/// connected to (NOTE: this assumption will change in the future #3629).
pub(crate) fn set_sync_fork_request(
&self,
peers: Vec<sc_network::PeerId>,
hash: B::Hash,
number: NumberFor<B>
) {
Network::set_sync_fork_request(&self.service, peers, hash, number)
}
}
impl<B: BlockT, N: Network<B>> Future for NetworkBridge<B, N> {
type Output = Result<(), Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {
loop {
match self.neighbor_packet_worker.lock().poll_next_unpin(cx) {
Poll::Ready(Some((to, packet))) => {
self.gossip_engine.lock().send_message(to, packet.encode());
},
Poll::Ready(None) => return Poll::Ready(
Err(Error::Network("Neighbor packet worker stream closed.".into()))
),
Poll::Pending => break,
}
}
loop {
match self.gossip_validator_report_stream.lock().poll_next_unpin(cx) {
Poll::Ready(Some(PeerReport { who, cost_benefit })) => {
self.gossip_engine.lock().report(who, cost_benefit);
},
Poll::Ready(None) => return Poll::Ready(
Err(Error::Network("Gossip validator report stream closed.".into()))
),
Poll::Pending => break,
}
}
match self.gossip_engine.lock().poll_unpin(cx) {
Poll::Ready(()) => return Poll::Ready(
Err(Error::Network("Gossip engine future finished.".into()))
),
Poll::Pending => {},
}
Poll::Pending
}
}
fn incoming_global<B: BlockT>(
gossip_engine: Arc<Mutex<GossipEngine<B>>>,
topic: B::Hash,
voters: Arc<VoterSet<AuthorityId>>,
gossip_validator: Arc<GossipValidator<B>>,
neighbor_sender: periodic::NeighborPacketSender<B>,
) -> impl Stream<Item = CommunicationIn<B>> {
let process_commit = move |
msg: FullCommitMessage<B>,
mut notification: sc_network_gossip::TopicNotification,
gossip_engine: &Arc<Mutex<GossipEngine<B>>>,
gossip_validator: &Arc<GossipValidator<B>>,
voters: &VoterSet<AuthorityId>,
| {
if voters.len().get() <= TELEMETRY_VOTERS_LIMIT {
let precommits_signed_by: Vec<String> =
msg.message.auth_data.iter().map(move |(_, a)| {
format!("{}", a)
}).collect();
telemetry!(CONSENSUS_INFO; "afg.received_commit";
"contains_precommits_signed_by" => ?precommits_signed_by,
"target_number" => ?msg.message.target_number.clone(),
"target_hash" => ?msg.message.target_hash.clone(),
);
}
if let Err(cost) = check_compact_commit::<B>(
&msg.message,
voters,
msg.round,
msg.set_id,
) {
if let Some(who) = notification.sender {
gossip_engine.lock().report(who, cost);
}
return None;
}
let round = msg.round;
let set_id = msg.set_id;
let commit = msg.message;
let finalized_number = commit.target_number;
let gossip_validator = gossip_validator.clone();
let gossip_engine = gossip_engine.clone();
let neighbor_sender = neighbor_sender.clone();
let cb = move |outcome| match outcome {
voter::CommitProcessingOutcome::Good(_) => {
// if it checks out, gossip it. not accounting for
// any discrepancy between the actual ghost and the claimed
// finalized number.
gossip_validator.note_commit_finalized(
round,
set_id,
finalized_number,
|to, neighbor| neighbor_sender.send(to, neighbor),
);
gossip_engine.lock().gossip_message(topic, notification.message.clone(), false);
}
voter::CommitProcessingOutcome::Bad(_) => {
// report peer and do not gossip.
if let Some(who) = notification.sender.take() {
gossip_engine.lock().report(who, cost::INVALID_COMMIT);
}
}
};
let cb = voter::Callback::Work(Box::new(cb));
Some(voter::CommunicationIn::Commit(round.0, commit, cb))
};
let process_catch_up = move |
msg: FullCatchUpMessage<B>,
mut notification: sc_network_gossip::TopicNotification,
gossip_engine: &Arc<Mutex<GossipEngine<B>>>,
gossip_validator: &Arc<GossipValidator<B>>,
voters: &VoterSet<AuthorityId>,
| {
let gossip_validator = gossip_validator.clone();
let gossip_engine = gossip_engine.clone();
if let Err(cost) = check_catch_up::<B>(
&msg.message,
voters,
msg.set_id,
) {
if let Some(who) = notification.sender {
gossip_engine.lock().report(who, cost);
}
return None;
}
let cb = move |outcome| {
if let voter::CatchUpProcessingOutcome::Bad(_) = outcome {
// report peer
if let Some(who) = notification.sender.take() {
gossip_engine.lock().report(who, cost::INVALID_CATCH_UP);
}
}
gossip_validator.note_catch_up_message_processed();
};
let cb = voter::Callback::Work(Box::new(cb));
Some(voter::CommunicationIn::CatchUp(msg.message, cb))
};
gossip_engine.clone().lock().messages_for(topic)
.filter_map(|notification| {
// this could be optimized by decoding piecewise.
let decoded = GossipMessage::<B>::decode(&mut ¬ification.message[..]);
if let Err(ref e) = decoded {
trace!(target: "afg", "Skipping malformed commit message {:?}: {}", notification, e);
}
future::ready(decoded.map(move |d| (notification, d)).ok())
})
.filter_map(move |(notification, msg)| {
future::ready(match msg {
GossipMessage::Commit(msg) =>
process_commit(msg, notification, &gossip_engine, &gossip_validator, &*voters),
GossipMessage::CatchUp(msg) =>
process_catch_up(msg, notification, &gossip_engine, &gossip_validator, &*voters),
_ => {
debug!(target: "afg", "Skipping unknown message type");
None
}
})
})
}
impl<B: BlockT, N: Network<B>> Clone for NetworkBridge<B, N> {
fn clone(&self) -> Self {
NetworkBridge {
service: self.service.clone(),
gossip_engine: self.gossip_engine.clone(),
validator: Arc::clone(&self.validator),
neighbor_sender: self.neighbor_sender.clone(),
neighbor_packet_worker: self.neighbor_packet_worker.clone(),
gossip_validator_report_stream: self.gossip_validator_report_stream.clone(),
}
}
}
/// Type-safe wrapper around a round number.
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Encode, Decode)]
pub struct Round(pub RoundNumber);
/// Type-safe wrapper around a set ID.
#[derive(Debug, Clone, Copy, Eq, PartialEq, PartialOrd, Ord, Encode, Decode)]
pub struct SetId(pub SetIdNumber);
/// A sink for outgoing messages to the network. Any messages that are sent will
/// be replaced, as appropriate, according to the given `HasVoted`.
/// NOTE: The votes are stored unsigned, which means that the signatures need to
/// be "stable", i.e. we should end up with the exact same signed message if we
/// use the same raw message and key to sign. This is currently true for
/// `ed25519` and `BLS` signatures (which we might use in the future); care must
/// be taken when switching to different key types.
pub(crate) struct OutgoingMessages<Block: BlockT> {
round: RoundNumber,
set_id: SetIdNumber,
keystore: Option<LocalIdKeystore>,
sender: mpsc::Sender<SignedMessage<Block>>,
network: Arc<Mutex<GossipEngine<Block>>>,
has_voted: HasVoted<Block>,
}
impl<B: BlockT> Unpin for OutgoingMessages<B> {}
impl<Block: BlockT> Sink<Message<Block>> for OutgoingMessages<Block>
{
type Error = Error;
fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Sink::poll_ready(Pin::new(&mut self.sender), cx)
.map(|elem| { elem.map_err(|e| {
Error::Network(format!("Failed to poll_ready channel sender: {:?}", e))
})})
}
fn start_send(mut self: Pin<&mut Self>, mut msg: Message<Block>) -> Result<(), Self::Error> {
// if we've voted on this round previously under the same key, send that vote instead
match &mut msg {
finality_grandpa::Message::PrimaryPropose(ref mut vote) =>
if let Some(propose) = self.has_voted.propose() {
*vote = propose.clone();
},
finality_grandpa::Message::Prevote(ref mut vote) =>
if let Some(prevote) = self.has_voted.prevote() {
*vote = prevote.clone();
},
finality_grandpa::Message::Precommit(ref mut vote) =>
if let Some(precommit) = self.has_voted.precommit() {
*vote = precommit.clone();
},
}
// when locals exist, sign messages on import
if let Some(ref keystore) = self.keystore {
let target_hash = *(msg.target().0);
let signed = sp_finality_grandpa::sign_message(
keystore.as_ref(),
msg,
keystore.local_id().clone(),
self.round,
self.set_id,
).ok_or_else(
|| Error::Signing(format!(
"Failed to sign GRANDPA vote for round {} targetting {:?}", self.round, target_hash
))
)?;
let message = GossipMessage::Vote(VoteMessage::<Block> {
message: signed.clone(),
round: Round(self.round),
set_id: SetId(self.set_id),
});
debug!(
target: "afg",
"Announcing block {} to peers which we voted on in round {} in set {}",
target_hash,
self.round,
self.set_id,
);
telemetry!(
CONSENSUS_DEBUG; "afg.announcing_blocks_to_voted_peers";
"block" => ?target_hash, "round" => ?self.round, "set_id" => ?self.set_id,
);
// announce the block we voted on to our peers.
self.network.lock().announce(target_hash, Vec::new());
// propagate the message to peers
let topic = round_topic::<Block>(self.round, self.set_id);
self.network.lock().gossip_message(topic, message.encode(), false);
// forward the message to the inner sender.
return self.sender.start_send(signed).map_err(|e| {
Error::Network(format!("Failed to start_send on channel sender: {:?}", e))
});
};
Ok(())
}
fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Result<(), Self::Error>> {
Sink::poll_close(Pin::new(&mut self.sender), cx)
.map(|elem| { elem.map_err(|e| {
Error::Network(format!("Failed to poll_close channel sender: {:?}", e))
})})
}
}
// checks a compact commit. returns the cost associated with processing it if
// the commit was bad.
fn check_compact_commit<Block: BlockT>(
msg: &CompactCommit<Block>,
voters: &VoterSet<AuthorityId>,
round: Round,
set_id: SetId,
) -> Result<(), ReputationChange> {
	// Allow a total weight of at most `total_weight + f` (i.e. `4f + 1` when
	// `n = 3f + 1`): each of the up-to-`f` equivocating voters may contribute two
	// votes.
let f = voters.total_weight() - voters.threshold();
let full_threshold = (f + voters.total_weight()).0;
// check total weight is not out of range.
let mut total_weight = 0;
for (_, ref id) in &msg.auth_data {
if let Some(weight) = voters.get(id).map(|info| info.weight()) {
total_weight += weight.get();
if total_weight > full_threshold {
return Err(cost::MALFORMED_COMMIT);
}
} else {
debug!(target: "afg", "Skipping commit containing unknown voter {}", id);
return Err(cost::MALFORMED_COMMIT);
}
}
if total_weight < voters.threshold().get() {
return Err(cost::MALFORMED_COMMIT);
}
// check signatures on all contained precommits.
let mut buf = Vec::new();
for (i, (precommit, &(ref sig, ref id))) in msg.precommits.iter()
.zip(&msg.auth_data)
.enumerate()
{
use crate::communication::gossip::Misbehavior;
use finality_grandpa::Message as GrandpaMessage;
if !sp_finality_grandpa::check_message_signature_with_buffer(
&GrandpaMessage::Precommit(precommit.clone()),
id,
sig,
round.0,
set_id.0,
&mut buf,
) {
debug!(target: "afg", "Bad commit message signature {}", id);
telemetry!(CONSENSUS_DEBUG; "afg.bad_commit_msg_signature"; "id" => ?id);
let cost = Misbehavior::BadCommitMessage {
signatures_checked: i as i32,
blocks_loaded: 0,
equivocations_caught: 0,
}.cost();
return Err(cost);
}
}
Ok(())
}
// checks a catch up. returns the cost associated with processing it if
// the catch up was bad.
fn check_catch_up<Block: BlockT>(
msg: &CatchUp<Block>,
voters: &VoterSet<AuthorityId>,
set_id: SetId,
) -> Result<(), ReputationChange> {
	// Allow a total weight of at most `total_weight + f` (i.e. `4f + 1` when
	// `n = 3f + 1`): each of the up-to-`f` equivocating voters may contribute two
	// votes.
let f = voters.total_weight() - voters.threshold();
let full_threshold = (f + voters.total_weight()).0;
// check total weight is not out of range for a set of votes.
fn check_weight<'a>(
voters: &'a VoterSet<AuthorityId>,
votes: impl Iterator<Item=&'a AuthorityId>,
full_threshold: u64,
) -> Result<(), ReputationChange> {
let mut total_weight = 0;
for id in votes {
if let Some(weight) = voters.get(&id).map(|info| info.weight()) {
total_weight += weight.get();
if total_weight > full_threshold {
return Err(cost::MALFORMED_CATCH_UP);
}
} else {
debug!(target: "afg", "Skipping catch up message containing unknown voter {}", id);
return Err(cost::MALFORMED_CATCH_UP);
}
}
if total_weight < voters.threshold().get() {
return Err(cost::MALFORMED_CATCH_UP);
}
Ok(())
	}
check_weight(
voters,
msg.prevotes.iter().map(|vote| &vote.id),
full_threshold,
)?;
check_weight(
voters,
msg.precommits.iter().map(|vote| &vote.id),
full_threshold,
)?;
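	// Verify each signature, threading through the running count of signatures
	// checked so the misbehavior cost reflects the work already performed.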
fn check_signatures<'a, B, I>(
messages: I,
round: RoundNumber,
set_id: SetIdNumber,
mut signatures_checked: usize,
buf: &mut Vec<u8>,
) -> Result<usize, ReputationChange> where
B: BlockT,
I: Iterator<Item=(Message<B>, &'a AuthorityId, &'a AuthoritySignature)>,
{
use crate::communication::gossip::Misbehavior;
for (msg, id, sig) in messages {
signatures_checked += 1;
if !sp_finality_grandpa::check_message_signature_with_buffer(
&msg,
id,
sig,
round,
set_id,
buf,
) {
debug!(target: "afg", "Bad catch up message signature {}", id);
telemetry!(CONSENSUS_DEBUG; "afg.bad_catch_up_msg_signature"; "id" => ?id);
let cost = Misbehavior::BadCatchUpMessage {
signatures_checked: signatures_checked as i32,
}.cost();
return Err(cost);
}
}
Ok(signatures_checked)
}
let mut buf = Vec::new();
// check signatures on all contained prevotes.
let signatures_checked = check_signatures::<Block, _>(
msg.prevotes.iter().map(|vote| {
(finality_grandpa::Message::Prevote(vote.prevote.clone()), &vote.id, &vote.signature)
}),
msg.round_number,
set_id.0,
0,
&mut buf,
)?;
// check signatures on all contained precommits.
let _ = check_signatures::<Block, _>(
msg.precommits.iter().map(|vote| {
(finality_grandpa::Message::Precommit(vote.precommit.clone()), &vote.id, &vote.signature)
}),
msg.round_number,
set_id.0,
signatures_checked,
&mut buf,
)?;
Ok(())
}
/// An output sink for commit messages.
struct CommitsOut<Block: BlockT> {
network: Arc<Mutex<GossipEngine<Block>>>,
set_id: SetId,
is_voter: bool,
gossip_validator: Arc<GossipValidator<Block>>,
neighbor_sender: periodic::NeighborPacketSender<Block>,
}
impl<Block: BlockT> CommitsOut<Block> {
/// Create a new commit output stream.
pub(crate) fn new(
network: Arc<Mutex<GossipEngine<Block>>>,
set_id: SetIdNumber,
is_voter: bool,
gossip_validator: Arc<GossipValidator<Block>>,
neighbor_sender: periodic::NeighborPacketSender<Block>,
) -> Self {
CommitsOut {
network,
set_id: SetId(set_id),
is_voter,
gossip_validator,
neighbor_sender,
}
}
}
impl<Block: BlockT> Sink<(RoundNumber, Commit<Block>)> for CommitsOut<Block> {
type Error = Error;
fn poll_ready(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn start_send(self: Pin<&mut Self>, input: (RoundNumber, Commit<Block>)) -> Result<(), Self::Error> {
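		// only active voters broadcast commits; observers drop them silently.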
if !self.is_voter {
return Ok(());
}
let (round, commit) = input;
let round = Round(round);
telemetry!(CONSENSUS_DEBUG; "afg.commit_issued";
"target_number" => ?commit.target_number, "target_hash" => ?commit.target_hash,
);
let (precommits, auth_data) = commit.precommits.into_iter()
.map(|signed| (signed.precommit, (signed.signature, signed.id)))
.unzip();
let compact_commit = CompactCommit::<Block> {
target_hash: commit.target_hash,
target_number: commit.target_number,
precommits,
auth_data
};
let message = GossipMessage::Commit(FullCommitMessage::<Block> {
round,
set_id: self.set_id,
message: compact_commit,
});
let topic = global_topic::<Block>(self.set_id.0);
// the gossip validator needs to be made aware of the best commit-height we know of
// before gossiping
self.gossip_validator.note_commit_finalized(
round,
self.set_id,
commit.target_number,
|to, neighbor| self.neighbor_sender.send(to, neighbor),
);
self.network.lock().gossip_message(topic, message.encode(), false);
Ok(())
}
fn poll_close(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn poll_flush(self: Pin<&mut Self>, _: &mut Context) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
}
| {
"pile_set_name": "Github"
} |
console.log (Object.prototype.toString.call([].__proto__));
console.log (Object.prototype.toString.call([].__proto__.__proto__));
console.log (Object.prototype.toString.call([].__proto__.__proto__.__proto__));
| {
"pile_set_name": "Github"
} |
# Subscription
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**subscriptionId** | **String** | | [optional]
**applicationId** | **String** | |
**apiIdentifier** | **String** | |
**tier** | **String** | |
**status** | [**StatusEnum**](#StatusEnum) | | [optional]
<a name="StatusEnum"></a>
## Enum: StatusEnum
Name | Value
---- | -----
BLOCKED | "BLOCKED"
PROD_ONLY_BLOCKED | "PROD_ONLY_BLOCKED"
UNBLOCKED | "UNBLOCKED"
ON_HOLD | "ON_HOLD"
REJECTED | "REJECTED"
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2006-2011 Christian Plattner. All rights reserved.
* Please refer to the LICENSE.txt for licensing details.
*/
package ch.ethz.ssh2.packets;
import ch.ethz.ssh2.DHGexParameters;
/**
* PacketKexDhGexRequestOld.
*
* @author Christian Plattner
* @version 2.50, 03/15/10
*/
public class PacketKexDhGexRequestOld
{
byte[] payload;
int n;
public PacketKexDhGexRequestOld(DHGexParameters para)
{
this.n = para.getPref_group_len();
}
public byte[] getPayload()
{
if (payload == null)
{
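			// Lazily build the packet on first request: a single
			// SSH_MSG_KEX_DH_GEX_REQUEST_OLD message byte followed by the preferred
			// group length encoded as a uint32.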
TypesWriter tw = new TypesWriter();
tw.writeByte(Packets.SSH_MSG_KEX_DH_GEX_REQUEST_OLD);
tw.writeUINT32(n);
payload = tw.getBytes();
}
return payload;
}
}
| {
"pile_set_name": "Github"
} |
true
false
| {
"pile_set_name": "Github"
} |
//===========================================
// The following is for 8812A 1ANT BT Co-exist definition
//===========================================
#define BT_AUTO_REPORT_ONLY_8812A_1ANT 1
#define BT_INFO_8812A_1ANT_B_FTP BIT7
#define BT_INFO_8812A_1ANT_B_A2DP BIT6
#define BT_INFO_8812A_1ANT_B_HID BIT5
#define BT_INFO_8812A_1ANT_B_SCO_BUSY BIT4
#define BT_INFO_8812A_1ANT_B_ACL_BUSY BIT3
#define BT_INFO_8812A_1ANT_B_INQ_PAGE BIT2
#define BT_INFO_8812A_1ANT_B_SCO_ESCO BIT1
#define BT_INFO_8812A_1ANT_B_CONNECTION BIT0
#define BT_INFO_8812A_1ANT_A2DP_BASIC_RATE(_BT_INFO_EXT_) \
(((_BT_INFO_EXT_&BIT0))? TRUE:FALSE)
#define BTC_RSSI_COEX_THRESH_TOL_8812A_1ANT 2
#define BT_8812A_1ANT_WIFI_NOISY_THRESH 30 //max: 255
typedef enum _BT_INFO_SRC_8812A_1ANT{
BT_INFO_SRC_8812A_1ANT_WIFI_FW = 0x0,
BT_INFO_SRC_8812A_1ANT_BT_RSP = 0x1,
BT_INFO_SRC_8812A_1ANT_BT_ACTIVE_SEND = 0x2,
BT_INFO_SRC_8812A_1ANT_MAX
}BT_INFO_SRC_8812A_1ANT,*PBT_INFO_SRC_8812A_1ANT;
typedef enum _BT_8812A_1ANT_BT_STATUS{
BT_8812A_1ANT_BT_STATUS_NON_CONNECTED_IDLE = 0x0,
BT_8812A_1ANT_BT_STATUS_CONNECTED_IDLE = 0x1,
BT_8812A_1ANT_BT_STATUS_INQ_PAGE = 0x2,
BT_8812A_1ANT_BT_STATUS_ACL_BUSY = 0x3,
BT_8812A_1ANT_BT_STATUS_SCO_BUSY = 0x4,
BT_8812A_1ANT_BT_STATUS_ACL_SCO_BUSY = 0x5,
BT_8812A_1ANT_BT_STATUS_MAX
}BT_8812A_1ANT_BT_STATUS,*PBT_8812A_1ANT_BT_STATUS;
typedef enum _BT_8812A_1ANT_WIFI_STATUS{
BT_8812A_1ANT_WIFI_STATUS_NON_CONNECTED_IDLE = 0x0,
BT_8812A_1ANT_WIFI_STATUS_NON_CONNECTED_ASSO_AUTH_SCAN = 0x1,
BT_8812A_1ANT_WIFI_STATUS_CONNECTED_SCAN = 0x2,
BT_8812A_1ANT_WIFI_STATUS_CONNECTED_SPECIAL_PKT = 0x3,
BT_8812A_1ANT_WIFI_STATUS_CONNECTED_IDLE = 0x4,
BT_8812A_1ANT_WIFI_STATUS_CONNECTED_BUSY = 0x5,
BT_8812A_1ANT_WIFI_STATUS_MAX
}BT_8812A_1ANT_WIFI_STATUS,*PBT_8812A_1ANT_WIFI_STATUS;
typedef enum _BT_8812A_1ANT_COEX_ALGO{
BT_8812A_1ANT_COEX_ALGO_UNDEFINED = 0x0,
BT_8812A_1ANT_COEX_ALGO_SCO = 0x1,
BT_8812A_1ANT_COEX_ALGO_HID = 0x2,
BT_8812A_1ANT_COEX_ALGO_A2DP = 0x3,
BT_8812A_1ANT_COEX_ALGO_A2DP_PANHS = 0x4,
BT_8812A_1ANT_COEX_ALGO_PANEDR = 0x5,
BT_8812A_1ANT_COEX_ALGO_PANHS = 0x6,
BT_8812A_1ANT_COEX_ALGO_PANEDR_A2DP = 0x7,
BT_8812A_1ANT_COEX_ALGO_PANEDR_HID = 0x8,
BT_8812A_1ANT_COEX_ALGO_HID_A2DP_PANEDR = 0x9,
BT_8812A_1ANT_COEX_ALGO_HID_A2DP = 0xa,
BT_8812A_1ANT_COEX_ALGO_MAX = 0xb,
}BT_8812A_1ANT_COEX_ALGO,*PBT_8812A_1ANT_COEX_ALGO;
typedef struct _COEX_DM_8812A_1ANT{
// hw setting
u1Byte preAntPosType;
u1Byte curAntPosType;
// fw mechanism
BOOLEAN bCurIgnoreWlanAct;
BOOLEAN bPreIgnoreWlanAct;
u1Byte prePsTdma;
u1Byte curPsTdma;
u1Byte psTdmaPara[5];
u1Byte psTdmaDuAdjType;
BOOLEAN bAutoTdmaAdjust;
BOOLEAN bPrePsTdmaOn;
BOOLEAN bCurPsTdmaOn;
BOOLEAN bPreBtAutoReport;
BOOLEAN bCurBtAutoReport;
u1Byte preLps;
u1Byte curLps;
u1Byte preRpwm;
u1Byte curRpwm;
// sw mechanism
BOOLEAN bPreLowPenaltyRa;
BOOLEAN bCurLowPenaltyRa;
u4Byte preVal0x6c0;
u4Byte curVal0x6c0;
u4Byte preVal0x6c4;
u4Byte curVal0x6c4;
u4Byte preVal0x6c8;
u4Byte curVal0x6c8;
u1Byte preVal0x6cc;
u1Byte curVal0x6cc;
BOOLEAN bLimitedDig;
u4Byte backupArfrCnt1; // Auto Rate Fallback Retry cnt
u4Byte backupArfrCnt2; // Auto Rate Fallback Retry cnt
u2Byte backupRetryLimit;
u1Byte backupAmpduMaxTime;
// algorithm related
u1Byte preAlgorithm;
u1Byte curAlgorithm;
u1Byte btStatus;
u1Byte wifiChnlInfo[3];
u4Byte preRaMask;
u4Byte curRaMask;
u1Byte preArfrType;
u1Byte curArfrType;
u1Byte preRetryLimitType;
u1Byte curRetryLimitType;
u1Byte preAmpduTimeType;
u1Byte curAmpduTimeType;
u4Byte nArpCnt;
u1Byte errorCondition;
} COEX_DM_8812A_1ANT, *PCOEX_DM_8812A_1ANT;
typedef struct _COEX_STA_8812A_1ANT{
BOOLEAN bBtLinkExist;
BOOLEAN bScoExist;
BOOLEAN bA2dpExist;
BOOLEAN bHidExist;
BOOLEAN bPanExist;
BOOLEAN bUnderLps;
BOOLEAN bUnderIps;
u4Byte specialPktPeriodCnt;
u4Byte highPriorityTx;
u4Byte highPriorityRx;
u4Byte lowPriorityTx;
u4Byte lowPriorityRx;
s1Byte btRssi;
BOOLEAN bBtTxRxMask;
u1Byte preBtRssiState;
u1Byte preWifiRssiState[4];
BOOLEAN bC2hBtInfoReqSent;
u1Byte btInfoC2h[BT_INFO_SRC_8812A_1ANT_MAX][10];
u4Byte btInfoC2hCnt[BT_INFO_SRC_8812A_1ANT_MAX];
u4Byte btInfoQueryCnt;
BOOLEAN bC2hBtInquiryPage;
BOOLEAN bC2hBtPage; //Add for win8.1 page out issue
BOOLEAN bWiFiIsHighPriTask; //Add for win8.1 page out issue
u1Byte btRetryCnt;
u1Byte btInfoExt;
u4Byte popEventCnt;
u1Byte nScanAPNum;
u4Byte nCRCOK_CCK;
u4Byte nCRCOK_11g;
u4Byte nCRCOK_11n;
u4Byte nCRCOK_11nAgg;
u4Byte nCRCErr_CCK;
u4Byte nCRCErr_11g;
u4Byte nCRCErr_11n;
u4Byte nCRCErr_11nAgg;
BOOLEAN bCCKLock;
BOOLEAN bPreCCKLock;
u1Byte nCoexTableType;
BOOLEAN bForceLpsOn;
}COEX_STA_8812A_1ANT, *PCOEX_STA_8812A_1ANT;
//===========================================
// The following is interface which will notify coex module.
//===========================================
VOID
EXhalbtc8812a1ant_PowerOnSetting(
IN PBTC_COEXIST pBtCoexist
);
VOID
EXhalbtc8812a1ant_PreLoadFirmware(
IN PBTC_COEXIST pBtCoexist
);
VOID
EXhalbtc8812a1ant_InitHwConfig(
IN PBTC_COEXIST pBtCoexist,
IN BOOLEAN bWifiOnly
);
VOID
EXhalbtc8812a1ant_InitCoexDm(
IN PBTC_COEXIST pBtCoexist
);
VOID
EXhalbtc8812a1ant_IpsNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_LpsNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_ScanNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_ConnectNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_MediaStatusNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_SpecialPacketNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_BtInfoNotify(
IN PBTC_COEXIST pBtCoexist,
IN pu1Byte tmpBuf,
IN u1Byte length
);
VOID
EXhalbtc8812a1ant_RfStatusNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte type
);
VOID
EXhalbtc8812a1ant_HaltNotify(
IN PBTC_COEXIST pBtCoexist
);
VOID
EXhalbtc8812a1ant_PnpNotify(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte pnpState
);
VOID
EXhalbtc8812a1ant_CoexDmReset(
IN PBTC_COEXIST pBtCoexist
);
VOID
EXhalbtc8812a1ant_Periodical(
IN PBTC_COEXIST pBtCoexist
);
VOID
EXhalbtc8812a1ant_DbgControl(
IN PBTC_COEXIST pBtCoexist,
IN u1Byte opCode,
IN u1Byte opLen,
IN pu1Byte pData
);
VOID
EXhalbtc8812a1ant_DisplayCoexInfo(
IN PBTC_COEXIST pBtCoexist
);
| {
"pile_set_name": "Github"
} |
package crdapps
import (
"testing"
routefake "github.com/openshift/client-go/route/clientset/versioned/fake"
v1alpha1 "github.com/stakater/Forecastle/pkg/apis/forecastle/v1alpha1"
"github.com/stakater/Forecastle/pkg/kube"
"github.com/stakater/Forecastle/pkg/testutil"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
kubefake "k8s.io/client-go/kubernetes/fake"
)
func Test_getURL(t *testing.T) {
clients := kube.Clients{
RoutesClient: routefake.NewSimpleClientset(),
KubernetesClient: kubefake.NewSimpleClientset(),
}
type args struct {
clients kube.Clients
forecastleApp v1alpha1.ForecastleApp
}
tests := []struct {
name string
args args
want string
err error
}{
{
name: "TestGetURLWithDefaultURLValue",
args: args{
clients: clients,
forecastleApp: *testutil.CreateForecastleApp("app-1", "https://google.com", "default", "https://icon"),
},
want: "https://google.com",
},
{
name: "TestGetURLWithNoURL",
args: args{
clients: clients,
forecastleApp: *testutil.CreateForecastleApp("app-1", "", "default", "https://icon"),
},
want: "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got, err := getURL(tt.args.clients, tt.args.forecastleApp); got != tt.want && err != tt.err {
t.Errorf("getURL() = %v, want %v, err = %v, wantErr = %v", got, tt.want, err, tt.err)
}
})
}
}
func Test_discoverURLFromRefs(t *testing.T) {
clients := kube.Clients{
RoutesClient: routefake.NewSimpleClientset(),
KubernetesClient: kubefake.NewSimpleClientset(),
}
clients.KubernetesClient.ExtensionsV1beta1().Ingresses("").Create(testutil.CreateIngressWithHost("my-app-ingress", "https://ingress-url.com"))
clients.RoutesClient.RouteV1().Routes("").Create(testutil.CreateRouteWithHost("my-app-route", "ingress-url.com"))
type args struct {
clients kube.Clients
forecastleApp v1alpha1.ForecastleApp
}
tests := []struct {
name string
args args
want string
err error
}{
{
name: "TestDiscoverURLFromRefsWithIngressName",
args: args{
clients: clients,
forecastleApp: *testutil.CreateForecastleAppWithURLFromIngress("app-1", "default", "https://icon", "my-app-ingress"),
},
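			// the fixture ingress host above already contains a scheme, so the
			// discovered URL comes back double-prefixed; the expectation mirrors that.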
want: "http://https://ingress-url.com",
},
{
name: "TestDiscoverURLFromRefsWithRouteName",
args: args{
clients: clients,
forecastleApp: *testutil.CreateForecastleAppWithURLFromRoute("app-1", "default", "https://icon", "my-app-route"),
},
want: "http://ingress-url.com",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got, err := discoverURLFromRefs(tt.args.clients, tt.args.forecastleApp); got != tt.want && err != tt.err {
t.Errorf("discoverURLFromRefs() = %v, want %v, err = %v, wantErr = %v", got, tt.want, err, tt.err)
}
})
}
clients.KubernetesClient.ExtensionsV1beta1().Ingresses("").Delete("my-app-ingress", &metav1.DeleteOptions{})
clients.RoutesClient.RouteV1().Routes("").Delete("my-app-route", &metav1.DeleteOptions{})
}
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.1
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace ClearCanvas.ImageServer.Common {
[global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "10.0.0.0")]
public sealed partial class ProductManifestServiceSettings : global::System.Configuration.ApplicationSettingsBase {
private static ProductManifestServiceSettings defaultInstance = ((ProductManifestServiceSettings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new ProductManifestServiceSettings())));
public static ProductManifestServiceSettings Default {
get {
return defaultInstance;
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("http://localhost:9998/")]
public string BaseUrl {
get {
return ((string)(this["BaseUrl"]));
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("")]
public string FailoverBaseUrl {
get {
return ((string)(this["FailoverBaseUrl"]));
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("ClearCanvas.ImageServer.Common.ClientWsHttpConfiguration, ClearCanvas.ImageServer" +
".Common")]
public string ConfigurationClass {
get {
return ((string)(this["ConfigurationClass"]));
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("2000000")]
public int MaxReceivedMessageSize {
get {
return ((int)(this["MaxReceivedMessageSize"]));
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("None")]
public global::System.ServiceModel.Security.X509CertificateValidationMode CertificateValidationMode {
get {
return ((global::System.ServiceModel.Security.X509CertificateValidationMode)(this["CertificateValidationMode"]));
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("NoCheck")]
public global::System.Security.Cryptography.X509Certificates.X509RevocationMode RevocationMode {
get {
return ((global::System.Security.Cryptography.X509Certificates.X509RevocationMode)(this["RevocationMode"]));
}
}
[global::System.Configuration.ApplicationScopedSettingAttribute()]
[global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
[global::System.Configuration.DefaultSettingValueAttribute("")]
public string UserCredentialsProviderClass {
get {
return ((string)(this["UserCredentialsProviderClass"]));
}
}
}
}
| {
"pile_set_name": "Github"
} |
<?php
/*
* Copyright 2007-2017 Charles du Jeu - Abstrium SAS <team (at) pyd.io>
* This file is part of Pydio.
*
* Pydio is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Pydio is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Pydio. If not, see <http://www.gnu.org/licenses/>.
*
* The latest code can be found at <https://pydio.com>.
*/
namespace Pydio\Access\Core\Filter;
use Pydio\Access\Core\Model\AJXP_Node;
use Pydio\Access\Core\Model\UserSelection;
use Pydio\Core\Utils\Vars\PathUtils;
defined('AJXP_EXEC') or die( 'Access not allowed');
/**
* Class ContentFilter
*/
class ContentFilter {
public $filters = array();
public $virtualPaths = array();
/**
* @param AJXP_Node[] $nodes
*/
function __construct($nodes){
foreach($nodes as $n){
$virtualPath = $this->getVirtualPath($n->getPath());
$this->filters[$n->getPath()] = $virtualPath;
}
$this->virtualPaths = array_flip($this->filters);
}
/**
* @param $path
* @return string
*/
private function getVirtualPath($path){
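        // Build the virtual path from the first 10 hex characters of the real
        // path's md5 plus its basename.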
return "/".substr(md5($path), 0, 10)."/".basename($path);
}
/**
* @param UserSelection $userSelection
*/
function filterUserSelection( &$userSelection ){
if($userSelection->isEmpty()){
foreach($this->filters as $path => $virtual){
$userSelection->addFile($path);
}
}else{
$newFiles = array();
foreach($userSelection->getFiles() as $f){
if(isSet($this->virtualPaths[$f])){
$newFiles[] = $this->virtualPaths[$f];
}else{
$testB = base64_decode($f);
if(isSet($this->virtualPaths[$testB])){
$newFiles[] = $this->virtualPaths[$testB];
}
}
}
$userSelection->setFiles($newFiles);
}
}
/**
* @return mixed|string
*/
function getBaseDir(){
return PathUtils::forwardSlashDirname(array_keys($this->filters)[0]);
}
/**
* Retrieves the path of the first object
* @return mixed|string
*/
function getUniquePath(){
return PathUtils::forwardSlashBasename(array_keys($this->filters)[0]);
}
/**
* @param AJXP_Node $node
* @return String
*/
function externalPath(AJXP_Node $node){
return $this->getVirtualPath($node->getPath());
}
/**
* @param String $vPath
* @return String mixed
*/
function filterExternalPath($vPath){
if(isSet($this->virtualPaths) && isSet($this->virtualPaths[$vPath])){
return $this->virtualPaths[$vPath];
}
return $vPath;
}
/**
* @param String $oldPath
* @param String $newPath
* @return bool Operation result
*/
public function movePath($oldPath, $newPath){
if(isSet($this->filters[$oldPath])){
$this->filters[$newPath] = $this->getVirtualPath($newPath);
unset($this->filters[$oldPath]);
$this->virtualPaths = array_flip($this->filters);
return true;
}
return false;
}
/**
* @return array public data as array, pre-utf8 encoded
*/
public function toArray(){
$data = array("filters" => array(), "virtualPaths" => array());
foreach($this->filters as $k => $v){
$data["filters"][$k] = $v;
}
foreach($this->virtualPaths as $k => $v){
$data["virtualPaths"][$k] = $v;
}
return $data;
}
/**
* @param $filters
*/
public function fromFilterArray($filters){
$this->filters = $filters;
$this->virtualPaths = array_flip($this->filters);
}
} | {
"pile_set_name": "Github"
} |
# coding=utf-8
"""Python Arlo setup script."""
from setuptools import setup
def readme():
with open('README.md') as desc:
return desc.read()
setup(
name='arlo',
py_modules=['arlo', 'request', 'eventstream'],
version='1.2.35',
    description='Python Arlo is a library written in Python 2.7/3.x ' +
'which exposes the Netgear Arlo cameras via the apis that are consumed by their website.',
long_description=readme(),
long_description_content_type='text/markdown',
author='Jeffrey D. Walter',
author_email='[email protected]',
url='https://github.com/jeffreydwalter/arlo',
license='Apache Software License',
include_package_data=True,
install_requires=['monotonic', 'requests', 'sseclient==0.0.22', 'PySocks'],
keywords=[
'arlo',
'camera',
'home automation',
'netgear',
'python',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Other Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| {
"pile_set_name": "Github"
} |
// Copyright 2017 The Cockroach Authors.
//
// Use of this software is governed by the Business Source License
// included in the file licenses/BSL.txt.
//
// As of the Change Date specified in that file, in accordance with
// the Business Source License, use of this software will be governed
// by the Apache License, Version 2.0, included in the file
// licenses/APL.txt.
package tree
// Discard represents a DISCARD statement.
type Discard struct {
Mode DiscardMode
}
var _ Statement = &Discard{}
// DiscardMode is an enum of the various discard modes.
type DiscardMode int
const (
// DiscardModeAll represents a DISCARD ALL statement.
DiscardModeAll DiscardMode = iota
)
// Format implements the NodeFormatter interface.
func (node *Discard) Format(ctx *FmtCtx) {
switch node.Mode {
case DiscardModeAll:
ctx.WriteString("DISCARD ALL")
}
}
// String implements the Statement interface.
func (node *Discard) String() string {
return AsString(node)
}
| {
"pile_set_name": "Github"
} |
Description:
Compute MD4 checksum.
Files:
lib/md4.h
lib/md4.c
m4/md4.m4
Depends-on:
byteswap
stdalign
stdint
configure.ac:
AC_REQUIRE([AC_C_RESTRICT])
gl_MD4
Makefile.am:
lib_SOURCES += md4.c
Include:
"md4.h"
License:
LGPLv2+
Maintainer:
Simon Josefsson
| {
"pile_set_name": "Github"
} |
import _plotly_utils.basevalidators
class XaxisValidator(_plotly_utils.basevalidators.SubplotidValidator):
def __init__(self, plotly_name="xaxis", parent_name="histogram2d", **kwargs):
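        # Defaults to subplot "x"; per the edit_type below, changing the axis
        # triggers a recalculation and clears axis types.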
super(XaxisValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
dflt=kwargs.pop("dflt", "x"),
edit_type=kwargs.pop("edit_type", "calc+clearAxisTypes"),
role=kwargs.pop("role", "info"),
**kwargs
)
| {
"pile_set_name": "Github"
} |
#[doc = "Reader of register CHINTENCLR"]
pub type R = crate::R<u8, super::CHINTENCLR>;
#[doc = "Writer for register CHINTENCLR"]
pub type W = crate::W<u8, super::CHINTENCLR>;
#[doc = "Register CHINTENCLR `reset()`'s with value 0"]
impl crate::ResetValue for super::CHINTENCLR {
type Type = u8;
#[inline(always)]
fn reset_value() -> Self::Type {
0
}
}
#[doc = "Reader of field `OVR`"]
pub type OVR_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OVR`"]
pub struct OVR_W<'a> {
w: &'a mut W,
}
impl<'a> OVR_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !0x01) | ((value as u8) & 0x01);
self.w
}
}
#[doc = "Reader of field `EVD`"]
pub type EVD_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EVD`"]
pub struct EVD_W<'a> {
w: &'a mut W,
}
impl<'a> EVD_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u8) & 0x01) << 1);
self.w
}
}
impl R {
#[doc = "Bit 0 - Channel Overrun Interrupt Disable"]
#[inline(always)]
pub fn ovr(&self) -> OVR_R {
OVR_R::new((self.bits & 0x01) != 0)
}
#[doc = "Bit 1 - Channel Event Detected Interrupt Disable"]
#[inline(always)]
pub fn evd(&self) -> EVD_R {
EVD_R::new(((self.bits >> 1) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 0 - Channel Overrun Interrupt Disable"]
#[inline(always)]
pub fn ovr(&mut self) -> OVR_W {
OVR_W { w: self }
}
#[doc = "Bit 1 - Channel Event Detected Interrupt Disable"]
#[inline(always)]
pub fn evd(&mut self) -> EVD_W {
EVD_W { w: self }
}
}
| {
"pile_set_name": "Github"
} |
export enum NotificationAnimationType {
Fade = 'fade',
FromTop = 'fromTop',
FromRight = 'fromRight',
FromBottom = 'fromBottom',
FromLeft = 'fromLeft',
Scale = 'scale',
Rotate = 'rotate'
}
| {
"pile_set_name": "Github"
} |
/*
* The Clear BSD License
* Copyright (c) 2016, Freescale Semiconductor, Inc.
* Copyright 2016-2017 NXP
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided
* that the following conditions are met:
*
* o Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* o Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* o Neither the name of the copyright holder nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef _PIN_MUX_H_
#define _PIN_MUX_H_
/*******************************************************************************
* Definitions
******************************************************************************/
/*! @brief Direction type */
typedef enum _pin_mux_direction
{
kPIN_MUX_DirectionInput = 0U, /* Input direction */
kPIN_MUX_DirectionOutput = 1U, /* Output direction */
kPIN_MUX_DirectionInputOrOutput = 2U /* Input or output direction */
} pin_mux_direction_t;
/*!
* @addtogroup pin_mux
* @{
*/
/*******************************************************************************
* API
******************************************************************************/
#if defined(__cplusplus)
extern "C" {
#endif
/* FC0_RXD_SDA_MOSI (number 31), U18[4]/TO_MUX_P0_0-ISP_RX */
#define BOARD_INITPINS_DEBUG_UART_RX_PERIPHERAL FLEXCOMM0 /*!< Device name: FLEXCOMM0 */
#define BOARD_INITPINS_DEBUG_UART_RX_SIGNAL RXD_SDA_MOSI /*!< FLEXCOMM0 signal: RXD_SDA_MOSI */
#define BOARD_INITPINS_DEBUG_UART_RX_PIN_NAME FC0_RXD_SDA_MOSI /*!< Pin name */
#define BOARD_INITPINS_DEBUG_UART_RX_LABEL "U18[4]/TO_MUX_P0_0-ISP_RX" /*!< Label */
#define BOARD_INITPINS_DEBUG_UART_RX_NAME "DEBUG_UART_RX" /*!< Identifier name */
/* FC0_TXD_SCL_MISO (number 32), U6[4]/U22[3]/P0_1-ISP_TX */
#define BOARD_INITPINS_DEBUG_UART_TX_PERIPHERAL FLEXCOMM0 /*!< Device name: FLEXCOMM0 */
#define BOARD_INITPINS_DEBUG_UART_TX_SIGNAL TXD_SCL_MISO /*!< FLEXCOMM0 signal: TXD_SCL_MISO */
#define BOARD_INITPINS_DEBUG_UART_TX_PIN_NAME FC0_TXD_SCL_MISO /*!< Pin name */
#define BOARD_INITPINS_DEBUG_UART_TX_LABEL "U6[4]/U22[3]/P0_1-ISP_TX" /*!< Label */
#define BOARD_INITPINS_DEBUG_UART_TX_NAME "DEBUG_UART_TX" /*!< Identifier name */
/* SWO (number 50), J2[10]/JS30/U4[12]/TDO-SWO_TRGT-SPIFI_IO2 */
#define BOARD_INITPINS_DEBUG_SWD_SWO_PERIPHERAL SWD /*!< Device name: SWD */
#define BOARD_INITPINS_DEBUG_SWD_SWO_SIGNAL SWO /*!< SWD signal: SWO */
#define BOARD_INITPINS_DEBUG_SWD_SWO_PIN_NAME SWO /*!< Pin name */
#define BOARD_INITPINS_DEBUG_SWD_SWO_LABEL "J2[10]/JS30/U4[12]/TDO-SWO_TRGT-SPIFI_IO2" /*!< Label */
#define BOARD_INITPINS_DEBUG_SWD_SWO_NAME "DEBUG_SWD_SWO" /*!< Identifier name */
/*!
* @brief Configures pin routing and optionally pin electrical features.
*
*/
void BOARD_InitPins(void); /* Function assigned for the Cortex-M0P */
#if defined(__cplusplus)
}
#endif
/*!
* @}
*/
#endif /* _PIN_MUX_H_ */
/*******************************************************************************
* EOF
******************************************************************************/
| {
"pile_set_name": "Github"
} |
$TTL 1
; KO
$ORIGIN .. ; Missing label between dots
| {
"pile_set_name": "Github"
} |
/* http://www.positioniseverything.net/easyclearing.html */
#djDebug .djdt-clearfix:after {
content: ".";
display: block;
height: 0;
clear: both;
visibility: hidden;
}
#djDebug .djdt-clearfix {display: inline-block;}
/* Hides from IE-mac \*/
#djDebug .djdt-clearfix {display: block;}
* html #djDebug .djdt-clearfix {height: 1%;}
/* end hide from IE-mac */
/* Debug Toolbar CSS Reset, adapted from Eric Meyer's CSS Reset */
#djDebug {color:#000;background:#FFF;}
#djDebug, #djDebug div, #djDebug span, #djDebug applet, #djDebug object, #djDebug iframe,
#djDebug h1, #djDebug h2, #djDebug h3, #djDebug h4, #djDebug h5, #djDebug h6, #djDebug p, #djDebug blockquote, #djDebug pre,
#djDebug a, #djDebug abbr, #djDebug acronym, #djDebug address, #djDebug big, #djDebug cite, #djDebug code,
#djDebug del, #djDebug dfn, #djDebug em, #djDebug font, #djDebug img, #djDebug ins, #djDebug kbd, #djDebug q, #djDebug s, #djDebug samp,
#djDebug small, #djDebug strike, #djDebug strong, #djDebug sub, #djDebug sup, #djDebug tt, #djDebug var,
#djDebug b, #djDebug u, #djDebug i, #djDebug center,
#djDebug dl, #djDebug dt, #djDebug dd, #djDebug ol, #djDebug ul, #djDebug li,
#djDebug fieldset, #djDebug form, #djDebug label, #djDebug legend,
#djDebug table, #djDebug caption, #djDebug tbody, #djDebug tfoot, #djDebug thead, #djDebug tr, #djDebug th, #djDebug td,
#djDebug button {
margin:0;
padding:0;
min-width:0;
width:auto;
border:0;
outline:0;
font-size:12px;
line-height:1.5em;
color:#000;
vertical-align:baseline;
background-color:transparent;
font-family:sans-serif;
text-align:left;
text-shadow: none;
white-space: normal;
-webkit-transition: none;
-moz-transition: none;
-o-transition: none;
transition: none;
}
#djDebug button, #djDebug a.button {
background-color: #eee;
background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #eee), color-stop(100%, #cccccc));
background-image: -webkit-linear-gradient(top, #eee, #cccccc);
background-image: -moz-linear-gradient(top, #eee, #cccccc);
background-image: -ms-linear-gradient(top, #eee, #cccccc);
background-image: -o-linear-gradient(top, #eee, #cccccc);
background-image: linear-gradient(top, #eee, #cccccc);
border: 1px solid #ccc;
border-bottom: 1px solid #bbb;
-webkit-border-radius: 3px;
-moz-border-radius: 3px;
border-radius: 3px;
color: #333;
line-height: 1;
padding: 0 8px;
text-align: center;
text-shadow: 0 1px 0 #eee;
}
#djDebug button:hover, #djDebug a.button:hover {
background-color: #ddd;
background-image: -webkit-gradient(linear, left top, left bottom, color-stop(0%, #ddd), color-stop(100%, #bbb));
background-image: -webkit-linear-gradient(top, #ddd, #bbb);
background-image: -moz-linear-gradient(top, #ddd, #bbb);
background-image: -ms-linear-gradient(top, #ddd, #bbb);
background-image: -o-linear-gradient(top, #ddd, #bbb);
background-image: linear-gradient(top, #ddd, #bbb);
border-color: #bbb;
border-bottom-color: #999;
cursor: pointer;
text-shadow: 0 1px 0 #ddd;
}
#djDebug button:active, #djDebug a.button:active {
border: 1px solid #aaa;
border-bottom: 1px solid #888;
-webkit-box-shadow: inset 0 0 5px 2px #aaa, 0 1px 0 0 #eee;
-moz-box-shadow: inset 0 0 5px 2px #aaa, 0 1px 0 0 #eee;
box-shadow: inset 0 0 5px 2px #aaa, 0 1px 0 0 #eee;
}
#djDebug #djDebugToolbar {
background-color:#111;
width:200px;
z-index:100000000;
position:fixed;
top:0;
bottom:0;
right:0;
opacity:0.9;
overflow-y: auto;
}
#djDebug #djDebugToolbar small {
color:#999;
}
#djDebug #djDebugToolbar ul {
margin:0;
padding:0;
list-style:none;
}
#djDebug #djDebugToolbar li {
border-bottom:1px solid #222;
color:#fff;
display:block;
font-weight:bold;
float:none;
margin:0;
padding:0;
position:relative;
width:auto;
}
#djDebug #djDebugToolbar input[type=checkbox] {
float: right;
margin: 10px;
}
#djDebug #djDebugToolbar li>a,
#djDebug #djDebugToolbar li>div.djdt-contentless {
font-weight:normal;
font-style:normal;
text-decoration:none;
display:block;
font-size:16px;
padding:10px 10px 5px 25px;
color:#fff;
}
#djDebug #djDebugToolbar li>div.djdt-disabled {
font-style: italic;
color: #999;
}
#djDebug #djDebugToolbar li a:hover {
color:#111;
background-color:#ffc;
}
#djDebug #djDebugToolbar li.djdt-active {
background: #333 no-repeat left center;
background-image: url(../img/indicator.png);
padding-left:10px;
}
#djDebug #djDebugToolbar li.djdt-active a:hover {
color:#b36a60;
background-color:transparent;
}
#djDebug #djDebugToolbar li small {
font-size:12px;
color:#999;
font-style:normal;
text-decoration:none;
font-variant:small-caps;
}
#djDebug #djDebugToolbarHandle {
position:fixed;
background-color:#fff;
border:1px solid #111;
top:30px;
right:0;
z-index:100000000;
opacity:0.75;
}
#djDebug #djShowToolBarButton {
display:block;
height:75px;
width:30px;
border-right:none;
border-bottom:4px solid #fff;
border-top:4px solid #fff;
border-left:4px solid #fff;
color:#fff;
font-size:10px;
font-weight:bold;
text-decoration:none;
text-align:center;
text-indent:-999999px;
background: #000 no-repeat left center;
background-image: url(../img/djdt_vertical.png);
opacity:0.5;
}
#djDebug #djShowToolBarButton:hover {
background-color:#111;
border-top-color:#FFE761;
border-left-color:#FFE761;
border-bottom-color:#FFE761;
cursor:move;
opacity:1.0;
}
#djDebug code {
display:block;
font-family:Consolas, Monaco, "Bitstream Vera Sans Mono", "Lucida Console", monospace;
font-size: 12px;
white-space:pre;
overflow:auto;
}
#djDebug .djDebugOdd {
background-color:#f5f5f5;
}
#djDebug .djdt-panelContent {
display:none;
position:fixed;
margin:0;
top:0;
right:200px;
bottom:0;
left:0px;
background-color:#eee;
color:#666;
z-index:100000000;
}
#djDebug .djdt-panelContent > div {
border-bottom:1px solid #ddd;
}
#djDebug .djDebugPanelTitle {
position:absolute;
background-color:#ffc;
color:#666;
padding-left:20px;
top:0;
right:0;
left:0;
height:50px;
}
#djDebug .djDebugPanelTitle code {
display:inline;
font-size:inherit;
}
#djDebug .djDebugPanelContent {
position:absolute;
top:50px;
right:0;
bottom:0;
left:0;
height:auto;
padding:5px 0 0 20px;
}
#djDebug .djDebugPanelContent .djdt-loader {
display:block;
margin:80px auto;
}
#djDebug .djDebugPanelContent .djdt-scroll {
height:100%;
overflow:auto;
display:block;
padding:0 10px 0 0;
}
#djDebug h3 {
font-size:24px;
font-weight:normal;
line-height:50px;
}
#djDebug h4 {
font-size:20px;
font-weight:bold;
margin-top:0.8em;
}
#djDebug .djdt-panelContent table {
border:1px solid #ccc;
border-collapse:collapse;
width:100%;
background-color:#fff;
display:table;
margin-top:0.8em;
overflow: auto;
}
#djDebug .djdt-panelContent tbody td,
#djDebug .djdt-panelContent tbody th {
vertical-align:top;
padding:2px 3px;
}
#djDebug .djdt-panelContent tbody td.djdt-time {
text-align: center;
}
#djDebug .djdt-panelContent thead th {
padding:1px 6px 1px 3px;
text-align:left;
font-weight:bold;
font-size:14px;
white-space: nowrap;
}
#djDebug .djdt-panelContent tbody th {
width:12em;
text-align:right;
color:#666;
padding-right:.5em;
}
#djDebug .djTemplateContext {
background-color:#fff;
}
/*
#djDebug .djdt-panelContent p a:hover, #djDebug .djdt-panelContent dd a:hover {
color:#111;
background-color:#ffc;
}
#djDebug .djdt-panelContent p {
padding:0 5px;
}
#djDebug .djdt-panelContent p, #djDebug .djdt-panelContent table, #djDebug .djdt-panelContent ol, #djDebug .djdt-panelContent ul, #djDebug .djdt-panelContent dl {
margin:5px 0 15px;
background-color:#fff;
}
#djDebug .djdt-panelContent table {
clear:both;
border:0;
padding:0;
margin:0;
border-collapse:collapse;
border-spacing:0;
}
#djDebug .djdt-panelContent table a {
color:#000;
padding:2px 4px;
}
#djDebug .djdt-panelContent table a:hover {
background-color:#ffc;
}
#djDebug .djdt-panelContent table th {
background-color:#333;
font-weight:bold;
color:#fff;
padding:3px 7px 3px;
text-align:left;
cursor:pointer;
}
#djDebug .djdt-panelContent table td {
padding:5px 10px;
font-size:14px;
background-color:#fff;
color:#000;
vertical-align:top;
border:0;
}
#djDebug .djdt-panelContent table tr.djDebugOdd td {
background-color:#eee;
}
*/
#djDebug .djdt-panelContent .djDebugClose {
display:block;
position:absolute;
top:4px;
right:15px;
height:40px;
width:40px;
background: no-repeat center center;
background-image: url(../img/close.png);
}
#djDebug .djdt-panelContent .djDebugClose:hover {
background-image: url(../img/close_hover.png);
}
#djDebug .djdt-panelContent .djDebugClose.djDebugBack {
background-image: url(../img/back.png);
}
#djDebug .djdt-panelContent .djDebugClose.djDebugBack:hover {
background-image: url(../img/back_hover.png);
}
#djDebug .djdt-panelContent dt, #djDebug .djdt-panelContent dd {
display:block;
}
#djDebug .djdt-panelContent dt {
margin-top:0.75em;
}
#djDebug .djdt-panelContent dd {
margin-left:10px;
}
#djDebug a.toggleTemplate {
padding:4px;
background-color:#bbb;
-webkit-border-radius:3px;
-moz-border-radius:3px;
border-radius:3px;
}
#djDebug a.toggleTemplate:hover {
padding:4px;
background-color:#444;
color:#ffe761;
-webkit-border-radius:3px;
-moz-border-radius:3px;
border-radius:3px;
}
#djDebug .djDebugSqlWrap {
position:relative;
}
#djDebug .djDebugCollapsed {
display: none;
text-decoration: none;
color: #333;
}
#djDebug .djDebugUncollapsed {
color: #333;
text-decoration: none;
}
#djDebug .djUnselected {
display: none;
}
#djDebug tr.djHiddenByDefault {
display: none;
}
#djDebug tr.djSelected {
display: table-row;
}
#djDebug .djDebugSql {
word-break:break-word;
z-index:100000002;
}
#djDebug .djSQLDetailsDiv tbody th {
text-align: left;
}
#djDebug .djSqlExplain td {
white-space: pre;
}
#djDebug span.djDebugLineChart {
background-color:#777;
height:3px;
position:absolute;
bottom:0;
top:0;
left:0;
display:block;
z-index:1000000001;
}
#djDebug span.djDebugLineChartWarning {
background-color:#900;
}
#djDebug .highlight { color:#000; }
#djDebug .highlight .err { color:#000; } /* Error */
#djDebug .highlight .g { color:#000; } /* Generic */
#djDebug .highlight .k { color:#000; font-weight:bold } /* Keyword */
#djDebug .highlight .o { color:#000; } /* Operator */
#djDebug .highlight .n { color:#000; } /* Name */
#djDebug .highlight .mi { color:#000; font-weight:bold } /* Literal.Number.Integer */
#djDebug .highlight .l { color:#000; } /* Literal */
#djDebug .highlight .x { color:#000; } /* Other */
#djDebug .highlight .p { color:#000; } /* Punctuation */
#djDebug .highlight .m { color:#000; font-weight:bold } /* Literal.Number */
#djDebug .highlight .s { color:#333 } /* Literal.String */
#djDebug .highlight .w { color:#888888 } /* Text.Whitespace */
#djDebug .highlight .il { color:#000; font-weight:bold } /* Literal.Number.Integer.Long */
#djDebug .highlight .na { color:#333 } /* Name.Attribute */
#djDebug .highlight .nt { color:#000; font-weight:bold } /* Name.Tag */
#djDebug .highlight .nv { color:#333 } /* Name.Variable */
#djDebug .highlight .s2 { color:#333 } /* Literal.String.Double */
#djDebug .highlight .cp { color:#333 } /* Comment.Preproc */
#djDebug .djdt-timeline {
width: 30%;
}
#djDebug .djDebugTimeline {
position: relative;
height: 100%;
min-height: 100%;
}
#djDebug div.djDebugLineChart {
position: absolute;
left: 0;
right: 0;
top: 0;
bottom: 0;
vertical-align: middle;
}
#djDebug div.djDebugLineChart strong {
text-indent: -10000em;
display: block;
font-weight: normal;
vertical-align: middle;
background-color:#ccc;
}
#djDebug div.djDebugLineChartWarning strong {
background-color:#900;
}
#djDebug .djDebugInTransaction div.djDebugLineChart strong {
background-color: #d3ff82;
}
#djDebug .djDebugStartTransaction div.djDebugLineChart strong {
border-left: 1px solid #94b24d;
}
#djDebug .djDebugEndTransaction div.djDebugLineChart strong {
border-right: 1px solid #94b24d;
}
#djDebug .djdt-panelContent ul.djdt-stats {
position: relative;
list-style-type: none;
}
#djDebug .djdt-panelContent ul.djdt-stats li {
width: 30%;
float: left;
}
#djDebug .djdt-panelContent ul.djdt-stats li strong.djdt-label {
display: block;
}
#djDebug .djdt-panelContent ul.djdt-stats li span.djdt-color {
height: 12px;
width: 3px;
display: inline-block;
}
#djDebug .djdt-panelContent ul.djdt-stats li span.djdt-info {
display: block;
padding-left: 5px;
}
#djDebug .djdt-panelContent thead th {
white-space: nowrap;
}
#djDebug .djDebugRowWarning .djdt-time {
color: red;
}
#djDebug .djdt-panelContent table .djdt-toggle {
width: 14px;
padding-top: 3px;
}
#djDebug .djdt-panelContent table .djdt-actions {
min-width: 70px;
white-space: nowrap;
}
#djDebug .djdt-panelContent table .djdt-color {
width: 3px;
}
#djDebug .djdt-panelContent table .djdt-color span {
width: 3px;
height: 12px;
overflow: hidden;
padding: 0;
}
#djDebug .djToggleSwitch {
text-decoration: none;
border: 1px solid #999;
height: 12px;
width: 12px;
line-height: 12px;
text-align: center;
color: #777;
display: inline-block;
filter: progid:DXImageTransform.Microsoft.gradient(startColorstr='#FFF', endColorstr='#DCDCDC'); /* for IE */
background: -webkit-gradient(linear, left top, left bottom, from(#FFF), to(#DCDCDC)); /* for webkit browsers */
background:-moz-linear-gradient(center top , #FFFFFF 0pt, #DCDCDC 100%) repeat scroll 0 0 transparent;
}
#djDebug .djNoToggleSwitch {
height: 14px;
width: 14px;
display: inline-block;
}
#djDebug .djSQLDetailsDiv {
margin-top:0.8em;
}
#djDebug pre {
white-space: -moz-pre-wrap; /* Mozilla, since 1999 */
white-space: -pre-wrap; /* Opera 4-6 */
white-space: -o-pre-wrap; /* Opera 7 */
white-space: pre-wrap; /* CSS-3 */
word-wrap: break-word; /* Internet Explorer 5.5+ */
color: #555;
border:1px solid #ccc;
border-collapse:collapse;
background-color:#fff;
display:block;
overflow: auto;
padding:2px 3px;
margin-bottom: 3px;
font-family:Consolas, Monaco, "Bitstream Vera Sans Mono", "Lucida Console", monospace;
}
#djDebug .djdt-stack span {
color: #000;
font-weight: bold;
}
#djDebug .djdt-stack span.djdt-path,
#djDebug .djdt-stack pre.djdt-locals,
#djDebug .djdt-stack pre.djdt-locals span {
color: #777;
font-weight: normal;
}
#djDebug .djdt-stack span.djdt-code {
font-weight: normal;
}
#djDebug .djdt-stack pre.djdt-locals {
margin: 0 27px 27px 27px;
}
#djDebug .djdt-width-20 {
width: 20%;
}
#djDebug .djdt-width-60 {
width: 60%;
}
#djDebug .djdt-highlighted {
background-color: lightgrey;
}
.djdt-hidden {
display: none;
}
| {
"pile_set_name": "Github"
} |
- content_for :page_title, @user.full_name
.outer
.container
= render "/header", title: @user.full_name
.charts
= render @time_series.chart, time_series: @time_series
.charts
= render "/charts/pie_chart", title: t("charts.hours_spent_per_project"), data: EntryStats.new(@time_series.entries_for_time_span).hours_for_subject_collection(Project.all).to_json
= render "/charts/pie_chart", title: t("charts.hours_spent_per_category"), data: EntryStats.new(@time_series.entries_for_time_span).hours_for_subject_collection(Category.all).to_json
= link_to t("users.show.entries"), user_entries_path(@user)
| {
"pile_set_name": "Github"
} |
package com.yarolegovich.mp.io;
import android.app.Dialog;
import android.content.Context;
import android.content.DialogInterface;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.EditText;
import androidx.appcompat.app.AlertDialog;
import androidx.fragment.app.FragmentActivity;
import com.kunzisoft.androidclearchroma.ChromaDialog;
import com.kunzisoft.androidclearchroma.IndicatorMode;
import com.kunzisoft.androidclearchroma.colormode.ColorMode;
import com.kunzisoft.androidclearchroma.listener.OnColorSelectedListener;
import com.yarolegovich.mp.R;
import java.util.HashSet;
import java.util.Set;
/**
* Created by yarolegovich on 06.05.2016.
*/
public class StandardUserInputModule implements UserInputModule {
protected Context context;
public StandardUserInputModule(Context context) {
this.context = context;
}
@Override
public void showEditTextInput(
String key,
CharSequence title,
CharSequence defaultValue,
final Listener<String> listener) {
final View view = LayoutInflater.from(context).inflate(R.layout.dialog_edittext, null);
final EditText inputField = (EditText) view.findViewById(R.id.mp_text_input);
if (defaultValue != null) {
inputField.setText(defaultValue);
inputField.setSelection(defaultValue.length());
}
final Dialog dialog = new AlertDialog.Builder(context)
.setTitle(title)
.setView(view)
.show();
view.findViewById(R.id.mp_btn_confirm).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
listener.onInput(inputField.getText().toString());
dialog.dismiss();
}
});
}
@Override
public void showSingleChoiceInput(
String key,
CharSequence title,
CharSequence[] displayItems,
final CharSequence[] values,
int selected,
final Listener<String> listener) {
new AlertDialog.Builder(context)
.setTitle(title)
.setSingleChoiceItems(displayItems, selected, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
String selected = values[which].toString();
listener.onInput(selected);
dialog.dismiss();
}
})
/*.setItems(displayItems, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
String selected = values[which].toString();
listener.onInput(selected);
}
})*/
.show();
}
@Override
public void showMultiChoiceInput(
String key,
CharSequence title,
CharSequence[] displayItems,
final CharSequence[] values,
final boolean[] itemStates,
final Listener<Set<String>> listener) {
new AlertDialog.Builder(context)
.setTitle(title)
.setMultiChoiceItems(displayItems, itemStates, new DialogInterface.OnMultiChoiceClickListener() {
@Override
public void onClick(DialogInterface dialog, int which, boolean isChecked) {
itemStates[which] = isChecked;
}
})
.setOnDismissListener(new DialogInterface.OnDismissListener() {
@Override
public void onDismiss(DialogInterface dialog) {
Set<String> result = new HashSet<>();
for (int i = 0; i < values.length; i++) {
if (itemStates[i]) {
result.add(values[i].toString());
}
}
listener.onInput(result);
}
})
.show();
}
@Override
public void showColorSelectionInput(
String key,
CharSequence title,
int defaultColor,
final Listener<Integer> colorListener) {
FragmentActivity activity;
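        // ChromaDialog is a DialogFragment, so the hosting Context must be a
        // FragmentActivity in order to obtain a support FragmentManager.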
try {
activity = (FragmentActivity) context;
} catch (ClassCastException exc) {
throw new AssertionError(context.getString(R.string.exc_not_frag_activity_subclass));
}
final String tag = colorListener.getClass().getSimpleName();
ChromaDialog dialog = new ChromaDialog.Builder()
.initialColor(defaultColor)
.colorMode(ColorMode.ARGB)
.indicatorMode(IndicatorMode.HEX)
.create();
dialog.setOnColorSelectedListener(new OnColorSelectedListener() {
@Override
public void onPositiveButtonClick(int color) {
colorListener.onInput(color);
}
@Override
public void onNegativeButtonClick(int color) {
}
});
dialog.show(activity.getSupportFragmentManager(), tag);
}
}
| {
"pile_set_name": "Github"
} |
# Copyright (c) Open Enclave SDK contributors.
# Licensed under the MIT License.
add_enclave_library(oehostfs STATIC hostfs.c)
maybe_build_using_clangw(oehostfs)
enclave_include_directories(oehostfs PRIVATE ${CMAKE_BINARY_DIR}/syscall
${PROJECT_SOURCE_DIR}/include/openenclave/corelibc)
enclave_enable_code_coverage(oehostfs)
enclave_link_libraries(oehostfs PRIVATE oesyscall)
install_enclaves(
TARGETS
oehostfs
EXPORT
openenclave-targets
ARCHIVE
DESTINATION
${CMAKE_INSTALL_LIBDIR}/openenclave/enclave)
| {
"pile_set_name": "Github"
} |
package com.xixiciTest
import com.xixici.P10
import org.scalatest.FunSuite
/**
* Created by xixici
* Date: 2019/3/13
* Project Name: sword-offer-scala-sbt
* Project URL: https://github.com/xixici/sword-offer-scala
**/
class P10Test extends FunSuite {
test("P10Test") {
val Expected = 5
val Actual = P10.RectCover(4)
assert(Expected === Actual)
}
} | {
"pile_set_name": "Github"
} |
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="author" content="{{ Setting::get('author') }}">
<meta name="description" content="{{ Setting::get('desc') }}">
<meta name="keywords" content="{{ Setting::get('keywords') }}">
<link rel="shortcut icon" type="image/x-icon" href="{{ asset('images/favicon.ico') }}">
<link rel="stylesheet" href="{{ asset(elixir('css/app.css')) }}">
</head>
<body>
<header>
<navigation></navigation>
</header>
<main id="app">
<div class="container">
<router-view></router-view>
</div>
</main>
<footer class="page-footer transparent">
<div class="footer-copyright">
<div class="container black-text text-lighten-5">
Copyright © 2015-2016 forehalo
<span class="right black-text text-lighten-5" to="/">{{ Setting::get('title') }}</span>
</div>
</div>
</footer>
<script>
window.Laravel = {!! json_encode([
'csrfToken' => csrf_token(),
'config' => Setting::all(),
'currentViewType' => Request::segment(1) ?: 'default',
'isProduction' => env('APP_ENV') === 'prod' || env('APP_ENV') === 'production'
]) !!};
window.dictionary = {!! json_encode(trans('app')) !!};
</script>
<script src="{{ asset(elixir('js/app.js')) }}"></script>
<script>
if ('serviceWorker' in navigator) {
window.addEventListener('load', function() {
navigator.serviceWorker
.register('/service-worker.js')
.then(function(reg) {
reg.onupdatefound = function() {
var installingWorker = reg.installing;
installingWorker.onstatechange = function() {
switch (installingWorker.state) {
case 'installed':
if (navigator.serviceWorker.controller) {
console.log('New or updated content is available.');
} else {
console.log('Content is now available offline!');
}
break;
case 'redundant':
console.error('The installing service worker became redundant.');
break;
}
};
};
}).catch(function(e) {
console.error('Error during service worker registration:', e);
});
});
}
</script>
</body>
</html>
| {
"pile_set_name": "Github"
} |
ccc05a53f8c4a7f74b75a07b2cfba7b1
| {
"pile_set_name": "Github"
} |
from pymesh.TestCase import TestCase
from pymesh import compute_outer_hull
from pymesh.meshio import form_mesh
from pymesh.meshutils import generate_box_mesh
from pymesh.meshutils import merge_meshes
from pymesh.misc import Quaternion
import numpy as np
import math
import unittest
class OuterHullTest(TestCase):
def assert_valid_attributes(self, mesh, outer_hull):
self.assertTrue(outer_hull.has_attribute("flipped"))
self.assertTrue(outer_hull.has_attribute("face_sources"))
flipped = outer_hull.get_attribute("flipped")
face_sources = outer_hull.get_attribute("face_sources")
self.assertEqual(outer_hull.num_faces, len(flipped))
self.assertEqual(outer_hull.num_faces, len(face_sources))
self.assertTrue(np.all(face_sources >= 0))
self.assertTrue(np.all(face_sources < mesh.num_faces))
def test_simple_cube(self):
mesh = generate_box_mesh(
np.array([0, 0, 0]), np.array([1, 1, 1]))
outer_hulls = compute_outer_hull(mesh, all_layers=True)
self.assertEqual(1, len(outer_hulls))
outer_hull = outer_hulls[0]
self.assertTrue(outer_hull.is_closed())
self.assertEqual(mesh.num_vertices, outer_hull.num_vertices)
self.assertEqual(mesh.num_faces, outer_hull.num_faces)
self.assert_valid_attributes(mesh, outer_hull)
def test_intersecting_cubes(self):
mesh_1 = generate_box_mesh(
np.array([0, 0, 0]), np.array([2, 2, 2]))
mesh_2 = generate_box_mesh(
np.array([1, 1, 1]), np.array([3, 3, 3]))
mesh = merge_meshes((mesh_1, mesh_2))
outer_hull = compute_outer_hull(mesh)
self.assertTrue(outer_hull.is_closed())
self.assert_valid_attributes(mesh, outer_hull)
def test_nested_cubes(self):
mesh_1 = generate_box_mesh(
np.array([0, 0, 0]), np.array([3, 3, 3]))
mesh_2 = generate_box_mesh(
np.array([1, 1, 1]), np.array([2, 2, 2]))
mesh = merge_meshes((mesh_1, mesh_2))
outer_hulls = compute_outer_hull(mesh, all_layers=True)
self.assertEqual(2, len(outer_hulls))
outer_hull = outer_hulls[0]
interior_mesh = outer_hulls[1]
self.assertTrue(outer_hull.is_closed())
self.assertEqual(1, outer_hull.num_components)
self.assert_valid_attributes(mesh, outer_hull)
self.assertEqual(8, interior_mesh.num_vertices)
self.assert_array_equal(([1, 1, 1], [2, 2, 2]),
interior_mesh.bbox)
def test_multiple_components(self):
mesh_1 = generate_box_mesh(
np.array([0, 0, 0]), np.array([1, 1, 1]))
mesh_2 = generate_box_mesh(
np.array([2, 2, 2]), np.array([3, 3, 3]))
mesh = merge_meshes((mesh_1, mesh_2))
outer_hulls = compute_outer_hull(mesh, all_layers=True)
self.assertEqual(1, len(outer_hulls))
outer_hull = outer_hulls[0]
self.assertTrue(outer_hull.is_closed())
self.assertEqual(2, outer_hull.num_components)
self.assert_valid_attributes(mesh, outer_hull)
def test_face_face_touch(self):
mesh_1 = generate_box_mesh(
np.array([0, 0, 0]), np.array([1, 1, 1]))
mesh_2 = generate_box_mesh(
np.array([0, 0, 1]), np.array([1, 1, 2]))
mesh = merge_meshes((mesh_1, mesh_2))
outer_hulls = compute_outer_hull(mesh, all_layers=True)
self.assertEqual(2, len(outer_hulls))
outer_hull = outer_hulls[0]
interior_mesh = outer_hulls[1]
self.assertTrue(outer_hull.is_closed())
self.assertEqual(1, outer_hull.num_components)
self.assert_valid_attributes(mesh, outer_hull)
self.assert_array_equal(([0, 0, 0], [1, 1, 2]), outer_hull.bbox)
self.assertTrue(interior_mesh.is_closed())
self.assertEqual(1, interior_mesh.num_components)
self.assert_array_equal(([0, 0, 1], [1, 1, 1]), interior_mesh.bbox)
| {
"pile_set_name": "Github"
} |
#!/bin/bash
#
# This is a git extension that merges a pull request or topic branch via
# rebasing so as to avoid a merge commit.
#
# Copyright 2015 Bazaarvoice, Inc., RetailMeNot, Inc., and git-land contributors
# Licensed under Apache 2.0
# http://www.apache.org/licenses/LICENSE-2.0
project_root=`echo $(git rev-parse --show-toplevel)`
# This lockfile exists primarily so that other automation, such as file change
# monitoring, can react to the fact that this process is running.
lockfile=$project_root/.git-land-in-progress
touch $lockfile
function exit_and_cleanup() {
rm -f $lockfile
if [[ $# == 2 ]]; then
printf "$2"
fi
exit $1
}
function usage() {
echo "$1"
echo ""
echo "Usage: git land [options] [<remote>] <pull request number>[:<target branch>]"
echo " git land [options] [<remote>] <branch>[:<target branch>]"
echo ""
echo " <remote>: the remote repo (default: origin)"
echo " <pull request number>: a pull request to merge and close"
echo " <branch>: a branch to merge and close"
echo " <target branch>: the branch to merge to (default: master)"
echo ""
echo "Options:"
echo " -f, --force-push-topic: force push <branch> to <remote> after rebasing"
echo " -F, --no-force-push-topic: do not force push <branch> to <remote> after rebasing"
echo ""
echo "Examples:"
echo " git land origin 23:master"
echo " git land my-feature"
echo " git land my-feature:target-branch"
echo ""
exit_and_cleanup 1
}
# set upstream remote, defaulting to origin
remote=$(git config git-land.remote)
if [ -z "$remote" ]; then
remote='origin'
fi
# set target branch, defaulting to master
target_branch=$(git config git-land.target)
if [ -z "$target_branch" ]; then
target_branch='master'
fi
# are we configured to force push after rebasing?
force_push_topic=$(git config --bool git-land.force-push-topic)
[[ "$force_push_topic" = 'true' ]] \
&& force_push_topic=true || force_push_topic=false
args=()
# Parse args
while [[ $# > 0 ]]; do
arg="$1"
case $arg in
-f|--force-push-topic)
force_push_topic=true
shift
;;
-F|--no-force-push-topic)
force_push_topic=false
shift
;;
*)
args[${#args[@]}]=$arg
shift
;;
esac
done
merge_branch=""
case ${#args[@]} in
0)
usage "specified no args"
;;
1)
merge_branch=${args[0]}
;;
2)
remote=${args[0]}
merge_branch=${args[1]}
;;
*)
usage "too many args"
;;
esac
# the branch specifier is source:target, but you can also just say source for short,
# in which case, target defaults to "master"
if [[ $merge_branch =~ ^[^:]+:[^:]+$ ]]; then
branches=($(echo $merge_branch | tr ':' '\n')) # split on ':' into an array
merge_branch=${branches[0]}
target_branch=${branches[1]}
fi
# set merge branch if merging a PR
if [[ $merge_branch =~ ^[0-9]+$ ]]; then
if [ "$force_push_topic" = true ]; then
exit_and_cleanup 1 "Cannot force push a PR (https://help.github.com/articles/checking-out-pull-requests-locally/#tips)"
fi
pr=$merge_branch
merge_branch="$remote/pr/$pr"
fi
read -r -p "Are you sure you want to merge $merge_branch into $remote/$target_branch? [Y/n] " response
if [[ ! ($response = '' || $response =~ ^([yY][eE][sS]|[yY])$) ]]; then
exit_and_cleanup 1
fi
if [ -x "$project_root/.git/hooks/pre-land" ]; then
$project_root/.git/hooks/pre-land $merge_branch $target_branch $remote || \
exit_and_cleanup $? "pre-land hook returned a non-zero error code"
fi
# sync local $target_branch with latest on github
(git checkout $target_branch && \
git fetch $remote && \
git reset --hard $remote/$target_branch) || \
exit_and_cleanup $? "Could not sync local $target_branch with main repo"
# rebase and squash
(git checkout $merge_branch && \
git rebase -i $target_branch) || \
exit_and_cleanup $? "Could not checkout or rebase $merge_branch on $target_branch"
# append github tag to close PR if we can and the last commit message omits it
if [ -n "$pr" ]; then
commit_message=$(git log -1 --pretty=%B)
if [[ ! $commit_message =~ \[closes?\ \#"$pr"\] ]]; then
if ! (git commit -n --amend -m "$commit_message"$'\n\n'"[close #$pr]"); then
echo "Could not append commit message tag to close #$pr"
fi
fi
fi
# optionally force push source branch to origin
if [ "$force_push_topic" = true ]; then
git push -f $remote $merge_branch
fi
# merge the PR and push
head=$(git rev-parse HEAD)
(git checkout $target_branch && \
git merge --ff-only $head) || \
exit_and_cleanup $? "Could not fast-forward merge $merge_branch into $target_branch"
git push $remote $target_branch || \
exit_and_cleanup $? "Could not push $target_branch to $remote"
if [ -x "$project_root/.git/hooks/post-land" ]; then
$project_root/.git/hooks/post-land $merge_branch $target_branch $remote || \
exit_and_cleanup $? "post-land hook returned a non-zero error code"
fi
exit_and_cleanup $?
| {
"pile_set_name": "Github"
} |
//
// KCSIBeaconTests.m
// KCSIBeaconTests
//
// Copyright 2015 Kinvey, Inc
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#import <XCTest/XCTest.h>
#import "KCSBeaconManager.h"
@interface KCSIBeaconTests : XCTestCase <KCSBeaconManagerDelegate>
@property (nonatomic, strong) NSError* rangingError;
@end
@implementation KCSIBeaconTests
- (void)setUp
{
[super setUp];
// Put setup code here. This method is called before the invocation of each test method in the class.
}
- (void)tearDown
{
// Put teardown code here. This method is called after the invocation of each test method in the class.
[super tearDown];
}
- (void)testExample
{
KCSBeaconManager* manager = [[KCSBeaconManager alloc] init];
manager.delegate = self;
[manager startMonitoringForRegion:[[NSUUID UUID] UUIDString] identifier:@"com.kinvey.foo"];
while (1) {
[[NSRunLoop currentRunLoop] runUntilDate:[NSDate dateWithTimeIntervalSinceNow:2]];
}
}
- (void) rangingFailedForRegion:(CLBeaconRegion *)region withError:(NSError *)error
{
self.rangingError = error;
}
@end
| {
"pile_set_name": "Github"
} |
class PassthroughFormatter {
format(value) {
return value;
}
parse(text) {
return text;
}
}
export default PassthroughFormatter;
| {
"pile_set_name": "Github"
} |
{
global: {},
components: {
// Component-level parameters, defined initially from 'ks prototype use ...'
// Each object below should correspond to a component in the components/ directory
workflows: {
bucket: "kubeflow-ci_temp",
mode: "minikube",
name: "jlewi-kubeflow-gke-deploy-test-4-3a8b",
namespace: "kubeflow-test-infra",
platform: "minikube",
prow: "JOB_NAME=kubeflow-presubmit-test,JOB_TYPE=presubmit,PULL_NUMBER=209,REPO_NAME=kubeflow,REPO_OWNER=kubeflow,BUILD_NUMBER=997a",
prow_env: "JOB_NAME=kubeflow-gke-deploy-test,JOB_TYPE=presubmit,PULL_NUMBER=4,REPO_NAME=kubeflow,REPO_OWNER=jlewi,BUILD_NUMBER=3a8b",
gkeApiVersion: "",
workflowName: "",
},
gke_deploy: {
bucket: "kubeflow-ci_temp",
name: "jlewi-kubeflow-gke-deploy-test-4-3a8b",
namespace: "kubeflow-test-infra",
prow: "JOB_NAME=kubeflow-presubmit-test,JOB_TYPE=presubmit,PULL_NUMBER=209,REPO_NAME=kubeflow,REPO_OWNER=kubeflow,BUILD_NUMBER=997a",
prow_env: "JOB_NAME=kubeflow-gke-deploy-test,JOB_TYPE=presubmit,PULL_NUMBER=4,REPO_NAME=kubeflow,REPO_OWNER=jlewi,BUILD_NUMBER=3a8b",
gkeApiVersion: "",
},
kfctl_test: {
bucket: "kubeflow-ci_temp",
name: "somefakename",
namespace: "kubeflow-test-infra",
prow_env: "",
deleteKubeflow: true,
gkeApiVersion: "v1",
workflowName: "kfctl",
},
kfctl_go_test: {
bucket: "kubeflow-ci_temp",
name: "somefakename",
namespace: "kubeflow-test-infra",
prow_env: "",
deleteKubeflow: true,
gkeApiVersion: "v1",
workflowName: "kfctl-go",
useBasicAuth: "false",
useIstio: "true",
testEndpoint: "false",
configPath: "bootstrap/config/kfctl_gcp_iap_master.yaml",
},
click_deploy_test: {
bucket: "kubeflow-ci_temp",
name: "somefakename",
namespace: "kubeflow-test-infra",
prow_env: "REPO_NAME=kubeflow,REPO_OWNER=kubeflow",
gkeApiVersion: "v1",
installIstio: false,
workflowName: "deployapp",
},
unit_tests: {
bucket: "kubeflow-ci_temp",
name: "somefakename",
namespace: "kubeflow-test-infra",
prow_env: "",
gkeApiVersion: "",
workflowName: "unittest",
},
tfserving: {
commit: "master",
name: "somefakename",
namespace: "kubeflow-test-infra",
prow_env: "REPO_OWNER=kubeflow,REPO_NAME=kubeflow,PULL_BASE_SHA=master",
gkeApiVersion: "",
workflowName: "tfserving",
},
},
}
| {
"pile_set_name": "Github"
} |
# How to check a file's inode number
::: tip Issue
Questions and discussion are welcome in the issue: [Issue 115](https://github.com/shfshanyue/Daily-Question/issues/115)
:::
::: tip Author
Answered by: [shfshanyue](https://github.com/shfshanyue)
:::
You can use `ls` or `stat`
``` bash
$ stat hello.txt
File: ‘hello.txt’
Size: 30 Blocks: 8 IO Block: 4096 regular file
Device: fd01h/64769d Inode: 917526 Links: 1
Access: (0644/-rw-r--r--) Uid: ( 0/ root) Gid: ( 0/ root)
Access: 2019-12-10 16:15:55.253325208 +0800
Modify: 2019-12-10 16:15:52.740653330 +0800
Change: 2019-12-10 16:15:52.742653069 +0800
$ ls -i hello.txt
917526 hello.txt
``` | {
"pile_set_name": "Github"
} |
/*
Author: aeroson
Description:
Sets the text on the dialog
Parameters:
None
Returns:
Nothing
*/
#define AGM_CrewInfo_TextIDC 11123
private["_text", "_ctrl"];
disableSerialization;
_text = _this select 0;
_ctrl = (uiNamespace getVariable "AGM_CrewInfo_dialog") displayCtrl AGM_CrewInfo_TextIDC;
_ctrl ctrlSetStructuredText parseText _text;
_ctrl ctrlCommit 0;
| {
"pile_set_name": "Github"
} |
{% if scid %}
SELECT
cl.oid as oid,
relname as name,
nsp.nspname as schema,
pg_get_userbyid(relowner) AS seqowner,
description as comment,
array_to_string(relacl::text[], ', ') as acl,
(SELECT array_agg(provider || '=' || label) FROM pg_seclabels sl1 WHERE sl1.objoid=cl.oid) AS securities
FROM pg_class cl
LEFT OUTER JOIN pg_namespace nsp ON cl.relnamespace = nsp.oid
LEFT OUTER JOIN pg_description des ON (des.objoid=cl.oid
AND des.classoid='pg_class'::regclass)
WHERE relkind = 'S' AND relnamespace = {{scid}}::oid
{% if seid %}AND cl.oid = {{seid}}::oid {% endif %}
ORDER BY relname
{% endif %}
| {
"pile_set_name": "Github"
} |
package(default_visibility = ["//visibility:public"])
load(
"@io_bazel_rules_go//go:def.bzl",
"go_library",
)
go_library(
name = "go_default_library",
srcs = [
"controllerrevision.go",
"deployment.go",
"expansion_generated.go",
"scale.go",
"statefulset.go",
"statefulset_expansion.go",
],
importpath = "k8s.io/client-go/listers/apps/v1beta1",
deps = [
"//vendor/k8s.io/api/apps/v1beta1:go_default_library",
"//vendor/k8s.io/api/core/v1:go_default_library",
"//vendor/k8s.io/apimachinery/pkg/api/errors:go_default_library",
"//vendor/k8s.io/apimachinery/pkg/apis/meta/v1:go_default_library",
"//vendor/k8s.io/apimachinery/pkg/labels:go_default_library",
"//vendor/k8s.io/client-go/tools/cache:go_default_library",
],
)
filegroup(
name = "package-srcs",
srcs = glob(["**"]),
tags = ["automanaged"],
visibility = ["//visibility:private"],
)
filegroup(
name = "all-srcs",
srcs = [":package-srcs"],
tags = ["automanaged"],
)
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<configuration status="OFF">
<appenders>
<Console name="Console" target="SYSTEM_OUT">
<PatternLayout pattern="%d [%t] %-5p %M(%F:%L) - %m%n"/>
</Console>
</appenders>
<loggers>
<root level="info">
<appender-ref ref="Console"/>
<appender-ref ref="Console" level="error"/>
</root>
</loggers>
</configuration> | {
"pile_set_name": "Github"
} |
/* (C) COPYRIGHT 1994-2002 Xiph.Org Foundation */
/* Modified by Jean-Marc Valin */
/*
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
- Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/* opus_types.h based on ogg_types.h from libogg */
/**
@file opus_types.h
@brief Opus reference implementation types
*/
#ifndef OPUS_TYPES_H
#define OPUS_TYPES_H
/* Use the real stdint.h if it's there (taken from Paul Hsieh's pstdint.h) */
#if (defined(__STDC__) && __STDC__ && defined(__STDC_VERSION__) && __STDC_VERSION__ >= 199901L) || (defined(__GNUC__) && (defined(_STDINT_H) || defined(_STDINT_H_)) || defined (HAVE_STDINT_H))
#include <stdint.h>
typedef int16_t opus_int16;
typedef uint16_t opus_uint16;
typedef int32_t opus_int32;
typedef uint32_t opus_uint32;
#elif defined(_WIN32)
# if defined(__CYGWIN__)
# include <_G_config.h>
typedef _G_int32_t opus_int32;
typedef _G_uint32_t opus_uint32;
typedef _G_int16 opus_int16;
typedef _G_uint16 opus_uint16;
# elif defined(__MINGW32__)
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
# elif defined(__MWERKS__)
typedef int opus_int32;
typedef unsigned int opus_uint32;
typedef short opus_int16;
typedef unsigned short opus_uint16;
# else
/* MSVC/Borland */
typedef __int32 opus_int32;
typedef unsigned __int32 opus_uint32;
typedef __int16 opus_int16;
typedef unsigned __int16 opus_uint16;
# endif
#elif defined(__MACOS__)
# include <sys/types.h>
typedef SInt16 opus_int16;
typedef UInt16 opus_uint16;
typedef SInt32 opus_int32;
typedef UInt32 opus_uint32;
#elif (defined(__APPLE__) && defined(__MACH__)) /* MacOS X Framework build */
# include <sys/types.h>
typedef int16_t opus_int16;
typedef u_int16_t opus_uint16;
typedef int32_t opus_int32;
typedef u_int32_t opus_uint32;
#elif defined(__BEOS__)
/* Be */
# include <inttypes.h>
typedef int16 opus_int16;
typedef u_int16 opus_uint16;
typedef int32_t opus_int32;
typedef u_int32_t opus_uint32;
#elif defined (__EMX__)
/* OS/2 GCC */
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#elif defined (DJGPP)
/* DJGPP */
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#elif defined(R5900)
/* PS2 EE */
typedef int opus_int32;
typedef unsigned opus_uint32;
typedef short opus_int16;
typedef unsigned short opus_uint16;
#elif defined(__SYMBIAN32__)
/* Symbian GCC */
typedef signed short opus_int16;
typedef unsigned short opus_uint16;
typedef signed int opus_int32;
typedef unsigned int opus_uint32;
#elif defined(CONFIG_TI_C54X) || defined (CONFIG_TI_C55X)
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef long opus_int32;
typedef unsigned long opus_uint32;
#elif defined(CONFIG_TI_C6X)
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#else
/* Give up, take a reasonable guess */
typedef short opus_int16;
typedef unsigned short opus_uint16;
typedef int opus_int32;
typedef unsigned int opus_uint32;
#endif
#define opus_int int /* used for counters etc; at least 16 bits */
#define opus_int64 long long
#define opus_int8 signed char
#define opus_uint unsigned int /* used for counters etc; at least 16 bits */
#define opus_uint64 unsigned long long
#define opus_uint8 unsigned char
#endif /* OPUS_TYPES_H */
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace RssDataSerices
{
class RssDataScheduler
{
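        // Posts the RSS feed for the given profile type and returns the API response,
        // or the exception message if the call fails.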
public string PostRssFeed(string ProfileType)
{
string str = "";
try
{
Api.RssFeeds.RssFeeds objRssFeed = new Api.RssFeeds.RssFeeds();
str = objRssFeed.PostRssfeed(ProfileType);
}
catch (Exception ex)
{
str = ex.Message;
}
return str;
}
}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package rangetable provides utilities for creating and inspecting
// unicode.RangeTables.
package rangetable
import (
"sort"
"unicode"
)
// New creates a RangeTable from the given runes, which may contain duplicates.
func New(r ...rune) *unicode.RangeTable {
if len(r) == 0 {
return &unicode.RangeTable{}
}
sort.Sort(byRune(r))
// Remove duplicates.
k := 1
for i := 1; i < len(r); i++ {
if r[k-1] != r[i] {
r[k] = r[i]
k++
}
}
var rt unicode.RangeTable
for _, r := range r[:k] {
if r <= 0xFFFF {
rt.R16 = append(rt.R16, unicode.Range16{Lo: uint16(r), Hi: uint16(r), Stride: 1})
} else {
rt.R32 = append(rt.R32, unicode.Range32{Lo: uint32(r), Hi: uint32(r), Stride: 1})
}
}
// Optimize RangeTable.
return Merge(&rt)
}
type byRune []rune
func (r byRune) Len() int { return len(r) }
func (r byRune) Swap(i, j int) { r[i], r[j] = r[j], r[i] }
func (r byRune) Less(i, j int) bool { return r[i] < r[j] }
// Visit visits all runes in the given RangeTable in order, calling fn for each.
func Visit(rt *unicode.RangeTable, fn func(rune)) {
for _, r16 := range rt.R16 {
for r := rune(r16.Lo); r <= rune(r16.Hi); r += rune(r16.Stride) {
fn(r)
}
}
for _, r32 := range rt.R32 {
for r := rune(r32.Lo); r <= rune(r32.Hi); r += rune(r32.Stride) {
fn(r)
}
}
}
// Assigned returns a RangeTable with all assigned code points for a given
// Unicode version. This includes graphic, format, control, and private-use
// characters. It returns nil if the data for the given version is not
// available.
func Assigned(version string) *unicode.RangeTable {
return assigned[version]
}
| {
"pile_set_name": "Github"
} |
/* Whole cell */
div.container.cell {
padding-left: 0;
margin-bottom: 1em;
}
/* Removing all background formatting so we can control at the div level */
.cell_input div.highlight, .cell_input pre, .cell_output .output * {
border: none;
background: none;
background-color: transparent;
box-shadow: none;
}
.cell_output .output pre, .cell_input pre {
margin: 0px;
}
/* Input cells */
div.cell div.cell_input {
padding-left: 0em;
padding-right: 0em;
border: 1px #ccc solid;
background-color: #f7f7f7;
border-left-color: green;
border-left-width: medium;
}
div.cell_input > div, div.cell_output div.output > div.highlight {
margin: 0em !important;
border: none !important;
}
/* All cell outputs */
.cell_output {
padding-left: 1em;
padding-right: 0em;
margin-top: 1em;
}
/* Outputs from jupyter_sphinx overrides to remove extra CSS */
div.section div.jupyter_container {
padding: .4em;
margin: 0 0 .4em 0;
background-color: none;
border: none;
-moz-box-shadow: none;
-webkit-box-shadow: none;
box-shadow: none;
}
/* Text outputs from cells */
.cell_output .output.text_plain,
.cell_output .output.traceback,
.cell_output .output.stream {
background: #fcfcfc;
margin-top: 1em;
margin-bottom: 0em;
box-shadow: none;
}
.cell_output .output.text_plain,
.cell_output .output.stream {
border: 1px solid #f7f7f7;
}
.cell_output .output.traceback {
border: 1px solid #ffd6d6;
}
/* Math align to the left */
.cell_output .MathJax_Display {
text-align: left !important;
}
/* Pandas tables. Pulled from the Jupyter / nbsphinx CSS */
div.cell_output table {
border: none;
border-collapse: collapse;
border-spacing: 0;
color: black;
font-size: 1em;
table-layout: fixed;
}
div.cell_output thead {
border-bottom: 1px solid black;
vertical-align: bottom;
}
div.cell_output tr,
div.cell_output th,
div.cell_output td {
text-align: right;
vertical-align: middle;
padding: 0.5em 0.5em;
line-height: normal;
white-space: normal;
max-width: none;
border: none;
}
div.cell_output th {
font-weight: bold;
}
div.cell_output tbody tr:nth-child(odd) {
background: #f5f5f5;
}
div.cell_output tbody tr:hover {
background: rgba(66, 165, 245, 0.2);
}
/* Inline text from `paste` operation */
span.pasted-text {
font-weight: bold;
}
span.pasted-inline img {
max-height: 2em;
}
tbody span.pasted-inline img {
max-height: none;
}
| {
"pile_set_name": "Github"
} |
package org.nextrtc.signalingserver.api.dto;
import org.nextrtc.signalingserver.api.NextRTCEvents;
import org.nextrtc.signalingserver.exception.SignalingException;
import java.time.ZonedDateTime;
import java.util.Map;
import java.util.Optional;
public interface NextRTCEvent {
NextRTCEvents type();
ZonedDateTime published();
Optional<NextRTCMember> from();
Optional<NextRTCMember> to();
Optional<NextRTCConversation> conversation();
Optional<SignalingException> exception();
Map<String, String> custom();
String content();
}
| {
"pile_set_name": "Github"
} |
OPTION DOTNAME
.text$ SEGMENT ALIGN(256) 'CODE'
EXTERN OPENSSL_ia32cap_P:NEAR
PUBLIC aesni_multi_cbc_encrypt
ALIGN 32
aesni_multi_cbc_encrypt PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
$L$SEH_begin_aesni_multi_cbc_encrypt::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
mov rax,rsp
push rbx
push rbp
push r12
push r13
push r14
push r15
lea rsp,QWORD PTR[((-168))+rsp]
movaps XMMWORD PTR[rsp],xmm6
movaps XMMWORD PTR[16+rsp],xmm7
movaps XMMWORD PTR[32+rsp],xmm8
movaps XMMWORD PTR[48+rsp],xmm9
movaps XMMWORD PTR[64+rsp],xmm10
movaps XMMWORD PTR[80+rsp],xmm11
movaps XMMWORD PTR[96+rsp],xmm12
movaps XMMWORD PTR[(-104)+rax],xmm13
movaps XMMWORD PTR[(-88)+rax],xmm14
movaps XMMWORD PTR[(-72)+rax],xmm15
sub rsp,48
and rsp,-64
mov QWORD PTR[16+rsp],rax
$L$enc4x_body::
movdqu xmm12,XMMWORD PTR[rsi]
lea rsi,QWORD PTR[120+rsi]
lea rdi,QWORD PTR[80+rdi]
$L$enc4x_loop_grande::
mov DWORD PTR[24+rsp],edx
xor edx,edx
mov ecx,DWORD PTR[((-64))+rdi]
mov r8,QWORD PTR[((-80))+rdi]
cmp ecx,edx
mov r12,QWORD PTR[((-72))+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm2,XMMWORD PTR[((-56))+rdi]
mov DWORD PTR[32+rsp],ecx
cmovle r8,rsp
mov ecx,DWORD PTR[((-24))+rdi]
mov r9,QWORD PTR[((-40))+rdi]
cmp ecx,edx
mov r13,QWORD PTR[((-32))+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm3,XMMWORD PTR[((-16))+rdi]
mov DWORD PTR[36+rsp],ecx
cmovle r9,rsp
mov ecx,DWORD PTR[16+rdi]
mov r10,QWORD PTR[rdi]
cmp ecx,edx
mov r14,QWORD PTR[8+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm4,XMMWORD PTR[24+rdi]
mov DWORD PTR[40+rsp],ecx
cmovle r10,rsp
mov ecx,DWORD PTR[56+rdi]
mov r11,QWORD PTR[40+rdi]
cmp ecx,edx
mov r15,QWORD PTR[48+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm5,XMMWORD PTR[64+rdi]
mov DWORD PTR[44+rsp],ecx
cmovle r11,rsp
test edx,edx
jz $L$enc4x_done
movups xmm1,XMMWORD PTR[((16-120))+rsi]
pxor xmm2,xmm12
movups xmm0,XMMWORD PTR[((32-120))+rsi]
pxor xmm3,xmm12
mov eax,DWORD PTR[((240-120))+rsi]
pxor xmm4,xmm12
movdqu xmm6,XMMWORD PTR[r8]
pxor xmm5,xmm12
movdqu xmm7,XMMWORD PTR[r9]
pxor xmm2,xmm6
movdqu xmm8,XMMWORD PTR[r10]
pxor xmm3,xmm7
movdqu xmm9,XMMWORD PTR[r11]
pxor xmm4,xmm8
pxor xmm5,xmm9
movdqa xmm10,XMMWORD PTR[32+rsp]
xor rbx,rbx
jmp $L$oop_enc4x
ALIGN 32
$L$oop_enc4x::
add rbx,16
lea rbp,QWORD PTR[16+rsp]
mov ecx,1
sub rbp,rbx
DB 102,15,56,220,209
prefetcht0 [31+rbx*1+r8]
prefetcht0 [31+rbx*1+r9]
DB 102,15,56,220,217
prefetcht0 [31+rbx*1+r10]
prefetcht0 [31+rbx*1+r10]
DB 102,15,56,220,225
DB 102,15,56,220,233
movups xmm1,XMMWORD PTR[((48-120))+rsi]
cmp ecx,DWORD PTR[32+rsp]
DB 102,15,56,220,208
DB 102,15,56,220,216
DB 102,15,56,220,224
cmovge r8,rbp
cmovg r12,rbp
DB 102,15,56,220,232
movups xmm0,XMMWORD PTR[((-56))+rsi]
cmp ecx,DWORD PTR[36+rsp]
DB 102,15,56,220,209
DB 102,15,56,220,217
DB 102,15,56,220,225
cmovge r9,rbp
cmovg r13,rbp
DB 102,15,56,220,233
movups xmm1,XMMWORD PTR[((-40))+rsi]
cmp ecx,DWORD PTR[40+rsp]
DB 102,15,56,220,208
DB 102,15,56,220,216
DB 102,15,56,220,224
cmovge r10,rbp
cmovg r14,rbp
DB 102,15,56,220,232
movups xmm0,XMMWORD PTR[((-24))+rsi]
cmp ecx,DWORD PTR[44+rsp]
DB 102,15,56,220,209
DB 102,15,56,220,217
DB 102,15,56,220,225
cmovge r11,rbp
cmovg r15,rbp
DB 102,15,56,220,233
movups xmm1,XMMWORD PTR[((-8))+rsi]
movdqa xmm11,xmm10
DB 102,15,56,220,208
prefetcht0 [15+rbx*1+r12]
prefetcht0 [15+rbx*1+r13]
DB 102,15,56,220,216
prefetcht0 [15+rbx*1+r14]
prefetcht0 [15+rbx*1+r15]
DB 102,15,56,220,224
DB 102,15,56,220,232
movups xmm0,XMMWORD PTR[((128-120))+rsi]
pxor xmm12,xmm12
DB 102,15,56,220,209
pcmpgtd xmm11,xmm12
movdqu xmm12,XMMWORD PTR[((-120))+rsi]
DB 102,15,56,220,217
paddd xmm10,xmm11
movdqa XMMWORD PTR[32+rsp],xmm10
DB 102,15,56,220,225
DB 102,15,56,220,233
movups xmm1,XMMWORD PTR[((144-120))+rsi]
cmp eax,11
DB 102,15,56,220,208
DB 102,15,56,220,216
DB 102,15,56,220,224
DB 102,15,56,220,232
movups xmm0,XMMWORD PTR[((160-120))+rsi]
jb $L$enc4x_tail
DB 102,15,56,220,209
DB 102,15,56,220,217
DB 102,15,56,220,225
DB 102,15,56,220,233
movups xmm1,XMMWORD PTR[((176-120))+rsi]
DB 102,15,56,220,208
DB 102,15,56,220,216
DB 102,15,56,220,224
DB 102,15,56,220,232
movups xmm0,XMMWORD PTR[((192-120))+rsi]
je $L$enc4x_tail
DB 102,15,56,220,209
DB 102,15,56,220,217
DB 102,15,56,220,225
DB 102,15,56,220,233
movups xmm1,XMMWORD PTR[((208-120))+rsi]
DB 102,15,56,220,208
DB 102,15,56,220,216
DB 102,15,56,220,224
DB 102,15,56,220,232
movups xmm0,XMMWORD PTR[((224-120))+rsi]
jmp $L$enc4x_tail
ALIGN 32
$L$enc4x_tail::
DB 102,15,56,220,209
DB 102,15,56,220,217
DB 102,15,56,220,225
DB 102,15,56,220,233
movdqu xmm6,XMMWORD PTR[rbx*1+r8]
movdqu xmm1,XMMWORD PTR[((16-120))+rsi]
DB 102,15,56,221,208
movdqu xmm7,XMMWORD PTR[rbx*1+r9]
pxor xmm6,xmm12
DB 102,15,56,221,216
movdqu xmm8,XMMWORD PTR[rbx*1+r10]
pxor xmm7,xmm12
DB 102,15,56,221,224
movdqu xmm9,XMMWORD PTR[rbx*1+r11]
pxor xmm8,xmm12
DB 102,15,56,221,232
movdqu xmm0,XMMWORD PTR[((32-120))+rsi]
pxor xmm9,xmm12
movups XMMWORD PTR[(-16)+rbx*1+r12],xmm2
pxor xmm2,xmm6
movups XMMWORD PTR[(-16)+rbx*1+r13],xmm3
pxor xmm3,xmm7
movups XMMWORD PTR[(-16)+rbx*1+r14],xmm4
pxor xmm4,xmm8
movups XMMWORD PTR[(-16)+rbx*1+r15],xmm5
pxor xmm5,xmm9
dec edx
jnz $L$oop_enc4x
mov rax,QWORD PTR[16+rsp]
mov edx,DWORD PTR[24+rsp]
lea rdi,QWORD PTR[160+rdi]
dec edx
jnz $L$enc4x_loop_grande
$L$enc4x_done::
movaps xmm6,XMMWORD PTR[((-216))+rax]
movaps xmm7,XMMWORD PTR[((-200))+rax]
movaps xmm8,XMMWORD PTR[((-184))+rax]
movaps xmm9,XMMWORD PTR[((-168))+rax]
movaps xmm10,XMMWORD PTR[((-152))+rax]
movaps xmm11,XMMWORD PTR[((-136))+rax]
movaps xmm12,XMMWORD PTR[((-120))+rax]
mov r15,QWORD PTR[((-48))+rax]
mov r14,QWORD PTR[((-40))+rax]
mov r13,QWORD PTR[((-32))+rax]
mov r12,QWORD PTR[((-24))+rax]
mov rbp,QWORD PTR[((-16))+rax]
mov rbx,QWORD PTR[((-8))+rax]
lea rsp,QWORD PTR[rax]
$L$enc4x_epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$SEH_end_aesni_multi_cbc_encrypt::
aesni_multi_cbc_encrypt ENDP
PUBLIC aesni_multi_cbc_decrypt
ALIGN 32
aesni_multi_cbc_decrypt PROC PUBLIC
mov QWORD PTR[8+rsp],rdi ;WIN64 prologue
mov QWORD PTR[16+rsp],rsi
mov rax,rsp
$L$SEH_begin_aesni_multi_cbc_decrypt::
mov rdi,rcx
mov rsi,rdx
mov rdx,r8
mov rax,rsp
push rbx
push rbp
push r12
push r13
push r14
push r15
lea rsp,QWORD PTR[((-168))+rsp]
movaps XMMWORD PTR[rsp],xmm6
movaps XMMWORD PTR[16+rsp],xmm7
movaps XMMWORD PTR[32+rsp],xmm8
movaps XMMWORD PTR[48+rsp],xmm9
movaps XMMWORD PTR[64+rsp],xmm10
movaps XMMWORD PTR[80+rsp],xmm11
movaps XMMWORD PTR[96+rsp],xmm12
movaps XMMWORD PTR[(-104)+rax],xmm13
movaps XMMWORD PTR[(-88)+rax],xmm14
movaps XMMWORD PTR[(-72)+rax],xmm15
sub rsp,48
and rsp,-64
mov QWORD PTR[16+rsp],rax
$L$dec4x_body::
movdqu xmm12,XMMWORD PTR[rsi]
lea rsi,QWORD PTR[120+rsi]
lea rdi,QWORD PTR[80+rdi]
$L$dec4x_loop_grande::
mov DWORD PTR[24+rsp],edx
xor edx,edx
mov ecx,DWORD PTR[((-64))+rdi]
mov r8,QWORD PTR[((-80))+rdi]
cmp ecx,edx
mov r12,QWORD PTR[((-72))+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm6,XMMWORD PTR[((-56))+rdi]
mov DWORD PTR[32+rsp],ecx
cmovle r8,rsp
mov ecx,DWORD PTR[((-24))+rdi]
mov r9,QWORD PTR[((-40))+rdi]
cmp ecx,edx
mov r13,QWORD PTR[((-32))+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm7,XMMWORD PTR[((-16))+rdi]
mov DWORD PTR[36+rsp],ecx
cmovle r9,rsp
mov ecx,DWORD PTR[16+rdi]
mov r10,QWORD PTR[rdi]
cmp ecx,edx
mov r14,QWORD PTR[8+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm8,XMMWORD PTR[24+rdi]
mov DWORD PTR[40+rsp],ecx
cmovle r10,rsp
mov ecx,DWORD PTR[56+rdi]
mov r11,QWORD PTR[40+rdi]
cmp ecx,edx
mov r15,QWORD PTR[48+rdi]
cmovg edx,ecx
test ecx,ecx
movdqu xmm9,XMMWORD PTR[64+rdi]
mov DWORD PTR[44+rsp],ecx
cmovle r11,rsp
test edx,edx
jz $L$dec4x_done
movups xmm1,XMMWORD PTR[((16-120))+rsi]
movups xmm0,XMMWORD PTR[((32-120))+rsi]
mov eax,DWORD PTR[((240-120))+rsi]
movdqu xmm2,XMMWORD PTR[r8]
movdqu xmm3,XMMWORD PTR[r9]
pxor xmm2,xmm12
movdqu xmm4,XMMWORD PTR[r10]
pxor xmm3,xmm12
movdqu xmm5,XMMWORD PTR[r11]
pxor xmm4,xmm12
pxor xmm5,xmm12
movdqa xmm10,XMMWORD PTR[32+rsp]
xor rbx,rbx
jmp $L$oop_dec4x
ALIGN 32
$L$oop_dec4x::
add rbx,16
lea rbp,QWORD PTR[16+rsp]
mov ecx,1
sub rbp,rbx
DB 102,15,56,222,209
prefetcht0 [31+rbx*1+r8]
prefetcht0 [31+rbx*1+r9]
DB 102,15,56,222,217
prefetcht0 [31+rbx*1+r10]
prefetcht0 [31+rbx*1+r11]
DB 102,15,56,222,225
DB 102,15,56,222,233
movups xmm1,XMMWORD PTR[((48-120))+rsi]
cmp ecx,DWORD PTR[32+rsp]
DB 102,15,56,222,208
DB 102,15,56,222,216
DB 102,15,56,222,224
cmovge r8,rbp
cmovg r12,rbp
DB 102,15,56,222,232
movups xmm0,XMMWORD PTR[((-56))+rsi]
cmp ecx,DWORD PTR[36+rsp]
DB 102,15,56,222,209
DB 102,15,56,222,217
DB 102,15,56,222,225
cmovge r9,rbp
cmovg r13,rbp
DB 102,15,56,222,233
movups xmm1,XMMWORD PTR[((-40))+rsi]
cmp ecx,DWORD PTR[40+rsp]
DB 102,15,56,222,208
DB 102,15,56,222,216
DB 102,15,56,222,224
cmovge r10,rbp
cmovg r14,rbp
DB 102,15,56,222,232
movups xmm0,XMMWORD PTR[((-24))+rsi]
cmp ecx,DWORD PTR[44+rsp]
DB 102,15,56,222,209
DB 102,15,56,222,217
DB 102,15,56,222,225
cmovge r11,rbp
cmovg r15,rbp
DB 102,15,56,222,233
movups xmm1,XMMWORD PTR[((-8))+rsi]
movdqa xmm11,xmm10
DB 102,15,56,222,208
prefetcht0 [15+rbx*1+r12]
prefetcht0 [15+rbx*1+r13]
DB 102,15,56,222,216
prefetcht0 [15+rbx*1+r14]
prefetcht0 [15+rbx*1+r15]
DB 102,15,56,222,224
DB 102,15,56,222,232
movups xmm0,XMMWORD PTR[((128-120))+rsi]
pxor xmm12,xmm12
DB 102,15,56,222,209
pcmpgtd xmm11,xmm12
movdqu xmm12,XMMWORD PTR[((-120))+rsi]
DB 102,15,56,222,217
paddd xmm10,xmm11
movdqa XMMWORD PTR[32+rsp],xmm10
DB 102,15,56,222,225
DB 102,15,56,222,233
movups xmm1,XMMWORD PTR[((144-120))+rsi]
cmp eax,11
DB 102,15,56,222,208
DB 102,15,56,222,216
DB 102,15,56,222,224
DB 102,15,56,222,232
movups xmm0,XMMWORD PTR[((160-120))+rsi]
jb $L$dec4x_tail
DB 102,15,56,222,209
DB 102,15,56,222,217
DB 102,15,56,222,225
DB 102,15,56,222,233
movups xmm1,XMMWORD PTR[((176-120))+rsi]
DB 102,15,56,222,208
DB 102,15,56,222,216
DB 102,15,56,222,224
DB 102,15,56,222,232
movups xmm0,XMMWORD PTR[((192-120))+rsi]
je $L$dec4x_tail
DB 102,15,56,222,209
DB 102,15,56,222,217
DB 102,15,56,222,225
DB 102,15,56,222,233
movups xmm1,XMMWORD PTR[((208-120))+rsi]
DB 102,15,56,222,208
DB 102,15,56,222,216
DB 102,15,56,222,224
DB 102,15,56,222,232
movups xmm0,XMMWORD PTR[((224-120))+rsi]
jmp $L$dec4x_tail
ALIGN 32
$L$dec4x_tail::
DB 102,15,56,222,209
DB 102,15,56,222,217
DB 102,15,56,222,225
pxor xmm6,xmm0
pxor xmm7,xmm0
DB 102,15,56,222,233
movdqu xmm1,XMMWORD PTR[((16-120))+rsi]
pxor xmm8,xmm0
pxor xmm9,xmm0
movdqu xmm0,XMMWORD PTR[((32-120))+rsi]
DB 102,15,56,223,214
DB 102,15,56,223,223
movdqu xmm6,XMMWORD PTR[((-16))+rbx*1+r8]
movdqu xmm7,XMMWORD PTR[((-16))+rbx*1+r9]
DB 102,65,15,56,223,224
DB 102,65,15,56,223,233
movdqu xmm8,XMMWORD PTR[((-16))+rbx*1+r10]
movdqu xmm9,XMMWORD PTR[((-16))+rbx*1+r11]
movups XMMWORD PTR[(-16)+rbx*1+r12],xmm2
movdqu xmm2,XMMWORD PTR[rbx*1+r8]
movups XMMWORD PTR[(-16)+rbx*1+r13],xmm3
movdqu xmm3,XMMWORD PTR[rbx*1+r9]
pxor xmm2,xmm12
movups XMMWORD PTR[(-16)+rbx*1+r14],xmm4
movdqu xmm4,XMMWORD PTR[rbx*1+r10]
pxor xmm3,xmm12
movups XMMWORD PTR[(-16)+rbx*1+r15],xmm5
movdqu xmm5,XMMWORD PTR[rbx*1+r11]
pxor xmm4,xmm12
pxor xmm5,xmm12
dec edx
jnz $L$oop_dec4x
mov rax,QWORD PTR[16+rsp]
mov edx,DWORD PTR[24+rsp]
lea rdi,QWORD PTR[160+rdi]
dec edx
jnz $L$dec4x_loop_grande
$L$dec4x_done::
movaps xmm6,XMMWORD PTR[((-216))+rax]
movaps xmm7,XMMWORD PTR[((-200))+rax]
movaps xmm8,XMMWORD PTR[((-184))+rax]
movaps xmm9,XMMWORD PTR[((-168))+rax]
movaps xmm10,XMMWORD PTR[((-152))+rax]
movaps xmm11,XMMWORD PTR[((-136))+rax]
movaps xmm12,XMMWORD PTR[((-120))+rax]
mov r15,QWORD PTR[((-48))+rax]
mov r14,QWORD PTR[((-40))+rax]
mov r13,QWORD PTR[((-32))+rax]
mov r12,QWORD PTR[((-24))+rax]
mov rbp,QWORD PTR[((-16))+rax]
mov rbx,QWORD PTR[((-8))+rax]
lea rsp,QWORD PTR[rax]
$L$dec4x_epilogue::
mov rdi,QWORD PTR[8+rsp] ;WIN64 epilogue
mov rsi,QWORD PTR[16+rsp]
DB 0F3h,0C3h ;repret
$L$SEH_end_aesni_multi_cbc_decrypt::
aesni_multi_cbc_decrypt ENDP
EXTERN __imp_RtlVirtualUnwind:NEAR
ALIGN 16
se_handler PROC PRIVATE
push rsi
push rdi
push rbx
push rbp
push r12
push r13
push r14
push r15
pushfq
sub rsp,64
mov rax,QWORD PTR[120+r8]
mov rbx,QWORD PTR[248+r8]
mov rsi,QWORD PTR[8+r9]
mov r11,QWORD PTR[56+r9]
mov r10d,DWORD PTR[r11]
lea r10,QWORD PTR[r10*1+rsi]
cmp rbx,r10
jb $L$in_prologue
mov rax,QWORD PTR[152+r8]
mov r10d,DWORD PTR[4+r11]
lea r10,QWORD PTR[r10*1+rsi]
cmp rbx,r10
jae $L$in_prologue
mov rax,QWORD PTR[16+rax]
mov rbx,QWORD PTR[((-8))+rax]
mov rbp,QWORD PTR[((-16))+rax]
mov r12,QWORD PTR[((-24))+rax]
mov r13,QWORD PTR[((-32))+rax]
mov r14,QWORD PTR[((-40))+rax]
mov r15,QWORD PTR[((-48))+rax]
mov QWORD PTR[144+r8],rbx
mov QWORD PTR[160+r8],rbp
mov QWORD PTR[216+r8],r12
mov QWORD PTR[224+r8],r13
mov QWORD PTR[232+r8],r14
mov QWORD PTR[240+r8],r15
lea rsi,QWORD PTR[((-56-160))+rax]
lea rdi,QWORD PTR[512+r8]
mov ecx,20
DD 0a548f3fch
$L$in_prologue::
mov rdi,QWORD PTR[8+rax]
mov rsi,QWORD PTR[16+rax]
mov QWORD PTR[152+r8],rax
mov QWORD PTR[168+r8],rsi
mov QWORD PTR[176+r8],rdi
mov rdi,QWORD PTR[40+r9]
mov rsi,r8
mov ecx,154
DD 0a548f3fch
mov rsi,r9
xor rcx,rcx
mov rdx,QWORD PTR[8+rsi]
mov r8,QWORD PTR[rsi]
mov r9,QWORD PTR[16+rsi]
mov r10,QWORD PTR[40+rsi]
lea r11,QWORD PTR[56+rsi]
lea r12,QWORD PTR[24+rsi]
mov QWORD PTR[32+rsp],r10
mov QWORD PTR[40+rsp],r11
mov QWORD PTR[48+rsp],r12
mov QWORD PTR[56+rsp],rcx
call QWORD PTR[__imp_RtlVirtualUnwind]
mov eax,1
add rsp,64
popfq
pop r15
pop r14
pop r13
pop r12
pop rbp
pop rbx
pop rdi
pop rsi
DB 0F3h,0C3h ;repret
se_handler ENDP
.text$ ENDS
.pdata SEGMENT READONLY ALIGN(4)
ALIGN 4
DD imagerel $L$SEH_begin_aesni_multi_cbc_encrypt
DD imagerel $L$SEH_end_aesni_multi_cbc_encrypt
DD imagerel $L$SEH_info_aesni_multi_cbc_encrypt
DD imagerel $L$SEH_begin_aesni_multi_cbc_decrypt
DD imagerel $L$SEH_end_aesni_multi_cbc_decrypt
DD imagerel $L$SEH_info_aesni_multi_cbc_decrypt
.pdata ENDS
.xdata SEGMENT READONLY ALIGN(8)
ALIGN 8
$L$SEH_info_aesni_multi_cbc_encrypt::
DB 9,0,0,0
DD imagerel se_handler
DD imagerel $L$enc4x_body,imagerel $L$enc4x_epilogue
$L$SEH_info_aesni_multi_cbc_decrypt::
DB 9,0,0,0
DD imagerel se_handler
DD imagerel $L$dec4x_body,imagerel $L$dec4x_epilogue
.xdata ENDS
END
| {
"pile_set_name": "Github"
} |
visiting
accept: aVisitor
aVisitor visitTask: self | {
"pile_set_name": "Github"
} |
<?php
/**
* This file is part of the Nette Framework (https://nette.org)
* Copyright (c) 2004 David Grudl (https://davidgrudl.com)
*/
declare(strict_types=1);
namespace Nette;
if (false) {
/** @deprecated use Nette\Bootstrap\Configurator */
class Configurator
{
}
} elseif (!class_exists(Configurator::class)) {
class_alias(Bootstrap\Configurator::class, Configurator::class);
}
| {
"pile_set_name": "Github"
} |
/******************************************************************************
* most_balanced_minimum_cuts.h
* *
* Source of KaHIP -- Karlsruhe High Quality Partitioning.
* Christian Schulz <[email protected]>
*****************************************************************************/
#ifndef MOST_BALANCED_MINIMUM_CUTS_SBD5CS
#define MOST_BALANCED_MINIMUM_CUTS_SBD5CS
#include "data_structure/graph_access.h"
#include "partition_config.h"
class most_balanced_minimum_cuts {
public:
most_balanced_minimum_cuts();
virtual ~most_balanced_minimum_cuts();
void compute_good_balanced_min_cut( graph_access & residualGraph,
const PartitionConfig & config,
NodeWeight & perfect_rhs_weight,
std::vector< NodeID > & new_rhs_node );
private:
void build_internal_scc_graph( graph_access & residualGraph,
std::vector<int> & components,
int comp_count,
graph_access & scc_graph);
void compute_new_rhs( graph_access & scc_graph,
const PartitionConfig & config,
std::vector< NodeWeight > & comp_weights,
int comp_of_s,
int comp_of_t,
NodeWeight optimal_rhs_weight,
std::vector<int> & comp_for_rhs);
};
#endif /* end of include guard: MOST_BALANCED_MINIMUM_CUTS_SBD5CS */
| {
"pile_set_name": "Github"
} |
CONFIGURATION_BUILD_DIR = ${PODS_CONFIGURATION_BUILD_DIR}/Crashlytics
FRAMEWORK_SEARCH_PATHS = $(inherited) "${PODS_ROOT}/Crashlytics/iOS" "${PODS_ROOT}/Fabric/iOS"
GCC_PREPROCESSOR_DEFINITIONS = $(inherited) COCOAPODS=1
OTHER_LDFLAGS = $(inherited) -l"c++" -l"z" -framework "Security" -framework "SystemConfiguration" -framework "UIKit"
PODS_BUILD_DIR = ${BUILD_DIR}
PODS_CONFIGURATION_BUILD_DIR = ${PODS_BUILD_DIR}/$(CONFIGURATION)$(EFFECTIVE_PLATFORM_NAME)
PODS_ROOT = ${SRCROOT}
PODS_TARGET_SRCROOT = ${PODS_ROOT}/Crashlytics
PRODUCT_BUNDLE_IDENTIFIER = org.cocoapods.${PRODUCT_NAME:rfc1034identifier}
SKIP_INSTALL = YES
USE_RECURSIVE_SCRIPT_INPUTS_IN_SCRIPT_PHASES = YES
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<item android:color="@color/b4" android:state_checked="false" android:state_focused="false" android:state_pressed="false" />
<item android:color="@color/c1" android:state_checked="true" android:state_focused="false" android:state_pressed="false" />
<item android:color="@color/c1" android:state_checked="false" android:state_focused="true" android:state_pressed="false" />
<item android:color="@color/b4" android:state_checked="true" android:state_focused="true" android:state_pressed="false" />
</selector>
| {
"pile_set_name": "Github"
} |
/// Copyright (c) 2012 Ecma International. All rights reserved.
/**
* @path ch15/15.4/15.4.4/15.4.4.19/15.4.4.19-1-10.js
* @description Array.prototype.map - applied to the Math object
*/
function testcase() {
function callbackfn(val, idx, obj) {
return ('[object Math]' === Object.prototype.toString.call(obj));
}
try {
Math.length = 1;
Math[0] = 1;
var testResult = Array.prototype.map.call(Math, callbackfn);
return testResult[0] === true;
} finally {
delete Math[0];
delete Math.length;
}
}
runTestCase(testcase);
| {
"pile_set_name": "Github"
} |
//
// MMTrainWorkModel.m
// MicroMannage
//
// Created by 倪望龙 on 2017/3/17.
// Copyright © 2017 xunyijia. All rights reserved.
//
#import "MMTrainWorkModel.h"
@implementation MMTrainWorkModel
@end
| {
"pile_set_name": "Github"
} |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkdrds.endpoint import endpoint_data
class DescribeDrdsComponentsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Drds', '2019-01-23', 'DescribeDrdsComponents','Drds')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_CommodityCode(self):
return self.get_query_params().get('CommodityCode')
def set_CommodityCode(self,CommodityCode):
self.add_query_param('CommodityCode',CommodityCode) | {
"pile_set_name": "Github"
} |
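// Read a cookie value by name (used below to obtain the CSRF token).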
function getCookie(name) {
var cookieValue = null;
if (document.cookie && document.cookie != '') {
var cookies = document.cookie.split(';');
for (var i = 0; i < cookies.length; i++) {
var cookie = jQuery.trim(cookies[i]);
if (cookie.substring(0, name.length + 1) == (name + '=')) {
cookieValue = decodeURIComponent(cookie.substring(name.length + 1));
break;
}
}
}
return cookieValue;
}
var csrftoken = getCookie('csrftoken');
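// Submit the import form asynchronously; on success close the modal and reload the
// dynamic/selected panels, otherwise render the returned form (with errors) in the modal.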
function upload(event) {
event.preventDefault();
var data = new FormData(this);
$("#importbutton").button("loading")
$.ajax({
url: $(this).attr('action'),
type: $(this).attr('method'),
data: data,
cache: false,
processData: false,
contentType: false,
success: function(data, status) {
if (data == "Ok"){
$('.modal').modal('hide');
$(".dynamic").load($(".dynamic").data("form"));
$(".selected").load($(".selected").data("form"));
$("#importbutton").button("complete")
} else {
$('.modal').empty();
$('.modal').append(data);
$(".modal").modal('show');
}
}
});
return false;
}
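// These HTTP methods do not require CSRF protection.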
function csrfSafeMethod(method) {
return (/^(GET|HEAD|OPTIONS|TRACE)$/.test(method));
}
$.ajaxSetup({
beforeSend: function(xhr, settings) {
if (!csrfSafeMethod(settings.type) && !this.crossDomain) {
xhr.setRequestHeader("X-CSRFToken", csrftoken);
}
}
});
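// Open a plain popup window; width and height default to 800x400.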
function popupwindow(url, title, w, h) {
w = typeof w !== 'undefined' ? w : 800;
h = typeof h !== 'undefined' ? h : 400;
var left = 200;
var top = -100;
return window.open(url, title, 'toolbar=no, location=no, directories=no, status=no, menubar=no, scrollbars=no, resizable=no, copyhistory=no, width='+w+', height='+h+', top='+top+', left='+left);
}
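// Open a popup centered on the current screen, accounting for dual-monitor offsets.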
function PopupCenter(url, title, w, h) {
var dualScreenLeft = window.screenLeft != undefined ? window.screenLeft : screen.left;
var dualScreenTop = window.screenTop != undefined ? window.screenTop : screen.top;
width = window.innerWidth ? window.innerWidth : document.documentElement.clientWidth ? document.documentElement.clientWidth : screen.width;
height = window.innerHeight ? window.innerHeight : document.documentElement.clientHeight ? document.documentElement.clientHeight : screen.height;
var left = ((width / 2) - (w / 2)) + dualScreenLeft;
var top = ((height / 2) - (h / 2)) + dualScreenTop;
var newWindow = window.open(url, title, 'scrollbars=yes, width=' + w + ', height=' + h + ', top=' + top + ', left=' + left);
if (window.focus) {
newWindow.focus();
}
}
$(function() {
$('[data-toggle="tooltip"]').tooltip();
$('body').tooltip({
selector: '[data-toggle="tooltip"]'
});
$(".dynamic").on("click", ".download", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
window.location.href = url;
return false;
});
$(".dynamic").on("click", ".silent", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$.get(url)
$(".dynamic").empty();
$(".dynamic").load($(".dynamic").data("form"));
$(".selected").load($(".selected").data("form"));
return false;
});
$(".dynamic").on("click", ".paging", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".dynamic").empty();
$(".dynamic").load(url);
return false;
});
$(".modal").on("click", ".paging", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".modal").load(url);
return false;
});
$(".dynamic").on("click", ".edit", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".modal").load(url, function() {
$(this).modal('show');
});
return false;
});
$(".dynamic").on("click", ".popup", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
PopupCenter(url, "preview", 800, 600);
return false;
});
$(".modal").on("click", ".edit", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".modal").empty();
$(".modal").load(url, function() {
$(this).modal('show');
});
return false;
});
$(".selected").on("click", ".add", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".modal").load(url, function() {
$(this).modal('show');
});
return false;
});
$(".selected").on("click", ".edit", function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".modal").load(url, function() {
$(this).modal('show');
});
return false;
});
$(".new").click(function(ev) {
ev.preventDefault();
var url = $(this).data("form");
$(".modal").load(url, function() {
$(this).modal('show');
});
return false;
});
$(".new-selected").click(function(ev) {
ev.preventDefault();
var url = $("#button-url").find(":selected").val();
$(".modal").load(url, function() {
$(this).modal('show');
});
return false;
});
$('.modal').on('hidden.bs.modal', function () {
$(this).empty();
})
$(".dynamic").load($(".dynamic").data("form"));
var delay = (function(){
var timer = 0;
return function(callback, ms){
clearTimeout (timer);
timer = setTimeout(callback, ms);
};
})();
$(".filter-input").on("keyup", function(ev) {
delay(function(){
if ($(".filter-input").val() != ""){
$(".dynamic").load($(".dynamic").data("form") + '/' + $(".filter-input").val());
}
else {
$(".dynamic").load($(".dynamic").data("form"));
}
}, 200);
})
$('.modal').on("submit", ".modal-form", function() {
$.ajax({
type: this.method,
url: this.action,
data: $(this).serialize(),
context: this,
success: function(data, status) {
if (data == "Ok"){
$('.modal').modal('hide');
$(".dynamic").load($(".dynamic").data("form"));
$(".selected").load($(".selected").data("form"));
} else {
$('.modal').empty();
$('.modal').append(data);
$(".modal").modal('show');
}
}
});
return false;
});
$('.modal').on("click", ".refresh", function() {
$('.modal').load($(".refresh").data("url"));
return false;
});
$('.modal').on("click", ".browse", function(){
$('input[id=targetlist]').click();
return false;
})
$('.modal').on("change", 'input[id=targetlist]', function(){
$('#filepath').val($(this).val());
})
$('.modal').on("submit", ".modal-form-file", upload);
$('.select').change(function(ev){
if ($("select option:selected").val() == "" )
{
$(".selected").empty();
}
else
{
$(".selected").load($("select option:selected").data("form"));
$(".selected").attr("data-form", $("select option:selected").data("form"))
}
})
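// Rewrite the name/id/for attributes of a formset form so they use the given index.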
index_form = function( fset, index ){
$(fset).find(':input').each(function() {
var name = $(this).attr('name').replace( new RegExp('(\_\_prefix\_\_|\\d)') , index );
var id = 'id_' + name;
$(this).attr({'name': name, 'id': id});
});
$(fset).find('label').each(function() {
var newFor = $(this).attr('for').replace( new RegExp('(\_\_prefix\_\_|\\d)') , index );
var id = 'label_' + newFor;
$(this).attr({'id':id, 'for':newFor});
});
}
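// Renumber every form in the formset and update the TOTAL_FORMS counter.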
reindex_formset = function( formset_zone ){
var formset = $(formset_zone).find( '.nsorte' );
for( var cpt=0;cpt<formset.length;cpt++ ){
index_form( formset[cpt], cpt );
};
$("#id_form-TOTAL_FORMS").val( parseInt( cpt ) );
};
set_event = function(){
$('.modal').on('click',".bt_rm_sorte",function(){
$(this).parents(".nsorte").remove();
reindex_formset( "#formsetZone" );
});
};
$('.modal').on('click',"#bt_add_sorte",function(){
$( "#eform" ).clone(true).appendTo( $("#formsetZone") );
reindex_formset( "#formsetZone" );
});
set_event();
});
// CKEditor Focus Horrible Hack
$.fn.modal.Constructor.prototype.enforceFocus = function() {
modal_this = this
$(document).on('focusin.modal', function (e) {
if (modal_this.$element[0] !== e.target && !modal_this.$element.has(e.target).length
&& !$(e.target.parentNode).hasClass('cke_dialog_ui_input_select')
&& !$(e.target.parentNode).hasClass('cke_dialog_ui_input_text')
&& !$(e.target.parentNode).hasClass('cke_dialog_ui_input_textarea')) {
modal_this.$element.focus()
}
})
};
| {
"pile_set_name": "Github"
} |
#ifndef BOOST_BIND_STORAGE_HPP_INCLUDED
#define BOOST_BIND_STORAGE_HPP_INCLUDED
// MS compatible compilers support #pragma once
#if defined(_MSC_VER) && (_MSC_VER >= 1020)
# pragma once
#endif
//
// bind/storage.hpp
//
// boost/bind.hpp support header, optimized storage
//
// Copyright (c) 2006 Peter Dimov
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt
//
// See http://www.boost.org/libs/bind/bind.html for documentation.
//
#include <boost/config.hpp>
#include <boost/bind/arg.hpp>
#ifdef BOOST_MSVC
# pragma warning(push)
# pragma warning(disable: 4512) // assignment operator could not be generated
#endif
namespace boost
{
namespace _bi
{
// 1
template<class A1> struct storage1
{
explicit storage1( A1 a1 ): a1_( a1 ) {}
template<class V> void accept(V & v) const
{
BOOST_BIND_VISIT_EACH(v, a1_, 0);
}
A1 a1_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION ) && !defined( __BORLANDC__ )
template<int I> struct storage1< boost::arg<I> >
{
explicit storage1( boost::arg<I> ) {}
template<class V> void accept(V &) const { }
static boost::arg<I> a1_() { return boost::arg<I>(); }
};
template<int I> struct storage1< boost::arg<I> (*) () >
{
explicit storage1( boost::arg<I> (*) () ) {}
template<class V> void accept(V &) const { }
static boost::arg<I> a1_() { return boost::arg<I>(); }
};
#endif
// 2
template<class A1, class A2> struct storage2: public storage1<A1>
{
typedef storage1<A1> inherited;
storage2( A1 a1, A2 a2 ): storage1<A1>( a1 ), a2_( a2 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a2_, 0);
}
A2 a2_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, int I> struct storage2< A1, boost::arg<I> >: public storage1<A1>
{
typedef storage1<A1> inherited;
storage2( A1 a1, boost::arg<I> ): storage1<A1>( a1 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a2_() { return boost::arg<I>(); }
};
template<class A1, int I> struct storage2< A1, boost::arg<I> (*) () >: public storage1<A1>
{
typedef storage1<A1> inherited;
storage2( A1 a1, boost::arg<I> (*) () ): storage1<A1>( a1 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a2_() { return boost::arg<I>(); }
};
#endif
// 3
template<class A1, class A2, class A3> struct storage3: public storage2< A1, A2 >
{
typedef storage2<A1, A2> inherited;
storage3( A1 a1, A2 a2, A3 a3 ): storage2<A1, A2>( a1, a2 ), a3_( a3 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a3_, 0);
}
A3 a3_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, int I> struct storage3< A1, A2, boost::arg<I> >: public storage2< A1, A2 >
{
typedef storage2<A1, A2> inherited;
storage3( A1 a1, A2 a2, boost::arg<I> ): storage2<A1, A2>( a1, a2 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a3_() { return boost::arg<I>(); }
};
template<class A1, class A2, int I> struct storage3< A1, A2, boost::arg<I> (*) () >: public storage2< A1, A2 >
{
typedef storage2<A1, A2> inherited;
storage3( A1 a1, A2 a2, boost::arg<I> (*) () ): storage2<A1, A2>( a1, a2 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a3_() { return boost::arg<I>(); }
};
#endif
// 4
template<class A1, class A2, class A3, class A4> struct storage4: public storage3< A1, A2, A3 >
{
typedef storage3<A1, A2, A3> inherited;
storage4( A1 a1, A2 a2, A3 a3, A4 a4 ): storage3<A1, A2, A3>( a1, a2, a3 ), a4_( a4 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a4_, 0);
}
A4 a4_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, class A3, int I> struct storage4< A1, A2, A3, boost::arg<I> >: public storage3< A1, A2, A3 >
{
typedef storage3<A1, A2, A3> inherited;
storage4( A1 a1, A2 a2, A3 a3, boost::arg<I> ): storage3<A1, A2, A3>( a1, a2, a3 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a4_() { return boost::arg<I>(); }
};
template<class A1, class A2, class A3, int I> struct storage4< A1, A2, A3, boost::arg<I> (*) () >: public storage3< A1, A2, A3 >
{
typedef storage3<A1, A2, A3> inherited;
storage4( A1 a1, A2 a2, A3 a3, boost::arg<I> (*) () ): storage3<A1, A2, A3>( a1, a2, a3 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a4_() { return boost::arg<I>(); }
};
#endif
// 5
template<class A1, class A2, class A3, class A4, class A5> struct storage5: public storage4< A1, A2, A3, A4 >
{
typedef storage4<A1, A2, A3, A4> inherited;
storage5( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5 ): storage4<A1, A2, A3, A4>( a1, a2, a3, a4 ), a5_( a5 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a5_, 0);
}
A5 a5_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, class A3, class A4, int I> struct storage5< A1, A2, A3, A4, boost::arg<I> >: public storage4< A1, A2, A3, A4 >
{
typedef storage4<A1, A2, A3, A4> inherited;
storage5( A1 a1, A2 a2, A3 a3, A4 a4, boost::arg<I> ): storage4<A1, A2, A3, A4>( a1, a2, a3, a4 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a5_() { return boost::arg<I>(); }
};
template<class A1, class A2, class A3, class A4, int I> struct storage5< A1, A2, A3, A4, boost::arg<I> (*) () >: public storage4< A1, A2, A3, A4 >
{
typedef storage4<A1, A2, A3, A4> inherited;
storage5( A1 a1, A2 a2, A3 a3, A4 a4, boost::arg<I> (*) () ): storage4<A1, A2, A3, A4>( a1, a2, a3, a4 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a5_() { return boost::arg<I>(); }
};
#endif
// 6
template<class A1, class A2, class A3, class A4, class A5, class A6> struct storage6: public storage5< A1, A2, A3, A4, A5 >
{
typedef storage5<A1, A2, A3, A4, A5> inherited;
storage6( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6 ): storage5<A1, A2, A3, A4, A5>( a1, a2, a3, a4, a5 ), a6_( a6 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a6_, 0);
}
A6 a6_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, class A3, class A4, class A5, int I> struct storage6< A1, A2, A3, A4, A5, boost::arg<I> >: public storage5< A1, A2, A3, A4, A5 >
{
typedef storage5<A1, A2, A3, A4, A5> inherited;
storage6( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, boost::arg<I> ): storage5<A1, A2, A3, A4, A5>( a1, a2, a3, a4, a5 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a6_() { return boost::arg<I>(); }
};
template<class A1, class A2, class A3, class A4, class A5, int I> struct storage6< A1, A2, A3, A4, A5, boost::arg<I> (*) () >: public storage5< A1, A2, A3, A4, A5 >
{
typedef storage5<A1, A2, A3, A4, A5> inherited;
storage6( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, boost::arg<I> (*) () ): storage5<A1, A2, A3, A4, A5>( a1, a2, a3, a4, a5 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a6_() { return boost::arg<I>(); }
};
#endif
// 7
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7> struct storage7: public storage6< A1, A2, A3, A4, A5, A6 >
{
typedef storage6<A1, A2, A3, A4, A5, A6> inherited;
storage7( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7 ): storage6<A1, A2, A3, A4, A5, A6>( a1, a2, a3, a4, a5, a6 ), a7_( a7 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a7_, 0);
}
A7 a7_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, class A3, class A4, class A5, class A6, int I> struct storage7< A1, A2, A3, A4, A5, A6, boost::arg<I> >: public storage6< A1, A2, A3, A4, A5, A6 >
{
typedef storage6<A1, A2, A3, A4, A5, A6> inherited;
storage7( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, boost::arg<I> ): storage6<A1, A2, A3, A4, A5, A6>( a1, a2, a3, a4, a5, a6 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a7_() { return boost::arg<I>(); }
};
template<class A1, class A2, class A3, class A4, class A5, class A6, int I> struct storage7< A1, A2, A3, A4, A5, A6, boost::arg<I> (*) () >: public storage6< A1, A2, A3, A4, A5, A6 >
{
typedef storage6<A1, A2, A3, A4, A5, A6> inherited;
storage7( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, boost::arg<I> (*) () ): storage6<A1, A2, A3, A4, A5, A6>( a1, a2, a3, a4, a5, a6 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a7_() { return boost::arg<I>(); }
};
#endif
// 8
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8> struct storage8: public storage7< A1, A2, A3, A4, A5, A6, A7 >
{
typedef storage7<A1, A2, A3, A4, A5, A6, A7> inherited;
storage8( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, A8 a8 ): storage7<A1, A2, A3, A4, A5, A6, A7>( a1, a2, a3, a4, a5, a6, a7 ), a8_( a8 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a8_, 0);
}
A8 a8_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7, int I> struct storage8< A1, A2, A3, A4, A5, A6, A7, boost::arg<I> >: public storage7< A1, A2, A3, A4, A5, A6, A7 >
{
typedef storage7<A1, A2, A3, A4, A5, A6, A7> inherited;
storage8( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, boost::arg<I> ): storage7<A1, A2, A3, A4, A5, A6, A7>( a1, a2, a3, a4, a5, a6, a7 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a8_() { return boost::arg<I>(); }
};
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7, int I> struct storage8< A1, A2, A3, A4, A5, A6, A7, boost::arg<I> (*) () >: public storage7< A1, A2, A3, A4, A5, A6, A7 >
{
typedef storage7<A1, A2, A3, A4, A5, A6, A7> inherited;
storage8( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, boost::arg<I> (*) () ): storage7<A1, A2, A3, A4, A5, A6, A7>( a1, a2, a3, a4, a5, a6, a7 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a8_() { return boost::arg<I>(); }
};
#endif
// 9
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, class A9> struct storage9: public storage8< A1, A2, A3, A4, A5, A6, A7, A8 >
{
typedef storage8<A1, A2, A3, A4, A5, A6, A7, A8> inherited;
storage9( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, A8 a8, A9 a9 ): storage8<A1, A2, A3, A4, A5, A6, A7, A8>( a1, a2, a3, a4, a5, a6, a7, a8 ), a9_( a9 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
BOOST_BIND_VISIT_EACH(v, a9_, 0);
}
A9 a9_;
};
#if !defined( BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION )
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, int I> struct storage9< A1, A2, A3, A4, A5, A6, A7, A8, boost::arg<I> >: public storage8< A1, A2, A3, A4, A5, A6, A7, A8 >
{
typedef storage8<A1, A2, A3, A4, A5, A6, A7, A8> inherited;
storage9( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, A8 a8, boost::arg<I> ): storage8<A1, A2, A3, A4, A5, A6, A7, A8>( a1, a2, a3, a4, a5, a6, a7, a8 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a9_() { return boost::arg<I>(); }
};
template<class A1, class A2, class A3, class A4, class A5, class A6, class A7, class A8, int I> struct storage9< A1, A2, A3, A4, A5, A6, A7, A8, boost::arg<I> (*) () >: public storage8< A1, A2, A3, A4, A5, A6, A7, A8 >
{
typedef storage8<A1, A2, A3, A4, A5, A6, A7, A8> inherited;
storage9( A1 a1, A2 a2, A3 a3, A4 a4, A5 a5, A6 a6, A7 a7, A8 a8, boost::arg<I> (*) () ): storage8<A1, A2, A3, A4, A5, A6, A7, A8>( a1, a2, a3, a4, a5, a6, a7, a8 ) {}
template<class V> void accept(V & v) const
{
inherited::accept(v);
}
static boost::arg<I> a9_() { return boost::arg<I>(); }
};
#endif
} // namespace _bi
} // namespace boost
#ifdef BOOST_MSVC
# pragma warning(default: 4512) // assignment operator could not be generated
# pragma warning(pop)
#endif
#endif // #ifndef BOOST_BIND_STORAGE_HPP_INCLUDED
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2010-2011, NVIDIA Corporation
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of NVIDIA Corporation nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <cugar/basic/cuda/arch.h>
#include <cugar/basic/cuda/pointers.h>
#include <cugar/basic/functors.h>
#include <cugar/basic/algorithms.h>
#include <cugar/basic/cuda/scan.h>
#include <cugar/basic/utils.h>
namespace cugar {
namespace cuda {
namespace bintree {
typedef Radixtree_context::Split_task Split_task;
// find the most significant bit smaller than start by which code0 and code1 differ
template <typename Integer>
CUGAR_FORCEINLINE CUGAR_HOST_DEVICE int32 find_leading_bit_difference(
const int32 start_level,
const Integer code0,
const Integer code1)
{
int32 level = start_level;
while (level >= 0)
{
const Integer mask = Integer(1u) << level;
if ((code0 & mask) !=
(code1 & mask))
break;
--level;
}
return level;
}
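// Example: for code0 = 0b1010, code1 = 0b1000 and start_level = 3, bits 3 and 2 match
// and the first difference is at bit 1, so 1 is returned; if the codes agree on every
// bit from start_level down to 0, the function returns -1.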
#define RADIX_TREE_USE_VOLATILE 1
#define RADIX_TREE_USE_FENCE 1
#define RADIX_TREE_USE_ATOMIC_RELEASE 1
#if defined(RADIX_TREE_USE_VOLATILE)
#define RADIX_TREE_UNCACHED_LOAD(x) load<LOAD_VOLATILE>(x)
#define RADIX_TREE_UNCACHED_STORE(x,v) store<STORE_VOLATILE>(x,v)
#else
#define RADIX_TREE_UNCACHED_LOAD(x) load<LOAD_CG>(x)
#define RADIX_TREE_UNCACHED_STORE(x,v) store<STORE_CG>(x,v)
#endif
#if defined(RADIX_TREE_USE_FENCE)
#define RADIX_TREE_RELEASE_FENCE() __threadfence()
#else
#define RADIX_TREE_RELEASE_FENCE()
#endif
#if defined(RADIX_TREE_USE_ATOMIC_RELEASE)
#define RADIX_TREE_RELEASE(x,v) atomicExch(x,v)
#else
#define RADIX_TREE_RELEASE(x,v) RADIX_TREE_UNCACHED_STORE(x,v)
#endif
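// The macros above implement a simple producer/consumer handshake between threads:
// a producer writes a node's Split_task and skip pointer with uncached stores, issues
// a memory fence, and then "releases" the node's flag; consumers spin on the flag with
// uncached loads before reading the task data.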
// do a single kd-split for all nodes in the input task queue, and generate
// a corresponding list of output tasks
template <uint32 BLOCK_SIZE, typename Tree, typename Integer>
__global__ void split_kernel(
const uint32 grid_size,
Tree tree,
const uint32 max_leaf_size,
const uint32 n_nodes,
const uint32 n_codes,
const Integer* codes,
int32* flags,
Split_task* tasks,
uint32* skip_nodes,
uint32* out_node_count,
uint32* out_leaf_count,
uint32* work_counter)
{
const uint32 LOG_WARP_SIZE = 5;
const uint32 WARP_SIZE = 1u << LOG_WARP_SIZE;
volatile __shared__ uint32 warp_offset[ BLOCK_SIZE >> LOG_WARP_SIZE ];
const uint32 warp_tid = threadIdx.x & (WARP_SIZE-1);
const uint32 warp_id = threadIdx.x >> LOG_WARP_SIZE;
uint32 node = 0;
uint32 begin = 0;
uint32 end = 0;
uint32 level = uint32(-1);
uint32 parent = uint32(-1);
uint32 skip_node = uint32(-1);
uint32 split_index = 0;
int32 node_flag = -1;
bool proper_split = false;
bool terminated = false;
// keep the entire warp looping until there's some work to do
while (__any(!terminated))
{
// fetch new tasks for inactive lanes which are done processing their node (i.e. node_flag != 0)
const uint32 new_node = cuda::alloc<1>( node_flag != 0, work_counter, warp_tid, warp_offset + warp_id );
if (node_flag != 0) // check if we are done processing the current node
{
// check whether we are done processing all nodes
if (new_node >= n_nodes)
terminated = true;
// reset the node
node = new_node;
begin = 0;
end = 0;
level = uint32(-1);
parent = uint32(-1);
skip_node = uint32(-1);
split_index = 0;
node_flag = 0;
proper_split = false;
}
//node_flag = terminated ? 0 : *(volatile int32*)(flags + node);
node_flag = terminated ? 0 : RADIX_TREE_UNCACHED_LOAD(flags + node);
if (node_flag != 0)
{
// fetch this node's description
const Split_task in_task = RADIX_TREE_UNCACHED_LOAD( (uint4*)tasks + node );
parent = in_task.m_parent;
begin = in_task.m_begin;
end = in_task.m_end;
level = in_task.m_level;
split_index = (begin + end)/2;
skip_node = RADIX_TREE_UNCACHED_LOAD( skip_nodes + node );
// check whether this is a valid node, needing a proper split
if (node_flag == 1)
{
// check whether the input node really needs to be split
if (end - begin > max_leaf_size)
{
if (level != uint32(-1))
{
// adjust the splitting level so as to make sure the split will produce either 2 or 0 children
level = find_leading_bit_difference(
level,
codes[begin],
codes[end-1] );
}
// check again if there is any chance to make a split, after the level has been adjusted
if (level != uint32(-1))
{
// find the "partitioning pivot" using a binary search
split_index = find_pivot(
codes + begin,
end - begin,
mask_and<Integer>( Integer(1u) << level ) ) - codes;
// this shouldn't be needed, but... force a good split
if (split_index == begin || split_index == end)
split_index = (begin + end)/2;
}
// mark this as a proper split
proper_split = true;
}
}
}
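// Helper used below: publish a child node (its Split_task and skip pointer) with
// uncached stores, fence, and finally release its flag, so that whichever lane later
// picks the node up is guaranteed to see fully written data.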
#define RADIX_TREE_WRITE_NODE( OUTPUT_INDEX, PARENT, BEGIN, END, LEVEL, SKIP, RELEASE_VALUE ) \
do { \
RADIX_TREE_UNCACHED_STORE( (uint4*)tasks + OUTPUT_INDEX, make_uint4( PARENT, BEGIN, END, LEVEL ) ); \
RADIX_TREE_UNCACHED_STORE( skip_nodes + OUTPUT_INDEX, SKIP ); \
RADIX_TREE_RELEASE_FENCE(); \
RADIX_TREE_RELEASE( flags + OUTPUT_INDEX, RELEASE_VALUE ); \
} while (0)
// split the node if it contains more than one code
const uint32 child_count = (node_flag != 0) && (end - begin > 1u) ? 2u : 0u;
// alloc the actual children
//const uint32 node_offset = cuda::alloc<2>( child_count, out_node_count, warp_tid, warp_offset + warp_id );
const uint32 node_offset = atomicAdd( out_node_count, child_count );
// write the them out
if (child_count == 2)
{
RADIX_TREE_WRITE_NODE( node_offset+0, node, begin, split_index, level-1, node_offset+1, proper_split ? 1 : -1 ); // mark nodes produced by "virtual" splits with a -1
RADIX_TREE_WRITE_NODE( node_offset+1, node, split_index, end, level-1, skip_node, proper_split ? 1 : -1 ); // mark nodes produced by "virtual" splits with a -1
}
const bool generate_leaf = (node_flag == 1) && (proper_split == false);
// count how many leaves we need to generate
//const uint32 leaf_index = cuda::alloc<1>( generate_leaf, out_leaf_count, warp_tid, warp_offset + warp_id );
const uint32 leaf_index = atomicAdd( out_leaf_count, generate_leaf ? 1u : 0u );
// write out the current node if it's not virtual
if (node_flag == 1)
{
tree.write_node(
node,
parent,
generate_leaf ? false : true,
generate_leaf ? false : true,
generate_leaf ? leaf_index : node_offset,
skip_node,
level,
begin,
end,
generate_leaf ? uint32(-1) : split_index );
// make a leaf if necessary
if (generate_leaf)
tree.write_leaf( leaf_index, node, begin, end );
}
}
}
// do a single kd-split for all nodes in the input task queue, and generate
// a corresponding list of output tasks
template <typename Tree, typename Integer>
void split(
Tree tree,
const uint32 max_leaf_size,
const uint32 n_nodes,
const uint32 n_codes,
const Integer* codes,
int32* flags,
Split_task* tasks,
uint32* skip_nodes,
uint32* out_node_count,
uint32* out_leaf_count,
uint32* work_counter)
{
const uint32 BLOCK_SIZE = 128;
const size_t max_blocks = cuda::max_active_blocks(split_kernel<BLOCK_SIZE,Tree,Integer>, BLOCK_SIZE, 0);
const size_t n_blocks = cugar::min( max_blocks, size_t(n_nodes + BLOCK_SIZE-1) / BLOCK_SIZE );
const size_t grid_size = n_blocks * BLOCK_SIZE;
split_kernel<BLOCK_SIZE> <<<n_blocks,BLOCK_SIZE>>> (
grid_size,
tree,
max_leaf_size,
n_nodes,
n_codes,
codes,
flags,
tasks,
skip_nodes,
out_node_count,
out_leaf_count,
work_counter );
//cudaDeviceSynchronize();
}
} // namespace bintree
template <typename Tree, typename Integer>
void generate_radix_tree(
Radixtree_context& context,
const uint32 n_codes,
const Integer* codes,
const uint32 bits,
const uint32 max_leaf_size,
const bool keep_singletons,
Tree& tree)
{
const uint32 max_nodes = cugar::max( n_codes * 2u - 1u, 1u );
tree.reserve_nodes( max_nodes );
tree.reserve_leaves( cugar::max( n_codes, 1u ) );
// reserve storage for internal queues
need_space( context.m_task_queues, max_nodes );
need_space( context.m_skip_nodes, max_nodes );
context.m_counters.resize( 3 );
context.m_counters[0] = 1; // nodes counter
context.m_counters[1] = 0; // leaf counter
context.m_counters[2] = 0; // work counter
context.m_task_queues[0] = Radixtree_context::Split_task( uint32(-1), 0, n_codes, bits-1 );
context.m_skip_nodes[0] = uint32(-1);
caching_device_vector<int32> flags( max_nodes, 0u );
flags[0] = 1u; // mark the root node as ready to be split
// build the radix tree in a single pass
bintree::split(
tree.get_context(),
max_leaf_size,
max_nodes,
n_codes,
codes,
raw_pointer( flags ),
raw_pointer( context.m_task_queues ),
raw_pointer( context.m_skip_nodes ),
raw_pointer( context.m_counters ),
raw_pointer( context.m_counters ) + 1,
raw_pointer( context.m_counters ) + 2 );
context.m_nodes = context.m_counters[0];
context.m_leaves = context.m_counters[1];
}
template <typename Tree_writer, typename Integer>
void generate_radix_tree(
const uint32 n_codes,
const Integer* codes,
const uint32 bits,
const uint32 max_leaf_size,
const bool keep_singletons,
Tree_writer& tree)
{
Radixtree_context context;
generate_radix_tree( context, n_codes, codes, bits, max_leaf_size, keep_singletons, tree );
}
} // namespace cuda
} // namespace cugar
| {
"pile_set_name": "Github"
} |
Sample code for Chapter 5 - "First-class functions"
From the book "Fluent Python" by Luciano Ramalho (O'Reilly, 2015)
http://shop.oreilly.com/product/0636920032519.do
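A minimal sketch (not taken from the book's repository) of what "first-class
functions" means in Python: functions are ordinary objects that can be bound to
names, passed as arguments and returned from other functions.

    def shout(text):
        return text.upper() + '!'

    speak = shout                  # bound to another name
    print(speak('hello'))          # HELLO!

    def twice(func, value):        # received as an argument
        return func(func(value))

    print(twice(shout, 'hi'))      # HI!!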
| {
"pile_set_name": "Github"
} |
// StringUtils.cpp
#include "StdAfx.h"
#include "StringUtils.h"
void SplitStringToTwoStrings(const UString &src, UString &dest1, UString &dest2)
{
dest1.Empty();
dest2.Empty();
bool quoteMode = false;
int i;
for (i = 0; i < src.Length(); i++)
{
wchar_t c = src[i];
if (c == L'\"')
quoteMode = !quoteMode;
    else if (c == L' ' && !quoteMode)
    {
      // first unquoted space: stop here, the remainder becomes dest2
      i++;
      break;
    }
else
dest1 += c;
}
dest2 = src.Mid(i);
}
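// Example (hypothetical input): SplitStringToTwoStrings(L"\"my prog\" arg1 arg2", d1, d2)
// leaves d1 == L"my prog" (the surrounding quotes are stripped) and d2 == L"arg1 arg2".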
void SplitString(const UString &srcString, UStringVector &destStrings)
{
destStrings.Clear();
UString string;
int len = srcString.Length();
if (len == 0)
return;
for (int i = 0; i < len; i++)
{
wchar_t c = srcString[i];
if (c == L' ')
{
if (!string.IsEmpty())
{
destStrings.Add(string);
string.Empty();
}
}
else
string += c;
}
if (!string.IsEmpty())
destStrings.Add(string);
}
UString JoinStrings(const UStringVector &srcStrings)
{
UString destString;
for (int i = 0; i < srcStrings.Size(); i++)
{
if (i != 0)
destString += L' ';
destString += srcStrings[i];
}
return destString;
}
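// Example (hypothetical input): SplitString(L"a bb  c", v) yields {L"a", L"bb", L"c"},
// and JoinStrings(v) gives back L"a bb c" -- note that runs of spaces are collapsed.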
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="UTF-8"?>
<model ref="5ef691b5-60ce-4ece-a04e-25e642dfa128/r:59b6a434-36b8-4735-ae34-3acf97303510(com.mbeddr.mpsutil.lantest/com.mbeddr.mpsutil.lantest.structure)">
<persistence version="9" />
<languages>
<use id="982eb8df-2c96-4bd7-9963-11712ea622e5" name="jetbrains.mps.lang.resources" version="2" />
<devkit ref="78434eb8-b0e5-444b-850d-e7c4ad2da9ab(jetbrains.mps.devkit.aspect.structure)" />
</languages>
<imports>
<import index="tp25" ref="r:00000000-0000-4000-0000-011c89590301(jetbrains.mps.lang.smodel.structure)" />
<import index="tpck" ref="r:00000000-0000-4000-0000-011c89590288(jetbrains.mps.lang.core.structure)" implicit="true" />
<import index="tpce" ref="r:00000000-0000-4000-0000-011c89590292(jetbrains.mps.lang.structure.structure)" implicit="true" />
</imports>
<registry>
<language id="c72da2b9-7cce-4447-8389-f407dc1158b7" name="jetbrains.mps.lang.structure">
<concept id="6054523464627964745" name="jetbrains.mps.lang.structure.structure.AttributeInfo_AttributedConcept" flags="ng" index="trNpa">
<reference id="6054523464627965081" name="concept" index="trN6q" />
</concept>
<concept id="2992811758677295509" name="jetbrains.mps.lang.structure.structure.AttributeInfo" flags="ng" index="M6xJ_">
<property id="7588428831955550663" name="role" index="Hh88m" />
<child id="7588428831947959310" name="attributed" index="EQaZv" />
</concept>
<concept id="1169125787135" name="jetbrains.mps.lang.structure.structure.AbstractConceptDeclaration" flags="ig" index="PkWjJ">
<property id="6714410169261853888" name="conceptId" index="EcuMT" />
<property id="4628067390765907488" name="conceptShortDescription" index="R4oN_" />
<property id="4628067390765956807" name="final" index="R5$K2" />
<property id="4628067390765956802" name="abstract" index="R5$K7" />
<property id="5092175715804935370" name="conceptAlias" index="34LRSv" />
<child id="1071489727083" name="linkDeclaration" index="1TKVEi" />
<child id="1071489727084" name="propertyDeclaration" index="1TKVEl" />
</concept>
<concept id="1169125989551" name="jetbrains.mps.lang.structure.structure.InterfaceConceptDeclaration" flags="ig" index="PlHQZ" />
<concept id="1169127622168" name="jetbrains.mps.lang.structure.structure.InterfaceConceptReference" flags="ig" index="PrWs8">
<reference id="1169127628841" name="intfc" index="PrY4T" />
</concept>
<concept id="1071489090640" name="jetbrains.mps.lang.structure.structure.ConceptDeclaration" flags="ig" index="1TIwiD">
<property id="1096454100552" name="rootable" index="19KtqR" />
<reference id="1071489389519" name="extends" index="1TJDcQ" />
<child id="1169129564478" name="implements" index="PzmwI" />
</concept>
<concept id="1071489288299" name="jetbrains.mps.lang.structure.structure.PropertyDeclaration" flags="ig" index="1TJgyi">
<property id="241647608299431129" name="propertyId" index="IQ2nx" />
<reference id="1082985295845" name="dataType" index="AX2Wp" />
</concept>
<concept id="1071489288298" name="jetbrains.mps.lang.structure.structure.LinkDeclaration" flags="ig" index="1TJgyj">
<property id="1071599776563" name="role" index="20kJfa" />
<property id="1071599893252" name="sourceCardinality" index="20lbJX" />
<property id="1071599937831" name="metaClass" index="20lmBu" />
<property id="241647608299431140" name="linkId" index="IQ2ns" />
<reference id="1071599976176" name="target" index="20lvS9" />
</concept>
</language>
<language id="ceab5195-25ea-4f22-9b92-103b95ca8c0c" name="jetbrains.mps.lang.core">
<concept id="1133920641626" name="jetbrains.mps.lang.core.structure.BaseConcept" flags="ng" index="2VYdi">
<property id="1193676396447" name="virtualPackage" index="3GE5qa" />
<child id="5169995583184591170" name="smodelAttribute" index="lGtFl" />
</concept>
<concept id="1169194658468" name="jetbrains.mps.lang.core.structure.INamedConcept" flags="ng" index="TrEIO">
<property id="1169194664001" name="name" index="TrG5h" />
</concept>
</language>
</registry>
<node concept="1TIwiD" id="4XCJ8CcQ6Nj">
<property role="TrG5h" value="LantestConfig" />
<property role="34LRSv" value="lantest_config" />
<property role="19KtqR" value="true" />
<property role="R4oN_" value="configuration for language testing" />
<property role="EcuMT" value="5722030627681234131" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="1TJgyj" id="2A9nHKANPH8" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="seedModel" />
<property role="IQ2ns" value="2993027727001344840" />
<ref role="20lvS9" node="2A9nHKANPGu" resolve="AbstractSeedModel" />
</node>
<node concept="1TJgyj" id="2P6psD9B1UY" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="seedChooser" />
<property role="20lbJX" value="fLJekj4/_1" />
<property role="IQ2ns" value="3262406899569270462" />
<ref role="20lvS9" node="2P6psD9B1UX" resolve="ISeedChooser" />
</node>
<node concept="1TJgyj" id="33cGTVo60G$" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="langSpecificConfig" />
<property role="IQ2ns" value="3516382903881173796" />
<ref role="20lvS9" node="33cGTVo609o" resolve="ILanguageSpecificConfig" />
</node>
<node concept="1TJgyj" id="5aWlhTu3WIo" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="scope" />
<property role="20lbJX" value="fLJekj5/_0__n" />
<property role="IQ2ns" value="5961733595647167384" />
<ref role="20lvS9" node="5aWlhTu2ZzL" resolve="InterestingLanguages" />
</node>
<node concept="1TJgyj" id="30nlpkLbz5c" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="conceptChooser" />
<property role="20lbJX" value="fLJekj4/_1" />
<property role="IQ2ns" value="3465332537548484940" />
<ref role="20lvS9" node="30nlpkLbzJw" resolve="IConceptChooser" />
</node>
<node concept="1TJgyj" id="3acDVtIDSu6" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20lbJX" value="fLJekj4/_1" />
<property role="20kJfa" value="modelWhereResultsAreSaved" />
<property role="IQ2ns" value="3642470604913215366" />
<ref role="20lvS9" to="tp25:v3WHCwUiHy" resolve="ModelReferenceExpression" />
</node>
<node concept="1TJgyj" id="52eR6w5Qnsd" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20lbJX" value="fLJekj4/_1" />
<property role="20kJfa" value="temporaryModel" />
<property role="IQ2ns" value="5804819309059995405" />
<ref role="20lvS9" to="tp25:v3WHCwUiHy" resolve="ModelReferenceExpression" />
</node>
<node concept="1TJgyj" id="1EeUs_TucP_" role="1TKVEi">
<property role="IQ2ns" value="1913723943214697829" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="modelWithBuggyRootsAfterChecking" />
<property role="20lbJX" value="fLJekj4/_1" />
<ref role="20lvS9" to="tp25:v3WHCwUiHy" resolve="ModelReferenceExpression" />
</node>
<node concept="1TJgyj" id="2zqpPfizDaF" role="1TKVEi">
<property role="IQ2ns" value="2943778916152545963" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="whatToCheckForEditorActions" />
<ref role="20lvS9" node="2zqpPfi$1nS" resolve="AbstractEditorActionsScope" />
</node>
<node concept="1TJgyj" id="2zqpPfizDco" role="1TKVEi">
<property role="IQ2ns" value="2943778916152546072" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="linksKnownToBeHidden" />
<property role="20lbJX" value="fLJekj5/_0__n" />
<ref role="20lvS9" to="tp25:2iMJRNx_nol" resolve="LinkIdRefExpression" />
</node>
<node concept="1TJgyj" id="2zqpPfiG1QG" role="1TKVEi">
<property role="IQ2ns" value="2943778916154744236" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="conceptsKnownNotToBeInstantiable" />
<property role="20lbJX" value="fLJekj5/_0__n" />
<ref role="20lvS9" to="tp25:2iMJRNxweHk" resolve="ConceptIdRefExpression" />
</node>
<node concept="1TJgyi" id="4XCJ8CcQCV$" role="1TKVEl">
<property role="TrG5h" value="maximumNumberOfTries" />
<property role="IQ2nx" value="5722030627681373924" />
<ref role="AX2Wp" to="tpck:fKAQMTA" resolve="integer" />
</node>
<node concept="1TJgyi" id="3HDVcqVU4f8" role="1TKVEl">
<property role="TrG5h" value="minimalDepth" />
<property role="IQ2nx" value="4281213259092607944" />
<ref role="AX2Wp" to="tpck:fKAQMTA" resolve="integer" />
</node>
<node concept="1TJgyi" id="4XCJ8CcQZiq" role="1TKVEl">
<property role="TrG5h" value="maximalDepth" />
<property role="IQ2nx" value="5722030627681465498" />
<ref role="AX2Wp" to="tpck:fKAQMTA" resolve="integer" />
</node>
<node concept="1TJgyi" id="3acDVtI_Wt4" role="1TKVEl">
<property role="TrG5h" value="cloneOriginalNodeRatio" />
<property role="IQ2nx" value="3642470604912183108" />
<ref role="AX2Wp" to="tpck:fKAQMTA" resolve="integer" />
</node>
<node concept="1TJgyi" id="1ir9k2TCuuL" role="1TKVEl">
<property role="TrG5h" value="forceSavingOfAllModels" />
<property role="IQ2nx" value="1484821462071240625" />
<ref role="AX2Wp" to="tpck:fKAQMTB" resolve="boolean" />
</node>
<node concept="1TJgyi" id="484XVyxOHJP" role="1TKVEl">
<property role="IQ2nx" value="4757199478771080181" />
<property role="TrG5h" value="checkEditor" />
<ref role="AX2Wp" to="tpck:fKAQMTB" resolve="boolean" />
</node>
<node concept="1TJgyi" id="5JsRhvvgGh" role="1TKVEl">
<property role="IQ2nx" value="103428260108241681" />
<property role="TrG5h" value="checkEditorOnTheFly" />
<ref role="AX2Wp" to="tpck:fKAQMTB" resolve="boolean" />
</node>
<node concept="1TJgyi" id="484XVyxOHK2" role="1TKVEl">
<property role="IQ2nx" value="4757199478771080194" />
<property role="TrG5h" value="checkGeneratedCode" />
<ref role="AX2Wp" to="tpck:fKAQMTB" resolve="boolean" />
</node>
<node concept="1TJgyi" id="484XVyxOHKv" role="1TKVEl">
<property role="IQ2nx" value="4757199478771080223" />
<property role="TrG5h" value="deleteCheckedRoots" />
<ref role="AX2Wp" to="tpck:fKAQMTB" resolve="boolean" />
</node>
<node concept="1TJgyi" id="5siEZZN9u33" role="1TKVEl">
<property role="IQ2nx" value="6274266346664878275" />
<property role="TrG5h" value="pathToLogDirectory" />
<ref role="AX2Wp" to="tpck:fKAOsGN" resolve="string" />
</node>
<node concept="PrWs8" id="4XCJ8CcQ6V_" role="PzmwI">
<ref role="PrY4T" to="tpck:h0TrEE$" resolve="INamedConcept" />
</node>
</node>
<node concept="PlHQZ" id="2P6psD9B1UX">
<property role="TrG5h" value="ISeedChooser" />
<property role="3GE5qa" value="concept_seed" />
<property role="EcuMT" value="3262406899569270461" />
</node>
<node concept="1TIwiD" id="2P6psD9B1V8">
<property role="34LRSv" value="random descendant seed" />
<property role="TrG5h" value="RandomDescendantSeed" />
<property role="EcuMT" value="3262406899569270472" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="PrWs8" id="2P6psD9B1Zi" role="PzmwI">
<ref role="PrY4T" node="2P6psD9B1UX" resolve="ISeedChooser" />
</node>
</node>
<node concept="1TIwiD" id="2P6psD9D$Jx">
<property role="34LRSv" value="concept seed" />
<property role="TrG5h" value="ConceptSeed" />
<property role="3GE5qa" value="concept_seed" />
<property role="EcuMT" value="3262406899569937377" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="PrWs8" id="2P6psD9D$Jy" role="PzmwI">
<ref role="PrY4T" node="2P6psD9B1UX" resolve="ISeedChooser" />
</node>
<node concept="1TJgyj" id="7K2NL56H6S_" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="concepts" />
<property role="20lbJX" value="fLJekj6/_1__n" />
<property role="IQ2ns" value="8935932283764108837" />
<ref role="20lvS9" node="7K2NL56H6Rb" resolve="AbstractConceptDeclarationRef" />
</node>
</node>
<node concept="PlHQZ" id="33cGTVo609o">
<property role="TrG5h" value="ILanguageSpecificConfig" />
<property role="EcuMT" value="3516382903881171544" />
<property role="3GE5qa" value="language_specific_config" />
</node>
<node concept="1TIwiD" id="5aWlhTu2ZzL">
<property role="TrG5h" value="InterestingLanguages" />
<property role="EcuMT" value="5961733595646916849" />
<property role="34LRSv" value="interesting languages matcher" />
<property role="3GE5qa" value="language_scope" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="1TJgyj" id="6CKjFdBPe0z" role="1TKVEi">
<property role="IQ2ns" value="7651702299350589475" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="interestingLanguages" />
<property role="20lbJX" value="fLJekj5/_0__n" />
<ref role="20lvS9" node="6CKjFdBPdCS" resolve="InterestingLanguage" />
</node>
</node>
<node concept="1TIwiD" id="2A9nHKANPGu">
<property role="TrG5h" value="AbstractSeedModel" />
<property role="R5$K7" value="true" />
<property role="R5$K2" value="false" />
<property role="3GE5qa" value="seed" />
<property role="EcuMT" value="2993027727001344798" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
</node>
<node concept="1TIwiD" id="2A9nHKANPGv">
<property role="3GE5qa" value="seed" />
<property role="TrG5h" value="SingleModelSeed" />
<property role="EcuMT" value="2993027727001344799" />
<property role="34LRSv" value="single model seed" />
<ref role="1TJDcQ" node="2A9nHKANPGu" resolve="AbstractSeedModel" />
<node concept="1TJgyj" id="2A9nHKANPGS" role="1TKVEi">
<property role="20kJfa" value="startingPoint" />
<property role="IQ2ns" value="2993027727001344824" />
<ref role="20lvS9" to="tpck:h0TrEE$" resolve="INamedConcept" />
</node>
</node>
<node concept="1TIwiD" id="7K2NL56H6Rb">
<property role="TrG5h" value="AbstractConceptDeclarationRef" />
<property role="3GE5qa" value="concept_seed" />
<property role="EcuMT" value="8935932283764108747" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="1TJgyj" id="7K2NL56H6Rl" role="1TKVEi">
<property role="20kJfa" value="conceptDeclaration" />
<property role="20lbJX" value="fLJekj4/_1" />
<property role="IQ2ns" value="8935932283764108757" />
<ref role="20lvS9" to="tpce:h0PkWnZ" resolve="AbstractConceptDeclaration" />
</node>
</node>
<node concept="1TIwiD" id="30nlpkL5nws">
<property role="TrG5h" value="MutatedNodeAnnotation" />
<property role="EcuMT" value="3465332537546864668" />
<ref role="1TJDcQ" to="tpck:2ULFgo8_XDk" resolve="NodeAttribute" />
<node concept="1TJgyi" id="3acDVtI_ylt" role="1TKVEl">
<property role="TrG5h" value="mutationDepth" />
<property role="IQ2nx" value="3642470604912076125" />
<ref role="AX2Wp" to="tpck:fKAQMTA" resolve="integer" />
</node>
<node concept="M6xJ_" id="30nlpkL5nwt" role="lGtFl">
<property role="Hh88m" value="mutatedAnnotation" />
<node concept="trNpa" id="30nlpkL5nwx" role="EQaZv">
<ref role="trN6q" to="tpck:gw2VY9q" resolve="BaseConcept" />
</node>
</node>
</node>
<node concept="1TIwiD" id="30nlpkLbzJv">
<property role="TrG5h" value="RandomConceptChooser" />
<property role="3GE5qa" value="concept_chooser" />
<property role="EcuMT" value="3465332537548487647" />
<property role="34LRSv" value="random concept chooser" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="PrWs8" id="30nlpkLbzKi" role="PzmwI">
<ref role="PrY4T" node="30nlpkLbzJw" resolve="IConceptChooser" />
</node>
</node>
<node concept="PlHQZ" id="30nlpkLbzJw">
<property role="TrG5h" value="IConceptChooser" />
<property role="3GE5qa" value="concept_chooser" />
<property role="EcuMT" value="3465332537548487648" />
</node>
<node concept="1TIwiD" id="tJrHRTVPJ_">
<property role="TrG5h" value="ExceptionAnnotation" />
<property role="EcuMT" value="535768780340419557" />
<ref role="1TJDcQ" to="tpck:2ULFgo8_XDk" resolve="NodeAttribute" />
<node concept="1TJgyi" id="64cByBTL3ml" role="1TKVEl">
<property role="TrG5h" value="message" />
<property role="IQ2nx" value="6993138224520770965" />
<ref role="AX2Wp" to="tpck:fKAOsGN" resolve="string" />
</node>
<node concept="1TJgyi" id="7VeUlv6Zsjs" role="1TKVEl">
<property role="IQ2nx" value="9137497257191261404" />
<property role="TrG5h" value="stackTrace" />
<ref role="AX2Wp" to="tpck:fKAOsGN" resolve="string" />
</node>
<node concept="M6xJ_" id="tJrHRTVPJA" role="lGtFl">
<property role="Hh88m" value="exception" />
<node concept="trNpa" id="tJrHRTVPJD" role="EQaZv">
<ref role="trN6q" to="tpck:gw2VY9q" resolve="BaseConcept" />
</node>
</node>
</node>
<node concept="1TIwiD" id="3S9K2OvqeWn">
<property role="EcuMT" value="4470315405174959895" />
<property role="3GE5qa" value="concept_chooser" />
<property role="TrG5h" value="FirstConceptFixedThenRandomChooser" />
<property role="34LRSv" value="first concept given then random" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="1TJgyj" id="3S9K2OvqeWr" role="1TKVEi">
<property role="IQ2ns" value="4470315405174959899" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="first" />
<property role="20lbJX" value="fLJekj4/_1" />
<ref role="20lvS9" node="7K2NL56H6Rb" resolve="AbstractConceptDeclarationRef" />
</node>
<node concept="PrWs8" id="3S9K2OvqeXM" role="PzmwI">
<ref role="PrY4T" node="30nlpkLbzJw" resolve="IConceptChooser" />
</node>
</node>
<node concept="1TIwiD" id="6CKjFdBPdCS">
<property role="EcuMT" value="7651702299350587960" />
<property role="TrG5h" value="InterestingLanguage" />
<property role="34LRSv" value="language" />
<property role="R4oN_" value="allow/dissallow languages based on their name" />
<property role="3GE5qa" value="language_scope" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="1TJgyj" id="6CKjFdBR33G" role="1TKVEi">
<property role="IQ2ns" value="7651702299351068908" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="excluded" />
<property role="20lbJX" value="fLJekj5/_0__n" />
<ref role="20lvS9" node="6CKjFdBR33w" resolve="LanguageExcluded" />
</node>
<node concept="1TJgyi" id="6CKjFdBPdCT" role="1TKVEl">
<property role="IQ2nx" value="7651702299350587961" />
<property role="TrG5h" value="languageNameSubstring" />
<ref role="AX2Wp" to="tpck:fKAOsGN" resolve="string" />
</node>
<node concept="1TJgyi" id="6CKjFdBPdDc" role="1TKVEl">
<property role="IQ2nx" value="7651702299350587980" />
<property role="TrG5h" value="languageNameSubstringIsRegex" />
<ref role="AX2Wp" to="tpck:fKAQMTB" resolve="boolean" />
</node>
</node>
<node concept="1TIwiD" id="6CKjFdBR33w">
<property role="EcuMT" value="7651702299351068896" />
<property role="3GE5qa" value="language_scope" />
<property role="TrG5h" value="LanguageExcluded" />
<property role="34LRSv" value="excluded" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
<node concept="1TJgyi" id="6CKjFdBR33x" role="1TKVEl">
<property role="IQ2nx" value="7651702299351068897" />
<property role="TrG5h" value="languageNameSubstring" />
<ref role="AX2Wp" to="tpck:fKAOsGN" resolve="string" />
</node>
</node>
<node concept="1TIwiD" id="7UfzZYaIfQ3">
<property role="EcuMT" value="9119666098155355523" />
<property role="3GE5qa" value="seed" />
<property role="TrG5h" value="RandomRootNodeFromSolution" />
<property role="34LRSv" value="random root node from solution" />
<property role="R4oN_" value="pick a random root node from a solution" />
<ref role="1TJDcQ" node="2A9nHKANPGu" resolve="AbstractSeedModel" />
<node concept="1TJgyj" id="52eR6w5Pjol" role="1TKVEi">
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="module" />
<property role="20lbJX" value="fLJekj4/_1" />
<property role="IQ2ns" value="5804819309059716629" />
<ref role="20lvS9" to="tp25:1t9FffgebJy" resolve="ModuleRefExpression" />
</node>
</node>
<node concept="1TIwiD" id="2zqpPfi$1nS">
<property role="EcuMT" value="2943778916152645112" />
<property role="TrG5h" value="AbstractEditorActionsScope" />
<property role="R5$K7" value="true" />
<property role="R5$K2" value="false" />
<property role="3GE5qa" value="editor_actions_seed" />
<ref role="1TJDcQ" to="tpck:gw2VY9q" resolve="BaseConcept" />
</node>
<node concept="1TIwiD" id="2zqpPfi$274">
<property role="EcuMT" value="2943778916152648132" />
<property role="3GE5qa" value="editor_actions_seed" />
<property role="TrG5h" value="ModelScopeForEditorActions" />
<property role="34LRSv" value="model scope" />
<property role="R4oN_" value="check instantiability of nodes from model" />
<ref role="1TJDcQ" node="2zqpPfi$1nS" resolve="AbstractEditorActionsScope" />
<node concept="1TJgyj" id="2zqpPfi$275" role="1TKVEi">
<property role="IQ2ns" value="2943778916152648133" />
<property role="20lmBu" value="fLJjDmT/aggregation" />
<property role="20kJfa" value="modelPointerExpression" />
<property role="20lbJX" value="fLJekj4/_1" />
<ref role="20lvS9" to="tp25:1Bs_61$nfRn" resolve="ModelPointerExpression" />
</node>
</node>
<node concept="1TIwiD" id="2zqpPfi$7FA">
<property role="EcuMT" value="2943778916152670950" />
<property role="3GE5qa" value="editor_actions_seed" />
<property role="TrG5h" value="SingleRootNodeScope" />
<property role="34LRSv" value="single root node scope" />
<property role="R4oN_" value="check a single root node" />
<ref role="1TJDcQ" node="2zqpPfi$1nS" resolve="AbstractEditorActionsScope" />
<node concept="1TJgyj" id="2zqpPfi$7FB" role="1TKVEi">
<property role="IQ2ns" value="2943778916152670951" />
<property role="20kJfa" value="root" />
<property role="20lbJX" value="fLJekj4/_1" />
<ref role="20lvS9" to="tpck:h0TrEE$" resolve="INamedConcept" />
</node>
</node>
</model>
| {
"pile_set_name": "Github"
} |
package cobra
import (
"bytes"
"strings"
"testing"
)
func TestZshCompletion(t *testing.T) {
tcs := []struct {
name string
root *Command
expectedExpressions []string
}{
{
name: "trivial",
root: &Command{Use: "trivialapp"},
expectedExpressions: []string{"#compdef trivial"},
},
{
name: "linear",
root: func() *Command {
r := &Command{Use: "linear"}
sub1 := &Command{Use: "sub1"}
r.AddCommand(sub1)
sub2 := &Command{Use: "sub2"}
sub1.AddCommand(sub2)
sub3 := &Command{Use: "sub3"}
sub2.AddCommand(sub3)
return r
}(),
expectedExpressions: []string{"sub1", "sub2", "sub3"},
},
{
name: "flat",
root: func() *Command {
r := &Command{Use: "flat"}
r.AddCommand(&Command{Use: "c1"})
r.AddCommand(&Command{Use: "c2"})
return r
}(),
expectedExpressions: []string{"(c1 c2)"},
},
{
name: "tree",
root: func() *Command {
r := &Command{Use: "tree"}
sub1 := &Command{Use: "sub1"}
r.AddCommand(sub1)
sub11 := &Command{Use: "sub11"}
sub12 := &Command{Use: "sub12"}
sub1.AddCommand(sub11)
sub1.AddCommand(sub12)
sub2 := &Command{Use: "sub2"}
r.AddCommand(sub2)
sub21 := &Command{Use: "sub21"}
sub22 := &Command{Use: "sub22"}
sub2.AddCommand(sub21)
sub2.AddCommand(sub22)
return r
}(),
expectedExpressions: []string{"(sub11 sub12)", "(sub21 sub22)"},
},
}
for _, tc := range tcs {
t.Run(tc.name, func(t *testing.T) {
buf := new(bytes.Buffer)
			if err := tc.root.GenZshCompletion(buf); err != nil {
				t.Fatalf("GenZshCompletion returned an error: %v", err)
			}
output := buf.String()
for _, expectedExpression := range tc.expectedExpressions {
if !strings.Contains(output, expectedExpression) {
t.Errorf("Expected completion to contain %q somewhere; got %q", expectedExpression, output)
}
}
})
}
}
| {
"pile_set_name": "Github"
} |
/*
Copyright (c) 2011, Daniel Guerrero
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the Daniel Guerrero nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL DANIEL GUERRERO BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
var Base64Binary = {
_keyStr : "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=",
/* will return a Uint8Array type */
decodeArrayBuffer: function(input) {
var bytes = Math.ceil( (3*input.length) / 4.0);
var ab = new ArrayBuffer(bytes);
this.decode(input, ab);
return ab;
},
decode: function(input, arrayBuffer) {
		//check the last two chars for '=' padding
		var lkey1 = this._keyStr.indexOf(input.charAt(input.length-1));
		var lkey2 = this._keyStr.indexOf(input.charAt(input.length-2)); // second-to-last char, to detect "==" padding
var bytes = Math.ceil( (3*input.length) / 4.0);
if (lkey1 == 64) bytes--; //padding chars, so skip
if (lkey2 == 64) bytes--; //padding chars, so skip
var uarray;
var chr1, chr2, chr3;
var enc1, enc2, enc3, enc4;
var i = 0;
var j = 0;
if (arrayBuffer)
uarray = new Uint8Array(arrayBuffer);
else
uarray = new Uint8Array(bytes);
input = input.replace(/[^A-Za-z0-9\+\/\=]/g, "");
for (i=0; i<bytes; i+=3) {
//get the 3 octects in 4 ascii chars
enc1 = this._keyStr.indexOf(input.charAt(j++));
enc2 = this._keyStr.indexOf(input.charAt(j++));
enc3 = this._keyStr.indexOf(input.charAt(j++));
enc4 = this._keyStr.indexOf(input.charAt(j++));
chr1 = (enc1 << 2) | (enc2 >> 4);
chr2 = ((enc2 & 15) << 4) | (enc3 >> 2);
chr3 = ((enc3 & 3) << 6) | enc4;
uarray[i] = chr1;
if (enc3 != 64) uarray[i+1] = chr2;
if (enc4 != 64) uarray[i+2] = chr3;
}
return uarray;
}
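	// Example usage (hypothetical data, not part of the original file):
	//   Base64Binary.decode("SGVsbG8=")            -> Uint8Array [72, 101, 108, 108, 111] ("Hello")
	//   Base64Binary.decodeArrayBuffer("SGVsbG8=") -> ArrayBuffer sized from the input length
	//                                                 (may include trailing zero padding)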
};
| {
"pile_set_name": "Github"
} |
---
method: POST
headers:
Authorization: AWS4-HMAC-SHA256 Credential=access/20091028/us-east-1/mturk-requester/aws4_request, SignedHeaders=content-type;host;x-amz-content-sha256;x-amz-date, Signature=?
Host: mturk-requester.us-east-1.amazonaws.com
Content-Type: application/x-www-form-urlencoded; charset=utf-8
X-Amz-Content-SHA256: abcdef
X-Amz-Date: 20091028T223200Z
body:
''
| {
"pile_set_name": "Github"
} |
$NetBSD: patch-ac,v 1.2 2006/08/02 19:24:55 kristerw Exp $
--- xroads.c.orig 1999-06-30 20:25:02.000000000 +0200
+++ xroads.c 2006-08-02 21:14:27.000000000 +0200
@@ -282,13 +282,13 @@
}
/*************************************************************************************************/
-int trunc( float num ) { /* Truncate a float to an int */
+int mytrunc( float num ) { /* Truncate a float to an int */
if (num>0) return (int)num;
else return (int)(num-0.5);
}
/*************************************************************************************************/
-int round( float num ) { /* Round a float to an int */
+int roundup( float num ) { /* Round a float to an int */
return (int)(num+0.5);
}
@@ -394,11 +394,11 @@
/* VacAttack */
/*
if(data[amonst->type].attack1==VACCUUM && (amonst->x==monst[targ].x
- || amonst->y==monst[targ].y) && (round(amonst->x) ==
- trunc(amonst->x) && round(amonst->y) == trunc(amonst->y))) {
+ || amonst->y==monst[targ].y) && (roundup(amonst->x) ==
+ mytrunc(amonst->x) && roundup(amonst->y) == mytrunc(amonst->y))) {
*/
- if(data[amonst->type].attack1==VACCUUM && (round(amonst->x) ==
- trunc(amonst->x) && round(amonst->y) == trunc(amonst->y)) &&
+ if(data[amonst->type].attack1==VACCUUM && (roundup(amonst->x) ==
+ mytrunc(amonst->x) && roundup(amonst->y) == mytrunc(amonst->y)) &&
clearshot(amonst, &monst[targ])) {
watchentity(amonst, &monst[targ]);
vacattack(amonst, &monst[targ]);
@@ -423,19 +423,19 @@
else amonst->frame++;
if(data[amonst->type].etype!=EFFECT) {
- maze[round(oldy)][round(oldx)]=-2; /* Blank out old space */
- maze[trunc(oldy)][trunc(oldx)]=-2;
- maze[round(amonst->y)][round(amonst->x)]=number;
- maze[trunc(amonst->y)][trunc(amonst->x)]=number;
+ maze[roundup(oldy)][roundup(oldx)]=-2; /* Blank out old space */
+ maze[mytrunc(oldy)][mytrunc(oldx)]=-2;
+ maze[roundup(amonst->y)][roundup(amonst->x)]=number;
+ maze[mytrunc(amonst->y)][mytrunc(amonst->x)]=number;
}
/* If there was something underneath the effect, redraw it */
- if(maze[trunc(amonst->y)][trunc(amonst->x)]!=-2) {
- bmonst=&monst[maze[trunc(amonst->y)][trunc(amonst->x)]];
+ if(maze[mytrunc(amonst->y)][mytrunc(amonst->x)]!=-2) {
+ bmonst=&monst[maze[mytrunc(amonst->y)][mytrunc(amonst->x)]];
draw_tilex2(mainwindow, data[bmonst->type].tile[bmonst->frame], (bmonst->x)*GRIDSIZE,
(bmonst->y)*GRIDSIZE, bmonst->color, bmonst->dir);
}
- if(maze[round(amonst->y)][round(amonst->x)]!=-2) {
- bmonst=&monst[maze[round(amonst->y)][round(amonst->x)]];
+ if(maze[roundup(amonst->y)][roundup(amonst->x)]!=-2) {
+ bmonst=&monst[maze[roundup(amonst->y)][roundup(amonst->x)]];
draw_tilex2(mainwindow, data[bmonst->type].tile[bmonst->frame], (bmonst->x)*GRIDSIZE,
(bmonst->y)*GRIDSIZE, bmonst->color, bmonst->dir);
}
@@ -558,7 +558,7 @@
else if(player->attr & PULL_R) { newx+=1; player->attr &= ~PULL_R; }
/* Only one half-step at a time */
- if(round(newx)!=trunc(newx) && round(newy)!=trunc(newy)) {
+ if(roundup(newx)!=mytrunc(newx) && roundup(newy)!=mytrunc(newy)) {
#ifdef DEBUG
printf("Damn, in two half-steps at a time...\n");
#endif
@@ -574,30 +574,30 @@
/* If we're in a half-step, eqx and eqy are the space we're moving
into */
- if(round(newx)!=trunc(newx)) {
- eqy = trunc(newy);
+ if(roundup(newx)!=mytrunc(newx)) {
+ eqy = mytrunc(newy);
if(newx > player->x)
- eqx = round(newx);
+ eqx = roundup(newx);
else if(newx < player->x)
- eqx = trunc(newx);
+ eqx = mytrunc(newx);
}
- else if(round(newy)!=trunc(newy)) {
- eqx = trunc(newx);
+ else if(roundup(newy)!=mytrunc(newy)) {
+ eqx = mytrunc(newx);
if(newy > player->y)
- eqy = round(newy);
+ eqy = roundup(newy);
else if(newy < player->y)
- eqy = trunc(newy);
+ eqy = mytrunc(newy);
}
else {
- eqx = trunc(newx);
- eqy = trunc(newy);
+ eqx = mytrunc(newx);
+ eqy = mytrunc(newy);
}
/*
switch(player->dir) {
- case LEFT: eqx = trunc(newx); break;
- case RIGHT: eqx = round(newx); break;
- case UP: eqy = trunc(newy); break;
- case DOWN: eqy = round(newy); break;
+ case LEFT: eqx = mytrunc(newx); break;
+ case RIGHT: eqx = roundup(newx); break;
+ case UP: eqy = mytrunc(newy); break;
+ case DOWN: eqy = roundup(newy); break;
default:
}
*/
@@ -610,8 +610,8 @@
printf("Destination (%0.2f, %0.2f)...\n", newx, newy);
printf("Equiv (%d, %d)\n", eqx, eqy);
printf("@ %d, %d : %d\n", eqx, eqy, maze[eqy][eqx]);
- printf("newx: %0.2f: round %d, trunc %d\n", newx, round(newx), trunc(newx));
- printf("newy: %0.2f: round %d, trunc %d\n", newy, round(newy), trunc(newy));
+ printf("newx: %0.2f: round %d, trunc %d\n", newx, roundup(newx), mytrunc(newx));
+ printf("newy: %0.2f: round %d, trunc %d\n", newy, roundup(newy), mytrunc(newy));
printf("Checkdest: %d\n", checkdest(eqx, eqy));
printf("-----\n");
}
@@ -646,8 +646,8 @@
else if(!okay) {
#ifdef DEBUG
printf("something in the way. (%d or %d)\n",
- maze[trunc(newy)][trunc(newx)],
- maze[round(newy)][round(newy)]);
+ maze[mytrunc(newy)][mytrunc(newx)],
+ maze[roundup(newy)][roundup(newy)]);
#endif
newx=player->x;
newy=player->y;
@@ -658,15 +658,15 @@
#endif
/* Destination has been set, now all we have to do is move there */
- maze[round(player->y)][round(player->x)]=-2;
- maze[trunc(player->y)][trunc(player->x)]=-2;
+ maze[roundup(player->y)][roundup(player->x)]=-2;
+ maze[mytrunc(player->y)][mytrunc(player->x)]=-2;
draw_tilex2(mainwindow, blank, player->x*GRIDSIZE, player->y*GRIDSIZE,
BLACK, RIGHT);
/* Draw player */
player->x=newx; player->y=newy;
- maze[round(player->y)][round(player->x)]=player->ind;
- maze[trunc(player->y)][trunc(player->x)]=player->ind;
+ maze[roundup(player->y)][roundup(player->x)]=player->ind;
+ maze[mytrunc(player->y)][mytrunc(player->x)]=player->ind;
draw_tilex2(mainwindow, data[datapos].tile[player->frame],
player->x*GRIDSIZE, player->y*GRIDSIZE,
player->color, player->dir);
@@ -1279,8 +1279,8 @@
player[i]->health = 0;
if(player[i]->attr & ACTIVE) {
player[i]->attr ^= ACTIVE;
- maze[round(player[i]->y)][round(player[i]->x)] = -2;
- maze[trunc(player[i]->y)][trunc(player[i]->x)] = -2;
+ maze[roundup(player[i]->y)][roundup(player[i]->x)] = -2;
+ maze[mytrunc(player[i]->y)][mytrunc(player[i]->x)] = -2;
draw_tilex2(mainwindow, blank, player[i]->x*GRIDSIZE,
player[i]->y*GRIDSIZE, BLACK, RIGHT);
#ifdef XPLOSIONS
| {
"pile_set_name": "Github"
} |