max_stars_count (int64, 301-224k) | text (string, lengths 6-1.05M) | token_count (int64, 3-727k)
---|---|---|
3,682 | from django.views import generic
from django.contrib.gis.geos import Point
from django.contrib.gis.db.models.functions import Distance
from .models import Shop
longitude = -80.191_788
latitude = 25.761_681
user_location = Point(longitude, latitude, srid=4326)
class Home(generic.ListView):
model = Shop
context_object_name = "shops"
queryset = Shop.objects.annotate(
distance=Distance("location", user_location)
).order_by("distance")[0:6]
template_name = "shops/index.html"
home = Home.as_view()
| 195 |
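The view above computes `distance` against coordinates that are fixed at import time. As a minimal, hypothetical sketch (same `Shop` model assumed; the `lat`/`lng` query parameters are an invention for illustration), the same annotation can be driven per request by overriding `get_queryset`:

```python
# Hedged sketch: assumes the Shop model from the snippet above with a `location` PointField.
from django.contrib.gis.db.models.functions import Distance
from django.contrib.gis.geos import Point
from django.views import generic

from .models import Shop


class NearbyShops(generic.ListView):
    model = Shop
    context_object_name = "shops"
    template_name = "shops/index.html"

    def get_queryset(self):
        # Fall back to the coordinates used in the snippet above if no params are given.
        lng = float(self.request.GET.get("lng", -80.191788))
        lat = float(self.request.GET.get("lat", 25.761681))
        user_location = Point(lng, lat, srid=4326)
        return (
            Shop.objects.annotate(distance=Distance("location", user_location))
            .order_by("distance")[:6]
        )
```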
5,169 | <reponame>Gantios/Specs
{
"name": "CGGeometry_YJ",
"version": "0.1.1",
"summary": "CGGeometry_YJ is the CGGeometry extension for resizing and positioning calculation.",
"description": "CGGeometry_YJ is the CGGeometry extension for resizing and positioning calculation. Using CGGeometry_YJ is similar to UIView's contentMode.",
"homepage": "https://github.com/huang-kun/CGGeometry_YJ.git",
"license": "MIT",
"authors": {
"huang-kun": "<EMAIL>"
},
"source": {
"git": "https://github.com/huang-kun/CGGeometry_YJ.git",
"tag": "0.1.1"
},
"platforms": {
"ios": "5.0"
},
"source_files": "Pod/Classes/**/*",
"public_header_files": "Pod/Classes/**/*.h"
}
| 273 |
2,174 | <gh_stars>1000+
package com.gaoxi.req;
/**
* @author 大闲人柴毛毛
* @date 2017/10/31 7:42 PM
* @description Query request
*/
public class QueryReq extends AbsReq {
/** Page number */
protected int page = 1;
/** Number of items shown per page */
protected int numPerPage = 10;
// Current row number
protected int currentPage;
public int getCurrentPage() {
return (page-1)*numPerPage;
}
public int getPage() {
return page;
}
public void setPage(int page) {
this.page = page;
}
public int getNumPerPage() {
return numPerPage;
}
public void setNumPerPage(int numPerPage) {
this.numPerPage = numPerPage;
}
}
| 325 |
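Note that `getCurrentPage()` above returns a zero-based row offset rather than a page index: `(page - 1) * numPerPage`, so page 1 maps to offset 0 and page 3 with the default 10 rows per page maps to offset 20. A tiny check of that arithmetic, purely for illustration:

```python
# Hedged illustration of the (page - 1) * numPerPage offset computed by getCurrentPage().
def row_offset(page: int, num_per_page: int = 10) -> int:
    return (page - 1) * num_per_page

assert row_offset(1) == 0    # first page starts at row 0
assert row_offset(3) == 20   # page 3 covers rows 20..29
```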
14,668 | // Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/viz/common/surfaces/scoped_surface_id_allocator.h"
#include <utility>
#include "components/viz/common/surfaces/parent_local_surface_id_allocator.h"
namespace viz {
ScopedSurfaceIdAllocator::ScopedSurfaceIdAllocator(
base::OnceCallback<void()> allocation_task)
: allocation_task_(std::move(allocation_task)) {}
ScopedSurfaceIdAllocator::ScopedSurfaceIdAllocator(
ParentLocalSurfaceIdAllocator* allocator,
base::OnceCallback<void()> allocation_task)
: allocator_(allocator), allocation_task_(std::move(allocation_task)) {
// If you hit this DCHECK, it is because you are attempting to allow multiple
// suppressions to be in flight at the same time.
DCHECK(!allocator->is_allocation_suppressed_);
allocator->is_allocation_suppressed_ = true;
}
ScopedSurfaceIdAllocator::ScopedSurfaceIdAllocator(
ScopedSurfaceIdAllocator&& other)
: allocator_(std::move(other.allocator_)),
allocation_task_(std::move(other.allocation_task_)) {
other.allocator_ = nullptr;
DCHECK(other.allocation_task_.is_null());
}
ScopedSurfaceIdAllocator& ScopedSurfaceIdAllocator::operator=(
ScopedSurfaceIdAllocator&& other) {
ScopedSurfaceIdAllocator temp(std::move(other));
swap(*this, temp);
return *this;
}
ScopedSurfaceIdAllocator::~ScopedSurfaceIdAllocator() {
if (allocator_) {
DCHECK(allocator_->is_allocation_suppressed_);
allocator_->is_allocation_suppressed_ = false;
}
if (allocation_task_)
std::move(allocation_task_).Run();
}
void swap(ScopedSurfaceIdAllocator& first, ScopedSurfaceIdAllocator& second) {
using std::swap; // to enable ADL
swap(first.allocator_, second.allocator_);
swap(first.allocation_task_, second.allocation_task_);
}
} // namespace viz
| 678 |
2,322 | <reponame>metoo10987/wine<gh_stars>1000+
/*
* Program Manager
*
* Copyright 1996 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*/
#define WIN32_LEAN_AND_MEAN
#include <string.h>
#include "windows.h"
#include "progman.h"
/***********************************************************************
*
* PROGRAM_ProgramWndProc
*/
static LRESULT CALLBACK PROGRAM_ProgramWndProc(HWND hWnd, UINT msg, WPARAM wParam, LPARAM lParam)
{
switch (msg)
{
case WM_NCLBUTTONDOWN:
{
HLOCAL hProgram = (HLOCAL) GetWindowLongPtrW(hWnd, 0);
PROGRAM *program = LocalLock(hProgram);
PROGGROUP *group = LocalLock(program->hGroup);
group->hActiveProgram = hProgram;
EnableMenuItem(Globals.hFileMenu, PM_MOVE , MF_ENABLED);
EnableMenuItem(Globals.hFileMenu, PM_COPY , MF_ENABLED);
break;
}
case WM_NCLBUTTONDBLCLK:
{
PROGRAM_ExecuteProgram((HLOCAL) GetWindowLongPtrW(hWnd, 0));
return(0);
}
case WM_PAINTICON:
case WM_NCPAINT:
{
PROGRAM *program;
PAINTSTRUCT ps;
HDC hdc;
hdc = BeginPaint(hWnd,&ps);
program = LocalLock((HLOCAL) GetWindowLongPtrW(hWnd, 0));
if (program->hIcon)
DrawIcon(hdc, 0, 0, program->hIcon);
EndPaint(hWnd,&ps);
break;
}
}
return DefWindowProcW(hWnd, msg, wParam, lParam);
}
/***********************************************************************
*
* PROGRAM_RegisterProgramWinClass
*/
ATOM PROGRAM_RegisterProgramWinClass(void)
{
WNDCLASSW class;
class.style = CS_HREDRAW | CS_VREDRAW;
class.lpfnWndProc = PROGRAM_ProgramWndProc;
class.cbClsExtra = 0;
class.cbWndExtra = sizeof(LONG_PTR);
class.hInstance = Globals.hInstance;
class.hIcon = 0;
class.hCursor = LoadCursorW (0, (LPWSTR)IDC_ARROW);
class.hbrBackground = GetStockObject (WHITE_BRUSH);
class.lpszMenuName = 0;
class.lpszClassName = STRING_PROGRAM_WIN_CLASS_NAME;
return RegisterClassW(&class);
}
/***********************************************************************
*
* PROGRAM_NewProgram
*/
VOID PROGRAM_NewProgram(HLOCAL hGroup)
{
INT nCmdShow = SW_SHOWNORMAL;
INT nHotKey = 0;
INT nIconIndex = 0;
CHAR szName[MAX_PATHNAME_LEN] = "";
CHAR szCmdLine[MAX_PATHNAME_LEN] = "";
CHAR szIconFile[MAX_PATHNAME_LEN] = "";
CHAR szWorkDir[MAX_PATHNAME_LEN] = "";
HICON hIcon = 0;
if (!DIALOG_ProgramAttributes(szName, szCmdLine, szWorkDir, szIconFile,
&hIcon, &nIconIndex, &nHotKey,
&nCmdShow, MAX_PATHNAME_LEN))
return;
if (!hIcon) hIcon = LoadIconW(0, (LPWSTR)IDI_WINLOGO);
if (!PROGRAM_AddProgram(hGroup, hIcon, szName, 0, 0, szCmdLine, szIconFile,
nIconIndex, szWorkDir, nHotKey, nCmdShow))
return;
GRPFILE_WriteGroupFile(hGroup);
}
/***********************************************************************
*
* PROGRAM_ModifyProgram
*/
VOID PROGRAM_ModifyProgram(HLOCAL hProgram)
{
PROGRAM *program = LocalLock(hProgram);
CHAR szName[MAX_PATHNAME_LEN];
CHAR szCmdLine[MAX_PATHNAME_LEN];
CHAR szIconFile[MAX_PATHNAME_LEN];
CHAR szWorkDir[MAX_PATHNAME_LEN];
lstrcpynA(szName, LocalLock(program->hName), MAX_PATHNAME_LEN);
lstrcpynA(szCmdLine, LocalLock(program->hCmdLine), MAX_PATHNAME_LEN);
lstrcpynA(szIconFile, LocalLock(program->hIconFile), MAX_PATHNAME_LEN);
lstrcpynA(szWorkDir, LocalLock(program->hWorkDir), MAX_PATHNAME_LEN);
if (!DIALOG_ProgramAttributes(szName, szCmdLine, szWorkDir, szIconFile,
&program->hIcon, &program->nIconIndex,
&program->nHotKey, &program->nCmdShow,
MAX_PATHNAME_LEN))
return;
MAIN_ReplaceString(&program->hName, szName);
MAIN_ReplaceString(&program->hCmdLine, szCmdLine);
MAIN_ReplaceString(&program->hIconFile, szIconFile);
MAIN_ReplaceString(&program->hWorkDir, szWorkDir);
SetWindowTextA(program->hWnd, szName);
UpdateWindow(program->hWnd);
GRPFILE_WriteGroupFile(program->hGroup);
return;
}
/***********************************************************************
*
* PROGRAM_AddProgram
*/
HLOCAL PROGRAM_AddProgram(HLOCAL hGroup, HICON hIcon, LPCSTR lpszName,
INT x, INT y, LPCSTR lpszCmdLine,
LPCSTR lpszIconFile, INT nIconIndex,
LPCSTR lpszWorkDir, INT nHotKey, INT nCmdShow)
{
PROGGROUP *group = LocalLock(hGroup);
PROGRAM *program;
HLOCAL hPrior, *p;
HLOCAL hProgram = LocalAlloc(LMEM_FIXED, sizeof(PROGRAM));
HLOCAL hName = LocalAlloc(LMEM_FIXED, 1 + strlen(lpszName));
HLOCAL hCmdLine = LocalAlloc(LMEM_FIXED, 1 + strlen(lpszCmdLine));
HLOCAL hIconFile = LocalAlloc(LMEM_FIXED, 1 + strlen(lpszIconFile));
HLOCAL hWorkDir = LocalAlloc(LMEM_FIXED, 1 + strlen(lpszWorkDir));
if (!hProgram || !hName || !hCmdLine || !hIconFile || !hWorkDir)
{
MAIN_MessageBoxIDS(IDS_OUT_OF_MEMORY, IDS_ERROR, MB_OK);
if (hProgram) LocalFree(hProgram);
if (hName) LocalFree(hName);
if (hCmdLine) LocalFree(hCmdLine);
if (hIconFile) LocalFree(hIconFile);
if (hWorkDir) LocalFree(hWorkDir);
return(0);
}
memcpy(LocalLock(hName), lpszName, 1 + strlen(lpszName));
memcpy(LocalLock(hCmdLine), lpszCmdLine, 1 + strlen(lpszCmdLine));
memcpy(LocalLock(hIconFile), lpszIconFile, 1 + strlen(lpszIconFile));
memcpy(LocalLock(hWorkDir), lpszWorkDir, 1 + strlen(lpszWorkDir));
group->hActiveProgram = hProgram;
hPrior = 0;
p = &group->hPrograms;
while (*p)
{
hPrior = *p;
p = &((PROGRAM*)LocalLock(hPrior))->hNext;
}
*p = hProgram;
program = LocalLock(hProgram);
program->hGroup = hGroup;
program->hPrior = hPrior;
program->hNext = 0;
program->hName = hName;
program->hCmdLine = hCmdLine;
program->hIconFile = hIconFile;
program->nIconIndex = nIconIndex;
program->hWorkDir = hWorkDir;
program->hIcon = hIcon;
program->nCmdShow = nCmdShow;
program->nHotKey = nHotKey;
program->hWnd =
CreateWindowW(STRING_PROGRAM_WIN_CLASS_NAME, NULL,
WS_CHILD | WS_CAPTION,
x, y, CW_USEDEFAULT, CW_USEDEFAULT,
group->hWnd, 0, Globals.hInstance, 0);
SetWindowTextA(program->hWnd, lpszName);
SetWindowLongPtrW(program->hWnd, 0, (LONG_PTR) hProgram);
ShowWindow (program->hWnd, SW_SHOWMINIMIZED);
SetWindowPos (program->hWnd, 0, x, y, 0, 0, SWP_NOZORDER | SWP_NOACTIVATE | SWP_NOSIZE);
UpdateWindow (program->hWnd);
return hProgram;
}
/***********************************************************************
*
* PROGRAM_CopyMoveProgram
*/
VOID PROGRAM_CopyMoveProgram(HLOCAL hProgram, BOOL bMove)
{
PROGRAM *program = LocalLock(hProgram);
PROGGROUP *fromgroup = LocalLock(program->hGroup);
HLOCAL hGroup = DIALOG_CopyMove(LocalLock(program->hName),
LocalLock(fromgroup->hName), bMove);
if (!hGroup) return;
/* FIXME shouldn't be necessary */
OpenIcon(((PROGGROUP*)LocalLock(hGroup))->hWnd);
if (!PROGRAM_AddProgram(hGroup,
#if 0
CopyIcon(program->hIcon),
#else
program->hIcon,
#endif
LocalLock(program->hName),
program->x, program->y,
LocalLock(program->hCmdLine),
LocalLock(program->hIconFile),
program->nIconIndex,
LocalLock(program->hWorkDir),
program->nHotKey, program->nCmdShow)) return;
GRPFILE_WriteGroupFile(hGroup);
if (bMove) PROGRAM_DeleteProgram(hProgram, TRUE);
}
/***********************************************************************
*
* PROGRAM_ExecuteProgram
*/
VOID PROGRAM_ExecuteProgram(HLOCAL hProgram)
{
PROGRAM *program = LocalLock(hProgram);
LPSTR lpszCmdLine = LocalLock(program->hCmdLine);
/* FIXME set working directory from program->hWorkDir */
WinExec(lpszCmdLine, program->nCmdShow);
if (Globals.bMinOnRun) CloseWindow(Globals.hMainWnd);
}
/***********************************************************************
*
* PROGRAM_DeleteProgram
*/
VOID PROGRAM_DeleteProgram(HLOCAL hProgram, BOOL bUpdateGrpFile)
{
PROGRAM *program = LocalLock(hProgram);
PROGGROUP *group = LocalLock(program->hGroup);
group->hActiveProgram = 0;
if (program->hPrior)
((PROGRAM*)LocalLock(program->hPrior))->hNext = program->hNext;
else
((PROGGROUP*)LocalLock(program->hGroup))->hPrograms = program->hNext;
if (program->hNext)
((PROGRAM*)LocalLock(program->hNext))->hPrior = program->hPrior;
if (bUpdateGrpFile)
GRPFILE_WriteGroupFile(program->hGroup);
DestroyWindow(program->hWnd);
#if 0
if (program->hIcon)
DestroyIcon(program->hIcon);
#endif
LocalFree(program->hName);
LocalFree(program->hCmdLine);
LocalFree(program->hIconFile);
LocalFree(program->hWorkDir);
LocalFree(hProgram);
}
/***********************************************************************
*
* PROGRAM_FirstProgram
*/
HLOCAL PROGRAM_FirstProgram(HLOCAL hGroup)
{
PROGGROUP *group;
if (!hGroup) return(0);
group = LocalLock(hGroup);
return(group->hPrograms);
}
/***********************************************************************
*
* PROGRAM_NextProgram
*/
HLOCAL PROGRAM_NextProgram(HLOCAL hProgram)
{
PROGRAM *program;
if (!hProgram) return(0);
program = LocalLock(hProgram);
return(program->hNext);
}
/***********************************************************************
*
* PROGRAM_ActiveProgram
*/
HLOCAL PROGRAM_ActiveProgram(HLOCAL hGroup)
{
PROGGROUP *group;
if (!hGroup) return(0);
group = LocalLock(hGroup);
if (IsIconic(group->hWnd)) return(0);
return(group->hActiveProgram);
}
/***********************************************************************
*
* PROGRAM_ProgramName
*/
LPCSTR PROGRAM_ProgramName(HLOCAL hProgram)
{
PROGRAM *program;
if (!hProgram) return(0);
program = LocalLock(hProgram);
return(LocalLock(program->hName));
}
| 4,030 |
703 | #pragma once
#include <EditorEngineProcessFramework/EngineProcess/EngineProcessDocumentContext.h>
#include <EnginePluginAssets/EnginePluginAssetsDLL.h>
#include <RendererCore/Declarations.h>
#include <RendererCore/Meshes/MeshResource.h>
class ezObjectSelectionMsgToEngine;
class ezRenderContext;
class EZ_ENGINEPLUGINASSETS_DLL ezMeshContext : public ezEngineProcessDocumentContext
{
EZ_ADD_DYNAMIC_REFLECTION(ezMeshContext, ezEngineProcessDocumentContext);
public:
ezMeshContext();
virtual void HandleMessage(const ezEditorEngineDocumentMsg* pMsg) override;
const ezMeshResourceHandle& GetMesh() const { return m_hMesh; }
bool m_bDisplayGrid = true;
protected:
virtual void OnInitialize() override;
virtual ezEngineProcessViewContext* CreateViewContext() override;
virtual void DestroyViewContext(ezEngineProcessViewContext* pContext) override;
virtual bool UpdateThumbnailViewContext(ezEngineProcessViewContext* pThumbnailViewContext) override;
private:
void QuerySelectionBBox(const ezEditorEngineDocumentMsg* pMsg);
void OnResourceEvent(const ezResourceEvent& e);
ezGameObject* m_pMeshObject;
ezMeshResourceHandle m_hMesh;
ezAtomicBool m_boundsDirty = false;
ezEvent<const ezResourceEvent&, ezMutex>::Unsubscriber m_meshResourceEventSubscriber;
};
| 442 |
332 | /*
Copyright (c) 2016-2020 Microsoft Corporation. All rights reserved.
Released under Apache 2.0 license as described in the file LICENSE.
Author: <NAME>, <NAME>
*/
#pragma once
#include <string>
namespace lean {
std::string remove_cr(std::string const & str);
bool equal_upto_cr(std::string const & a, std::string const & b);
}
| 104 |
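The header only declares the two helpers. As an assumption about their semantics inferred from the names (the actual implementation is not shown here), a Python sketch:

```python
# Hedged sketch: likely semantics of remove_cr / equal_upto_cr, inferred from their names.
def remove_cr(s: str) -> str:
    """Drop carriage returns so CRLF and LF text compare equal."""
    return s.replace("\r", "")


def equal_upto_cr(a: str, b: str) -> bool:
    """Compare two strings while ignoring carriage returns."""
    return remove_cr(a) == remove_cr(b)


assert equal_upto_cr("line1\r\nline2", "line1\nline2")
```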
892 | <reponame>westonsteimel/advisory-database-github
{
"schema_version": "1.2.0",
"id": "GHSA-wf6v-m5v4-phr8",
"modified": "2022-04-29T01:26:21Z",
"published": "2022-04-29T01:26:21Z",
"aliases": [
"CVE-2003-0395"
],
"details": "Ultimate PHP Board (UPB) 1.9 allows remote attackers to execute arbitrary PHP code with UPB administrator privileges via an HTTP request containing the code in the User-Agent header, which is executed when the administrator executes admin_iplog.php.",
"severity": [
],
"affected": [
],
"references": [
{
"type": "ADVISORY",
"url": "https://nvd.nist.gov/vuln/detail/CVE-2003-0395"
},
{
"type": "WEB",
"url": "http://f0kp.iplus.ru/bz/024.en.txt"
},
{
"type": "WEB",
"url": "http://marc.info/?l=bugtraq&m=105379741528925&w=2"
}
],
"database_specific": {
"cwe_ids": [
],
"severity": "HIGH",
"github_reviewed": false
}
} | 426 |
1,168 | // Checks that we correctly record instantiates edges for total specs.
//- @s_equals_float defines/binding PrimaryT
template <typename S> bool s_equals_float = false;
//- @s_equals_float defines/binding TotalT
template <> bool s_equals_float<float> = true;
//- @s_equals_float ref TotalT
//- TotalT instantiates PrimaryTFloat
//- TotalT specializes PrimaryTFloat
//- PrimaryTFloat param.0 PrimaryT
bool is_true = s_equals_float<float>;
| 141 |
2,757 | /*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define _GNU_SOURCE
#include <err.h>
#include <fcntl.h>
#include <stdint.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>
#include <stdio.h>
void execveat(int fd, char* path, char** args, char** envp, int options) {
syscall(__NR_execveat, (uintptr_t)fd, (uintptr_t)path, (uintptr_t)args,
(uintptr_t)envp, (uintptr_t)options);
}
int main() {
setvbuf(stdout, NULL, _IONBF, 0);
setvbuf(stderr, NULL, _IONBF, 0);
puts("Max binary size 10MiB");
puts("len(ELF) u32le || ELF: ");
uint32_t len = 0;
if (read(STDIN_FILENO, &len, sizeof(len)) != sizeof(len)) {
err(1, "read");
}
if (len > 10*1024*1024) {
errx(1, "too large");
}
int fd = memfd_create("bin", MFD_CLOEXEC);
if (fd < 0) {
err(1, "memfd_create");
}
size_t to_copy = len;
while (to_copy) {
char buf[4096];
size_t c = sizeof(buf) < to_copy ? sizeof(buf) : to_copy;
ssize_t r = read(STDIN_FILENO, &buf[0], c);
if (r <= 0) {
err(1, "read");
}
to_copy -= r;
if (write(fd, &buf[0], r) != r) {
err(1, "write");
}
}
char* args[] = {"bin", NULL};
char* envp[] = {NULL};
execveat(fd, "", args, envp, AT_EMPTY_PATH);
err(1, "execveat");
return 0;
}
| 715 |
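The loader above expects, on stdin, a little-endian `uint32` length followed by at most 10 MiB of ELF bytes, which it copies into a memfd and runs via `execveat(..., AT_EMPTY_PATH)`. A hedged sketch of a client producing that framing; the `./loader` and `./hello` paths are hypothetical:

```python
# Hedged sketch: frame an ELF as <u32 little-endian length><bytes> for the loader above.
import struct
import subprocess

with open("./hello", "rb") as f:      # hypothetical ELF to execute
    elf = f.read()

assert len(elf) <= 10 * 1024 * 1024   # the loader rejects anything larger

payload = struct.pack("<I", len(elf)) + elf
subprocess.run(["./loader"], input=payload)   # "./loader" is an assumed binary name
```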
345 | import tensorflow as tf
import math
def fully_connected(input_, output_nodes, name, stddev=0.02):
with tf.variable_scope(name):
input_shape = input_.get_shape()
input_nodes = input_shape[-1]
w = tf.get_variable('w', [input_nodes, output_nodes],
initializer=tf.truncated_normal_initializer(stddev=stddev))
biases = tf.get_variable('b', [output_nodes],
initializer=tf.constant_initializer(0.0))
res = tf.matmul(input_, w) + biases
return res
# 1d CONVOLUTION WITH DILATION
def conv1d(input_, output_channels,
dilation = 1, filter_width = 1, causal = False,
name = "dilated_conv"):
with tf.variable_scope(name):
w = tf.get_variable('w', [1, filter_width, input_.get_shape()[-1], output_channels ],
initializer=tf.truncated_normal_initializer(stddev=0.02))
b = tf.get_variable('b', [output_channels ],
initializer=tf.constant_initializer(0.0))
if causal:
padding = [[0, 0], [(filter_width - 1) * dilation, 0], [0, 0]]
padded = tf.pad(input_, padding)
input_expanded = tf.expand_dims(padded, dim = 1)
out = tf.nn.atrous_conv2d(input_expanded, w, rate = dilation, padding = 'VALID') + b
else:
input_expanded = tf.expand_dims(input_, dim = 1)
out = tf.nn.atrous_conv2d(input_expanded, w, rate = dilation, padding = 'SAME') + b
return tf.squeeze(out, [1])
def layer_normalization(x, name, epsilon=1e-8, trainable = True):
with tf.variable_scope(name):
shape = x.get_shape()
beta = tf.get_variable('beta', [ int(shape[-1])],
initializer=tf.constant_initializer(0), trainable=trainable)
gamma = tf.get_variable('gamma', [ int(shape[-1])],
initializer=tf.constant_initializer(1), trainable=trainable)
mean, variance = tf.nn.moments(x, axes=[len(shape) - 1], keep_dims=True)
x = (x - mean) / tf.sqrt(variance + epsilon)
return gamma * x + beta
def byetenet_residual_block(input_, dilation, layer_no,
residual_channels, filter_width,
causal = True, train = True):
block_type = "decoder" if causal else "encoder"
block_name = "bytenet_{}_layer_{}_{}".format(block_type, layer_no, dilation)
with tf.variable_scope(block_name):
input_ln = layer_normalization(input_, name="ln1", trainable = train)
relu1 = tf.nn.relu(input_ln)
conv1 = conv1d(relu1, residual_channels, name = "conv1d_1")
conv1 = layer_normalization(conv1, name="ln2", trainable = train)
relu2 = tf.nn.relu(conv1)
dilated_conv = conv1d(relu2, residual_channels,
dilation, filter_width,
causal = causal,
name = "dilated_conv"
)
print(dilated_conv)
dilated_conv = layer_normalization(dilated_conv, name="ln3", trainable = train)
relu3 = tf.nn.relu(dilated_conv)
conv2 = conv1d(relu3, 2 * residual_channels, name = 'conv1d_2')
return input_ + conv2
def init_weight(dim_in, dim_out, name=None, stddev=1.0):
return tf.Variable(tf.truncated_normal([dim_in, dim_out], stddev=stddev/math.sqrt(float(dim_in))), name=name)
def init_bias(dim_out, name=None):
return tf.Variable(tf.zeros([dim_out]), name=name) | 1,626 |
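The helpers above target the TF1 `tf.variable_scope`/`tf.get_variable` API. A minimal sketch of how the residual block might be stacked with exponentially growing dilation rates; the layer count, sequence length, and channel sizes are illustrative assumptions, not values from the original model:

```python
# Hedged sketch, TF1-style graph construction; all hyperparameters below are assumptions.
import tensorflow as tf

batch, time_steps, residual_channels = 16, 128, 512
# The block returns input_ + conv2, where conv2 has 2 * residual_channels channels,
# so the input must already carry 2 * residual_channels channels.
inputs = tf.placeholder(tf.float32, [batch, time_steps, 2 * residual_channels])

x = inputs
for layer_no in range(3):                  # small decoder stack, for illustration only
    for dilation in [1, 2, 4, 8, 16]:      # exponentially growing receptive field
        x = byetenet_residual_block(x, dilation, layer_no,
                                    residual_channels, filter_width=3,
                                    causal=True, train=True)
```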
1,056 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.netbeans.modules.j2ee.jpa.refactoring;
import org.netbeans.api.java.source.TreePathHandle;
/**
* This class represents an annotation reference to an entity.
*
* @author <NAME>
*/
public class EntityAnnotationReference {
/**
* The entity that has the feature with the referencing annotation.
*/
private final String entity;
/**
* The FQN of the referencing annotation.
*/
private final String annotation;
/**
* The referencing annotation attribute.
*/
private final String attribute;
/**
* The value for the referencing annotation attribute.
*/
private final String attributeValue;
/**
* The handle for the property that has the referencing annotation.
*/
private final TreePathHandle handle;
/**
* Creates a new instance of EntityAnnotationReference.
* @param entity the entity that has the feature with the referencing annotation.
* @param annotation the FQN of the referencing annotation.
* @param attribute the referencing annotation attribute.
* @param attributeValue the value of the annotation attribute that references the other entity.
* @param handle the handle for the property that has the referencing annotation.
*/
public EntityAnnotationReference(String entity, String annotation,
String attribute, String attributeValue, TreePathHandle handle) {
this.entity = entity;
this.annotation = annotation;
this.attribute = attribute;
this.attributeValue = attributeValue;
this.handle = handle;
}
/**
*@see #entity
*/
public String getEntity() {
return entity;
}
/**
*@see #annotation
*/
public String getAnnotation() {
return annotation;
}
/**
*@see #attribute
*/
public String getAttribute() {
return attribute;
}
/**
*@see #attributeValue
*/
public String getAttributeValue() {
return attributeValue;
}
/**
*@see #handle
*/
public TreePathHandle getHandle() {
return handle;
}
}
| 937 |
348 | {"nom":"Victoria","circ":"10ème circonscription","dpt":"Français établis hors de France","inscrits":373,"abs":307,"votants":66,"blancs":3,"nuls":0,"exp":63,"res":[{"nuance":"REM","nom":"<NAME>","voix":39},{"nuance":"LR","nom":"<NAME>","voix":24}]} | 93 |
1,116 | import os
import shutil
from .. import run_nbgrader
from .base import BaseTestApp
class TestNbGraderQuickStart(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["quickstart", "--help-all"])
def test_no_course_id(self):
"""Is the help displayed when no course id is given?"""
run_nbgrader(["quickstart"], retcode=1)
def test_quickstart(self, fake_home_dir):
"""Is the quickstart example properly generated?"""
run_nbgrader(["quickstart", "example"])
# it should fail if it already exists
run_nbgrader(["quickstart", "example"], retcode=1)
# it should succeed if --force is given
os.remove(os.path.join("example", "nbgrader_config.py"))
run_nbgrader(["quickstart", "example", "--force"])
assert os.path.exists(os.path.join("example", "nbgrader_config.py"))
# nbgrader validate should work
os.chdir("example")
for nb in os.listdir(os.path.join("source", "ps1")):
if not nb.endswith(".ipynb"):
continue
output = run_nbgrader(["validate", os.path.join("source", "ps1", nb)], stdout=True)
assert output.strip() == "Success! Your notebook passes all the tests."
# nbgrader generate_assignment should work
run_nbgrader(["generate_assignment", "ps1"])
def test_quickstart_overwrite_course_folder_if_structure_not_present(self):
"""Is the quickstart example properly generated?"""
run_nbgrader(["quickstart", "example_without_folder_and_config_file"])
# it should fail if it already exists
run_nbgrader(["quickstart", "example_without_folder_and_config_file"], retcode=1)
# should succeed if both source folder and config file are not present.
shutil.rmtree(os.path.join("example_without_folder_and_config_file", "source"))
os.remove(os.path.join("example_without_folder_and_config_file", "nbgrader_config.py"))
run_nbgrader(["quickstart", "example_without_folder_and_config_file"])
assert os.path.exists(os.path.join("example_without_folder_and_config_file", "nbgrader_config.py"))
assert os.path.exists(os.path.join("example_without_folder_and_config_file", "source"))
# nbgrader validate should work
os.chdir("example_without_folder_and_config_file")
for nb in os.listdir(os.path.join("source", "ps1")):
if not nb.endswith(".ipynb"):
continue
output = run_nbgrader(["validate", os.path.join("source", "ps1", nb)], stdout=True)
assert output.strip() == "Success! Your notebook passes all the tests."
# nbgrader generate_assignment should work
run_nbgrader(["generate_assignment", "ps1"])
def test_quickstart_fails_with_source_folder_removed(self):
"""Is the quickstart example properly generated if source folder removed?"""
run_nbgrader(["quickstart", "example_source_folder_fail"])
# it should fail if it already exists
run_nbgrader(["quickstart", "example_source_folder_fail"], retcode=1)
# remove only the source folder; the nbgrader_config.py file is left in place
shutil.rmtree(os.path.join("example_source_folder_fail", "source"))
# it should still fail because the config file still exists
run_nbgrader(["quickstart", "example_source_folder_fail"], retcode=1)
def test_quickstart_fails_with_config_file_removed(self):
"""Is the quickstart example properly generated if source folder removed?"""
run_nbgrader(["quickstart", "example_source_folder_fail"])
# it should fail if it already exists
run_nbgrader(["quickstart", "example_source_folder_fail"], retcode=1)
# remove only the config file; the source folder is left in place
os.remove(os.path.join("example_source_folder_fail", "nbgrader_config.py"))
# it should still fail because the source folder still exists
run_nbgrader(["quickstart", "example_source_folder_fail"], retcode=1)
def test_quickstart_f(self):
"""Is the quickstart example properly generated?"""
run_nbgrader(["quickstart", "example"])
# it should fail if it already exists
run_nbgrader(["quickstart", "example"], retcode=1)
# it should succeed if --force is given
os.remove(os.path.join("example", "nbgrader_config.py"))
run_nbgrader(["quickstart", "example", "-f"])
assert os.path.exists(os.path.join("example", "nbgrader_config.py"))
# nbgrader validate should work
os.chdir("example")
for nb in os.listdir(os.path.join("source", "ps1")):
if not nb.endswith(".ipynb"):
continue
output = run_nbgrader(["validate", os.path.join("source", "ps1", nb)], stdout=True)
assert output.strip() == "Success! Your notebook passes all the tests."
# nbgrader generate_assignment should work
run_nbgrader(["generate_assignment", "ps1"])
| 2,032 |
1,010 | /*
* Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.jet.core.metrics;
import com.hazelcast.jet.Job;
import javax.management.MBeanServer;
import javax.management.ObjectInstance;
import javax.management.ObjectName;
import java.lang.management.ManagementFactory;
import java.util.Set;
import static java.util.stream.Collectors.toSet;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
class JmxMetricsChecker {
private static final String PREFIX = "com.hazelcast.jet";
private final MBeanServer platformMBeanServer = ManagementFactory.getPlatformMBeanServer();
private final ObjectName descriptor;
JmxMetricsChecker(String instance) throws Exception {
this.descriptor = new ObjectName(getName(instance));
}
JmxMetricsChecker(String instance, Job job) throws Exception {
this.descriptor = new ObjectName(getName(instance, job));
}
JmxMetricsChecker(String instance, Job job, String... extraTags) throws Exception {
this.descriptor = new ObjectName(getName(instance, job, extraTags));
}
long getMetricValue(String metricName) throws Exception {
Set<ObjectName> publishedDescriptors = platformMBeanServer
.queryMBeans(new ObjectName(PREFIX + ":*"), null)
.stream().map(ObjectInstance::getObjectName).collect(toSet());
assertTrue("name: " + metricName + " not published", publishedDescriptors.contains(descriptor));
return (long) platformMBeanServer.getAttribute(descriptor, metricName);
}
void assertMetricValue(String metricName, long expectedValue) throws Exception {
long actualValue = getMetricValue(metricName);
assertEquals(expectedValue, actualValue);
}
long assertMetricValueAtLeast(String metricName, long minExpectedValue) throws Exception {
long actualValue = getMetricValue(metricName);
assertTrue(actualValue >= minExpectedValue);
return actualValue;
}
private static String getName(String instance, Job job, String... extraTags) {
String jobId = job.getIdString();
String execId = job.getMetrics().get(MetricNames.RECEIVED_COUNT).get(0).tag("exec");
StringBuilder sb = new StringBuilder();
sb.append(getName(instance, "job=" + jobId, "exec=" + execId));
return appendTags(sb, 2, extraTags).toString();
}
private static String getName(String instance, String... extraTags) {
StringBuilder sb = new StringBuilder();
sb.append(String.format("com.hazelcast.jet:type=Metrics,instance=%s", instance));
return appendTags(sb, 0, extraTags).toString();
}
private static StringBuilder appendTags(StringBuilder sb, int offset, String... extraTags) {
for (int i = 0; i < extraTags.length; i++) {
sb.append(String.format(",tag%d=\"%s\"", offset + i, extraTags[i]));
}
return sb;
}
}
| 1,209 |
457 | """tests for path module"""
| 7 |
4,054 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
#pragma once
#include "fusionspec.h"
#include <vespa/searchlib/common/serialnum.h>
#include <vespa/vespalib/stllike/string.h>
namespace searchcorespi {
namespace index {
/**
* Utility class with functions to read aspects of an index from disk.
* Used by the index maintainer.
*/
struct IndexReadUtilities {
static FusionSpec readFusionSpec(const vespalib::string &baseDir);
static search::SerialNum readSerialNum(const vespalib::string &dir);
};
} // namespace index
} // namespace searchcorespi
| 191 |
5,169 | <reponame>Gantios/Specs
{
"name": "testFramework_eldhojohnson",
"version": "0.0.3",
"summary": "Lenddo SDK is used for company testing purposes.",
"description": "Lenddo SDK is used for company testing purposes. Find more details at https://github.com/eldhojohnson/testFramework",
"homepage": "https://github.com/eldhojohnson/testFramework",
"license": {
"type": "MIT",
"file": "LICENSE"
},
"authors": {
"eldho": "<EMAIL>"
},
"platforms": {
"ios": null
},
"source": {
"git": "https://github.com/eldhojohnson/testFramework.git",
"tag": "0.0.3"
},
"source_files": "lenddoSDK/**/*.{h,m}"
}
| 262 |
372 | /*-----------------------------------------------------------------------------
* $RCSfile: ShSvPlaceOrder.c,v $
*
* See Copyright for the status of this software.
*
* The OpenSOAP Project
* http://opensoap.jp/
*-----------------------------------------------------------------------------
*/
#include "ShSvPlaceOrder.h"
#include "ShSvCmn.h"
#include "SvCmn.h"
#include <string.h>
/* process place order request */
int
PlaceOrder(OpenSOAPEnvelopePtr request,
OpenSOAPEnvelopePtr *response,
void *opt) {
static char FuncName[] = "PlaceOrder";
int error = OPENSOAP_NO_ERROR;
char *code = NULL;
long qty = -1;
OpenSOAPStringPtr confirmation = NULL;
OpenSOAPBlockPtr body = NULL;
int i = 0;
const char* nameSpace = opt;
/* --- load product stock --- */
error = LoadProductStock();
ERROR_RETURN(error, FuncName, "load product stock");
LOG_INT(FuncName, "productStockCount", productStockCount);
/* --- parse request message --- */
error = ParseRequestCommon(request,
"PlaceOrder",
&body,
nameSpace);
if (OPENSOAP_FAILED(error)) {
error = CreateFaultMessage(response,
nameSpace,
"SOAP-ENV:service",
"cannont parse common part of "
"PlaceOrder request",
NULL,
NULL);
return error;
}
error = GetStringParameter(body, "code", &code);
if (OPENSOAP_FAILED(error)) {
error = CreateFaultMessage(response,
nameSpace,
"SOAP-ENV:service",
"cannont get code parameter",
NULL,
NULL);
return error;
}
error = OpenSOAPBlockGetChildValueMB(body, "qty", "int", &qty);
if (OPENSOAP_FAILED(error)) {
error = CreateFaultMessage(response,
nameSpace,
"SOAP-ENV:service",
"cannont get qty parameter",
NULL,
NULL);
return error;
}
/* --- make response contents --- */
while (i < productStockCount
&& strcmp(code, productStockList[i].code) != 0) {
LOG_STRING(FuncName, "code", productStockList[i].code);
i++;
}
free(code);
if (i == productStockCount) {
error = CreateFaultMessage(response,
nameSpace,
"SOAP-ENV:service",
"invalid code parameter",
NULL,
NULL);
return error;
}
else if (qty < 1 || qty > productStockList[i].qty) {
error = CreateFaultMessage(response,
nameSpace,
"SOAP-ENV:service",
"qty parameter out of range",
NULL,
NULL);
return error;
}
else {
productStockList[i].qty -= qty;
error = OpenSOAPStringCreateWithMB("order successfully placed",
&confirmation);
ERROR_RETURN(error, FuncName, "create string: confirmation");
/* --- create response message --- */
error = CreateResponseCommon(response,
"PlaceOrderResponse",
&body,
nameSpace);
ERROR_RETURN(error, FuncName, "create common part of response");
error = OpenSOAPBlockSetChildValueMB(body,
"confirmation",
"string",
&confirmation);
ERROR_RETURN(error, FuncName, "set parameter: confirmation");
}
/* --- save product stock --- */
error = SaveProductStock();
ERROR_RETURN(error, FuncName, "save product stock");
return error;
}
| 1,390 |
12,718 | /* termios c_cc symbolic constant definitions. Linux/powerpc version.
Copyright (C) 2019-2021 Free Software Foundation, Inc.
This file is part of the GNU C Library.
The GNU C Library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
The GNU C Library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with the GNU C Library. If not, see
<https://www.gnu.org/licenses/>. */
#ifndef _TERMIOS_H
# error "Never include <bits/termios-c_cc.h> directly; use <termios.h> instead."
#endif
/* c_cc characters */
#define VINTR 0
#define VQUIT 1
#define VERASE 2
#define VKILL 3
#define VEOF 4
#define VMIN 5
#define VEOL 6
#define VTIME 7
#define VEOL2 8
#define VSWTC 9
#define VWERASE 10
#define VREPRINT 11
#define VSUSP 12
#define VSTART 13
#define VSTOP 14
#define VLNEXT 15
#define VDISCARD 16 | 426 |
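These constants index the `c_cc` control-character array of `struct termios`. A short sketch of typical usage from Python's `termios` module (assumes stdin is a terminal), putting the terminal into a blocking, byte-at-a-time read mode:

```python
# Hedged sketch: use VMIN/VTIME so a read() returns as soon as one byte arrives.
import sys
import termios

fd = sys.stdin.fileno()                       # assumption: stdin is a tty
attrs = termios.tcgetattr(fd)
attrs[3] &= ~(termios.ICANON | termios.ECHO)  # lflag: leave canonical mode, disable echo
attrs[6][termios.VMIN] = 1                    # c_cc[VMIN]: block until at least 1 byte
attrs[6][termios.VTIME] = 0                   # c_cc[VTIME]: no inter-byte timeout
termios.tcsetattr(fd, termios.TCSANOW, attrs)
```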
569 | // Copyright 2019 DeepMind Technologies Limited.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "reverb/cc/platform/net.h"
#include <netinet/in.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <unistd.h>
#include <cerrno>
#include <cstdlib>
#include <cstring>
#include <unordered_set>
#include "reverb/cc/platform/logging.h"
namespace deepmind {
namespace reverb {
namespace internal {
namespace {
bool IsPortAvailable(int* port, bool is_tcp) {
const int protocol = is_tcp ? IPPROTO_TCP : 0;
const int fd = socket(AF_INET, is_tcp ? SOCK_STREAM : SOCK_DGRAM, protocol);
struct sockaddr_in addr;
socklen_t addr_len = sizeof(addr);
int actual_port;
REVERB_CHECK_GE(*port, 0);
REVERB_CHECK_LE(*port, 65535);
if (fd < 0) {
REVERB_LOG(REVERB_ERROR) << "socket() failed: " << strerror(errno);
return false;
}
// SO_REUSEADDR lets us start up a server immediately after it exists.
int one = 1;
if (setsockopt(fd, SOL_SOCKET, SO_REUSEADDR, &one, sizeof(one)) < 0) {
REVERB_LOG(REVERB_ERROR) << "setsockopt() failed: " << strerror(errno);
if (close(fd) < 0) {
REVERB_LOG(REVERB_ERROR) << "close() failed: " << strerror(errno);
}
return false;
}
// Try binding to port.
addr.sin_family = AF_INET;
addr.sin_addr.s_addr = INADDR_ANY;
addr.sin_port = htons(static_cast<uint16_t>(*port));
if (bind(fd, reinterpret_cast<struct sockaddr*>(&addr), sizeof(addr)) < 0) {
REVERB_LOG(REVERB_WARNING)
<< "bind(port=" << *port << ") failed: " << strerror(errno);
if (close(fd) < 0) {
REVERB_LOG(REVERB_ERROR) << "close() failed: " << strerror(errno);
}
return false;
}
// Get the bound port number.
if (getsockname(fd, reinterpret_cast<struct sockaddr*>(&addr), &addr_len) <
0) {
REVERB_LOG(REVERB_WARNING) << "getsockname() failed: " << strerror(errno);
if (close(fd) < 0) {
REVERB_LOG(REVERB_ERROR) << "close() failed: " << strerror(errno);
}
return false;
}
REVERB_CHECK_LE(addr_len, sizeof(addr));
actual_port = ntohs(addr.sin_port);
REVERB_CHECK_GT(actual_port, 0);
if (*port == 0) {
*port = actual_port;
} else {
REVERB_CHECK_EQ(*port, actual_port);
}
if (close(fd) < 0) {
REVERB_LOG(REVERB_ERROR) << "close() failed: " << strerror(errno);
}
return true;
}
const int kNumRandomPortsToPick = 100;
const int kMaximumTrials = 1000;
} // namespace
int PickUnusedPortOrDie() {
static std::unordered_set<int> chosen_ports;
// Type of port to first pick in the next iteration.
bool is_tcp = true;
int trial = 0;
while (true) {
int port;
trial++;
REVERB_CHECK_LE(trial, kMaximumTrials)
<< "Failed to pick an unused port for testing.";
if (trial == 1) {
port = getpid() % (65536 - 30000) + 30000;
} else if (trial <= kNumRandomPortsToPick) {
port = rand() % (65536 - 30000) + 30000; // NOLINT: Ignore suggestion to use rand_r instead.
} else {
port = 0;
}
if (chosen_ports.find(port) != chosen_ports.end()) {
continue;
}
if (!IsPortAvailable(&port, is_tcp)) {
continue;
}
REVERB_CHECK_GT(port, 0);
if (!IsPortAvailable(&port, !is_tcp)) {
is_tcp = !is_tcp;
continue;
}
chosen_ports.insert(port);
return port;
}
return 0;
}
} // namespace internal
} // namespace reverb
} // namespace deepmind
| 1,552 |
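`PickUnusedPortOrDie` probes candidate ports by binding and reading the assigned port back with `getsockname`. The same idea in a much smaller Python sketch, letting the kernel choose by binding to port 0; it shares the usual caveat that the port can be taken by someone else between `close()` and the caller's real `bind()`:

```python
# Hedged sketch: ask the kernel for a currently-unused TCP port, then release it.
import socket

def pick_unused_port() -> int:
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        s.bind(("", 0))              # port 0: kernel assigns a free port
        return s.getsockname()[1]    # read back the port that was assigned

print(pick_unused_port())
```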
615 | <reponame>ashutom/tensorflow-upstream<gh_stars>100-1000
/* Copyright 2015 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#ifndef TENSORFLOW_CORE_KERNELS_DROPOUT_OP_H_
#define TENSORFLOW_CORE_KERNELS_DROPOUT_OP_H_
namespace tensorflow {
template <typename Device, typename T>
struct ApplyDropout {
void operator()(const Device& d, T* out, uint8* mask, const T* in, const float* rng_data,
float rate, uint64 num_elements, random::PhiloxRandom gen,
bool seeded) {}
};
template <typename Device, typename T>
struct ApplyDropoutGrad {
void operator()(const Device& d, T* outgrads, const T* grads, const uint8* mask,
float rate, uint64 num_elements) {}
};
#if GOOGLE_CUDA || TENSORFLOW_USE_ROCM
typedef Eigen::GpuDevice GPUDevice;
template <typename T>
struct ApplyDropout<GPUDevice, T> {
void operator()(const GPUDevice& d, T* out, uint8* mask, const T* in,
const float* rng_data, float rate, uint64 num_elements,
random::PhiloxRandom gen, bool seeded);
};
template <typename T>
struct ApplyDropoutGrad<GPUDevice, T> {
void operator()(const GPUDevice& d, T* outgrads, const T* grads, const uint8* mask,
float rate, uint64 num_elements);
};
#endif
}
#endif
| 652 |
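For reference, the functors declared above correspond to the usual inverted-dropout computation: keep each element with probability `1 - rate` and rescale so the expectation is unchanged. Whether this particular kernel applies the `1 / (1 - rate)` scaling in the forward or backward pass is not visible from the header, so treat the scaling convention below as an assumption:

```python
# Hedged NumPy sketch of inverted dropout; the real kernel's RNG and scaling may differ.
import numpy as np

def dropout_forward(x, rate, rng):
    mask = (rng.random(x.shape) >= rate).astype(x.dtype)  # keep with probability 1 - rate
    return x * mask / (1.0 - rate), mask                  # rescale so E[out] == x

def dropout_backward(grads, mask, rate):
    return grads * mask / (1.0 - rate)                    # gradient flows only where kept

rng = np.random.default_rng(0)
x = rng.standard_normal((2, 3)).astype(np.float32)
y, mask = dropout_forward(x, rate=0.5, rng=rng)
dx = dropout_backward(np.ones_like(x), mask, rate=0.5)
```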
542 | <reponame>pecuniafinance/hummingbot
import asyncio
import json
import re
import unittest
from decimal import Decimal
from typing import Any, Awaitable, Dict, List
from unittest.mock import AsyncMock, patch
from aioresponses.core import aioresponses
from bidict import bidict
import hummingbot.connector.derivative.binance_perpetual.constants as CONSTANTS
from hummingbot.connector.derivative.binance_perpetual import binance_perpetual_web_utils as web_utils
from hummingbot.connector.derivative.binance_perpetual.binance_perpetual_api_order_book_data_source import (
BinancePerpetualAPIOrderBookDataSource,
)
from hummingbot.connector.time_synchronizer import TimeSynchronizer
from hummingbot.core.data_type.funding_info import FundingInfo
from hummingbot.core.data_type.order_book import OrderBook
from hummingbot.core.data_type.order_book_message import OrderBookMessage, OrderBookMessageType
from test.hummingbot.connector.network_mocking_assistant import NetworkMockingAssistant
class BinancePerpetualAPIOrderBookDataSourceUnitTests(unittest.TestCase):
# logging.Level required to receive logs from the data source logger
level = 0
@classmethod
def setUpClass(cls) -> None:
super().setUpClass()
cls.ev_loop = asyncio.get_event_loop()
cls.base_asset = "COINALPHA"
cls.quote_asset = "HBOT"
cls.trading_pair = f"{cls.base_asset}-{cls.quote_asset}"
cls.ex_trading_pair = f"{cls.base_asset}{cls.quote_asset}"
cls.domain = "binance_perpetual_testnet"
def setUp(self) -> None:
super().setUp()
self.log_records = []
self.listening_task = None
self.async_tasks: List[asyncio.Task] = []
self.time_synchronizer = TimeSynchronizer()
self.time_synchronizer.add_time_offset_ms_sample(0)
self.data_source = BinancePerpetualAPIOrderBookDataSource(
time_synchronizer=self.time_synchronizer,
trading_pairs=[self.trading_pair],
domain=self.domain,
)
self.data_source.logger().setLevel(1)
self.data_source.logger().addHandler(self)
self.mocking_assistant = NetworkMockingAssistant()
self.resume_test_event = asyncio.Event()
BinancePerpetualAPIOrderBookDataSource._trading_pair_symbol_map = {
self.domain: bidict({self.ex_trading_pair: self.trading_pair})
}
def tearDown(self) -> None:
self.listening_task and self.listening_task.cancel()
for task in self.async_tasks:
task.cancel()
BinancePerpetualAPIOrderBookDataSource._trading_pair_symbol_map = {}
super().tearDown()
def handle(self, record):
self.log_records.append(record)
def async_run_with_timeout(self, coroutine: Awaitable, timeout: float = 1):
ret = self.ev_loop.run_until_complete(asyncio.wait_for(coroutine, timeout))
return ret
def resume_test_callback(self, *_, **__):
self.resume_test_event.set()
return None
def _is_logged(self, log_level: str, message: str) -> bool:
return any(record.levelname == log_level and record.getMessage() == message for record in self.log_records)
def _raise_exception(self, exception_class):
raise exception_class
def _raise_exception_and_unlock_test_with_event(self, exception):
self.resume_test_event.set()
raise exception
def _orderbook_update_event(self):
resp = {
"stream": f"{self.ex_trading_pair.lower()}@depth",
"data": {
"e": "depthUpdate",
"E": 1631591424198,
"T": 1631591424189,
"s": self.ex_trading_pair,
"U": 752409354963,
"u": 752409360466,
"pu": 752409354901,
"b": [
["43614.31", "0.000"],
],
"a": [
["45277.14", "0.257"],
],
},
}
return resp
def _orderbook_trade_event(self):
resp = {
"stream": f"{self.ex_trading_pair.lower()}@aggTrade",
"data": {
"e": "aggTrade",
"E": 1631594403486,
"a": 817295132,
"s": self.ex_trading_pair,
"p": "45266.16",
"q": "2.206",
"f": 1437689393,
"l": 1437689407,
"T": 1631594403330,
"m": False,
},
}
return resp
def _funding_info_event(self):
resp = {
"stream": f"{self.ex_trading_pair.lower()}@markPrice",
"data": {
"e": "markPriceUpdate",
"E": 1641288864000,
"s": self.ex_trading_pair,
"p": "46353.99600757",
"P": "46507.47845460",
"i": "46358.63622407",
"r": "0.00010000",
"T": 1641312000000,
},
}
return resp
@aioresponses()
def test_get_last_traded_prices(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SERVER_TIME_PATH_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
response = {"serverTime": 1640000003000}
mock_api.get(regex_url,
body=json.dumps(response))
url = web_utils.rest_url(path_url=CONSTANTS.TICKER_PRICE_CHANGE_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response: Dict[str, Any] = {
# Truncated responses
"lastPrice": "10.0",
}
mock_api.get(regex_url, body=json.dumps(mock_response))
result: Dict[str, Any] = self.async_run_with_timeout(
self.data_source.get_last_traded_prices(trading_pairs=[self.trading_pair], domain=self.domain)
)
self.assertTrue(self.trading_pair in result)
self.assertEqual(10.0, result[self.trading_pair])
@aioresponses()
def test_init_trading_pair_symbols_failure(self, mock_api):
BinancePerpetualAPIOrderBookDataSource._trading_pair_symbol_map = {}
url = web_utils.rest_url(path_url=CONSTANTS.EXCHANGE_INFO_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_api.get(regex_url, status=400, body=json.dumps(["ERROR"]))
map = self.async_run_with_timeout(self.data_source.trading_pair_symbol_map(
domain=self.domain,
time_synchronizer=self.data_source._time_synchronizer))
self.assertEqual(0, len(map))
@aioresponses()
def test_init_trading_pair_symbols_successful(self, mock_api):
url = web_utils.rest_url(path_url=CONSTANTS.EXCHANGE_INFO_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response: Dict[str, Any] = {
# Truncated Responses
"symbols": [
{
"symbol": self.ex_trading_pair,
"pair": self.ex_trading_pair,
"baseAsset": self.base_asset,
"quoteAsset": self.quote_asset,
"status": "TRADING",
},
{"symbol": "INACTIVEMARKET", "status": "INACTIVE"},
],
}
mock_api.get(regex_url, status=200, body=json.dumps(mock_response))
self.async_run_with_timeout(self.data_source.init_trading_pair_symbols(
domain=self.domain,
time_synchronizer=self.data_source._time_synchronizer))
self.assertEqual(1, len(self.data_source._trading_pair_symbol_map))
@aioresponses()
def test_trading_pair_symbol_map_dictionary_not_initialized(self, mock_api):
BinancePerpetualAPIOrderBookDataSource._trading_pair_symbol_map = {}
url = web_utils.rest_url(path_url=CONSTANTS.EXCHANGE_INFO_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response: Dict[str, Any] = {
# Truncated Responses
"symbols": [
{
"symbol": self.ex_trading_pair,
"pair": self.ex_trading_pair,
"baseAsset": self.base_asset,
"quoteAsset": self.quote_asset,
"status": "TRADING",
},
]
}
mock_api.get(regex_url, status=200, body=json.dumps(mock_response))
self.async_run_with_timeout(self.data_source.trading_pair_symbol_map(
domain=self.domain,
time_synchronizer=self.data_source._time_synchronizer))
self.assertEqual(1, len(self.data_source._trading_pair_symbol_map))
def test_trading_pair_symbol_map_dictionary_initialized(self):
result = self.async_run_with_timeout(self.data_source.trading_pair_symbol_map(
domain=self.domain,
time_synchronizer=self.data_source._time_synchronizer))
self.assertEqual(1, len(result))
def test_convert_from_exchange_trading_pair_not_found(self):
unknown_pair = "UNKNOWN-PAIR"
with self.assertRaisesRegex(ValueError, f"There is no symbol mapping for exchange trading pair {unknown_pair}"):
self.async_run_with_timeout(
self.data_source.convert_from_exchange_trading_pair(unknown_pair, domain=self.domain))
def test_convert_from_exchange_trading_pair_successful(self):
result = self.async_run_with_timeout(
self.data_source.convert_from_exchange_trading_pair(self.ex_trading_pair, domain=self.domain))
self.assertEqual(result, self.trading_pair)
def test_convert_to_exchange_trading_pair_not_found(self):
unknown_pair = "UNKNOWN-PAIR"
with self.assertRaisesRegex(ValueError, f"There is no symbol mapping for trading pair {unknown_pair}"):
self.async_run_with_timeout(
self.data_source.convert_to_exchange_trading_pair(unknown_pair, domain=self.domain))
def test_convert_to_exchange_trading_pair_successful(self):
result = self.async_run_with_timeout(
self.data_source.convert_to_exchange_trading_pair(self.trading_pair, domain=self.domain))
self.assertEqual(result, self.ex_trading_pair)
@aioresponses()
def test_get_snapshot_exception_raised(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SNAPSHOT_REST_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_api.get(regex_url, status=400, body=json.dumps(["ERROR"]))
with self.assertRaises(IOError) as context:
self.async_run_with_timeout(
self.data_source.get_snapshot(
trading_pair=self.trading_pair,
domain=self.domain,
time_synchronizer=self.data_source._time_synchronizer)
)
self.assertEqual("Error executing request GET /depth. HTTP status is 400. Error: [\"ERROR\"]",
str(context.exception))
@aioresponses()
def test_get_snapshot_successful(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SNAPSHOT_REST_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response = {
"lastUpdateId": 1027024,
"E": 1589436922972,
"T": 1589436922959,
"bids": [["10", "1"]],
"asks": [["11", "1"]],
}
mock_api.get(regex_url, status=200, body=json.dumps(mock_response))
result: Dict[str, Any] = self.async_run_with_timeout(
self.data_source.get_snapshot(
trading_pair=self.trading_pair,
domain=self.domain,
time_synchronizer=self.data_source._time_synchronizer)
)
self.assertEqual(mock_response, result)
@aioresponses()
def test_get_new_order_book(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SNAPSHOT_REST_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response = {
"lastUpdateId": 1027024,
"E": 1589436922972,
"T": 1589436922959,
"bids": [["10", "1"]],
"asks": [["11", "1"]],
}
mock_api.get(regex_url, status=200, body=json.dumps(mock_response))
result = self.async_run_with_timeout(self.data_source.get_new_order_book(trading_pair=self.trading_pair))
self.assertIsInstance(result, OrderBook)
self.assertEqual(1027024, result.snapshot_uid)
@aioresponses()
def test_get_funding_info_from_exchange_error_response(self, mock_api):
url = web_utils.rest_url(CONSTANTS.MARK_PRICE_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_api.get(regex_url, status=400)
result = self.async_run_with_timeout(self.data_source._get_funding_info_from_exchange(self.trading_pair))
self.assertIsNone(result)
self._is_logged("ERROR", f"Unable to fetch FundingInfo for {self.trading_pair}. Error: None")
@aioresponses()
def test_get_funding_info_from_exchange_successful(self, mock_api):
url = web_utils.rest_url(CONSTANTS.MARK_PRICE_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response = {
"symbol": self.ex_trading_pair,
"markPrice": "46382.32704603",
"indexPrice": "46385.80064948",
"estimatedSettlePrice": "46510.13598963",
"lastFundingRate": "0.00010000",
"interestRate": "0.00010000",
"nextFundingTime": 1641312000000,
"time": 1641288825000,
}
mock_api.get(regex_url, body=json.dumps(mock_response))
result = self.async_run_with_timeout(self.data_source._get_funding_info_from_exchange(self.trading_pair))
self.assertIsInstance(result, FundingInfo)
self.assertEqual(result.trading_pair, self.trading_pair)
self.assertEqual(result.index_price, Decimal(mock_response["indexPrice"]))
self.assertEqual(result.mark_price, Decimal(mock_response["markPrice"]))
self.assertEqual(result.next_funding_utc_timestamp, mock_response["nextFundingTime"])
self.assertEqual(result.rate, Decimal(mock_response["lastFundingRate"]))
@aioresponses()
def test_get_funding_info(self, mock_api):
self.assertNotIn(self.trading_pair, self.data_source._funding_info)
url = web_utils.rest_url(CONSTANTS.MARK_PRICE_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response = {
"symbol": self.ex_trading_pair,
"markPrice": "46382.32704603",
"indexPrice": "46385.80064948",
"estimatedSettlePrice": "46510.13598963",
"lastFundingRate": "0.00010000",
"interestRate": "0.00010000",
"nextFundingTime": 1641312000000,
"time": 1641288825000,
}
mock_api.get(regex_url, body=json.dumps(mock_response))
result = self.async_run_with_timeout(self.data_source.get_funding_info(trading_pair=self.trading_pair))
self.assertIsInstance(result, FundingInfo)
self.assertEqual(result.trading_pair, self.trading_pair)
self.assertEqual(result.index_price, Decimal(mock_response["indexPrice"]))
self.assertEqual(result.mark_price, Decimal(mock_response["markPrice"]))
self.assertEqual(result.next_funding_utc_timestamp, mock_response["nextFundingTime"])
self.assertEqual(result.rate, Decimal(mock_response["lastFundingRate"]))
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
@patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep")
def test_listen_for_subscriptions_cancelled_when_connecting(self, _, mock_ws):
msg_queue: asyncio.Queue = asyncio.Queue()
mock_ws.side_effect = asyncio.CancelledError
with self.assertRaises(asyncio.CancelledError):
self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions())
self.async_run_with_timeout(self.listening_task)
self.assertEqual(msg_queue.qsize(), 0)
@patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep")
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_listen_for_subscriptions_logs_exception(self, mock_ws, *_):
mock_ws.return_value = self.mocking_assistant.create_websocket_mock()
mock_ws.close.return_value = None
incomplete_resp = {
"m": 1,
"i": 2,
}
self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, json.dumps(incomplete_resp))
self.mocking_assistant.add_websocket_aiohttp_message(
mock_ws.return_value, json.dumps(self._orderbook_update_event())
)
self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions())
try:
self.async_run_with_timeout(self.listening_task)
except asyncio.exceptions.TimeoutError:
pass
self.assertTrue(
self._is_logged("ERROR", "Unexpected error with Websocket connection. Retrying after 30 seconds...")
)
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_listen_for_subscriptions_successful(self, mock_ws):
msg_queue_diffs: asyncio.Queue = asyncio.Queue()
msg_queue_trades: asyncio.Queue = asyncio.Queue()
mock_ws.return_value = self.mocking_assistant.create_websocket_mock()
mock_ws.close.return_value = None
self.mocking_assistant.add_websocket_aiohttp_message(
mock_ws.return_value, json.dumps(self._orderbook_update_event())
)
self.mocking_assistant.add_websocket_aiohttp_message(
mock_ws.return_value, json.dumps(self._orderbook_trade_event())
)
self.mocking_assistant.add_websocket_aiohttp_message(
mock_ws.return_value, json.dumps(self._funding_info_event())
)
self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions())
self.listening_task_diffs = self.ev_loop.create_task(
self.data_source.listen_for_order_book_diffs(self.ev_loop, msg_queue_diffs)
)
self.listening_task_trades = self.ev_loop.create_task(
self.data_source.listen_for_trades(self.ev_loop, msg_queue_trades)
)
self.listening_task_funding_info = self.ev_loop.create_task(self.data_source.listen_for_funding_info())
result: OrderBookMessage = self.async_run_with_timeout(msg_queue_diffs.get())
self.assertIsInstance(result, OrderBookMessage)
self.assertEqual(OrderBookMessageType.DIFF, result.type)
self.assertTrue(result.has_update_id)
self.assertEqual(result.update_id, 752409360466)
self.assertEqual(self.trading_pair, result.content["trading_pair"])
self.assertEqual(1, len(result.content["bids"]))
self.assertEqual(1, len(result.content["asks"]))
result: OrderBookMessage = self.async_run_with_timeout(msg_queue_trades.get())
self.assertIsInstance(result, OrderBookMessage)
self.assertEqual(OrderBookMessageType.TRADE, result.type)
self.assertTrue(result.has_trade_id)
self.assertEqual(result.trade_id, 817295132)
self.assertEqual(self.trading_pair, result.content["trading_pair"])
self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value)
self.assertIn(self.trading_pair, self.data_source.funding_info)
funding_info: FundingInfo = self.data_source.funding_info[self.trading_pair]
self.assertTrue(self.data_source.is_funding_info_initialized)
self.assertEqual(funding_info.trading_pair, self.trading_pair)
self.assertEqual(funding_info.index_price, Decimal(self._funding_info_event()["data"]["i"]))
self.assertEqual(funding_info.mark_price, Decimal(self._funding_info_event()["data"]["p"]))
self.assertEqual(funding_info.next_funding_utc_timestamp, int(self._funding_info_event()["data"]["T"]))
self.assertEqual(funding_info.rate, Decimal(self._funding_info_event()["data"]["r"]))
@aioresponses()
def test_listen_for_order_book_snapshots_cancelled_error_raised(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SNAPSHOT_REST_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_api.get(regex_url, exception=asyncio.CancelledError)
msg_queue: asyncio.Queue = asyncio.Queue()
with self.assertRaises(asyncio.CancelledError):
self.listening_task = self.ev_loop.create_task(
self.data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue)
)
self.async_run_with_timeout(self.listening_task)
self.assertEqual(0, msg_queue.qsize())
@aioresponses()
def test_listen_for_order_book_snapshots_logs_exception_error_with_response(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SNAPSHOT_REST_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response = {
"m": 1,
"i": 2,
}
mock_api.get(regex_url, body=json.dumps(mock_response), callback=self.resume_test_callback)
msg_queue: asyncio.Queue = asyncio.Queue()
self.listening_task = self.ev_loop.create_task(
self.data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue)
)
self.async_run_with_timeout(self.resume_test_event.wait())
self.assertTrue(
self._is_logged("ERROR", "Unexpected error occurred fetching orderbook snapshots. Retrying in 5 seconds...")
)
@aioresponses()
def test_listen_for_order_book_snapshots_successful(self, mock_api):
url = web_utils.rest_url(CONSTANTS.SNAPSHOT_REST_URL, domain=self.domain)
regex_url = re.compile(f"^{url}".replace(".", r"\.").replace("?", r"\?"))
mock_response = {
"lastUpdateId": 1027024,
"E": 1589436922972,
"T": 1589436922959,
"bids": [["10", "1"]],
"asks": [["11", "1"]],
}
mock_api.get(regex_url, body=json.dumps(mock_response))
msg_queue: asyncio.Queue = asyncio.Queue()
self.listening_task = self.ev_loop.create_task(
self.data_source.listen_for_order_book_snapshots(self.ev_loop, msg_queue)
)
result = self.async_run_with_timeout(msg_queue.get())
self.assertIsInstance(result, OrderBookMessage)
self.assertEqual(OrderBookMessageType.SNAPSHOT, result.type)
self.assertTrue(result.has_update_id)
self.assertEqual(result.update_id, 1027024)
self.assertEqual(self.trading_pair, result.content["trading_pair"])
@patch("aiohttp.ClientSession.ws_connect", new_callable=AsyncMock)
def test_listen_for_funding_info_invalid_trading_pair(self, mock_ws):
mock_ws.return_value = self.mocking_assistant.create_websocket_mock()
mock_ws.close.return_value = None
mock_response = {
"stream": "unknown_pair@markPrice",
"data": {
"e": "markPriceUpdate",
"E": 1641288864000,
"s": "unknown_pair",
"p": "46353.99600757",
"P": "46507.47845460",
"i": "46358.63622407",
"r": "0.00010000",
"T": 1641312000000,
},
}
self.mocking_assistant.add_websocket_aiohttp_message(mock_ws.return_value, json.dumps(mock_response))
self.listening_task = self.ev_loop.create_task(self.data_source.listen_for_subscriptions())
self.listening_task_funding_info = self.ev_loop.create_task(self.data_source.listen_for_funding_info())
self.mocking_assistant.run_until_all_aiohttp_messages_delivered(mock_ws.return_value)
self.assertNotIn(self.trading_pair, self.data_source.funding_info)
def test_listen_for_funding_info_cancelled_error_raised(self):
mock_queue = AsyncMock()
mock_queue.get.side_effect = asyncio.CancelledError
self.data_source._message_queue[CONSTANTS.FUNDING_INFO_STREAM_ID] = mock_queue
with self.assertRaises(asyncio.CancelledError):
self.async_run_with_timeout(self.data_source.listen_for_funding_info())
@patch("hummingbot.core.data_type.order_book_tracker_data_source.OrderBookTrackerDataSource._sleep")
def test_listen_for_funding_info_logs_exception(self, mock_sleep):
mock_sleep.side_effect = lambda _: (self.ev_loop.run_until_complete(asyncio.sleep(0.5)))
mock_queue = AsyncMock()
mock_queue.get.side_effect = lambda: (self._raise_exception_and_unlock_test_with_event(Exception("TEST ERROR")))
self.data_source._message_queue[CONSTANTS.FUNDING_INFO_STREAM_ID] = mock_queue
self.listening_task_funding_info = self.ev_loop.create_task(self.data_source.listen_for_funding_info())
self.async_run_with_timeout(self.resume_test_event.wait())
        self.assertTrue(
            self._is_logged(
                "ERROR",
                "Unexpected error occured updating funding information. Retrying in 5 seconds... Error: TEST ERROR",
            )
        )
| 12,072 |
1,144 | <gh_stars>1000+
package de.metas.material.planning.ddorder;
import org.eevolution.model.I_PP_Product_Planning;
import org.springframework.stereotype.Service;
import de.metas.material.planning.IMaterialDemandMatcher;
import de.metas.material.planning.IMaterialPlanningContext;
import de.metas.util.Loggables;
/*
* #%L
* metasfresh-mrp
* %%
* Copyright (C) 2017 metas GmbH
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as
* published by the Free Software Foundation, either version 2 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public
* License along with this program. If not, see
* <http://www.gnu.org/licenses/gpl-2.0.html>.
* #L%
*/
/**
* This implementation figures out if a particular demand could be matched by a DDOrder.<br/>
 * The business logic of the {@link #matches(IMaterialPlanningContext)} method comes from
* <code>/de.metas.adempiere.libero.libero/src/main/java/org/eevolution/mrp/spi/impl/DDOrderMRPSupplyProducer.java</code>
*
* @author metas-dev <<EMAIL>>
*
*/
@Service
public class DDOrderDemandMatcher implements IMaterialDemandMatcher
{
@Override
public boolean matches(final IMaterialPlanningContext mrpContext)
{
final I_PP_Product_Planning productPlanning = mrpContext.getProductPlanning();
// Check if there is a distribution network
if (productPlanning.getDD_NetworkDistribution_ID() <= 0)
{
Loggables.addLog(
"No distribution network configured in product data planning of the given mrp context; DDOrderDemandMatcher returns false; productPlanning={}; mrpContext={}",
productPlanning, mrpContext);
return false;
}
return true;
}
}
| 631 |
348 | <gh_stars>100-1000
{"nom":"Francarville","dpt":"Haute-Garonne","inscrits":128,"abs":23,"votants":105,"blancs":21,"nuls":1,"exp":83,"res":[{"panneau":"1","voix":54},{"panneau":"2","voix":29}]} | 80 |
3,967 | <reponame>cyyever/DALI<filename>dali/kernels/common/split_shape.h<gh_stars>1000+
// Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef DALI_KERNELS_COMMON_SPLIT_SHAPE_H_
#define DALI_KERNELS_COMMON_SPLIT_SHAPE_H_
#include <utility>
#include "dali/core/util.h"
#include "dali/core/tensor_shape.h"
namespace dali {
namespace kernels {
/**
* @brief Utility to divide a bigger shape into smaller blocks, given a desired minimum number
* of blocks and a minimum practical block size.
* The algorithm starts splitting from the outermost dimension until either the number of
* blocks reaches the desired minimum, or until the remaining volume is under a given threshold.
* @remarks The algorithm makes an effort to keep a good balance of block sizes, which might result in
* a higher number of blocks than the minimum requested.
* @param split_factor Output argument used to represent split factors for each dimension.
* @param in_shape Input shape
* @param min_nblocks Desired minimum number of blocks
* @param min_sz Minimum practical block size
* @param skip_dim_mask Bitmask representing which dimensions should not be split
* @return product of split_factor
*/
template <typename SplitFactor, typename Shape>
int split_shape(SplitFactor& split_factor, const Shape& in_shape, int min_nblocks,
int min_sz = 16000, uint64_t skip_dim_mask = 0) {
int ndim = dali::size(in_shape);
assert(static_cast<int>(dali::size(split_factor)) == ndim);
for (int d = 0; d < ndim; d++)
split_factor[d] = 1;
int64_t vol = volume(in_shape);
for (int d = 0, nblocks = 1; d < ndim && nblocks < min_nblocks && vol > min_sz; d++) {
if (skip_dim_mask & (1_u64 << d))
continue;
int n = in_shape[d];
int &b = split_factor[d];
auto remaining = div_ceil(min_nblocks, nblocks);
constexpr int kThreshold = 4;
    // Use ``remaining`` as the split factor only when it is small compared to
    // the extent (remaining * kThreshold < n); otherwise split the dimension
    // fully, which keeps the block sizes balanced.
if (remaining * kThreshold < n) {
b = remaining;
nblocks *= b;
assert(nblocks >= min_nblocks);
break;
}
b = n;
nblocks *= b;
vol = div_ceil(vol, b);
}
return volume(split_factor);
}
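// Illustrative usage sketch (added here for clarity; not part of the original
// header). The shape and parameters are arbitrary assumptions chosen only to
// show how the split factors come out:
//
//   TensorShape<3> in_shape = {8, 128, 1024};   // volume = 1,048,576
//   TensorShape<3> split;                       // filled in by split_shape
//   int nblocks = split_shape(split, in_shape, /*min_nblocks=*/32);
//   // d=0: remaining = 32, 32 * kThreshold >= 8   -> split fully: {8, 1, 1}
//   // d=1: remaining = 4,  4 * kThreshold  < 128  -> factor 4:    {8, 4, 1}
//   // nblocks == 32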
/**
 * @brief Returns the index of the last (innermost) dimension with a split
 *        factor > 1, or -1 if no dimension is split.
*/
template <typename SplitFactor>
int LastSplitDim(const SplitFactor& split_factor) {
int last_split_dim = -1;
int ndim = dali::size(split_factor);
for (int d = ndim - 1; d >= 0; d--) {
if (split_factor[d] > 1) {
last_split_dim = d;
break;
}
}
return last_split_dim;
}
/**
* @brief Iterates over blocks, based on a split factor for each dimension
* @param start start coordinates of the region
* @param end end coordinates of the region
* @param split_factor split factor for each dimension in the region
* @param d Current dimension
* @param max_split_dim last dimension with a split factor different than 1.
* @param func Function to run for each block.
*/
template <int ndim, typename SplitFactor, typename OnBlockFunc>
void ForEachBlock(TensorShape<ndim> start, TensorShape<ndim> end, const SplitFactor& split_factor,
int d, int max_split_dim, OnBlockFunc&& func) {
assert(start.size() == end.size());
if (d > max_split_dim || d == start.size()) {
func(start, end);
return;
}
if (split_factor[d] == 1) {
ForEachBlock(start, end, split_factor, d + 1, max_split_dim,
std::forward<OnBlockFunc>(func));
return;
}
int64_t start_d = start[d];
int64_t extent_d = end[d] - start_d;
int nblocks_d = split_factor[d];
int64_t prev_end = start_d;
for (int b = 0; b < nblocks_d; b++) {
start[d] = prev_end;
end[d] = prev_end = extent_d * (b + 1) / nblocks_d + start_d;
ForEachBlock(start, end, split_factor, d + 1, max_split_dim,
std::forward<OnBlockFunc>(func));
}
}
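// Illustrative sketch (added for clarity; not part of the original header).
// The region, split factor and callback below are assumptions used only to
// show the traversal order:
//
//   TensorShape<2> start = {0, 0}, end = {10, 7};
//   TensorShape<2> split = {2, 1};
//   ForEachBlock(start, end, split, 0, LastSplitDim(split),
//                [](const auto &s, const auto &e) {
//                  // invoked with s = {0, 0}, e = {5, 7}
//                  // and then with s = {5, 0}, e = {10, 7}
//                });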
} // namespace kernels
} // namespace dali
#endif // DALI_KERNELS_COMMON_SPLIT_SHAPE_H_
| 1,631 |
839 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cxf.systest.ws.addr_fromjava;
import java.io.ByteArrayOutputStream;
import java.net.URL;
import java.util.List;
import javax.xml.ws.BindingProvider;
import javax.xml.ws.soap.SOAPFaultException;
import org.apache.cxf.binding.soap.SoapMessage;
import org.apache.cxf.binding.soap.interceptor.AbstractSoapInterceptor;
import org.apache.cxf.binding.soap.interceptor.ReadHeadersInterceptor;
import org.apache.cxf.endpoint.Client;
import org.apache.cxf.ext.logging.LoggingFeature;
import org.apache.cxf.frontend.ClientProxy;
import org.apache.cxf.headers.Header;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.phase.Phase;
import org.apache.cxf.systest.ws.AbstractWSATestBase;
import org.apache.cxf.systest.ws.addr_fromjava.client.AddNumberImpl;
import org.apache.cxf.systest.ws.addr_fromjava.client.AddNumberImplService;
import org.apache.cxf.systest.ws.addr_fromjava.client.AddNumbersException_Exception;
import org.apache.cxf.systest.ws.addr_fromjava.server.Server;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class WSAFromJavaTest extends AbstractWSATestBase {
static final String PORT = allocatePort(Server.class);
@Before
public void setUp() throws Exception {
createBus();
}
@BeforeClass
public static void startServers() throws Exception {
assertTrue("server did not launch correctly", launchServer(Server.class, true));
}
@Test
public void testAddNumbers() throws Exception {
ByteArrayOutputStream input = setupInLogging();
ByteArrayOutputStream output = setupOutLogging();
AddNumberImpl port = getPort();
assertEquals(3, port.addNumbers(1, 2));
String expectedOut = "http://cxf.apache.org/input";
assertTrue(output.toString().indexOf(expectedOut) != -1);
String expectedIn = "http://cxf.apache.org/output";
assertTrue(input.toString().indexOf(expectedIn) != -1);
}
@Test
public void testAddNumbersFault() throws Exception {
ByteArrayOutputStream input = setupInLogging();
ByteArrayOutputStream output = setupOutLogging();
AddNumberImpl port = getPort();
try {
port.addNumbers(-1, 2);
} catch (AddNumbersException_Exception e) {
assert true;
} catch (Exception e) {
e.printStackTrace();
assert false;
}
assertTrue(output.toString().indexOf("http://cxf.apache.org/input") != -1);
String expectedFault =
"http://server.addr_fromjava.ws.systest.cxf.apache.org/AddNumberImpl/"
+ "addNumbers/Fault/AddNumbersException";
assertTrue(input.toString(),
input.toString().indexOf(expectedFault) != -1);
}
@Test
public void testAddNumbers2() throws Exception {
ByteArrayOutputStream input = setupInLogging();
ByteArrayOutputStream output = setupOutLogging();
AddNumberImpl port = getPort();
assertEquals(3, port.addNumbers2(1, 2));
String base = "http://server.addr_fromjava.ws.systest.cxf.apache.org/AddNumberImpl";
String expectedOut = base + "/addNumbers2";
assertTrue(output.toString().indexOf(expectedOut) != -1);
String expectedIn = base + "/addNumbers2Response";
assertTrue(input.toString().indexOf(expectedIn) != -1);
}
@Test
public void testAddNumbers3Fault() throws Exception {
ByteArrayOutputStream input = setupInLogging();
ByteArrayOutputStream output = setupOutLogging();
AddNumberImpl port = getPort();
try {
port.addNumbers3(-1, 2);
} catch (AddNumbersException_Exception e) {
assert true;
} catch (Exception e) {
e.printStackTrace();
assert false;
}
assertTrue(output.toString(), output.toString().indexOf("http://cxf.apache.org/input") != -1);
assertTrue(input.toString(), input.toString().indexOf("http://cxf.apache.org/fault3") != -1);
}
@Test
public void testAddNumbersJaxWsContext() throws Exception {
ByteArrayOutputStream output = setupOutLogging();
AddNumberImpl port = getPort();
BindingProvider bp = (BindingProvider)port;
java.util.Map<String, Object> requestContext = bp.getRequestContext();
requestContext.put(BindingProvider.SOAPACTION_URI_PROPERTY, "cxf");
try {
assertEquals(3, port.addNumbers(1, 2));
fail("Should have thrown an ActionNotSupported exception");
} catch (SOAPFaultException ex) {
//expected
}
assertLogContains(output.toString(), "//wsa:Action", "cxf");
assertTrue(output.toString(), output.toString().indexOf("SOAPAction=\"cxf\"") != -1);
}
private AddNumberImpl getPort() throws Exception {
URL wsdl = getClass().getResource("/wsdl_systest_wsspec/add_numbers-fromjava.wsdl");
assertNotNull("WSDL is null", wsdl);
AddNumberImplService service = new AddNumberImplService(wsdl);
assertNotNull("Service is null ", service);
AddNumberImpl port = service.getAddNumberImplPort();
updateAddressPort(port, PORT);
return port;
}
@Test
public void testUnmatchedActions() throws Exception {
AddNumberImpl port = getPort();
BindingProvider bp = (BindingProvider)port;
java.util.Map<String, Object> requestContext = bp.getRequestContext();
requestContext.put(BindingProvider.SOAPACTION_URI_PROPERTY,
"http://cxf.apache.org/input4");
try {
//CXF-2035
port.addNumbers3(-1, -1);
} catch (Exception e) {
assertTrue(e.getMessage().contains("Unexpected wrapper"));
}
}
@Test
public void testFaultFromNonAddressService() throws Exception {
new LoggingFeature().initialize(this.getBus());
AddNumberImpl port = getPort();
java.util.Map<String, Object> requestContext = ((BindingProvider)port).getRequestContext();
requestContext.put(BindingProvider.ENDPOINT_ADDRESS_PROPERTY,
"http://localhost:" + PORT + "/AddNumberImplPort-noaddr");
long start = System.currentTimeMillis();
port.addNumbers(1, 2);
try {
port.addNumbers3(-1, -1);
} catch (Exception ex) {
//ignore, expected
}
long end = System.currentTimeMillis();
assertTrue((end - start) < 50000);
}
static class RemoveRelatesToHeaderInterceptor extends AbstractSoapInterceptor {
RemoveRelatesToHeaderInterceptor() {
super(Phase.READ);
addAfter(ReadHeadersInterceptor.class.getName());
}
public void handleMessage(SoapMessage message) throws Fault {
List<Header> headers = message.getHeaders();
Header h2 = null;
for (Header h : headers) {
if ("RelatesTo".equals(h.getName().getLocalPart())) {
h2 = h;
}
}
headers.remove(h2);
}
}
@Test
public void testNoRelatesToHeader() throws Exception {
new LoggingFeature().initialize(this.getBus());
AddNumberImpl port = getPort();
Client c = ClientProxy.getClient(port);
c.getInInterceptors().add(new RemoveRelatesToHeaderInterceptor());
long start = System.currentTimeMillis();
port.addNumbers(1, 2);
try {
port.addNumbers3(-1, -1);
} catch (Exception ex) {
//ignore, expected
}
long end = System.currentTimeMillis();
assertTrue((end - start) < 50000);
}
} | 3,456 |
324 | <filename>apis/rackspace-cloudloadbalancers/src/main/java/org/jclouds/rackspace/cloudloadbalancers/v1/options/ListOptions.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.rackspace.cloudloadbalancers.v1.options;
import static com.google.common.base.Preconditions.checkArgument;
import org.jclouds.http.options.BaseHttpRequestOptions;
import com.google.common.collect.ImmutableSet;
/**
* To reduce load on the service, list operations will return a maximum of 100 items at a time. To
* navigate the collection, the limit and marker parameters (for example, ?limit=50&marker=1 ) can
* be set in the URI. If a marker beyond the end of a list is given, an empty list is returned. Note
* that list operations never return 404 (itemNotFound) faults.
*
* @see <a
* href="http://docs.rackspacecloud.com/loadbalancers/api/v1.0/clb-devguide/content/ch03s06.html"
* />
*/
public class ListOptions extends BaseHttpRequestOptions {
public static final ListOptions NONE = new ListOptions();
/**
* Indicates where to begin listing, if the previous list was larger than the limit.
*/
public ListOptions marker(String marker) {
checkArgument(marker != null, "marker cannot be null");
queryParameters.replaceValues("marker", ImmutableSet.of(marker));
return this;
}
/**
* To reduce load on the service, list operations will return a maximum of 100 items at a time.
* <p/>
* Note that list operations never return itemNotFound (404) faults.
*/
public ListOptions limit(int limit) {
checkArgument(limit >= 0, "limit must be >= 0");
checkArgument(limit <= 10000, "limit must be <= 10000");
queryParameters.replaceValues("limit", ImmutableSet.of(limit + ""));
return this;
}
public static class Builder {
/**
    * @see ListOptions#marker(String)
*/
public static ListOptions marker(String marker) {
ListOptions options = new ListOptions();
return options.marker(marker);
}
/**
    * @see ListOptions#limit(int)
*/
public static ListOptions limit(int limit) {
ListOptions options = new ListOptions();
return options.limit(limit);
}
}
}
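// Illustrative usage (added for clarity; not part of the original class):
// fetch pages of at most 50 items, resuming from the marker returned by a
// previous listing. The marker value "157" is a placeholder, not a real id.
//
//   ListOptions options = ListOptions.Builder.marker("157").limit(50);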
| 943 |
14,668 | <reponame>zealoussnow/chromium
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "gpu/config/gpu_info_collector.h"
// C system before C++ system.
#include <stddef.h>
#include <stdint.h>
// This has to be included before windows.h.
#include "third_party/re2/src/re2/re2.h"
#include <windows.h>
#include <d3d11.h>
#include <d3d11_3.h>
#include <d3d12.h>
#include <dxgi.h>
#include <vulkan/vulkan.h>
#include <wrl/client.h>
#include "base/file_version_info_win.h"
#include "base/files/file_path.h"
#include "base/files/file_util.h"
#include "base/logging.h"
#include "base/metrics/histogram_functions.h"
#include "base/metrics/histogram_macros.h"
#include "base/numerics/safe_conversions.h"
#include "base/scoped_native_library.h"
#include "base/strings/stringprintf.h"
#include "base/trace_event/trace_event.h"
#include "base/win/scoped_com_initializer.h"
#include "base/win/windows_version.h"
#include "build/branding_buildflags.h"
#include "gpu/config/gpu_util.h"
#include "ui/gl/direct_composition_surface_win.h"
#include "ui/gl/gl_angle_util_win.h"
#include "ui/gl/gl_surface_egl.h"
namespace gpu {
namespace {
// TODO(magchen@): Remove PFN_D3D12_CREATE_DEVICE_CHROMIUM and use
// PFN_D3D12_CREATE_DEVICE from d3d12.h directly once the Windows toolchain is
// updated.
// Declaration for D3D12CreateDevice() with D3D_FEATURE_LEVEL_12_2 support in
// D3D_FEATURE_LEVEL_CHROMIUM.
typedef HRESULT(WINAPI* PFN_D3D12_CREATE_DEVICE_CHROMIUM)(
_In_opt_ IUnknown*,
D3D_FEATURE_LEVEL_CHROMIUM,
_In_ REFIID,
_COM_Outptr_opt_ void**);
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
// This should match enum D3D12FeatureLevel in
// \tools\metrics\histograms\enums.xml
enum class D3D12FeatureLevel {
kD3DFeatureLevelUnknown = 0,
kD3DFeatureLevel_12_0 = 1,
kD3DFeatureLevel_12_1 = 2,
kD3DFeatureLevel_11_0 = 3,
kD3DFeatureLevel_11_1 = 4,
kD3DFeatureLevel_12_2 = 5,
kMaxValue = kD3DFeatureLevel_12_2,
};
inline D3D12FeatureLevel ConvertToHistogramFeatureLevel(
uint32_t d3d_feature_level) {
switch (d3d_feature_level) {
case 0:
return D3D12FeatureLevel::kD3DFeatureLevelUnknown;
case D3D12_FEATURE_LEVEL_12_0:
return D3D12FeatureLevel::kD3DFeatureLevel_12_0;
case D3D12_FEATURE_LEVEL_12_1:
return D3D12FeatureLevel::kD3DFeatureLevel_12_1;
case D3D12_FEATURE_LEVEL_12_2:
return D3D12FeatureLevel::kD3DFeatureLevel_12_2;
case D3D12_FEATURE_LEVEL_11_0:
return D3D12FeatureLevel::kD3DFeatureLevel_11_0;
case D3D12_FEATURE_LEVEL_11_1:
return D3D12FeatureLevel::kD3DFeatureLevel_11_1;
default:
NOTREACHED();
return D3D12FeatureLevel::kD3DFeatureLevelUnknown;
}
}
// These values are persisted to logs. Entries should not be renumbered and
// numeric values should never be reused.
enum class D3D12ShaderModel {
kUnknownOrNoD3D12Devices = 0,
kD3DShaderModel_5_1 = 1,
kD3DShaderModel_6_0 = 2,
kD3DShaderModel_6_1 = 3,
kD3DShaderModel_6_2 = 4,
kD3DShaderModel_6_3 = 5,
kD3DShaderModel_6_4 = 6,
kD3DShaderModel_6_5 = 7,
kD3DShaderModel_6_6 = 8,
kMaxValue = kD3DShaderModel_6_6,
};
D3D12ShaderModel ConvertToHistogramShaderVersion(uint32_t version) {
switch (version) {
case 0:
return D3D12ShaderModel::kUnknownOrNoD3D12Devices;
case D3D_SHADER_MODEL_5_1:
return D3D12ShaderModel::kD3DShaderModel_5_1;
case D3D_SHADER_MODEL_6_0:
return D3D12ShaderModel::kD3DShaderModel_6_0;
case D3D_SHADER_MODEL_6_1:
return D3D12ShaderModel::kD3DShaderModel_6_1;
case D3D_SHADER_MODEL_6_2:
return D3D12ShaderModel::kD3DShaderModel_6_2;
case D3D_SHADER_MODEL_6_3:
return D3D12ShaderModel::kD3DShaderModel_6_3;
case D3D_SHADER_MODEL_6_4:
return D3D12ShaderModel::kD3DShaderModel_6_4;
case D3D_SHADER_MODEL_6_5:
return D3D12ShaderModel::kD3DShaderModel_6_5;
case D3D_SHADER_MODEL_6_6:
return D3D12ShaderModel::kD3DShaderModel_6_6;
default:
NOTREACHED();
return D3D12ShaderModel::kUnknownOrNoD3D12Devices;
}
}
OverlaySupport FlagsToOverlaySupport(bool overlays_supported, UINT flags) {
if (flags & DXGI_OVERLAY_SUPPORT_FLAG_SCALING)
return OverlaySupport::kScaling;
if (flags & DXGI_OVERLAY_SUPPORT_FLAG_DIRECT)
return OverlaySupport::kDirect;
if (overlays_supported)
return OverlaySupport::kSoftware;
return OverlaySupport::kNone;
}
bool GetActiveAdapterLuid(LUID* luid) {
Microsoft::WRL::ComPtr<ID3D11Device> d3d11_device =
gl::QueryD3D11DeviceObjectFromANGLE();
if (!d3d11_device)
return false;
Microsoft::WRL::ComPtr<IDXGIDevice> dxgi_device;
if (FAILED(d3d11_device.As(&dxgi_device)))
return false;
Microsoft::WRL::ComPtr<IDXGIAdapter> adapter;
if (FAILED(dxgi_device->GetAdapter(&adapter)))
return false;
DXGI_ADAPTER_DESC desc;
if (FAILED(adapter->GetDesc(&desc)))
return false;
// Zero isn't a valid LUID.
if (desc.AdapterLuid.HighPart == 0 && desc.AdapterLuid.LowPart == 0)
return false;
*luid = desc.AdapterLuid;
return true;
}
} // namespace
#if BUILDFLAG(GOOGLE_CHROME_BRANDING) && defined(OFFICIAL_BUILD)
// This function has a real implementation for official builds that can
// be found in src/third_party/amd.
bool GetAMDSwitchableInfo(bool* is_switchable,
uint32_t* active_vendor_id,
uint32_t* active_device_id);
#else
bool GetAMDSwitchableInfo(bool* is_switchable,
uint32_t* active_vendor_id,
uint32_t* active_device_id) {
return false;
}
#endif
// This has to be called after a context is created, active GPU is identified,
// and GPU driver bug workarounds are computed again. Otherwise the workaround
// |disable_direct_composition| may not be correctly applied.
// Also, this has to be called after falling back to SwiftShader decision is
// finalized because this function depends on GL is ANGLE's GLES or not.
void CollectHardwareOverlayInfo(OverlayInfo* overlay_info) {
if (gl::GetGLImplementation() == gl::kGLImplementationEGLANGLE) {
overlay_info->direct_composition =
gl::DirectCompositionSurfaceWin::IsDirectCompositionSupported();
overlay_info->supports_overlays =
gl::DirectCompositionSurfaceWin::AreOverlaysSupported();
overlay_info->nv12_overlay_support = FlagsToOverlaySupport(
overlay_info->supports_overlays,
gl::DirectCompositionSurfaceWin::GetOverlaySupportFlags(
DXGI_FORMAT_NV12));
overlay_info->yuy2_overlay_support = FlagsToOverlaySupport(
overlay_info->supports_overlays,
gl::DirectCompositionSurfaceWin::GetOverlaySupportFlags(
DXGI_FORMAT_YUY2));
overlay_info->bgra8_overlay_support = FlagsToOverlaySupport(
overlay_info->supports_overlays,
gl::DirectCompositionSurfaceWin::GetOverlaySupportFlags(
DXGI_FORMAT_B8G8R8A8_UNORM));
overlay_info->rgb10a2_overlay_support = FlagsToOverlaySupport(
overlay_info->supports_overlays,
gl::DirectCompositionSurfaceWin::GetOverlaySupportFlags(
DXGI_FORMAT_R10G10B10A2_UNORM));
}
}
bool CollectDriverInfoD3D(GPUInfo* gpu_info) {
TRACE_EVENT0("gpu", "CollectDriverInfoD3D");
Microsoft::WRL::ComPtr<IDXGIFactory1> dxgi_factory;
HRESULT hr = ::CreateDXGIFactory1(IID_PPV_ARGS(&dxgi_factory));
if (FAILED(hr))
return false;
bool found_amd = false;
bool found_intel = false;
bool found_nvidia = false;
UINT i;
Microsoft::WRL::ComPtr<IDXGIAdapter> dxgi_adapter;
for (i = 0; SUCCEEDED(dxgi_factory->EnumAdapters(i, &dxgi_adapter)); i++) {
DXGI_ADAPTER_DESC desc;
dxgi_adapter->GetDesc(&desc);
GPUInfo::GPUDevice device;
device.vendor_id = desc.VendorId;
device.device_id = desc.DeviceId;
device.sub_sys_id = desc.SubSysId;
device.revision = desc.Revision;
device.luid =
CHROME_LUID{desc.AdapterLuid.LowPart, desc.AdapterLuid.HighPart};
LARGE_INTEGER umd_version;
hr = dxgi_adapter->CheckInterfaceSupport(__uuidof(IDXGIDevice),
&umd_version);
if (SUCCEEDED(hr)) {
device.driver_version = base::StringPrintf(
"%d.%d.%d.%d", HIWORD(umd_version.HighPart),
LOWORD(umd_version.HighPart), HIWORD(umd_version.LowPart),
LOWORD(umd_version.LowPart));
} else {
DLOG(ERROR) << "Unable to retrieve the umd version of adapter: "
<< desc.Description << " HR: " << std::hex << hr;
}
switch (device.vendor_id) {
case 0x8086:
found_intel = true;
break;
case 0x1002:
found_amd = true;
break;
case 0x10de:
found_nvidia = true;
break;
default:
break;
}
if (i == 0) {
gpu_info->gpu = device;
} else {
gpu_info->secondary_gpus.push_back(device);
}
}
if (found_intel && base::win::GetVersion() < base::win::Version::WIN10) {
// Since Windows 10 (and Windows 8.1 on some systems), switchable graphics
    // platforms are managed by Windows and each adapter is accessible as a
    // separate device.
// See https://msdn.microsoft.com/en-us/windows/dn265501(v=vs.80)
if (found_amd) {
bool is_amd_switchable = false;
uint32_t active_vendor = 0, active_device = 0;
GetAMDSwitchableInfo(&is_amd_switchable, &active_vendor, &active_device);
gpu_info->amd_switchable = is_amd_switchable;
} else if (found_nvidia) {
// nvd3d9wrap.dll is loaded into all processes when Optimus is enabled.
HMODULE nvd3d9wrap = GetModuleHandleW(L"nvd3d9wrap.dll");
gpu_info->optimus = nvd3d9wrap != nullptr;
}
}
return i > 0;
}
// CanCreateD3D12Device returns true/false depending on whether D3D12 device
// creation should be attempted on the passed in adapter. Returns false if there
// are known driver bugs.
bool CanCreateD3D12Device(IDXGIAdapter* dxgi_adapter) {
DXGI_ADAPTER_DESC desc;
HRESULT hr = dxgi_adapter->GetDesc(&desc);
if (FAILED(hr)) {
return false;
}
// Known driver bugs are Intel-only. Expand in the future, as necessary, for
// other IHVs.
if (desc.VendorId != 0x8086)
return true;
LARGE_INTEGER umd_version;
hr = dxgi_adapter->CheckInterfaceSupport(__uuidof(IDXGIDevice), &umd_version);
if (FAILED(hr)) {
return false;
}
// On certain Intel drivers, the driver will crash if you call
// D3D12CreateDevice and the command line of the process is greater than 1024
// bytes. 100.9416 is the first driver to introduce the bug, while 100.9664 is
// the first driver to fix it.
if (HIWORD(umd_version.LowPart) == 100 &&
LOWORD(umd_version.LowPart) >= 9416 &&
LOWORD(umd_version.LowPart) < 9664) {
const char* command_line = GetCommandLineA();
const size_t command_line_length = strlen(command_line);
// Check for 1023 since strlen doesn't include the null terminator.
if (command_line_length > 1023) {
return false;
}
}
return true;
}
// DirectX 12 is included with Windows 10 and Server 2016.
void GetGpuSupportedD3D12Version(uint32_t& d3d12_feature_level,
uint32_t& highest_shader_model_version) {
TRACE_EVENT0("gpu", "GetGpuSupportedD3D12Version");
  // Initialize to 0 to indicate an unknown type in UMA.
d3d12_feature_level = 0;
highest_shader_model_version = 0;
base::ScopedNativeLibrary d3d12_library(
base::FilePath(FILE_PATH_LITERAL("d3d12.dll")));
if (!d3d12_library.is_valid())
return;
// The order of feature levels to attempt to create in D3D CreateDevice
const D3D_FEATURE_LEVEL_CHROMIUM feature_levels[] = {
D3D12_FEATURE_LEVEL_12_2, D3D12_FEATURE_LEVEL_12_1,
D3D12_FEATURE_LEVEL_12_0, D3D12_FEATURE_LEVEL_11_1,
D3D12_FEATURE_LEVEL_11_0};
PFN_D3D12_CREATE_DEVICE_CHROMIUM D3D12CreateDevice =
reinterpret_cast<PFN_D3D12_CREATE_DEVICE_CHROMIUM>(
d3d12_library.GetFunctionPointer("D3D12CreateDevice"));
Microsoft::WRL::ComPtr<ID3D12Device> d3d12_device;
if (D3D12CreateDevice) {
Microsoft::WRL::ComPtr<IDXGIFactory1> dxgi_factory;
HRESULT hr = ::CreateDXGIFactory1(IID_PPV_ARGS(&dxgi_factory));
if (FAILED(hr)) {
return;
}
Microsoft::WRL::ComPtr<IDXGIAdapter> dxgi_adapter;
hr = dxgi_factory->EnumAdapters(0, &dxgi_adapter);
if (FAILED(hr)) {
return;
}
if (!CanCreateD3D12Device(dxgi_adapter.Get())) {
return;
}
// For the default adapter only: EnumAdapters(0, ...).
// Check to see if the adapter supports Direct3D 12.
for (auto level : feature_levels) {
if (SUCCEEDED(D3D12CreateDevice(dxgi_adapter.Get(), level,
_uuidof(ID3D12Device), &d3d12_device))) {
d3d12_feature_level = level;
break;
}
}
}
// Query the maximum supported shader model version.
if (d3d12_device) {
D3D12_FEATURE_DATA_SHADER_MODEL shader_model_data = {};
shader_model_data.HighestShaderModel = D3D_SHADER_MODEL_6_6;
if (SUCCEEDED(d3d12_device->CheckFeatureSupport(
D3D12_FEATURE_SHADER_MODEL, &shader_model_data,
sizeof(shader_model_data)))) {
highest_shader_model_version = shader_model_data.HighestShaderModel;
}
}
}
// The old graphics drivers are installed to the Windows system directory
// c:\windows\system32 or SysWOW64. Those versions can be detected without
// specifying the absolute directory. For a newer version (>= ~2018), this won't
// work. The newer graphics drivers are located in
// c:\windows\system32\DriverStore\FileRepository\xxx.infxxx which contains a
// different number at each installation
bool BadAMDVulkanDriverVersion() {
// Both 32-bit and 64-bit dll are broken. If 64-bit doesn't exist,
// 32-bit dll will be used to detect the AMD Vulkan driver.
const base::FilePath kAmdDriver64(FILE_PATH_LITERAL("amdvlk64.dll"));
const base::FilePath kAmdDriver32(FILE_PATH_LITERAL("amdvlk32.dll"));
std::unique_ptr<FileVersionInfoWin> file_version_info =
FileVersionInfoWin::CreateFileVersionInfoWin(kAmdDriver64);
if (!file_version_info) {
file_version_info =
FileVersionInfoWin::CreateFileVersionInfoWin(kAmdDriver32);
if (!file_version_info)
return false;
}
base::Version amd_version = file_version_info->GetFileVersion();
// From the Canary crash logs, the broken amdvlk64.dll versions
// are 1.0.39.0, 1.0.51.0 and 1.0.54.0. In the manual test, version
  // 9.2.10.1 dated 12/6/2017 works and version 1.0.54.0 dated 11/2/2017
  // crashes. All versions up to and including 1.0.54.0 will be marked as
  // broken.
const base::Version kBadAMDVulkanDriverVersion("1.0.54.0");
// CompareTo() returns -1, 0, 1 for <, ==, >.
if (amd_version.CompareTo(kBadAMDVulkanDriverVersion) != 1)
return true;
return false;
}
// Vulkan 1.1 was released by the Khronos Group on March 7, 2018.
// Blocklist all driver versions without Vulkan 1.1 support and those that cause
// lots of crashes.
bool BadGraphicsDriverVersions(const gpu::GPUInfo::GPUDevice& gpu_device) {
// GPU Device info is not available in gpu_integration_test.info-collection
// with --no-delay-for-dx12-vulkan-info-collection.
if (gpu_device.driver_version.empty())
return false;
base::Version driver_version(gpu_device.driver_version);
if (!driver_version.IsValid())
return true;
// AMD Vulkan drivers - amdvlk64.dll
constexpr uint32_t kAMDVendorId = 0x1002;
if (gpu_device.vendor_id == kAMDVendorId) {
// 26.20.12028.2 (2019)- number of crashes 1,188,048 as of 5/14/2020.
// Returns -1, 0, 1 for <, ==, >.
if (driver_version.CompareTo(base::Version("26.20.12028.2")) == 0)
return true;
}
return false;
}
bool InitVulkan(base::NativeLibrary* vulkan_library,
PFN_vkGetInstanceProcAddr* vkGetInstanceProcAddr,
PFN_vkCreateInstance* vkCreateInstance,
uint32_t* vulkan_version) {
*vulkan_version = 0;
*vulkan_library =
base::LoadNativeLibrary(base::FilePath(L"vulkan-1.dll"), nullptr);
if (!(*vulkan_library)) {
return false;
}
*vkGetInstanceProcAddr = reinterpret_cast<PFN_vkGetInstanceProcAddr>(
base::GetFunctionPointerFromNativeLibrary(*vulkan_library,
"vkGetInstanceProcAddr"));
if (*vkGetInstanceProcAddr) {
*vulkan_version = VK_MAKE_VERSION(1, 0, 0);
PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion;
vkEnumerateInstanceVersion =
reinterpret_cast<PFN_vkEnumerateInstanceVersion>(
(*vkGetInstanceProcAddr)(nullptr, "vkEnumerateInstanceVersion"));
// If the vkGetInstanceProcAddr returns nullptr for
// vkEnumerateInstanceVersion, it is a Vulkan 1.0 implementation.
if (!vkEnumerateInstanceVersion) {
return false;
}
// Return value can be VK_SUCCESS or VK_ERROR_OUT_OF_HOST_MEMORY.
if (vkEnumerateInstanceVersion(vulkan_version) != VK_SUCCESS) {
return false;
}
// The minimum version required for Vulkan to be enabled is 1.1.0.
// No further queries will be called for early versions. They are unstable
// and might cause crashes.
if (*vulkan_version < VK_MAKE_VERSION(1, 1, 0)) {
return false;
}
*vkCreateInstance = reinterpret_cast<PFN_vkCreateInstance>(
(*vkGetInstanceProcAddr)(nullptr, "vkCreateInstance"));
if (*vkCreateInstance)
return true;
}
// From the crash reports, unloading the library here might cause a crash in
// the Vulkan loader or in the Vulkan driver. To work around it, don't
// explicitly unload the DLL. Instead, GPU process shutdown will unload all
// loaded DLLs.
// base::UnloadNativeLibrary(*vulkan_library);
return false;
}
bool InitVulkanInstanceProc(
const VkInstance& vk_instance,
const PFN_vkGetInstanceProcAddr& vkGetInstanceProcAddr,
PFN_vkEnumeratePhysicalDevices* vkEnumeratePhysicalDevices,
PFN_vkEnumerateDeviceExtensionProperties*
vkEnumerateDeviceExtensionProperties) {
*vkEnumeratePhysicalDevices =
reinterpret_cast<PFN_vkEnumeratePhysicalDevices>(
vkGetInstanceProcAddr(vk_instance, "vkEnumeratePhysicalDevices"));
*vkEnumerateDeviceExtensionProperties =
reinterpret_cast<PFN_vkEnumerateDeviceExtensionProperties>(
vkGetInstanceProcAddr(vk_instance,
"vkEnumerateDeviceExtensionProperties"));
if ((*vkEnumeratePhysicalDevices) &&
(*vkEnumerateDeviceExtensionProperties)) {
return true;
}
return false;
}
uint32_t GetGpuSupportedVulkanVersion(
const gpu::GPUInfo::GPUDevice& gpu_device) {
TRACE_EVENT0("gpu", "GetGpuSupportedVulkanVersion");
base::NativeLibrary vulkan_library;
PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr;
PFN_vkCreateInstance vkCreateInstance;
PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices;
PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties;
VkInstance vk_instance = VK_NULL_HANDLE;
uint32_t physical_device_count = 0;
// Skip if the system has an older AMD Vulkan driver amdvlk64.dll or
// amdvlk32.dll which crashes when vkCreateInstance() is called. This bug has
// been fixed in the latest AMD driver.
// Detected by the file version of amdvlk64.dll.
if (BadAMDVulkanDriverVersion()) {
return 0;
}
// Don't collect any info if the graphics vulkan driver is blocklisted or
// doesn't support Vulkan 1.1
// Detected by the graphic driver version returned by DXGI
if (BadGraphicsDriverVersions(gpu_device))
return 0;
// Only supports a version >= 1.1.0.
uint32_t vulkan_version = 0;
if (!InitVulkan(&vulkan_library, &vkGetInstanceProcAddr, &vkCreateInstance,
&vulkan_version)) {
return 0;
}
VkApplicationInfo app_info = {};
app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
const std::vector<const char*> enabled_instance_extensions = {
"VK_KHR_surface", "VK_KHR_win32_surface"};
VkInstanceCreateInfo create_info = {};
create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
create_info.pApplicationInfo = &app_info;
create_info.enabledExtensionCount = enabled_instance_extensions.size();
create_info.ppEnabledExtensionNames = enabled_instance_extensions.data();
// Get the Vulkan API version supported in the GPU driver
int highest_minor_version = VK_VERSION_MINOR(vulkan_version);
for (int minor_version = highest_minor_version; minor_version >= 1;
--minor_version) {
app_info.apiVersion = VK_MAKE_VERSION(1, minor_version, 0);
VkResult result = vkCreateInstance(&create_info, nullptr, &vk_instance);
if (result == VK_SUCCESS && vk_instance &&
InitVulkanInstanceProc(vk_instance, vkGetInstanceProcAddr,
&vkEnumeratePhysicalDevices,
&vkEnumerateDeviceExtensionProperties)) {
result = vkEnumeratePhysicalDevices(vk_instance, &physical_device_count,
nullptr);
if (result == VK_SUCCESS && physical_device_count > 0) {
return app_info.apiVersion;
} else {
// Skip destroy here. GPU process shutdown will unload all loaded DLLs.
// vkDestroyInstance(vk_instance, nullptr);
vk_instance = VK_NULL_HANDLE;
}
}
}
// From the crash reports, calling the following two functions might cause a
// crash in the Vulkan loader or in the Vulkan driver. To work around it,
// don't explicitly unload the DLL. Instead, GPU process shutdown will unload
// all loaded DLLs.
// if (vk_instance) {
// vkDestroyInstance(vk_instance, nullptr);
// }
// base::UnloadNativeLibrary(vulkan_library);
return 0;
}
void RecordGpuSupportedDx12VersionHistograms(
uint32_t d3d12_feature_level,
uint32_t highest_shader_model_version) {
bool supports_dx12 =
(d3d12_feature_level >= D3D_FEATURE_LEVEL_12_0) ? true : false;
UMA_HISTOGRAM_BOOLEAN("GPU.SupportsDX12", supports_dx12);
UMA_HISTOGRAM_ENUMERATION(
"GPU.D3D12FeatureLevel",
ConvertToHistogramFeatureLevel(d3d12_feature_level));
UMA_HISTOGRAM_ENUMERATION(
"GPU.D3D12HighestShaderModel",
ConvertToHistogramShaderVersion(highest_shader_model_version));
}
bool CollectD3D11FeatureInfo(D3D_FEATURE_LEVEL* d3d11_feature_level,
bool* has_discrete_gpu) {
Microsoft::WRL::ComPtr<IDXGIFactory1> dxgi_factory;
if (FAILED(::CreateDXGIFactory1(IID_PPV_ARGS(&dxgi_factory))))
return false;
base::ScopedNativeLibrary d3d11_library(
base::FilePath(FILE_PATH_LITERAL("d3d11.dll")));
if (!d3d11_library.is_valid())
return false;
PFN_D3D11_CREATE_DEVICE D3D11CreateDevice =
reinterpret_cast<PFN_D3D11_CREATE_DEVICE>(
d3d11_library.GetFunctionPointer("D3D11CreateDevice"));
if (!D3D11CreateDevice)
return false;
// The order of feature levels to attempt to create in D3D CreateDevice
const D3D_FEATURE_LEVEL kFeatureLevels[] = {
D3D_FEATURE_LEVEL_12_1, D3D_FEATURE_LEVEL_12_0, D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0, D3D_FEATURE_LEVEL_10_1, D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_3, D3D_FEATURE_LEVEL_9_2, D3D_FEATURE_LEVEL_9_1};
bool detected_discrete_gpu = false;
D3D_FEATURE_LEVEL max_level = D3D_FEATURE_LEVEL_1_0_CORE;
Microsoft::WRL::ComPtr<IDXGIAdapter> dxgi_adapter;
for (UINT ii = 0; SUCCEEDED(dxgi_factory->EnumAdapters(ii, &dxgi_adapter));
++ii) {
DXGI_ADAPTER_DESC desc;
if (SUCCEEDED(dxgi_adapter->GetDesc(&desc)) && desc.VendorId == 0x1414) {
// Bypass Microsoft software renderer.
continue;
}
Microsoft::WRL::ComPtr<ID3D11Device> d3d11_device;
D3D_FEATURE_LEVEL returned_feature_level = D3D_FEATURE_LEVEL_1_0_CORE;
if (FAILED(D3D11CreateDevice(dxgi_adapter.Get(), D3D_DRIVER_TYPE_UNKNOWN,
/*Software=*/0,
/*Flags=*/0, kFeatureLevels,
_countof(kFeatureLevels), D3D11_SDK_VERSION,
&d3d11_device, &returned_feature_level,
/*ppImmediateContext=*/nullptr))) {
continue;
}
if (returned_feature_level > max_level)
max_level = returned_feature_level;
Microsoft::WRL::ComPtr<ID3D11Device3> d3d11_device_3;
if (FAILED(d3d11_device.As(&d3d11_device_3)))
continue;
D3D11_FEATURE_DATA_D3D11_OPTIONS2 data = {};
if (FAILED(d3d11_device_3->CheckFeatureSupport(D3D11_FEATURE_D3D11_OPTIONS2,
&data, sizeof(data)))) {
continue;
}
if (!data.UnifiedMemoryArchitecture)
detected_discrete_gpu = true;
}
if (max_level > D3D_FEATURE_LEVEL_1_0_CORE) {
*d3d11_feature_level = max_level;
*has_discrete_gpu = detected_discrete_gpu;
return true;
}
return false;
}
bool CollectContextGraphicsInfo(GPUInfo* gpu_info) {
TRACE_EVENT0("gpu", "CollectGraphicsInfo");
DCHECK(gpu_info);
if (!CollectGraphicsInfoGL(gpu_info))
return false;
// ANGLE's renderer strings are of the form:
// ANGLE (<adapter_identifier> Direct3D<version> vs_x_x ps_x_x)
std::string direct3d_version;
int vertex_shader_major_version = 0;
int vertex_shader_minor_version = 0;
int pixel_shader_major_version = 0;
int pixel_shader_minor_version = 0;
if (RE2::FullMatch(gpu_info->gl_renderer,
"ANGLE \\(.*\\)") &&
RE2::PartialMatch(gpu_info->gl_renderer,
" Direct3D(\\w+)",
&direct3d_version) &&
RE2::PartialMatch(gpu_info->gl_renderer,
" vs_(\\d+)_(\\d+)",
&vertex_shader_major_version,
&vertex_shader_minor_version) &&
RE2::PartialMatch(gpu_info->gl_renderer,
" ps_(\\d+)_(\\d+)",
&pixel_shader_major_version,
&pixel_shader_minor_version)) {
gpu_info->vertex_shader_version =
base::StringPrintf("%d.%d",
vertex_shader_major_version,
vertex_shader_minor_version);
gpu_info->pixel_shader_version =
base::StringPrintf("%d.%d",
pixel_shader_major_version,
pixel_shader_minor_version);
DCHECK(!gpu_info->vertex_shader_version.empty());
// Note: do not reorder, used by UMA_HISTOGRAM below
enum ShaderModel {
SHADER_MODEL_UNKNOWN,
SHADER_MODEL_2_0,
SHADER_MODEL_3_0,
SHADER_MODEL_4_0,
SHADER_MODEL_4_1,
SHADER_MODEL_5_0,
NUM_SHADER_MODELS
};
ShaderModel shader_model = SHADER_MODEL_UNKNOWN;
if (gpu_info->vertex_shader_version == "5.0") {
shader_model = SHADER_MODEL_5_0;
} else if (gpu_info->vertex_shader_version == "4.1") {
shader_model = SHADER_MODEL_4_1;
} else if (gpu_info->vertex_shader_version == "4.0") {
shader_model = SHADER_MODEL_4_0;
} else if (gpu_info->vertex_shader_version == "3.0") {
shader_model = SHADER_MODEL_3_0;
} else if (gpu_info->vertex_shader_version == "2.0") {
shader_model = SHADER_MODEL_2_0;
}
UMA_HISTOGRAM_ENUMERATION("GPU.D3DShaderModel", shader_model,
NUM_SHADER_MODELS);
// DirectX diagnostics are collected asynchronously because it takes a
// couple of seconds.
}
return true;
}
bool CollectBasicGraphicsInfo(GPUInfo* gpu_info) {
TRACE_EVENT0("gpu", "CollectPreliminaryGraphicsInfo");
DCHECK(gpu_info);
// TODO(zmo): we only need to call CollectDriverInfoD3D() if we use ANGLE.
return CollectDriverInfoD3D(gpu_info);
}
bool IdentifyActiveGPUWithLuid(GPUInfo* gpu_info) {
LUID luid;
if (!GetActiveAdapterLuid(&luid))
return false;
gpu_info->gpu.active = false;
for (size_t i = 0; i < gpu_info->secondary_gpus.size(); i++)
gpu_info->secondary_gpus[i].active = false;
if (gpu_info->gpu.luid.HighPart == luid.HighPart &&
gpu_info->gpu.luid.LowPart == luid.LowPart) {
gpu_info->gpu.active = true;
return true;
}
for (size_t i = 0; i < gpu_info->secondary_gpus.size(); i++) {
if (gpu_info->secondary_gpus[i].luid.HighPart == luid.HighPart &&
gpu_info->secondary_gpus[i].luid.LowPart == luid.LowPart) {
gpu_info->secondary_gpus[i].active = true;
return true;
}
}
return false;
}
} // namespace gpu
| 12,272 |
488 | <filename>src/3rdPartyLibraries/libharu-2.1.0/demo/link_annotation.c
/*
* << Haru Free PDF Library 2.0.0 >> -- link_annotation.c
*
* Copyright (c) 1999-2006 <NAME> <<EMAIL>>
*
* Permission to use, copy, modify, distribute and sell this software
* and its documentation for any purpose is hereby granted without fee,
* provided that the above copyright notice appear in all copies and
* that both that copyright notice and this permission notice appear
* in supporting documentation.
* It is provided "as is" without express or implied warranty.
*
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <setjmp.h>
#include "hpdf.h"
jmp_buf env;
#ifdef HPDF_DLL
void __stdcall
#else
void
#endif
error_handler (HPDF_STATUS error_no,
HPDF_STATUS detail_no,
void *user_data)
{
printf ("ERROR: error_no=%04X, detail_no=%u\n", (HPDF_UINT)error_no,
(HPDF_UINT)detail_no);
longjmp(env, 1);
}
void
print_page (HPDF_Page page, HPDF_Font font, int page_num)
{
char buf[50];
HPDF_Page_SetWidth (page, 200);
HPDF_Page_SetHeight (page, 200);
HPDF_Page_SetFontAndSize (page, font, 20);
HPDF_Page_BeginText (page);
HPDF_Page_MoveTextPos (page, 50, 150);
#ifdef __WIN32__
_snprintf(buf, 50, "Page:%d", page_num);
#else
snprintf(buf, 50, "Page:%d", page_num);
#endif
HPDF_Page_ShowText (page, buf);
HPDF_Page_EndText (page);
}
int main(int argc, char **argv)
{
HPDF_Doc pdf;
HPDF_Font font;
HPDF_Page index_page;
HPDF_Page page[9];
HPDF_Destination dst;
char fname[256];
HPDF_Rect rect;
HPDF_Point tp;
HPDF_Annotation annot;
HPDF_UINT i;
const char *uri = "http://sourceforge.net/projects/libharu";
strcpy (fname, argv[0]);
strcat (fname, ".pdf");
pdf = HPDF_New (error_handler, NULL);
if (!pdf) {
printf ("error: cannot create PdfDoc object\n");
return 1;
}
if (setjmp(env)) {
HPDF_Free (pdf);
return 1;
}
/* create default-font */
font = HPDF_GetFont (pdf, "Helvetica", NULL);
/* create index page */
index_page = HPDF_AddPage (pdf);
HPDF_Page_SetWidth (index_page, 300);
HPDF_Page_SetHeight (index_page, 220);
/* Add 7 pages to the document. */
for (i = 0; i < 7; i++) {
page[i] = HPDF_AddPage (pdf);
print_page(page[i], font, i + 1);
}
HPDF_Page_BeginText (index_page);
HPDF_Page_SetFontAndSize (index_page, font, 10);
HPDF_Page_MoveTextPos (index_page, 15, 200);
HPDF_Page_ShowText (index_page, "Link Annotation Demo");
HPDF_Page_EndText (index_page);
/*
* Create Link-Annotation object on index page.
*/
HPDF_Page_BeginText(index_page);
HPDF_Page_SetFontAndSize (index_page, font, 8);
HPDF_Page_MoveTextPos (index_page, 20, 180);
HPDF_Page_SetTextLeading (index_page, 23);
/* page1 (HPDF_ANNOT_NO_HIGHTLIGHT) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page1 (HilightMode=HPDF_ANNOT_NO_HIGHTLIGHT)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[0]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetHighlightMode (annot, HPDF_ANNOT_NO_HIGHTLIGHT);
/* page2 (HPDF_ANNOT_INVERT_BOX) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page2 (HilightMode=HPDF_ANNOT_INVERT_BOX)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[1]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetHighlightMode (annot, HPDF_ANNOT_INVERT_BOX);
/* page3 (HPDF_ANNOT_INVERT_BORDER) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page3 (HilightMode=HPDF_ANNOT_INVERT_BORDER)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[2]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetHighlightMode (annot, HPDF_ANNOT_INVERT_BORDER);
/* page4 (HPDF_ANNOT_DOWN_APPEARANCE) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page4 (HilightMode=HPDF_ANNOT_DOWN_APPEARANCE)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[3]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetHighlightMode (annot, HPDF_ANNOT_DOWN_APPEARANCE);
/* page5 (dash border) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page5 (dash border)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[4]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetBorderStyle (annot, 1, 3, 2);
/* page6 (no border) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page6 (no border)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[5]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetBorderStyle (annot, 0, 0, 0);
/* page7 (bold border) */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "Jump to Page7 (bold border)");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_MoveToNextLine (index_page);
dst = HPDF_Page_CreateDestination (page[6]);
annot = HPDF_Page_CreateLinkAnnot (index_page, rect, dst);
HPDF_LinkAnnot_SetBorderStyle (annot, 2, 0, 0);
/* URI link */
tp = HPDF_Page_GetCurrentTextPos (index_page);
HPDF_Page_ShowText (index_page, "URI (");
HPDF_Page_ShowText (index_page, uri);
HPDF_Page_ShowText (index_page, ")");
rect.left = tp.x - 4;
rect.bottom = tp.y - 4;
rect.right = HPDF_Page_GetCurrentTextPos (index_page).x + 4;
rect.top = tp.y + 10;
HPDF_Page_CreateURILinkAnnot (index_page, rect, uri);
HPDF_Page_EndText (index_page);
/* save the document to a file */
HPDF_SaveToFile (pdf, fname);
/* clean up */
HPDF_Free (pdf);
return 0;
}
| 3,135 |
310 | {
"name": "LERBERG",
"description": "Table let trestles.",
"url": "https://www.ikea.com/gb/en/p/lerberg-trestle-grey-80130776/"
}
| 60 |
3,307 | <reponame>ruyimarone/dynet<filename>dynet/nodes-minmax.cc
#include "dynet/tensor-eigen.h"
#include "dynet/nodes-minmax.h"
#include "dynet/nodes-impl-macros.h"
#include "dynet/functors.h"
using namespace std;
namespace dynet {
// ************* Min *************
#ifndef __CUDACC__
string Min::as_string(const vector<string>& arg_names) const {
ostringstream s;
s << "min{" << arg_names[0] << ", " << arg_names[1] << "}";
return s.str();
}
Dim Min::dim_forward(const vector<Dim>& xs) const {
DYNET_ARG_CHECK(xs.size() == 2 && xs[0] == xs[1], "Bad arguments in Min: " << xs);
return xs[0].bd >= xs[1].bd ? xs[0] : xs[1];
}
size_t Min::aux_storage_size() const {
return dim.size() * sizeof(float);
}
#endif
template<class MyDevice>
void Min::forward_dev_impl(const MyDevice & dev, const vector<const Tensor*>& xs, Tensor& fx) const {
Tensor t(fx.d, static_cast<float*>(aux_mem), fx.device, DeviceMempool::FXS);
tvec(t).device(*dev.edevice) = (tvec(*xs[0]) < tvec(*xs[1])).cast<float>();
tvec(fx).device(*dev.edevice) = tvec(*xs[0]).cwiseMin(tvec(*xs[1]));
}
template<class MyDevice>
void Min::backward_dev_impl(const MyDevice & dev,
const vector<const Tensor*>& xs,
const Tensor& fx,
const Tensor& dEdf,
unsigned i,
Tensor& dEdxi) const {
DYNET_ASSERT(i < 2, "Failed dimension check in Min::backward");
const Tensor t(dEdxi.d, static_cast<float*>(aux_mem), fx.device, DeviceMempool::FXS);
if (i == 0) {
tvec(dEdxi).device(*dev.edevice) += tvec(t) * tvec(dEdf);
} else {
tvec(dEdxi).device(*dev.edevice) += tvec(t).binaryExpr(tvec(dEdf), FMaxBackwardInv());
}
}
DYNET_NODE_INST_DEV_IMPL(Min)
// ************* Max *************
#ifndef __CUDACC__
string Max::as_string(const vector<string>& arg_names) const {
ostringstream s;
s << "max{" << arg_names[0] << ", " << arg_names[1] << "}";
return s.str();
}
Dim Max::dim_forward(const vector<Dim>& xs) const {
DYNET_ARG_CHECK(xs.size() == 2 && xs[0] == xs[1], "Bad arguments in Max: " << xs);
return xs[0].bd >= xs[1].bd ? xs[0] : xs[1];
}
size_t Max::aux_storage_size() const {
return dim.size() * sizeof(float);
}
#endif
template<class MyDevice>
void Max::forward_dev_impl(const MyDevice & dev, const vector<const Tensor*>& xs, Tensor& fx) const {
Tensor t(fx.d, static_cast<float*>(aux_mem), fx.device, DeviceMempool::FXS);
tvec(t).device(*dev.edevice) = (tvec(*xs[0]) > tvec(*xs[1])).cast<float>();
tvec(fx).device(*dev.edevice) = tvec(*xs[0]).cwiseMax(tvec(*xs[1]));
}
template<class MyDevice>
void Max::backward_dev_impl(const MyDevice & dev,
const vector<const Tensor*>& xs,
const Tensor& fx,
const Tensor& dEdf,
unsigned i,
Tensor& dEdxi) const {
DYNET_ASSERT(i < 2, "Failed dimension check in Max::backward");
const Tensor t(dEdxi.d, static_cast<float*>(aux_mem), fx.device, DeviceMempool::FXS);
if (i == 0) {
tvec(dEdxi).device(*dev.edevice) += tvec(t) * tvec(dEdf);
} else {
tvec(dEdxi).device(*dev.edevice) += tvec(t).binaryExpr(tvec(dEdf), FMaxBackwardInv());
}
}
DYNET_NODE_INST_DEV_IMPL(Max)
// ************* MinDimension *************
#ifndef __CUDACC__
string MinDimension::as_string(const vector<string>& arg_names) const {
ostringstream s;
s << "min_dim(" << arg_names[0] << ", reduced_dim=" << reduced_dim << ')';
return s.str();
}
Dim MinDimension::dim_forward(const vector<Dim>& xs) const {
DYNET_ARG_CHECK(xs.size() == 1, "Failed input count check in MinDimension");
DYNET_ARG_CHECK(reduced_dim < xs[0].nd,
"Tried to MinDimension on dimension " << reduced_dim << " bigger than input " << xs[0]);
DYNET_ARG_CHECK(xs[0].nd < 4,
"MinDimension not currently supported for tensors of 4 or more dimensions.");
Dim ret(xs[0]);
ret.delete_dim(reduced_dim);
return ret;
}
size_t MinDimension::aux_storage_size() const {
return sizeof(Eigen::DenseIndex) * dim.size();
}
#endif
template<class MyDevice>
void MinDimension::forward_dev_impl(const MyDevice & dev, const vector<const Tensor*>& xs, Tensor& fx) const {
Eigen::DenseIndex* minmap = static_cast<Eigen::DenseIndex*>(aux_mem);
const unsigned batch_size = dim.batch_elems();
const unsigned first_dim_size = dim[0];
const unsigned second_dim_size = dim[1];
Eigen::TensorMap<Eigen::Tensor<Eigen::DenseIndex, 3>> locs(minmap, first_dim_size, second_dim_size, batch_size);
const Eigen::array<Eigen::DenseIndex, 1> reduction_axis = {reduced_dim};
locs.device(*dev.edevice) = tb<3>(*xs[0]).argmin(reduced_dim);
tb<2>(fx).device(*dev.edevice) = tb<3>(*xs[0]).minimum(reduction_axis);
}
template<class MyDevice>
void MinDimension::backward_dev_impl(const MyDevice & dev,
const vector<const Tensor*>& xs,
const Tensor& fx,
const Tensor& dEdf,
unsigned i,
Tensor& dEdxi) const {
DYNET_ARG_CHECK(i == 0, "Failed dimension check in MinDimension::backward");
#ifdef __CUDACC__
vector<Eigen::DenseIndex> indices(dim.size());
Eigen::DenseIndex* minmap = &indices[0];
CUDA_CHECK(cudaMemcpy((void*)minmap, aux_mem, sizeof(Eigen::DenseIndex) * dim.size(), cudaMemcpyDeviceToHost));
#else
Eigen::DenseIndex* minmap = static_cast<Eigen::DenseIndex*>(aux_mem);
#endif
const unsigned batch_size = dim.batch_elems();
const unsigned first_dim_size = dim[0];
const unsigned second_dim_size = dim[1];
Eigen::TensorMap<Eigen::Tensor<Eigen::DenseIndex, 3>> locs(minmap, first_dim_size, second_dim_size, batch_size);
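  // Scatter each output gradient back to the argmin position recorded in locs;
  // the chip() ordering below depends on where reduced_dim falls relative to the
  // two kept dimensions (first_dim and second_dim).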
for(unsigned b = 0; b < batch_size; ++b){
for(unsigned j = 0; j < second_dim_size; ++j){
for(unsigned i = 0; i < first_dim_size; ++i){
if (reduced_dim > second_dim)
tb<3>(dEdxi).chip<3>(b).chip(locs(i, j, b), reduced_dim).chip(j, second_dim).chip(i, first_dim).device(*dev.edevice)
+= tb<2>(dEdf).chip<2>(b).chip<1>(j).chip<0>(i);
else if (reduced_dim > first_dim)
tb<3>(dEdxi).chip<3>(b).chip(j, second_dim).chip(locs(i, j, b), reduced_dim).chip(i, first_dim).device(*dev.edevice)
+= tb<2>(dEdf).chip<2>(b).chip<1>(j).chip<0>(i);
else
tb<3>(dEdxi).chip<3>(b).chip(j, second_dim).chip(i, first_dim).chip(locs(i, j, b), reduced_dim).device(*dev.edevice)
+= tb<2>(dEdf).chip<2>(b).chip<1>(j).chip<0>(i);
}
}
}
}
DYNET_NODE_INST_DEV_IMPL(MinDimension)
// ************* MaxDimension *************
#ifndef __CUDACC__
string MaxDimension::as_string(const vector<string>& arg_names) const {
ostringstream s;
s << "max_dim(" << arg_names[0] << ", reduced_dim=" << reduced_dim << ')';
return s.str();
}
Dim MaxDimension::dim_forward(const vector<Dim>& xs) const {
DYNET_ARG_CHECK(xs.size() == 1, "Failed input count check in MaxDimension");
DYNET_ARG_CHECK(reduced_dim < xs[0].nd,
"Tried to MaxDimension on dimension " << reduced_dim << " bigger than input " << xs[0]);
DYNET_ARG_CHECK(xs[0].nd < 4,
"MaxDimension not currently supported for tensors of 4 or more dimensions.");
Dim ret(xs[0]);
ret.delete_dim(reduced_dim);
return ret;
}
size_t MaxDimension::aux_storage_size() const {
return sizeof(Eigen::DenseIndex) * dim.size();
}
#endif
template<class MyDevice>
void MaxDimension::forward_dev_impl(const MyDevice & dev, const vector<const Tensor*>& xs, Tensor& fx) const {
Eigen::DenseIndex* maxmap = static_cast<Eigen::DenseIndex*>(aux_mem);
const unsigned batch_size = dim.batch_elems();
const unsigned first_dim_size = dim[0];
const unsigned second_dim_size = dim[1];
Eigen::TensorMap<Eigen::Tensor<Eigen::DenseIndex, 3>> locs(maxmap, first_dim_size, second_dim_size, batch_size);
const Eigen::array<Eigen::DenseIndex, 1> reduction_axis = {reduced_dim};
locs.device(*dev.edevice) = tb<3>(*xs[0]).argmax(reduced_dim);
tb<2>(fx).device(*dev.edevice) = tb<3>(*xs[0]).maximum(reduction_axis);
}
template<class MyDevice>
void MaxDimension::backward_dev_impl(const MyDevice & dev,
const vector<const Tensor*>& xs,
const Tensor& fx,
const Tensor& dEdf,
unsigned i,
Tensor& dEdxi) const {
DYNET_ARG_CHECK(i == 0, "Failed dimension check in MaxDimension::backward");
#ifdef __CUDACC__
vector<Eigen::DenseIndex> indices(dim.size());
Eigen::DenseIndex* maxmap = &indices[0];
CUDA_CHECK(cudaMemcpy((void*)maxmap, aux_mem, sizeof(Eigen::DenseIndex) * dim.size(), cudaMemcpyDeviceToHost));
#else
Eigen::DenseIndex* maxmap = static_cast<Eigen::DenseIndex*>(aux_mem);
#endif
const unsigned batch_size = dim.batch_elems();
const unsigned first_dim_size = dim[0];
const unsigned second_dim_size = dim[1];
Eigen::TensorMap<Eigen::Tensor<Eigen::DenseIndex, 3>> locs(maxmap, first_dim_size, second_dim_size, batch_size);
for(unsigned b = 0; b < batch_size; ++b){
for(unsigned j = 0; j < second_dim_size; ++j){
for(unsigned i = 0; i < first_dim_size; ++i){
if (reduced_dim > second_dim)
tb<3>(dEdxi).chip<3>(b).chip(locs(i, j, b), reduced_dim).chip(j, second_dim).chip(i, first_dim).device(*dev.edevice)
+= tb<2>(dEdf).chip<2>(b).chip<1>(j).chip<0>(i);
else if (reduced_dim > first_dim)
tb<3>(dEdxi).chip<3>(b).chip(j, second_dim).chip(locs(i, j, b), reduced_dim).chip(i, first_dim).device(*dev.edevice)
+= tb<2>(dEdf).chip<2>(b).chip<1>(j).chip<0>(i);
else
tb<3>(dEdxi).chip<3>(b).chip(j, second_dim).chip(i, first_dim).chip(locs(i, j, b), reduced_dim).device(*dev.edevice)
+= tb<2>(dEdf).chip<2>(b).chip<1>(j).chip<0>(i);
}
}
}
}
DYNET_NODE_INST_DEV_IMPL(MaxDimension)
}
| 4,628 |
6,958 | <gh_stars>1000+
//
// NPUActivation.cpp
// MNN
//
// Created by MNN on 2019/09/19.
// Copyright © 2018, Alibaba Group Holding Limited
//
#include "NPUActivation.hpp"
#include "NPUBackend.hpp"
using namespace std;
namespace MNN {
NPUActivation::NPUActivation(Backend *b, const Op *op, const std::vector<Tensor *> &inputs, const std::vector<Tensor *> &outputs, int type) : MNN::NPUCommonExecution(b,op) {
mType = type;
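    // mType carries the HiAI activation mode code chosen by ActivationCreator below:
    // 0 = Sigmoid, 1 = ReLU, 2 = TanH, 5 = PReLU (built as a PRelu op), 14 = ReLU6.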
}
ErrorCode NPUActivation::onResize(const std::vector<Tensor *> &inputs, const std::vector<Tensor *> &outputs) {
mNpuBackend->setNetworkInput(inputs, mOp);
auto opName = mOp->name()->str();
auto xOp = mNpuBackend->getInputOps(mOp);
if(mType == 5){
shared_ptr<hiai::op::PRelu> prelu(new hiai::op::PRelu(opName + "_prelu"));
auto slopePtr = mOp->main_as_PRelu()->slope()->data();
auto slopeSize = mOp->main_as_PRelu()->slope()->size();
mConst_w = ge::op::Const(opName + "_w_const");
{
ge::TensorDesc fdesc(ge::Shape({1, slopeSize, 1, 1}), ge::FORMAT_NCHW,
ge::DT_FLOAT); // in o h w ?
ge::TensorPtr filter = std::make_shared<ge::Tensor>();
filter->SetTensorDesc(fdesc);
filter->SetData((uint8_t *)slopePtr, slopeSize * sizeof(float));
mConst_w.set_attr_value(filter);
}
(*prelu)
.set_input_x(*xOp.get()).set_input_weight(mConst_w);
mNpuBackend->setOutputOps(mOp, {prelu}, outputs);
}else{
shared_ptr<ge::op::Activation> relu(new ge::op::Activation(opName + "_relu"));
(*relu)
.set_input_x(*xOp.get())
.set_attr_coef(.000000)
.set_attr_mode(mType);
mNpuBackend->setOutputOps(mOp, {relu}, outputs);
}
return NO_ERROR;
}
class ActivationCreator : public NPUBackend::Creator {
public:
virtual Execution *onCreate(const std::vector<Tensor *> &inputs, const std::vector<Tensor *> &outputs,
const MNN::Op *op, Backend *backend) const override {
if (op->type() == OpType_ReLU) {
return new NPUActivation(backend, op, inputs, outputs, 1);
}else if (op->type() == OpType_ReLU6) {
return new NPUActivation(backend, op, inputs, outputs, 14);
}else if (op->type() == OpType_Sigmoid) {
return new NPUActivation(backend, op, inputs, outputs, 0);
}else if (op->type() == OpType_PReLU) {
return new NPUActivation(backend, op, inputs, outputs, 5);
}else if (op->type() == OpType_TanH) {
return new NPUActivation(backend, op, inputs, outputs, 2);
}else{
MNN_ERROR("Activation not support this case %d \n", op->type());
return nullptr;
}
}
};
NPUCreatorRegister<ActivationCreator> __relu_op(OpType_ReLU);
NPUCreatorRegister<ActivationCreator> __relu6_op(OpType_ReLU6);
NPUCreatorRegister<ActivationCreator> __sigmoid_op(OpType_Sigmoid);
NPUCreatorRegister<ActivationCreator> __prelu_op(OpType_PReLU);
NPUCreatorRegister<ActivationCreator> __tanh_op(OpType_TanH);
} // namespace MNN | 1,459 |
326 | <gh_stars>100-1000
from typing import TYPE_CHECKING, DefaultDict, Dict
from collections import Counter, defaultdict, OrderedDict
from itertools import chain
from pydfs_lineup_optimizer.player import LineupPlayer
if TYPE_CHECKING:
from pydfs_lineup_optimizer.lineup_optimizer import LineupOptimizer
class Statistic:
def __init__(self, optimizer: 'LineupOptimizer', with_excluded: bool = True):
self.optimizer = optimizer
if self.optimizer.last_context is not None:
self.lineups = self.optimizer.last_context.get_lineups(with_excluded)
else:
self.lineups = []
def get_top_teams(self) -> Dict[str, int]:
teams_dict = defaultdict(int) # type: DefaultDict[str, int]
for lineup in self.lineups:
lineup_teams = {p.team for p in lineup}
for team in lineup_teams:
teams_dict[team] += 1
return OrderedDict(sorted(teams_dict.items(), key=lambda t: -t[1]))
def get_top_players(self) -> Dict[str, Dict[LineupPlayer, int]]:
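        # Maps each player's full name to {LineupPlayer: appearances}; one name can
        # map to several LineupPlayer entries when the player appears at different
        # positions. The outer dict is ordered by total appearances, descending.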
players = defaultdict(lambda: defaultdict(int)) # type: DefaultDict[str, DefaultDict[LineupPlayer, int]]
for player in chain.from_iterable(self.lineups):
players[player.full_name][player] += 1
return OrderedDict(sorted(players.items(), key=lambda t: -sum(t[1].values()))) # type: ignore
def print_report(self) -> None:
top_teams = self.get_top_teams()
if len(top_teams) > 1:
print('Top Teams')
for team, appearance in top_teams.items():
print(' %s - %d' % (team, appearance))
print('Used %d/%d\n' % (len(top_teams), len(self.optimizer.player_pool.available_teams)))
print('Top Players')
players_per_team = defaultdict(dict) # type: DefaultDict[str, Dict[str, int]]
top_players = self.get_top_players()
total_players = len({player.full_name for player in self.optimizer.player_pool.all_players})
replicated_players = Counter([player.full_name for player in self.optimizer.player_pool.all_players])
for player_name, players in top_players.items():
total = sum(players.values())
players_per_team[list(players.keys())[0].team][player_name] = total
by_positions = ''
if replicated_players[player_name] > 1:
by_positions = ','.join('%s-%d' % ('/'.join(player.positions), appearance)
for player, appearance in players.items())
by_positions = '(%s)' % by_positions
print(' %s - %d %s' % (player_name, total, by_positions))
print('Used %d/%d\n' % (len(top_players), total_players))
if len(top_teams) > 1:
print('Top Players Per Team')
for team, team_players in sorted(players_per_team.items(), key=lambda t: -sum(t[1].values())):
total_appearances = sum(team_players.values())
team_players_str = ','.join('%s(%d)' % values for values in
sorted(players_per_team[team].items(), key=lambda t: -t[1]))
print(' %s(%d) - %s' % (team, total_appearances, team_players_str))
| 1,443 |
640 | <reponame>pearldrift/react-native-pearldrift-image-picker<gh_stars>100-1000
#if __has_include("RCTBridgeModule.h")
#import "RCTBridgeModule.h"
#else
#import <React/RCTBridgeModule.h>
#endif
#import <UIKit/UIKit.h>
#import "TZImagePickerController.h"
@interface RNSyanImagePicker : NSObject <RCTBridgeModule, TZImagePickerControllerDelegate, UINavigationControllerDelegate, UIImagePickerControllerDelegate, UIActionSheetDelegate>
@end
| 159 |
14,668 | // Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/ui/views/payments/validating_combobox.h"
#include <utility>
#include "ui/base/models/combobox_model.h"
namespace payments {
ValidatingCombobox::ValidatingCombobox(
std::unique_ptr<ui::ComboboxModel> model,
std::unique_ptr<ValidationDelegate> delegate)
: Combobox(std::move(model)), delegate_(std::move(delegate)) {
SetFocusBehavior(FocusBehavior::ALWAYS);
}
ValidatingCombobox::~ValidatingCombobox() = default;
void ValidatingCombobox::OnBlur() {
Combobox::OnBlur();
// Validations will occur when the content changes. Do not validate if the
// view is being removed.
if (!being_removed_) {
Validate();
}
}
void ValidatingCombobox::ViewHierarchyChanged(
const views::ViewHierarchyChangedDetails& details) {
if (details.child == this && !details.is_add)
being_removed_ = true;
}
void ValidatingCombobox::OnContentsChanged() {
Validate();
}
void ValidatingCombobox::OnComboboxModelChanged(ui::ComboboxModel* model) {
views::Combobox::OnComboboxModelChanged(model);
delegate_->ComboboxModelChanged(this);
}
bool ValidatingCombobox::IsValid() {
std::u16string unused;
return delegate_->IsValidCombobox(this, &unused);
}
void ValidatingCombobox::Validate() {
// ComboboxValueChanged may have side-effects, such as displaying errors.
SetInvalid(!delegate_->ComboboxValueChanged(this));
}
} // namespace payments
| 523 |
1,656 | /*
* Copyright 2013-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.cloud.sleuth.instrument.rsocket;
import io.netty.buffer.ByteBufAllocator;
import io.netty.buffer.ByteBufUtil;
import io.netty.buffer.CompositeByteBuf;
import io.rsocket.metadata.CompositeMetadataCodec;
import org.springframework.cloud.sleuth.propagation.Propagator;
class ByteBufSetter implements Propagator.Setter<CompositeByteBuf> {
@Override
public void set(CompositeByteBuf carrier, String key, String value) {
final ByteBufAllocator alloc = carrier.alloc();
CompositeMetadataCodec.encodeAndAddMetadataWithCompression(carrier, alloc, key,
ByteBufUtil.writeUtf8(alloc, value));
}
}
| 384 |
809 | <filename>src/include/net/util/hostent.h
/**
* @file
 * @brief Useful functions for manipulating a hostent structure
*
* @date 20.08.12
* @author <NAME>
*/
#ifndef NET_UTIL_HOSTENT_API_H_
#define NET_UTIL_HOSTENT_API_H_
#include <netdb.h>
extern struct hostent * hostent_create(void);
extern int hostent_set_name(struct hostent *he, const char *name);
extern int hostent_add_alias(struct hostent *he, const char *alias);
extern int hostent_set_addr_info(struct hostent *he, int addrtype, int addrlen);
extern int hostent_add_addr(struct hostent *he, const void *addr);
extern struct hostent * hostent_make(const char *name,
int addrtype, int addrlen, const void *addr);
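/* Illustrative usage sketch (not part of the original header); it assumes an IPv4
 * address already stored in a struct in_addr named addr:
 *
 *   struct hostent *he = hostent_make("example.org", AF_INET,
 *           sizeof(struct in_addr), &addr);
 *   hostent_add_alias(he, "www.example.org");
 */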
#endif /* NET_UTIL_HOSTENT_API_H_ */
| 263 |
1,338 | /*
* Copyright 2007-2014 Haiku, Inc. All rights reserved.
* Distributed under the terms of the MIT License.
*/
#ifndef _BEEP_H
#define _BEEP_H
#include <BeBuild.h>
#include <SupportDefs.h>
#ifdef __cplusplus
// These functions are only exported for C++
status_t beep();
status_t system_beep(const char* eventName);
status_t add_system_beep_event(const char* eventName, uint32 flags = 0);
#endif // __cplusplus
#endif // _BEEP_H
| 157 |
329 | <filename>java/src/main/java/com/cloudera/api/v4/ServicesResourceV4.java
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.cloudera.api.v4;
import static com.cloudera.api.Parameters.*;
import com.cloudera.api.ApiTimeAggregation;
import com.cloudera.api.model.ApiCommand;
import com.cloudera.api.model.ApiDisableJtHaArguments;
import com.cloudera.api.model.ApiEnableJtHaArguments;
import com.cloudera.api.model.ApiHdfsUsageReport;
import com.cloudera.api.model.ApiMrUsageReport;
import com.cloudera.api.v3.ServicesResourceV3;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
@Consumes({ MediaType.APPLICATION_JSON })
@Produces({ MediaType.APPLICATION_JSON })
public interface ServicesResourceV4 extends ServicesResourceV3 {
/**
* Create the Hive Metastore Database. Only works with embedded postgresql
* database.
* <p>
* This command is to be run whenever a new user and database needs to be
* created in the embedded postgresql database for a Hive service. This
* command should usually be followed by a call to
* hiveCreateMetastoreDatabaseTables.
* <p>
* Available since API v4.
*
* @param serviceName
* Name of the Hive service on which to run the command.
* @return Information about the submitted command
*/
@POST
@Consumes()
@Path("/{serviceName}/commands/hiveCreateMetastoreDatabase")
public ApiCommand hiveCreateMetastoreDatabaseCommand(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Update Hive Metastore to point to a NameNode's Nameservice name instead of
* hostname.
* <p>
* <strong>Back up the Hive Metastore Database before running this command.</strong>
* <p>
* This command is to be run after enabling HDFS High Availability. Only
* available when all Hive Metastore Servers are stopped.
* <p>
* Available since API v4.
*
* @param serviceName
* Name of the Hive service on which to run the command.
* @return Information about the submitted command
*/
@POST
@Consumes()
@Path("/{serviceName}/commands/hiveUpdateMetastoreNamenodes")
public ApiCommand hiveUpdateMetastoreNamenodesCommand(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Creates the user directory of a Sqoop service in HDFS.
*
* <p>
* Available since API v4.
*
* @param serviceName The Sqoop service name.
* @return Information about the submitted command.
*/
@POST
@Consumes()
@Path("/{serviceName}/commands/createSqoopUserDir")
public ApiCommand createSqoopUserDirCommand(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Initializes the Solr service in Zookeeper.
*
* <p>
* Available since API v4.
*
* @param serviceName The Solr service name.
* @return Information about the submitted command.
*/
@POST
@Consumes()
@Path("/{serviceName}/commands/initSolr")
public ApiCommand initSolrCommand(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Creates the home directory of a Solr service in HDFS.
*
* <p>
* Available since API v4.
*
* @param serviceName The Solr service name.
* @return Information about the submitted command.
*/
@POST
@Consumes()
@Path("/{serviceName}/commands/createSolrHdfsHomeDir")
public ApiCommand createSolrHdfsHomeDirCommand(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Create the Hive user directory
* <p>
* Available since API v4.
* </p>
* @param serviceName The Hive service name.
* @return Information about the submitted command.
*/
@POST
@Consumes()
@Path("/{serviceName}/commands/hiveCreateHiveUserDir")
public ApiCommand createHiveUserDirCommand(
@PathParam(SERVICE_NAME) String serviceName);
/**
* @return The roles resource handler.
*/
@Override
@Path("/{serviceName}/roles")
public RolesResourceV4 getRolesResource(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Enable high availability (HA) for a JobTracker.
* <p>
* This command only applies to CDH4 MapReduce services.
* <p>
* The command will create a new JobTracker on the specified host and then
* create an active/standby pair with the existing JobTracker. Autofailover
* will be enabled using ZooKeeper. A ZNode will be created for this purpose.
   * Command arguments provide an option to forcefully create this ZNode if one
   * already exists. A ZNode may already exist if JobTracker was previously
* enabled in HA mode but HA mode was disabled later on. The ZNode is not
* deleted when HA is disabled.
* <p>
   * As part of enabling HA, any services that depend on the MapReduce service
   * being modified will be stopped. The command will redeploy the client
* configurations for services of the cluster after HA has been enabled.
*
* @param serviceName The MapReduce service name.
* @param args Arguments for the command.
* @return Information about the submitted command.
*/
@POST
@Path("/{serviceName}/commands/enableJtHa")
public ApiCommand enableJtHaCommand(
@PathParam(SERVICE_NAME) String serviceName,
ApiEnableJtHaArguments args);
/**
* Disable high availability (HA) for JobTracker.
*
* As part of disabling HA, any services that depend on the MapReduce service
* being modified will be stopped. The command arguments provide options to
   * specify the name of the JobTracker that will be preserved. The command will
* redeploy the client configurations for services of the cluster after HA
* has been disabled.
*
* @param serviceName The MapReduce service name.
* @param args Arguments for the command.
* @return Information about the submitted command.
*/
@POST
@Path("/{serviceName}/commands/disableJtHa")
public ApiCommand disableJtHaCommand(
@PathParam(SERVICE_NAME) String serviceName,
ApiDisableJtHaArguments args);
/**
* Return the Impala queries resource handler.
* <p/>
*
* @return The Impala queries resource handler
*/
@Path("/{serviceName}/impalaQueries")
public ImpalaQueriesResource getImpalaQueriesResource(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Only available with Cloudera Manager Enterprise Edition.
*
* @return The replications resource handler.
*/
@Path("/{serviceName}/replications")
@Override
public ReplicationsResourceV4 getReplicationsResource(
@PathParam(SERVICE_NAME) String serviceName);
/**
* @return The role command resource handler.
*/
@Override
@Path("/{serviceName}/roleCommands")
public RoleCommandsResourceV4 getRoleCommandsResource(
@PathParam(SERVICE_NAME) String serviceName);
/**
* Fetch the HDFS usage report. For the requested time range, at the
* specified aggregation intervals, the report shows HDFS disk usages
* per user.
* <p>
* This call supports returning JSON or CSV, as determined by the
* "Accept" header of application/json or text/csv.
* <p>
* Available since API v4. Only available with Cloudera Manager Enterprise
* Edition.
*
* @param hdfsServiceName The HDFS service name.
* @param nameService The (optional) HDFS nameservice. Required for HA setup.
* @param from The (optional) start time of the report in ISO 8601 format (
* defaults to 24 hours before "to" time).
* @param to The (optional) end time of the report in ISO 8601 format (
* defaults to now).
* @param aggregation The (optional) aggregation period for the data.
* Supports "hourly", "daily" (default) and "weekly".
* @return Report data.
*/
@GET
@Path("/{serviceName}/reports/hdfsUsageReport")
@Produces({ MediaType.APPLICATION_JSON, "text/csv" })
public ApiHdfsUsageReport getHdfsUsageReport(
@PathParam(SERVICE_NAME) String hdfsServiceName,
@QueryParam(NAMESERVICE_NAME) String nameService,
@QueryParam(FROM) String from,
@QueryParam(TO) @DefaultValue(DATE_TIME_NOW) String to,
@QueryParam("aggregation") @DefaultValue(DAILY_AGGREGATION)
ApiTimeAggregation aggregation);
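  /*
   * Illustrative request shape only (not part of the original interface); the URL
   * prefix up to the services resource depends on the deployment and API mount:
   *
   *   GET .../services/{serviceName}/reports/hdfsUsageReport?from=2013-01-01&aggregation=weekly
   *   Accept: text/csv
   */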
/**
* Fetch the MR usage report. For the requested time range, at the
* specified aggregation intervals, the report shows job CPU usages (and other
* metrics) per user.
* <p>
* This call supports returning JSON or CSV, as determined by the
* "Accept" header of application/json or text/csv.
* <p>
* Available since API v4. Only available with Cloudera Manager Enterprise
* Edition.
*
* @param mrServiceName The MR service name.
* @param from The (optional) start time of the report in ISO 8601 format
* (defaults to 24 hours before "to" time).
* @param to The (optional) end time of the report in ISO 8601 format (defaults
* to now).
* @param aggregation The (optional) aggregation period for the data.
* Supports "hourly", "daily" (default) and "weekly".
* @return Report data.
*/
@GET
@Path("/{serviceName}/reports/mrUsageReport")
@Produces({ MediaType.APPLICATION_JSON, "text/csv" })
public ApiMrUsageReport getMrUsageReport(
@PathParam(SERVICE_NAME) String mrServiceName,
@QueryParam(FROM) String from,
@QueryParam(TO) @DefaultValue(DATE_TIME_NOW) String to,
@QueryParam("aggregation") @DefaultValue(DAILY_AGGREGATION)
ApiTimeAggregation aggregation);
}
| 3,369 |
336 | <filename>system/apps_featured/test63_ws2812/source/SPI_ds203.cpp
#if defined(DS203) || defined(DS213)
#include <library.h>
#include "SPI.h"
CSPI SPI;
void CSPI::begin()
{
BIOS::DAC::SetMode(BIOS::DAC::EMode::LogicLow, nullptr, 0);
}
void CSPI::end()
{
}
void CSPI::setClockDivider(int clockDivider)
{
}
void CSPI::dmaSendAsync(uint8_t* stream, int len)
{
// MSB first (SPI) -> LSB first (UART)
for (int i=0; i<len; i++)
{
int v = stream[i];
int n = 0;
if (v&1) n |= 128;
if (v&2) n |= 64;
if (v&4) n |= 32;
if (v&8) n |= 16;
if (v&16) n |= 8;
if (v&32) n |= 4;
if (v&64) n |= 2;
if (v&128) n |= 1;
stream[i] = n;
}
// determined empirically
#define DELAY() __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); \
__asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); \
__asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop"); __asm__("nop");
#define DELAYS() __asm__("nop"); __asm__("nop"); __asm__("nop");
BIOS::OS::DisableInterrupts();
#ifdef DS203
uint32_t destptr[2] = {0x40010C14, 0x40010C10};
//constexpr int pin = 6;
#define pin 6
#endif
#ifdef DS213
uint32_t destptr[2] = {0x40010814, 0x40010810};
// constexpr int pin = 2;
#define pin 2
#endif
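    // Bit-bang each (already bit-reversed) byte one bit at a time: destptr[1] is the
    // GPIO port's bit-set register and destptr[0] its bit-reset register (BSRR/BRR on
    // these STM32F1-based scopes), so writing the pin mask drives the line high or low.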
while (len--)
{
int v = *stream++;
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
v>>=1;
DELAY();
*((volatile uint32_t*)destptr[v&1]) = 1<<pin;
DELAYS();
}
BIOS::OS::EnableInterrupts(0);
}
#endif | 1,010 |
1,040 | <filename>OCExample/OCExample/View/TestTableViewCell.h
//
// TestTableViewCell.h
// OCExample
//
// Created by 高刘通 on 2018/4/18.
//  Copyright © 2018 LT. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface TestTableViewCell : UITableViewCell
+ (instancetype)cellWithTableView:(UITableView *)tableView;
@end
| 123 |
1,163 | <filename>iree/compiler/Dialect/Util/Analysis/DFX/DepGraph.cpp
// Copyright 2021 The IREE Authors
//
// Licensed under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#include "iree/compiler/Dialect/Util/Analysis/DFX/DepGraph.h"
#include <atomic>
#include "iree/compiler/Dialect/Util/Analysis/DFX/Element.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/FileSystem.h"
#include "llvm/Support/GraphWriter.h"
namespace llvm {
template <>
struct DOTGraphTraits<DFXDepGraph *> : public DefaultDOTGraphTraits {
explicit DOTGraphTraits(bool isSimple = false)
: DefaultDOTGraphTraits(isSimple) {}
static std::string getNodeLabel(const DFXDepGraphNode *node,
const DFXDepGraph *graph) {
std::string str;
llvm::raw_string_ostream os(str);
node->print(os, graph->asmState);
return str;
}
};
} // end namespace llvm
namespace mlir {
namespace iree_compiler {
namespace DFX {
void DepGraph::print(llvm::raw_ostream &os) {
for (auto &depElement : syntheticRoot.deps) {
cast<AbstractElement>(depElement.getPointer())->printWithDeps(os, asmState);
}
}
void DepGraph::dumpGraph() {
static std::atomic<int> callTimes;
std::string prefix = "dep_graph";
std::string filename =
prefix + "_" + std::to_string(callTimes.load()) + ".dot";
llvm::outs() << "Dependency graph dump to " << filename << ".\n";
std::error_code ec;
llvm::raw_fd_ostream file(filename, ec, llvm::sys::fs::OF_TextWithCRLF);
if (!ec) llvm::WriteGraph(file, this);
callTimes++;
}
} // namespace DFX
} // namespace iree_compiler
} // namespace mlir
| 652 |
1,931 | {
"extends": "../../tsconfig.json",
"compilerOptions": {
"allowJs": true,
"checkJs": true,
"noEmit": false,
"emitDeclarationOnly": true,
"skipLibCheck": false
},
"files": ["src/en/messages.js"]
}
| 98 |
348 | {"nom":"Vertheuil","circ":"5ème circonscription","dpt":"Gironde","inscrits":882,"abs":540,"votants":342,"blancs":41,"nuls":20,"exp":281,"res":[{"nuance":"SOC","nom":"<NAME>","voix":146},{"nuance":"REM","nom":"<NAME>","voix":135}]} | 91 |
575 | <reponame>mghgroup/Glide-Browser
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import base64
import json
import unittest
from blinkpy.common.host_mock import MockHost
from blinkpy.w3c.chromium_commit_mock import MockChromiumCommit
from blinkpy.w3c.common import EXPORT_PR_LABEL
from blinkpy.w3c.wpt_github import MAX_PR_HISTORY_WINDOW, GitHubError, MergeError, PullRequest, WPTGitHub
class WPTGitHubTest(unittest.TestCase):
def generate_pr_item(self, pr_number, state='closed'):
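        # Minimal GitHub search-API item: only the fields these tests and WPTGitHub
        # care about (number, state, title, body and the export label).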
return {
'title': 'Foobar',
'number': pr_number,
'body': 'description',
'state': state,
'labels': [{
'name': EXPORT_PR_LABEL
}]
}
def setUp(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>')
def test_init(self):
self.assertEqual(self.wpt_github.user, 'rutabaga')
self.assertEqual(self.wpt_github.token, '<PASSWORD>')
def test_constructor_throws_on_pr_history_window_too_large(self):
with self.assertRaises(ValueError):
self.wpt_github = WPTGitHub(
MockHost(),
user='rutabaga',
token='<PASSWORD>',
pr_history_window=MAX_PR_HISTORY_WINDOW + 1)
def test_auth_token(self):
self.assertEqual(self.wpt_github.auth_token(),
base64.encodestring('rutabaga:decafbad').strip())
def test_extract_link_next(self):
link_header = (
'<https://api.github.com/user/repos?page=1&per_page=100>; rel="first", '
'<https://api.github.com/user/repos?page=2&per_page=100>; rel="prev", '
'<https://api.github.com/user/repos?page=4&per_page=100>; rel="next", '
'<https://api.github.com/user/repos?page=50&per_page=100>; rel="last"'
)
self.assertEqual(
self.wpt_github.extract_link_next(link_header),
'/user/repos?page=4&per_page=100')
def test_extract_link_next_not_found(self):
self.assertIsNone(self.wpt_github.extract_link_next(''))
def test_recent_failing_chromium_exports_single_page(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>', pr_history_window=1)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'headers': {
'Link': ''
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(1)]
})
},
]
self.assertEqual(
len(self.wpt_github.recent_failing_chromium_exports()), 1)
def test_recent_failing_chromium_exports_all_pages(self):
self.wpt_github = WPTGitHub(MockHost(),
user='rutabaga',
token='<PASSWORD>',
pr_history_window=1)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'headers': {
'Link':
'<https://api.github.com/resources?page=2>; rel="next"'
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(1)]
})
},
{
'status_code':
200,
'headers': {
'Link': ''
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(2)]
})
},
]
self.assertEqual(
len(self.wpt_github.recent_failing_chromium_exports()), 2)
def test_recent_failing_chromium_exports_throws_github_error(self):
self.wpt_github.host.web.responses = [
{
'status_code': 204
},
]
with self.assertRaises(GitHubError):
self.wpt_github.recent_failing_chromium_exports()
def test_all_pull_requests_single_page(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>', pr_history_window=1)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'headers': {
'Link': ''
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(1)]
})
},
]
self.assertEqual(len(self.wpt_github.all_pull_requests()), 1)
def test_all_pull_requests_all_pages(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>', pr_history_window=2)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'headers': {
'Link':
'<https://api.github.com/resources?page=2>; rel="next"'
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(1)]
})
},
{
'status_code':
200,
'headers': {
'Link': ''
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(2)]
})
},
]
self.assertEqual(len(self.wpt_github.all_pull_requests()), 2)
def test_all_pull_requests_reaches_pr_history_window(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>', pr_history_window=2)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'headers': {
'Link':
'<https://api.github.com/resources?page=2>; rel="next"'
},
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(1)]
})
},
{
'status_code':
200,
'headers': {
'Link': ''
},
'body':
json.dumps({
'incomplete_results':
False,
'items':
[self.generate_pr_item(2),
self.generate_pr_item(3)]
})
},
]
self.assertEqual(len(self.wpt_github.all_pull_requests()), 2)
def test_all_pull_requests_throws_github_error_on_non_200(self):
self.wpt_github.host.web.responses = [
{
'status_code': 204
},
]
with self.assertRaises(GitHubError):
self.wpt_github.all_pull_requests()
def test_all_pull_requests_throws_github_error_when_incomplete(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>', pr_history_window=1)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'body':
json.dumps({
'incomplete_results': True,
'items': [self.generate_pr_item(1)]
})
},
]
with self.assertRaises(GitHubError):
self.wpt_github.all_pull_requests()
def test_all_pull_requests_throws_github_error_when_too_few_prs(self):
self.wpt_github = WPTGitHub(
MockHost(), user='rutabaga', token='<PASSWORD>bad', pr_history_window=2)
self.wpt_github.host.web.responses = [
{
'status_code':
200,
'body':
json.dumps({
'incomplete_results': False,
'items': [self.generate_pr_item(1)]
})
},
]
with self.assertRaises(GitHubError):
self.wpt_github.all_pull_requests()
def test_create_pr_success(self):
self.wpt_github.host.web.responses = [
{
'status_code': 201,
'body': json.dumps({
'number': 1234
})
},
]
self.assertEqual(
self.wpt_github.create_pr('branch', 'title', 'body'), 1234)
def test_create_pr_throws_github_error_on_non_201(self):
self.wpt_github.host.web.responses = [
{
'status_code': 200
},
]
with self.assertRaises(GitHubError):
self.wpt_github.create_pr('branch', 'title', 'body')
def test_branch_check_runs_single_page(self):
self.wpt_github = WPTGitHub(MockHost(),
user='rutabaga',
token='<PASSWORD>',
pr_history_window=1)
self.wpt_github.host.web.responses = [
{
'status_code': 200,
'headers': {
'Link': ''
},
'body': json.dumps({'check_runs': [{
'conclusion': 'success'
}]})
},
]
self.assertEqual(
self.wpt_github.get_branch_check_runs('1')[0]['conclusion'],
'success')
def test_branch_check_runs_all_pages(self):
self.wpt_github = WPTGitHub(MockHost(),
user='rutabaga',
token='<PASSWORD>',
pr_history_window=1)
self.wpt_github.host.web.responses = [
{
'status_code': 200,
'headers': {
'Link':
'<https://api.github.com/resources?page=2>; rel="next"'
},
'body': json.dumps({'check_runs': [{
'conclusion': 'success'
}]})
},
{
'status_code': 200,
'headers': {
'Link': ''
},
'body': json.dumps({'check_runs': [{
'conclusion': 'failure'
}]})
},
]
check_runs = self.wpt_github.get_branch_check_runs('1')
self.assertEqual(check_runs[0]['conclusion'], 'success')
self.assertEqual(check_runs[1]['conclusion'], 'failure')
def test_get_pr_branch(self):
self.wpt_github.host.web.responses = [
{
'status_code': 200,
'body': json.dumps({
'head': {
'ref': 'fake_branch'
}
})
},
]
self.assertEqual(self.wpt_github.get_pr_branch(1234), 'fake_branch')
def test_is_pr_merged_receives_204(self):
self.wpt_github.host.web.responses = [
{
'status_code': 204
},
]
self.assertTrue(self.wpt_github.is_pr_merged(1234))
def test_is_pr_merged_receives_404(self):
self.wpt_github.host.web.responses = [
{
'status_code': 404
},
]
self.assertFalse(self.wpt_github.is_pr_merged(1234))
def test_merge_pr_success(self):
self.wpt_github.host.web.responses = [
{
'status_code': 200
},
]
self.wpt_github.merge_pr(1234)
def test_merge_pr_throws_merge_error_on_405(self):
self.wpt_github.host.web.responses = [
{
'status_code': 405
},
]
with self.assertRaises(MergeError):
self.wpt_github.merge_pr(5678)
def test_remove_label_throws_github_error_on_non_200_or_204(self):
self.wpt_github.host.web.responses = [
{
'status_code': 201
},
]
with self.assertRaises(GitHubError):
self.wpt_github.remove_label(1234, 'rutabaga')
def test_delete_remote_branch_throws_github_error_on_non_204(self):
self.wpt_github.host.web.responses = [
{
'status_code': 200
},
]
with self.assertRaises(GitHubError):
self.wpt_github.delete_remote_branch('rutabaga')
def test_add_comment_throws_github_error_on_non_201(self):
self.wpt_github.host.web.responses = [
{
'status_code': 200
},
]
with self.assertRaises(GitHubError):
self.wpt_github.add_comment(123, 'rutabaga')
def test_pr_for_chromium_commit_change_id_only(self):
self.wpt_github.all_pull_requests = lambda: [
PullRequest('PR1', 1, 'body\nChange-Id: I00c0ffee', 'open', []),
PullRequest('PR2', 2, 'body\nChange-Id: I00decade', 'open', []), ]
chromium_commit = MockChromiumCommit(
MockHost(),
change_id='I00decade',
position='refs/heads/master@{#10}')
pull_request = self.wpt_github.pr_for_chromium_commit(chromium_commit)
self.assertEqual(pull_request.number, 2)
def test_pr_for_chromium_commit_prefers_change_id(self):
self.wpt_github.all_pull_requests = lambda: [
PullRequest(
'PR1', 1,
'body\nChange-Id: I00c0ffee\nCr-Commit-Position: refs/heads/master@{#10}',
'open', []),
PullRequest(
'PR2', 2,
'body\nChange-Id: I00decade\nCr-Commit-Position: refs/heads/master@{#33}',
'open', []),
]
chromium_commit = MockChromiumCommit(
MockHost(),
change_id='I00decade',
position='refs/heads/master@{#10}')
pull_request = self.wpt_github.pr_for_chromium_commit(chromium_commit)
self.assertEqual(pull_request.number, 2)
def test_pr_for_chromium_commit_multiple_change_ids(self):
self.wpt_github.all_pull_requests = lambda: [
PullRequest('PR1', 1,
'body\nChange-Id: I00c0ffee\nChange-Id: I00decade',
'open', []),
]
chromium_commit = MockChromiumCommit(
MockHost(),
change_id='I00c0ffee',
position='refs/heads/master@{#10}')
pull_request = self.wpt_github.pr_for_chromium_commit(chromium_commit)
self.assertEqual(pull_request.number, 1)
chromium_commit = MockChromiumCommit(
MockHost(),
change_id='I00decade',
position='refs/heads/master@{#33}')
pull_request = self.wpt_github.pr_for_chromium_commit(chromium_commit)
self.assertEqual(pull_request.number, 1)
| 9,019 |
645 | <gh_stars>100-1000
package monkeylord.XServer.handler;
import java.io.Console;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.regex.Pattern;
import monkeylord.XServer.XServer;
import monkeylord.XServer.objectparser.GenericParser;
import monkeylord.XServer.objectparser.StoredObjectParser;
import monkeylord.XServer.utils.Utils;
import static monkeylord.XServer.XServer.parsers;
// Handles object-related operations
public class ObjectHandler {
public static HashMap<String, Object> objects = new HashMap<String, Object>();
public static Object storeObject(Object obj, String name) {
return objects.put(name, obj);
}
public static String saveObject(Object obj){
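        // Serialized form is "<JVM type signature>#<payload>"; parseObject() splits on
        // the first '#' and dispatches to the matching parser, falling back to the
        // "store" parser when no type-specific parser is registered.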
if(obj==null)return "Null";
XServer.ObjectParser parser = parsers.get(Utils.getTypeSignature(obj.getClass()));
if(parser==null)parser=parsers.get("store");
return Utils.getTypeSignature(obj.getClass())+"#"+parser.generate(obj);
}
public static String briefObject(Object obj){
if(obj==null)return "Null";
XServer.ObjectParser parser = parsers.get(Utils.getTypeSignature(obj.getClass()));
if(parser==null)parser=parsers.get("generic");
return Utils.getTypeSignature(obj.getClass())+"#"+parser.generate(obj);
}
public static Object getObject(String name) {
return objects.get(name);
}
public static Object parseObject(String Object){
if(Object.equals("Null"))return null;
if(Object==null)return null;
if(Object.indexOf("#")<0)return null;
String type=Object.substring(0,Object.indexOf("#"));
String raw=Object.substring(Object.indexOf("#")+1);
XServer.ObjectParser parser = parsers.get(type);
if(parser==null)parser=parsers.get("store");
return parser.parse(raw);
}
public static Object[] getObjects(String name, String type) {
return null;
}
public static Object removeObject(String name) {
return objects.remove(name);
}
public static Object removeObject(Object object) {
for (Map.Entry entry : objects.entrySet()) {
if (entry.getValue().equals(object)) return objects.remove(entry.getKey());
}
return null;
}
}
| 871 |
435 | from __future__ import absolute_import, print_function, division
from petl.test.failonerror import assert_failonerror
from petl.test.helpers import ieq
from petl.transform.conversions import convert, convertall, convertnumbers, \
replace, update, format, interpolate
from functools import partial
def test_convert():
table1 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
# test the simplest style - single field, lambda function
table2 = convert(table1, 'foo', lambda s: s.lower())
expect2 = (('foo', 'bar', 'baz'),
('a', 1, 2),
('b', '2', '3.4'),
(u'b', u'3', u'7.8', True),
('d', 'xyz', 9.0),
('e', None))
ieq(expect2, table2)
ieq(expect2, table2)
# test single field with method call
table3 = convert(table1, 'foo', 'lower')
expect3 = expect2
ieq(expect3, table3)
# test single field with method call with arguments
table4 = convert(table1, 'foo', 'replace', 'B', 'BB')
expect4 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('BB', '2', '3.4'),
(u'BB', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
ieq(expect4, table4)
# test multiple fields with the same conversion
table5 = convert(table1, ('bar', 'baz'), str)
expect5 = (('foo', 'bar', 'baz'),
('A', '1', '2'),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', '9.0'),
('E', 'None'))
ieq(expect5, table5)
# test convert with dictionary
table6 = convert(table1, 'foo', {'A': 'Z', 'B': 'Y'})
expect6 = (('foo', 'bar', 'baz'),
('Z', 1, 2),
('Y', '2', '3.4'),
(u'Y', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
ieq(expect6, table6)
def test_convert_empty():
table = (('foo', 'bar'),)
expect = (('foo', 'bar'),)
actual = convert(table, 'foo', int)
ieq(expect, actual)
def test_convert_indexes():
table1 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
# test the simplest style - single field, lambda function
table2 = convert(table1, 0, lambda s: s.lower())
expect2 = (('foo', 'bar', 'baz'),
('a', 1, 2),
('b', '2', '3.4'),
(u'b', u'3', u'7.8', True),
('d', 'xyz', 9.0),
('e', None))
ieq(expect2, table2)
ieq(expect2, table2)
# test single field with method call
table3 = convert(table1, 0, 'lower')
expect3 = expect2
ieq(expect3, table3)
# test single field with method call with arguments
table4 = convert(table1, 0, 'replace', 'B', 'BB')
expect4 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('BB', '2', '3.4'),
(u'BB', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
ieq(expect4, table4)
# test multiple fields with the same conversion
table5a = convert(table1, (1, 2), str)
table5b = convert(table1, (1, 'baz'), str)
table5c = convert(table1, ('bar', 2), str)
table5d = convert(table1, list(range(1, 3)), str)
expect5 = (('foo', 'bar', 'baz'),
('A', '1', '2'),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', '9.0'),
('E', 'None'))
ieq(expect5, table5a)
ieq(expect5, table5b)
ieq(expect5, table5c)
ieq(expect5, table5d)
# test convert with dictionary
table6 = convert(table1, 0, {'A': 'Z', 'B': 'Y'})
expect6 = (('foo', 'bar', 'baz'),
('Z', 1, 2),
('Y', '2', '3.4'),
(u'Y', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
ieq(expect6, table6)
def test_fieldconvert():
table1 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
# test the style where the converters functions are passed in as a
# dictionary
converters = {'foo': str, 'bar': int, 'baz': float}
table5 = convert(table1, converters, errorvalue='error')
expect5 = (('foo', 'bar', 'baz'),
('A', 1, 2.0),
('B', 2, 3.4),
('B', 3, 7.8, True), # N.B., long rows are preserved
('D', 'error', 9.0),
('E', 'error')) # N.B., short rows are preserved
ieq(expect5, table5)
# test the style where the converters functions are added one at a time
table6 = convert(table1, errorvalue='err')
table6['foo'] = str
table6['bar'] = int
table6['baz'] = float
expect6 = (('foo', 'bar', 'baz'),
('A', 1, 2.0),
('B', 2, 3.4),
('B', 3, 7.8, True),
('D', 'err', 9.0),
('E', 'err'))
ieq(expect6, table6)
# test some different converters
table7 = convert(table1)
table7['foo'] = 'replace', 'B', 'BB'
expect7 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('BB', '2', '3.4'),
(u'BB', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
ieq(expect7, table7)
# test the style where the converters functions are passed in as a list
converters = [str, int, float]
table8 = convert(table1, converters, errorvalue='error')
expect8 = (('foo', 'bar', 'baz'),
('A', 1, 2.0),
('B', 2, 3.4),
('B', 3, 7.8, True), # N.B., long rows are preserved
('D', 'error', 9.0),
('E', 'error')) # N.B., short rows are preserved
ieq(expect8, table8)
# test the style where the converters functions are passed in as a list
converters = [str, None, float]
table9 = convert(table1, converters, errorvalue='error')
expect9 = (('foo', 'bar', 'baz'),
('A', 1, 2.0),
('B', '2', 3.4),
('B', u'3', 7.8, True), # N.B., long rows are preserved
('D', 'xyz', 9.0),
('E', None)) # N.B., short rows are preserved
ieq(expect9, table9)
def test_convertall():
table1 = (('foo', 'bar', 'baz'),
('1', '3', '9'),
('2', '1', '7'))
table2 = convertall(table1, int)
expect2 = (('foo', 'bar', 'baz'),
(1, 3, 9),
(2, 1, 7))
ieq(expect2, table2)
ieq(expect2, table2)
# test with non-string field names
table1 = (('foo', 3, 4),
(2, 2, 2))
table2 = convertall(table1, lambda x: x**2)
expect = (('foo', 3, 4),
(4, 4, 4))
ieq(expect, table2)
def test_convertnumbers():
table1 = (('foo', 'bar', 'baz', 'quux'),
('1', '3.0', '9+3j', 'aaa'),
('2', '1.3', '7+2j', None))
table2 = convertnumbers(table1)
expect2 = (('foo', 'bar', 'baz', 'quux'),
(1, 3.0, 9+3j, 'aaa'),
(2, 1.3, 7+2j, None))
ieq(expect2, table2)
ieq(expect2, table2)
def test_convert_translate():
table = (('foo', 'bar'),
('M', 12),
('F', 34),
('-', 56))
trans = {'M': 'male', 'F': 'female'}
result = convert(table, 'foo', trans)
expectation = (('foo', 'bar'),
('male', 12),
('female', 34),
('-', 56))
ieq(expectation, result)
def test_convert_with_row():
table = (('foo', 'bar'),
('a', 1),
('b', 2))
expect = (('foo', 'bar'),
('a', 'A'),
('b', 'B'))
actual = convert(table, 'bar',
lambda v, row: row.foo.upper(),
pass_row=True)
ieq(expect, actual)
def test_convert_with_row_backwards_compat():
table = (('foo', 'bar'),
(' a ', 1),
(' b ', 2))
expect = (('foo', 'bar'),
('a', 1),
('b', 2))
actual = convert(table, 'foo', 'strip')
ieq(expect, actual)
def test_convert_where():
tbl1 = (('foo', 'bar'),
('a', 1),
('b', 2))
expect = (('foo', 'bar'),
('a', 1),
('b', 4))
actual = convert(tbl1, 'bar', lambda v: v*2, where=lambda r: r.foo == 'b')
ieq(expect, actual)
ieq(expect, actual)
actual = convert(tbl1, 'bar', lambda v: v*2, where="{foo} == 'b'")
ieq(expect, actual)
ieq(expect, actual)
def test_convert_failonerror():
input_ = (('foo',), ('A',), (1,))
cvt_ = {'foo': 'lower'}
expect_ = (('foo',), ('a',), (None,))
assert_failonerror(
input_fn=partial(convert, input_, cvt_),
expected_output=expect_)
def test_replace_where():
tbl1 = (('foo', 'bar'),
('a', 1),
('b', 2))
expect = (('foo', 'bar'),
('a', 1),
('b', 4))
actual = replace(tbl1, 'bar', 2, 4, where=lambda r: r.foo == 'b')
ieq(expect, actual)
ieq(expect, actual)
actual = replace(tbl1, 'bar', 2, 4, where="{foo} == 'b'")
ieq(expect, actual)
ieq(expect, actual)
def test_update():
table1 = (('foo', 'bar', 'baz'),
('A', 1, 2),
('B', '2', '3.4'),
(u'B', u'3', u'7.8', True),
('D', 'xyz', 9.0),
('E', None))
table2 = update(table1, 'foo', 'X')
expect2 = (('foo', 'bar', 'baz'),
('X', 1, 2),
('X', '2', '3.4'),
('X', u'3', u'7.8', True),
('X', 'xyz', 9.0),
('X', None))
ieq(expect2, table2)
ieq(expect2, table2)
def test_replace_unhashable():
table1 = (('foo', 'bar'), ('a', ['b']), ('c', None))
expect = (('foo', 'bar'), ('a', ['b']), ('c', []))
actual = replace(table1, 'bar', None, [])
ieq(expect, actual)
def test_format():
table = (('foo', 'bar'),
('a', 1),
('b', 2))
expect = (('foo', 'bar'),
('a', '01'),
('b', '02'))
actual = format(table, 'bar', '{0:02d}')
ieq(expect, actual)
ieq(expect, actual)
def test_interpolate():
table = (('foo', 'bar'),
('a', 1),
('b', 2))
expect = (('foo', 'bar'),
('a', '01'),
('b', '02'))
actual = interpolate(table, 'bar', '%02d')
ieq(expect, actual)
ieq(expect, actual)
| 5,887 |
8,454 | <filename>UltimateRecyclerView/ultimaterecyclerview/src/main/java/com/marshalchen/ultimaterecyclerview/quickAdapter/extBaseAdapter/QuickAdapter.java
package com.marshalchen.ultimaterecyclerview.quickAdapter.extBaseAdapter;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import com.marshalchen.ultimaterecyclerview.quickAdapter.extUtimateRVA.MultiItemTypeSupport;
import java.util.ArrayList;
import java.util.List;
import static com.marshalchen.ultimaterecyclerview.quickAdapter.extBaseAdapter.BaseAdapterHelper.get;
/**
* Abstraction class of a BaseAdapter in which you only need to provide the
* convert() implementation.
* Using the provided BaseAdapterHelper, your code is minimalist.
*
* @param <T> The type of the items in the list.
*/
public abstract class QuickAdapter<T> extends
BaseQuickAdapter<T, BaseAdapterHelper> {
/**
* Create a QuickAdapter.
*
* @param context The context.
* @param layoutResId The layout resource id of each item.
*/
public QuickAdapter(Context context, int layoutResId) {
super(context, layoutResId);
}
/**
* Same as QuickAdapter#QuickAdapter(Context,int) but with some
* initialization data.
*
* @param context The context.
* @param layoutResId The layout resource id of each item.
* @param data A new list is created out of this one to avoid mutable list
*/
public QuickAdapter(Context context, int layoutResId, List<T> data) {
super(context, layoutResId, data);
}
public QuickAdapter(Context context, ArrayList<T> data,
MultiItemTypeSupport<T> multiItemSupport) {
super(context, data, multiItemSupport);
}
protected BaseAdapterHelper getAdapterHelper(int position,
View convertView,
ViewGroup parent) {
if (mMultiItemSupport != null) {
return get(context,
convertView,
parent,
mMultiItemSupport.getLayoutId(position, data.get(position)),
position);
} else {
return get(context, convertView, parent, layoutResId, position);
}
}
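    /*
     * Illustrative subclass sketch (not part of the original file); it assumes the
     * abstract convert(BaseAdapterHelper, T) hook declared by BaseQuickAdapter and
     * BaseAdapterHelper's setText() helper:
     *
     *   public class NameAdapter extends QuickAdapter<String> {
     *       public NameAdapter(Context context) {
     *           super(context, android.R.layout.simple_list_item_1);
     *       }
     *       @Override
     *       protected void convert(BaseAdapterHelper helper, String item) {
     *           helper.setText(android.R.id.text1, item);
     *       }
     *   }
     */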
} | 915 |
30,023 | """Test button of ONVIF integration."""
from unittest.mock import AsyncMock
from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, ButtonDeviceClass
from homeassistant.const import ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, STATE_UNKNOWN
from homeassistant.helpers import entity_registry as er
from . import MAC, setup_onvif_integration
async def test_reboot_button(hass):
"""Test states of the Reboot button."""
await setup_onvif_integration(hass)
state = hass.states.get("button.testcamera_reboot")
assert state
assert state.state == STATE_UNKNOWN
assert state.attributes.get(ATTR_DEVICE_CLASS) == ButtonDeviceClass.RESTART
registry = er.async_get(hass)
entry = registry.async_get("button.testcamera_reboot")
assert entry
assert entry.unique_id == f"{MAC}_reboot"
async def test_reboot_button_press(hass):
"""Test Reboot button press."""
_, camera, _ = await setup_onvif_integration(hass)
devicemgmt = camera.create_devicemgmt_service()
devicemgmt.SystemReboot = AsyncMock(return_value=True)
await hass.services.async_call(
BUTTON_DOMAIN,
"press",
{ATTR_ENTITY_ID: "button.testcamera_reboot"},
blocking=True,
)
await hass.async_block_till_done()
devicemgmt.SystemReboot.assert_called_once()
| 501 |
307 | /*
*********************************************************************************************************
*
*	Module name : Voltage meter
*	File name : status_voltage_meter.h
*
*********************************************************************************************************
*/
#ifndef _STATUS_VOLTAGE_METER_H_
#define _STATUS_VOLTAGE_METER_H_
void status_VoltageMeter(void);
#endif
/***************************** Armfly Electronics www.armfly.com (END OF FILE) *********************************/
| 167 |
2,996 | // Copyright 2021 The Terasology Foundation
// SPDX-License-Identifier: Apache-2.0
package org.terasology.engine.rendering.assets.shader;
/**
* TODO: write javadoc
*/
public enum ParamType {
sampler2D(true),
samplerCube(true);
private boolean texture;
ParamType(boolean texture) {
this.texture = texture;
}
public boolean isTexture() {
return texture;
}
}
| 152 |
571 | <filename>ufora/FORA/Native/TypedNativeExpressionIntegerBehaviors.hpp
/***************************************************************************
Copyright 2015 Ufora Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
****************************************************************************/
#pragma once
#include "TypedNativeExpression.fwd.hpp"
#include "NativeCode.hppml"
#include "TypedNativeExpressionBehaviorCategories.hpp"
#include "TypedNativeExpressionConstantConversion.hpp"
template<class T>
class TypedNativeExpressionBuiltinBehaviors<T, TypedNativeExpressionBehaviorIntegerCategory> {
public:
TypedNativeExpressionBuiltinBehaviors(const NativeExpression& e);
TypedNativeExpressionBuiltinBehaviors(const T& in);
template<class target_type>
operator TypedNativeExpression<target_type>() const;
NativeExpression getExpression() const;
private:
NativeExpression mExpression;
};
| 384 |
854 | <filename>solutions/LeetCode/C++/13.cpp
__________________________________________________________________________________________________
12ms
static int x=[](){
std::ios::sync_with_stdio(false);
cin.tie(NULL);
return 0;
}();
class Solution {
public:
int romanToInt(string &s) {
int num[26] = {0,0,100,500,0,0,0,0,1,0,0,50,1000,0,0,0,0,0,0,0,0,5,0,10,0,0};
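        // Scan right to left: once a 'V'/'X' ('L'/'C', 'D'/'M') has been seen, the table
        // entry for 'I' ('X', 'C') is flipped negative, so a smaller numeral written in
        // front is subtracted, which covers IV, IX, XL, XC, CD and CM.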
int res = 0;
int len = s.size();
for(int i=0;i<s.size();i++){
res += num[s[len-i-1]-'A'];
if(s[len-i-1]=='V' || s[len-i-1]=='X') num['I'-'A'] = -1;
if(s[len-i-1]=='L' || s[len-i-1]=='C') num['X'-'A'] = -10;
if(s[len-i-1]=='D' || s[len-i-1]=='M') num['C'-'A'] = -100;
}
return res;
}
};
__________________________________________________________________________________________________
16ms
class Solution {
public:
int charValue(char c)
{
switch(c){
case 'I' : return 1;
case 'V' : return 5 ;
case 'X' : return 10;
case 'L' : return 50;
case 'C' : return 100 ;
case 'D' : return 500;
case 'M' : return 1000;
default :
return 0;
}
return 0;
}
int romanToInt(string s)
{
int result=0;
if(s.empty()) return 0;
if(s.size()==1) return charValue(s[0]); // only 1
int temp=charValue(s[0]); // store first char
for(int i =1;i<s.size();++i)
{
int v = charValue(s[i]); // from 1 calcuate the char value
if(v>temp) // if its greater then IV we have to minus for the value 5-1
{
result=result-temp;
temp=v;
}
else
{
result=result+temp; // VI we have to add 5+1
temp=v;
}
}
result+=temp;
return result;
}
};
__________________________________________________________________________________________________
20ms
namespace {
int getTokenValue(const char c) {
switch (c) {
case 'I':
return 1;
case 'V':
return 5;
case 'X':
return 10;
case 'L':
return 50;
case 'C':
return 100;
case 'D':
return 500;
case 'M':
return 1000;
default:
return 0;
}
}
}
class Solution {
public:
int romanToInt(string s) {
vector<int> values;
values.resize(s.size());
std::transform(s.begin(), s.end(), values.begin(), &getTokenValue);
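        // A value that is exactly one fifth or one tenth of its right-hand neighbour belongs to a
        // subtractive pair (IV, IX, XL, XC, CD, CM), so it is negated in place before accumulating.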
for (int i = 1; i < values.size(); ++i) {
if ((values[i-1] == values[i] / 10) ||
(values[i-1] == values[i] / 5)) {
values[i-1] *= -1;
}
}
return std::accumulate(values.begin(), values.end(), 0);
}
};
__________________________________________________________________________________________________
8204 kb
static bool _foo = ios::sync_with_stdio(false);
static ostream* _bar = cin.tie(NULL);
class Solution
{
public:
int romanToInt(string s)
{
int va[]{1,5,10,50,100,500,1000};
string sy="IVXLCDM";
int t[s.size()];
for(int i = 0; i<s.size();i++)
{
for(int j=0;j<7;j++)
{
if(s[i]==sy[j])
{
t[i]=j;
break;
}
}
}
int re = 0;
for(int i=0;i<s.size();i++)
{
re+=va[t[i]];
if(i!=s.size()-1&&t[i]<t[i+1])
{
re += va[t[i+1]]-2*va[t[i]];
i++;
}
}
return re;
}
};
__________________________________________________________________________________________________
8344 kb
static int x = [](){
ios::sync_with_stdio(false);
cin.tie(NULL);
cout.tie(NULL);
return NULL;
}();
class Solution {
public:
int romanToInt(string s) {
enum prior { I = 1, IV = 4, V=5, IX = 9, X=10, XL = 40, L=50, XC = 90, C=100,CD = 400, D=500,CM= 900, M=1000};
int ans = 0;
int i = 0, l = s.length();
while(i<l){
char x = s[i];
switch(x){
case 'M':
ans+=M;
i++;
continue;
case 'D':
ans+=D;
i++;
continue;
case 'C':
if(i+1<l && s[i+1]=='D'){
ans+=CD;
i+=2;
continue;
}
else if(i+1<l && s[i+1] == 'M'){
ans+=CM;
i+=2;
continue;
}
else{
ans+=C;
i++;
continue;
}
case 'L':
ans+=L;
i++;
continue;
case 'X':
if(i+1<l && s[i+1]=='L'){
ans+=XL;
i+=2;
continue;
}
else if(i+1<l && s[i+1] == 'C'){
ans+=XC;
i+=2;
continue;
}
else{
ans+=X;
i++;
continue;
}
case 'V':
ans+=V;
i++;
continue;
case 'I':
if(i+1<l && s[i+1]=='V'){
ans+=IV;
i+=2;
continue;
}
else if(i+1<l && s[i+1] == 'X'){
ans+=IX;
i+=2;
continue;
}
else{
ans+=I;
i++;
continue;
}
}
}
return ans;
}
};
__________________________________________________________________________________________________
| 4,002 |
317 | //
// test-HeaderParser.cc
// snowcrash
//
// Created by <NAME> on 5/22/13.
// Copyright (c) 2013 Apiary Inc. All rights reserved.
//
#include "snowcrashtest.h"
#include "HeadersParser.h"
using namespace snowcrash;
using namespace snowcrashtest;
const mdp::ByteBuffer HeadersFixture
= "+ Headers\n"
"\n"
" Content-Type: application/json\n"
" X-My-Header: Hello World!\n";
const mdp::ByteBuffer HeadersSignatureContentFixture
= "+ Headers\n"
" Content-Type: application/json\n"
" X-My-Header: Hello World!\n";
TEST_CASE("recognize headers signature", "[headers]")
{
mdp::MarkdownParser markdownParser;
mdp::MarkdownNode markdownAST;
markdownParser.parse(HeadersFixture, markdownAST);
REQUIRE(!markdownAST.children().empty());
SectionType sectionType = SectionProcessor<Headers>::sectionType(markdownAST.children().begin());
REQUIRE(sectionType == HeadersSectionType);
}
TEST_CASE("parse headers fixture", "[headers]")
{
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(
HeadersFixture, HeadersSectionType, headers, ExportSourcemapOption);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.empty());
REQUIRE(headers.node.size() == 2);
REQUIRE(headers.node[0].first == "Content-Type");
REQUIRE(headers.node[0].second == "application/json");
REQUIRE(headers.node[1].first == "X-My-Header");
REQUIRE(headers.node[1].second == "Hello World!");
REQUIRE(headers.sourceMap.collection.size() == 2);
SourceMapHelper::check(headers.sourceMap.collection[0].sourceMap, 19, 30);
SourceMapHelper::check(headers.sourceMap.collection[1].sourceMap, 58, 25);
}
// In real parsing this fixture is never parsed as a "one block sourcemap"; that only happens here,
// in the context of HeadersParser testing.
// The test is not completely removed; instead, an equivalent test is placed in the context of the
// ResourceParser (as in the real world).
TEST_CASE("parse headers fixture with no empty line between signature and content", "[headers]")
{
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(
HeadersSignatureContentFixture, HeadersSectionType, headers, ExportSourcemapOption);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 1); // content in signature
REQUIRE(headers.node.size() == 2);
REQUIRE(headers.node[0].first == "Content-Type");
REQUIRE(headers.node[0].second == "application/json");
REQUIRE(headers.node[1].first == "X-My-Header");
REQUIRE(headers.node[1].second == "Hello World!");
REQUIRE(headers.sourceMap.collection.size() == 2);
SourceMapHelper::check(headers.sourceMap.collection[0].sourceMap, 18, 30);
SourceMapHelper::check(headers.sourceMap.collection[1].sourceMap, 57, 25);
}
TEST_CASE("parse malformed headers fixture", "[headers]")
{
mdp::ByteBuffer source = HeadersFixture;
source += " X-Custom-Header:\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers, ExportSourcemapOption);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 1); // malformed header
REQUIRE(headers.node.size() == 3);
REQUIRE(headers.node[0].first == "Content-Type");
REQUIRE(headers.node[0].second == "application/json");
REQUIRE(headers.node[1].first == "X-My-Header");
REQUIRE(headers.node[1].second == "Hello World!");
REQUIRE(headers.node[2].first == "X-Custom-Header");
REQUIRE(headers.node[2].second == "");
REQUIRE(headers.sourceMap.collection.size() == 3);
REQUIRE(headers.sourceMap.collection[0].sourceMap.size() == 1);
SourceMapHelper::check(headers.sourceMap.collection[0].sourceMap, 19, 30);
REQUIRE(headers.sourceMap.collection[1].sourceMap.size() == 1);
SourceMapHelper::check(headers.sourceMap.collection[1].sourceMap, 58, 25);
REQUIRE(headers.sourceMap.collection[2].sourceMap.size() == 1);
SourceMapHelper::check(headers.sourceMap.collection[2].sourceMap, 92, 16);
}
TEST_CASE("Parse header section composed of multiple blocks", "[headers]")
{
// Blueprint in question:
// R"(
//+ Headers
// Content-Type : text/plain
//
// B : 100
//
// X-My-Header: 42
//)";
mdp::ByteBuffer source = "+ Headers\n\n";
source += " Content-Type : text/plain\n\n";
source += " B : 100\n\n";
source += " X-My-Header: 42\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers, ExportSourcemapOption);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 1); // not a code block
REQUIRE(headers.node.size() == 3);
REQUIRE(headers.node[0].first == "Content-Type");
REQUIRE(headers.node[0].second == "text/plain");
REQUIRE(headers.node[1].first == "B");
REQUIRE(headers.node[1].second == "100");
REQUIRE(headers.node[2].first == "X-My-Header");
REQUIRE(headers.node[2].second == "42");
REQUIRE(headers.sourceMap.collection.size() == 3);
SourceMapHelper::check(headers.sourceMap.collection[0].sourceMap, 19, 27);
SourceMapHelper::check(headers.sourceMap.collection[1].sourceMap, 52, 7);
SourceMapHelper::check(headers.sourceMap.collection[2].sourceMap, 69, 15);
}
TEST_CASE("Parse header section with missing headers", "[headers]")
{
mdp::ByteBuffer source = "+ Headers\n\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers, ExportSourcemapOption);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 1); // no headers
REQUIRE(headers.node.size() == 0);
REQUIRE(headers.sourceMap.collection.size() == 0);
}
TEST_CASE("Headers parses should return warning on multiple definition of same headers", "[headers][issue][75]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
"\n"
" Content-Type: application/json\n"
" Content-Type: application/json\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 1); // one warning due to multiple declaration same header
REQUIRE(headers.node.size() == 2);
}
TEST_CASE("Parse header section with more same headers Set-Cookie and Link - there should not be warning",
"[headers][issue][75]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
"\n"
" Set-Cookie: abcd\n"
" Set-Cookie: kockaprede\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.empty()); // no warning - multiple definition, but from allowed set
REQUIRE(headers.node.size() == 2);
REQUIRE(headers.node[0].first == "Set-Cookie");
REQUIRE(headers.node[0].second == "abcd");
REQUIRE(headers.node[1].first == "Set-Cookie");
REQUIRE(headers.node[1].second == "kockaprede");
}
TEST_CASE("Headers Filed Name should be case insensitive", "[headers][issue][230]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
"\n"
" Content-Type: application/json\n"
" content-type: application/json\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 1); // one warning - multiple definitions w/ different case sensitivity
REQUIRE(headers.node.size() == 2);
REQUIRE(headers.node[0].first == "Content-Type");
REQUIRE(headers.node[0].second == "application/json");
REQUIRE(headers.node[1].first == "content-type");
REQUIRE(headers.node[1].second == "application/json");
}
TEST_CASE(
"Additional test for Header name insensitivity combined with allowed multiple headers", "[headers][issue][230]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
"\n"
" Set-cookie: abcd\n"
" Set-Cookie: kockaprede\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.empty()); // no warning - multiple definition, but from allowed set
REQUIRE(headers.node.size() == 2);
REQUIRE(headers.node[0].first == "Set-cookie");
REQUIRE(headers.node[0].second == "abcd");
REQUIRE(headers.node[1].first == "Set-Cookie");
REQUIRE(headers.node[1].second == "kockaprede");
}
TEST_CASE("Missing or wrong placed colon in header definition", "[headers][issue][158]")
{
SECTION("No colon")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" Set-Cookie chocolate cookie\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 1); // warning - header is not defined correctly
REQUIRE(headers.report.warnings[0].message == "missing colon after header name 'Set-Cookie'");
REQUIRE(headers.node[0].first == "Set-Cookie");
REQUIRE(headers.node[0].second == "chocolate cookie");
}
SECTION("Missing colon - but there is another in value")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" Last-Modified Sat, 02 Aug 2014 23:10:05 GMT\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 1); // warning - header is not defined correctly
REQUIRE(headers.report.warnings[0].message == "missing colon after header name 'Last-Modified'");
REQUIRE(headers.node[0].first == "Last-Modified");
REQUIRE(headers.node[0].second == "Sat, 02 Aug 2014 23:10:05 GMT");
}
SECTION("More colons")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" Set-Cookie :: chocolate cookie\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.empty());
REQUIRE(headers.node[0].first == "Set-Cookie");
REQUIRE(headers.node[0].second == ": chocolate cookie");
}
}
TEST_CASE("Allow parse nonvalid headers, provide appropriate warning", "[headers][issue][158]")
{
SECTION("Strange but valid token")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" # : chocolate cookie\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.empty());
REQUIRE(headers.node[0].first == "#");
REQUIRE(headers.node[0].second == "chocolate cookie");
}
SECTION("Header has no value - just name")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" Header:\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 1); // warning - header name is not defined correctly
REQUIRE(headers.report.warnings[0].message == "HTTP header 'Header' has no value");
REQUIRE(headers.node.size() == 1);
REQUIRE(headers.node[0].first == "Header");
REQUIRE(headers.node[0].second == "");
}
SECTION("Header has no value and there is no colon presented ")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" Header\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 2);
REQUIRE(headers.report.warnings[0].message == "missing colon after header name 'Header'");
REQUIRE(headers.report.warnings[1].message == "HTTP header 'Header' has no value");
REQUIRE(headers.node.size() == 1);
REQUIRE(headers.node[0].first == "Header");
REQUIRE(headers.node[0].second == "");
}
}
TEST_CASE("Skip completely invalid headers", "[headers][drafter-issue][382]")
{
SECTION("Invalid characters in header name")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" <Header> : Invalid\n"
" Header : chocolate cookie\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 1); // warning - header name is not defined correctly
REQUIRE(headers.report.warnings[0].message
== "HTTP header name '<Header>' contains illegal character '<' (0x3c) skipping the header");
REQUIRE(headers.node.size() == 1);
REQUIRE(headers.node[0].first == "Header");
REQUIRE(headers.node[0].second == "chocolate cookie");
}
SECTION("Invalid characters in the only header name")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" <Header> : Invalid\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 2); // warning - header name is not defined correctly
REQUIRE(headers.report.warnings[0].message
== "HTTP header name '<Header>' contains illegal character '<' (0x3c) skipping the header");
REQUIRE(headers.report.warnings[1].message == "no valid headers specified");
}
SECTION("Invalid header")
{
const mdp::ByteBuffer source
= "+ Headers\n\n"
" :Header: :\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK); // no error
REQUIRE(headers.report.warnings.size() == 2); // warning - header name is not defined correctly
REQUIRE(headers.report.warnings[0].message
== "unable to parse HTTP header, expected '<header name> : <header value>', one header per line");
REQUIRE(headers.report.warnings[1].message == "no valid headers specified");
}
}
TEST_CASE("Parse headers in codefences", "[headers][codefence]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
" ```\n"
" Set-Cookie: abcd\n"
" ```\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.empty());
REQUIRE(headers.node.size() == 1);
REQUIRE(headers.node[0].first == "Set-Cookie");
REQUIRE(headers.node[0].second == "abcd");
}
TEST_CASE("Parse headers with in the middle codefences", "[headers][codefence]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
" \n"
" a: b\n"
" ```\n"
" \n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.size() == 2);
REQUIRE(headers.node.size() == 2);
REQUIRE(headers.node[0].first == "a");
REQUIRE(headers.node[0].second == "b");
REQUIRE(headers.node[1].first == "```");
REQUIRE(headers.node[1].second.empty());
}
TEST_CASE("Parse headers in codefences with hint", "[headers][codefence]")
{
const mdp::ByteBuffer source
= "+ Headers\n"
" ```html\n"
" Set-Cookie: abcd\n"
" ```\n";
ParseResult<Headers> headers;
SectionParserHelper<Headers, HeadersParser>::parse(source, HeadersSectionType, headers);
REQUIRE(headers.report.error.code == Error::OK);
REQUIRE(headers.report.warnings.empty());
REQUIRE(headers.node.size() == 1);
REQUIRE(headers.node[0].first == "Set-Cookie");
REQUIRE(headers.node[0].second == "abcd");
}
| 6,771 |
2,002 | <filename>dev/Common/AppModel.Package.h<gh_stars>1000+
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See LICENSE in the project root for license information.
#ifndef __APPMODEL_PACKAGE_H
#define __APPMODEL_PACKAGE_H
#include <appmodel.h>
namespace AppModel::Package
{
/// Find all Main+Framework packages in a package family registered to the current user
inline std::vector<std::wstring> FindByFamily(PCWSTR packageFamilyName)
{
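        // Two-call pattern: the first FindPackagesByPackageFamily call only reports the required
        // count and buffer length (ERROR_INSUFFICIENT_BUFFER when packages exist); the second call
        // fills the allocated buffers.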
uint32_t count{};
uint32_t bufferLength{};
const LONG rc{ ::FindPackagesByPackageFamily(packageFamilyName, PACKAGE_FILTER_HEAD | PACKAGE_FILTER_DIRECT, &count, nullptr, &bufferLength, nullptr, nullptr) };
if (rc == ERROR_SUCCESS)
{
// The package family has no packages registered to the user
return std::vector<std::wstring>();
}
else if (rc != ERROR_INSUFFICIENT_BUFFER)
{
THROW_WIN32(rc);
}
auto packageFullNames{ std::make_unique<PWSTR[]>(count) };
auto buffer{ std::make_unique<WCHAR[]>(bufferLength) };
THROW_IF_WIN32_ERROR(::FindPackagesByPackageFamily(packageFamilyName, PACKAGE_FILTER_HEAD | PACKAGE_FILTER_DIRECT, &count, packageFullNames.get(), &bufferLength, buffer.get(), nullptr));
std::vector<std::wstring> packageFullNamesList;
for (UINT32 index = 0; index < count; ++index)
{
const auto packageFullName{ packageFullNames[index] };
packageFullNamesList.push_back(std::wstring(packageFullName));
}
return packageFullNamesList;
}
/// Find all Main+Framework packages in a package family registered to the current user
inline std::vector<std::wstring> FindByFamily(const std::wstring& packageFamilyName)
{
return FindByFamily(packageFamilyName.c_str());
}
}
#endif // __APPMODEL_PACKAGE_H
| 680 |
432 | <reponame>lambdaxymox/DragonFlyBSD<gh_stars>100-1000
/*-
* Copyright (c) 2012 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* $FreeBSD: src/sys/libkern/memcchr.c,v 1.1 2012/01/01 20:26:11 ed Exp $
*
*/
#include <sys/cdefs.h>
#include <sys/libkern.h>
#include <sys/limits.h>
#include <sys/param.h>
/*
* memcchr(): find first character in buffer not matching `c'.
*
* This function performs the complement of memchr(). To provide decent
* performance, this function compares data from the buffer one word at
* a time.
*
* This code is inspired by libc's strlen(), written by <NAME>.
*/
#if LONG_BIT != 32 && LONG_BIT != 64
#error Unsupported word size
#endif
#define LONGPTR_MASK (sizeof(long) - 1)
#define TESTBYTE \
do { \
if (*p != (unsigned char)c) \
goto done; \
p++; \
} while (0)
void *
memcchr(const void *begin, int c, size_t n)
{
const unsigned long *lp;
const unsigned char *p, *end;
unsigned long word;
/* Four or eight repetitions of `c'. */
word = (unsigned char)c;
word |= word << 8;
word |= word << 16;
#if LONG_BIT >= 64
word |= word << 32;
#endif
/* Don't perform memory I/O when passing a zero-length buffer. */
if (n == 0)
return (NULL);
/*
* First determine whether there is a character unequal to `c'
* in the first word. As this word may contain bytes before
* `begin', we may execute this loop spuriously.
*/
lp = (const unsigned long *)((uintptr_t)begin & ~LONGPTR_MASK);
end = (const unsigned char *)begin + n;
if (*lp++ != word)
for (p = begin; p < (const unsigned char *)lp;)
TESTBYTE;
/* Now compare the data one word at a time. */
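	/*
	 * A word equal to `word' consists entirely of bytes equal to `c', so any
	 * word that differs must contain at least one mismatching byte; the
	 * TESTBYTE sequence below then locates that exact byte.
	 */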
for (; (const unsigned char *)lp < end; lp++) {
if (*lp != word) {
p = (const unsigned char *)lp;
TESTBYTE;
TESTBYTE;
TESTBYTE;
#if LONG_BIT >= 64
TESTBYTE;
TESTBYTE;
TESTBYTE;
TESTBYTE;
#endif
goto done;
}
}
return (NULL);
done:
/*
* If the end of the buffer is not word aligned, the previous
* loops may obtain an address that's beyond the end of the
* buffer.
*/
if (p < end)
return (__DECONST(void *, p));
return (NULL);
}
| 1,188 |
381 | # load __future__.py constants
def load_module():
from pypy.tool.lib_pypy import LIB_PYTHON
module_path = LIB_PYTHON.join('__future__.py')
execfile(str(module_path), globals())
load_module()
del load_module
# this could be generalized, it's also in opcode.py
| 105 |
8,679 | /*
* Copyright (C) 2011 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.moshi.adapters;
import com.squareup.moshi.JsonAdapter;
import com.squareup.moshi.JsonDataException;
import com.squareup.moshi.JsonReader;
import com.squareup.moshi.JsonWriter;
import com.squareup.moshi.Moshi;
import com.squareup.moshi.Types;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.annotation.CheckReturnValue;
import javax.annotation.Nullable;
/**
* A JsonAdapter factory for objects that include type information in the JSON. When decoding JSON
* Moshi uses this type information to determine which class to decode to. When encoding Moshi uses
* the object’s class to determine what type information to include.
*
* <p>Suppose we have an interface, its implementations, and a class that uses them:
*
* <pre>{@code
* interface HandOfCards {
* }
*
* class BlackjackHand implements HandOfCards {
* Card hidden_card;
* List<Card> visible_cards;
* }
*
* class HoldemHand implements HandOfCards {
* Set<Card> hidden_cards;
* }
*
* class Player {
* String name;
* HandOfCards hand;
* }
* }</pre>
*
* <p>We want to decode the following JSON into the player model above:
*
* <pre>{@code
* {
* "name": "Jesse",
* "hand": {
* "hand_type": "blackjack",
* "hidden_card": "9D",
* "visible_cards": ["8H", "4C"]
* }
* }
* }</pre>
*
* <p>Left unconfigured, Moshi would incorrectly attempt to decode the hand object to the abstract
* {@code HandOfCards} interface. We configure it to use the appropriate subtype instead:
*
* <pre>{@code
* Moshi moshi = new Moshi.Builder()
* .add(PolymorphicJsonAdapterFactory.of(HandOfCards.class, "hand_type")
* .withSubtype(BlackjackHand.class, "blackjack")
* .withSubtype(HoldemHand.class, "holdem"))
* .build();
* }</pre>
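 *
 * <p>A decoding call with that configuration might then look roughly like this (illustrative
 * sketch; {@code json} is assumed to hold the player document shown above):
 *
 * <pre>{@code
 * JsonAdapter<Player> playerAdapter = moshi.adapter(Player.class);
 * Player player = playerAdapter.fromJson(json);
 * // player.hand is decoded as a BlackjackHand because of the "blackjack" label
 * }</pre>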
*
* <p>This class imposes strict requirements on its use:
*
* <ul>
* <li>Base types may be classes or interfaces.
* <li>Subtypes must encode as JSON objects.
* <li>Type information must be in the encoded object. Each message must have a type label like
* {@code hand_type} whose value is a string like {@code blackjack} that identifies which type
* to use.
* <li>Each type identifier must be unique.
* </ul>
*
* <p>For best performance type information should be the first field in the object. Otherwise Moshi
* must reprocess the JSON stream once it knows the object's type.
*
* <p>If an unknown subtype is encountered when decoding:
*
* <ul>
* <li>If {@link #withDefaultValue(Object)} is used, then {@code defaultValue} will be returned.
* <li>If {@link #withFallbackJsonAdapter(JsonAdapter)} is used, then the {@code
* fallbackJsonAdapter.fromJson(reader)} result will be returned.
* <li>Otherwise a {@link JsonDataException} will be thrown.
* </ul>
*
* <p>If an unknown type is encountered when encoding:
*
* <ul>
* <li>If {@link #withFallbackJsonAdapter(JsonAdapter)} is used, then the {@code
* fallbackJsonAdapter.toJson(writer, value)} result will be returned.
* <li>Otherwise a {@link IllegalArgumentException} will be thrown.
* </ul>
*
* <p>If the same subtype has multiple labels the first one is used when encoding.
*/
public final class PolymorphicJsonAdapterFactory<T> implements JsonAdapter.Factory {
final Class<T> baseType;
final String labelKey;
final List<String> labels;
final List<Type> subtypes;
@Nullable final JsonAdapter<Object> fallbackJsonAdapter;
PolymorphicJsonAdapterFactory(
Class<T> baseType,
String labelKey,
List<String> labels,
List<Type> subtypes,
@Nullable JsonAdapter<Object> fallbackJsonAdapter) {
this.baseType = baseType;
this.labelKey = labelKey;
this.labels = labels;
this.subtypes = subtypes;
this.fallbackJsonAdapter = fallbackJsonAdapter;
}
/**
* @param baseType The base type for which this factory will create adapters. Cannot be Object.
* @param labelKey The key in the JSON object whose value determines the type to which to map the
* JSON object.
*/
@CheckReturnValue
public static <T> PolymorphicJsonAdapterFactory<T> of(Class<T> baseType, String labelKey) {
if (baseType == null) throw new NullPointerException("baseType == null");
if (labelKey == null) throw new NullPointerException("labelKey == null");
return new PolymorphicJsonAdapterFactory<>(
baseType, labelKey, Collections.<String>emptyList(), Collections.<Type>emptyList(), null);
}
/** Returns a new factory that decodes instances of {@code subtype}. */
public PolymorphicJsonAdapterFactory<T> withSubtype(Class<? extends T> subtype, String label) {
if (subtype == null) throw new NullPointerException("subtype == null");
if (label == null) throw new NullPointerException("label == null");
if (labels.contains(label)) {
throw new IllegalArgumentException("Labels must be unique.");
}
List<String> newLabels = new ArrayList<>(labels);
newLabels.add(label);
List<Type> newSubtypes = new ArrayList<>(subtypes);
newSubtypes.add(subtype);
return new PolymorphicJsonAdapterFactory<>(
baseType, labelKey, newLabels, newSubtypes, fallbackJsonAdapter);
}
/**
   * Returns a new factory that will default to {@code fallbackJsonAdapter.fromJson(reader)} upon
* decoding of unrecognized labels.
*
* <p>The {@link JsonReader} instance will not be automatically consumed, so make sure to consume
* it within your implementation of {@link JsonAdapter#fromJson(JsonReader)}
*/
public PolymorphicJsonAdapterFactory<T> withFallbackJsonAdapter(
@Nullable JsonAdapter<Object> fallbackJsonAdapter) {
return new PolymorphicJsonAdapterFactory<>(
baseType, labelKey, labels, subtypes, fallbackJsonAdapter);
}
/**
* Returns a new factory that will default to {@code defaultValue} upon decoding of unrecognized
* labels. The default value should be immutable.
*/
public PolymorphicJsonAdapterFactory<T> withDefaultValue(@Nullable T defaultValue) {
return withFallbackJsonAdapter(buildFallbackJsonAdapter(defaultValue));
}
private JsonAdapter<Object> buildFallbackJsonAdapter(final T defaultValue) {
return new JsonAdapter<Object>() {
@Override
public @Nullable Object fromJson(JsonReader reader) throws IOException {
reader.skipValue();
return defaultValue;
}
@Override
public void toJson(JsonWriter writer, Object value) throws IOException {
throw new IllegalArgumentException(
"Expected one of "
+ subtypes
+ " but found "
+ value
+ ", a "
+ value.getClass()
+ ". Register this subtype.");
}
};
}
@Override
public JsonAdapter<?> create(Type type, Set<? extends Annotation> annotations, Moshi moshi) {
if (Types.getRawType(type) != baseType || !annotations.isEmpty()) {
return null;
}
List<JsonAdapter<Object>> jsonAdapters = new ArrayList<>(subtypes.size());
for (int i = 0, size = subtypes.size(); i < size; i++) {
jsonAdapters.add(moshi.adapter(subtypes.get(i)));
}
return new PolymorphicJsonAdapter(labelKey, labels, subtypes, jsonAdapters, fallbackJsonAdapter)
.nullSafe();
}
static final class PolymorphicJsonAdapter extends JsonAdapter<Object> {
final String labelKey;
final List<String> labels;
final List<Type> subtypes;
final List<JsonAdapter<Object>> jsonAdapters;
@Nullable final JsonAdapter<Object> fallbackJsonAdapter;
/** Single-element options containing the label's key only. */
final JsonReader.Options labelKeyOptions;
/** Corresponds to subtypes. */
final JsonReader.Options labelOptions;
PolymorphicJsonAdapter(
String labelKey,
List<String> labels,
List<Type> subtypes,
List<JsonAdapter<Object>> jsonAdapters,
@Nullable JsonAdapter<Object> fallbackJsonAdapter) {
this.labelKey = labelKey;
this.labels = labels;
this.subtypes = subtypes;
this.jsonAdapters = jsonAdapters;
this.fallbackJsonAdapter = fallbackJsonAdapter;
this.labelKeyOptions = JsonReader.Options.of(labelKey);
this.labelOptions = JsonReader.Options.of(labels.toArray(new String[0]));
}
@Override
public Object fromJson(JsonReader reader) throws IOException {
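      // Peek at the object first so the type label can be located without consuming the reader;
      // the adapter selected below then re-reads the complete object from the original reader.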
JsonReader peeked = reader.peekJson();
peeked.setFailOnUnknown(false);
int labelIndex;
try {
labelIndex = labelIndex(peeked);
} finally {
peeked.close();
}
if (labelIndex == -1) {
return this.fallbackJsonAdapter.fromJson(reader);
} else {
return jsonAdapters.get(labelIndex).fromJson(reader);
}
}
private int labelIndex(JsonReader reader) throws IOException {
reader.beginObject();
while (reader.hasNext()) {
if (reader.selectName(labelKeyOptions) == -1) {
reader.skipName();
reader.skipValue();
continue;
}
int labelIndex = reader.selectString(labelOptions);
if (labelIndex == -1 && this.fallbackJsonAdapter == null) {
throw new JsonDataException(
"Expected one of "
+ labels
+ " for key '"
+ labelKey
+ "' but found '"
+ reader.nextString()
+ "'. Register a subtype for this label.");
}
return labelIndex;
}
throw new JsonDataException("Missing label for " + labelKey);
}
@Override
public void toJson(JsonWriter writer, Object value) throws IOException {
Class<?> type = value.getClass();
int labelIndex = subtypes.indexOf(type);
final JsonAdapter<Object> adapter;
if (labelIndex == -1) {
if (fallbackJsonAdapter == null) {
throw new IllegalArgumentException(
"Expected one of "
+ subtypes
+ " but found "
+ value
+ ", a "
+ value.getClass()
+ ". Register this subtype.");
}
adapter = fallbackJsonAdapter;
} else {
adapter = jsonAdapters.get(labelIndex);
}
writer.beginObject();
if (adapter != fallbackJsonAdapter) {
writer.name(labelKey).value(labels.get(labelIndex));
}
int flattenToken = writer.beginFlatten();
adapter.toJson(writer, value);
writer.endFlatten(flattenToken);
writer.endObject();
}
@Override
public String toString() {
return "PolymorphicJsonAdapter(" + labelKey + ")";
}
}
}
| 4,220 |
423 | <filename>services/src/main/java/io/scalecube/services/routing/RoundRobinServiceRouter.java<gh_stars>100-1000
package io.scalecube.services.routing;
import io.scalecube.services.ServiceReference;
import io.scalecube.services.api.ServiceMessage;
import io.scalecube.services.registry.api.ServiceRegistry;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import org.jctools.maps.NonBlockingHashMap;
public class RoundRobinServiceRouter implements Router {
private final Map<String, AtomicInteger> counterByServiceName = new NonBlockingHashMap<>();
@Override
public Optional<ServiceReference> route(ServiceRegistry serviceRegistry, ServiceMessage request) {
List<ServiceReference> serviceInstances = serviceRegistry.lookupService(request);
if (serviceInstances.isEmpty()) {
return Optional.empty();
} else if (serviceInstances.size() == 1) {
return Optional.of(serviceInstances.get(0));
} else {
AtomicInteger counter =
counterByServiceName.computeIfAbsent(request.qualifier(), or -> new AtomicInteger());
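      // Masking with Integer.MAX_VALUE clears the sign bit, so the index stays non-negative
      // even after the per-service counter overflows.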
int index = (counter.incrementAndGet() & Integer.MAX_VALUE) % serviceInstances.size();
return Optional.of(serviceInstances.get(index));
}
}
}
| 416 |
824 | <filename>doc/tutorials/gpu_particle_burst/gpu_particle_burst_04.py
"""
Example showing how to create particle explosions via the GPU.
"""
import random
import time
import math
from array import array
from dataclasses import dataclass
import arcade
import arcade.gl
SCREEN_WIDTH = 1024
SCREEN_HEIGHT = 768
SCREEN_TITLE = "GPU Particle Explosion"
PARTICLE_COUNT = 300
@dataclass
class Burst:
""" Track for each burst. """
buffer: arcade.gl.Buffer
vao: arcade.gl.Geometry
start_time: float
class MyWindow(arcade.Window):
""" Main window"""
def __init__(self):
super().__init__(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_TITLE)
self.burst_list = []
# Program to visualize the points
self.program = self.ctx.load_program(
vertex_shader="vertex_shader_v2.glsl",
fragment_shader="fragment_shader.glsl",
)
self.ctx.enable_only()
def on_draw(self):
""" Draw everything """
self.clear()
# Set the particle size
self.ctx.point_size = 2 * self.get_pixel_ratio()
# Loop through each burst
for burst in self.burst_list:
# Set the uniform data
self.program['time'] = time.time() - burst.start_time
# Render the burst
burst.vao.render(self.program, mode=self.ctx.POINTS)
def on_update(self, dt):
""" Update everything """
pass
def on_mouse_press(self, x: float, y: float, button: int, modifiers: int):
""" User clicks mouse """
def _gen_initial_data(initial_x, initial_y):
""" Generate data for each particle """
for i in range(PARTICLE_COUNT):
angle = random.uniform(0, 2 * math.pi)
speed = random.uniform(0.0, 0.3)
dx = math.sin(angle) * speed
dy = math.cos(angle) * speed
yield initial_x
yield initial_y
yield dx
yield dy
# Recalculate the coordinates from pixels to the OpenGL system with
# 0, 0 at the center.
x2 = x / self.width * 2. - 1.
y2 = y / self.height * 2. - 1.
# Get initial particle data
initial_data = _gen_initial_data(x2, y2)
# Create a buffer with that data
buffer = self.ctx.buffer(data=array('f', initial_data))
# Create a buffer description that says how the buffer data is formatted.
buffer_description = arcade.gl.BufferDescription(buffer,
'2f 2f',
['in_pos', 'in_vel'])
# Create our Vertex Attribute Object
vao = self.ctx.geometry([buffer_description])
# Create the Burst object and add it to the list of bursts
burst = Burst(buffer=buffer, vao=vao, start_time=time.time())
self.burst_list.append(burst)
if __name__ == "__main__":
window = MyWindow()
window.center_window()
arcade.run()
| 1,382 |
8,027 | """ IGNORED DOCSTRING """
def get_suffix(symbol, default):
return native.implicit_package_symbol(symbol, default)
| 41 |
1,013 | /*!
@authors <NAME> (<EMAIL>)
@date 2014-2020
@copyright BSD-3-Clause
*/
#include <gtest/gtest.h>
#include <pyclustering/interface/cure_interface.h>
#include <pyclustering/interface/pyclustering_package.hpp>
#include "samples.hpp"
#include "utenv_utils.hpp"
#include "utenv_check.hpp"
#include <memory>
using namespace pyclustering;
TEST(utest_interface_cure, cure_api) {
std::shared_ptr<pyclustering_package> sample = pack(dataset({ { 1 }, { 2 }, { 3 }, { 10 }, { 11 }, { 12 } }));
void * cure_result = cure_algorithm(sample.get(), 2, 1, 0.5);
ASSERT_NE(nullptr, cure_result);
std::shared_ptr<pyclustering_package> clusters(cure_get_clusters(cure_result));
ASSERT_EQ(2U, clusters->size);
std::shared_ptr<pyclustering_package> representors(cure_get_representors(cure_result));
ASSERT_EQ(2U, representors->size);
std::shared_ptr<pyclustering_package> means(cure_get_means(cure_result));
ASSERT_EQ(2U, means->size);
cure_data_destroy(cure_result);
}
TEST(utest_interface_cure, cure_api_long_result) {
auto sample_sptr = fcps_sample_factory::create_sample(FCPS_SAMPLE::HEPTA);
std::shared_ptr<pyclustering_package> sample = pack(*sample_sptr);
void * cure_result = cure_algorithm(sample.get(), 7, 1, 0.3);
ASSERT_NE(nullptr, cure_result);
std::shared_ptr<pyclustering_package> clusters(cure_get_clusters(cure_result));
ASSERT_EQ(7U, clusters->size);
std::shared_ptr<pyclustering_package> representors(cure_get_representors(cure_result));
ASSERT_EQ(7U, representors->size);
std::shared_ptr<pyclustering_package> means(cure_get_means(cure_result));
ASSERT_EQ(7U, means->size);
cure_data_destroy(cure_result);
} | 771 |
1,802 | <reponame>fossabot/swift-1
# Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Tests for `swift.common.splice`'''
import os
import errno
import ctypes
import logging
import tempfile
import unittest
import contextlib
import re
import mock
import six
from swift.common.splice import splice, tee
LOGGER = logging.getLogger(__name__)
def NamedTemporaryFile():
    '''Wrapper to tempfile.NamedTemporaryFile() disabling buffering.
The wrapper is used to support Python 2 and Python 3 in the same
code base.
'''
if six.PY3:
return tempfile.NamedTemporaryFile(buffering=0)
else:
return tempfile.NamedTemporaryFile(bufsize=0)
def safe_close(fd):
'''Close a file descriptor, ignoring any exceptions'''
try:
os.close(fd)
except Exception:
LOGGER.exception('Error while closing FD')
@contextlib.contextmanager
def pipe():
'''Context-manager providing 2 ends of a pipe, closing them at exit'''
fds = os.pipe()
try:
yield fds
finally:
safe_close(fds[0])
safe_close(fds[1])
class TestSplice(unittest.TestCase):
'''Tests for `splice`'''
def setUp(self):
if not splice.available:
raise unittest.SkipTest('splice not available')
def test_flags(self):
'''Test flag attribute availability'''
self.assertTrue(hasattr(splice, 'SPLICE_F_MOVE'))
self.assertTrue(hasattr(splice, 'SPLICE_F_NONBLOCK'))
self.assertTrue(hasattr(splice, 'SPLICE_F_MORE'))
self.assertTrue(hasattr(splice, 'SPLICE_F_GIFT'))
@mock.patch('swift.common.splice.splice._c_splice', None)
def test_available(self):
'''Test `available` attribute correctness'''
self.assertFalse(splice.available)
def test_splice_pipe_to_pipe(self):
'''Test `splice` from a pipe to a pipe'''
with pipe() as (p1a, p1b):
with pipe() as (p2a, p2b):
os.write(p1b, b'abcdef')
res = splice(p1a, None, p2b, None, 3, 0)
self.assertEqual(res, (3, None, None))
self.assertEqual(os.read(p2a, 3), b'abc')
self.assertEqual(os.read(p1a, 3), b'def')
def test_splice_file_to_pipe(self):
'''Test `splice` from a file to a pipe'''
with NamedTemporaryFile() as fd:
with pipe() as (pa, pb):
fd.write(b'abcdef')
fd.seek(0, os.SEEK_SET)
res = splice(fd, None, pb, None, 3, 0)
self.assertEqual(res, (3, None, None))
# `fd.tell()` isn't updated...
self.assertEqual(os.lseek(fd.fileno(), 0, os.SEEK_CUR), 3)
fd.seek(0, os.SEEK_SET)
res = splice(fd, 3, pb, None, 3, 0)
self.assertEqual(res, (3, 6, None))
self.assertEqual(os.lseek(fd.fileno(), 0, os.SEEK_CUR), 0)
self.assertEqual(os.read(pa, 6), b'abcdef')
def test_splice_pipe_to_file(self):
'''Test `splice` from a pipe to a file'''
with NamedTemporaryFile() as fd:
with pipe() as (pa, pb):
os.write(pb, b'abcdef')
res = splice(pa, None, fd, None, 3, 0)
self.assertEqual(res, (3, None, None))
self.assertEqual(fd.tell(), 3)
fd.seek(0, os.SEEK_SET)
res = splice(pa, None, fd, 3, 3, 0)
self.assertEqual(res, (3, None, 6))
self.assertEqual(fd.tell(), 0)
self.assertEqual(fd.read(6), b'abcdef')
@mock.patch.object(splice, '_c_splice')
def test_fileno(self, mock_splice):
'''Test handling of file-descriptors'''
splice(1, None, 2, None, 3, 0)
self.assertEqual(mock_splice.call_args,
((1, None, 2, None, 3, 0), {}))
mock_splice.reset_mock()
with open('/dev/zero', 'r') as fd:
splice(fd, None, fd, None, 3, 0)
self.assertEqual(mock_splice.call_args,
((fd.fileno(), None, fd.fileno(), None, 3, 0),
{}))
@mock.patch.object(splice, '_c_splice')
def test_flags_list(self, mock_splice):
'''Test handling of flag lists'''
splice(1, None, 2, None, 3,
[splice.SPLICE_F_MOVE, splice.SPLICE_F_NONBLOCK])
flags = splice.SPLICE_F_MOVE | splice.SPLICE_F_NONBLOCK
self.assertEqual(mock_splice.call_args,
((1, None, 2, None, 3, flags), {}))
mock_splice.reset_mock()
splice(1, None, 2, None, 3, [])
self.assertEqual(mock_splice.call_args,
((1, None, 2, None, 3, 0), {}))
def test_errno(self):
'''Test handling of failures'''
# Invoke EBADF by using a read-only FD as fd_out
with open('/dev/null', 'r') as fd:
err = errno.EBADF
msg = r'\[Errno %d\] splice: %s' % (err, os.strerror(err))
try:
splice(fd, None, fd, None, 3, 0)
except IOError as e:
self.assertTrue(re.match(msg, str(e)))
else:
self.fail('Expected IOError was not raised')
self.assertEqual(ctypes.get_errno(), 0)
@mock.patch('swift.common.splice.splice._c_splice', None)
def test_unavailable(self):
'''Test exception when unavailable'''
self.assertRaises(EnvironmentError, splice, 1, None, 2, None, 2, 0)
def test_unavailable_in_libc(self):
'''Test `available` attribute when `libc` has no `splice` support'''
class LibC(object):
'''A fake `libc` object tracking `splice` attribute access'''
def __init__(self):
self.splice_retrieved = False
@property
def splice(self):
self.splice_retrieved = True
raise AttributeError
libc = LibC()
mock_cdll = mock.Mock(return_value=libc)
with mock.patch('ctypes.CDLL', new=mock_cdll):
# Force re-construction of a `Splice` instance
# Something you're not supposed to do in actual code
new_splice = type(splice)()
self.assertFalse(new_splice.available)
libc_name = ctypes.util.find_library('c')
mock_cdll.assert_called_once_with(libc_name, use_errno=True)
self.assertTrue(libc.splice_retrieved)
class TestTee(unittest.TestCase):
'''Tests for `tee`'''
def setUp(self):
if not tee.available:
raise unittest.SkipTest('tee not available')
@mock.patch('swift.common.splice.tee._c_tee', None)
def test_available(self):
'''Test `available` attribute correctness'''
self.assertFalse(tee.available)
def test_tee_pipe_to_pipe(self):
'''Test `tee` from a pipe to a pipe'''
with pipe() as (p1a, p1b):
with pipe() as (p2a, p2b):
os.write(p1b, b'abcdef')
res = tee(p1a, p2b, 3, 0)
self.assertEqual(res, 3)
self.assertEqual(os.read(p2a, 3), b'abc')
self.assertEqual(os.read(p1a, 6), b'abcdef')
@mock.patch.object(tee, '_c_tee')
def test_fileno(self, mock_tee):
'''Test handling of file-descriptors'''
with pipe() as (pa, pb):
tee(pa, pb, 3, 0)
self.assertEqual(mock_tee.call_args, ((pa, pb, 3, 0), {}))
mock_tee.reset_mock()
tee(os.fdopen(pa, 'r'), os.fdopen(pb, 'w'), 3, 0)
self.assertEqual(mock_tee.call_args, ((pa, pb, 3, 0), {}))
@mock.patch.object(tee, '_c_tee')
def test_flags_list(self, mock_tee):
'''Test handling of flag lists'''
tee(1, 2, 3, [splice.SPLICE_F_MOVE | splice.SPLICE_F_NONBLOCK])
flags = splice.SPLICE_F_MOVE | splice.SPLICE_F_NONBLOCK
self.assertEqual(mock_tee.call_args, ((1, 2, 3, flags), {}))
mock_tee.reset_mock()
tee(1, 2, 3, [])
self.assertEqual(mock_tee.call_args, ((1, 2, 3, 0), {}))
def test_errno(self):
'''Test handling of failures'''
# Invoke EBADF by using a read-only FD as fd_out
with open('/dev/null', 'r') as fd:
err = errno.EBADF
msg = r'\[Errno %d\] tee: %s' % (err, os.strerror(err))
try:
tee(fd, fd, 3, 0)
except IOError as e:
self.assertTrue(re.match(msg, str(e)))
else:
self.fail('Expected IOError was not raised')
self.assertEqual(ctypes.get_errno(), 0)
@mock.patch('swift.common.splice.tee._c_tee', None)
def test_unavailable(self):
'''Test exception when unavailable'''
self.assertRaises(EnvironmentError, tee, 1, 2, 2, 0)
def test_unavailable_in_libc(self):
'''Test `available` attribute when `libc` has no `tee` support'''
class LibC(object):
'''A fake `libc` object tracking `tee` attribute access'''
def __init__(self):
self.tee_retrieved = False
@property
def tee(self):
self.tee_retrieved = True
raise AttributeError
libc = LibC()
mock_cdll = mock.Mock(return_value=libc)
with mock.patch('ctypes.CDLL', new=mock_cdll):
# Force re-construction of a `Tee` instance
# Something you're not supposed to do in actual code
new_tee = type(tee)()
self.assertFalse(new_tee.available)
libc_name = ctypes.util.find_library('c')
mock_cdll.assert_called_once_with(libc_name, use_errno=True)
self.assertTrue(libc.tee_retrieved)
| 5,109 |
3,372 | <gh_stars>1000+
/*
* Copyright 2016-2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.chime.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.chime.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* MediaCapturePipelineMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class MediaCapturePipelineMarshaller {
private static final MarshallingInfo<String> MEDIAPIPELINEID_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("MediaPipelineId").build();
private static final MarshallingInfo<String> SOURCETYPE_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("SourceType").build();
private static final MarshallingInfo<String> SOURCEARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("SourceArn").build();
private static final MarshallingInfo<String> STATUS_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("Status").build();
private static final MarshallingInfo<String> SINKTYPE_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("SinkType").build();
private static final MarshallingInfo<String> SINKARN_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("SinkArn").build();
private static final MarshallingInfo<java.util.Date> CREATEDTIMESTAMP_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("CreatedTimestamp").timestampFormat("iso8601").build();
private static final MarshallingInfo<java.util.Date> UPDATEDTIMESTAMP_BINDING = MarshallingInfo.builder(MarshallingType.DATE)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("UpdatedTimestamp").timestampFormat("iso8601").build();
private static final MarshallingInfo<StructuredPojo> CHIMESDKMEETINGCONFIGURATION_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ChimeSdkMeetingConfiguration").build();
private static final MediaCapturePipelineMarshaller instance = new MediaCapturePipelineMarshaller();
public static MediaCapturePipelineMarshaller getInstance() {
return instance;
}
/**
* Marshall the given parameter object.
*/
public void marshall(MediaCapturePipeline mediaCapturePipeline, ProtocolMarshaller protocolMarshaller) {
if (mediaCapturePipeline == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(mediaCapturePipeline.getMediaPipelineId(), MEDIAPIPELINEID_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getSourceType(), SOURCETYPE_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getSourceArn(), SOURCEARN_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getStatus(), STATUS_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getSinkType(), SINKTYPE_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getSinkArn(), SINKARN_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getCreatedTimestamp(), CREATEDTIMESTAMP_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getUpdatedTimestamp(), UPDATEDTIMESTAMP_BINDING);
protocolMarshaller.marshall(mediaCapturePipeline.getChimeSdkMeetingConfiguration(), CHIMESDKMEETINGCONFIGURATION_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
| 1,634 |
536 | <reponame>Han0nly/jazzer
// Copyright 2021 Code Intelligence GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.example;
import com.code_intelligence.jazzer.api.FuzzedDataProvider;
import java.io.File;
import java.io.IOException;
public class ExamplePathTraversalFuzzer {
/**
* The root path for all files that this application is allowed to upload.
*/
public static final String publicFilesRootPath = "/app/upload/";
public static void fuzzerTestOneInput(FuzzedDataProvider data) {
String relativePath = data.consumeRemainingAsAsciiString();
// Upload the file and try very hard to ignore errors thrown during the upload.
try {
uploadFile(relativePath);
} catch (Throwable ignored) {
}
}
private static void uploadFile(String relativePathToFile) throws IOException {
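    // Naively concatenating the attacker-controlled relative path onto the upload root is the
    // path traversal sink this example is meant to expose ("../" sequences escape the root).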
File fileToUpload = new File(publicFilesRootPath + relativePathToFile);
if (!fileToUpload.exists()) {
throw new IOException("File not found");
}
// In a real application, the file would be uploaded to a public server here.
}
}
| 463 |
318 | <gh_stars>100-1000
#ifndef _GEPDEFS_H
#define _GEPDEFS_H
#define CODE_WIN32 1
#define CODE_DC 2
#define CODE_PS2 3
#define CODE_XBOX 4
#endif
| 86 |
373 | <filename>src/main/java8/net/finmath/timeseries/models/parametric/DisplacedLognormal.java
/*
* (c) Copyright <NAME>, Germany. Contact: <EMAIL>.
*
* Created on 15.07.2012
*/
package net.finmath.timeseries.models.parametric;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.math3.analysis.MultivariateFunction;
import org.apache.commons.math3.optimization.GoalType;
import org.apache.commons.math3.optimization.PointValuePair;
import net.finmath.timeseries.HistoricalSimulationModel;
/**
 * Displaced log-normal process with constant volatility.
*
 * This class estimates the process
* \[
* \mathrm{d} \log(X + a) = \frac{\sigma}{b + a} \mathrm{d}W(t)
* \]
 * where \( a > -\min(X(t_{i})) \) and thus \( X+a > 0 \), \( b = 1 - \min(X(t_{i})) \), and
* \( \sigma \) is a constant.
*
* The choice of b ensures that b+a ≥ 1.
* For a=0 we have a log-normal process with volatility σ/(b + a).
* For a=infinity we have a normal process with volatility σ.
*
* @author <NAME>
* @version 1.0
*/
public class DisplacedLognormal implements HistoricalSimulationModel {
private final double[] values;
private final double lowerBoundDisplacement;
private final double upperBoundDisplacement = 10000000;
private final int windowIndexStart;
private final int windowIndexEnd;
private final int maxIterations = 1000000;
public DisplacedLognormal(final double[] values) {
this.values = values;
windowIndexStart = 0;
windowIndexEnd = values.length-1;
double valuesMin = Double.MAX_VALUE;
for (int i = windowIndexStart; i <= windowIndexEnd; i++) {
valuesMin = Math.min(values[i], valuesMin);
}
lowerBoundDisplacement = -valuesMin+1;
}
public DisplacedLognormal(final double[] values, final double lowerBoundDisplacement) {
this.values = values;
windowIndexStart = 0;
windowIndexEnd = values.length-1;
double valuesMin = Double.MAX_VALUE;
for (int i = windowIndexStart; i <= windowIndexEnd; i++) {
valuesMin = Math.min(values[i], valuesMin);
}
this.lowerBoundDisplacement = Math.max(-valuesMin+1,lowerBoundDisplacement);
}
public DisplacedLognormal(final double[] values, final int windowIndexStart, final int windowIndexEnd) {
this.values = values;
this.windowIndexStart = windowIndexStart;
this.windowIndexEnd = windowIndexEnd;
double valuesMin = Double.MAX_VALUE;
for (int i = windowIndexStart; i <= windowIndexEnd; i++) {
valuesMin = Math.min(values[i], valuesMin);
}
lowerBoundDisplacement = -valuesMin+1;
}
public DisplacedLognormal(final double[] values, final double lowerBoundDisplacement, final int windowIndexStart, final int windowIndexEnd) {
this.values = values;
this.windowIndexStart = windowIndexStart;
this.windowIndexEnd = windowIndexEnd;
double valuesMin = Double.MAX_VALUE;
for (int i = windowIndexStart; i <= windowIndexEnd; i++) {
valuesMin = Math.min(values[i], valuesMin);
}
this.lowerBoundDisplacement = Math.max(-valuesMin+1,lowerBoundDisplacement);
}
@Override
public HistoricalSimulationModel getCloneWithWindow(final int windowIndexStart, final int windowIndexEnd) {
return new DisplacedLognormal(values, windowIndexStart, windowIndexEnd);
}
public HistoricalSimulationModel getCloneWithWindow(final double lowerBoundDisplacement, final int windowIndexStart, final int windowIndexEnd) {
return new DisplacedLognormal(values, lowerBoundDisplacement, windowIndexStart, windowIndexEnd);
}
public double getLogLikelihoodForParameters(final double omega, final double alpha, final double beta, final double displacement)
{
double logLikelihood = 0.0;
final double volScaling = (1+Math.abs(displacement));
double volSquaredEstimate = 0.0;
for (int i = windowIndexStart+1; i <= windowIndexEnd-1; i++) {
final double eval = volScaling * (Math.log((values[i]+displacement)/(values[i-1]+displacement)));
volSquaredEstimate += eval*eval;
}
volSquaredEstimate /= windowIndexEnd-windowIndexStart;
double eval = volScaling * (Math.log((values[windowIndexStart+1]+displacement)/(values[windowIndexStart+1-1]+displacement)));
for (int i = windowIndexStart+1; i <= windowIndexEnd-1; i++) {
final double evalNext = volScaling * (Math.log((values[i+1]+displacement)/(values[i]+displacement)));
final double volSquared = volSquaredEstimate / volScaling * volScaling; // h = (sigma*)^2, volSquared = (sigma^a)^2
logLikelihood += - Math.log(volSquaredEstimate) - 2 * Math.log((values[i+1]+displacement)/volScaling) - evalNext*evalNext / volSquaredEstimate;
eval = evalNext;
}
logLikelihood += - Math.log(2 * Math.PI) * (windowIndexEnd-windowIndexStart);
logLikelihood *= 0.5;
return logLikelihood;
}
public double getLastResidualForParameters(final double omega, final double alpha, final double beta, final double displacement) {
final double volScaling = (1+Math.abs(displacement));
double h = omega / (1.0 - alpha - beta);
for (int i = windowIndexStart+1; i <= windowIndexEnd; i++) {
final double eval = volScaling * (Math.log((values[i]+displacement)/(values[i-1]+displacement)));
// double eval = volScaling * (values[i]-values[i-1])/(values[i-1]+displacement);
h = omega + alpha * eval * eval + beta * h;
}
return h;
}
public double[] getQuantilPredictionsForParameters(final double omega, final double alpha, final double beta, final double displacement, final double[] quantiles) {
final double[] szenarios = new double[windowIndexEnd-windowIndexStart+1-1];
final double volScaling = (1+Math.abs(displacement));
double volSquaredEstimate = 0.0;
for (int i = windowIndexStart+1; i <= windowIndexEnd-1; i++) {
final double eval = volScaling * (Math.log((values[i]+displacement)/(values[i-1]+displacement)));
volSquaredEstimate += eval*eval;
}
volSquaredEstimate /= windowIndexEnd-windowIndexStart;
double vol = Math.sqrt(volSquaredEstimate) / volScaling;
for (int i = windowIndexStart+1; i <= windowIndexEnd; i++) {
final double y = Math.log((values[i]+displacement)/(values[i-1]+displacement));
// double y = (values[i]-values[i-1])/(values[i-1]+displacement);
szenarios[i-windowIndexStart-1] = y / vol;
final double eval = volScaling * y;
vol = Math.sqrt(volSquaredEstimate) / volScaling;
}
java.util.Arrays.sort(szenarios);
final double[] quantileValues = new double[quantiles.length];
for(int i=0; i<quantiles.length; i++) {
final double quantile = quantiles[i];
final double quantileIndex = szenarios.length * quantile - 1;
final int quantileIndexLo = (int)quantileIndex;
final int quantileIndexHi = quantileIndexLo+1;
final double szenarioRelativeChange =
(quantileIndexHi-quantileIndex) * Math.exp(szenarios[Math.max(quantileIndexLo,0 )] * vol)
						+ (quantileIndex-quantileIndexLo) * Math.exp(szenarios[Math.min(quantileIndexHi,szenarios.length-1)] * vol);
/*
double szenarioRelativeChange =
(quantileIndexHi-quantileIndex) * (1 + szenarios[Math.max(quantileIndexLo,0 )] * vol)
+ (quantileIndex-quantileIndexLo) * (1 + szenarios[Math.min(quantileIndexHi,szenarios.length)] * vol);
*/
final double quantileValue = (values[windowIndexEnd]+displacement) * szenarioRelativeChange - displacement;
quantileValues[i] = quantileValue;
}
return quantileValues;
}
/* (non-Javadoc)
* @see net.finmath.timeseries.HistoricalSimulationModel#getBestParameters()
*/
@Override
public Map<String, Object> getBestParameters() {
return getBestParameters(null);
}
/* (non-Javadoc)
* @see net.finmath.timeseries.HistoricalSimulationModel#getBestParameters(java.util.Map)
*/
@Override
public Map<String, Object> getBestParameters(final Map<String, Object> guess) {
// Create the objective function for the solver
class GARCHMaxLikelihoodFunction implements MultivariateFunction, Serializable {
private static final long serialVersionUID = 7072187082052755854L;
@Override
public double value(final double[] variables) {
/*
* Transform variables: The solver variables are in (-\infty, \infty).
* We transform the variable to the admissible domain for GARCH, that is
* omega > 0, 0 < alpha < 1, 0 < beta < (1-alpha), displacement > lowerBoundDisplacement ??????
* ???? usually for GARCH the restrictions are written like omega > 0, alpha > 0, beta > 0, and alpha + beta < 1
*/
final double omega = Math.exp(variables[0]);
final double mucorr = Math.exp(-Math.exp(-variables[1]));
final double muema = Math.exp(-Math.exp(-variables[2]));
final double beta = mucorr * muema;
final double alpha = mucorr - beta;
// double alpha = 1.0/(1.0+Math.exp(-variables[1]));
// double beta = (1.0-alpha)*1.0/(1.0+Math.exp(-variables[2]));
final double displacementNormed = 1.0/(1.0+Math.exp(-variables[3]));
final double displacement = (upperBoundDisplacement-lowerBoundDisplacement)*displacementNormed+lowerBoundDisplacement;
double logLikelihood = getLogLikelihoodForParameters(omega,alpha,beta,displacement);
// Penalty to prevent solver from hitting the bounds
logLikelihood -= Math.max(1E-30-omega,0)/1E-30;
logLikelihood -= Math.max(1E-30-alpha,0)/1E-30;
logLikelihood -= Math.max((alpha-1)+1E-30,0)/1E-30;
logLikelihood -= Math.max(1E-30-beta,0)/1E-30;
logLikelihood -= Math.max((beta-1)+1E-30,0)/1E-30;
logLikelihood -= Math.max(1E-30-displacementNormed,0)/1E-30;
logLikelihood -= Math.max((displacementNormed-1)+1E-30,0)/1E-30;
return logLikelihood;
}
}
final GARCHMaxLikelihoodFunction objectiveFunction = new GARCHMaxLikelihoodFunction();
// Create a guess for the solver
double guessOmega = 1.0;
double guessAlpha = 0.2;
double guessBeta = 0.2;
double guessDisplacement = (lowerBoundDisplacement + upperBoundDisplacement) / 2.0;
if(guess != null) {
// A guess was provided, use that one
guessOmega = (Double)guess.get("Omega");
guessAlpha = (Double)guess.get("Alpha");
guessBeta = (Double)guess.get("Beta");
guessDisplacement = (Double)guess.get("Displacement");
}
// Constrain guess to admissible range
guessOmega = restrictToOpenSet(guessOmega, 0.0, Double.MAX_VALUE);
guessAlpha = restrictToOpenSet(guessAlpha, 0.0, 1.0);
guessBeta = restrictToOpenSet(guessBeta, 0.0, 1.0-guessAlpha);
guessDisplacement = restrictToOpenSet(guessDisplacement, lowerBoundDisplacement, upperBoundDisplacement);
final double guessMucorr = guessAlpha + guessBeta;
final double guessMuema = guessBeta / (guessAlpha+guessBeta);
// Transform guess to solver coordinates
final double[] guessParameters = new double[4];
guessParameters[0] = Math.log(guessOmega);
guessParameters[1] = -Math.log(-Math.log(guessMucorr));
guessParameters[2] = -Math.log(-Math.log(guessMuema));
guessParameters[3] = -Math.log(1.0/((guessDisplacement-lowerBoundDisplacement)/(upperBoundDisplacement-lowerBoundDisplacement))-1.0);
// Seek optimal parameter configuration
// org.apache.commons.math3.optimization.direct.BOBYQAOptimizer optimizer2 = new org.apache.commons.math3.optimization.direct.BOBYQAOptimizer(6);
final org.apache.commons.math3.optimization.direct.CMAESOptimizer optimizer2 = new org.apache.commons.math3.optimization.direct.CMAESOptimizer();
double[] bestParameters = null;
try {
final PointValuePair result = optimizer2.optimize(
maxIterations,
objectiveFunction,
GoalType.MAXIMIZE,
guessParameters /* start point */
);
bestParameters = result.getPoint();
} catch(final org.apache.commons.math3.exception.MathIllegalStateException e) {
			// Retry with new guess. This guess corresponds to omega=1, alpha=0.5; beta=0.25; displacement=1+lowerBoundDisplacement;
final double[] guessParameters2 = {0.0, 0.0, 0.0, 10.0};
/* PointValuePair result = optimizer2.optimize(
maxIterations,
objectiveFunction,
GoalType.MAXIMIZE,
guessParameters2
);*/
System.out.println("Solver failed");
bestParameters = guessParameters2;//result.getPoint();
}
// Transform parameters to GARCH parameters
final double omega = Math.exp(bestParameters[0]);
final double mucorr = Math.exp(-Math.exp(-bestParameters[1]));
final double muema = Math.exp(-Math.exp(-bestParameters[2]));
final double beta = mucorr * muema;
final double alpha = mucorr - beta;
final double displacementNormed = 1.0/(1.0+Math.exp(-bestParameters[3]));
final double displacement = (upperBoundDisplacement-lowerBoundDisplacement)*displacementNormed+lowerBoundDisplacement;
final double[] quantiles = {0.01, 0.05, 0.5};
final double[] quantileValues = this.getQuantilPredictionsForParameters(omega, alpha, beta, displacement, quantiles);
final Map<String, Object> results = new HashMap<>();
results.put("Omega", omega);
results.put("Alpha", alpha);
results.put("Beta", beta);
results.put("Displacement", displacement);
results.put("Likelihood", this.getLogLikelihoodForParameters(omega, alpha, beta, displacement));
results.put("Vol", Math.sqrt(this.getLastResidualForParameters(omega, alpha, beta, displacement)));
results.put("Quantile=1%", quantileValues[0]);
results.put("Quantile=5%", quantileValues[1]);
results.put("Quantile=50%", quantileValues[2]);
return results;
}
private static double restrictToOpenSet(double value, final double lowerBond, final double upperBound) {
value = Math.max(value, lowerBond * (1.0+Math.signum(lowerBond)*1E-15) + 1E-15);
value = Math.min(value, upperBound * (1.0-Math.signum(upperBound)*1E-15) - 1E-15);
return value;
}
}
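// Illustrative usage sketch: fit the model to a historical time series and read off the
// calibrated parameters. "timeSeries" is a hypothetical double[] of observations; the map
// keys are the ones populated by getBestParameters() above.
//
//   DisplacedLognormal model = new DisplacedLognormal(timeSeries);
//   Map<String, Object> parameters = model.getBestParameters();
//   double volatility   = (Double) parameters.get("Vol");
//   double quantile1pct = (Double) parameters.get("Quantile=1%");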
| 4,873 |
492 | from fastapi import FastAPI
from pydantic import BaseModel
from tortoise import fields
from tortoise.models import Model
from tortoise.contrib.fastapi import register_tortoise
from tortoise.contrib.pydantic import pydantic_model_creator
import requests
import aiohttp
import asyncio
app = FastAPI()
session = None
@app.on_event('startup')
async def startup_event():
global session
session = aiohttp.ClientSession()
@app.on_event('shutdown')
async def shutdown_event():
await session.close()
class City(Model):
id = fields.IntField(pk=True)
name = fields.CharField(50, unique=True)
timezone = fields.CharField(50)
def current_time(self) -> str:
return ''
@classmethod
async def get_current_time(cls, obj, session):
async with session.get(f'http://worldtimeapi.org/api/timezone/{obj.timezone}') as response:
result = await response.json()
current_time = result['datetime']
obj.current_time = current_time
class PydanticMeta:
computed = ('current_time', )
City_Pydantic = pydantic_model_creator(City, name='City')
CityIn_Pydantic = pydantic_model_creator(City, name='CityIn', exclude_readonly=True)
@app.get('/')
def index():
return {'key' : 'value'}
@app.get('/cities')
async def get_cities():
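    # Fetch the current time for every city concurrently: one task per city,
    # each mutating that city's computed current_time field before the
    # response is serialized.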
cities = await City_Pydantic.from_queryset(City.all())
global session
tasks = []
for city in cities:
task = asyncio.create_task(City.get_current_time(city, session))
tasks.append(task)
await asyncio.gather(*tasks)
return cities
@app.get('/cities/{city_id}')
async def get_city(city_id: int):
return await City_Pydantic.from_queryset_single(City.get(id=city_id))
@app.post('/cities')
async def create_city(city: CityIn_Pydantic):
city_obj = await City.create(**city.dict(exclude_unset=True))
return await City_Pydantic.from_tortoise_orm(city_obj)
@app.delete('/cities/{city_id}')
async def delete_city(city_id: int):
await City.filter(id=city_id).delete()
return {}
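# Example invocation (assumes this module is saved as main.py, matching the
# modules={'models': ['main']} registration below):
#
#   uvicorn main:app --reload
#
# after which GET /cities returns each stored city together with its
# computed current_time.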
register_tortoise(
app,
db_url='sqlite://db.sqlite3',
modules={'models': ['main']},
generate_schemas=True,
add_exception_handlers=True
) | 860 |
315 | #include "Tools/Code/LDPC/Update_rule/OMS/Update_rule_OMS_simd.hpp"
namespace aff3ct
{
namespace tools
{
template <typename R>
Update_rule_OMS_simd<R>
::Update_rule_OMS_simd(const R offset)
: name("OMS"), offset(offset), MS()
{
}
template <typename R>
inline std::string Update_rule_OMS_simd<R>
::get_name() const
{
return this->name;
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::begin_decoding(const int n_ite)
{
MS.begin_decoding(n_ite);
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::begin_ite(const int ite)
{
MS.begin_ite(ite);
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::begin_chk_node_in(const int chk_id, const int chk_degree)
{
MS.begin_chk_node_in(chk_id, chk_degree);
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::compute_chk_node_in(const int var_id, const mipp::Reg<R> var_val)
{
MS.compute_chk_node_in(var_id, var_val);
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::end_chk_node_in()
{
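	// Offset Min-Sum correction: subtract the constant offset from the two
	// smallest input magnitudes accumulated by the underlying Min-Sum rule
	// (min1, min2) and clip the results at zero.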
MS.cst1 = MS.min2 - this->offset;
MS.cst2 = MS.min1 - this->offset;
MS.cst1 = mipp::max(MS.zero, MS.cst1);
MS.cst2 = mipp::max(MS.zero, MS.cst2);
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::begin_chk_node_out(const int chk_id, const int chk_degree)
{
MS.begin_chk_node_out(chk_id, chk_degree);
}
template <typename R>
inline mipp::Reg<R> Update_rule_OMS_simd<R>
::compute_chk_node_out(const int var_id, const mipp::Reg<R> var_val)
{
return MS.compute_chk_node_out(var_id, var_val);
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::end_chk_node_out()
{
MS.end_chk_node_out();
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::end_ite()
{
MS.end_ite();
}
template <typename R>
inline void Update_rule_OMS_simd<R>
::end_decoding()
{
MS.end_decoding();
}
}
}
| 827 |
3,172 | <reponame>lp2333/PARL
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from parl.utils import logger
__all__ = ['ReplayMemory']
class ReplayMemory(object):
def __init__(self, max_size, obs_dim, act_dim):
""" create a replay memory for off-policy RL or offline RL.
Args:
max_size (int): max size of replay memory
obs_dim (list or tuple): observation shape
act_dim (list or tuple): action shape
"""
self.max_size = int(max_size)
self.obs_dim = obs_dim
self.act_dim = act_dim
self.obs = np.zeros((max_size, obs_dim), dtype='float32')
if act_dim == 0: # Discrete control environment
self.action = np.zeros((max_size, ), dtype='int32')
else: # Continuous control environment
self.action = np.zeros((max_size, act_dim), dtype='float32')
self.reward = np.zeros((max_size, ), dtype='float32')
self.terminal = np.zeros((max_size, ), dtype='bool')
self.next_obs = np.zeros((max_size, obs_dim), dtype='float32')
self._curr_size = 0
self._curr_pos = 0
def sample_batch(self, batch_size):
""" sample a batch from replay memory
Args:
batch_size (int): batch size
Returns:
a batch of experience samples: obs, action, reward, next_obs, terminal
"""
batch_idx = np.random.randint(self._curr_size, size=batch_size)
obs = self.obs[batch_idx]
reward = self.reward[batch_idx]
action = self.action[batch_idx]
next_obs = self.next_obs[batch_idx]
terminal = self.terminal[batch_idx]
return obs, action, reward, next_obs, terminal
def make_index(self, batch_size):
""" sample a batch of indexes
Args:
batch_size (int): batch size
Returns:
batch of indexes
"""
batch_idx = np.random.randint(self._curr_size, size=batch_size)
return batch_idx
def sample_batch_by_index(self, batch_idx):
""" sample a batch from replay memory by indexes
Args:
batch_idx (list or np.array): batch indexes
Returns:
a batch of experience samples: obs, action, reward, next_obs, terminal
"""
obs = self.obs[batch_idx]
reward = self.reward[batch_idx]
action = self.action[batch_idx]
next_obs = self.next_obs[batch_idx]
terminal = self.terminal[batch_idx]
return obs, action, reward, next_obs, terminal
def append(self, obs, act, reward, next_obs, terminal):
""" add an experience sample at the end of replay memory
Args:
obs (float32): observation, shape of obs_dim
            act (int32 in Discrete control environment, float32 in Continuous control environment): action, shape of act_dim
reward (float32): reward
next_obs (float32): next observation, shape of obs_dim
terminal (bool): terminal of an episode or not
"""
if self._curr_size < self.max_size:
self._curr_size += 1
self.obs[self._curr_pos] = obs
self.action[self._curr_pos] = act
self.reward[self._curr_pos] = reward
self.next_obs[self._curr_pos] = next_obs
self.terminal[self._curr_pos] = terminal
self._curr_pos = (self._curr_pos + 1) % self.max_size
def size(self):
""" get current size of replay memory.
"""
return self._curr_size
def __len__(self):
return self._curr_size
def save(self, pathname):
""" save replay memory to local file (numpy file format: *.npz).
"""
other = np.array([self._curr_size, self._curr_pos], dtype=np.int32)
np.savez(
pathname,
obs=self.obs,
action=self.action,
reward=self.reward,
terminal=self.terminal,
next_obs=self.next_obs,
other=other)
def load(self, pathname):
""" load replay memory from local file (numpy file format: *.npz).
"""
data = np.load(pathname)
other = data['other']
if int(other[0]) > self.max_size:
logger.warn('loading from a bigger size rpm!')
self._curr_size = min(int(other[0]), self.max_size)
self._curr_pos = min(int(other[1]), self.max_size - 1)
self.obs[:self._curr_size] = data['obs'][:self._curr_size]
self.action[:self._curr_size] = data['action'][:self._curr_size]
self.reward[:self._curr_size] = data['reward'][:self._curr_size]
self.terminal[:self._curr_size] = data['terminal'][:self._curr_size]
self.next_obs[:self._curr_size] = data['next_obs'][:self._curr_size]
        logger.info("[load rpm] memory loaded from {}".format(pathname))
def load_from_d4rl(self, dataset):
""" load data from d4rl dataset(https://github.com/rail-berkeley/d4rl#using-d4rl) to replay memory.
Args:
dataset(dict): dataset that contains:
observations (np.float32): shape of (batch_size, obs_dim),
next_observations (np.int32): shape of (batch_size, obs_dim),
actions (np.float32): shape of (batch_size, act_dim),
rewards (np.float32): shape of (batch_size),
terminals (bool): shape of (batch_size)
Example:
.. code-block:: python
import gym
import d4rl
env = gym.make("hopper-medium-v0")
rpm = ReplayMemory(max_size=int(2e6), obs_dim=11, act_dim=3)
rpm.load_from_d4rl(d4rl.qlearning_dataset(env))
# Output
# Dataset Info:
# key: observations, shape: (999981, 11), dtype: float32
# key: actions, shape: (999981, 3), dtype: float32
# key: next_observations, shape: (999981, 11), dtype: float32
# key: rewards, shape: (999981,), dtype: float32
# key: terminals, shape: (999981,), dtype: bool
# Number of terminals on: 3045
"""
logger.info("Dataset Info: ")
for key in dataset:
logger.info('key: {},\tshape: {},\tdtype: {}'.format(
key, dataset[key].shape, dataset[key].dtype))
assert 'observations' in dataset
assert 'next_observations' in dataset
assert 'actions' in dataset
assert 'rewards' in dataset
assert 'terminals' in dataset
self.obs = dataset['observations']
self.next_obs = dataset['next_observations']
self.action = dataset['actions']
self.reward = dataset['rewards']
self.terminal = dataset['terminals']
self._curr_size = dataset['terminals'].shape[0]
assert self._curr_size <= self.max_size, 'please set a proper max_size for ReplayMemory'
logger.info('Number of terminals on: {}'.format(self.terminal.sum()))
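# Illustrative usage sketch; the dimensions and batch size below are arbitrary
# assumptions, and obs/act/reward/next_obs/terminal are assumed to come from an
# environment step:
#
#   rpm = ReplayMemory(max_size=int(1e5), obs_dim=4, act_dim=2)
#   rpm.append(obs, act, reward, next_obs, terminal)          # store one transition
#   if len(rpm) >= 64:
#       obs_b, act_b, rew_b, next_obs_b, term_b = rpm.sample_batch(64)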
| 3,404 |
12,252 | /*
* Copyright 2020 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.model.parameters;
import com.google.common.collect.ImmutableSet;
import org.keycloak.common.util.MultivaluedHashMap;
import org.keycloak.provider.ProviderFactory;
import org.keycloak.provider.Spi;
import org.keycloak.representations.idm.ComponentRepresentation;
import org.keycloak.storage.UserStorageProvider;
import org.keycloak.storage.UserStorageProviderModel;
import org.keycloak.testsuite.federation.UserMapStorageFactory;
import org.keycloak.testsuite.federation.UserPropertyFileStorageFactory;
import org.keycloak.testsuite.model.KeycloakModelParameters;
import java.io.File;
import java.net.URISyntaxException;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
/**
*
* @author hmlnarik
*/
public class TestsuiteUserFileStorage extends KeycloakModelParameters {
static final Set<Class<? extends Spi>> ALLOWED_SPIS = ImmutableSet.<Class<? extends Spi>>builder()
.build();
static final Set<Class<? extends ProviderFactory>> ALLOWED_FACTORIES = ImmutableSet.<Class<? extends ProviderFactory>>builder()
.add(UserPropertyFileStorageFactory.class)
.build();
private static final File CONFIG_DIR;
static {
try {
CONFIG_DIR = new File(TestsuiteUserFileStorage.class.getClassLoader().getResource("file-storage-provider").toURI());
} catch (URISyntaxException e) {
throw new RuntimeException("Cannot get resource directory");
}
}
public TestsuiteUserFileStorage() {
super(ALLOWED_SPIS, ALLOWED_FACTORIES);
}
@Override
public <T> Stream<T> getParameters(Class<T> clazz) {
if (UserStorageProviderModel.class.isAssignableFrom(clazz)) {
UserStorageProviderModel propProviderRO = new UserStorageProviderModel();
propProviderRO.setName("read-only-user-props");
propProviderRO.setProviderId(UserPropertyFileStorageFactory.PROVIDER_ID);
propProviderRO.setProviderType(UserStorageProvider.class.getName());
propProviderRO.setConfig(new MultivaluedHashMap<>());
propProviderRO.getConfig().putSingle("priority", Integer.toString(1));
propProviderRO.getConfig().putSingle("propertyFile",
CONFIG_DIR.getAbsolutePath() + File.separator + "read-only-user-password.properties");
UserStorageProviderModel propProviderRW = new UserStorageProviderModel();
propProviderRW.setName("user-props");
propProviderRW.setProviderId(UserPropertyFileStorageFactory.PROVIDER_ID);
propProviderRW.setProviderType(UserStorageProvider.class.getName());
propProviderRW.setConfig(new MultivaluedHashMap<>());
propProviderRW.getConfig().putSingle("priority", Integer.toString(2));
propProviderRW.getConfig().putSingle("propertyFile", CONFIG_DIR.getAbsolutePath() + File.separator + "user-password.properties");
propProviderRW.getConfig().putSingle("federatedStorage", "true");
return Stream.of((T) propProviderRO, (T) propProviderRW);
} else {
return super.getParameters(clazz);
}
}
}
| 1,344 |
349 | #include <pal.h>
/**
 * Computes an interpolating FIR filter (direct form) on input data 'x' using
 * the coefficients stored in 'h' and places the result in 'r'. This function
 * retains the address of the filter delay memory containing the previous
 * delayed values, allowing consecutive blocks to be processed.
*
* @param x Pointer to input vector of 'nx' elements
*
* @param h Pointer to 'nh' filter coefficients
*
* @param r Output vector of size 'nx*ifactor'
*
* @param nx The number of input samples
* @param nh The number of coefficients of the filter
*
* @param ifactor Interpolation factor. 'ifactor' output samples produced
* for every 1 input sample
*
* @return None
*
*/
void p_firint_f32(const float *x, const float *h, float *r, int nx, int nh,
int ifactor)
{
/*PLACE CODE HERE*/
}
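/*
 * A naive polyphase reference sketch (an assumption, not the tuned PAL
 * implementation; it also ignores the cross-block delay memory mentioned
 * above and treats samples before the current block as zero):
 *
 *   for (int n = 0; n < nx; n++)
 *       for (int p = 0; p < ifactor; p++) {
 *           float acc = 0.0f;
 *           for (int k = p; k < nh; k += ifactor) {
 *               int idx = n - (k - p) / ifactor;
 *               if (idx >= 0)
 *                   acc += h[k] * x[idx];
 *           }
 *           r[n * ifactor + p] = acc;
 *       }
 */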
| 309 |
1,290 | <reponame>dfirpaul/Active-Directory-Exploitation-Cheat-Sheet-1
/*
* This software is Copyright (c) 2018 magnum,
* and it is hereby released to the general public under the following terms:
* Redistribution and use in source and binary forms, with or without
* modification, are permitted.
*
* Fast, portable, and easy-to-use Twofish implementation,
* Version 0.3.
* Copyright (c) 2002 by <NAME>.
*
* See the twofish.c file for the details of the how and why of this code.
*
* The author hereby grants a perpetual license to everybody to
* use this code for any purpose as long as the copyright message is included
* in the source code of this or any derived work.
*
***************************************************************************
* Copyright (C) 2005-2007 <NAME> <<EMAIL>> *
* Copyright (c) 2003,2004 <NAME> <<EMAIL>> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; version 2 of the License. *
* *
* This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
***************************************************************************
*/
#ifndef OPENCL_TWOFISH_H_
#define OPENCL_TWOFISH_H_
#include "opencl_misc.h"
typedef uchar Twofish_Byte;
typedef uint Twofish_UInt32;
typedef struct {
Twofish_UInt32 s[4][256];
Twofish_UInt32 K[40];
} Twofish_key;
#define UINT32_MASK ( (((UInt32)2)<<31) - 1 )
#define ROL32( x, n ) rotate((x), (uint)(n))
#define ROR32( x, n ) rotate((x), (32U - (n)))
#if __OS_X__
#define LARGE_Q_TABLE 0
#else
#define LARGE_Q_TABLE 1
#endif
#define SELECT_BYTE_FROM_UINT32_IN_MEMORY 0
#define CONVERT_USING_CASTS 0
#if __ENDIAN_LITTLE__
#define CPU_IS_BIG_ENDIAN 0
#else
#define CPU_IS_BIG_ENDIAN 1
#endif
#define BSWAP(x) SWAP32(x)
typedef Twofish_Byte Byte;
typedef Twofish_UInt32 UInt32;
#if CPU_IS_BIG_ENDIAN
#define ENDIAN_CONVERT(x) BSWAP(x)
#define BYTE_OFFSET( n ) (sizeof(UInt32) - 1 - (n) )
#else
#define ENDIAN_CONVERT(x) (x)
#define BYTE_OFFSET( n ) (n)
#endif
#if SELECT_BYTE_FROM_UINT32_IN_MEMORY
#define SELECT_BYTE( X, b ) (((Byte *)(&(X)))[BYTE_OFFSET(b)])
#else
#define SELECT_BYTE( X, b ) (((X) >> 8*(b)) & 0xff)
#endif
#define b0(X) SELECT_BYTE((X),0)
#define b1(X) SELECT_BYTE((X),1)
#define b2(X) SELECT_BYTE((X),2)
#define b3(X) SELECT_BYTE((X),3)
#if CONVERT_USING_CASTS
#define GET32( p ) ENDIAN_CONVERT( *((UInt32 *)(p)) )
#define PUT32( v, p ) *((UInt32 *)(p)) = ENDIAN_CONVERT(v)
#else
#define GET32( p ) \
( \
(UInt32)((p)[0]) \
| (UInt32)((p)[1])<< 8 \
| (UInt32)((p)[2])<<16 \
| (UInt32)((p)[3])<<24 \
)
#define PUT32( v, p ) \
(p)[0] = (Byte)(((v) ) & 0xff); \
(p)[1] = (Byte)(((v) >> 8) & 0xff); \
(p)[2] = (Byte)(((v) >> 16) & 0xff); \
(p)[3] = (Byte)(((v) >> 24) & 0xff)
#endif
__constant Byte t_table[2][4][16] =
{
{
{0x8,0x1,0x7,0xD,0x6,0xF,0x3,0x2,0x0,0xB,0x5,0x9,0xE,0xC,0xA,0x4},
{0xE,0xC,0xB,0x8,0x1,0x2,0x3,0x5,0xF,0x4,0xA,0x6,0x7,0x0,0x9,0xD},
{0xB,0xA,0x5,0xE,0x6,0xD,0x9,0x0,0xC,0x8,0xF,0x3,0x2,0x4,0x7,0x1},
{0xD,0x7,0xF,0x4,0x1,0x2,0x6,0xE,0x9,0xB,0x3,0x0,0x8,0x5,0xC,0xA}
},
{
{0x2,0x8,0xB,0xD,0xF,0x7,0x6,0xE,0x3,0x1,0x9,0x4,0x0,0xA,0xC,0x5},
{0x1,0xE,0x2,0xB,0x4,0xC,0x3,0x7,0x6,0xD,0xA,0x5,0xF,0x9,0x0,0x8},
{0x4,0xC,0x7,0x5,0x1,0x6,0x9,0xA,0x0,0xE,0xD,0x8,0x2,0xB,0x3,0xF},
{0xB,0x9,0x5,0x1,0xC,0x3,0xD,0xE,0x6,0x4,0x7,0xF,0x2,0x0,0x8,0xA}
}
};
#define ROR4BY1( x ) (((x)>>1) | (((x)<<3) & 0x8) )
#if LARGE_Q_TABLE
typedef UInt32 Qtype;
#else
typedef Byte Qtype;
#endif
__constant Qtype q_table[2][256] =
{
{
0xA9, 0x67, 0xB3, 0xE8, 0x04, 0xFD, 0xA3, 0x76, 0x9A, 0x92, 0x80, 0x78,
0xE4, 0xDD, 0xD1, 0x38, 0x0D, 0xC6, 0x35, 0x98, 0x18, 0xF7, 0xEC, 0x6C,
0x43, 0x75, 0x37, 0x26, 0xFA, 0x13, 0x94, 0x48, 0xF2, 0xD0, 0x8B, 0x30,
0x84, 0x54, 0xDF, 0x23, 0x19, 0x5B, 0x3D, 0x59, 0xF3, 0xAE, 0xA2, 0x82,
0x63, 0x01, 0x83, 0x2E, 0xD9, 0x51, 0x9B, 0x7C, 0xA6, 0xEB, 0xA5, 0xBE,
0x16, 0x0C, 0xE3, 0x61, 0xC0, 0x8C, 0x3A, 0xF5, 0x73, 0x2C, 0x25, 0x0B,
0xBB, 0x4E, 0x89, 0x6B, 0x53, 0x6A, 0xB4, 0xF1, 0xE1, 0xE6, 0xBD, 0x45,
0xE2, 0xF4, 0xB6, 0x66, 0xCC, 0x95, 0x03, 0x56, 0xD4, 0x1C, 0x1E, 0xD7,
0xFB, 0xC3, 0x8E, 0xB5, 0xE9, 0xCF, 0xBF, 0xBA, 0xEA, 0x77, 0x39, 0xAF,
0x33, 0xC9, 0x62, 0x71, 0x81, 0x79, 0x09, 0xAD, 0x24, 0xCD, 0xF9, 0xD8,
0xE5, 0xC5, 0xB9, 0x4D, 0x44, 0x08, 0x86, 0xE7, 0xA1, 0x1D, 0xAA, 0xED,
0x06, 0x70, 0xB2, 0xD2, 0x41, 0x7B, 0xA0, 0x11, 0x31, 0xC2, 0x27, 0x90,
0x20, 0xF6, 0x60, 0xFF, 0x96, 0x5C, 0xB1, 0xAB, 0x9E, 0x9C, 0x52, 0x1B,
0x5F, 0x93, 0x0A, 0xEF, 0x91, 0x85, 0x49, 0xEE, 0x2D, 0x4F, 0x8F, 0x3B,
0x47, 0x87, 0x6D, 0x46, 0xD6, 0x3E, 0x69, 0x64, 0x2A, 0xCE, 0xCB, 0x2F,
0xFC, 0x97, 0x05, 0x7A, 0xAC, 0x7F, 0xD5, 0x1A, 0x4B, 0x0E, 0xA7, 0x5A,
0x28, 0x14, 0x3F, 0x29, 0x88, 0x3C, 0x4C, 0x02, 0xB8, 0xDA, 0xB0, 0x17,
0x55, 0x1F, 0x8A, 0x7D, 0x57, 0xC7, 0x8D, 0x74, 0xB7, 0xC4, 0x9F, 0x72,
0x7E, 0x15, 0x22, 0x12, 0x58, 0x07, 0x99, 0x34, 0x6E, 0x50, 0xDE, 0x68,
0x65, 0xBC, 0xDB, 0xF8, 0xC8, 0xA8, 0x2B, 0x40, 0xDC, 0xFE, 0x32, 0xA4,
0xCA, 0x10, 0x21, 0xF0, 0xD3, 0x5D, 0x0F, 0x00, 0x6F, 0x9D, 0x36, 0x42,
0x4A, 0x5E, 0xC1, 0xE0
},
{
0x75, 0xF3, 0xC6, 0xF4, 0xDB, 0x7B, 0xFB, 0xC8, 0x4A, 0xD3, 0xE6, 0x6B,
0x45, 0x7D, 0xE8, 0x4B, 0xD6, 0x32, 0xD8, 0xFD, 0x37, 0x71, 0xF1, 0xE1,
0x30, 0x0F, 0xF8, 0x1B, 0x87, 0xFA, 0x06, 0x3F, 0x5E, 0xBA, 0xAE, 0x5B,
0x8A, 0x00, 0xBC, 0x9D, 0x6D, 0xC1, 0xB1, 0x0E, 0x80, 0x5D, 0xD2, 0xD5,
0xA0, 0x84, 0x07, 0x14, 0xB5, 0x90, 0x2C, 0xA3, 0xB2, 0x73, 0x4C, 0x54,
0x92, 0x74, 0x36, 0x51, 0x38, 0xB0, 0xBD, 0x5A, 0xFC, 0x60, 0x62, 0x96,
0x6C, 0x42, 0xF7, 0x10, 0x7C, 0x28, 0x27, 0x8C, 0x13, 0x95, 0x9C, 0xC7,
0x24, 0x46, 0x3B, 0x70, 0xCA, 0xE3, 0x85, 0xCB, 0x11, 0xD0, 0x93, 0xB8,
0xA6, 0x83, 0x20, 0xFF, 0x9F, 0x77, 0xC3, 0xCC, 0x03, 0x6F, 0x08, 0xBF,
0x40, 0xE7, 0x2B, 0xE2, 0x79, 0x0C, 0xAA, 0x82, 0x41, 0x3A, 0xEA, 0xB9,
0xE4, 0x9A, 0xA4, 0x97, 0x7E, 0xDA, 0x7A, 0x17, 0x66, 0x94, 0xA1, 0x1D,
0x3D, 0xF0, 0xDE, 0xB3, 0x0B, 0x72, 0xA7, 0x1C, 0xEF, 0xD1, 0x53, 0x3E,
0x8F, 0x33, 0x26, 0x5F, 0xEC, 0x76, 0x2A, 0x49, 0x81, 0x88, 0xEE, 0x21,
0xC4, 0x1A, 0xEB, 0xD9, 0xC5, 0x39, 0x99, 0xCD, 0xAD, 0x31, 0x8B, 0x01,
0x18, 0x23, 0xDD, 0x1F, 0x4E, 0x2D, 0xF9, 0x48, 0x4F, 0xF2, 0x65, 0x8E,
0x78, 0x5C, 0x58, 0x19, 0x8D, 0xE5, 0x98, 0x57, 0x67, 0x7F, 0x05, 0x64,
0xAF, 0x63, 0xB6, 0xFE, 0xF5, 0xB7, 0x3C, 0xA5, 0xCE, 0xE9, 0x68, 0x44,
0xE0, 0x4D, 0x43, 0x69, 0x29, 0x2E, 0xAC, 0x15, 0x59, 0xA8, 0x0A, 0x9E,
0x6E, 0x47, 0xDF, 0x34, 0x35, 0x6A, 0xCF, 0xDC, 0x22, 0xC9, 0xC0, 0x9B,
0x89, 0xD4, 0xED, 0xAB, 0x12, 0xA2, 0x0D, 0x52, 0xBB, 0x02, 0x2F, 0xA9,
0xD7, 0x61, 0x1E, 0xB4, 0x50, 0x04, 0xF6, 0xC2, 0x16, 0x25, 0x86, 0x56,
0x55, 0x09, 0xBE, 0x91
}
};
__constant UInt32 MDS_table[4][256] =
{
{ 0xBCBC3275, 0xECEC21F3, 0x202043C6, 0xB3B3C9F4, 0xDADA03DB, 0x02028B7B,
0xE2E22BFB, 0x9E9EFAC8, 0xC9C9EC4A, 0xD4D409D3, 0x18186BE6, 0x1E1E9F6B,
0x98980E45, 0xB2B2387D, 0xA6A6D2E8, 0x2626B74B, 0x3C3C57D6, 0x93938A32,
0x8282EED8, 0x525298FD, 0x7B7BD437, 0xBBBB3771, 0x5B5B97F1, 0x474783E1,
0x24243C30, 0x5151E20F, 0xBABAC6F8, 0x4A4AF31B, 0xBFBF4887, 0x0D0D70FA,
0xB0B0B306, 0x7575DE3F, 0xD2D2FD5E, 0x7D7D20BA, 0x666631AE, 0x3A3AA35B,
0x59591C8A, 0x00000000, 0xCDCD93BC, 0x1A1AE09D, 0xAEAE2C6D, 0x7F7FABC1,
0x2B2BC7B1, 0xBEBEB90E, 0xE0E0A080, 0x8A8A105D, 0x3B3B52D2, 0x6464BAD5,
0xD8D888A0, 0xE7E7A584, 0x5F5FE807, 0x1B1B1114, 0x2C2CC2B5, 0xFCFCB490,
0x3131272C, 0x808065A3, 0x73732AB2, 0x0C0C8173, 0x79795F4C, 0x6B6B4154,
0x4B4B0292, 0x53536974, 0x94948F36, 0x83831F51, 0x2A2A3638, 0xC4C49CB0,
0x2222C8BD, 0xD5D5F85A, 0xBDBDC3FC, 0x48487860, 0xFFFFCE62, 0x4C4C0796,
0x4141776C, 0xC7C7E642, 0xEBEB24F7, 0x1C1C1410, 0x5D5D637C, 0x36362228,
0x6767C027, 0xE9E9AF8C, 0x4444F913, 0x1414EA95, 0xF5F5BB9C, 0xCFCF18C7,
0x3F3F2D24, 0xC0C0E346, 0x7272DB3B, 0x54546C70, 0x29294CCA, 0xF0F035E3,
0x0808FE85, 0xC6C617CB, 0xF3F34F11, 0x8C8CE4D0, 0xA4A45993, 0xCACA96B8,
0x68683BA6, 0xB8B84D83, 0x38382820, 0xE5E52EFF, 0xADAD569F, 0x0B0B8477,
0xC8C81DC3, 0x9999FFCC, 0x5858ED03, 0x19199A6F, 0x0E0E0A08, 0x95957EBF,
0x70705040, 0xF7F730E7, 0x6E6ECF2B, 0x1F1F6EE2, 0xB5B53D79, 0x09090F0C,
0x616134AA, 0x57571682, 0x9F9F0B41, 0x9D9D803A, 0x111164EA, 0x2525CDB9,
0xAFAFDDE4, 0x4545089A, 0xDFDF8DA4, 0xA3A35C97, 0xEAEAD57E, 0x353558DA,
0xEDEDD07A, 0x4343FC17, 0xF8F8CB66, 0xFBFBB194, 0x3737D3A1, 0xFAFA401D,
0xC2C2683D, 0xB4B4CCF0, 0x32325DDE, 0x9C9C71B3, 0x5656E70B, 0xE3E3DA72,
0x878760A7, 0x15151B1C, 0xF9F93AEF, 0x6363BFD1, 0x3434A953, 0x9A9A853E,
0xB1B1428F, 0x7C7CD133, 0x88889B26, 0x3D3DA65F, 0xA1A1D7EC, 0xE4E4DF76,
0x8181942A, 0x91910149, 0x0F0FFB81, 0xEEEEAA88, 0x161661EE, 0xD7D77321,
0x9797F5C4, 0xA5A5A81A, 0xFEFE3FEB, 0x6D6DB5D9, 0x7878AEC5, 0xC5C56D39,
0x1D1DE599, 0x7676A4CD, 0x3E3EDCAD, 0xCBCB6731, 0xB6B6478B, 0xEFEF5B01,
0x12121E18, 0x6060C523, 0x6A6AB0DD, 0x4D4DF61F, 0xCECEE94E, 0xDEDE7C2D,
0x55559DF9, 0x7E7E5A48, 0x2121B24F, 0x03037AF2, 0xA0A02665, 0x5E5E198E,
0x5A5A6678, 0x65654B5C, 0x62624E58, 0xFDFD4519, 0x0606F48D, 0x404086E5,
0xF2F2BE98, 0x3333AC57, 0x17179067, 0x05058E7F, 0xE8E85E05, 0x4F4F7D64,
0x89896AAF, 0x10109563, 0x74742FB6, 0x0A0A75FE, 0x5C5C92F5, 0x9B9B74B7,
0x2D2D333C, 0x3030D6A5, 0x2E2E49CE, 0x494989E9, 0x46467268, 0x77775544,
0xA8A8D8E0, 0x9696044D, 0x2828BD43, 0xA9A92969, 0xD9D97929, 0x8686912E,
0xD1D187AC, 0xF4F44A15, 0x8D8D1559, 0xD6D682A8, 0xB9B9BC0A, 0x42420D9E,
0xF6F6C16E, 0x2F2FB847, 0xDDDD06DF, 0x23233934, 0xCCCC6235, 0xF1F1C46A,
0xC1C112CF, 0x8585EBDC, 0x8F8F9E22, 0x7171A1C9, 0x9090F0C0, 0xAAAA539B,
0x0101F189, 0x8B8BE1D4, 0x4E4E8CED, 0x8E8E6FAB, 0xABABA212, 0x6F6F3EA2,
0xE6E6540D, 0xDBDBF252, 0x92927BBB, 0xB7B7B602, 0x6969CA2F, 0x3939D9A9,
0xD3D30CD7, 0xA7A72361, 0xA2A2AD1E, 0xC3C399B4, 0x6C6C4450, 0x07070504,
0x04047FF6, 0x272746C2, 0xACACA716, 0xD0D07625, 0x50501386, 0xDCDCF756,
0x84841A55, 0xE1E15109, 0x7A7A25BE, 0x1313EF91 },
{ 0xA9D93939, 0x67901717, 0xB3719C9C, 0xE8D2A6A6, 0x04050707, 0xFD985252,
0xA3658080, 0x76DFE4E4, 0x9A084545, 0x92024B4B, 0x80A0E0E0, 0x78665A5A,
0xE4DDAFAF, 0xDDB06A6A, 0xD1BF6363, 0x38362A2A, 0x0D54E6E6, 0xC6432020,
0x3562CCCC, 0x98BEF2F2, 0x181E1212, 0xF724EBEB, 0xECD7A1A1, 0x6C774141,
0x43BD2828, 0x7532BCBC, 0x37D47B7B, 0x269B8888, 0xFA700D0D, 0x13F94444,
0x94B1FBFB, 0x485A7E7E, 0xF27A0303, 0xD0E48C8C, 0x8B47B6B6, 0x303C2424,
0x84A5E7E7, 0x54416B6B, 0xDF06DDDD, 0x23C56060, 0x1945FDFD, 0x5BA33A3A,
0x3D68C2C2, 0x59158D8D, 0xF321ECEC, 0xAE316666, 0xA23E6F6F, 0x82165757,
0x63951010, 0x015BEFEF, 0x834DB8B8, 0x2E918686, 0xD9B56D6D, 0x511F8383,
0x9B53AAAA, 0x7C635D5D, 0xA63B6868, 0xEB3FFEFE, 0xA5D63030, 0xBE257A7A,
0x16A7ACAC, 0x0C0F0909, 0xE335F0F0, 0x6123A7A7, 0xC0F09090, 0x8CAFE9E9,
0x3A809D9D, 0xF5925C5C, 0x73810C0C, 0x2C273131, 0x2576D0D0, 0x0BE75656,
0xBB7B9292, 0x4EE9CECE, 0x89F10101, 0x6B9F1E1E, 0x53A93434, 0x6AC4F1F1,
0xB499C3C3, 0xF1975B5B, 0xE1834747, 0xE66B1818, 0xBDC82222, 0x450E9898,
0xE26E1F1F, 0xF4C9B3B3, 0xB62F7474, 0x66CBF8F8, 0xCCFF9999, 0x95EA1414,
0x03ED5858, 0x56F7DCDC, 0xD4E18B8B, 0x1C1B1515, 0x1EADA2A2, 0xD70CD3D3,
0xFB2BE2E2, 0xC31DC8C8, 0x8E195E5E, 0xB5C22C2C, 0xE9894949, 0xCF12C1C1,
0xBF7E9595, 0xBA207D7D, 0xEA641111, 0x77840B0B, 0x396DC5C5, 0xAF6A8989,
0x33D17C7C, 0xC9A17171, 0x62CEFFFF, 0x7137BBBB, 0x81FB0F0F, 0x793DB5B5,
0x0951E1E1, 0xADDC3E3E, 0x242D3F3F, 0xCDA47676, 0xF99D5555, 0xD8EE8282,
0xE5864040, 0xC5AE7878, 0xB9CD2525, 0x4D049696, 0x44557777, 0x080A0E0E,
0x86135050, 0xE730F7F7, 0xA1D33737, 0x1D40FAFA, 0xAA346161, 0xED8C4E4E,
0x06B3B0B0, 0x706C5454, 0xB22A7373, 0xD2523B3B, 0x410B9F9F, 0x7B8B0202,
0xA088D8D8, 0x114FF3F3, 0x3167CBCB, 0xC2462727, 0x27C06767, 0x90B4FCFC,
0x20283838, 0xF67F0404, 0x60784848, 0xFF2EE5E5, 0x96074C4C, 0x5C4B6565,
0xB1C72B2B, 0xAB6F8E8E, 0x9E0D4242, 0x9CBBF5F5, 0x52F2DBDB, 0x1BF34A4A,
0x5FA63D3D, 0x9359A4A4, 0x0ABCB9B9, 0xEF3AF9F9, 0x91EF1313, 0x85FE0808,
0x49019191, 0xEE611616, 0x2D7CDEDE, 0x4FB22121, 0x8F42B1B1, 0x3BDB7272,
0x47B82F2F, 0x8748BFBF, 0x6D2CAEAE, 0x46E3C0C0, 0xD6573C3C, 0x3E859A9A,
0x6929A9A9, 0x647D4F4F, 0x2A948181, 0xCE492E2E, 0xCB17C6C6, 0x2FCA6969,
0xFCC3BDBD, 0x975CA3A3, 0x055EE8E8, 0x7AD0EDED, 0xAC87D1D1, 0x7F8E0505,
0xD5BA6464, 0x1AA8A5A5, 0x4BB72626, 0x0EB9BEBE, 0xA7608787, 0x5AF8D5D5,
0x28223636, 0x14111B1B, 0x3FDE7575, 0x2979D9D9, 0x88AAEEEE, 0x3C332D2D,
0x4C5F7979, 0x02B6B7B7, 0xB896CACA, 0xDA583535, 0xB09CC4C4, 0x17FC4343,
0x551A8484, 0x1FF64D4D, 0x8A1C5959, 0x7D38B2B2, 0x57AC3333, 0xC718CFCF,
0x8DF40606, 0x74695353, 0xB7749B9B, 0xC4F59797, 0x9F56ADAD, 0x72DAE3E3,
0x7ED5EAEA, 0x154AF4F4, 0x229E8F8F, 0x12A2ABAB, 0x584E6262, 0x07E85F5F,
0x99E51D1D, 0x34392323, 0x6EC1F6F6, 0x50446C6C, 0xDE5D3232, 0x68724646,
0x6526A0A0, 0xBC93CDCD, 0xDB03DADA, 0xF8C6BABA, 0xC8FA9E9E, 0xA882D6D6,
0x2BCF6E6E, 0x40507070, 0xDCEB8585, 0xFE750A0A, 0x328A9393, 0xA48DDFDF,
0xCA4C2929, 0x10141C1C, 0x2173D7D7, 0xF0CCB4B4, 0xD309D4D4, 0x5D108A8A,
0x0FE25151, 0x00000000, 0x6F9A1919, 0x9DE01A1A, 0x368F9494, 0x42E6C7C7,
0x4AECC9C9, 0x5EFDD2D2, 0xC1AB7F7F, 0xE0D8A8A8 },
{ 0xBC75BC32, 0xECF3EC21, 0x20C62043, 0xB3F4B3C9, 0xDADBDA03, 0x027B028B,
0xE2FBE22B, 0x9EC89EFA, 0xC94AC9EC, 0xD4D3D409, 0x18E6186B, 0x1E6B1E9F,
0x9845980E, 0xB27DB238, 0xA6E8A6D2, 0x264B26B7, 0x3CD63C57, 0x9332938A,
0x82D882EE, 0x52FD5298, 0x7B377BD4, 0xBB71BB37, 0x5BF15B97, 0x47E14783,
0x2430243C, 0x510F51E2, 0xBAF8BAC6, 0x4A1B4AF3, 0xBF87BF48, 0x0DFA0D70,
0xB006B0B3, 0x753F75DE, 0xD25ED2FD, 0x7DBA7D20, 0x66AE6631, 0x3A5B3AA3,
0x598A591C, 0x00000000, 0xCDBCCD93, 0x1A9D1AE0, 0xAE6DAE2C, 0x7FC17FAB,
0x2BB12BC7, 0xBE0EBEB9, 0xE080E0A0, 0x8A5D8A10, 0x3BD23B52, 0x64D564BA,
0xD8A0D888, 0xE784E7A5, 0x5F075FE8, 0x1B141B11, 0x2CB52CC2, 0xFC90FCB4,
0x312C3127, 0x80A38065, 0x73B2732A, 0x0C730C81, 0x794C795F, 0x6B546B41,
0x4B924B02, 0x53745369, 0x9436948F, 0x8351831F, 0x2A382A36, 0xC4B0C49C,
0x22BD22C8, 0xD55AD5F8, 0xBDFCBDC3, 0x48604878, 0xFF62FFCE, 0x4C964C07,
0x416C4177, 0xC742C7E6, 0xEBF7EB24, 0x1C101C14, 0x5D7C5D63, 0x36283622,
0x672767C0, 0xE98CE9AF, 0x441344F9, 0x149514EA, 0xF59CF5BB, 0xCFC7CF18,
0x3F243F2D, 0xC046C0E3, 0x723B72DB, 0x5470546C, 0x29CA294C, 0xF0E3F035,
0x088508FE, 0xC6CBC617, 0xF311F34F, 0x8CD08CE4, 0xA493A459, 0xCAB8CA96,
0x68A6683B, 0xB883B84D, 0x38203828, 0xE5FFE52E, 0xAD9FAD56, 0x0B770B84,
0xC8C3C81D, 0x99CC99FF, 0x580358ED, 0x196F199A, 0x0E080E0A, 0x95BF957E,
0x70407050, 0xF7E7F730, 0x6E2B6ECF, 0x1FE21F6E, 0xB579B53D, 0x090C090F,
0x61AA6134, 0x57825716, 0x9F419F0B, 0x9D3A9D80, 0x11EA1164, 0x25B925CD,
0xAFE4AFDD, 0x459A4508, 0xDFA4DF8D, 0xA397A35C, 0xEA7EEAD5, 0x35DA3558,
0xED7AEDD0, 0x431743FC, 0xF866F8CB, 0xFB94FBB1, 0x37A137D3, 0xFA1DFA40,
0xC23DC268, 0xB4F0B4CC, 0x32DE325D, 0x9CB39C71, 0x560B56E7, 0xE372E3DA,
0x87A78760, 0x151C151B, 0xF9EFF93A, 0x63D163BF, 0x345334A9, 0x9A3E9A85,
0xB18FB142, 0x7C337CD1, 0x8826889B, 0x3D5F3DA6, 0xA1ECA1D7, 0xE476E4DF,
0x812A8194, 0x91499101, 0x0F810FFB, 0xEE88EEAA, 0x16EE1661, 0xD721D773,
0x97C497F5, 0xA51AA5A8, 0xFEEBFE3F, 0x6DD96DB5, 0x78C578AE, 0xC539C56D,
0x1D991DE5, 0x76CD76A4, 0x3EAD3EDC, 0xCB31CB67, 0xB68BB647, 0xEF01EF5B,
0x1218121E, 0x602360C5, 0x6ADD6AB0, 0x4D1F4DF6, 0xCE4ECEE9, 0xDE2DDE7C,
0x55F9559D, 0x7E487E5A, 0x214F21B2, 0x03F2037A, 0xA065A026, 0x5E8E5E19,
0x5A785A66, 0x655C654B, 0x6258624E, 0xFD19FD45, 0x068D06F4, 0x40E54086,
0xF298F2BE, 0x335733AC, 0x17671790, 0x057F058E, 0xE805E85E, 0x4F644F7D,
0x89AF896A, 0x10631095, 0x74B6742F, 0x0AFE0A75, 0x5CF55C92, 0x9BB79B74,
0x2D3C2D33, 0x30A530D6, 0x2ECE2E49, 0x49E94989, 0x46684672, 0x77447755,
0xA8E0A8D8, 0x964D9604, 0x284328BD, 0xA969A929, 0xD929D979, 0x862E8691,
0xD1ACD187, 0xF415F44A, 0x8D598D15, 0xD6A8D682, 0xB90AB9BC, 0x429E420D,
0xF66EF6C1, 0x2F472FB8, 0xDDDFDD06, 0x23342339, 0xCC35CC62, 0xF16AF1C4,
0xC1CFC112, 0x85DC85EB, 0x8F228F9E, 0x71C971A1, 0x90C090F0, 0xAA9BAA53,
0x018901F1, 0x8BD48BE1, 0x4EED4E8C, 0x8EAB8E6F, 0xAB12ABA2, 0x6FA26F3E,
0xE60DE654, 0xDB52DBF2, 0x92BB927B, 0xB702B7B6, 0x692F69CA, 0x39A939D9,
0xD3D7D30C, 0xA761A723, 0xA21EA2AD, 0xC3B4C399, 0x6C506C44, 0x07040705,
0x04F6047F, 0x27C22746, 0xAC16ACA7, 0xD025D076, 0x50865013, 0xDC56DCF7,
0x8455841A, 0xE109E151, 0x7ABE7A25, 0x139113EF },
{ 0xD939A9D9, 0x90176790, 0x719CB371, 0xD2A6E8D2, 0x05070405, 0x9852FD98,
0x6580A365, 0xDFE476DF, 0x08459A08, 0x024B9202, 0xA0E080A0, 0x665A7866,
0xDDAFE4DD, 0xB06ADDB0, 0xBF63D1BF, 0x362A3836, 0x54E60D54, 0x4320C643,
0x62CC3562, 0xBEF298BE, 0x1E12181E, 0x24EBF724, 0xD7A1ECD7, 0x77416C77,
0xBD2843BD, 0x32BC7532, 0xD47B37D4, 0x9B88269B, 0x700DFA70, 0xF94413F9,
0xB1FB94B1, 0x5A7E485A, 0x7A03F27A, 0xE48CD0E4, 0x47B68B47, 0x3C24303C,
0xA5E784A5, 0x416B5441, 0x06DDDF06, 0xC56023C5, 0x45FD1945, 0xA33A5BA3,
0x68C23D68, 0x158D5915, 0x21ECF321, 0x3166AE31, 0x3E6FA23E, 0x16578216,
0x95106395, 0x5BEF015B, 0x4DB8834D, 0x91862E91, 0xB56DD9B5, 0x1F83511F,
0x53AA9B53, 0x635D7C63, 0x3B68A63B, 0x3FFEEB3F, 0xD630A5D6, 0x257ABE25,
0xA7AC16A7, 0x0F090C0F, 0x35F0E335, 0x23A76123, 0xF090C0F0, 0xAFE98CAF,
0x809D3A80, 0x925CF592, 0x810C7381, 0x27312C27, 0x76D02576, 0xE7560BE7,
0x7B92BB7B, 0xE9CE4EE9, 0xF10189F1, 0x9F1E6B9F, 0xA93453A9, 0xC4F16AC4,
0x99C3B499, 0x975BF197, 0x8347E183, 0x6B18E66B, 0xC822BDC8, 0x0E98450E,
0x6E1FE26E, 0xC9B3F4C9, 0x2F74B62F, 0xCBF866CB, 0xFF99CCFF, 0xEA1495EA,
0xED5803ED, 0xF7DC56F7, 0xE18BD4E1, 0x1B151C1B, 0xADA21EAD, 0x0CD3D70C,
0x2BE2FB2B, 0x1DC8C31D, 0x195E8E19, 0xC22CB5C2, 0x8949E989, 0x12C1CF12,
0x7E95BF7E, 0x207DBA20, 0x6411EA64, 0x840B7784, 0x6DC5396D, 0x6A89AF6A,
0xD17C33D1, 0xA171C9A1, 0xCEFF62CE, 0x37BB7137, 0xFB0F81FB, 0x3DB5793D,
0x51E10951, 0xDC3EADDC, 0x2D3F242D, 0xA476CDA4, 0x9D55F99D, 0xEE82D8EE,
0x8640E586, 0xAE78C5AE, 0xCD25B9CD, 0x04964D04, 0x55774455, 0x0A0E080A,
0x13508613, 0x30F7E730, 0xD337A1D3, 0x40FA1D40, 0x3461AA34, 0x8C4EED8C,
0xB3B006B3, 0x6C54706C, 0x2A73B22A, 0x523BD252, 0x0B9F410B, 0x8B027B8B,
0x88D8A088, 0x4FF3114F, 0x67CB3167, 0x4627C246, 0xC06727C0, 0xB4FC90B4,
0x28382028, 0x7F04F67F, 0x78486078, 0x2EE5FF2E, 0x074C9607, 0x4B655C4B,
0xC72BB1C7, 0x6F8EAB6F, 0x0D429E0D, 0xBBF59CBB, 0xF2DB52F2, 0xF34A1BF3,
0xA63D5FA6, 0x59A49359, 0xBCB90ABC, 0x3AF9EF3A, 0xEF1391EF, 0xFE0885FE,
0x01914901, 0x6116EE61, 0x7CDE2D7C, 0xB2214FB2, 0x42B18F42, 0xDB723BDB,
0xB82F47B8, 0x48BF8748, 0x2CAE6D2C, 0xE3C046E3, 0x573CD657, 0x859A3E85,
0x29A96929, 0x7D4F647D, 0x94812A94, 0x492ECE49, 0x17C6CB17, 0xCA692FCA,
0xC3BDFCC3, 0x5CA3975C, 0x5EE8055E, 0xD0ED7AD0, 0x87D1AC87, 0x8E057F8E,
0xBA64D5BA, 0xA8A51AA8, 0xB7264BB7, 0xB9BE0EB9, 0x6087A760, 0xF8D55AF8,
0x22362822, 0x111B1411, 0xDE753FDE, 0x79D92979, 0xAAEE88AA, 0x332D3C33,
0x5F794C5F, 0xB6B702B6, 0x96CAB896, 0x5835DA58, 0x9CC4B09C, 0xFC4317FC,
0x1A84551A, 0xF64D1FF6, 0x1C598A1C, 0x38B27D38, 0xAC3357AC, 0x18CFC718,
0xF4068DF4, 0x69537469, 0x749BB774, 0xF597C4F5, 0x56AD9F56, 0xDAE372DA,
0xD5EA7ED5, 0x4AF4154A, 0x9E8F229E, 0xA2AB12A2, 0x4E62584E, 0xE85F07E8,
0xE51D99E5, 0x39233439, 0xC1F66EC1, 0x446C5044, 0x5D32DE5D, 0x72466872,
0x26A06526, 0x93CDBC93, 0x03DADB03, 0xC6BAF8C6, 0xFA9EC8FA, 0x82D6A882,
0xCF6E2BCF, 0x50704050, 0xEB85DCEB, 0x750AFE75, 0x8A93328A, 0x8DDFA48D,
0x4C29CA4C, 0x141C1014, 0x73D72173, 0xCCB4F0CC, 0x09D4D309, 0x108A5D10,
0xE2510FE2, 0x00000000, 0x9A196F9A, 0xE01A9DE0, 0x8F94368F, 0xE6C742E6,
0xECC94AEC, 0xFDD25EFD, 0xAB7FC1AB, 0xD8A8E0D8 }
};
__constant UInt32 mds_poly_divx_const[] = { 0, 0xb4 };
#define q0 q_table[0]
#define q1 q_table[1]
#define H02( y, L ) MDS_table[0][q0[q0[y]^L[ 8]]^L[0]]
#define H12( y, L ) MDS_table[1][q0[q1[y]^L[ 9]]^L[1]]
#define H22( y, L ) MDS_table[2][q1[q0[y]^L[10]]^L[2]]
#define H32( y, L ) MDS_table[3][q1[q1[y]^L[11]]^L[3]]
#define H03( y, L ) H02( q1[y]^L[16], L )
#define H13( y, L ) H12( q1[y]^L[17], L )
#define H23( y, L ) H22( q0[y]^L[18], L )
#define H33( y, L ) H32( q0[y]^L[19], L )
#define H04( y, L ) H03( q1[y]^L[24], L )
#define H14( y, L ) H13( q0[y]^L[25], L )
#define H24( y, L ) H23( q0[y]^L[26], L )
#define H34( y, L ) H33( q1[y]^L[27], L )
inline
UInt32 two_h(int k, Byte L[], int kCycles)
{
switch (kCycles) {
case 2:
return H02(k, L) ^ H12(k, L) ^ H22(k, L) ^ H32(k, L);
case 3:
return H03(k, L) ^ H13(k, L) ^ H23(k, L) ^ H33(k, L);
case 4:
return H04(k, L) ^ H14(k, L) ^ H24(k, L) ^ H34(k, L);
default:
return 0;
}
}
inline
void fill_keyed_sboxes(Byte S[], int kCycles, Twofish_key *xkey)
{
int i;
switch (kCycles) {
case 2:
for (i = 0; i < 256; i++) {
xkey->s[0][i] = H02(i, S);
xkey->s[1][i] = H12(i, S);
xkey->s[2][i] = H22(i, S);
xkey->s[3][i] = H32(i, S);
}
break;
case 3:
for (i = 0; i < 256; i++) {
xkey->s[0][i] = H03(i, S);
xkey->s[1][i] = H13(i, S);
xkey->s[2][i] = H23(i, S);
xkey->s[3][i] = H33(i, S);
}
break;
case 4:
for (i = 0; i < 256; i++) {
xkey->s[0][i] = H04(i, S);
xkey->s[1][i] = H14(i, S);
xkey->s[2][i] = H24(i, S);
xkey->s[3][i] = H34(i, S);
}
break;
}
}
__constant uint rs_poly_const[] = { 0, 0x14d };
__constant uint rs_poly_div_const[] = { 0, 0xa6 };
inline
void Twofish_prepare_key(__private Byte key[], int key_len, Twofish_key *xkey)
{
Byte K[32 + 32 + 4] = { 0 };
int kCycles;
int i;
Byte *kptr;
Byte *sptr;
Byte *t;
Byte b, bx, bxx;
memcpy_pp(K, key, key_len);
kCycles = (key_len + 7) >> 3;
if (kCycles < 2) {
kCycles = 2;
}
for (i = 0; i < 40; i += 2) {
UInt32 A, B;
A = two_h(i, K, kCycles);
B = two_h(i + 1, K + 4, kCycles);
B = ROL32(B, 8);
A += B;
B += A;
xkey->K[i] = A;
xkey->K[i + 1] = ROL32(B, 9);
}
kptr = K + 8 * kCycles;
sptr = K + 32;
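	/* Derive the S-box key material: each 8-byte chunk of the user key is
	   reduced to 4 bytes with the Twofish Reed-Solomon code (the shift/xor
	   steps below), working from the last chunk towards the first. */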
while (kptr > K) {
kptr -= 8;
memset_p(sptr, 0, 4);
memcpy_pp(sptr + 4, kptr, 8);
t = sptr + 11;
while (t > sptr + 3) {
b = *t;
bx = (Byte)((b << 1) ^ rs_poly_const[b >> 7]);
bxx = (Byte)((b >> 1) ^ rs_poly_div_const[b & 1] ^ bx);
t[-1] ^= bxx;
t[-2] ^= bx;
t[-3] ^= bxx;
t[-4] ^= b;
t--;
}
sptr += 8;
}
fill_keyed_sboxes(&K[32], kCycles, xkey);
}
#define g0(X,xkey) \
(xkey->s[0][b0(X)]^xkey->s[1][b1(X)]^xkey->s[2][b2(X)]^xkey->s[3][b3(X)])
#define g1(X,xkey) \
(xkey->s[0][b3(X)]^xkey->s[1][b0(X)]^xkey->s[2][b1(X)]^xkey->s[3][b2(X)])
#define ENCRYPT_RND( A,B,C,D, T0, T1, xkey, r ) \
T0 = g0(A,xkey); T1 = g1(B,xkey); \
C ^= T0+T1+xkey->K[8+2*(r)]; C = ROR32(C,1); \
D = ROL32(D,1); D ^= T0+2*T1+xkey->K[8+2*(r)+1]
#define ENCRYPT_CYCLE( A, B, C, D, T0, T1, xkey, r ) \
ENCRYPT_RND( A,B,C,D,T0,T1,xkey,2*(r) ); \
ENCRYPT_RND( C,D,A,B,T0,T1,xkey,2*(r)+1 )
#define ENCRYPT( A,B,C,D,T0,T1,xkey ) \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 0 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 1 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 2 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 3 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 4 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 5 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 6 ); \
ENCRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 7 )
#define DECRYPT_RND( A,B,C,D, T0, T1, xkey, r ) \
T0 = g0(A,xkey); T1 = g1(B,xkey); \
C = ROL32(C,1); C ^= T0+T1+xkey->K[8+2*(r)]; \
D ^= T0+2*T1+xkey->K[8+2*(r)+1]; D = ROR32(D,1)
#define DECRYPT_CYCLE( A, B, C, D, T0, T1, xkey, r ) \
DECRYPT_RND( A,B,C,D,T0,T1,xkey,2*(r)+1 ); \
DECRYPT_RND( C,D,A,B,T0,T1,xkey,2*(r) )
#define DECRYPT( A,B,C,D,T0,T1, xkey ) \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 7 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 6 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 5 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 4 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 3 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 2 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 1 ); \
DECRYPT_CYCLE( A,B,C,D,T0,T1,xkey, 0 )
#define GET_INPUT( src, A,B,C,D, xkey, koff ) \
A = GET32(src )^xkey->K[ koff]; B = GET32(src+ 4)^xkey->K[1+koff]; \
C = GET32(src+ 8)^xkey->K[2+koff]; D = GET32(src+12)^xkey->K[3+koff]
#define PUT_OUTPUT( A,B,C,D, dst, xkey, koff ) \
A ^= xkey->K[ koff]; B ^= xkey->K[1+koff]; \
C ^= xkey->K[2+koff]; D ^= xkey->K[3+koff]; \
PUT32( A, dst ); PUT32( B, dst+ 4 ); \
PUT32( C, dst+8 ); PUT32( D, dst+12 )
inline
void Twofish_encrypt(Twofish_key *xkey, Byte p[16], Byte c[16])
{
UInt32 A, B, C, D, T0, T1;
GET_INPUT(p, A, B, C, D, xkey, 0);
ENCRYPT(A, B, C, D, T0, T1, xkey);
PUT_OUTPUT(C, D, A, B, c, xkey, 4);
}
inline
int Twofish_Encrypt(Twofish_key *m_key, Byte *pInput, Byte *pOutBuffer,
int nInputOctets, Byte *m_pInitVector)
{
int i, numBlocks, padLen;
Byte *iv;
union {
Byte block[16];
UInt32 p32[4];
} x;
UInt32 *p;
Byte *block;
p = x.p32;
block = x.block;
if (!pInput || (nInputOctets <= 0) || !pOutBuffer)
return 0;
numBlocks = nInputOctets / 16;
iv = m_pInitVector;
for (i = numBlocks; i > 0; i--) {
p[0] = ((UInt32 *) pInput)[0] ^ ((UInt32 *) iv)[0];
p[1] = ((UInt32 *) pInput)[1] ^ ((UInt32 *) iv)[1];
// ((UInt32*)block)[2] = ((UInt32*)pInput)[2] ^ ((UInt32*)iv)[2];
p[2] = ((UInt32 *) pInput)[2] ^ ((UInt32 *) iv)[2];
p[3] = ((UInt32 *) pInput)[3] ^ ((UInt32 *) iv)[3];
Twofish_encrypt(m_key, (Twofish_Byte *) block,
(Twofish_Byte *) pOutBuffer);
iv = pOutBuffer;
pInput += 16;
pOutBuffer += 16;
}
padLen = 16 - (nInputOctets - (16 * numBlocks));
for (i = 0; i < 16 - padLen; i++) {
block[i] = (Byte)(pInput[i] ^ iv[i]);
}
for (i = 16 - padLen; i < 16; i++) {
block[i] = (Byte)((Byte) padLen ^ iv[i]);
}
Twofish_encrypt(m_key, (Twofish_Byte *) block,
(Twofish_Byte *) pOutBuffer);
return 16 * (numBlocks + 1);
}
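/*
 * Illustrative CBC usage sketch (the buffer names and the 256-bit key length
 * are assumptions; pOutBuffer must leave room for the final padding block):
 *
 *   Twofish_key xkey;
 *   Twofish_prepare_key(key_bytes, 32, &xkey);
 *   int out_len = Twofish_Encrypt(&xkey, plaintext, ciphertext, in_len, iv);
 */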
inline
void Twofish_decrypt(Twofish_key *xkey, Byte c[16], Byte p[16])
{
UInt32 A, B, C, D, T0, T1;
GET_INPUT(c, A, B, C, D, xkey, 4);
DECRYPT(A, B, C, D, T0, T1, xkey);
PUT_OUTPUT(C, D, A, B, p, xkey, 0);
}
inline
int Twofish_Decrypt(Twofish_key *m_key, Byte *pInput, Byte *pOutBuffer,
int nInputOctets, Byte *m_pInitVector, int check_pad)
{
int i, numBlocks, padLen;
UInt32 iv[4];
union {
Byte block[16];
UInt32 p32[4];
} x;
UInt32 *p;
Byte *block;
p = x.p32;
block = x.block;
if (!pInput || (nInputOctets <= 0) || !pOutBuffer)
return 0;
if ((nInputOctets % 16) != 0) {
return -1;
}
numBlocks = nInputOctets / 16;
memcpy_pp(iv, m_pInitVector, 16);
for (i = numBlocks - 1; i > 0; i--) {
Twofish_decrypt(m_key, (Twofish_Byte *) pInput,
(Twofish_Byte *) block);
p[0] ^= iv[0];
p[1] ^= iv[1];
p[2] ^= iv[2];
p[3] ^= iv[3];
memcpy_pp(iv, pInput, 16);
memcpy_pp(pOutBuffer, block, 16);
pInput += 16;
pOutBuffer += 16;
}
Twofish_decrypt(m_key, (Twofish_Byte *) pInput, (Twofish_Byte *) block);
p[0] ^= iv[0];
p[1] ^= iv[1];
p[2] ^= iv[2];
p[3] ^= iv[3];
if (check_pad) {
padLen = block[15];
if (padLen <= 0 || padLen > 16)
return -1;
for (i = 16 - padLen; i < 16; i++) {
if (block[i] != padLen)
return -1;
}
} else {
padLen = 0;
memcpy_pp(m_pInitVector, pInput, 16);
}
memcpy_pp(pOutBuffer, block, 16 - padLen);
return 16 * numBlocks - padLen;
}
#if 0
inline
int Twofish_Decrypt_cfb128(Twofish_key *m_key, Twofish_Byte *pInput,
Twofish_Byte *pOutBuffer, int nInputOctets,
Twofish_Byte *m_pInitVector)
{
int i, numBlocks, ex;
UInt32 iv[4];
union {
Byte block[16];
UInt32 p32[4];
} x;
UInt32 *p;
Byte *block;
p = x.p32;
block = x.block;
if (!pInput || (nInputOctets <= 0) || !pOutBuffer)
return 0;
numBlocks = nInputOctets / 16;
ex = nInputOctets % 16;
memcpy_pp(iv, m_pInitVector, 16);
for (i = numBlocks; i > 0; i--) {
Twofish_encrypt(m_key, (Twofish_Byte *) iv, (Twofish_Byte *) block);
memcpy_pp(iv, pInput, 16);
p[0] ^= iv[0];
p[1] ^= iv[1];
p[2] ^= iv[2];
p[3] ^= iv[3];
memcpy_pp(pOutBuffer, block, 16);
pInput += 16;
pOutBuffer += 16;
}
if (ex) {
Twofish_encrypt(m_key, (Twofish_Byte *) iv, (Twofish_Byte *) block);
for (i = 0; i < ex; ++i)
pOutBuffer[i] = pInput[i] ^ block[i];
}
return nInputOctets;
}
#endif
#endif /* OPENCL_TWOFISH_H_ */
| 18,783 |
357 | <filename>vmafd/interop/python/cdcclient.py
#
# Copyright (C) 2012-2015 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, without
# warranties or conditions of any kind, EITHER EXPRESS OR IMPLIED. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# KeyStore
import sys
import identity.vmafd as vmafd
def getDefaultType():
return "CDC"
class CdcClient:
"""CdcClient Class"""
_client_context = None
def __init__(self, serverName):
if not serverName:
serverName = 'localhost'
self._client_context = vmafd.client(serverName)
def __del__(self):
if self._client_context is not None:
pass
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if self._client_context is not None:
pass
def enableClientAffinity(self):
self._client_context.EnableClientAffinity()
def disableClientAffinity(self):
self._client_context.DisableClientAffinity()
def affinitizedDC(self, domainName, forceRefresh=0):
affinitized_dc = self._client_context.GetAffinitizedDC(domainName, forceRefresh)
return affinitized_dc
def state(self):
cdc_state = self._client_context.GetCdcState()
return cdc_state
def isEnabled(self):
cdc_state = self._client_context.GetCdcState()
if cdc_state == 'UNKNOWN' or cdc_state == 'DISABLED':
return False
return True
def cachedDCEntries(self):
l = self._client_context.EnumDCEntries()
for a in l:
yield a
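# Illustrative usage sketch; 'example.com' is a placeholder domain name:
#
#   with CdcClient('localhost') as cdc:
#       if cdc.isEnabled():
#           dc = cdc.affinitizedDC('example.com')
#           cached = list(cdc.cachedDCEntries())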
| 744 |
820 | <reponame>kubajir/msticpy<filename>tests/datamodel/test_pivot_register.py
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""Test Pivot registered functions."""
import warnings
from collections import namedtuple
import pandas as pd
import pytest
import pytest_check as check
from msticpy.data.query_container import QueryContainer
from msticpy.datamodel import entities
from msticpy.datamodel.pivot import Pivot
from msticpy.sectools import GeoLiteLookup, TILookup
__author__ = "<NAME>"
# pylint: disable=redefined-outer-name
@pytest.fixture(scope="session")
def data_providers():
"""Return dict of providers."""
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=UserWarning)
return {
"ti_lookup": TILookup(),
"geolite": GeoLiteLookup(),
# "ip_stack": IPStackLookup(),
}
def _reset_entities():
"""Clear any query containers in entities."""
for entity_name in ("Host", "IpAddress", "Account", "Url"):
entity = getattr(entities, entity_name)
for attr in dir(entity):
if isinstance(getattr(entity, attr), QueryContainer):
delattr(entity, attr)
@pytest.fixture(scope="session")
def _create_pivot(data_providers):
_reset_entities()
providers = data_providers.values()
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=UserWarning)
return Pivot(providers=providers)
EntityQuery = namedtuple(
"EntityQuery",
"entity, args, attrib, provider, pivot_func, func_param, src_col, exp_col, exp_val",
)
_ENTITY_QUERIES = [
pytest.param(
EntityQuery(
entity="IpAddress",
args=dict(Address="192.168.127.12"),
attrib="Address",
provider="util",
pivot_func="whois",
func_param="ip_address",
src_col="ip",
exp_col="asn_description",
exp_val="MICROSOFT-CORP-MSN-AS-BLOCK",
),
id="IpAddress-who_is",
),
pytest.param(
EntityQuery(
entity="IpAddress",
args=dict(Address="192.168.127.12"),
attrib="Address",
provider="util",
pivot_func="ip_type",
func_param="ip_str",
src_col="ip",
exp_col="result",
exp_val="Public",
),
id="IpAddress-ip_type",
),
pytest.param(
EntityQuery(
entity="Process",
args=dict(
CommandLine="VGhpcyBpcyBhIHRlc3Qgb2YgYmFzZTY0IGVuY29kZWQgc3RyaW5n"
),
attrib="CommandLine",
provider="util",
pivot_func="b64decode",
func_param="value",
src_col="cmdline",
exp_col="decoded_string",
exp_val="This is a test of base64 encoded string",
),
id="Process-b64decode",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="Url",
provider="util",
pivot_func="extract_iocs",
func_param="value",
src_col="url",
exp_col="Observable",
exp_val="www.contoso.com",
),
id="Url-extract_iocs",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="host",
provider="util",
pivot_func="dns_validate_tld",
func_param="value",
src_col="host",
exp_col="result",
exp_val="True",
),
id="Url-dns_validate_tld",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="host",
provider="util",
pivot_func="dns_is_resolvable",
func_param="value",
src_col="host",
exp_col="result",
exp_val="True",
),
id="Url-dns_is_resolvable",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="host",
provider="util",
pivot_func="dns_in_abuse_list",
func_param="value",
src_col="host",
exp_col="result",
exp_val="False",
),
id="Url-dns_in_abuse_list",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="host",
provider="util",
pivot_func="dns_components",
func_param="value",
src_col="host",
exp_col="subdomain",
exp_val="www",
),
id="Url-dns_components",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="host",
provider="util",
pivot_func="url_components",
func_param="value",
src_col="host",
exp_col="host",
exp_val="www.contoso.com",
),
id="Url-url_components",
),
pytest.param(
EntityQuery(
entity="Url",
args=dict(Url="https://www.contoso.com/path?p1=test"),
attrib="Url",
provider="util",
pivot_func="dns_resolve",
func_param="value",
src_col="host",
exp_col="qname",
exp_val="www.contoso.com",
),
id="Url-dns_resolve",
),
pytest.param(
EntityQuery(
entity="IpAddress",
args=dict(Address="192.168.127.12"),
attrib="Address",
provider="util",
pivot_func="ip_rev_resolve",
func_param="value",
src_col="host",
exp_col="qname",
exp_val="192.168.127.12",
),
id="IpAddress-ip_rev_resolve",
),
pytest.param(
EntityQuery(
entity="IpAddress",
args=dict(Address="192.168.127.12"),
attrib="Address",
provider="util",
pivot_func="geoloc",
func_param="value",
src_col="ip",
exp_col="CountryName",
exp_val="United States",
),
id="IpAddress-geoip_maxmind",
),
    # This test sometimes fails because of non-responsiveness
    # of the ipstack service
# pytest.param(
# EntityQuery(
# entity="IpAddress",
# args=dict(Address="192.168.127.12"),
# attrib="Address",
# provider="util",
# pivot_func="geoloc_ips",
# func_param="value",
# src_col="ip",
# exp_col="CountryName",
# exp_val="United States",
# ),
# id="IpAddress-geoip_ipstack",
# ),
]
@pytest.mark.parametrize("test_case", _ENTITY_QUERIES)
def test_entity_attr_funcs_entity(_create_pivot, test_case):
"""Test calling function with entity attributes."""
# Test entity
ent_cls = getattr(entities, test_case.entity)
entity = ent_cls(**(test_case.args))
func = getattr(getattr(entity, test_case.provider), test_case.pivot_func)
# Test entity input
result_df = func(entity)
check.is_in(test_case.exp_val, result_df.iloc[0][test_case.exp_col])
@pytest.mark.parametrize("test_case", _ENTITY_QUERIES)
def test_entity_attr_funcs_value(_create_pivot, test_case):
"""Test calling function with value."""
ent_cls = getattr(entities, test_case.entity)
entity = ent_cls(**(test_case.args))
func = getattr(getattr(entity, test_case.provider), test_case.pivot_func)
# Test value input
val = getattr(entity, test_case.attrib)
params = {test_case.func_param: val}
result_df = func(**params)
check.is_in(test_case.exp_val, result_df.iloc[0][test_case.exp_col])
@pytest.mark.parametrize("test_case", _ENTITY_QUERIES)
def test_entity_attr_funcs_itbl(_create_pivot, test_case):
"""Test calling function with iterable input."""
ent_cls = getattr(entities, test_case.entity)
entity = ent_cls(**(test_case.args))
func = getattr(getattr(entity, test_case.provider), test_case.pivot_func)
# Test iterable input
val = [getattr(entity, test_case.attrib)]
params = {test_case.func_param: val}
result_df = func(**params)
check.is_in(test_case.exp_val, result_df.iloc[0][test_case.exp_col])
@pytest.mark.parametrize("test_case", _ENTITY_QUERIES)
def test_entity_attr_funcs_df(_create_pivot, test_case):
"""Test calling function with DF input attributes."""
ent_cls = getattr(entities, test_case.entity)
entity = ent_cls(**(test_case.args))
func = getattr(getattr(entity, test_case.provider), test_case.pivot_func)
# Test DF input
val = getattr(entity, test_case.attrib)
in_df = pd.DataFrame([val], columns=[test_case.src_col])
result_df = func(data=in_df, src_column=test_case.src_col)
check.is_in(test_case.exp_val, result_df.iloc[0][test_case.exp_col])
| 4,655 |
596 | <gh_stars>100-1000
#include "flightlib/sensors/rgb_camera.hpp"
namespace flightlib {
RGBCamera::RGBCamera()
: channels_(3),
width_(720),
height_(480),
fov_{70.0},
depth_scale_{0.2},
enabled_layers_({false, false, false}) {}
RGBCamera::~RGBCamera() {}
bool RGBCamera::feedImageQueue(const int image_layer,
const cv::Mat& image_mat) {
queue_mutex_.lock();
switch (image_layer) {
case 0: // rgb image
if (rgb_queue_.size() > queue_size_) rgb_queue_.resize(queue_size_);
rgb_queue_.push_back(image_mat);
break;
case CameraLayer::DepthMap:
if (depth_queue_.size() > queue_size_) depth_queue_.resize(queue_size_);
depth_queue_.push_back(image_mat);
break;
case CameraLayer::Segmentation:
if (segmentation_queue_.size() > queue_size_)
segmentation_queue_.resize(queue_size_);
segmentation_queue_.push_back(image_mat);
break;
case CameraLayer::OpticalFlow:
if (opticalflow_queue_.size() > queue_size_)
opticalflow_queue_.resize(queue_size_);
opticalflow_queue_.push_back(image_mat);
break;
}
queue_mutex_.unlock();
return true;
}
bool RGBCamera::setRelPose(const Ref<Vector<3>> B_r_BC,
const Ref<Matrix<3, 3>> R_BC) {
if (!B_r_BC.allFinite() || !R_BC.allFinite()) {
logger_.error(
"The setting value for Camera Relative Pose Matrix is not valid, discard "
"the setting.");
return false;
}
B_r_BC_ = B_r_BC;
T_BC_.block<3, 3>(0, 0) = R_BC;
T_BC_.block<3, 1>(0, 3) = B_r_BC;
T_BC_.row(3) << 0.0, 0.0, 0.0, 1.0;
return true;
}
bool RGBCamera::setWidth(const int width) {
if (width <= 0.0) {
logger_.warn(
"The setting value for Image Width is not valid, discard the setting.");
return false;
}
width_ = width;
return true;
}
bool RGBCamera::setHeight(const int height) {
if (height <= 0.0) {
logger_.warn(
"The setting value for Image Height is not valid, discard the "
"setting.");
return false;
}
height_ = height;
return true;
}
bool RGBCamera::setFOV(const Scalar fov) {
if (fov <= 0.0) {
logger_.warn(
"The setting value for Camera Field-of-View is not valid, discard the "
"setting.");
return false;
}
fov_ = fov;
return true;
}
bool RGBCamera::setDepthScale(const Scalar depth_scale) {
  if (depth_scale < 0.0 || depth_scale > 1.0) {
logger_.warn(
"The setting value for Camera Depth Scale is not valid, discard the "
"setting.");
return false;
}
depth_scale_ = depth_scale;
return true;
}
bool RGBCamera::setPostProcesscing(const std::vector<bool>& enabled_layers) {
if (enabled_layers_.size() != enabled_layers.size()) {
logger_.warn(
"Vector size does not match. The vector size should be equal to %d.",
enabled_layers_.size());
return false;
}
enabled_layers_ = enabled_layers;
return true;
}
std::vector<bool> RGBCamera::getEnabledLayers(void) const {
return enabled_layers_;
}
Matrix<4, 4> RGBCamera::getRelPose(void) const { return T_BC_; }
int RGBCamera::getChannels(void) const { return channels_; }
int RGBCamera::getWidth(void) const { return width_; }
int RGBCamera::getHeight(void) const { return height_; }
Scalar RGBCamera::getFOV(void) const { return fov_; }
Scalar RGBCamera::getDepthScale(void) const { return depth_scale_; }
void RGBCamera::enableDepth(const bool on) {
if (enabled_layers_[CameraLayer::DepthMap] == on) {
logger_.warn("Depth layer was already %s.", on ? "on" : "off");
}
enabled_layers_[CameraLayer::DepthMap] = on;
}
void RGBCamera::enableSegmentation(const bool on) {
if (enabled_layers_[CameraLayer::Segmentation] == on) {
logger_.warn("Segmentation layer was already %s.", on ? "on" : "off");
}
enabled_layers_[CameraLayer::Segmentation] = on;
}
void RGBCamera::enableOpticalFlow(const bool on) {
if (enabled_layers_[CameraLayer::OpticalFlow] == on) {
logger_.warn("Optical Flow layer was already %s.", on ? "on" : "off");
}
enabled_layers_[CameraLayer::OpticalFlow] = on;
}
bool RGBCamera::getRGBImage(cv::Mat& rgb_img) {
if (!rgb_queue_.empty()) {
rgb_img = rgb_queue_.front();
rgb_queue_.pop_front();
return true;
}
return false;
}
bool RGBCamera::getDepthMap(cv::Mat& depth_map) {
if (!depth_queue_.empty()) {
depth_map = depth_queue_.front();
depth_queue_.pop_front();
return true;
}
return false;
}
bool RGBCamera::getSegmentation(cv::Mat& segmentation) {
if (!segmentation_queue_.empty()) {
segmentation = segmentation_queue_.front();
segmentation_queue_.pop_front();
return true;
}
return false;
}
bool RGBCamera::getOpticalFlow(cv::Mat& opticalflow) {
if (!opticalflow_queue_.empty()) {
opticalflow = opticalflow_queue_.front();
opticalflow_queue_.pop_front();
return true;
}
return false;
}
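// Illustrative usage sketch (not part of the original source; it assumes the
// flightlib namespace is visible and that frames are produced by a rendering
// bridge):
//
//   RGBCamera camera;
//   camera.setFOV(90.0);
//   camera.enableDepth(true);
//
//   cv::Mat rgb_frame, depth_frame;  // filled in by the renderer
//   camera.feedImageQueue(0, rgb_frame);  // 0 == RGB layer
//   camera.feedImageQueue(CameraLayer::DepthMap, depth_frame);
//
//   cv::Mat rgb_out;
//   if (camera.getRGBImage(rgb_out)) {
//     // consume the oldest queued RGB frame
//   }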
} // namespace flightlib | 1,974 |
14,668 | // Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef COMPONENTS_URL_MATCHER_REGEX_SET_MATCHER_H_
#define COMPONENTS_URL_MATCHER_REGEX_SET_MATCHER_H_
#include <map>
#include <memory>
#include <set>
#include <string>
#include <vector>
#include "components/url_matcher/string_pattern.h"
#include "components/url_matcher/substring_set_matcher.h"
#include "components/url_matcher/url_matcher_export.h"
namespace re2 {
class FilteredRE2;
}
namespace url_matcher {
// Efficiently matches URLs against a collection of regular expressions,
// using FilteredRE2 to reduce the number of regexes that must be matched
// by pre-filtering with substring matching. See:
// http://swtch.com/~rsc/regexp/regexp3.html#analysis
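//
// Illustrative usage sketch (not taken from the original header; the regex
// strings and IDs below are made up, but the calls follow the interface
// declared here). Ownership of the StringPatterns stays with the caller:
//
//   url_matcher::StringPattern login_pattern("example\\.com/login", 0);
//   url_matcher::StringPattern image_pattern("\\.png$", 1);
//
//   url_matcher::RegexSetMatcher matcher;
//   matcher.AddPatterns({&login_pattern, &image_pattern});
//
//   std::set<url_matcher::StringPattern::ID> matches;
//   matcher.Match("https://example.com/login?user=a", &matches);
//   // |matches| should now contain ID 0 only.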
class URL_MATCHER_EXPORT RegexSetMatcher {
public:
RegexSetMatcher();
virtual ~RegexSetMatcher();
// Adds the regex patterns in |regex_list| to the matcher. Also rebuilds
// the FilteredRE2 matcher; thus, for efficiency, prefer adding multiple
// patterns at once.
// Ownership of the patterns remains with the caller.
void AddPatterns(const std::vector<const StringPattern*>& regex_list);
// Removes all regex patterns.
void ClearPatterns();
// Appends the IDs of regular expressions in our set that match the |text|
// to |matches|.
bool Match(const std::string& text,
std::set<StringPattern::ID>* matches) const;
bool IsEmpty() const;
private:
typedef int RE2ID;
typedef std::map<StringPattern::ID, const StringPattern*> RegexMap;
typedef std::vector<StringPattern::ID> RE2IDMap;
// Use Aho-Corasick SubstringSetMatcher to find which literal patterns
// match the |text|.
std::vector<RE2ID> FindSubstringMatches(const std::string& text) const;
// Rebuild FilteredRE2 from scratch. Needs to be called whenever
// our set of regexes changes.
// TODO(yoz): investigate if it could be done incrementally;
// apparently not supported by FilteredRE2.
void RebuildMatcher();
// Mapping of regex StringPattern::IDs to regexes.
RegexMap regexes_;
// Mapping of RE2IDs from FilteredRE2 (which are assigned in order)
// to regex StringPattern::IDs.
RE2IDMap re2_id_map_;
std::unique_ptr<re2::FilteredRE2> filtered_re2_;
std::unique_ptr<SubstringSetMatcher> substring_matcher_;
};
} // namespace url_matcher
#endif // COMPONENTS_URL_MATCHER_REGEX_SET_MATCHER_H_
| 815 |
11,356 | <filename>mlmodel/build/format/ItemSimilarityRecommender.pb.cc
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: ItemSimilarityRecommender.proto
#define INTERNAL_SUPPRESS_PROTOBUF_FIELD_DEPRECATION
#include "ItemSimilarityRecommender.pb.h"
#include <algorithm>
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/port.h>
#include <google/protobuf/stubs/once.h>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/wire_format_lite_inl.h>
#include <google/protobuf/io/zero_copy_stream_impl_lite.h>
// @@protoc_insertion_point(includes)
namespace CoreML {
namespace Specification {
class ItemSimilarityRecommender_ConnectedItemDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<ItemSimilarityRecommender_ConnectedItem> {
} _ItemSimilarityRecommender_ConnectedItem_default_instance_;
class ItemSimilarityRecommender_SimilarItemsDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<ItemSimilarityRecommender_SimilarItems> {
} _ItemSimilarityRecommender_SimilarItems_default_instance_;
class ItemSimilarityRecommenderDefaultTypeInternal : public ::google::protobuf::internal::ExplicitlyConstructed<ItemSimilarityRecommender> {
} _ItemSimilarityRecommender_default_instance_;
namespace protobuf_ItemSimilarityRecommender_2eproto {
PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTableField
const TableStruct::entries[] = {
{0, 0, 0, ::google::protobuf::internal::kInvalidMask, 0, 0},
};
PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::AuxillaryParseTableField
const TableStruct::aux[] = {
::google::protobuf::internal::AuxillaryParseTableField(),
};
PROTOBUF_CONSTEXPR_VAR ::google::protobuf::internal::ParseTable const
TableStruct::schema[] = {
{ NULL, NULL, 0, -1, -1, false },
{ NULL, NULL, 0, -1, -1, false },
{ NULL, NULL, 0, -1, -1, false },
};
void TableStruct::Shutdown() {
_ItemSimilarityRecommender_ConnectedItem_default_instance_.Shutdown();
_ItemSimilarityRecommender_SimilarItems_default_instance_.Shutdown();
_ItemSimilarityRecommender_default_instance_.Shutdown();
}
void TableStruct::InitDefaultsImpl() {
GOOGLE_PROTOBUF_VERIFY_VERSION;
::google::protobuf::internal::InitProtobufDefaults();
::CoreML::Specification::protobuf_DataStructures_2eproto::InitDefaults();
_ItemSimilarityRecommender_ConnectedItem_default_instance_.DefaultConstruct();
_ItemSimilarityRecommender_SimilarItems_default_instance_.DefaultConstruct();
_ItemSimilarityRecommender_default_instance_.DefaultConstruct();
_ItemSimilarityRecommender_default_instance_.get_mutable()->itemstringids_ = const_cast< ::CoreML::Specification::StringVector*>(
::CoreML::Specification::StringVector::internal_default_instance());
_ItemSimilarityRecommender_default_instance_.get_mutable()->itemint64ids_ = const_cast< ::CoreML::Specification::Int64Vector*>(
::CoreML::Specification::Int64Vector::internal_default_instance());
}
void InitDefaults() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &TableStruct::InitDefaultsImpl);
}
void AddDescriptorsImpl() {
InitDefaults();
::CoreML::Specification::protobuf_DataStructures_2eproto::AddDescriptors();
::google::protobuf::internal::OnShutdown(&TableStruct::Shutdown);
}
void AddDescriptors() {
static GOOGLE_PROTOBUF_DECLARE_ONCE(once);
::google::protobuf::GoogleOnceInit(&once, &AddDescriptorsImpl);
}
#ifdef GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
// Force AddDescriptors() to be called at static initialization time.
struct StaticDescriptorInitializer {
StaticDescriptorInitializer() {
AddDescriptors();
}
} static_descriptor_initializer;
#endif // GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER
} // namespace protobuf_ItemSimilarityRecommender_2eproto
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int ItemSimilarityRecommender_ConnectedItem::kItemIdFieldNumber;
const int ItemSimilarityRecommender_ConnectedItem::kSimilarityScoreFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
ItemSimilarityRecommender_ConnectedItem::ItemSimilarityRecommender_ConnectedItem()
: ::google::protobuf::MessageLite(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_ItemSimilarityRecommender_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
}
ItemSimilarityRecommender_ConnectedItem::ItemSimilarityRecommender_ConnectedItem(const ItemSimilarityRecommender_ConnectedItem& from)
: ::google::protobuf::MessageLite(),
_internal_metadata_(NULL),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
::memcpy(&itemid_, &from.itemid_,
reinterpret_cast<char*>(&similarityscore_) -
reinterpret_cast<char*>(&itemid_) + sizeof(similarityscore_));
// @@protoc_insertion_point(copy_constructor:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
}
void ItemSimilarityRecommender_ConnectedItem::SharedCtor() {
::memset(&itemid_, 0, reinterpret_cast<char*>(&similarityscore_) -
reinterpret_cast<char*>(&itemid_) + sizeof(similarityscore_));
_cached_size_ = 0;
}
ItemSimilarityRecommender_ConnectedItem::~ItemSimilarityRecommender_ConnectedItem() {
// @@protoc_insertion_point(destructor:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
SharedDtor();
}
void ItemSimilarityRecommender_ConnectedItem::SharedDtor() {
}
void ItemSimilarityRecommender_ConnectedItem::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ItemSimilarityRecommender_ConnectedItem& ItemSimilarityRecommender_ConnectedItem::default_instance() {
protobuf_ItemSimilarityRecommender_2eproto::InitDefaults();
return *internal_default_instance();
}
ItemSimilarityRecommender_ConnectedItem* ItemSimilarityRecommender_ConnectedItem::New(::google::protobuf::Arena* arena) const {
ItemSimilarityRecommender_ConnectedItem* n = new ItemSimilarityRecommender_ConnectedItem;
if (arena != NULL) {
arena->Own(n);
}
return n;
}
void ItemSimilarityRecommender_ConnectedItem::Clear() {
// @@protoc_insertion_point(message_clear_start:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
::memset(&itemid_, 0, reinterpret_cast<char*>(&similarityscore_) -
reinterpret_cast<char*>(&itemid_) + sizeof(similarityscore_));
}
bool ItemSimilarityRecommender_ConnectedItem::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// uint64 itemId = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(8u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::uint64, ::google::protobuf::internal::WireFormatLite::TYPE_UINT64>(
input, &itemid_)));
} else {
goto handle_unusual;
}
break;
}
// double similarityScore = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(17u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
double, ::google::protobuf::internal::WireFormatLite::TYPE_DOUBLE>(
input, &similarityscore_)));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
return true;
failure:
// @@protoc_insertion_point(parse_failure:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
return false;
#undef DO_
}
void ItemSimilarityRecommender_ConnectedItem::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
// uint64 itemId = 1;
if (this->itemid() != 0) {
::google::protobuf::internal::WireFormatLite::WriteUInt64(1, this->itemid(), output);
}
// double similarityScore = 2;
if (this->similarityscore() != 0) {
::google::protobuf::internal::WireFormatLite::WriteDouble(2, this->similarityscore(), output);
}
// @@protoc_insertion_point(serialize_end:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
}
size_t ItemSimilarityRecommender_ConnectedItem::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
size_t total_size = 0;
// uint64 itemId = 1;
if (this->itemid() != 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::UInt64Size(
this->itemid());
}
// double similarityScore = 2;
if (this->similarityscore() != 0) {
total_size += 1 + 8;
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void ItemSimilarityRecommender_ConnectedItem::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite& from) {
MergeFrom(*::google::protobuf::down_cast<const ItemSimilarityRecommender_ConnectedItem*>(&from));
}
void ItemSimilarityRecommender_ConnectedItem::MergeFrom(const ItemSimilarityRecommender_ConnectedItem& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
if (from.itemid() != 0) {
set_itemid(from.itemid());
}
if (from.similarityscore() != 0) {
set_similarityscore(from.similarityscore());
}
}
void ItemSimilarityRecommender_ConnectedItem::CopyFrom(const ItemSimilarityRecommender_ConnectedItem& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool ItemSimilarityRecommender_ConnectedItem::IsInitialized() const {
return true;
}
void ItemSimilarityRecommender_ConnectedItem::Swap(ItemSimilarityRecommender_ConnectedItem* other) {
if (other == this) return;
InternalSwap(other);
}
void ItemSimilarityRecommender_ConnectedItem::InternalSwap(ItemSimilarityRecommender_ConnectedItem* other) {
std::swap(itemid_, other->itemid_);
std::swap(similarityscore_, other->similarityscore_);
std::swap(_cached_size_, other->_cached_size_);
}
::std::string ItemSimilarityRecommender_ConnectedItem::GetTypeName() const {
return "CoreML.Specification.ItemSimilarityRecommender.ConnectedItem";
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// ItemSimilarityRecommender_ConnectedItem
// uint64 itemId = 1;
void ItemSimilarityRecommender_ConnectedItem::clear_itemid() {
itemid_ = GOOGLE_ULONGLONG(0);
}
::google::protobuf::uint64 ItemSimilarityRecommender_ConnectedItem::itemid() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem.itemId)
return itemid_;
}
void ItemSimilarityRecommender_ConnectedItem::set_itemid(::google::protobuf::uint64 value) {
itemid_ = value;
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem.itemId)
}
// double similarityScore = 2;
void ItemSimilarityRecommender_ConnectedItem::clear_similarityscore() {
similarityscore_ = 0;
}
double ItemSimilarityRecommender_ConnectedItem::similarityscore() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem.similarityScore)
return similarityscore_;
}
void ItemSimilarityRecommender_ConnectedItem::set_similarityscore(double value) {
similarityscore_ = value;
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.ConnectedItem.similarityScore)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int ItemSimilarityRecommender_SimilarItems::kItemIdFieldNumber;
const int ItemSimilarityRecommender_SimilarItems::kSimilarItemListFieldNumber;
const int ItemSimilarityRecommender_SimilarItems::kItemScoreAdjustmentFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
ItemSimilarityRecommender_SimilarItems::ItemSimilarityRecommender_SimilarItems()
: ::google::protobuf::MessageLite(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_ItemSimilarityRecommender_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
}
ItemSimilarityRecommender_SimilarItems::ItemSimilarityRecommender_SimilarItems(const ItemSimilarityRecommender_SimilarItems& from)
: ::google::protobuf::MessageLite(),
_internal_metadata_(NULL),
similaritemlist_(from.similaritemlist_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
::memcpy(&itemid_, &from.itemid_,
reinterpret_cast<char*>(&itemscoreadjustment_) -
reinterpret_cast<char*>(&itemid_) + sizeof(itemscoreadjustment_));
// @@protoc_insertion_point(copy_constructor:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
}
void ItemSimilarityRecommender_SimilarItems::SharedCtor() {
::memset(&itemid_, 0, reinterpret_cast<char*>(&itemscoreadjustment_) -
reinterpret_cast<char*>(&itemid_) + sizeof(itemscoreadjustment_));
_cached_size_ = 0;
}
ItemSimilarityRecommender_SimilarItems::~ItemSimilarityRecommender_SimilarItems() {
// @@protoc_insertion_point(destructor:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
SharedDtor();
}
void ItemSimilarityRecommender_SimilarItems::SharedDtor() {
}
void ItemSimilarityRecommender_SimilarItems::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ItemSimilarityRecommender_SimilarItems& ItemSimilarityRecommender_SimilarItems::default_instance() {
protobuf_ItemSimilarityRecommender_2eproto::InitDefaults();
return *internal_default_instance();
}
ItemSimilarityRecommender_SimilarItems* ItemSimilarityRecommender_SimilarItems::New(::google::protobuf::Arena* arena) const {
ItemSimilarityRecommender_SimilarItems* n = new ItemSimilarityRecommender_SimilarItems;
if (arena != NULL) {
arena->Own(n);
}
return n;
}
void ItemSimilarityRecommender_SimilarItems::Clear() {
// @@protoc_insertion_point(message_clear_start:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
similaritemlist_.Clear();
::memset(&itemid_, 0, reinterpret_cast<char*>(&itemscoreadjustment_) -
reinterpret_cast<char*>(&itemid_) + sizeof(itemscoreadjustment_));
}
bool ItemSimilarityRecommender_SimilarItems::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(127u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// uint64 itemId = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(8u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
::google::protobuf::uint64, ::google::protobuf::internal::WireFormatLite::TYPE_UINT64>(
input, &itemid_)));
} else {
goto handle_unusual;
}
break;
}
// repeated .CoreML.Specification.ItemSimilarityRecommender.ConnectedItem similarItemList = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, add_similaritemlist()));
} else {
goto handle_unusual;
}
break;
}
// double itemScoreAdjustment = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(25u)) {
DO_((::google::protobuf::internal::WireFormatLite::ReadPrimitive<
double, ::google::protobuf::internal::WireFormatLite::TYPE_DOUBLE>(
input, &itemscoreadjustment_)));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
return true;
failure:
// @@protoc_insertion_point(parse_failure:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
return false;
#undef DO_
}
void ItemSimilarityRecommender_SimilarItems::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
// uint64 itemId = 1;
if (this->itemid() != 0) {
::google::protobuf::internal::WireFormatLite::WriteUInt64(1, this->itemid(), output);
}
// repeated .CoreML.Specification.ItemSimilarityRecommender.ConnectedItem similarItemList = 2;
for (unsigned int i = 0, n = this->similaritemlist_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessage(
2, this->similaritemlist(i), output);
}
// double itemScoreAdjustment = 3;
if (this->itemscoreadjustment() != 0) {
::google::protobuf::internal::WireFormatLite::WriteDouble(3, this->itemscoreadjustment(), output);
}
// @@protoc_insertion_point(serialize_end:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
}
size_t ItemSimilarityRecommender_SimilarItems::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
size_t total_size = 0;
// repeated .CoreML.Specification.ItemSimilarityRecommender.ConnectedItem similarItemList = 2;
{
unsigned int count = this->similaritemlist_size();
total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->similaritemlist(i));
}
}
// uint64 itemId = 1;
if (this->itemid() != 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::UInt64Size(
this->itemid());
}
// double itemScoreAdjustment = 3;
if (this->itemscoreadjustment() != 0) {
total_size += 1 + 8;
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void ItemSimilarityRecommender_SimilarItems::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite& from) {
MergeFrom(*::google::protobuf::down_cast<const ItemSimilarityRecommender_SimilarItems*>(&from));
}
void ItemSimilarityRecommender_SimilarItems::MergeFrom(const ItemSimilarityRecommender_SimilarItems& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
similaritemlist_.MergeFrom(from.similaritemlist_);
if (from.itemid() != 0) {
set_itemid(from.itemid());
}
if (from.itemscoreadjustment() != 0) {
set_itemscoreadjustment(from.itemscoreadjustment());
}
}
void ItemSimilarityRecommender_SimilarItems::CopyFrom(const ItemSimilarityRecommender_SimilarItems& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:CoreML.Specification.ItemSimilarityRecommender.SimilarItems)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool ItemSimilarityRecommender_SimilarItems::IsInitialized() const {
return true;
}
void ItemSimilarityRecommender_SimilarItems::Swap(ItemSimilarityRecommender_SimilarItems* other) {
if (other == this) return;
InternalSwap(other);
}
void ItemSimilarityRecommender_SimilarItems::InternalSwap(ItemSimilarityRecommender_SimilarItems* other) {
similaritemlist_.InternalSwap(&other->similaritemlist_);
std::swap(itemid_, other->itemid_);
std::swap(itemscoreadjustment_, other->itemscoreadjustment_);
std::swap(_cached_size_, other->_cached_size_);
}
::std::string ItemSimilarityRecommender_SimilarItems::GetTypeName() const {
return "CoreML.Specification.ItemSimilarityRecommender.SimilarItems";
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// ItemSimilarityRecommender_SimilarItems
// uint64 itemId = 1;
void ItemSimilarityRecommender_SimilarItems::clear_itemid() {
itemid_ = GOOGLE_ULONGLONG(0);
}
::google::protobuf::uint64 ItemSimilarityRecommender_SimilarItems::itemid() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.itemId)
return itemid_;
}
void ItemSimilarityRecommender_SimilarItems::set_itemid(::google::protobuf::uint64 value) {
itemid_ = value;
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.itemId)
}
// repeated .CoreML.Specification.ItemSimilarityRecommender.ConnectedItem similarItemList = 2;
int ItemSimilarityRecommender_SimilarItems::similaritemlist_size() const {
return similaritemlist_.size();
}
void ItemSimilarityRecommender_SimilarItems::clear_similaritemlist() {
similaritemlist_.Clear();
}
const ::CoreML::Specification::ItemSimilarityRecommender_ConnectedItem& ItemSimilarityRecommender_SimilarItems::similaritemlist(int index) const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.similarItemList)
return similaritemlist_.Get(index);
}
::CoreML::Specification::ItemSimilarityRecommender_ConnectedItem* ItemSimilarityRecommender_SimilarItems::mutable_similaritemlist(int index) {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.similarItemList)
return similaritemlist_.Mutable(index);
}
::CoreML::Specification::ItemSimilarityRecommender_ConnectedItem* ItemSimilarityRecommender_SimilarItems::add_similaritemlist() {
// @@protoc_insertion_point(field_add:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.similarItemList)
return similaritemlist_.Add();
}
::google::protobuf::RepeatedPtrField< ::CoreML::Specification::ItemSimilarityRecommender_ConnectedItem >*
ItemSimilarityRecommender_SimilarItems::mutable_similaritemlist() {
// @@protoc_insertion_point(field_mutable_list:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.similarItemList)
return &similaritemlist_;
}
const ::google::protobuf::RepeatedPtrField< ::CoreML::Specification::ItemSimilarityRecommender_ConnectedItem >&
ItemSimilarityRecommender_SimilarItems::similaritemlist() const {
// @@protoc_insertion_point(field_list:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.similarItemList)
return similaritemlist_;
}
// double itemScoreAdjustment = 3;
void ItemSimilarityRecommender_SimilarItems::clear_itemscoreadjustment() {
itemscoreadjustment_ = 0;
}
double ItemSimilarityRecommender_SimilarItems::itemscoreadjustment() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.itemScoreAdjustment)
return itemscoreadjustment_;
}
void ItemSimilarityRecommender_SimilarItems::set_itemscoreadjustment(double value) {
itemscoreadjustment_ = value;
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.SimilarItems.itemScoreAdjustment)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// ===================================================================
#if !defined(_MSC_VER) || _MSC_VER >= 1900
const int ItemSimilarityRecommender::kItemItemSimilaritiesFieldNumber;
const int ItemSimilarityRecommender::kItemStringIdsFieldNumber;
const int ItemSimilarityRecommender::kItemInt64IdsFieldNumber;
const int ItemSimilarityRecommender::kItemInputFeatureNameFieldNumber;
const int ItemSimilarityRecommender::kNumRecommendationsInputFeatureNameFieldNumber;
const int ItemSimilarityRecommender::kItemRestrictionInputFeatureNameFieldNumber;
const int ItemSimilarityRecommender::kItemExclusionInputFeatureNameFieldNumber;
const int ItemSimilarityRecommender::kRecommendedItemListOutputFeatureNameFieldNumber;
const int ItemSimilarityRecommender::kRecommendedItemScoreOutputFeatureNameFieldNumber;
#endif // !defined(_MSC_VER) || _MSC_VER >= 1900
ItemSimilarityRecommender::ItemSimilarityRecommender()
: ::google::protobuf::MessageLite(), _internal_metadata_(NULL) {
if (GOOGLE_PREDICT_TRUE(this != internal_default_instance())) {
protobuf_ItemSimilarityRecommender_2eproto::InitDefaults();
}
SharedCtor();
// @@protoc_insertion_point(constructor:CoreML.Specification.ItemSimilarityRecommender)
}
ItemSimilarityRecommender::ItemSimilarityRecommender(const ItemSimilarityRecommender& from)
: ::google::protobuf::MessageLite(),
_internal_metadata_(NULL),
itemitemsimilarities_(from.itemitemsimilarities_),
_cached_size_(0) {
_internal_metadata_.MergeFrom(from._internal_metadata_);
iteminputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.iteminputfeaturename().size() > 0) {
iteminputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.iteminputfeaturename_);
}
numrecommendationsinputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.numrecommendationsinputfeaturename().size() > 0) {
numrecommendationsinputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.numrecommendationsinputfeaturename_);
}
itemrestrictioninputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.itemrestrictioninputfeaturename().size() > 0) {
itemrestrictioninputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.itemrestrictioninputfeaturename_);
}
itemexclusioninputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.itemexclusioninputfeaturename().size() > 0) {
itemexclusioninputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.itemexclusioninputfeaturename_);
}
recommendeditemlistoutputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.recommendeditemlistoutputfeaturename().size() > 0) {
recommendeditemlistoutputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.recommendeditemlistoutputfeaturename_);
}
recommendeditemscoreoutputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (from.recommendeditemscoreoutputfeaturename().size() > 0) {
recommendeditemscoreoutputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.recommendeditemscoreoutputfeaturename_);
}
if (from.has_itemstringids()) {
itemstringids_ = new ::CoreML::Specification::StringVector(*from.itemstringids_);
} else {
itemstringids_ = NULL;
}
if (from.has_itemint64ids()) {
itemint64ids_ = new ::CoreML::Specification::Int64Vector(*from.itemint64ids_);
} else {
itemint64ids_ = NULL;
}
// @@protoc_insertion_point(copy_constructor:CoreML.Specification.ItemSimilarityRecommender)
}
void ItemSimilarityRecommender::SharedCtor() {
iteminputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
numrecommendationsinputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
itemrestrictioninputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
itemexclusioninputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
recommendeditemlistoutputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
recommendeditemscoreoutputfeaturename_.UnsafeSetDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
::memset(&itemstringids_, 0, reinterpret_cast<char*>(&itemint64ids_) -
reinterpret_cast<char*>(&itemstringids_) + sizeof(itemint64ids_));
_cached_size_ = 0;
}
ItemSimilarityRecommender::~ItemSimilarityRecommender() {
// @@protoc_insertion_point(destructor:CoreML.Specification.ItemSimilarityRecommender)
SharedDtor();
}
void ItemSimilarityRecommender::SharedDtor() {
iteminputfeaturename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
numrecommendationsinputfeaturename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
itemrestrictioninputfeaturename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
itemexclusioninputfeaturename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
recommendeditemlistoutputfeaturename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
recommendeditemscoreoutputfeaturename_.DestroyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (this != internal_default_instance()) {
delete itemstringids_;
}
if (this != internal_default_instance()) {
delete itemint64ids_;
}
}
void ItemSimilarityRecommender::SetCachedSize(int size) const {
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
}
const ItemSimilarityRecommender& ItemSimilarityRecommender::default_instance() {
protobuf_ItemSimilarityRecommender_2eproto::InitDefaults();
return *internal_default_instance();
}
ItemSimilarityRecommender* ItemSimilarityRecommender::New(::google::protobuf::Arena* arena) const {
ItemSimilarityRecommender* n = new ItemSimilarityRecommender;
if (arena != NULL) {
arena->Own(n);
}
return n;
}
void ItemSimilarityRecommender::Clear() {
// @@protoc_insertion_point(message_clear_start:CoreML.Specification.ItemSimilarityRecommender)
itemitemsimilarities_.Clear();
iteminputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
numrecommendationsinputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
itemrestrictioninputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
itemexclusioninputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
recommendeditemlistoutputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
recommendeditemscoreoutputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
if (GetArenaNoVirtual() == NULL && itemstringids_ != NULL) {
delete itemstringids_;
}
itemstringids_ = NULL;
if (GetArenaNoVirtual() == NULL && itemint64ids_ != NULL) {
delete itemint64ids_;
}
itemint64ids_ = NULL;
}
bool ItemSimilarityRecommender::MergePartialFromCodedStream(
::google::protobuf::io::CodedInputStream* input) {
#define DO_(EXPRESSION) if (!GOOGLE_PREDICT_TRUE(EXPRESSION)) goto failure
::google::protobuf::uint32 tag;
// @@protoc_insertion_point(parse_start:CoreML.Specification.ItemSimilarityRecommender)
for (;;) {
::std::pair< ::google::protobuf::uint32, bool> p = input->ReadTagWithCutoffNoLastTag(16383u);
tag = p.first;
if (!p.second) goto handle_unusual;
switch (::google::protobuf::internal::WireFormatLite::GetTagFieldNumber(tag)) {
// repeated .CoreML.Specification.ItemSimilarityRecommender.SimilarItems itemItemSimilarities = 1;
case 1: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(10u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, add_itemitemsimilarities()));
} else {
goto handle_unusual;
}
break;
}
// .CoreML.Specification.StringVector itemStringIds = 2;
case 2: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(18u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_itemstringids()));
} else {
goto handle_unusual;
}
break;
}
// .CoreML.Specification.Int64Vector itemInt64Ids = 3;
case 3: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(26u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadMessageNoVirtual(
input, mutable_itemint64ids()));
} else {
goto handle_unusual;
}
break;
}
// string itemInputFeatureName = 10;
case 10: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(82u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_iteminputfeaturename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->iteminputfeaturename().data(), this->iteminputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName"));
} else {
goto handle_unusual;
}
break;
}
// string numRecommendationsInputFeatureName = 11;
case 11: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(90u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_numrecommendationsinputfeaturename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->numrecommendationsinputfeaturename().data(), this->numrecommendationsinputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName"));
} else {
goto handle_unusual;
}
break;
}
// string itemRestrictionInputFeatureName = 12;
case 12: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(98u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_itemrestrictioninputfeaturename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->itemrestrictioninputfeaturename().data(), this->itemrestrictioninputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName"));
} else {
goto handle_unusual;
}
break;
}
// string itemExclusionInputFeatureName = 13;
case 13: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(106u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_itemexclusioninputfeaturename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->itemexclusioninputfeaturename().data(), this->itemexclusioninputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName"));
} else {
goto handle_unusual;
}
break;
}
// string recommendedItemListOutputFeatureName = 20;
case 20: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(162u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_recommendeditemlistoutputfeaturename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->recommendeditemlistoutputfeaturename().data(), this->recommendeditemlistoutputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName"));
} else {
goto handle_unusual;
}
break;
}
// string recommendedItemScoreOutputFeatureName = 21;
case 21: {
if (static_cast< ::google::protobuf::uint8>(tag) ==
static_cast< ::google::protobuf::uint8>(170u)) {
DO_(::google::protobuf::internal::WireFormatLite::ReadString(
input, this->mutable_recommendeditemscoreoutputfeaturename()));
DO_(::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->recommendeditemscoreoutputfeaturename().data(), this->recommendeditemscoreoutputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::PARSE,
"CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName"));
} else {
goto handle_unusual;
}
break;
}
default: {
handle_unusual:
if (tag == 0 ||
::google::protobuf::internal::WireFormatLite::GetTagWireType(tag) ==
::google::protobuf::internal::WireFormatLite::WIRETYPE_END_GROUP) {
goto success;
}
DO_(::google::protobuf::internal::WireFormatLite::SkipField(input, tag));
break;
}
}
}
success:
// @@protoc_insertion_point(parse_success:CoreML.Specification.ItemSimilarityRecommender)
return true;
failure:
// @@protoc_insertion_point(parse_failure:CoreML.Specification.ItemSimilarityRecommender)
return false;
#undef DO_
}
void ItemSimilarityRecommender::SerializeWithCachedSizes(
::google::protobuf::io::CodedOutputStream* output) const {
// @@protoc_insertion_point(serialize_start:CoreML.Specification.ItemSimilarityRecommender)
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
// repeated .CoreML.Specification.ItemSimilarityRecommender.SimilarItems itemItemSimilarities = 1;
for (unsigned int i = 0, n = this->itemitemsimilarities_size(); i < n; i++) {
::google::protobuf::internal::WireFormatLite::WriteMessage(
1, this->itemitemsimilarities(i), output);
}
// .CoreML.Specification.StringVector itemStringIds = 2;
if (this->has_itemstringids()) {
::google::protobuf::internal::WireFormatLite::WriteMessage(
2, *this->itemstringids_, output);
}
// .CoreML.Specification.Int64Vector itemInt64Ids = 3;
if (this->has_itemint64ids()) {
::google::protobuf::internal::WireFormatLite::WriteMessage(
3, *this->itemint64ids_, output);
}
// string itemInputFeatureName = 10;
if (this->iteminputfeaturename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->iteminputfeaturename().data(), this->iteminputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
10, this->iteminputfeaturename(), output);
}
// string numRecommendationsInputFeatureName = 11;
if (this->numrecommendationsinputfeaturename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->numrecommendationsinputfeaturename().data(), this->numrecommendationsinputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
11, this->numrecommendationsinputfeaturename(), output);
}
// string itemRestrictionInputFeatureName = 12;
if (this->itemrestrictioninputfeaturename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->itemrestrictioninputfeaturename().data(), this->itemrestrictioninputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
12, this->itemrestrictioninputfeaturename(), output);
}
// string itemExclusionInputFeatureName = 13;
if (this->itemexclusioninputfeaturename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->itemexclusioninputfeaturename().data(), this->itemexclusioninputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
13, this->itemexclusioninputfeaturename(), output);
}
// string recommendedItemListOutputFeatureName = 20;
if (this->recommendeditemlistoutputfeaturename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->recommendeditemlistoutputfeaturename().data(), this->recommendeditemlistoutputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
20, this->recommendeditemlistoutputfeaturename(), output);
}
// string recommendedItemScoreOutputFeatureName = 21;
if (this->recommendeditemscoreoutputfeaturename().size() > 0) {
::google::protobuf::internal::WireFormatLite::VerifyUtf8String(
this->recommendeditemscoreoutputfeaturename().data(), this->recommendeditemscoreoutputfeaturename().length(),
::google::protobuf::internal::WireFormatLite::SERIALIZE,
"CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName");
::google::protobuf::internal::WireFormatLite::WriteStringMaybeAliased(
21, this->recommendeditemscoreoutputfeaturename(), output);
}
// @@protoc_insertion_point(serialize_end:CoreML.Specification.ItemSimilarityRecommender)
}
size_t ItemSimilarityRecommender::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:CoreML.Specification.ItemSimilarityRecommender)
size_t total_size = 0;
// repeated .CoreML.Specification.ItemSimilarityRecommender.SimilarItems itemItemSimilarities = 1;
{
unsigned int count = this->itemitemsimilarities_size();
total_size += 1UL * count;
for (unsigned int i = 0; i < count; i++) {
total_size +=
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
this->itemitemsimilarities(i));
}
}
// string itemInputFeatureName = 10;
if (this->iteminputfeaturename().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->iteminputfeaturename());
}
// string numRecommendationsInputFeatureName = 11;
if (this->numrecommendationsinputfeaturename().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->numrecommendationsinputfeaturename());
}
// string itemRestrictionInputFeatureName = 12;
if (this->itemrestrictioninputfeaturename().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->itemrestrictioninputfeaturename());
}
// string itemExclusionInputFeatureName = 13;
if (this->itemexclusioninputfeaturename().size() > 0) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->itemexclusioninputfeaturename());
}
// string recommendedItemListOutputFeatureName = 20;
if (this->recommendeditemlistoutputfeaturename().size() > 0) {
total_size += 2 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->recommendeditemlistoutputfeaturename());
}
// string recommendedItemScoreOutputFeatureName = 21;
if (this->recommendeditemscoreoutputfeaturename().size() > 0) {
total_size += 2 +
::google::protobuf::internal::WireFormatLite::StringSize(
this->recommendeditemscoreoutputfeaturename());
}
// .CoreML.Specification.StringVector itemStringIds = 2;
if (this->has_itemstringids()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->itemstringids_);
}
// .CoreML.Specification.Int64Vector itemInt64Ids = 3;
if (this->has_itemint64ids()) {
total_size += 1 +
::google::protobuf::internal::WireFormatLite::MessageSizeNoVirtual(
*this->itemint64ids_);
}
int cached_size = ::google::protobuf::internal::ToCachedSize(total_size);
GOOGLE_SAFE_CONCURRENT_WRITES_BEGIN();
_cached_size_ = cached_size;
GOOGLE_SAFE_CONCURRENT_WRITES_END();
return total_size;
}
void ItemSimilarityRecommender::CheckTypeAndMergeFrom(
const ::google::protobuf::MessageLite& from) {
MergeFrom(*::google::protobuf::down_cast<const ItemSimilarityRecommender*>(&from));
}
void ItemSimilarityRecommender::MergeFrom(const ItemSimilarityRecommender& from) {
// @@protoc_insertion_point(class_specific_merge_from_start:CoreML.Specification.ItemSimilarityRecommender)
GOOGLE_DCHECK_NE(&from, this);
_internal_metadata_.MergeFrom(from._internal_metadata_);
::google::protobuf::uint32 cached_has_bits = 0;
(void) cached_has_bits;
itemitemsimilarities_.MergeFrom(from.itemitemsimilarities_);
if (from.iteminputfeaturename().size() > 0) {
iteminputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.iteminputfeaturename_);
}
if (from.numrecommendationsinputfeaturename().size() > 0) {
numrecommendationsinputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.numrecommendationsinputfeaturename_);
}
if (from.itemrestrictioninputfeaturename().size() > 0) {
itemrestrictioninputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.itemrestrictioninputfeaturename_);
}
if (from.itemexclusioninputfeaturename().size() > 0) {
itemexclusioninputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.itemexclusioninputfeaturename_);
}
if (from.recommendeditemlistoutputfeaturename().size() > 0) {
recommendeditemlistoutputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.recommendeditemlistoutputfeaturename_);
}
if (from.recommendeditemscoreoutputfeaturename().size() > 0) {
recommendeditemscoreoutputfeaturename_.AssignWithDefault(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), from.recommendeditemscoreoutputfeaturename_);
}
if (from.has_itemstringids()) {
mutable_itemstringids()->::CoreML::Specification::StringVector::MergeFrom(from.itemstringids());
}
if (from.has_itemint64ids()) {
mutable_itemint64ids()->::CoreML::Specification::Int64Vector::MergeFrom(from.itemint64ids());
}
}
void ItemSimilarityRecommender::CopyFrom(const ItemSimilarityRecommender& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:CoreML.Specification.ItemSimilarityRecommender)
if (&from == this) return;
Clear();
MergeFrom(from);
}
bool ItemSimilarityRecommender::IsInitialized() const {
return true;
}
void ItemSimilarityRecommender::Swap(ItemSimilarityRecommender* other) {
if (other == this) return;
InternalSwap(other);
}
void ItemSimilarityRecommender::InternalSwap(ItemSimilarityRecommender* other) {
itemitemsimilarities_.InternalSwap(&other->itemitemsimilarities_);
iteminputfeaturename_.Swap(&other->iteminputfeaturename_);
numrecommendationsinputfeaturename_.Swap(&other->numrecommendationsinputfeaturename_);
itemrestrictioninputfeaturename_.Swap(&other->itemrestrictioninputfeaturename_);
itemexclusioninputfeaturename_.Swap(&other->itemexclusioninputfeaturename_);
recommendeditemlistoutputfeaturename_.Swap(&other->recommendeditemlistoutputfeaturename_);
recommendeditemscoreoutputfeaturename_.Swap(&other->recommendeditemscoreoutputfeaturename_);
std::swap(itemstringids_, other->itemstringids_);
std::swap(itemint64ids_, other->itemint64ids_);
std::swap(_cached_size_, other->_cached_size_);
}
::std::string ItemSimilarityRecommender::GetTypeName() const {
return "CoreML.Specification.ItemSimilarityRecommender";
}
#if PROTOBUF_INLINE_NOT_IN_HEADERS
// ItemSimilarityRecommender
// repeated .CoreML.Specification.ItemSimilarityRecommender.SimilarItems itemItemSimilarities = 1;
int ItemSimilarityRecommender::itemitemsimilarities_size() const {
return itemitemsimilarities_.size();
}
void ItemSimilarityRecommender::clear_itemitemsimilarities() {
itemitemsimilarities_.Clear();
}
const ::CoreML::Specification::ItemSimilarityRecommender_SimilarItems& ItemSimilarityRecommender::itemitemsimilarities(int index) const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.itemItemSimilarities)
return itemitemsimilarities_.Get(index);
}
::CoreML::Specification::ItemSimilarityRecommender_SimilarItems* ItemSimilarityRecommender::mutable_itemitemsimilarities(int index) {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.itemItemSimilarities)
return itemitemsimilarities_.Mutable(index);
}
::CoreML::Specification::ItemSimilarityRecommender_SimilarItems* ItemSimilarityRecommender::add_itemitemsimilarities() {
// @@protoc_insertion_point(field_add:CoreML.Specification.ItemSimilarityRecommender.itemItemSimilarities)
return itemitemsimilarities_.Add();
}
::google::protobuf::RepeatedPtrField< ::CoreML::Specification::ItemSimilarityRecommender_SimilarItems >*
ItemSimilarityRecommender::mutable_itemitemsimilarities() {
// @@protoc_insertion_point(field_mutable_list:CoreML.Specification.ItemSimilarityRecommender.itemItemSimilarities)
return &itemitemsimilarities_;
}
const ::google::protobuf::RepeatedPtrField< ::CoreML::Specification::ItemSimilarityRecommender_SimilarItems >&
ItemSimilarityRecommender::itemitemsimilarities() const {
// @@protoc_insertion_point(field_list:CoreML.Specification.ItemSimilarityRecommender.itemItemSimilarities)
return itemitemsimilarities_;
}
// .CoreML.Specification.StringVector itemStringIds = 2;
bool ItemSimilarityRecommender::has_itemstringids() const {
return this != internal_default_instance() && itemstringids_ != NULL;
}
void ItemSimilarityRecommender::clear_itemstringids() {
if (GetArenaNoVirtual() == NULL && itemstringids_ != NULL) delete itemstringids_;
itemstringids_ = NULL;
}
const ::CoreML::Specification::StringVector& ItemSimilarityRecommender::itemstringids() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.itemStringIds)
return itemstringids_ != NULL ? *itemstringids_
: *::CoreML::Specification::StringVector::internal_default_instance();
}
::CoreML::Specification::StringVector* ItemSimilarityRecommender::mutable_itemstringids() {
if (itemstringids_ == NULL) {
itemstringids_ = new ::CoreML::Specification::StringVector;
}
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.itemStringIds)
return itemstringids_;
}
::CoreML::Specification::StringVector* ItemSimilarityRecommender::release_itemstringids() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.itemStringIds)
::CoreML::Specification::StringVector* temp = itemstringids_;
itemstringids_ = NULL;
return temp;
}
void ItemSimilarityRecommender::set_allocated_itemstringids(::CoreML::Specification::StringVector* itemstringids) {
delete itemstringids_;
itemstringids_ = itemstringids;
if (itemstringids) {
} else {
}
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.itemStringIds)
}
// .CoreML.Specification.Int64Vector itemInt64Ids = 3;
bool ItemSimilarityRecommender::has_itemint64ids() const {
return this != internal_default_instance() && itemint64ids_ != NULL;
}
void ItemSimilarityRecommender::clear_itemint64ids() {
if (GetArenaNoVirtual() == NULL && itemint64ids_ != NULL) delete itemint64ids_;
itemint64ids_ = NULL;
}
const ::CoreML::Specification::Int64Vector& ItemSimilarityRecommender::itemint64ids() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.itemInt64Ids)
return itemint64ids_ != NULL ? *itemint64ids_
: *::CoreML::Specification::Int64Vector::internal_default_instance();
}
::CoreML::Specification::Int64Vector* ItemSimilarityRecommender::mutable_itemint64ids() {
if (itemint64ids_ == NULL) {
itemint64ids_ = new ::CoreML::Specification::Int64Vector;
}
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.itemInt64Ids)
return itemint64ids_;
}
::CoreML::Specification::Int64Vector* ItemSimilarityRecommender::release_itemint64ids() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.itemInt64Ids)
::CoreML::Specification::Int64Vector* temp = itemint64ids_;
itemint64ids_ = NULL;
return temp;
}
void ItemSimilarityRecommender::set_allocated_itemint64ids(::CoreML::Specification::Int64Vector* itemint64ids) {
delete itemint64ids_;
itemint64ids_ = itemint64ids;
if (itemint64ids) {
} else {
}
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.itemInt64Ids)
}
// string itemInputFeatureName = 10;
void ItemSimilarityRecommender::clear_iteminputfeaturename() {
iteminputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
const ::std::string& ItemSimilarityRecommender::iteminputfeaturename() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
return iteminputfeaturename_.GetNoArena();
}
void ItemSimilarityRecommender::set_iteminputfeaturename(const ::std::string& value) {
iteminputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
}
#if LANG_CXX11
void ItemSimilarityRecommender::set_iteminputfeaturename(::std::string&& value) {
iteminputfeaturename_.SetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
}
#endif
void ItemSimilarityRecommender::set_iteminputfeaturename(const char* value) {
GOOGLE_DCHECK(value != NULL);
iteminputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
}
void ItemSimilarityRecommender::set_iteminputfeaturename(const char* value, size_t size) {
iteminputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
}
::std::string* ItemSimilarityRecommender::mutable_iteminputfeaturename() {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
return iteminputfeaturename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
::std::string* ItemSimilarityRecommender::release_iteminputfeaturename() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
return iteminputfeaturename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void ItemSimilarityRecommender::set_allocated_iteminputfeaturename(::std::string* iteminputfeaturename) {
if (iteminputfeaturename != NULL) {
} else {
}
iteminputfeaturename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), iteminputfeaturename);
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.itemInputFeatureName)
}
// string numRecommendationsInputFeatureName = 11;
void ItemSimilarityRecommender::clear_numrecommendationsinputfeaturename() {
numrecommendationsinputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
const ::std::string& ItemSimilarityRecommender::numrecommendationsinputfeaturename() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
return numrecommendationsinputfeaturename_.GetNoArena();
}
void ItemSimilarityRecommender::set_numrecommendationsinputfeaturename(const ::std::string& value) {
numrecommendationsinputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
}
#if LANG_CXX11
void ItemSimilarityRecommender::set_numrecommendationsinputfeaturename(::std::string&& value) {
numrecommendationsinputfeaturename_.SetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
}
#endif
void ItemSimilarityRecommender::set_numrecommendationsinputfeaturename(const char* value) {
GOOGLE_DCHECK(value != NULL);
numrecommendationsinputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
}
void ItemSimilarityRecommender::set_numrecommendationsinputfeaturename(const char* value, size_t size) {
numrecommendationsinputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
}
::std::string* ItemSimilarityRecommender::mutable_numrecommendationsinputfeaturename() {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
return numrecommendationsinputfeaturename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
::std::string* ItemSimilarityRecommender::release_numrecommendationsinputfeaturename() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
return numrecommendationsinputfeaturename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void ItemSimilarityRecommender::set_allocated_numrecommendationsinputfeaturename(::std::string* numrecommendationsinputfeaturename) {
if (numrecommendationsinputfeaturename != NULL) {
} else {
}
numrecommendationsinputfeaturename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), numrecommendationsinputfeaturename);
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.numRecommendationsInputFeatureName)
}
// string itemRestrictionInputFeatureName = 12;
void ItemSimilarityRecommender::clear_itemrestrictioninputfeaturename() {
itemrestrictioninputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
const ::std::string& ItemSimilarityRecommender::itemrestrictioninputfeaturename() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
return itemrestrictioninputfeaturename_.GetNoArena();
}
void ItemSimilarityRecommender::set_itemrestrictioninputfeaturename(const ::std::string& value) {
itemrestrictioninputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
}
#if LANG_CXX11
void ItemSimilarityRecommender::set_itemrestrictioninputfeaturename(::std::string&& value) {
itemrestrictioninputfeaturename_.SetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
}
#endif
void ItemSimilarityRecommender::set_itemrestrictioninputfeaturename(const char* value) {
GOOGLE_DCHECK(value != NULL);
itemrestrictioninputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
}
void ItemSimilarityRecommender::set_itemrestrictioninputfeaturename(const char* value, size_t size) {
itemrestrictioninputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
}
::std::string* ItemSimilarityRecommender::mutable_itemrestrictioninputfeaturename() {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
return itemrestrictioninputfeaturename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
::std::string* ItemSimilarityRecommender::release_itemrestrictioninputfeaturename() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
return itemrestrictioninputfeaturename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void ItemSimilarityRecommender::set_allocated_itemrestrictioninputfeaturename(::std::string* itemrestrictioninputfeaturename) {
if (itemrestrictioninputfeaturename != NULL) {
} else {
}
itemrestrictioninputfeaturename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), itemrestrictioninputfeaturename);
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.itemRestrictionInputFeatureName)
}
// string itemExclusionInputFeatureName = 13;
void ItemSimilarityRecommender::clear_itemexclusioninputfeaturename() {
itemexclusioninputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
const ::std::string& ItemSimilarityRecommender::itemexclusioninputfeaturename() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
return itemexclusioninputfeaturename_.GetNoArena();
}
void ItemSimilarityRecommender::set_itemexclusioninputfeaturename(const ::std::string& value) {
itemexclusioninputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
}
#if LANG_CXX11
void ItemSimilarityRecommender::set_itemexclusioninputfeaturename(::std::string&& value) {
itemexclusioninputfeaturename_.SetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
}
#endif
void ItemSimilarityRecommender::set_itemexclusioninputfeaturename(const char* value) {
GOOGLE_DCHECK(value != NULL);
itemexclusioninputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
}
void ItemSimilarityRecommender::set_itemexclusioninputfeaturename(const char* value, size_t size) {
itemexclusioninputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
}
::std::string* ItemSimilarityRecommender::mutable_itemexclusioninputfeaturename() {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
return itemexclusioninputfeaturename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
::std::string* ItemSimilarityRecommender::release_itemexclusioninputfeaturename() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
return itemexclusioninputfeaturename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void ItemSimilarityRecommender::set_allocated_itemexclusioninputfeaturename(::std::string* itemexclusioninputfeaturename) {
if (itemexclusioninputfeaturename != NULL) {
} else {
}
itemexclusioninputfeaturename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), itemexclusioninputfeaturename);
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.itemExclusionInputFeatureName)
}
// string recommendedItemListOutputFeatureName = 20;
void ItemSimilarityRecommender::clear_recommendeditemlistoutputfeaturename() {
recommendeditemlistoutputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
const ::std::string& ItemSimilarityRecommender::recommendeditemlistoutputfeaturename() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
return recommendeditemlistoutputfeaturename_.GetNoArena();
}
void ItemSimilarityRecommender::set_recommendeditemlistoutputfeaturename(const ::std::string& value) {
recommendeditemlistoutputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
}
#if LANG_CXX11
void ItemSimilarityRecommender::set_recommendeditemlistoutputfeaturename(::std::string&& value) {
recommendeditemlistoutputfeaturename_.SetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
}
#endif
void ItemSimilarityRecommender::set_recommendeditemlistoutputfeaturename(const char* value) {
GOOGLE_DCHECK(value != NULL);
recommendeditemlistoutputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
}
void ItemSimilarityRecommender::set_recommendeditemlistoutputfeaturename(const char* value, size_t size) {
recommendeditemlistoutputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
}
::std::string* ItemSimilarityRecommender::mutable_recommendeditemlistoutputfeaturename() {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
return recommendeditemlistoutputfeaturename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
::std::string* ItemSimilarityRecommender::release_recommendeditemlistoutputfeaturename() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
return recommendeditemlistoutputfeaturename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void ItemSimilarityRecommender::set_allocated_recommendeditemlistoutputfeaturename(::std::string* recommendeditemlistoutputfeaturename) {
if (recommendeditemlistoutputfeaturename != NULL) {
} else {
}
recommendeditemlistoutputfeaturename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), recommendeditemlistoutputfeaturename);
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.recommendedItemListOutputFeatureName)
}
// string recommendedItemScoreOutputFeatureName = 21;
void ItemSimilarityRecommender::clear_recommendeditemscoreoutputfeaturename() {
recommendeditemscoreoutputfeaturename_.ClearToEmptyNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
const ::std::string& ItemSimilarityRecommender::recommendeditemscoreoutputfeaturename() const {
// @@protoc_insertion_point(field_get:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
return recommendeditemscoreoutputfeaturename_.GetNoArena();
}
void ItemSimilarityRecommender::set_recommendeditemscoreoutputfeaturename(const ::std::string& value) {
recommendeditemscoreoutputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), value);
// @@protoc_insertion_point(field_set:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
}
#if LANG_CXX11
void ItemSimilarityRecommender::set_recommendeditemscoreoutputfeaturename(::std::string&& value) {
recommendeditemscoreoutputfeaturename_.SetNoArena(
&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::move(value));
// @@protoc_insertion_point(field_set_rvalue:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
}
#endif
void ItemSimilarityRecommender::set_recommendeditemscoreoutputfeaturename(const char* value) {
GOOGLE_DCHECK(value != NULL);
recommendeditemscoreoutputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), ::std::string(value));
// @@protoc_insertion_point(field_set_char:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
}
void ItemSimilarityRecommender::set_recommendeditemscoreoutputfeaturename(const char* value, size_t size) {
recommendeditemscoreoutputfeaturename_.SetNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(),
::std::string(reinterpret_cast<const char*>(value), size));
// @@protoc_insertion_point(field_set_pointer:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
}
::std::string* ItemSimilarityRecommender::mutable_recommendeditemscoreoutputfeaturename() {
// @@protoc_insertion_point(field_mutable:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
return recommendeditemscoreoutputfeaturename_.MutableNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
::std::string* ItemSimilarityRecommender::release_recommendeditemscoreoutputfeaturename() {
// @@protoc_insertion_point(field_release:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
return recommendeditemscoreoutputfeaturename_.ReleaseNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited());
}
void ItemSimilarityRecommender::set_allocated_recommendeditemscoreoutputfeaturename(::std::string* recommendeditemscoreoutputfeaturename) {
if (recommendeditemscoreoutputfeaturename != NULL) {
} else {
}
recommendeditemscoreoutputfeaturename_.SetAllocatedNoArena(&::google::protobuf::internal::GetEmptyStringAlreadyInited(), recommendeditemscoreoutputfeaturename);
// @@protoc_insertion_point(field_set_allocated:CoreML.Specification.ItemSimilarityRecommender.recommendedItemScoreOutputFeatureName)
}
#endif // PROTOBUF_INLINE_NOT_IN_HEADERS
// @@protoc_insertion_point(namespace_scope)
} // namespace Specification
} // namespace CoreML
// @@protoc_insertion_point(global_scope)
| 25,162 |
411 | # Volume and surface area of a 5 x 16.5 x 12.5 box (the 10 below is 2 * 5).
volume = 5 * 16.5 * 12.5
area = 10*16.5 + 10*12.5 + 2*16.5*12.5
print('volume =', volume)
print('area =', area)
| 52 |
473 | <filename>django_comments_xtd/utils.py
# Idea borrowed from Selwin Ong post:
# http://ui.co.id/blog/asynchronous-send_mail-in-django
from copy import copy
import hashlib
try:
import Queue as queue # python2
except ImportError:
import queue as queue # python3
import threading
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
from django.core.mail import EmailMultiAlternatives
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.shortcuts import get_current_site
from django.utils.crypto import salted_hmac
from django_comments_xtd.conf import settings
mail_sent_queue = queue.Queue()
class EmailThread(threading.Thread):
def __init__(self, subject, body, from_email, recipient_list,
fail_silently, html):
self.subject = subject
self.body = body
self.recipient_list = recipient_list
self.from_email = from_email
self.fail_silently = fail_silently
self.html = html
threading.Thread.__init__(self)
def run(self):
_send_mail(self.subject, self.body, self.from_email,
self.recipient_list, self.fail_silently, self.html)
mail_sent_queue.put(True)
def _send_mail(subject, body, from_email, recipient_list,
fail_silently=False, html=None):
msg = EmailMultiAlternatives(subject, body, from_email, recipient_list)
if html:
msg.attach_alternative(html, "text/html")
msg.send(fail_silently)
def send_mail(subject, body, from_email, recipient_list,
fail_silently=False, html=None):
if settings.COMMENTS_XTD_THREADED_EMAILS:
EmailThread(subject, body, from_email, recipient_list,
fail_silently, html).start()
else:
_send_mail(subject, body, from_email, recipient_list,
fail_silently, html)
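# Hedged usage sketch (not part of the original module): queueing a notification
# through send_mail; the subject, body and addresses are illustrative assumptions.
def _example_notify(to_address="user@example.com"):
    send_mail("New comment posted",
              "A new comment is awaiting moderation.",
              "noreply@example.com",
              [to_address],
              fail_silently=True,
              html=None)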
def get_app_model_options(comment=None, content_type=None):
"""
Get the app_model_option from COMMENTS_XTD_APP_MODEL_OPTIONS.
If a comment is given, the content_type is extracted from it. Otherwise,
    the content_type kwarg has to be provided. The function checks whether there
is a matching dictionary for the app_label.model of the content_type, and
returns it. It returns the default otherwise: { 'who_can_post': 'all',
'allow_flagging': False, 'allow_feedback': False, 'show_feedback': False }.
"""
default = {
'who_can_post': 'all', # Valid values: "users", "all"
'allow_flagging': False,
'allow_feedback': False,
'show_feedback': False,
}
if 'default' in settings.COMMENTS_XTD_APP_MODEL_OPTIONS:
        # The developer overrode the default settings. Check whether
        # all the expected keys are present in the overriding dictionary.
has_missing_key = False
for k in default.keys():
if k not in settings.COMMENTS_XTD_APP_MODEL_OPTIONS['default']:
has_missing_key = True
if not has_missing_key:
default = copy(settings.COMMENTS_XTD_APP_MODEL_OPTIONS['default'])
if comment:
content_type = ContentType.objects.get_for_model(comment.content_object)
key = "%s.%s" % (content_type.app_label, content_type.model)
elif content_type:
key = content_type
else:
return default
try:
default.update(settings.COMMENTS_XTD_APP_MODEL_OPTIONS[key])
return default
except Exception:
return default
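# Hedged usage sketch (not part of the original module): how calling code might
# read per-model options; the "blog.post" key is an assumption for illustration.
def _example_model_options(content_type_key="blog.post"):
    options = get_app_model_options(content_type=content_type_key)
    # Only registered users may post when 'who_can_post' is set to "users".
    return options['who_can_post'] == 'users'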
def get_current_site_id(request=None):
""" it's a shortcut """
return getattr(get_current_site(request), 'pk', 1) # fallback value
def get_html_id_suffix(object):
value = "%s" % object.__hash__()
suffix = salted_hmac(settings.COMMENTS_XTD_SALT, value).hexdigest()
return suffix
def get_user_avatar(comment):
path = hashlib.md5(comment.user_email.lower().encode('utf-8')).hexdigest()
param = urlencode({'s': 48})
return "//www.gravatar.com/avatar/%s?%s&d=identicon" % (path, param)
| 1,614 |
5,813 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.schema;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Sets;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.query.DataSource;
import org.apache.druid.query.GlobalTableDataSource;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.segment.join.JoinConditionAnalysis;
import org.apache.druid.segment.join.Joinable;
import org.apache.druid.segment.join.JoinableFactory;
import org.apache.druid.segment.loading.SegmentLoader;
import org.apache.druid.server.QueryStackTests;
import org.apache.druid.server.SegmentManager;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.easymock.EasyMock;
import org.joda.time.Period;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;
import java.io.IOException;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
public abstract class DruidSchemaTestCommon extends CalciteTestBase
{
static final PlannerConfig PLANNER_CONFIG_DEFAULT = new PlannerConfig()
{
@Override
public Period getMetadataRefreshPeriod()
{
return new Period("PT1S");
}
};
static final List<InputRow> ROWS1 = ImmutableList.of(
CalciteTests.createRow(ImmutableMap.of("t", "2000-01-01", "m1", "1.0", "dim1", "")),
CalciteTests.createRow(ImmutableMap.of("t", "2000-01-02", "m1", "2.0", "dim1", "10.1")),
CalciteTests.createRow(ImmutableMap.of("t", "2000-01-03", "m1", "3.0", "dim1", "2"))
);
static final List<InputRow> ROWS2 = ImmutableList.of(
CalciteTests.createRow(ImmutableMap.of("t", "2001-01-01", "m1", "4.0", "dim2", ImmutableList.of("a"))),
CalciteTests.createRow(ImmutableMap.of("t", "2001-01-02", "m1", "5.0", "dim2", ImmutableList.of("abc"))),
CalciteTests.createRow(ImmutableMap.of("t", "2001-01-03", "m1", "6.0"))
);
static QueryRunnerFactoryConglomerate conglomerate;
static Closer resourceCloser;
CountDownLatch getDatasourcesLatch = new CountDownLatch(1);
@BeforeClass
public static void setUpClass()
{
resourceCloser = Closer.create();
conglomerate = QueryStackTests.createQueryRunnerFactoryConglomerate(resourceCloser);
}
@AfterClass
public static void tearDownClass() throws IOException
{
resourceCloser.close();
}
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();
SegmentManager segmentManager;
Set<String> segmentDataSourceNames;
Set<String> joinableDataSourceNames;
JoinableFactory globalTableJoinable;
@Before
public void setUpCommon()
{
segmentDataSourceNames = Sets.newConcurrentHashSet();
joinableDataSourceNames = Sets.newConcurrentHashSet();
segmentManager = new SegmentManager(EasyMock.createMock(SegmentLoader.class))
{
@Override
public Set<String> getDataSourceNames()
{
getDatasourcesLatch.countDown();
return segmentDataSourceNames;
}
};
globalTableJoinable = new JoinableFactory()
{
@Override
public boolean isDirectlyJoinable(DataSource dataSource)
{
return dataSource instanceof GlobalTableDataSource &&
joinableDataSourceNames.contains(((GlobalTableDataSource) dataSource).getName());
}
@Override
public Optional<Joinable> build(
DataSource dataSource,
JoinConditionAnalysis condition
)
{
return Optional.empty();
}
};
}
}
| 1,621 |
335 | <filename>S/Scaffolding_noun.json
{
"word": "Scaffolding",
"definitions": [
"A temporary structure on the outside of a building, made of wooden planks and metal poles, used by workmen while building, repairing, or cleaning the building.",
"The materials used in scaffolding."
],
"parts-of-speech": "Noun"
} | 116 |
354 | <filename>execserver/xsPosixFileReader.cpp
/*-------------------------------------------------------------------------
* drawElements Quality Program Execution Server
* ---------------------------------------------
*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*//*!
* \file
* \brief File Reader.
*//*--------------------------------------------------------------------*/
#include "xsPosixFileReader.hpp"
#include <vector>
namespace xs
{
namespace posix
{
FileReader::FileReader (int blockSize, int numBlocks)
: m_file (DE_NULL)
, m_buf (blockSize, numBlocks)
, m_isRunning (false)
{
}
FileReader::~FileReader (void)
{
}
void FileReader::start (const char* filename)
{
DE_ASSERT(!m_isRunning);
m_file = deFile_create(filename, DE_FILEMODE_OPEN|DE_FILEMODE_READ);
XS_CHECK(m_file);
#if (DE_OS != DE_OS_IOS)
// Set to non-blocking mode.
if (!deFile_setFlags(m_file, DE_FILE_NONBLOCKING))
{
deFile_destroy(m_file);
m_file = DE_NULL;
XS_FAIL("Failed to set non-blocking mode");
}
#endif
m_isRunning = true;
de::Thread::start();
}
void FileReader::run (void)
{
std::vector<deUint8> tmpBuf (FILEREADER_TMP_BUFFER_SIZE);
deInt64 numRead = 0;
while (!m_buf.isCanceled())
{
deFileResult result = deFile_read(m_file, &tmpBuf[0], (deInt64)tmpBuf.size(), &numRead);
if (result == DE_FILERESULT_SUCCESS)
{
// Write to buffer.
try
{
m_buf.write((int)numRead, &tmpBuf[0]);
m_buf.flush();
}
catch (const ThreadedByteBuffer::CanceledException&)
{
// Canceled.
break;
}
}
else if (result == DE_FILERESULT_END_OF_FILE ||
result == DE_FILERESULT_WOULD_BLOCK)
{
// Wait for more data.
deSleep(FILEREADER_IDLE_SLEEP);
}
else
break; // Error.
}
}
void FileReader::stop (void)
{
if (!m_isRunning)
return; // Nothing to do.
m_buf.cancel();
// Join thread.
join();
// Destroy file.
deFile_destroy(m_file);
m_file = DE_NULL;
// Reset buffer.
m_buf.clear();
m_isRunning = false;
}
} // posix
} // xs
| 961 |
1,001 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class ListConnectionPoolsRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'IoTCC', '2021-05-13', 'ListConnectionPools','cciot')
self.set_method('POST')
def get_NextToken(self):
return self.get_query_params().get('NextToken')
def set_NextToken(self,NextToken):
self.add_query_param('NextToken',NextToken)
def get_ConnectionPoolStatuss(self):
return self.get_query_params().get('ConnectionPoolStatus')
def set_ConnectionPoolStatuss(self, ConnectionPoolStatuss):
for depth1 in range(len(ConnectionPoolStatuss)):
if ConnectionPoolStatuss[depth1] is not None:
self.add_query_param('ConnectionPoolStatus.' + str(depth1 + 1) , ConnectionPoolStatuss[depth1])
def get_ConnectionPoolNames(self):
return self.get_query_params().get('ConnectionPoolName')
def set_ConnectionPoolNames(self, ConnectionPoolNames):
for depth1 in range(len(ConnectionPoolNames)):
if ConnectionPoolNames[depth1] is not None:
self.add_query_param('ConnectionPoolName.' + str(depth1 + 1) , ConnectionPoolNames[depth1])
def get_IoTCloudConnectorId(self):
return self.get_query_params().get('IoTCloudConnectorId')
def set_IoTCloudConnectorId(self,IoTCloudConnectorId):
self.add_query_param('IoTCloudConnectorId',IoTCloudConnectorId)
def get_ConnectionPoolIdss(self):
return self.get_query_params().get('ConnectionPoolIds')
def set_ConnectionPoolIdss(self, ConnectionPoolIdss):
for depth1 in range(len(ConnectionPoolIdss)):
if ConnectionPoolIdss[depth1] is not None:
self.add_query_param('ConnectionPoolIds.' + str(depth1 + 1) , ConnectionPoolIdss[depth1])
def get_MaxResults(self):
return self.get_query_params().get('MaxResults')
def set_MaxResults(self,MaxResults):
self.add_query_param('MaxResults',MaxResults) | 871 |
575 | # A server used to store and retrieve arbitrary data.
# This is used by: ./dispatcher.js
def main(request, response):
response.headers.set(b'Access-Control-Allow-Origin', b'*')
response.headers.set(b'Access-Control-Allow-Methods', b'OPTIONS, GET, POST')
response.headers.set(b'Access-Control-Allow-Headers', b'Content-Type')
response.headers.set(b'Cache-Control', b'no-cache, no-store, must-revalidate')
if request.method == u'OPTIONS': # CORS preflight
return b''
uuid = request.GET[b'uuid']
    stash = request.server.stash
with stash.lock:
queue = stash.take(uuid)
if queue is None:
queue = []
if request.method == u'POST':
queue.append(request.body)
ret = b'done'
else:
if len(queue) == 0:
ret = b'not ready'
else:
ret = queue.pop(0)
stash.put(uuid, queue)
    return ret
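# Hedged client sketch (not part of the original handler): the intended protocol is
# that a POST queues a message under the given uuid and a later GET pops it (or
# returns b'not ready'). The URL layout below is an assumption for illustration.
def _example_exchange(base_url, uuid_value):
    import urllib.parse
    import urllib.request
    url = base_url + "?" + urllib.parse.urlencode({"uuid": uuid_value})
    urllib.request.urlopen(url, data=b"hello").read()  # POST -> b'done'
    return urllib.request.urlopen(url).read()          # GET  -> b'hello' or b'not ready'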
| 430 |
12,278 | // Copyright <NAME> 2013-2017
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE.md or copy at http://boost.org/LICENSE_1_0.txt)
#include <boost/hana/string.hpp>
namespace hana = boost::hana;
int main() {
using Str = decltype(hana::string_c<'a', 'b', 'c', 'd'>);
Str s{};
(void)s;
}
| 134 |
608 | <gh_stars>100-1000
#pragma once
#include <memory>
#include <string>
#include <torch/torch.h>
#include <torch/script.h>
class TorchModelProp
{
public:
TorchModelProp(int prediction_dim, int feature_dim)
: mPredictionDim(prediction_dim), mFeatureDim(feature_dim)
{}
TorchModelProp()
: mPredictionDim(-1), mFeatureDim(-1) { }
int getPredictionDim() const { return mPredictionDim; }
int getFeatureDim() const { return mFeatureDim; }
private:
int mPredictionDim;
int mFeatureDim;
};
class TorchManager
{
public:
typedef std::shared_ptr<torch::jit::script::Module> torchmodule_t;
typedef std::unordered_map<std::string, torchmodule_t> modulemap_t;
typedef std::unordered_map<std::string, TorchModelProp> moduleprop_t;
public:
void addModule(const std::string &module_name,
const std::string &file_name);
/**
* Return a module from the module manager.
* @param module_name the name of the module
* @param module the returning module
* @return true if module was found, false otherwise
*/
bool getModule(const std::string &module_name, torchmodule_t &module) const;
TorchModelProp getModuleProps(const std::string &module_name) const;
void populateFromDir(const std::string &dirname);
std::vector<std::string> getModuleList() const;
int size() const;
typedef std::shared_ptr<TorchManager> TorchManagerPtr;
private:
modulemap_t mModuleMap;
moduleprop_t mModuleProp;
};
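// Hedged usage sketch (not part of the original header): loading modules from a
// directory and looking one up; the path and module name are assumptions only.
inline bool exampleLookupModule(TorchManager &manager)
{
    manager.populateFromDir("/tmp/models");        // assumed directory
    TorchManager::torchmodule_t module;
    if (!manager.getModule("classifier", module))  // assumed module name
        return false;                              // module not registered
    return module != nullptr;
}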
| 548 |
855 | # -*- coding: utf-8 -*-
import django.dispatch
user_added = django.dispatch.Signal()
user_removed = django.dispatch.Signal()
invitation_accepted = django.dispatch.Signal()
owner_changed = django.dispatch.Signal()
| 82 |
2,224 | import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import re
import os
sns.set(style='darkgrid')
def get_info(filename):
filename = filename.replace('.npy', '') # remove .npy
algo, env, seed = re.split('_', filename)
seed = int(seed)
return algo, env, seed
def get_file_name(path='./'):
file_names = []
for _, __, file_name in os.walk(path):
file_names += file_name
data_name = [f for f in file_names if '.npy' in f]
return data_name
def exact_data(file_name, steps):
'''
    Extract data from a single .npy result file.
    :param file_name: path to the .npy file, named '<algo>_<env>_<seed>.npy'
    :param steps: time-step axis to attach to the average returns
    :return: a DataFrame with average return, time steps, algorithm, env and seed
'''
avg_reward = np.load(file_name).reshape(-1, 1)
algo, env_name, seed = get_info(file_name)
df = pd.DataFrame(avg_reward)
df.columns = ['Average Return']
df['Time Steps (1e6)'] = steps
df['Algorithm'] = algo
df['env'] = env_name
df['seed'] = seed
return df
if __name__ == '__main__':
file_names = get_file_name('./')
_, env_name, __ = get_info(file_names[0])
df = pd.DataFrame([])
steps = np.linspace(0, 1, 201)
for file in file_names:
data = exact_data(file, steps)
df = pd.concat([df, data], axis=0)
sns.lineplot(x='Time Steps (1e6)', y='Average Return', data=df, hue='Algorithm',ci=90)
plt.title(env_name)
plt.savefig(env_name + '.svg')
plt.show()
| 624 |
627 | <gh_stars>100-1000
#ifndef __D3D9_IMPL_H__
#define __D3D9_IMPL_H__
#include "shared.h"
namespace impl
{
namespace d3d9
{
void init();
}
}
#endif // __D3D9_IMPL_H__ | 92 |
32,544 | package com.baeldung.objectclass;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import org.junit.Ignore;
import org.junit.Test;
public class CreditAppUnitTest {
@Test
public void givenLender_whenInstanceOf_thenReturnTrue() {
User lender = new Lender();
assertTrue(lender instanceof Lender);
assertTrue(lender instanceof User);
}
@Test
public void givenUser_whenInstanceOfLender_thenDowncast() {
User user = new Lender();
Lender lender = null;
if(user instanceof Lender) {
lender = (Lender) user;
}
assertNotNull(lender);
}
@Test
public void givenUser_whenIsInstanceOfLender_thenDowncast() {
User user = new Lender();
Lender lender = null;
if(Lender.class.isInstance(user)) {
lender = (Lender) user;
}
assertNotNull(lender);
}
@Ignore
@Test
public void givenBorrower_whenDoubleOrNotString_thenRequestLoan() {
Borrower borrower = new Borrower();
double amount = 100.0;
/*if(amount instanceof Double) { // Compilation error, no autoboxing
borrower.requestLoan(amount);
}
if(!(amount instanceof String)) { // Compilation error, incompatible operands
borrower.requestLoan(amount);
}*/
}
@Test
public void givenBorrower_whenLoanAmountIsDouble_thenRequestLoan() {
Borrower borrower = new Borrower();
double amount = 100.0;
if(Double.class.isInstance(amount)) { // No compilation error
borrower.requestLoan(amount);
}
assertEquals(100, borrower.getTotalLoanAmount());
}
@Test
public void givenBorrower_whenLoanAmountIsNotString_thenRequestLoan() {
Borrower borrower = new Borrower();
Double amount = 100.0;
if(!String.class.isInstance(amount)) { // No compilation error
borrower.requestLoan(amount);
}
assertEquals(100, borrower.getTotalLoanAmount());
}
@Test
public void givenLender_whenGetClass_thenEqualsLenderType() {
User lender = new Lender();
assertEquals(Lender.class, lender.getClass());
assertNotEquals(User.class, lender.getClass());
}
}
| 1,120 |
738 | #include <iostream>
#include <vector>
#include <algorithm>
#define pb push_back
#define mp make_pair
#define st first
#define nd second
#define lli long long int
using namespace std;
// Brute-force longest increasing subsequence: try each element as the next pick
// and recurse on the strictly larger elements to its right (exponential time).
int lis(vector<int> arr) {
if(arr.empty()) return 0;
int len = arr.size(), answer = 1;
for(int i=0; i < len; i++) {
vector<int> next;
for(int j=i+1; j < len; j++) {
if(arr[j] > arr[i]) next.push_back(arr[j]);
}
answer = max(answer, 1 + lis(next));
}
return answer;
}
int main(){
ios::sync_with_stdio(0);
cin.tie(0);
vector<int> arr;
int n, num;
cin >> n;
for(int i=0;i<n;i++) {
cin >> num;
arr.pb(num);
}
// arr.pb(10);
// arr.pb(20);
// arr.pb(10);
// arr.pb(30);
// arr.pb(20);
// arr.pb(50);
// arr.pb(40);
// arr.pb(25);
// arr.pb(20);
// arr.pb(50);
// arr.pb(30);
// arr.pb(70);
// arr.pb(85);
    cout << lis(arr) << "\n";
}
| 433 |
4,538 | <gh_stars>1000+
#ifndef __NET_DUMP_H__
#define __NET_DUMP_H__
#ifdef __cplusplus
extern "C" {
#endif
typedef void *net_dump_handle_t;
int net_dump_connect(net_dump_handle_t net_dump_t,char *server_addr,int port);
void net_dump_disconnect(net_dump_handle_t net_dump_t);
int net_dump_data(net_dump_handle_t net_dump_t,char *dump_data,int dump_len);
int net_get_dump_size(net_dump_handle_t net_dump_t,char *file_name);
int net_dump_login(net_dump_handle_t net_dump_t,char *username,char*password);
int net_dump_notice_server(net_dump_handle_t net_dump_t,char *dump_file_name);
void net_dump_free(net_dump_handle_t net_dump_ops_t);
net_dump_handle_t net_dump_init(void);
#ifdef __cplusplus
}
#endif
#endif | 295 |
578 | <reponame>lllrrr2/bk-job<filename>src/backend/commons/common-k8s/src/main/java/com/tencent/bk/job/common/k8s/provider/K8SServiceInfoProvider.java<gh_stars>100-1000
/*
* Tencent is pleased to support the open source community by making BK-JOB蓝鲸智云作业平台 available.
*
* Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved.
*
* BK-JOB蓝鲸智云作业平台 is licensed under the MIT License.
*
* License for BK-JOB蓝鲸智云作业平台:
* --------------------------------------------------------------------
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
* to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
* THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
* IN THE SOFTWARE.
*/
package com.tencent.bk.job.common.k8s.provider;
import com.tencent.bk.job.common.discovery.ServiceInfoProvider;
import com.tencent.bk.job.common.discovery.model.ServiceInstanceInfoDTO;
import com.tencent.bk.job.common.util.json.JsonUtils;
import io.kubernetes.client.openapi.ApiClient;
import io.kubernetes.client.openapi.ApiException;
import io.kubernetes.client.openapi.Configuration;
import io.kubernetes.client.openapi.apis.CoreV1Api;
import io.kubernetes.client.openapi.models.V1ObjectMeta;
import io.kubernetes.client.openapi.models.V1Pod;
import io.kubernetes.client.openapi.models.V1PodList;
import io.kubernetes.client.openapi.models.V1PodStatus;
import io.kubernetes.client.util.Config;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.DiscoveryClient;
import org.springframework.cloud.kubernetes.commons.discovery.KubernetesServiceInstance;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
@Slf4j
public class K8SServiceInfoProvider implements ServiceInfoProvider {
public final String KEY_HELM_NAMESPACE = "meta.helm.sh/release-namespace";
public final String KEY_JOB_MS_VERSION = "bk.job.image/tag";
public final String VERSION_UNKNOWN = "-";
public final String NAMESPACE_DEFAULT = "default";
public final String PHASE_RUNNING = "Running";
private final DiscoveryClient discoveryClient;
public K8SServiceInfoProvider(DiscoveryClient discoveryClient) {
this.discoveryClient = discoveryClient;
log.debug("K8sServiceInfoServiceImpl inited");
}
private String getNameSpace(ServiceInstance serviceInstance) {
KubernetesServiceInstance k8sServiceInstance = (KubernetesServiceInstance) serviceInstance;
String namespace = k8sServiceInstance.getNamespace();
if (StringUtils.isNotBlank(namespace)) return namespace;
return serviceInstance.getMetadata().getOrDefault(KEY_HELM_NAMESPACE, NAMESPACE_DEFAULT);
}
private V1Pod findPodByUid(V1PodList podList, String uid) {
if (podList == null) return null;
for (V1Pod pod : podList.getItems()) {
V1ObjectMeta metaData = pod.getMetadata();
if (metaData != null && uid.equals(metaData.getUid())) {
return pod;
}
}
return null;
}
private Byte convertPodStatus(V1PodStatus podStatus) {
if (podStatus == null) return ServiceInstanceInfoDTO.STATUS_UNKNOWN;
String phase = podStatus.getPhase();
if (StringUtils.isNotBlank(phase)) {
if (phase.equals(PHASE_RUNNING))
return ServiceInstanceInfoDTO.STATUS_OK;
else
return ServiceInstanceInfoDTO.STATUS_ERROR;
}
return ServiceInstanceInfoDTO.STATUS_UNKNOWN;
}
private ServiceInstanceInfoDTO getDetailFromK8s(
ServiceInstance serviceInstance
) {
ServiceInstanceInfoDTO serviceInstanceInfoDTO = new ServiceInstanceInfoDTO();
serviceInstanceInfoDTO.setServiceName(serviceInstance.getServiceId());
serviceInstanceInfoDTO.setIp(serviceInstance.getHost());
serviceInstanceInfoDTO.setPort(serviceInstance.getPort());
String namespace = getNameSpace(serviceInstance);
V1PodList podList;
try {
ApiClient client = Config.defaultClient();
Configuration.setDefaultApiClient(client);
CoreV1Api api = new CoreV1Api();
podList = api.listNamespacedPod(
namespace, null, null, null,
null, null, null, null,
null, null, null);
} catch (ApiException | IOException e) {
log.error("Fail to get pod info from k8s API", e);
serviceInstanceInfoDTO.setName(serviceInstance.getInstanceId());
serviceInstanceInfoDTO.setVersion(VERSION_UNKNOWN);
serviceInstanceInfoDTO.setStatusCode(convertPodStatus(null));
serviceInstanceInfoDTO.setStatusMessage("Fail to get pod info from k8s");
return serviceInstanceInfoDTO;
}
        V1Pod pod = findPodByUid(podList, serviceInstance.getInstanceId());
        if (pod == null) {
            // findPodByUid may return null; avoid a NullPointerException below.
            serviceInstanceInfoDTO.setName(serviceInstance.getInstanceId());
            serviceInstanceInfoDTO.setVersion(VERSION_UNKNOWN);
            serviceInstanceInfoDTO.setStatusCode(convertPodStatus(null));
            serviceInstanceInfoDTO.setStatusMessage("Pod not found for instance uid");
            return serviceInstanceInfoDTO;
        }
        V1ObjectMeta metaData = pod.getMetadata();
if (metaData != null) {
serviceInstanceInfoDTO.setName(metaData.getName());
}
if (metaData != null && metaData.getLabels() != null) {
String version = metaData.getLabels().getOrDefault(KEY_JOB_MS_VERSION, VERSION_UNKNOWN);
serviceInstanceInfoDTO.setVersion(version);
log.debug("namespace={},version={}", namespace, version);
}
V1PodStatus podStatus = pod.getStatus();
if (podStatus != null) {
serviceInstanceInfoDTO.setStatusCode(convertPodStatus(podStatus));
serviceInstanceInfoDTO.setStatusMessage(podStatus.getReason());
}
if (log.isDebugEnabled()) {
log.debug("podStatus={}", JsonUtils.toJson(podStatus));
}
return serviceInstanceInfoDTO;
}
@Override
public List<ServiceInstanceInfoDTO> listServiceInfo() {
List<String> serviceIdList = discoveryClient.getServices();
List<ServiceInstance> serviceInstanceList = new ArrayList<>();
for (String serviceId : serviceIdList) {
serviceInstanceList.addAll(discoveryClient.getInstances(serviceId));
}
for (ServiceInstance serviceInstance : serviceInstanceList) {
log.debug("serviceInstance={}", JsonUtils.toJson(serviceInstance));
}
return serviceInstanceList.parallelStream().filter(serviceInstance -> {
if (serviceInstance.getServiceId().contains("job-gateway-management")) {
return false;
} else {
return serviceInstance.getServiceId().contains("job-");
}
}).map(this::getDetailFromK8s).collect(Collectors.toList());
}
}
| 2,907 |
494 | <gh_stars>100-1000
/*******************************************************************************
* Copyright 2011 Netflix
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.netflix.astyanax.thrift;
import java.util.List;
import com.netflix.astyanax.AbstractColumnListMutation;
import org.apache.cassandra.thrift.Column;
import org.apache.cassandra.thrift.ColumnOrSuperColumn;
import org.apache.cassandra.thrift.Deletion;
import org.apache.cassandra.thrift.Mutation;
import org.apache.cassandra.thrift.SlicePredicate;
import org.apache.cassandra.thrift.SuperColumn;
import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Serializer;
import com.netflix.astyanax.model.ColumnPath;
/**
* @deprecated Use composite columns instead
* @author elandau
*
* @param <C>
*/
public class ThriftSuperColumnMutationImpl<C> extends AbstractColumnListMutation<C> {
private final List<Mutation> mutationList;
private final ColumnPath<C> path;
private SuperColumn superColumn;
private SlicePredicate deletionPredicate;
public ThriftSuperColumnMutationImpl(long timestamp, List<Mutation> mutationList, ColumnPath<C> path) {
super(timestamp);
this.path = path;
this.mutationList = mutationList;
}
@Override
public <V> ColumnListMutation<C> putColumn(C columnName, V value, Serializer<V> valueSerializer, Integer ttl) {
Column column = new Column();
column.setName(path.getSerializer().toByteBuffer(columnName));
column.setValue(valueSerializer.toByteBuffer(value));
column.setTimestamp(timestamp);
if (ttl != null)
column.setTtl(ttl);
else if (defaultTtl != null)
column.setTtl(defaultTtl);
addMutation(column);
return this;
}
private void addMutation(Column column) {
// 2. Create the super column mutation if this is the first call
if (superColumn == null) {
superColumn = new SuperColumn().setName(path.get(0));
Mutation mutation = new Mutation();
mutation.setColumn_or_supercolumn(new ColumnOrSuperColumn().setSuper_column(superColumn));
mutationList.add(mutation);
}
superColumn.addToColumns(column);
}
@Override
public ColumnListMutation<C> putEmptyColumn(C columnName, Integer ttl) {
Column column = new Column();
column.setName(path.getSerializer().toByteBuffer(columnName));
column.setValue(ThriftUtils.EMPTY_BYTE_BUFFER);
column.setTimestamp(timestamp);
if (ttl != null)
column.setTtl(ttl);
else if (defaultTtl != null)
column.setTtl(defaultTtl);
addMutation(column);
return this;
}
@Override
public ColumnListMutation<C> delete() {
Deletion d = new Deletion();
d.setSuper_column(path.get(0));
d.setTimestamp(timestamp);
mutationList.add(new Mutation().setDeletion(d));
timestamp++;
return this;
}
@Override
public <SC> ColumnListMutation<SC> withSuperColumn(ColumnPath<SC> superColumnPath) {
throw new UnsupportedOperationException();
}
@Override
public ColumnListMutation<C> incrementCounterColumn(C columnName, long amount) {
throw new UnsupportedOperationException();
}
@Override
public ColumnListMutation<C> deleteColumn(C columnName) {
if (deletionPredicate == null) {
deletionPredicate = new SlicePredicate();
Deletion d = new Deletion();
d.setTimestamp(timestamp);
d.setSuper_column(path.get(0));
d.setPredicate(deletionPredicate);
mutationList.add(new Mutation().setDeletion(d));
}
deletionPredicate.addToColumn_names(path.getSerializer().toByteBuffer(columnName));
return this;
}
@Override
public ColumnListMutation<C> putCompressedColumn(C columnName, String value, Integer ttl) {
throw new UnsupportedOperationException();
}
@Override
public ColumnListMutation<C> putCompressedColumn(C columnName, String value) {
throw new UnsupportedOperationException();
}
@Override
public ColumnListMutation<C> putCompressedColumnIfNotNull(C columnName, String value, Integer ttl) {
throw new UnsupportedOperationException();
}
@Override
public ColumnListMutation<C> putCompressedColumnIfNotNull(C columnName, String value) {
throw new UnsupportedOperationException();
}
}
| 1,871 |
2,226 | # Copyright (c) OpenMMLab. All rights reserved.
from .roi_embed_head import RoIEmbedHead
from .roi_track_head import RoITrackHead
from .siamese_rpn_head import CorrelationHead, SiameseRPNHead
__all__ = ['CorrelationHead', 'SiameseRPNHead', 'RoIEmbedHead', 'RoITrackHead']
| 98 |
411 | <reponame>kachinc/documents4j
package com.documents4j.demo;
import org.apache.wicket.extensions.markup.html.repeater.data.sort.ISortStateLocator;
import org.apache.wicket.extensions.markup.html.repeater.data.table.*;
import org.apache.wicket.markup.repeater.data.IDataProvider;
import org.apache.wicket.model.Model;
import java.util.ArrayList;
import java.util.List;
class FileTable extends DataTable<FileRow, Void> {
@SuppressWarnings("unchecked")
public FileTable(String id) {
super(id, makeColumns(), makeDataProvider(), 100L);
addTopToolbar(new HeadersToolbar<Void>(this, (ISortStateLocator<Void>) getDataProvider()));
addBottomToolbar(new NoRecordsToolbar(this));
}
private static List<? extends IColumn<FileRow, Void>> makeColumns() {
List<IColumn<FileRow, Void>> result = new ArrayList<IColumn<FileRow, Void>>();
result.add(new PropertyColumn<FileRow, Void>(Model.of("#"), "row"));
result.add(new LinkColumn(Model.of("Uploaded file"), LinkColumn.FileInput.SOURCE));
result.add(new LinkColumn(Model.of("Converted file"), LinkColumn.FileInput.TARGET));
result.add(new PropertyColumn<FileRow, Void>(Model.of("Duration"), "duration"));
return result;
}
private static IDataProvider<FileRow> makeDataProvider() {
return new FileDataProvider();
}
}
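// --- Editor's note (not part of the original file) ---
// A minimal, hypothetical sketch of mounting the FileTable above on a Wicket
// page. The DemoPage class, the "files" component id and the assumption that it
// sits in the same package as FileTable are for illustration only; the page
// markup would need a matching <table wicket:id="files"/> tag.
import org.apache.wicket.markup.html.WebPage;

class DemoPage extends WebPage {
    public DemoPage() {
        // Renders up to 100 rows per page, as configured in FileTable's constructor.
        add(new FileTable("files"));
    }
}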
| 503 |
394 | <gh_stars>100-1000
package com.bugtag.networkapp.volley;
import android.util.Log;
import com.android.volley.AuthFailureError;
import com.android.volley.NetworkResponse;
import com.android.volley.ParseError;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.toolbox.HttpHeaderParser;
import org.apache.http.HttpEntity;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/**
 * A Volley request that posts a multipart/form-data body built from
 * {@link MultipartRequestParams} and hands the response body back as a String.
 */
public class MultipartRequest extends Request<String> {
    private final Response.Listener<String> mListener;
    private final MultipartRequestParams params;
    private HttpEntity httpEntity = null;

    public MultipartRequest(int method, MultipartRequestParams params, String url,
                            Response.Listener<String> listener, Response.ErrorListener errorListener) {
        super(method, url, errorListener);
        this.params = params;
        this.mListener = listener;
    }
    @Override
    public byte[] getBody() throws AuthFailureError {
        // Serialize the multipart entity into the byte array Volley sends as the request body.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        if (params != null) {
            httpEntity = params.getEntity();
            try {
                httpEntity.writeTo(baos);
            } catch (IOException e) {
                Log.e("MultipartRequest", "Failed to write multipart body", e);
            }
        }
        return baos.toByteArray();
    }
    @Override
    public Map<String, String> getHeaders() throws AuthFailureError {
        // The default headers may be an immutable empty map; return a mutable map instead.
        Map<String, String> headers = super.getHeaders();
        if (headers == null || headers.equals(Collections.emptyMap())) {
            headers = new HashMap<String, String>();
        }
        return headers;
    }
    @Override
    public String getBodyContentType() {
        // The multipart entity supplies the content type, including the boundary parameter.
        return httpEntity.getContentType().getValue();
    }
@Override
protected Response<String> parseNetworkResponse(NetworkResponse response) {
String parsed;
try {
parsed = new String(response.data, HttpHeaderParser.parseCharset(response.headers));
} catch (UnsupportedEncodingException e) {
return Response.error(new ParseError(e));
}
return Response.success(parsed, HttpHeaderParser.parseCacheHeaders(response));
}
@Override
protected void deliverResponse(String response) {
mListener.onResponse(response);
}
}
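// --- Editor's note (not part of the original file) ---
// A minimal, hypothetical sketch of queueing the request above with Volley. The
// upload URL and log tags are placeholders, and MultipartRequestParams is only
// passed through here because its own API is not shown in this file.
import android.content.Context;
import android.util.Log;
import com.android.volley.Request;
import com.android.volley.RequestQueue;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.Volley;

class UploadExample {
    static void upload(Context context, MultipartRequestParams params) {
        RequestQueue queue = Volley.newRequestQueue(context);
        MultipartRequest request = new MultipartRequest(
                Request.Method.POST,
                params,
                "https://example.com/upload",   // placeholder endpoint
                new Response.Listener<String>() {
                    @Override
                    public void onResponse(String response) {
                        Log.d("Upload", "Server response: " + response);
                    }
                },
                new Response.ErrorListener() {
                    @Override
                    public void onErrorResponse(VolleyError error) {
                        Log.e("Upload", "Upload failed", error);
                    }
                });
        queue.add(request);
    }
}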
| 1,072 |
572 | <gh_stars>100-1000
default_app_config = 'tests._testsite.apps.forum_conversation.apps.ForumConversationAppConfig'
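# --- Editor's note (not part of the original file) ---
# A minimal, hypothetical sketch of the AppConfig class that the dotted path
# above points at; the label and verbose_name values are assumptions for
# illustration only.
from django.apps import AppConfig


class ForumConversationAppConfig(AppConfig):
    name = 'tests._testsite.apps.forum_conversation'
    label = 'forum_conversation'
    verbose_name = 'Forum conversation (test app)'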
| 40 |