text
stringlengths
2
100k
meta
dict
/*
 * Copyright (c) 2010 Broadcom Corporation
 *
 * Permission to use, copy, modify, and/or distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
 * SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION
 * OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
 * CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 */

#ifndef _BRCMU_D11_H_
#define _BRCMU_D11_H_

/* d11 io type */
#define BRCMU_D11N_IOTYPE		1
#define BRCMU_D11AC_IOTYPE		2

/* A chanspec (channel specification) holds the channel number, band,
 * bandwidth and control sideband
 */

/* chanspec binary format */

#define BRCMU_CHSPEC_INVALID		255
/* bit 0~7 channel number
 * for 80+80 channels: bit 0~3 low channel id, bit 4~7 high channel id
 */
#define BRCMU_CHSPEC_CH_MASK		0x00ff
#define BRCMU_CHSPEC_CH_SHIFT		0
#define BRCMU_CHSPEC_CHL_MASK		0x000f
#define BRCMU_CHSPEC_CHL_SHIFT		0
#define BRCMU_CHSPEC_CHH_MASK		0x00f0
#define BRCMU_CHSPEC_CHH_SHIFT		4

/* bit 8~16 for dot 11n IO types
 * bit 8~9 sideband
 * bit 10~11 bandwidth
 * bit 12~13 spectral band
 * bit 14~15 not used
 */
#define BRCMU_CHSPEC_D11N_SB_MASK	0x0300
#define BRCMU_CHSPEC_D11N_SB_SHIFT	8
#define BRCMU_CHSPEC_D11N_SB_L		0x0100	/* control lower */
#define BRCMU_CHSPEC_D11N_SB_U		0x0200	/* control upper */
#define BRCMU_CHSPEC_D11N_SB_N		0x0300	/* none */
#define BRCMU_CHSPEC_D11N_BW_MASK	0x0c00
#define BRCMU_CHSPEC_D11N_BW_SHIFT	10
#define BRCMU_CHSPEC_D11N_BW_10		0x0400
#define BRCMU_CHSPEC_D11N_BW_20		0x0800
#define BRCMU_CHSPEC_D11N_BW_40		0x0c00
#define BRCMU_CHSPEC_D11N_BND_MASK	0x3000
#define BRCMU_CHSPEC_D11N_BND_SHIFT	12
#define BRCMU_CHSPEC_D11N_BND_5G	0x1000
#define BRCMU_CHSPEC_D11N_BND_2G	0x2000

/* bit 8~16 for dot 11ac IO types
 * bit 8~10 sideband
 * bit 11~13 bandwidth
 * bit 14~15 spectral band
 */
#define BRCMU_CHSPEC_D11AC_SB_MASK	0x0700
#define BRCMU_CHSPEC_D11AC_SB_SHIFT	8
#define BRCMU_CHSPEC_D11AC_SB_LLL	0x0000
#define BRCMU_CHSPEC_D11AC_SB_LLU	0x0100
#define BRCMU_CHSPEC_D11AC_SB_LUL	0x0200
#define BRCMU_CHSPEC_D11AC_SB_LUU	0x0300
#define BRCMU_CHSPEC_D11AC_SB_ULL	0x0400
#define BRCMU_CHSPEC_D11AC_SB_ULU	0x0500
#define BRCMU_CHSPEC_D11AC_SB_UUL	0x0600
#define BRCMU_CHSPEC_D11AC_SB_UUU	0x0700
/* Short aliases: the 40 MHz (two-letter) and 20 MHz (one-letter) sideband
 * names map onto the low end of the three-letter 80 MHz encoding.
 */
#define BRCMU_CHSPEC_D11AC_SB_LL	BRCMU_CHSPEC_D11AC_SB_LLL
#define BRCMU_CHSPEC_D11AC_SB_LU	BRCMU_CHSPEC_D11AC_SB_LLU
#define BRCMU_CHSPEC_D11AC_SB_UL	BRCMU_CHSPEC_D11AC_SB_LUL
#define BRCMU_CHSPEC_D11AC_SB_UU	BRCMU_CHSPEC_D11AC_SB_LUU
#define BRCMU_CHSPEC_D11AC_SB_L		BRCMU_CHSPEC_D11AC_SB_LLL
#define BRCMU_CHSPEC_D11AC_SB_U		BRCMU_CHSPEC_D11AC_SB_LLU
#define BRCMU_CHSPEC_D11AC_BW_MASK	0x3800
#define BRCMU_CHSPEC_D11AC_BW_SHIFT	11
#define BRCMU_CHSPEC_D11AC_BW_5		0x0000
#define BRCMU_CHSPEC_D11AC_BW_10	0x0800
#define BRCMU_CHSPEC_D11AC_BW_20	0x1000
#define BRCMU_CHSPEC_D11AC_BW_40	0x1800
#define BRCMU_CHSPEC_D11AC_BW_80	0x2000
#define BRCMU_CHSPEC_D11AC_BW_160	0x2800
#define BRCMU_CHSPEC_D11AC_BW_8080	0x3000
#define BRCMU_CHSPEC_D11AC_BND_MASK	0xc000
#define BRCMU_CHSPEC_D11AC_BND_SHIFT	14
#define BRCMU_CHSPEC_D11AC_BND_2G	0x0000
#define BRCMU_CHSPEC_D11AC_BND_3G	0x4000
#define BRCMU_CHSPEC_D11AC_BND_4G	0x8000
#define BRCMU_CHSPEC_D11AC_BND_5G	0xc000

/* Decoded (io-type independent) band identifiers. */
#define BRCMU_CHAN_BAND_2G		0
#define BRCMU_CHAN_BAND_5G		1

/* Decoded channel bandwidth. */
enum brcmu_chan_bw {
	BRCMU_CHAN_BW_20,
	BRCMU_CHAN_BW_40,
	BRCMU_CHAN_BW_80,
	BRCMU_CHAN_BW_80P80,
	BRCMU_CHAN_BW_160,
};

/* Decoded control sideband; L/U letters select the lower/upper sub-band
 * at each halving step (one letter for 40 MHz, up to three for 160 MHz).
 */
enum brcmu_chan_sb {
	BRCMU_CHAN_SB_NONE = 0,
	BRCMU_CHAN_SB_L,
	BRCMU_CHAN_SB_U,
	BRCMU_CHAN_SB_LL,
	BRCMU_CHAN_SB_LU,
	BRCMU_CHAN_SB_UL,
	BRCMU_CHAN_SB_UU,
	BRCMU_CHAN_SB_LLL,
	BRCMU_CHAN_SB_LLU,
	BRCMU_CHAN_SB_LUL,
	BRCMU_CHAN_SB_LUU,
	BRCMU_CHAN_SB_ULL,
	BRCMU_CHAN_SB_ULU,
	BRCMU_CHAN_SB_UUL,
	BRCMU_CHAN_SB_UUU,
};

/* A channel in both packed (chspec) and decoded form. */
struct brcmu_chan {
	u16 chspec;		/* packed chanspec, format per io type */
	u8 chnum;		/* channel number */
	u8 band;		/* BRCMU_CHAN_BAND_* */
	enum brcmu_chan_bw bw;
	enum brcmu_chan_sb sb;
};

/* Per-io-type encode/decode operations, selected by brcmu_d11_attach(). */
struct brcmu_d11inf {
	u8 io_type;		/* BRCMU_D11N_IOTYPE or BRCMU_D11AC_IOTYPE */

	void (*encchspec)(struct brcmu_chan *ch);
	void (*decchspec)(struct brcmu_chan *ch);
};

extern void brcmu_d11_attach(struct brcmu_d11inf *d11inf);

#endif	/* _BRCMU_D11_H_ */
/* NOTE(review): the guard comment above previously read _BRCMU_CHANNELS_H_,
 * which did not match the actual guard macro _BRCMU_D11_H_; corrected. */
{ "pile_set_name": "Github" }
import React from 'react' import { NavLink } from 'react-router-dom' import Logo from 'react-svg-loader!../../assets/origin-logo.svg' const Navigation = props => { return ( <nav id="sidebar" className="navbar" style={{ height: props.expandSidebar ? '100vh' : '' }} > <div className={`nav-icon d-md-none ${ props.expandSidebar ? 'nav-icon-open' : '' }`} onClick={props.onExpandSidebar} > <div></div> </div> <div className="brand mt-3 text-center"> <Logo /> <br /> <p className="mt-2 mb-0">dShop Edit</p> </div> <div className={`mt-4 ml-3 mb-auto ${ props.expandSidebar ? '' : 'd-none d-md-block' }`} > <ul className="navbar-nav mb-5"> <li className="nav-item mb-3"> <NavLink to="/edit/products" className="nav-link text"> Products </NavLink> </li> <li className="nav-item mb-3"> <NavLink to="/edit/collections" className="nav-link text"> Collections </NavLink> </li> <li className="nav-item mb-3"> <NavLink to="/edit/settings" className="nav-link text"> Settings </NavLink> </li> </ul> </div> </nav> ) } export default Navigation
{ "pile_set_name": "Github" }
#ifndef _IMAGES_H
#define _IMAGES_H

/* Image handles used by the game, grouped by the feature flags that enable
 * them.  Under the USE_*_MEMORY_BUFFERS configurations the images live in a
 * separately compiled buffer module, so only `extern` declarations appear
 * here; otherwise the header provides the (tentative) definitions itself.
 * NOTE(review): the non-extern branches define objects in a header — this
 * relies on every build including it from a single translation unit (or on
 * common-symbol linkage); confirm against the build setup.
 */

/* Player sprite: one static image, or four directional frames when the
 * ANIMATE_PLAYER feature is enabled. */
#if !defined(ANIMATE_PLAYER)
	Image PLAYER_IMAGE;
#else
	#if defined(USE_MEMORY_BUFFERS) || defined(USE_TINY_MEMORY_BUFFERS) || defined(USE_LIGHT_MEMORY_BUFFERS)
		extern Image PLAYER_DOWN_IMAGE;
		extern Image PLAYER_UP_IMAGE;
		extern Image PLAYER_RIGHT_IMAGE;
		extern Image PLAYER_LEFT_IMAGE;
	#else
		Image PLAYER_DOWN_IMAGE;
		Image PLAYER_UP_IMAGE;
		Image PLAYER_RIGHT_IMAGE;
		Image PLAYER_LEFT_IMAGE;
	#endif
#endif

/* Core enemy/item sprites, present in every configuration. */
#if defined(USE_MEMORY_BUFFERS) || defined(USE_TINY_MEMORY_BUFFERS) || defined(USE_LIGHT_MEMORY_BUFFERS)
	extern Image GHOST_IMAGE;
	extern Image BOMB_IMAGE;
#else
	Image GHOST_IMAGE;
	Image BOMB_IMAGE;
#endif

#if !defined(NO_DEAD_GHOSTS)
	Image DEAD_GHOST_IMAGE;
#endif

/* Mid-tier sprites: excluded only by the "tiny" memory configuration. */
#if defined(USE_MEMORY_BUFFERS) || defined(USE_LIGHT_MEMORY_BUFFERS)
	extern Image CALM_DOWN_IMAGE;
	extern Image FIRE_CHARGE_IMAGE;
	extern Image BULLET_IMAGE;
	extern Image FIRE_POWER_IMAGE;
	extern Image SKULL_IMAGE;
	extern Image EXTRA_POINTS_IMAGE;
	extern Image HORIZONTAL_BRICK_IMAGE;
	extern Image VERTICAL_BRICK_IMAGE;
#else
	Image CALM_DOWN_IMAGE;
	Image FIRE_CHARGE_IMAGE;
	Image BULLET_IMAGE;
	Image FIRE_POWER_IMAGE;
	Image SKULL_IMAGE;
	Image EXTRA_POINTS_IMAGE;
	Image HORIZONTAL_BRICK_IMAGE;
	Image VERTICAL_BRICK_IMAGE;
#endif

/* Full sprite set: only when the full memory-buffer configuration is on. */
#if defined(USE_MEMORY_BUFFERS)
	extern Image LEFT_HORIZONTAL_MISSILE_IMAGE;
	extern Image RIGHT_HORIZONTAL_MISSILE_IMAGE;
	extern Image ROCKET_IMAGE;
	extern Image FREEZE_IMAGE;
	extern Image EXTRA_LIFE_IMAGE;
	extern Image INVINCIBILITY_IMAGE;
	extern Image CHASE_IMAGE;
	extern Image SUPER_IMAGE;
	extern Image CONFUSE_IMAGE;
	extern Image SUICIDE_IMAGE;
	extern Image BROKEN_BRICK_IMAGE;
#else
	Image LEFT_HORIZONTAL_MISSILE_IMAGE;
	Image RIGHT_HORIZONTAL_MISSILE_IMAGE;
	Image ROCKET_IMAGE;
	Image FREEZE_IMAGE;
	Image EXTRA_LIFE_IMAGE;
	Image INVINCIBILITY_IMAGE;
	Image CHASE_IMAGE;
	Image SUPER_IMAGE;
	Image CONFUSE_IMAGE;
	Image SUICIDE_IMAGE;
	Image BROKEN_BRICK_IMAGE;
#endif

#endif // _IMAGES_H
{ "pile_set_name": "Github" }
/*
 *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "webrtc/modules/audio_processing/aec3/decimator_by_4.h"

#include "webrtc/rtc_base/checks.h"

namespace webrtc {
namespace {

// Anti-alias low-pass coefficients:
// [B,A] = butter(2,1500/16000) which are the same as [B,A] =
// butter(2,750/8000).
const CascadedBiQuadFilter::BiQuadCoefficients kLowPassFilterCoefficients = {
    {0.0179f, 0.0357f, 0.0179f},
    {-1.5879f, 0.6594f}};

}  // namespace

// Three cascaded biquad stages form the anti-aliasing filter.
DecimatorBy4::DecimatorBy4()
    : low_pass_filter_(kLowPassFilterCoefficients, 3) {}

// Produces one output sample per four input samples: the input block is
// first low-pass filtered to limit its bandwidth, then every fourth
// filtered sample is kept.
void DecimatorBy4::Decimate(rtc::ArrayView<const float> in,
                            rtc::ArrayView<float> out) {
  RTC_DCHECK_EQ(kBlockSize, in.size());
  RTC_DCHECK_EQ(kSubBlockSize, out.size());
  std::array<float, kBlockSize> filtered;

  // Limit the frequency content of the signal to avoid aliasing.
  low_pass_filter_.Process(in, filtered);

  // Keep every fourth filtered sample.
  for (size_t i = 0; i < out.size(); ++i) {
    const size_t source_index = 4 * i;
    RTC_DCHECK_GT(kBlockSize, source_index);
    out[i] = filtered[source_index];
  }
}

}  // namespace webrtc
{ "pile_set_name": "Github" }
<!DOCTYPE html>
<html>

	<head>
		<meta charset="utf-8">
		<title>Hello MUI</title>
		<meta name="viewport" content="width=device-width, initial-scale=1,maximum-scale=1,user-scalable=no">
		<meta name="apple-mobile-web-app-capable" content="yes">
		<meta name="apple-mobile-web-app-status-bar-style" content="black">
		<link rel="stylesheet" href="css/mui.min.css">
		<style>
			html,body {
				background-color: #efeff4;
			}

			.title{
				margin: 20px 15px 10px;
				color: #6d6d72;
				font-size: 15px;
			}
		</style>
	</head>

	<body>
		<!--<header class="mui-bar mui-bar-nav">
			<a class="mui-action-back mui-icon mui-icon-left-nav mui-pull-left"></a>
			<h1 class="mui-title">设置</h1>
		</header>-->
		<div class="mui-content">
			<div class="title">
				这是webview模式选项卡中的第4个子页面,该页面展示一个常见的设置示例
			</div>
			<ul class="mui-table-view">
				<li class="mui-table-view-cell">
					<a class="mui-navigate-right">
						新消息通知
					</a>
				</li>
				<li class="mui-table-view-cell">
					<a class="mui-navigate-right">
						隐私
					</a>
				</li>
				<li class="mui-table-view-cell">
					<a class="mui-navigate-right">
						通用
					</a>
				</li>
			</ul>
			<ul class="mui-table-view" style="margin-top: 25px;">
				<li class="mui-table-view-cell">
					<a id="about" class="mui-navigate-right">
						关于mui
					</a>
				</li>
			</ul>
			<ul class="mui-table-view" style="margin-top: 25px;">
				<li class="mui-table-view-cell">
					<a style="text-align: center;color: #FF3B30;" id="exit">
						退出登录
					</a>
				</li>
			</ul>
		</div>
		<!-- Fix: removed a stray unmatched </style> tag that appeared here, and
		     moved the scripts inside <body> (they were previously placed after
		     </body>, which is invalid markup). -->
		<script src="js/mui.min.js"></script>
		<script>
			mui.init({
				swipeBack:true //启用右滑关闭功能
			});
			document.getElementById("exit").addEventListener('tap',function () {
				// window.parent.location.href = './login.html';
				// mui.openWindow('login.html');
				location.href = 'login.html';
			});
		</script>
	</body>

</html>
{ "pile_set_name": "Github" }
/* Test of conversion of string to 32-bit wide string.
   Copyright (C) 2008-2020 Free Software Foundation, Inc.

   This program is free software: you can redistribute it and/or modify
   it under the terms of the GNU General Public License as published by
   the Free Software Foundation; either version 3 of the License, or
   (at your option) any later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program.  If not, see <https://www.gnu.org/licenses/>.  */

/* Written by Bruno Haible <[email protected]>, 2008.  */

#include <config.h>

#include <uchar.h>

#include "signature.h"
SIGNATURE_CHECK (mbsnrtoc32s, size_t, (char32_t *, const char **, size_t, size_t, mbstate_t *));

#include <locale.h>
#include <stdio.h>
#include <string.h>

#include "macros.h"

/* Exercises mbsnrtoc32s in the locale selected by the environment.
   argv[1] selects the encoding family under test ('1' = ISO-8859-1/-15,
   '2' = UTF-8, '3' = EUC-JP, '4' = GB18030); with no argument only the
   NUL-byte behavior is checked and the test returns 1 (skipped).
   Each case runs twice: once with a destination limit smaller than the
   input (unlimited == 0) and once with room for the whole string.  */
int
main (int argc, char *argv[])
{
  mbstate_t state;
  char32_t wc;
  size_t ret;

  /* configure should already have checked that the locale is supported.  */
  if (setlocale (LC_ALL, "") == NULL)
    return 1;

  /* Test NUL byte input.  */
  {
    const char *src;

    memset (&state, '\0', sizeof (mbstate_t));

    src = "";
    ret = mbsnrtoc32s (NULL, &src, 1, 0, &state);
    ASSERT (ret == 0);
    ASSERT (mbsinit (&state));

    src = "";
    ret = mbsnrtoc32s (NULL, &src, 1, 1, &state);
    ASSERT (ret == 0);
    ASSERT (mbsinit (&state));

    /* With len == 0 the destination must stay untouched.  */
    wc = (char32_t) 0xBADFACE;
    src = "";
    ret = mbsnrtoc32s (&wc, &src, 1, 0, &state);
    ASSERT (ret == 0);
    ASSERT (wc == (char32_t) 0xBADFACE);
    ASSERT (mbsinit (&state));

    wc = (char32_t) 0xBADFACE;
    src = "";
    ret = mbsnrtoc32s (&wc, &src, 1, 1, &state);
    ASSERT (ret == 0);
    ASSERT (wc == 0);
    ASSERT (mbsinit (&state));
  }

  if (argc > 1)
    {
      int unlimited;

      for (unlimited = 0; unlimited < 2; unlimited++)
        {
          #define BUFSIZE 10
          char32_t buf[BUFSIZE];
          const char *src;
          mbstate_t temp_state;

          /* Poison the buffer so untouched slots are detectable.  */
          {
            size_t i;

            for (i = 0; i < BUFSIZE; i++)
              buf[i] = (char32_t) 0xBADFACE;
          }

          switch (argv[1][0])
            {
            case '1':
              /* Locale encoding is ISO-8859-1 or ISO-8859-15.  */
              {
                char input[] = "B\374\337er"; /* "Büßer" */
                memset (&state, '\0', sizeof (mbstate_t));

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input, 1, &state);
                ASSERT (ret == 1);
                ASSERT (wc == 'B');
                ASSERT (mbsinit (&state));
                input[0] = '\0';

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input + 1, 1, &state);
                ASSERT (ret == 1);
                ASSERT (c32tob (wc) == (unsigned char) '\374');
                ASSERT (mbsinit (&state));
                input[1] = '\0';

                /* First pass with NULL dest only counts the characters.  */
                src = input + 2;
                temp_state = state;
                ret = mbsnrtoc32s (NULL, &src, 4, unlimited ? BUFSIZE : 1, &temp_state);
                ASSERT (ret == 3);
                ASSERT (src == input + 2);
                ASSERT (mbsinit (&state));

                src = input + 2;
                ret = mbsnrtoc32s (buf, &src, 4, unlimited ? BUFSIZE : 1, &state);
                ASSERT (ret == (unlimited ? 3 : 1));
                ASSERT (src == (unlimited ? NULL : input + 3));
                ASSERT (c32tob (buf[0]) == (unsigned char) '\337');
                if (unlimited)
                  {
                    ASSERT (buf[1] == 'e');
                    ASSERT (buf[2] == 'r');
                    ASSERT (buf[3] == 0);
                    ASSERT (buf[4] == (char32_t) 0xBADFACE);
                  }
                else
                  ASSERT (buf[1] == (char32_t) 0xBADFACE);
                ASSERT (mbsinit (&state));
              }
              break;

            case '2':
              /* Locale encoding is UTF-8.  */
              {
                char input[] = "s\303\274\303\237\360\237\230\213!"; /* "süß😋!" */
                memset (&state, '\0', sizeof (mbstate_t));

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input, 1, &state);
                ASSERT (ret == 1);
                ASSERT (wc == 's');
                ASSERT (mbsinit (&state));
                input[0] = '\0';

                /* One byte of a two-byte sequence: incomplete, state kept.  */
                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input + 1, 1, &state);
                ASSERT (ret == (size_t)(-2));
                ASSERT (wc == (char32_t) 0xBADFACE);
                ASSERT (!mbsinit (&state));
                input[1] = '\0';

                src = input + 2;
                temp_state = state;
                ret = mbsnrtoc32s (NULL, &src, 9, unlimited ? BUFSIZE : 2, &temp_state);
                ASSERT (ret == 4);
                ASSERT (src == input + 2);
                ASSERT (!mbsinit (&state));

                src = input + 2;
                ret = mbsnrtoc32s (buf, &src, 9, unlimited ? BUFSIZE : 2, &state);
                ASSERT (ret == (unlimited ? 4 : 2));
                ASSERT (src == (unlimited ? NULL : input + 5));
                ASSERT (c32tob (buf[0]) == EOF);
                ASSERT (c32tob (buf[1]) == EOF);
                if (unlimited)
                  {
                    ASSERT (buf[2] == 0x1F60B); /* expect Unicode encoding */
                    ASSERT (buf[3] == '!');
                    ASSERT (buf[4] == 0);
                    ASSERT (buf[5] == (char32_t) 0xBADFACE);
                  }
                else
                  ASSERT (buf[2] == (char32_t) 0xBADFACE);
                ASSERT (mbsinit (&state));
              }
              break;

            case '3':
              /* Locale encoding is EUC-JP.  */
              {
                char input[] = "<\306\374\313\334\270\354>"; /* "<日本語>" */
                memset (&state, '\0', sizeof (mbstate_t));

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input, 1, &state);
                ASSERT (ret == 1);
                ASSERT (wc == '<');
                ASSERT (mbsinit (&state));
                input[0] = '\0';

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input + 1, 2, &state);
                ASSERT (ret == 2);
                ASSERT (c32tob (wc) == EOF);
                ASSERT (mbsinit (&state));
                input[1] = '\0';
                input[2] = '\0';

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input + 3, 1, &state);
                ASSERT (ret == (size_t)(-2));
                ASSERT (wc == (char32_t) 0xBADFACE);
                ASSERT (!mbsinit (&state));
                input[3] = '\0';

                src = input + 4;
                temp_state = state;
                ret = mbsnrtoc32s (NULL, &src, 5, unlimited ? BUFSIZE : 2, &temp_state);
                ASSERT (ret == 3);
                ASSERT (src == input + 4);
                ASSERT (!mbsinit (&state));

                src = input + 4;
                ret = mbsnrtoc32s (buf, &src, 5, unlimited ? BUFSIZE : 2, &state);
                ASSERT (ret == (unlimited ? 3 : 2));
                ASSERT (src == (unlimited ? NULL : input + 7));
                ASSERT (c32tob (buf[0]) == EOF);
                ASSERT (c32tob (buf[1]) == EOF);
                if (unlimited)
                  {
                    ASSERT (buf[2] == '>');
                    ASSERT (buf[3] == 0);
                    ASSERT (buf[4] == (char32_t) 0xBADFACE);
                  }
                else
                  ASSERT (buf[2] == (char32_t) 0xBADFACE);
                ASSERT (mbsinit (&state));
              }
              break;

            case '4':
              /* Locale encoding is GB18030.  */
              {
                char input[] = "s\250\271\201\060\211\070\224\071\375\067!"; /* "süß😋!" */
                memset (&state, '\0', sizeof (mbstate_t));

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input, 1, &state);
                ASSERT (ret == 1);
                ASSERT (wc == 's');
                ASSERT (mbsinit (&state));
                input[0] = '\0';

                wc = (char32_t) 0xBADFACE;
                ret = mbrtoc32 (&wc, input + 1, 1, &state);
                ASSERT (ret == (size_t)(-2));
                ASSERT (wc == (char32_t) 0xBADFACE);
                ASSERT (!mbsinit (&state));
                input[1] = '\0';

                src = input + 2;
                temp_state = state;
                ret = mbsnrtoc32s (NULL, &src, 11, unlimited ? BUFSIZE : 2, &temp_state);
                ASSERT (ret == 4);
                ASSERT (src == input + 2);
                ASSERT (!mbsinit (&state));

                src = input + 2;
                ret = mbsnrtoc32s (buf, &src, 11, unlimited ? BUFSIZE : 2, &state);
                ASSERT (ret == (unlimited ? 4 : 2));
                ASSERT (src == (unlimited ? NULL : input + 7));
                ASSERT (c32tob (buf[0]) == EOF);
                ASSERT (c32tob (buf[1]) == EOF);
                if (unlimited)
                  {
                    ASSERT (c32tob (buf[2]) == EOF);
                    ASSERT (buf[3] == '!');
                    ASSERT (buf[4] == 0);
                    ASSERT (buf[5] == (char32_t) 0xBADFACE);
                  }
                else
                  ASSERT (buf[2] == (char32_t) 0xBADFACE);
                ASSERT (mbsinit (&state));
              }
              break;

            default:
              return 1;
            }
        }

      return 0;
    }

  return 1;
}
{ "pile_set_name": "Github" }
image: "/assets/png/explosion.png" tile_width: 24 tile_height: 24 tile_margin: 0 tile_spacing: 0 collision: "" material_tag: "tile" animations { id: "anim" start_tile: 1 end_tile: 8 playback: PLAYBACK_ONCE_FORWARD fps: 30 flip_horizontal: 0 flip_vertical: 0 } extrude_borders: 0 inner_padding: 0
{ "pile_set_name": "Github" }
// Copyright 2014 the V8 project authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. // Flags: --expose-debug-as debug --allow-natives-syntax // Test debug events when we listen to all exceptions and // there is a catch handler for the to-be-rejected Promise. // We expect an Exception debug event with a promise to be triggered. Debug = debug.Debug; var expected_events = 1; var log = []; var p = new Promise(function(resolve, reject) { log.push("resolve"); resolve(); }); var q = p.chain( function() { log.push("reject"); return Promise.reject(new Error("uncaught reject")); }); function listener(event, exec_state, event_data, data) { try { if (event == Debug.DebugEvent.Exception) { expected_events--; assertTrue(expected_events >= 0); assertEquals("uncaught reject", event_data.exception().message); assertTrue(event_data.promise() instanceof Promise); assertSame(q, event_data.promise()); assertTrue(event_data.uncaught()); // All of the frames on the stack are from native Javascript. assertEquals(0, exec_state.frameCount()); } } catch (e) { %AbortJS(e + "\n" + e.stack); } } Debug.setBreakOnException(); Debug.setListener(listener); log.push("end main"); function testDone(iteration) { function checkResult() { try { assertTrue(iteration < 10); if (expected_events === 0) { assertEquals(["resolve", "end main", "reject"], log); } else { testDone(iteration + 1); } } catch (e) { %AbortJS(e + "\n" + e.stack); } } // Run testDone through the Object.observe processing loop. var dummy = {}; Object.observe(dummy, checkResult); dummy.dummy = dummy; } testDone(0);
{ "pile_set_name": "Github" }
# MIT License
#
# Copyright (C) The Adversarial Robustness Toolbox (ART) Authors 2018
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import, division, print_function, unicode_literals

import logging
import unittest

import numpy as np

from art.attacks.evasion.fast_gradient import FastGradientMethod
from art.attacks.evasion.deepfool import DeepFool
from art.data_generators import DataGenerator
from art.defences.trainer.adversarial_trainer import AdversarialTrainer
from art.utils import load_mnist

from tests.utils import master_seed, get_image_classifier_tf

logger = logging.getLogger(__name__)

BATCH_SIZE = 10
# Tests run on small MNIST subsets to keep training fast.
NB_TRAIN = 100
NB_TEST = 100


class TestAdversarialTrainer(unittest.TestCase):
    """
    Test cases for the AdversarialTrainer class.

    Note: the exact accuracy values asserted below are pinned by
    ``master_seed(seed=1234)``; they depend on the seeded classifier and
    data subset and will change if the seed or fixtures change.
    """

    @classmethod
    def setUpClass(cls):
        """Load a small MNIST subset and a TensorFlow image classifier, shared by all tests."""
        # MNIST
        (x_train, y_train), (x_test, y_test), _, _ = load_mnist()
        x_train, y_train, x_test, y_test = (
            x_train[:NB_TRAIN],
            y_train[:NB_TRAIN],
            x_test[:NB_TEST],
            y_test[:NB_TEST],
        )
        cls.mnist = ((x_train, y_train), (x_test, y_test))
        cls.classifier, _ = get_image_classifier_tf()

    def setUp(self):
        # Re-seed before every test so results are deterministic.
        master_seed(seed=1234)

    def test_classifier_match(self):
        """The trainer must wrap exactly the classifier it was given."""
        attack = FastGradientMethod(self.classifier)
        adv_trainer = AdversarialTrainer(self.classifier, attack)

        self.assertEqual(len(adv_trainer.attacks), 1)
        self.assertEqual(adv_trainer.attacks[0].estimator, adv_trainer.get_classifier())

    def test_fit_predict(self):
        """Adversarial training with a single FGM attack changes adversarial accuracy as expected."""
        (x_train, y_train), (x_test, y_test) = self.mnist
        x_test_original = x_test.copy()

        attack = FastGradientMethod(self.classifier)
        x_test_adv = attack.generate(x_test)
        predictions = np.argmax(self.classifier.predict(x_test_adv), axis=1)
        accuracy = np.sum(predictions == np.argmax(y_test, axis=1)) / NB_TEST

        adv_trainer = AdversarialTrainer(self.classifier, attack)
        adv_trainer.fit(x_train, y_train, nb_epochs=5, batch_size=128)

        predictions_new = np.argmax(adv_trainer.predict(x_test_adv), axis=1)
        accuracy_new = np.sum(predictions_new == np.argmax(y_test, axis=1)) / NB_TEST

        self.assertEqual(accuracy_new, 0.12)
        self.assertEqual(accuracy, 0.13)

        # Check that x_test has not been modified by attack and classifier
        self.assertAlmostEqual(float(np.max(np.abs(x_test_original - x_test))), 0.0, delta=0.00001)

    def test_two_attacks(self):
        """Training with a list of two attacks (FGM + DeepFool) works and inputs stay untouched."""
        (x_train, y_train), (x_test, y_test) = self.mnist
        x_test_original = x_test.copy()

        attack1 = FastGradientMethod(estimator=self.classifier, batch_size=16)
        attack2 = DeepFool(classifier=self.classifier, max_iter=5, batch_size=16)
        x_test_adv = attack1.generate(x_test)
        predictions = np.argmax(self.classifier.predict(x_test_adv), axis=1)
        accuracy = np.sum(predictions == np.argmax(y_test, axis=1)) / NB_TEST

        adv_trainer = AdversarialTrainer(self.classifier, attacks=[attack1, attack2])
        adv_trainer.fit(x_train, y_train, nb_epochs=2, batch_size=16)

        predictions_new = np.argmax(adv_trainer.predict(x_test_adv), axis=1)
        accuracy_new = np.sum(predictions_new == np.argmax(y_test, axis=1)) / NB_TEST

        self.assertEqual(accuracy_new, 0.14)
        self.assertEqual(accuracy, 0.13)

        # Check that x_test has not been modified by attack and classifier
        self.assertAlmostEqual(float(np.max(np.abs(x_test_original - x_test))), 0.0, delta=0.00001)

    def test_two_attacks_with_generator(self):
        """Adversarial training fed by a custom DataGenerator (fit_generator path)."""
        (x_train, y_train), (x_test, y_test) = self.mnist
        x_train_original = x_train.copy()
        x_test_original = x_test.copy()

        class MyDataGenerator(DataGenerator):
            # Minimal generator: random sampling without replacement per batch.
            def __init__(self, x, y, size, batch_size):
                super().__init__(size=size, batch_size=batch_size)
                self.x = x
                self.y = y
                self._size = size
                self._batch_size = batch_size

            def get_batch(self):
                ids = np.random.choice(self.size, size=min(self.size, self.batch_size), replace=False)
                return self.x[ids], self.y[ids]

        generator = MyDataGenerator(x_train, y_train, size=x_train.shape[0], batch_size=16)

        attack1 = FastGradientMethod(estimator=self.classifier, batch_size=16)
        attack2 = DeepFool(classifier=self.classifier, max_iter=5, batch_size=16)
        x_test_adv = attack1.generate(x_test)
        predictions = np.argmax(self.classifier.predict(x_test_adv), axis=1)
        accuracy = np.sum(predictions == np.argmax(y_test, axis=1)) / NB_TEST

        adv_trainer = AdversarialTrainer(self.classifier, attacks=[attack1, attack2])
        adv_trainer.fit_generator(generator, nb_epochs=3)

        predictions_new = np.argmax(adv_trainer.predict(x_test_adv), axis=1)
        accuracy_new = np.sum(predictions_new == np.argmax(y_test, axis=1)) / NB_TEST

        # Generator sampling is stochastic, so only an approximate accuracy is checked.
        self.assertAlmostEqual(accuracy_new, 0.38, delta=0.02)
        self.assertAlmostEqual(accuracy, 0.1, delta=0.0)

        # Check that x_train and x_test has not been modified by attack and classifier
        self.assertAlmostEqual(float(np.max(np.abs(x_train_original - x_train))), 0.0, delta=0.00001)
        self.assertAlmostEqual(float(np.max(np.abs(x_test_original - x_test))), 0.0, delta=0.00001)

    def test_targeted_attack_error(self):
        """
        Test the adversarial trainer using a targeted attack, which will currently result in
        a NotImplementError.

        :return: None
        """
        (x_train, y_train), (_, _) = self.mnist
        params = {"nb_epochs": 2, "batch_size": BATCH_SIZE}

        adv = FastGradientMethod(self.classifier, targeted=True)
        adv_trainer = AdversarialTrainer(self.classifier, attacks=adv)
        self.assertRaises(NotImplementedError, adv_trainer.fit, x_train, y_train, **params)


if __name__ == "__main__":
    unittest.main()
{ "pile_set_name": "Github" }
--
-- checksum table column and the whole table
-- (regression test for the checksum() aggregate; targets openGauss/GaussDB
-- dialect: smalldatetime, money, and column-oriented storage are used)
--
drop table if exists zktt1;
drop table if exists zktt2;
drop table if exists zktt3;

-- Wide table covering one column per supported data type.
create table zktt1 (
  c1 tinyint,
  c2 smallint,
  c3 integer,
  c4 bigint,
  c5 numeric,
  c6 text,
  c7 varchar(10),
  c8 char,
  c9 smalldatetime,
  c10 timestamp,
  c11 timestamptz,
  c12 date,
  c13 money,
  c14 bool
);

create table zktt2 (
  a int,
  b text
);

-- Same shape as zktt2 but column-oriented storage.
create table zktt3 (
  a int,
  b text
) with (orientation = column);

--
-- insert values
--
insert into zktt1 values (1, 2, 3, 4, 5.6, '789', '1112', '1', smalldatetime '2010-01-10 08:08:08', timestamp '2010-01-10 08:08:08',timestamptz '2010-01-10 08:08:08',date '2010-08-08', money '234234.90', true);
insert into zktt1 values (4, 3, 2, 1, 6.5, '432', '2222', '2', smalldatetime '2012-01-10 08:08:08', timestamp '2011-12-10 08:08:08',timestamptz '2011-12-10 08:08:08',date '1234-05-06', money '1234.56', false);
insert into zktt2 values (1, '');
insert into zktt2 values (2, null);
insert into zktt2 values (3, 3);
insert into zktt3 select * from zktt2;

--
-- checksum for given data types
-- NOTE(review): c10 (timestamp) is never checksummed directly below,
-- although every other column is — possibly an oversight; verify.
--
select checksum(c1) from zktt1;
select checksum(c2) from zktt1;
select checksum(c3) from zktt1;
select checksum(c4) from zktt1;
select checksum(c5) from zktt1;
select checksum(c6) from zktt1;
select checksum(c7) from zktt1;
select checksum(c8) from zktt1;
select checksum(c9) from zktt1;
select checksum(c11) from zktt1;
select checksum(c12) from zktt1;
select checksum(c13) from zktt1;
select checksum(c13::text) from zktt1;
select checksum(c14) from zktt1;
select checksum(c14::text) from zktt1;

--
-- checksum for null values
--
select checksum(b) from zktt2 where a < 3;
select checksum(b) from zktt2;
select checksum(b), checksum(b) from zktt2;

--
-- checksum for group by order by
--
select checksum(a) from zktt2 group by a order by a;

--
-- checksum for join tables
--
select checksum(a) from zktt2 join zktt1 on zktt2.a=zktt1.c1;

--
-- checksum for sub query and complex query
--
select checksum(a) from zktt2 where a in (select c1 from zktt1);
select * from (select checksum(a) from zktt2 group by a order by a) order by checksum;
select checksum(tbl::text) from (select checksum(a) from zktt2 group by a) as tbl;

--
-- checksum for update table data
-- (whole-row checksum must change with the data; rolled back afterwards)
--
begin;
select checksum(zktt2::text) from zktt2;
update zktt2 set b='4' where a=1;
select checksum(zktt2::text) from zktt2;
rollback;

--
-- checksum for insert table data
--
begin;
select checksum(zktt2::text) from zktt2;
insert into zktt2 values (1,2);
select checksum(zktt2::text) from zktt2;
rollback;

--
-- checksum for delete table data
--
begin;
select checksum(zktt2::text) from zktt2;
delete from zktt2 where a=1;
select checksum(zktt2::text) from zktt2;
rollback;

--
-- checksum for column-stored table
--
select checksum(zktt3::text) from zktt3;

DROP TABLE zktt1;
DROP TABLE zktt2;
DROP TABLE zktt3;

--
-- test pg_stat_get_activity_for_temptable
--
select * from pg_stat_get_activity_for_temptable() limit 1;
{ "pile_set_name": "Github" }
/*
 *
 * Copyright (c) Eicon Networks, 2002.
 *
 * This source file is supplied for the use with
 * Eicon Networks range of DIVA Server Adapters.
 *
 * Eicon File Revision :    2.1
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2, or (at your option)
 * any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY OF ANY KIND WHATSOEVER INCLUDING ANY
 * implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 */
#ifndef __DIVA_DMA_MAPPING_IFC_H__
#define __DIVA_DMA_MAPPING_IFC_H__

/* Interface for a table of DMA map entries: each entry pairs a virtual
 * address with its bus/physical address (`dword`, a driver-defined 32-bit
 * type) plus an opaque allocation handle.  The table itself is opaque. */
typedef struct _diva_dma_map_entry diva_dma_map_entry_t;

/* Allocate a map with room for 'nentries' entries; os_context is passed
 * through to the OS wrapper. */
struct _diva_dma_map_entry *diva_alloc_dma_map(void *os_context, int nentries);
/* Fill entry 'nr' with a virtual/physical address pair and the handle used
 * later to free the underlying allocation. */
void diva_init_dma_map_entry(struct _diva_dma_map_entry *pmap,
			     int nr, void *virt, dword phys,
			     void *addr_handle);
/* Reserve a free entry; returns its index (error convention defined by the
 * implementation — not visible here). */
int diva_alloc_dma_map_entry(struct _diva_dma_map_entry *pmap);
/* Return entry 'entry' to the free pool. */
void diva_free_dma_map_entry(struct _diva_dma_map_entry *pmap, int entry);
/* Read back the virtual/physical pair stored in entry 'nr'. */
void diva_get_dma_map_entry(struct _diva_dma_map_entry *pmap, int nr,
			    void **pvirt, dword *pphys);
/* Release the map structure itself. */
void diva_free_dma_mapping(struct _diva_dma_map_entry *pmap);

/*
  Functionality to be implemented by OS wrapper
  and running in process context
*/
void diva_init_dma_map(void *hdev,
		       struct _diva_dma_map_entry **ppmap, int nentries);
void diva_free_dma_map(void *hdev,
		       struct _diva_dma_map_entry *pmap);
void *diva_get_entry_handle(struct _diva_dma_map_entry *pmap, int nr);

#endif
{ "pile_set_name": "Github" }
define([
	"../core",
	"../queue",
	"../effects" // Delay is optional because of this dependency
], function( jQuery ) {

// Based off of the plugin by Clint Helfers, with permission.
// http://blindsignals.com/index.php/2009/07/jquery-delay/
//
// Queues a timed pause of `time` milliseconds (or a named speed such as
// "slow"/"fast" when the effects module is loaded) on the given queue
// (default "fx").  The pause is stoppable via the queue's stop hook.
jQuery.fn.delay = function( time, type ) {
	// Resolve named durations through jQuery.fx.speeds when effects exist.
	if ( jQuery.fx ) {
		time = jQuery.fx.speeds[ time ] || time;
	}
	if ( !type ) {
		type = "fx";
	}

	return this.queue( type, function( next, hooks ) {
		var timeout = setTimeout( next, time );

		// Allow .stop()/.finish() to cancel the pending pause.
		hooks.stop = function() {
			clearTimeout( timeout );
		};
	});
};

return jQuery.fn.delay;
});
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.jboss.errai.bus.client.api;

/**
 * Abstract test that covers the contract of the AsyncTask interface. Tests for
 * client-side AsyncTask implementations should extend this class; tests for
 * server-side implementations should extend {@link AbstractAsyncTaskTest} directly.
 *
 * @author Jonathan Fuerth <[email protected]>
 */
public abstract class ClientAsyncTaskTest extends AbstractAsyncTaskTest {

  /**
   * Names the GWT module the client-side tests run under.
   */
  @Override
  public String getModuleName() {
    return "org.jboss.errai.bus.ErraiBusTests";
  }

}
{ "pile_set_name": "Github" }
<?php /* * Copyright 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ class Google_Service_ConsumerSurveys_SurveyAudience extends Google_Collection { protected $collection_key = 'languages'; public $ages; public $country; public $countrySubdivision; public $gender; public $languages; public $mobileAppPanelId; public $populationSource; public function setAges($ages) { $this->ages = $ages; } public function getAges() { return $this->ages; } public function setCountry($country) { $this->country = $country; } public function getCountry() { return $this->country; } public function setCountrySubdivision($countrySubdivision) { $this->countrySubdivision = $countrySubdivision; } public function getCountrySubdivision() { return $this->countrySubdivision; } public function setGender($gender) { $this->gender = $gender; } public function getGender() { return $this->gender; } public function setLanguages($languages) { $this->languages = $languages; } public function getLanguages() { return $this->languages; } public function setMobileAppPanelId($mobileAppPanelId) { $this->mobileAppPanelId = $mobileAppPanelId; } public function getMobileAppPanelId() { return $this->mobileAppPanelId; } public function setPopulationSource($populationSource) { $this->populationSource = $populationSource; } public function getPopulationSource() { return $this->populationSource; } }
{ "pile_set_name": "Github" }
-- Copyright (C) 2001 Bill Billowitch. -- Some of the work to develop this test suite was done with Air Force -- support. The Air Force and Bill Billowitch assume no -- responsibilities for this software. -- This file is part of VESTs (Vhdl tESTs). -- VESTs is free software; you can redistribute it and/or modify it -- under the terms of the GNU General Public License as published by the -- Free Software Foundation; either version 2 of the License, or (at -- your option) any later version. -- VESTs is distributed in the hope that it will be useful, but WITHOUT -- ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or -- FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License -- for more details. -- You should have received a copy of the GNU General Public License -- along with VESTs; if not, write to the Free Software Foundation, -- Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -- --------------------------------------------------------------------- -- -- $Id: tc2602.vhd,v 1.2 2001-10-26 16:30:20 paw Exp $ -- $Revision: 1.2 $ -- -- --------------------------------------------------------------------- ENTITY c13s03b01x00p02n01i02602ent IS END c13s03b01x00p02n01i02602ent; ARCHITECTURE c13s03b01x00p02n01i02602arch OF c13s03b01x00p02n01i02602ent IS BEGIN TESTING: PROCESS variable k, : integer := 0; BEGIN assert FALSE report "***FAILED TEST: c13s03b01x00p02n01i02602 - Identifier can not end with ','." severity ERROR; wait; END PROCESS TESTING; END c13s03b01x00p02n01i02602arch;
{ "pile_set_name": "Github" }
{ "images" : [ { "idiom" : "universal", "scale" : "1x", "filename" : "smile_89.png" }, { "idiom" : "universal", "scale" : "2x" }, { "idiom" : "universal", "scale" : "3x" } ], "info" : { "version" : 1, "author" : "xcode" } }
{ "pile_set_name": "Github" }
#!/usr/bin/env python import argparse, os, json import seldon_utils as seldon from kazoo.client import KazooClient dir = os.path.dirname(os.path.abspath(__file__)) parser = argparse.ArgumentParser(prog="add_client", description="Adds a new client to the Seldon Server") parser.add_argument("client",nargs=1,help="The client to add") parser.add_argument("--json",help="extra JSON configuration for the client") parser.add_argument("--props",help="Relative path to the file with the props", default='../server_config.json') parser.add_argument("--db",help="The name of the DB to use (from the config file), default is to use the first one mentioned.") parser.add_argument("--zookeeper",help="Location of zookeeper (hosts)", default="localhost") args = parser.parse_args() filename = os.path.join(dir, args.props) zk = KazooClient(hosts=args.zookeeper) zk.start() if not os.path.exists(filename): print "Properties file doesn't exist at", filename, ", please create it before running this script" exit(1) with open(filename) as data_file: data = json.load(data_file) seldon.clientSetup(zk,[{"name":args.client[0],"db":args.db}],data["db"],"/all_clients") print "Finished successfully"
{ "pile_set_name": "Github" }
// Targeted by JavaCPP version 1.5.4: DO NOT EDIT THIS FILE package org.bytedeco.hyperscan; import java.nio.*; import org.bytedeco.javacpp.*; import org.bytedeco.javacpp.annotation.*; import static org.bytedeco.javacpp.presets.javacpp.*; import static org.bytedeco.hyperscan.global.hyperscan.*; /** * A structure containing additional parameters related to an expression, * passed in at build time to \ref hs_compile_ext_multi() or \ref * hs_expression_ext_info. * * These parameters allow the set of matches produced by a pattern to be * constrained at compile time, rather than relying on the application to * process unwanted matches at runtime. */ @Properties(inherit = org.bytedeco.hyperscan.presets.hyperscan.class) public class hs_expr_ext_t extends Pointer { static { Loader.load(); } /** Default native constructor. */ public hs_expr_ext_t() { super((Pointer)null); allocate(); } /** Native array allocator. Access with {@link Pointer#position(long)}. */ public hs_expr_ext_t(long size) { super((Pointer)null); allocateArray(size); } /** Pointer cast constructor. Invokes {@link Pointer#Pointer(Pointer)}. */ public hs_expr_ext_t(Pointer p) { super(p); } private native void allocate(); private native void allocateArray(long size); @Override public hs_expr_ext_t position(long position) { return (hs_expr_ext_t)super.position(position); } @Override public hs_expr_ext_t getPointer(long i) { return new hs_expr_ext_t(this).position(position + i); } /** * Flags governing which parts of this structure are to be used by the * compiler. See \ref HS_EXT_FLAG. */ public native @Cast("unsigned long long") long flags(); public native hs_expr_ext_t flags(long setter); /** * The minimum end offset in the data stream at which this expression * should match successfully. To use this parameter, set the * \ref HS_EXT_FLAG_MIN_OFFSET flag in the hs_expr_ext::flags field. 
*/ public native @Cast("unsigned long long") long min_offset(); public native hs_expr_ext_t min_offset(long setter); /** * The maximum end offset in the data stream at which this expression * should match successfully. To use this parameter, set the * \ref HS_EXT_FLAG_MAX_OFFSET flag in the hs_expr_ext::flags field. */ public native @Cast("unsigned long long") long max_offset(); public native hs_expr_ext_t max_offset(long setter); /** * The minimum match length (from start to end) required to successfully * match this expression. To use this parameter, set the * \ref HS_EXT_FLAG_MIN_LENGTH flag in the hs_expr_ext::flags field. */ public native @Cast("unsigned long long") long min_length(); public native hs_expr_ext_t min_length(long setter); /** * Allow patterns to approximately match within this edit distance. To use * this parameter, set the \ref HS_EXT_FLAG_EDIT_DISTANCE flag in the * hs_expr_ext::flags field. */ public native @Cast("unsigned") int edit_distance(); public native hs_expr_ext_t edit_distance(int setter); /** * Allow patterns to approximately match within this Hamming distance. To * use this parameter, set the \ref HS_EXT_FLAG_HAMMING_DISTANCE flag in the * hs_expr_ext::flags field. */ public native @Cast("unsigned") int hamming_distance(); public native hs_expr_ext_t hamming_distance(int setter); }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0"> <xsl:output method="html" encoding="UTF-8"/> <xsl:template match="/runscript"> <html> <head> <title><xsl:value-of select="@name"/></title> </head> <body style="font-family:Arial;font-size:90%"> <font face="Arial" size="3" color="224488"><b>JSBSim Script:<xsl:value-of select="@name"/></b></font><br/> <font face="Arial" size="2"><b>Description</b>: <xsl:value-of select="description"/></font><br/> <font face="Arial" size="2"><b>Aircraft</b>: <xsl:value-of select="use/@aircraft"/></font><br/> <font face="Arial" size="2"><b>Initial Conditions</b>:<xsl:value-of select="use/@initialize"/></font><br/> <font face="Arial" size="2"><b>Starts at</b>:<xsl:value-of select="run/@start"/></font><br/> <font face="Arial" size="2"><b>Ends at</b>:<xsl:value-of select="run/@end"/></font><br/> <font face="Arial" size="2"><b>Delta time</b>:<xsl:value-of select="run/@dt"/></font><br/> <table width="100%"> <font face="Arial" size="2"> <xsl:if test="run/property"> <tr bgcolor="EEEEEE"><td><hr width="100%"/><font face="Arial" size="2"><font color="#0033ff"><b>Local Properties</b>:</font> <ul> <xsl:for-each select="run/property"> <li><xsl:value-of select="."/></li> </xsl:for-each> </ul> </font></td></tr> </xsl:if> <xsl:for-each select="run/event"> <tr bgcolor="EEEEEE"><td><hr width="100%"/><font face="Arial" size="2" color="#0033ff"><b>Event</b>: <xsl:value-of select="@name"/></font> <xsl:if test="description"><font face="Arial" size="2"><br/><b>Description</b>: <xsl:value-of select="description"/></font></xsl:if> </td></tr> <tr><td> <font face="Arial" size="2"> <xsl:if test="condition"> <b>Test Conditions</b>: <ul> <xsl:for-each select="condition"> <li><xsl:value-of select="."/></li> </xsl:for-each> </ul> </xsl:if> <xsl:if test="set"> <!-- false if no set actions --> <b>Actions</b>: <xsl:if test="set"> <ul> <xsl:for-each select="set"> <li> <xsl:if test="@type"> Change 
<xsl:value-of select="@name"/> by <xsl:value-of select="@value"/> </xsl:if> <xsl:if test="not(@type)"> Set <xsl:value-of select="@name"/> to <xsl:value-of select="@value"/> </xsl:if> <xsl:if test="@action"> <xsl:if test="@action = 'FG_STEP'"> via step </xsl:if> <xsl:if test="@action = 'FG_EXP'"> via exponential input </xsl:if> <xsl:if test="@action = 'FG_RAMP'"> via ramp input </xsl:if> <xsl:if test="@tc"> over <xsl:value-of select="@tc"/> seconds </xsl:if> </xsl:if> </li> </xsl:for-each> </ul> </xsl:if> </xsl:if> <!-- Actions --> <xsl:if test="notify"> When this event is triggered, a notification message will be shown <xsl:if test="notify/property"> and the values of following property or properties will be displayed: <ul> <xsl:for-each select="notify/property"> <li><xsl:value-of select="."/></li> </xsl:for-each> </ul> </xsl:if> </xsl:if> </font> </td></tr> <tr><td></td></tr> </xsl:for-each> </font> </table> </body> </html> </xsl:template> </xsl:stylesheet>
{ "pile_set_name": "Github" }
{ "name" : "142.pdf", "metadata" : { "source" : "CRF", "title" : "Exploring Prediction Uncertainty in Machine Translation Quality Estimation", "authors" : [ ], "emails" : [ ], "sections" : [ { "heading" : null, "text" : "1 000\n012\n013\n014\n015\n016\n017\n018\n019\n020\n021\n022\n023\n024\n025\n026\n027\n028\n029\n030\n031\n032\n033\n034\n035\n036\n037\n038\n039\n040\n041\n042\n043\n044\n045\n046\n047\n048\n049\n050\n051\n064\n065\n066\n067\n068\n069\n070\n071\n072\n073\n074\n075\n076\n077\n078\n079\n080\n081\n082\n083\n084\n085\n086\n087\n088\n089\n090\n091\n092\n093\n094\n095\n096\n097\n098\n099\n100\n101\n102\n103" }, { "heading" : "1 Introduction", "text" : "Quality Estimation (QE) (Blatz et al., 2004; Specia et al., 2009) models aim at predicting the quality of automatically translated text segments. Traditionally, these models provide point estimates and are evaluated using metrics like Mean Absolute Error (MAE), Root-Mean-Square Error (RMSE) and Pearson’s r correlation coefficient. However, in practice QE models are built for use in decision making in large workflows involving Machine Translation (MT). In these settings, relying on point estimates would mean that only very accurate prediction models can be useful in practice.\nA way to improve decision making based on quality predictions is to explore uncertainty estimates. Consider for example a post-editing scenario where professional translators use MT in an effort to speed-up the translation process. A QE\nmodel can be used to determine if an MT segment is good enough for post-editing or should be discarded and translated from scratch. But since QE models are not perfect they can end up allowing bad MT segments to go through for postediting because of a prediction error. In such a scenario, having an uncertainty estimate for the prediction can provide additional information for the filtering decision. 
For instance, in order to ensure good user experience for the human translator and maximise translation productivity, an MT segment could be forwarded for post-editing only if a QE model assigns a high quality score with low uncertainty (high confidence). Such a decision process is not possible with point estimates only.\nGood uncertainty estimates can be acquired from well-calibrated probability distributions over the quality predictions. In QE, arguably the most successful probabilistic models are Gaussian Processes (GPs) since they considered the state-ofthe-art for regression (Cohn and Specia, 2013; Hensman et al., 2013), especially in the low-data regimes typical for this task. We focus our analysis in this paper on GPs since other common models used in QE can only provide point estimates as predictions. Another reason why we focus on probabilistic models is because this lets us employ the ideas proposed by Quiñonero-Candela et al. (2006), which defined new evaluation metrics that take into account probability distributions over predictions.\nThe remaining of this paper is organized as follows:\n• In Section 2 we further motivate the use of GPs for uncertainty modelling in QE and revisit their underlying theory. 
We also propose some model extensions previously developed in the GP literature and argue they are more appropriate for the task.\n2\n105\n106\n107\n108\n109\n110\n111\n112\n113\n114\n115\n116\n117\n118\n119\n120\n121\n122\n123\n124\n125\n126\n127\n128\n129\n130\n131\n132\n133\n134\n135\n136\n137\n138\n139\n140\n141\n142\n143\n144\n145\n146\n147\n148\n149\n150\n151\n152\n153\n154\n155\n156\n157\n158\n159\n160\n161\n162\n163\n164\n165\n166\n167\n168\n169\n170\n171\n172\n173\n174\n175\n176\n177\n178\n179\n180\n181\n182\n183\n184\n185\n186\n187\n188\n189\n190\n191\n192\n193\n194\n195\n196\n197\n198\n199\n200\n201\n202\n203\n204\n205\n206\n207\n• We intrinsically evaluate our proposed models in terms of their posterior distributions on training and test data in Section 3. Specifically, we show that differences in uncertainty modelling are not captured by the usual point estimate metrics commonly used for this task.\n• As an example of an application for predicitive distributions, in Section 4 we show how they can be useful in scenarios with asymmetric risk and how the proposed models can provide better performance in this case.\nWe discuss related work in Section 5 and give conclusions and avenues for future work in Section 6.\nWhile we focus on QE as application, the methods we explore in this paper can be applied to any text regression task where modelling predictive uncertainty is useful, either in human decision making or by propagating this information for further computational processing." }, { "heading" : "2 Probabilistic Models for QE", "text" : "Traditionally, QE is treated as a regression task with hand-crafted features. Kernel methods are arguably the state-of-the-art in QE since they can easily model non-linearities in the data. 
Furthermore, the scalability issues that arise in kernel methods do not tend to affect QE in practice since the datasets are usually small, in the order of thousands of instances.\nThe most popular method for QE is Support Vector Regression (SVR), as shown in the multiple instances of the WMT QE shared tasks (Callisonburch et al., 2012; Bojar et al., 2013; Bojar et al., 2014; Bojar et al., 2015). While SVR models can generate competitive predictions for this task, they lack a probabilistic interpretation, which makes it hard to extract uncertainty estimates using them. Bootstrapping approaches like bagging (Abe and Mamitsuka, 1998) can be applied, but this requires setting and optimising hyperparameters like bag size and number of bootstraps. There is also no guarantee these estimates come from a well-calibrated probabilistic distribution.\nGaussian Processes (GPs) (Rasmussen and Williams, 2006) is an alternative kernel-based framework that gives competitive results for point estimates (Cohn and Specia, 2013; Shah et al., 2013; Beck et al., 2014b). Unlike SVR, they explicitly model uncertainty in the data and in the predictions. This makes GPs very applicable when\nwell-calibrated uncertainty estimates are required. Furthermore, they are very flexible in terms of modelling decisions by allowing the use of a variety of kernels and likelihoods while providing efficient ways of doing model selection. Therefore, in this work we focus on GPs for probabilistic modelling of QE. In what follows we briefly describe the GPs framework for regression." }, { "heading" : "2.1 Gaussian Process Regression", "text" : "Here we follow closely the definition of GPs given by Rasmussen and Williams (2006). Let X = {(x1, y1), (x2, y2), . . . , (xn, yn)} be our data, where each x ∈ RD is a D-dimensional input and y is its corresponding response variable. 
A GP is defined as a stochastic model over the latent function f that generates the data X :\nf(x) ∼ GP(m(x), k(x,x′)),\nwhere m(x) is the mean function, which is usually the 0 constant, and k(x,x′) is the kernel or covariance function, which describes the covariance between values of f at the different locations of x and x′.\nThe prior is combined with a likelihood via Bayes’ rule to obtain a posterior over the latent function:\np(f |X ) = p(y|X, f)p(f) p(y|X) ,\nwhere X and y are the training inputs and response variables, respectively. For regression, we assume that each yi = f(xi) + η, where η ∼ N (0, σ2n) is added white noise. Having a Gaussian likelihood results in a closed form solution for the posterior.\nTraining a GP involves the optimisation of model hyperparameters, which is done by maximising the marginal likelihood p(y|X) via gradient ascent. Predictive posteriors for unseen x∗ are obtained by integrating over the latent function evaluations at x∗.\nGPs can be extended in many different ways by applying different kernels, likelihoods and modifying the posterior, for instance. In the next Sections, we explain in detail some sensible modelling choices in applying GPs for QE." }, { "heading" : "2.2 Matèrn Kernels", "text" : "Choosing an appropriate kernel is a crucial step in defining a GP model (and any other kernel\n3\n209\n210\n211\n212\n213\n214\n215\n216\n217\n218\n219\n220\n221\n222\n223\n224\n225\n226\n227\n228\n229\n230\n231\n232\n233\n234\n235\n236\n237\n238\n239\n240\n241\n242\n243\n244\n245\n246\n247\n248\n249\n250\n251\n252\n253\n254\n255\n256\n257\n258\n259\n260\n261\n262\n263\n264\n265\n266\n267\n268\n269\n270\n271\n272\n273\n274\n275\n276\n277\n278\n279\n280\n281\n282\n283\n284\n285\n286\n287\n288\n289\n290\n291\n292\n293\n294\n295\n296\n297\n298\n299\n300\n301\n302\n303\n304\n305\n306\n307\n308\n309\n310\n311\nmethod). 
A common choice is to employ the exponentiated quadratic (EQ) kernel1:\nkEQ(x,x ′) = σv exp(−\nr2\n2 ) ,\nwhere r2 = D∑ i=1 (xi − x′i)2 l2i\nis the scaled distance between the two inputs, σv is a scale hyperparameter and l is a vector of lengthscales. Most kernel methods tie all lengthscale to a single value, resulting in an isotropic kernel. However, since in GPs hyperparameter optimisation can be done efficiently, it is common to employ one lengthscale per feature, a method called Automatic Relevance Determination (ARD).\nThe EQ kernel allows the modelling of nonlinearities between the inputs and the response variables but it makes a strong assumption: it generates smooth, infinitely differentiable functions. This assumption can be too strong for noisy data. An alternative is the Matèrn class of kernels, which relax the smoothness assumption by modelling functions which are ν-times differentiable only. Common values for ν are the half-integers 3/2 and 5/2, resulting in the following Matèrn kernels:\nkM32 = σv(1 + √ 3r2) exp(− √ 3r2)\nkM52 = σv\n( 1 + √ 5r2 + 5r2\n3\n) exp(− √ 5r2) ,\nwhere we have omitted the dependence of kM32 and kM52 on the inputs (x,x′) for brevity. Higher values for ν are usually not very useful since the resulting behaviour is hard to distinguish from limit case ν → ∞, which retrieves the EQ kernel (Rasmussen and Williams, 2006, Sec. 4.2).\nThe relaxed smoothness assumptions from the Matèrn kernels makes them promising candidates for QE datasets, which tend to be very noisy. We expect that employing them will result in a better models for this application." }, { "heading" : "2.3 Warped Gaussian Processes", "text" : "The Gaussian likelihood of standard GPs has support over the entire real number line. However, common quality scores are strictly positive values, which means that the Gaussian assumption\n1Also known as Radial Basis Function (RBF) kernel.\nis not ideal. 
A usual way to deal with this problem is model the logarithm of the response variables, since this transformation maps strictly positive values to the real line. However, there is no reason to believe this is the best possible mapping: a better idea would be to learn it from the data.\nWarped GPs (Snelson et al., 2004) are an extension of GPs that allows the learning of arbitrary mappings. It does that by placing a monotonic warping function over the observations and modelling the warped values inside a standard GP. The posterior distribution is obtained by applying a change of variables:\np(y∗|x∗) = f ′(y∗)√ 2πσ2∗ exp\n( f(y∗)− µ∗\n2σ∗\n) ,\nwhere µ∗ and σ∗ are the mean and standard deviation of the latent (warped) response variable and f and f ′ are the warping function and its derivative.\nPoint predictions from this model depend on the loss function to be minimised. For absolute error, the median is the optimal value while for squared error it is the mean of the posterior. In standard GPs, since the posterior is Gaussian the median and mean coincide but this in general is not the case for a Warped GP posterior. The median can be easily obtained by applying the inverse warping function to the latent median:\nymed∗ = f −1(µ∗).\nWhile the inverse of the warping function is usually not available in closed form, we can use its gradient to have a numerical estimate.\nThe mean is obtained by integrating y∗ over the latent density:\nE[y∗] = ∫ f−1(z)Nz(µ∗, σ2∗)dz,\nwhere z is the latent variable. This can be easily approximated using Gauss-Hermite quadrature since it is a one dimensional integral over a Gaussian density.\nThe warping function should be flexible enough to allow the learning of complex mappings, but it needs to be monotonic. Snelson et al. 
(2004) proposes a parametric form composed of a sum of tanh functions, similar to a neural network layer:\nf(y) = y + I∑ i=1 ai tanh(bi(y + ci)),\n4\n313\n314\n315\n316\n317\n318\n319\n320\n321\n322\n323\n324\n325\n326\n327\n328\n329\n330\n331\n332\n333\n334\n335\n336\n337\n338\n339\n340\n341\n342\n343\n344\n345\n346\n347\n348\n349\n350\n351\n352\n353\n354\n355\n356\n357\n358\n359\n360\n361\n362\n363\n364\n365\n366\n367\n368\n369\n370\n371\n372\n373\n374\n375\n376\n377\n378\n379\n380\n381\n382\n383\n384\n385\n386\n387\n388\n389\n390\n391\n392\n393\n394\n395\n396\n397\n398\n399\n400\n401\n402\n403\n404\n405\n406\n407\n408\n409\n410\n411\n412\n413\n414\n415\nwhere I is the number of tanh terms and a,b and c are treated as model hyperparameters and optimised jointly with the kernel and likelihood hyperparameters. Large values for I allow more complex mappings to be learned but raise the risk of overfitting.\nWarped GPs provide an easy and elegant way to model response variables with non-Gaussian behaviour within the GP framework. In our experiments we explore models employing warping functions with up to 3 terms, which is the value recommended by Snelson et al. (2004). We also report results using the f(y) = log(y) warping function." }, { "heading" : "3 Intrinsic Uncertainty Evaluation", "text" : "Given a set of different probabilistic QE models, we are interested in evaluating the performance of these models, while also taking their uncertainty into account, particularly to distinguish among models with seemingly same or similar performance. A straightforward way to measure the performance of a probabilistic model is to inspect its negative (log) marginal likelihood. This measure, however, does not capture if a model overfit the training data.\nWe can have a better generalization measure by calculating the likelihood on test data instead. 
This was proposed in previous work and it is called Negative Log Predictive Density (NLPD) (Quiñonero-Candela et al., 2006):\nNLPD(ŷ,y) = − 1 n n∑ i=1 log p(ŷi = yi|xi).\nwhere ŷ is a set of test predictions, y is the set of true labels and n is the test set size. This metric has since been largely adopted by the ML community when evaluating GPs and other probabilistic models for regression (see Section 5 for some examples).\nAs with other error metrics, lower values are better. Intuitively, if two models produce equally incorrect predictions but they have different uncertainty estimates, NLPD will penalise the overconfident model more than the underconfident one. On the other hand, if predictions are close to the true value then NLPD will penalise the underconfident model instead.\nIn our first set of experiments we evaluate models proposed in Section 2 according to their negative log likelihood (NLL) and the NLPD on test\ndata. We also report two point estimate metrics on test data: Mean Absolute Error (MAE), the most commonly used evaluation metric in QE, and Pearson’s r, which has recently proposed by Graham (2015) as a more robust alternative." }, { "heading" : "3.1 Experimental Settings", "text" : "Our experiments comprise datasets containing three different language pairs, where the label to predict is post-editing time:\nEnglish-Spanish (en-es) This dataset was used in the WMT14 QE shared task (Bojar et al., 2014). It contains 858 sentences translated by one MT system and post-edited by a professional translator.\nFrench-English (fr-en) Described in (Specia, 2011), this dataset contains 2, 525 sentences translated by one MT system and post-edited by a professional translator.\nEnglish-German (en-de) This dataset is part of the WMT16 QE shared task2. 
It was translated by one MT system for consistency we use a subset of 2, 828 instances post-edited by a single professional translator.\nAs part of the process of creating these datasets, post-editing time was logged on an sentence basis for all datasets. Following common practice, we normalise the post-editing time by the length of the machine translated sentence to obtain postediting rates and use these as our response variables. For model building, we use a standard set of 17 features from the QuEst framework (Specia et al., 2015). These features are used in the strong baseline models provided by the WMT QE shared tasks. While the best performing systems in the shared tasks use larger feature sets, these are mostly resource-intensive and languagedependent, and therefore not equally applicable to all our language pairs. Moreover, our goal is to compare probabilistic QE models through the predictive uncertainty perspective, rather than improving the state-of-the-art in terms of point predictions. We perform 10-fold cross validation instead of using a single train/test splits and report averaged metric scores.\nThe model hyperparameters were optimised by maximising the likelihood on the training data. We perform a two-pass procedure similar to that\n2www.statmt.org/wmt16\n5\n417\n418\n419\n420\n421\n422\n423\n424\n425\n426\n427\n428\n429\n430\n431\n432\n433\n434\n435\n436\n437\n438\n439\n440\n441\n442\n443\n444\n445\n446\n447\n448\n449\n450\n451\n452\n453\n454\n455\n456\n457\n458\n459\n460\n461\n462\n463\n464\n465\n466\n467\n510\n511\n512\n513\n514\n515\n516\n517\n518\n519\nin (Cohn and Specia, 2013): first we employ an isotropic kernel and optimise all hyperparameters using 10 random restarts; then we move to an ARD equivalent kernel and perform a final optimisation step to fine tune feature lengthscales. Point predictions were fixed as the median of the distribution." 
}, { "heading" : "3.2 Results and Discussion", "text" : "Table 1 shows the results obtained for all datasets. The first two columns shows an interesting finding in terms of model learning: using a warping function drastically decreases both NLL and NLPD. The main reason behind this is that standard GPs distribute probability mass over negative values, while the warped models do not. For the fr-en and en-de datasets, NLL and NLPD follow similar trends. This means that we can trust NLL as a measure of uncertainty for these datasets. However, this is not observed in the en-es dataset. Since this dataset is considerably smaller than the others, we believe this is evidence of overfitting, thus showing that NLL is not a reliable metric for small datasets.\nIn terms of different warping functions, using the parametric tanh function with 3 terms performs better than the log for the fr-en and en-de datasets. This is not the case of the en-es dataset, where the log function tends to perform better. We believe that this is again due to the smaller dataset size. The gains from using a Matèrn kernel over EQ are less conclusive. While they do tend to perform better for fr-en, there does not seem to be any difference in the other datasets. Different kernels might be more appropriate depending on the language pair but more experiments are needed to check if this is the case, which we leave for future work.\nThe differences in uncertainty modelling are by and large not captured by the point estimate metrics. While MAE does show gains from standard to Warped GPs, it does not reflect the difference found between warping functions for fr-en. Pearson’s r is also quite inconclusive in this sense, except for some observed gains for en-es. 
This shows that NLPD indeed should be preferred as a evaluation metric when proper prediction uncertainty estimates are required by a QE model.\n6\n521\n522\n523\n524\n525\n526\n527\n528\n529\n530\n531\n532\n533\n534\n535\n536\n537\n538\n539\n540\n541\n542\n543\n544\n545\n546\n547\n548\n549\n550\n551\n552\n553\n554\n555\n556\n557\n558\n559\n560\n561\n562\n563\n564\n565\n566\n567\n568\n569\n570\n571\n572\n573\n574\n575\n576\n577\n578\n579\n599\n600\n601\n602\n603\n604\n605\n606\n607\n608\n609\n610\n611\n612\n613\n614\n615\n616\n617\n618\n619\n620\n621\n622\n623" }, { "heading" : "4 Asymmetric Risk Scenarios", "text" : "Evaluation metrics for QE, including those used in the WMT QE shared tasks, are assumed to be symmetric, i.e., they penalise over and underestimates equally. This assumption is however too simplistic for many possible applications of QE. For example:\n• In a post-editing scenario, a project manager may have translators with limited expertise in post-editing. In this case, automatic translations should not be provided to the translator unless they are highly likely to have very good quality. This can be enforced this by increasing the penalisation weight for underestimates. We call this the pessimistic scenario.\n• In a gisting scenario, a company wants to automatically translate their product reviews so that they can be published in a foreign language without human intervention. The company would prefer to publish only the reviews translated well enough, but having more reviews published will increase the chances of selling products. In this case, having better recall is more important and thus only reviews with very poor translation quality should be discarded. We can accomplish this by heavier penalisation on overestimates, a scenario we call optimistic.\nIn this Section we show how these scenarios can be addressed by well-calibrated predictive distributions and by employing asymmetric loss functions. 
An example of such a function is the asymmetric linear (henceforth, AL) loss, which is a generalisation of the absolute error:\nL(ŷ, y) = { w(ŷ − y) if ŷ > y y − ŷ if ŷ ≤ y,\nwhere w > 0 is the weight given to overestimates. If w > 1 we have the pessimistic scenario, and the optimistic one can be obtained using 0 < w < 1. For w = 1 we retrieve the original absolute error loss.\nAnother asymmetric loss is the linear exponential or linex loss (Zellner, 1986):\nL(ŷ, y) = exp[w(ŷ − y)]− (ŷ − y)− 1\nwhere w ∈ R is the weight. This loss attempts to keep a linear penalty in lesser risk regions, while\nimposing an exponential penalty in the higher risk ones. Negative values for w will result in a pessimistic setting, while positive values will result in the optimistic one. For w = 0, the loss approximates a squared error loss. Usual values for w tend to be close to 1 or−1 since for higher weights the loss can quickly reach very large scores. Both losses are shown on Figure 1." }, { "heading" : "4.1 Bayes Risk for Asymmetric Losses", "text" : "The losses introduced above can be incorporated directly into learning algorithms to obtain models for a given scenario. In the context of the AL loss this is called quantile regression (Koenker, 2005), since optimal estimators for this loss are posterior quantiles. However, in a production environment the loss can change over time. For instance, in the gisting scenario discussed above the parameter w could be changed based on feedback from indicators of sales revenue or user experience. If the loss is attached to the underlying learning algorithms, a change in w would require full model retraining, which can be costly.\nInstead of retraining the model everytime there is a different loss, we can train a single probabilistic model and derive Bayes risk estimators for the loss we are interested in. 
This allows estimates to\n7\n625\n626\n627\n628\n629\n630\n631\n632\n633\n634\n635\n636\n637\n638\n639\n640\n641\n642\n643\n644\n645\n646\n647\n648\n649\n650\n651\n652\n653\n654\n655\n656\n657\n658\n659\n660\n661\n662\n663\n664\n665\n666\n667\n668\n669\n670\n671\n672\n673\n674\n675\n704\n705\n706\n707\n708\n709\n710\n711\n712\n713\n714\n715\n716\n717\n718\n719\n720\n721\n722\n723\n724\n725\n726\n727\nbe obtained without having to retrain models when the loss changes. Additionally, this allows different losses/scenarios to be employed at the same time using the same model.\nMinimum Bayes risk estimators for asymmetric losses were proposed by Christoffersen and Diebold (1997) and we follow their derivations in our experiments. The best estimator for the AL loss is equivalent to the ww+1 quantile of the predictive distribution. Note that we retrieve the median when w = 1, as expected. The best estimator for the linex loss can be easily derived and results in:\nŷ = µy − wσ2y 2\nwhere µy and σ2y are the mean and the variance of the predictive posterior." }, { "heading" : "4.2 Experimental Settings", "text" : "Here we assess the models and datasets used in Section 3.1 in terms of their performance in the asymmetric setting. Following the explanation in the previous Section, we do not perform any retraining: we collect the predictions obtained using the 10-fold cross-validation protocol and apply different Bayes estimators corresponding to the asymmetric losses. Evaluation is performed using the same loss employed in the estimator (for instance, when using the linex estimator with w = 0.75 we report the results using the linex loss with same w) and averaged over the 10 folds.\nTo simulate both pessimistic and optimistic scenarios, we use w ∈ {3, 1/3} for the AL loss and w ∈ {−0.75, 0.75} for the linex loss. The only exception is the en-de dataset, where we report results for w ∈ −0.25, 0.75 for linex3. We also report results only for models using the Matèrn52 kernel. 
While we did experiment with different kernels and weighting schemes4 our findings showed similar trends so we omit them for the sake of clarity." }, { "heading" : "4.3 Results and Discussion", "text" : "Results are shown on Table 2. In the optimistic scenario the tanh-based warped GP models give\n3Using w = −0.75 in this case resulted in loss values on the order of 107. In fact, as it will be discussed in the next Section, the results for the linex loss in the pessimistic scenario were inconclusive. However, we report results using a higher w in this case for completeness and to clarify the inconclusive trends we found.\n4We also tried w ∈ {1/9, 1/7, 1/5, 5, 7, 9} for the AL loss and w ∈ {−0.5,−0.25, 0.25, 0.5} for the linex loss.\nconsistently better results than standard GPs. The log-based models also gives good results for AL but for Linex the results are mixed except for enes. This is probably again related to the larger sizes of the fr-en and en-de datasets, which allows the tanh-based models to learn richer representations.\nThe pessimistic scenario shows interesting trends. While the results for AL follow a similar pattern when compared to the optimistic setting, the results for linex are consistently worse than the standard GP baseline. A key difference between AL and linex is that the latter depends on the\n8\n729\n730\n731\n732\n733\n734\n735\n736\n737\n738\n739\n740\n741\n742\n743\n744\n745\n746\n747\n748\n749\n750\n751\n752\n753\n754\n755\n756\n757\n758\n759\n760\n761\n762\n763\n764\n765\n766\n767\n768\n769\n770\n771\n772\n773\n774\n775\n776\n777\n778\n779\n780\n781\n782\n783\n784\n785\n786\n787\n788\n789\n790\n791\n792\n793\n794\n795\n796\n797\n798\n799\n800\n801\n802\n803\n804\n805\n806\n807\n808\n809\n810\n811\n812\n813\n814\n815\n816\n817\n818\n819\n820\n821\n822\n823\n824\n825\n826\n827\n828\n829\n830\n831\nvariance of the predictive distribution. 
Since the warped models tend to have less variance, we believe the estimator is not being “pushed” towards the positive tails as much as in the standard GPs. This turns the resulting predictions not conservative enough (i.e. the post-editing time predicitions are lower) and this is heavily (exponentially) penalized by the loss. This might be a case where a standard GP is preferred but can also indicate that this loss is biased towards models with high variance, even if it does that by assigning probability mass to nonsensical values (like negative time). We leave further investigation of this phenomenon for future work." }, { "heading" : "5 Related Work", "text" : "Quality Estimation is generally framed as text regression task, similarly to many other applications such as movie revenue forecasting based on reviews (Joshi et al., 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models.\nThe NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al. (2013). It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015).\nAsymmetric loss functions are common in the econometrics literature and were studied by Zellner (1986) and Koenker (2005), among others. Besides the AL and the linex, another well studied loss is the asymmetric quadratic, which in turn relates to the concept of expectiles (Newey and Powell, 1987). This loss generalises the commonly used squared error loss. 
In terms of applications, Cain and Janssen (1995) gives an example in real estate assessment, where the consequences of under and overassessment are usually different depending on the specific scenario. An engineering example is given by Zellner (1986) in the context of dam construction, where an underestimate of peak water level is much more serious than an\noverestimate. Such real-world applications guided many developments in this field: we believe that translation and other language processing scenarios which rely on NLP technologies can heavily benefit from these advancements." }, { "heading" : "6 Conclusions", "text" : "This work explored new probabilistic models for machine translation QE that allow better uncertainty estimates. We proposed the use of NLPD, which can capture information on the whole predictive distribution, unlike usual point estimatebased metrics. By assessing models using NLPD we can make better informed decisions about which model to employ for different settings. Furthermore, we showed how information in the predictive distribution can be used in asymmetric loss scenarios and how the proposed models can be beneficial in these settings.\nUncertainty estimates can be useful in many other settings beyond the ones explored in this work. Active Learning can benefit from variance information in their query methods and it has shown to be useful for QE (Beck et al., 2013). Exploratory analysis is another avenue for future work, where error bars can provide further insights about the task, as shown in recent work (Nguyen and O’Connor, 2015). This kind of analysis can be useful for tracking post-editor behaviour and assessing cost estimates for translation projects, for instance.\nOur main goal in this paper was to raise awareness about how different modelling aspects should be taken into account when building QE models. 
Decision making can be risky using simple point estimates and we believe that uncertainty information can be beneficial in such scenarios by providing more informed solutions. These ideas are not restricted to QE and we hope to see similar studies in other natural language applications in the future." } ], "references" : [ { "title" : "Query learning strategies using boosting and bagging", "author" : [ "Naoki Abe", "Hiroshi Mamitsuka." ], "venue" : "Proceedings of the Fifteenth International Conference on Machine Learning, pages 1–9.", "citeRegEx" : "Abe and Mamitsuka.,? 1998", "shortCiteRegEx" : "Abe and Mamitsuka.", "year" : 1998 }, { "title" : "Reducing Annotation Effort for Quality Estimation via Active Learning", "author" : [ "Daniel Beck", "Lucia Specia", "Trevor Cohn." ], "venue" : "Proceedings of ACL.", "citeRegEx" : "Beck et al\\.,? 2013", "shortCiteRegEx" : "Beck et al\\.", "year" : 2013 }, { "title" : "Joint Emotion Analysis via Multi-task Gaussian Processes", "author" : [ "Daniel Beck", "Trevor Cohn", "Lucia Specia." ], "venue" : "Proceedings of EMNLP, pages 1798– 1803.", "citeRegEx" : "Beck et al\\.,? 2014a", "shortCiteRegEx" : "Beck et al\\.", "year" : 2014 }, { "title" : "SHEF-Lite 2.0 : Sparse Multi-task Gaussian Processes for Translation Quality Estimation", "author" : [ "Daniel Beck", "Kashif Shah", "Lucia Specia" ], "venue" : "In Proceedings of WMT14,", "citeRegEx" : "Beck et al\\.,? \\Q2014\\E", "shortCiteRegEx" : "Beck et al\\.", "year" : 2014 }, { "title" : "Non-Linear Text Regression with a Deep Convolutional Neural Network", "author" : [ "Zsolt Bitvai", "Trevor Cohn." ], "venue" : "Proceedings of ACL.", "citeRegEx" : "Bitvai and Cohn.,? 2015", "shortCiteRegEx" : "Bitvai and Cohn.", "year" : 2015 }, { "title" : "Confidence estimation for machine translation", "author" : [ "John Blatz", "Erin Fitzgerald", "George Foster." 
], "venue" : "Proceedings of the 20th Conference on Computational Linguistics, pages 315–321.", "citeRegEx" : "Blatz et al\\.,? 2004", "shortCiteRegEx" : "Blatz et al\\.", "year" : 2004 }, { "title" : "Findings of the 2013 Workshop on Statistical Machine Translation", "author" : [ "Ondej Bojar", "Christian Buck", "Chris Callison-Burch", "Christian Federmann", "Barry Haddow", "Philipp Koehn", "Christof Monz", "Matt Post", "Radu Soricut", "Lucia Specia." ], "venue" : "Proceedings", "citeRegEx" : "Bojar et al\\.,? 2013", "shortCiteRegEx" : "Bojar et al\\.", "year" : 2013 }, { "title" : "Real Estate Price Prediction under Asymmetric Loss", "author" : [ "Michael Cain", "Christian Janssen." ], "venue" : "Annals of the Institute of Statististical Mathematics, 47(3):401–414.", "citeRegEx" : "Cain and Janssen.,? 1995", "shortCiteRegEx" : "Cain and Janssen.", "year" : 1995 }, { "title" : "Findings of the 2012 Workshop on Statistical Machine Translation", "author" : [ "Chris Callison-burch", "Philipp Koehn", "Christof Monz", "Matt Post", "Radu Soricut", "Lucia Specia." ], "venue" : "Proceedings of WMT12.", "citeRegEx" : "Callison.burch et al\\.,? 2012", "shortCiteRegEx" : "Callison.burch et al\\.", "year" : 2012 }, { "title" : "A Framework for Evaluating Approximation Methods for Gaussian Process Regression", "author" : [ "Krzysztof Chalupka", "Christopher K.I. Williams", "Iain Murray." ], "venue" : "Journal of Machine Learning Research, 14:333–350.", "citeRegEx" : "Chalupka et al\\.,? 2013", "shortCiteRegEx" : "Chalupka et al\\.", "year" : 2013 }, { "title" : "Optimal Prediction Under Asymmetric Loss", "author" : [ "Peter F. Christoffersen", "Francis X. Diebold." ], "venue" : "Econometric Theory, 13(06):808–817.", "citeRegEx" : "Christoffersen and Diebold.,? 
1997", "shortCiteRegEx" : "Christoffersen and Diebold.", "year" : 1997 }, { "title" : "Modelling Annotator Bias with Multi-task Gaussian Processes: An Application to Machine Translation Quality Estimation", "author" : [ "Trevor Cohn", "Lucia Specia." ], "venue" : "Proceedings of ACL, pages 32–42.", "citeRegEx" : "Cohn and Specia.,? 2013", "shortCiteRegEx" : "Cohn and Specia.", "year" : 2013 }, { "title" : "Improving Evaluation of Machine Translation Quality Estimation", "author" : [ "Yvette Graham." ], "venue" : "Proceedings of ACL.", "citeRegEx" : "Graham.,? 2015", "shortCiteRegEx" : "Graham.", "year" : 2015 }, { "title" : "Gaussian Processes for Big Data", "author" : [ "James Hensman", "Nicolò Fusi", "Neil D. Lawrence." ], "venue" : "Proceedings of UAI, pages 282–290.", "citeRegEx" : "Hensman et al\\.,? 2013", "shortCiteRegEx" : "Hensman et al\\.", "year" : 2013 }, { "title" : "Probabilistic Backpropagation for Scalable Learning of Bayesian Neural Networks", "author" : [ "José Miguel Hernández-Lobato", "Ryan P. Adams." ], "venue" : "Proceedings of ICML.", "citeRegEx" : "Hernández.Lobato and Adams.,? 2015", "shortCiteRegEx" : "Hernández.Lobato and Adams.", "year" : 2015 }, { "title" : "Movie Reviews and Revenues: An Experiment in Text Regression", "author" : [ "Mahesh Joshi", "Dipanjan Das", "Kevin Gimpel", "Noah A. Smith." ], "venue" : "Proceedings of NAACL.", "citeRegEx" : "Joshi et al\\.,? 2010", "shortCiteRegEx" : "Joshi et al\\.", "year" : 2010 }, { "title" : "Quantile Regression", "author" : [ "Roger Koenker." ], "venue" : "Cambridge University Press.", "citeRegEx" : "Koenker.,? 2005", "shortCiteRegEx" : "Koenker.", "year" : 2005 }, { "title" : "Bayesian Warped Gaussian Processes", "author" : [ "Miguel Lázaro-Gredilla." ], "venue" : "Proceedings of NIPS, pages 1–9.", "citeRegEx" : "Lázaro.Gredilla.,? 2012", "shortCiteRegEx" : "Lázaro.Gredilla.", "year" : 2012 }, { "title" : "Lyrics, Music, and Emotions", "author" : [ "Rada Mihalcea", "Carlo Strapparava." 
], "venue" : "Proceedings of the Joint Conference on Empirical Methods in Natural Language Processing and Computational Natural Language Learning, pages 590–599.", "citeRegEx" : "Mihalcea and Strapparava.,? 2012", "shortCiteRegEx" : "Mihalcea and Strapparava.", "year" : 2012 }, { "title" : "Asymmetric Least Squares Estimation and Testing", "author" : [ "Whitney K. Newey", "James L. Powell." ], "venue" : "Econometrica, 55(4).", "citeRegEx" : "Newey and Powell.,? 1987", "shortCiteRegEx" : "Newey and Powell.", "year" : 1987 }, { "title" : "Posterior Calibration and Exploratory Analysis for Natural Language Processing Models", "author" : [ "Khanh Nguyen", "Brendan O’Connor" ], "venue" : "In Proceedings of EMNLP, number September,", "citeRegEx" : "Nguyen and O.Connor.,? \\Q2015\\E", "shortCiteRegEx" : "Nguyen and O.Connor.", "year" : 2015 }, { "title" : "Evaluating Predictive Uncertainty Challenge", "author" : [ "Joaquin Quiñonero-Candela", "Carl Edward Rasmussen", "Fabian Sinz", "Olivier Bousquet", "Bernhard Schölkopf." ], "venue" : "MLCW 2005, Lecture Notes in Computer Science, 3944:1–27.", "citeRegEx" : "Quiñonero.Candela et al\\.,? 2006", "shortCiteRegEx" : "Quiñonero.Candela et al\\.", "year" : 2006 }, { "title" : "Gaussian processes for machine learning, volume 1", "author" : [ "Carl Edward Rasmussen", "Christopher K.I. Williams." ], "venue" : "MIT Press Cambridge.", "citeRegEx" : "Rasmussen and Williams.,? 2006", "shortCiteRegEx" : "Rasmussen and Williams.", "year" : 2006 }, { "title" : "An Investigation on the Effectiveness of Features for Translation Quality Estimation", "author" : [ "Kashif Shah", "Trevor Cohn", "Lucia Specia." ], "venue" : "Proceedings of MT Summit XIV.", "citeRegEx" : "Shah et al\\.,? 2013", "shortCiteRegEx" : "Shah et al\\.", "year" : 2013 }, { "title" : "Warped Gaussian Processes", "author" : [ "Edward Snelson", "Carl Edward Rasmussen", "Zoubin Ghahramani." ], "venue" : "Proceedings of NIPS.", "citeRegEx" : "Snelson et al\\.,? 
2004", "shortCiteRegEx" : "Snelson et al\\.", "year" : 2004 }, { "title" : "Estimating the sentence-level quality of machine translation systems", "author" : [ "Lucia Specia", "Nicola Cancedda", "Marc Dymetman", "Marco Turchi", "Nello Cristianini." ], "venue" : "Proceedings of EAMT, pages 28–35.", "citeRegEx" : "Specia et al\\.,? 2009", "shortCiteRegEx" : "Specia et al\\.", "year" : 2009 }, { "title" : "Multi-level Translation Quality Prediction with QUEST++", "author" : [ "Lucia Specia", "Gustavo Henrique Paetzold", "Carolina Scarton." ], "venue" : "Proceedings of ACL Demo Session, pages 850–850.", "citeRegEx" : "Specia et al\\.,? 2015", "shortCiteRegEx" : "Specia et al\\.", "year" : 2015 }, { "title" : "Exploiting Objective Annotations for Measuring Translation Post-editing Effort", "author" : [ "Lucia Specia." ], "venue" : "Proceedings of EAMT, pages 73–80.", "citeRegEx" : "Specia.,? 2011", "shortCiteRegEx" : "Specia.", "year" : 2011 }, { "title" : "Learning to identify emotions in text", "author" : [ "Carlo Strapparava", "Rada Mihalcea." ], "venue" : "Proceedings of the 2008 ACM Symposium on Applied Computing, pages 1556–1560.", "citeRegEx" : "Strapparava and Mihalcea.,? 2008", "shortCiteRegEx" : "Strapparava and Mihalcea.", "year" : 2008 }, { "title" : "Bayesian Estimation and Prediction Using Asymmetric Loss Functions", "author" : [ "Arnold Zellner." ], "venue" : "Journal of the American Statistical Association, 81(394):446–451.", "citeRegEx" : "Zellner.,? 
1986", "shortCiteRegEx" : "Zellner.", "year" : 1986 } ], "referenceMentions" : [ { "referenceID" : 5, "context" : "Quality Estimation (QE) (Blatz et al., 2004; Specia et al., 2009) models aim at predicting the quality of automatically translated text segments.", "startOffset" : 24, "endOffset" : 65 }, { "referenceID" : 25, "context" : "Quality Estimation (QE) (Blatz et al., 2004; Specia et al., 2009) models aim at predicting the quality of automatically translated text segments.", "startOffset" : 24, "endOffset" : 65 }, { "referenceID" : 11, "context" : "In QE, arguably the most successful probabilistic models are Gaussian Processes (GPs) since they considered the state-ofthe-art for regression (Cohn and Specia, 2013; Hensman et al., 2013), especially in the low-data regimes typical for this task.", "startOffset" : 143, "endOffset" : 188 }, { "referenceID" : 13, "context" : "In QE, arguably the most successful probabilistic models are Gaussian Processes (GPs) since they considered the state-ofthe-art for regression (Cohn and Specia, 2013; Hensman et al., 2013), especially in the low-data regimes typical for this task.", "startOffset" : 143, "endOffset" : 188 }, { "referenceID" : 5, "context" : "Quality Estimation (QE) (Blatz et al., 2004; Specia et al., 2009) models aim at predicting the quality of automatically translated text segments. Traditionally, these models provide point estimates and are evaluated using metrics like Mean Absolute Error (MAE), Root-Mean-Square Error (RMSE) and Pearson’s r correlation coefficient. However, in practice QE models are built for use in decision making in large workflows involving Machine Translation (MT). In these settings, relying on point estimates would mean that only very accurate prediction models can be useful in practice. A way to improve decision making based on quality predictions is to explore uncertainty estimates. 
Consider for example a post-editing scenario where professional translators use MT in an effort to speed-up the translation process. A QE model can be used to determine if an MT segment is good enough for post-editing or should be discarded and translated from scratch. But since QE models are not perfect they can end up allowing bad MT segments to go through for postediting because of a prediction error. In such a scenario, having an uncertainty estimate for the prediction can provide additional information for the filtering decision. For instance, in order to ensure good user experience for the human translator and maximise translation productivity, an MT segment could be forwarded for post-editing only if a QE model assigns a high quality score with low uncertainty (high confidence). Such a decision process is not possible with point estimates only. Good uncertainty estimates can be acquired from well-calibrated probability distributions over the quality predictions. In QE, arguably the most successful probabilistic models are Gaussian Processes (GPs) since they considered the state-ofthe-art for regression (Cohn and Specia, 2013; Hensman et al., 2013), especially in the low-data regimes typical for this task. We focus our analysis in this paper on GPs since other common models used in QE can only provide point estimates as predictions. Another reason why we focus on probabilistic models is because this lets us employ the ideas proposed by Quiñonero-Candela et al. 
(2006), which defined new evaluation metrics that take into account probability distributions over predictions.", "startOffset" : 25, "endOffset" : 2179 }, { "referenceID" : 6, "context" : "The most popular method for QE is Support Vector Regression (SVR), as shown in the multiple instances of the WMT QE shared tasks (Callisonburch et al., 2012; Bojar et al., 2013; Bojar et al., 2014; Bojar et al., 2015).", "startOffset" : 129, "endOffset" : 217 }, { "referenceID" : 0, "context" : "Bootstrapping approaches like bagging (Abe and Mamitsuka, 1998) can be applied, but this requires setting and optimising hyperparameters like bag size and number of bootstraps.", "startOffset" : 38, "endOffset" : 63 }, { "referenceID" : 22, "context" : "Gaussian Processes (GPs) (Rasmussen and Williams, 2006) is an alternative kernel-based framework that gives competitive results for point estimates (Cohn and Specia, 2013; Shah et al.", "startOffset" : 25, "endOffset" : 55 }, { "referenceID" : 11, "context" : "Gaussian Processes (GPs) (Rasmussen and Williams, 2006) is an alternative kernel-based framework that gives competitive results for point estimates (Cohn and Specia, 2013; Shah et al., 2013; Beck et al., 2014b).", "startOffset" : 148, "endOffset" : 210 }, { "referenceID" : 23, "context" : "Gaussian Processes (GPs) (Rasmussen and Williams, 2006) is an alternative kernel-based framework that gives competitive results for point estimates (Cohn and Specia, 2013; Shah et al., 2013; Beck et al., 2014b).", "startOffset" : 148, "endOffset" : 210 }, { "referenceID" : 22, "context" : "1 Gaussian Process Regression Here we follow closely the definition of GPs given by Rasmussen and Williams (2006). 
Let X = {(x1, y1), (x2, y2), .", "startOffset" : 84, "endOffset" : 114 }, { "referenceID" : 24, "context" : "Warped GPs (Snelson et al., 2004) are an extension of GPs that allows the learning of arbitrary mappings.", "startOffset" : 11, "endOffset" : 33 }, { "referenceID" : 24, "context" : "Snelson et al. (2004) proposes a parametric form composed of a sum of tanh functions, similar to a neural network layer:", "startOffset" : 0, "endOffset" : 22 }, { "referenceID" : 24, "context" : "In our experiments we explore models employing warping functions with up to 3 terms, which is the value recommended by Snelson et al. (2004). We also report results using the f(y) = log(y) warping function.", "startOffset" : 119, "endOffset" : 141 }, { "referenceID" : 21, "context" : "This was proposed in previous work and it is called Negative Log Predictive Density (NLPD) (Quiñonero-Candela et al., 2006):", "startOffset" : 91, "endOffset" : 123 }, { "referenceID" : 12, "context" : "We also report two point estimate metrics on test data: Mean Absolute Error (MAE), the most commonly used evaluation metric in QE, and Pearson’s r, which has recently proposed by Graham (2015) as a more robust alternative.", "startOffset" : 179, "endOffset" : 193 }, { "referenceID" : 27, "context" : "French-English (fr-en) Described in (Specia, 2011), this dataset contains 2, 525 sentences translated by one MT system and post-edited by a professional translator.", "startOffset" : 36, "endOffset" : 50 }, { "referenceID" : 26, "context" : "For model building, we use a standard set of 17 features from the QuEst framework (Specia et al., 2015).", "startOffset" : 82, "endOffset" : 103 }, { "referenceID" : 11, "context" : "in (Cohn and Specia, 2013): first we employ an isotropic kernel and optimise all hyperparameters using 10 random restarts; then we move to an ARD equivalent kernel and perform a final optimisation step to fine tune feature lengthscales.", "startOffset" : 3, "endOffset" : 26 }, { "referenceID" 
: 29, "context" : "Another asymmetric loss is the linear exponential or linex loss (Zellner, 1986):", "startOffset" : 64, "endOffset" : 79 }, { "referenceID" : 16, "context" : "In the context of the AL loss this is called quantile regression (Koenker, 2005), since optimal estimators for this loss are posterior quantiles.", "startOffset" : 65, "endOffset" : 80 }, { "referenceID" : 10, "context" : "Minimum Bayes risk estimators for asymmetric losses were proposed by Christoffersen and Diebold (1997) and we follow their derivations in our experiments.", "startOffset" : 69, "endOffset" : 103 }, { "referenceID" : 15, "context" : "Quality Estimation is generally framed as text regression task, similarly to many other applications such as movie revenue forecasting based on reviews (Joshi et al., 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al.", "startOffset" : 152, "endOffset" : 195 }, { "referenceID" : 4, "context" : "Quality Estimation is generally framed as text regression task, similarly to many other applications such as movie revenue forecasting based on reviews (Joshi et al., 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al.", "startOffset" : 152, "endOffset" : 195 }, { "referenceID" : 28, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012).", "startOffset" : 83, "endOffset" : 135 }, { "referenceID" : 2, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012).", "startOffset" : 83, "endOffset" : 135 }, { "referenceID" : 18, "context" : ", 2014a) and song lyrics (Mihalcea and Strapparava, 2012).", "startOffset" : 25, "endOffset" : 
57 }, { "referenceID" : 24, "context" : "Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al.", "startOffset" : 49, "endOffset" : 71 }, { "referenceID" : 14, "context" : "It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015).", "startOffset" : 101, "endOffset" : 135 }, { "referenceID" : 19, "context" : "Besides the AL and the linex, another well studied loss is the asymmetric quadratic, which in turn relates to the concept of expectiles (Newey and Powell, 1987).", "startOffset" : 136, "endOffset" : 160 }, { "referenceID" : 1, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models. The NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al.", "startOffset" : 116, "endOffset" : 552 }, { "referenceID" : 1, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models. The NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al. (2013). 
It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015).", "startOffset" : 116, "endOffset" : 579 }, { "referenceID" : 1, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models. The NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al. (2013). It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015). Asymmetric loss functions are common in the econometrics literature and were studied by Zellner (1986) and Koenker (2005), among others.", "startOffset" : 116, "endOffset" : 820 }, { "referenceID" : 1, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models. The NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al. (2013). It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015). 
Asymmetric loss functions are common in the econometrics literature and were studied by Zellner (1986) and Koenker (2005), among others.", "startOffset" : 116, "endOffset" : 839 }, { "referenceID" : 1, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models. The NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al. (2013). It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015). Asymmetric loss functions are common in the econometrics literature and were studied by Zellner (1986) and Koenker (2005), among others. Besides the AL and the linex, another well studied loss is the asymmetric quadratic, which in turn relates to the concept of expectiles (Newey and Powell, 1987). This loss generalises the commonly used squared error loss. In terms of applications, Cain and Janssen (1995) gives an example in real estate assessment, where the consequences of under and overassessment are usually different depending on the specific scenario.", "startOffset" : 116, "endOffset" : 1126 }, { "referenceID" : 1, "context" : ", 2010; Bitvai and Cohn, 2015) and detection of emotion strength in news headlines (Strapparava and Mihalcea, 2008; Beck et al., 2014a) and song lyrics (Mihalcea and Strapparava, 2012). In general, these applications are evaluated in terms of their point estimate predictions, arguably because not all of them employ probabilistic models. 
The NLPD is common and established metric used in the GP literature to evaluate new approaches. Examples include the original work on Warped GPs (Snelson et al., 2004), but also others like Lázaro-Gredilla (2012) and Chalupka et al. (2013). It has also been used to evaluate recent work on uncertainty propagation methods for neural networks (Hernández-Lobato and Adams, 2015). Asymmetric loss functions are common in the econometrics literature and were studied by Zellner (1986) and Koenker (2005), among others. Besides the AL and the linex, another well studied loss is the asymmetric quadratic, which in turn relates to the concept of expectiles (Newey and Powell, 1987). This loss generalises the commonly used squared error loss. In terms of applications, Cain and Janssen (1995) gives an example in real estate assessment, where the consequences of under and overassessment are usually different depending on the specific scenario. An engineering example is given by Zellner (1986) in the context of dam construction, where an underestimate of peak water level is much more serious than an overestimate.", "startOffset" : 116, "endOffset" : 1329 }, { "referenceID" : 1, "context" : "Active Learning can benefit from variance information in their query methods and it has shown to be useful for QE (Beck et al., 2013).", "startOffset" : 114, "endOffset" : 133 }, { "referenceID" : 20, "context" : "Exploratory analysis is another avenue for future work, where error bars can provide further insights about the task, as shown in recent work (Nguyen and O’Connor, 2015).", "startOffset" : 142, "endOffset" : 169 } ], "year" : 2016, "abstractText" : "Machine Translation Quality Estimation is a notoriously difficult task, which lessens its usefulness in real-world translation environments. Such scenarios can be improved if quality predictions are accompanied by a measure of uncertainty. 
However, models in this task are traditionally evaluated only in terms of point estimate metrics, which do not take prediction uncertainty into account. We investigate probabilistic methods for Quality Estimation that can provide well-calibrated uncertainty estimates and evaluate them in terms of their full posterior predictive distributions. We also show how this posterior information can be useful in an asymmetric risk scenario, which aims to capture typical situations in translation workflows.", "creator" : "TeX" } }
{ "pile_set_name": "Github" }
using System;
using OpenTK;
using OpenGLES;
using GLKit;
using OpenTK.Graphics.ES20;
using Foundation;
using CoreGraphics;

namespace GLKBaseEffectDrawingTexture
{
	/// <summary>
	/// GLKViewController that renders a rotating, lit, textured monkey mesh
	/// using GLKBaseEffect (fixed-function-style shading on top of ES 2.0).
	/// Vertex data comes from the Monkey class: interleaved position (3 floats),
	/// normal (3 floats) and texcoord (2 floats) — 8 floats per vertex.
	/// </summary>
	public class MCViewController : GLKViewController
	{
		float rotation;
		uint vertexArray;
		uint vertexBuffer;

		EAGLContext context;
		GLKBaseEffect effect;
		GLKTextureInfo texture;

		public MCViewController ()
		{
		}

		public override void ViewDidLoad ()
		{
			base.ViewDidLoad ();

			context = new EAGLContext (EAGLRenderingAPI.OpenGLES2);

			if (context == null)
				Console.WriteLine ("Failed to create ES context");

			var view = View as GLKView;
			view.Context = context;
			view.DrawableDepthFormat = GLKViewDrawableDepthFormat.Format24;
			view.Delegate = this;

			setupGL ();
		}

		/// <summary>
		/// One-time GL setup: effect/lighting state, VAO + VBO upload of the
		/// monkey mesh, vertex attribute layout, and texture loading.
		/// </summary>
		void setupGL ()
		{
			EAGLContext.SetCurrentContext (context);

			effect = new GLKBaseEffect ();
			effect.LightingType = GLKLightingType.PerPixel;
			effect.Light0.Enabled = true;
			effect.Light0.DiffuseColor = new Vector4 (1.0f, 0.4f, 0.4f, 1.0f);

			GL.Enable (EnableCap.DepthTest);

			GL.Oes.GenVertexArrays (1, out vertexArray);
			GL.Oes.BindVertexArray (vertexArray);

			GL.GenBuffers (1, out vertexBuffer);
			GL.BindBuffer (BufferTarget.ArrayBuffer, vertexBuffer);
			GL.BufferData (BufferTarget.ArrayBuffer,
			               (IntPtr) (Monkey.MeshVertexData.Length * sizeof (float)),
			               Monkey.MeshVertexData, BufferUsage.StaticDraw);

			// Stride is 8 floats per vertex; offsets 0/12/24 bytes select
			// position, normal and texcoord within the interleaved layout.
			GL.EnableVertexAttribArray ((int) GLKVertexAttrib.Position);
			GL.VertexAttribPointer ((int) GLKVertexAttrib.Position, 3, VertexAttribPointerType.Float, false, 8 * sizeof (float), 0);

			GL.EnableVertexAttribArray ((int) GLKVertexAttrib.Normal);
			GL.VertexAttribPointer ((int) GLKVertexAttrib.Normal, 3, VertexAttribPointerType.Float, false, 8 * sizeof (float), 12);

			GL.EnableVertexAttribArray ((int) GLKVertexAttrib.TexCoord0);
			GL.VertexAttribPointer ((int) GLKVertexAttrib.TexCoord0, 2, VertexAttribPointerType.Float, false, 8 * sizeof (float), 24);

			GL.ActiveTexture (TextureUnit.Texture0);
			string path = NSBundle.MainBundle.PathForResource ("monkey", "png");
			NSError error;
			NSDictionary options = new NSDictionary (GLKTextureLoader.OriginBottomLeft, true);
			texture = GLKTextureLoader.FromFile (path, options, out error);

			// BUG FIX: the original logged a failed texture load but then
			// dereferenced `texture` unconditionally, crashing with a
			// NullReferenceException. Bail out (leaving the VAO unbound)
			// so rendering proceeds untextured instead of crashing.
			if (texture == null) {
				Console.WriteLine (String.Format ("Error loading texture: {0}", error.LocalizedDescription));
				GL.Oes.BindVertexArray (0);
				return;
			}

			GLKEffectPropertyTexture tex = new GLKEffectPropertyTexture ();
			tex.Enabled = true;
			tex.EnvMode = GLKTextureEnvMode.Decal;
			tex.GLName = texture.Name;
			effect.Texture2d0.GLName = tex.GLName;

			GL.Oes.BindVertexArray (0);
		}

		/// <summary>
		/// Per-frame update: recompute projection from the current aspect ratio
		/// and advance the model rotation by elapsed time.
		/// NOTE(review): the rotation axis (1,1,1) passed to CreateFromAxisAngle
		/// is not normalized — OpenTK expects a unit axis; kept as-is to
		/// preserve the sample's original look. Confirm before reusing.
		/// </summary>
		public override void Update ()
		{
			float aspect = (float)Math.Abs (View.Bounds.Size.Width / View.Bounds.Size.Height);

			Matrix4 projectionMatrix =
				Matrix4.CreatePerspectiveFieldOfView ((float) (Math.PI * 65f / 180.0f), aspect, 0.1f, 100.0f);

			effect.Transform.ProjectionMatrix = projectionMatrix;

			Matrix4 modelViewMatrix = Matrix4.CreateTranslation (new Vector3 (0f, 0f, -3.5f));
			modelViewMatrix = Matrix4.Mult (Matrix4.CreateFromAxisAngle (new Vector3 (1f, 1f, 1f), rotation), modelViewMatrix);

			effect.Transform.ModelViewMatrix = modelViewMatrix;

			rotation += (float) TimeSinceLastUpdate * 0.5f;
		}

		/// <summary>
		/// GLKView draw callback: clear, bind the mesh VAO and draw all triangles.
		/// </summary>
		public override void DrawInRect (GLKView view, CGRect rect)
		{
			GL.ClearColor (0.65f, 0.65f, 0.65f, 1f);
			GL.Clear (ClearBufferMask.ColorBufferBit | ClearBufferMask.DepthBufferBit);

			GL.Oes.BindVertexArray (vertexArray);

			effect.PrepareToDraw ();

			GL.DrawArrays (BeginMode.Triangles, 0, Monkey.MeshVertexData.Length / 8);
		}
	}
}
{ "pile_set_name": "Github" }
<?php
/**
 * @author Robin McCorkell <[email protected]>
 *
 * @copyright Copyright (c) 2018, ownCloud GmbH
 * @license AGPL-3.0
 *
 * This code is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License, version 3,
 * as published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License, version 3,
 * along with this program. If not, see <http://www.gnu.org/licenses/>
 *
 */

namespace OCP\Files;

/**
 * Interface IMimeTypeLoader
 * @package OCP\Files
 * @since 8.2.0
 *
 * Interface to load mimetypes: maps between mimetype strings
 * (e.g. "image/png") and the integer IDs they are stored under
 * in the database.
 **/
interface IMimeTypeLoader {
	/**
	 * Get a mimetype from its ID
	 *
	 * @param int $id
	 * @return string|null the mimetype string, or null when the ID is unknown
	 * @since 8.2.0
	 */
	public function getMimetypeById($id);

	/**
	 * Get a mimetype ID, adding the mimetype to the DB if it does not exist
	 *
	 * @param string $mimetype
	 * @return int
	 * @since 8.2.0
	 */
	public function getId($mimetype);

	/**
	 * Test if a mimetype exists in the database
	 *
	 * @param string $mimetype
	 * @return bool
	 * @since 8.2.0
	 */
	public function exists($mimetype);

	/**
	 * Clear all loaded mimetypes, allow for re-loading
	 * (e.g. after the mimetype table has been modified externally)
	 *
	 * @since 8.2.0
	 */
	public function reset();
}
{ "pile_set_name": "Github" }
// Log every keyboard event fired on the input field, unless the matching
// "<type>Ignore" checkbox is ticked. A ruler line separates bursts of
// events more than 250 ms apart. Ticking "<type>Stop" suppresses the
// event's default action.
kinput.onkeydown = kinput.onkeyup = kinput.onkeypress = handle;

let lastTime = Date.now();

function handle(e) {
  if (form.elements[e.type + 'Ignore'].checked) return;

  // Collect the modifier/repeat annotations that apply to this event.
  const flags = [
    e.shiftKey ? ' shiftKey' : '',
    e.ctrlKey ? ' ctrlKey' : '',
    e.altKey ? ' altKey' : '',
    e.metaKey ? ' metaKey' : '',
    e.repeat ? ' (repeat)' : ''
  ].join('');

  const line = e.type + ' key=' + e.key + ' code=' + e.code + flags + "\n";

  // Separator between event bursts (80 dashes, same as new Array(81).join('-')).
  if (area.value && Date.now() - lastTime > 250) {
    area.value += '-'.repeat(80) + '\n';
  }
  lastTime = Date.now();

  area.value += line;

  if (form.elements[e.type + 'Stop'].checked) {
    e.preventDefault();
  }
}
{ "pile_set_name": "Github" }
/* * Copyright 2012 Red Hat Inc. * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. 
 *
 * Authors: Ben Skeggs
 */
#include "priv.h"
#include "ram.h"

#include <core/memory.h>
#include <core/option.h>
#include <subdev/bios.h>
#include <subdev/bios/M0203.h>
#include <engine/gr.h>
#include <engine/mpeg.h>

/* Tear down one tile region via the chip-specific backend. */
void
nvkm_fb_tile_fini(struct nvkm_fb *fb, int region, struct nvkm_fb_tile *tile)
{
	fb->func->tile.fini(fb, region, tile);
}

/* Fill in a tile region description via the chip-specific backend. */
void
nvkm_fb_tile_init(struct nvkm_fb *fb, int region, u32 addr, u32 size,
		  u32 pitch, u32 flags, struct nvkm_fb_tile *tile)
{
	fb->func->tile.init(fb, region, addr, size, pitch, flags, tile);
}

/* Program a tile region into hardware, then notify the GR and MPEG
 * engines (when present) about the updated region.
 */
void
nvkm_fb_tile_prog(struct nvkm_fb *fb, int region, struct nvkm_fb_tile *tile)
{
	struct nvkm_device *device = fb->subdev.device;
	if (fb->func->tile.prog) {
		fb->func->tile.prog(fb, region, tile);
		if (device->gr)
			nvkm_engine_tile(&device->gr->engine, region);
		if (device->mpeg)
			nvkm_engine_tile(device->mpeg, region);
	}
}

/* Determine the board's RAM type from the VBIOS M0203 table, selected by
 * the ramcfg index read from register 0x101000 (bits 5:2). Returns an
 * NVKM_RAM_TYPE_* value, or NVKM_RAM_TYPE_UNKNOWN when the table entry
 * is missing or its type is unrecognised.
 */
int
nvkm_fb_bios_memtype(struct nvkm_bios *bios)
{
	struct nvkm_subdev *subdev = &bios->subdev;
	struct nvkm_device *device = subdev->device;
	const u8 ramcfg = (nvkm_rd32(device, 0x101000) & 0x0000003c) >> 2;
	struct nvbios_M0203E M0203E;
	u8 ver, hdr;

	if (nvbios_M0203Em(bios, ramcfg, &ver, &hdr, &M0203E)) {
		switch (M0203E.type) {
		case M0203E_TYPE_DDR2 : return NVKM_RAM_TYPE_DDR2;
		case M0203E_TYPE_DDR3 : return NVKM_RAM_TYPE_DDR3;
		case M0203E_TYPE_GDDR3: return NVKM_RAM_TYPE_GDDR3;
		case M0203E_TYPE_GDDR5: return NVKM_RAM_TYPE_GDDR5;
		default:
			nvkm_warn(subdev, "M0203E type %02x\n", M0203E.type);
			return NVKM_RAM_TYPE_UNKNOWN;
		}
	}

	nvkm_warn(subdev, "M0203E not matched!\n");
	return NVKM_RAM_TYPE_UNKNOWN;
}

/* Subdev interrupt entry point: forward to the chip-specific handler. */
static void
nvkm_fb_intr(struct nvkm_subdev *subdev)
{
	struct nvkm_fb *fb = nvkm_fb(subdev);
	if (fb->func->intr)
		fb->func->intr(fb);
}

/* One-time initialisation: probe VRAM, run the chip-specific oneinit
 * hook, then set up the compression tag allocator.
 */
static int
nvkm_fb_oneinit(struct nvkm_subdev *subdev)
{
	struct nvkm_fb *fb = nvkm_fb(subdev);
	u32 tags = 0;

	if (fb->func->ram_new) {
		int ret = fb->func->ram_new(fb, &fb->ram);
		if (ret) {
			nvkm_error(subdev, "vram setup failed, %d\n", ret);
			return ret;
		}
	}

	if (fb->func->oneinit) {
		int ret = fb->func->oneinit(fb);
		if (ret)
			return ret;
	}

	/* Initialise compression tag allocator.
	 *
	 * LTC oneinit() will override this on Fermi and newer.
	 */
	if (fb->func->tags) {
		tags = fb->func->tags(fb);
		nvkm_debug(subdev, "%d comptags\n", tags);
	}

	return nvkm_mm_init(&fb->tags, 0, 0, tags, 1);
}

/* (Re)initialise the subdev: bring up RAM, reprogram every tile region,
 * then run the chip-specific init hooks in order.
 */
static int
nvkm_fb_init(struct nvkm_subdev *subdev)
{
	struct nvkm_fb *fb = nvkm_fb(subdev);
	int ret, i;

	if (fb->ram) {
		ret = nvkm_ram_init(fb->ram);
		if (ret)
			return ret;
	}

	for (i = 0; i < fb->tile.regions; i++)
		fb->func->tile.prog(fb, i, &fb->tile.region[i]);

	if (fb->func->init)
		fb->func->init(fb);

	if (fb->func->init_remapper)
		fb->func->init_remapper(fb);

	if (fb->func->init_page) {
		ret = fb->func->init_page(fb);
		if (WARN_ON(ret))
			return ret;
	}

	if (fb->func->init_unkn)
		fb->func->init_unkn(fb);
	return 0;
}

/* Destructor: release MMU scratch memory, tile regions, the tag
 * allocator and RAM. When a chip-specific dtor exists, it is
 * responsible for returning the object to free.
 */
static void *
nvkm_fb_dtor(struct nvkm_subdev *subdev)
{
	struct nvkm_fb *fb = nvkm_fb(subdev);
	int i;

	nvkm_memory_unref(&fb->mmu_wr);
	nvkm_memory_unref(&fb->mmu_rd);

	for (i = 0; i < fb->tile.regions; i++)
		fb->func->tile.fini(fb, i, &fb->tile.region[i]);

	nvkm_mm_fini(&fb->tags);
	nvkm_ram_del(&fb->ram);

	if (fb->func->dtor)
		return fb->func->dtor(fb);
	return fb;
}

static const struct nvkm_subdev_func
nvkm_fb = {
	.dtor = nvkm_fb_dtor,
	.oneinit = nvkm_fb_oneinit,
	.init = nvkm_fb_init,
	.intr = nvkm_fb_intr,
};

/* Common constructor; the big-page size default can be overridden with
 * the "NvFbBigPage" config option.
 */
void
nvkm_fb_ctor(const struct nvkm_fb_func *func, struct nvkm_device *device,
	     int index, struct nvkm_fb *fb)
{
	nvkm_subdev_ctor(&nvkm_fb, device, index, &fb->subdev);
	fb->func = func;
	fb->tile.regions = fb->func->tile.regions;
	fb->page = nvkm_longopt(device->cfgopt, "NvFbBigPage",
				fb->func->default_bigpage);
}

/* Allocate and construct a new FB subdev instance. */
int
nvkm_fb_new_(const struct nvkm_fb_func *func, struct nvkm_device *device,
	     int index, struct nvkm_fb **pfb)
{
	if (!(*pfb = kzalloc(sizeof(**pfb), GFP_KERNEL)))
		return -ENOMEM;
	nvkm_fb_ctor(func, device, index, *pfb);
	return 0;
}
{ "pile_set_name": "Github" }
use std::fmt::{self, Write};

/// Escape a string to pass it into JavaScript.
///
/// The result renders as a single-quoted JavaScript string literal with
/// every problematic code point backslash-escaped.
///
/// # Example
///
/// ```rust,no_run
/// # use web_view::WebView;
/// # use std::mem;
/// #
/// # let mut view: WebView<()> = unsafe { mem::uninitialized() };
/// #
/// let string = "Hello, world!";
///
/// // Calls the function callback with "Hello, world!" as its parameter.
///
/// view.eval(&format!("callback({});", web_view::escape(string)));
/// ```
pub fn escape(string: &str) -> Escaper {
    Escaper(string)
}

// "All code points may appear literally in a string literal except for the
// closing quote code points, U+005C (REVERSE SOLIDUS), U+000D (CARRIAGE
// RETURN), U+2028 (LINE SEPARATOR), U+2029 (PARAGRAPH SEPARATOR), and U+000A
// (LINE FEED)." - ES6 Specification
pub struct Escaper<'a>(&'a str);

const SPECIAL: &[char] = &[
    '\n',       // U+000A (LINE FEED)
    '\r',       // U+000D (CARRIAGE RETURN)
    '\'',       // U+0027 (APOSTROPHE)
    '\\',       // U+005C (REVERSE SOLIDUS)
    '\u{2028}', // U+2028 (LINE SEPARATOR)
    '\u{2029}', // U+2029 (PARAGRAPH SEPARATOR)
];

impl<'a> fmt::Display for Escaper<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let Escaper(text) = *self;

        // Emit the opening quote, then each character either literally or
        // as its escape sequence, then the closing quote.
        f.write_char('\'')?;
        for ch in text.chars() {
            if SPECIAL.contains(&ch) {
                f.write_str(match ch {
                    '\n' => "\\n",
                    '\r' => "\\r",
                    '\'' => "\\'",
                    '\\' => "\\\\",
                    '\u{2028}' => "\\u2028",
                    '\u{2029}' => "\\u2029",
                    _ => unreachable!(),
                })?;
            } else {
                f.write_char(ch)?;
            }
        }
        f.write_char('\'')?;
        Ok(())
    }
}

#[test]
fn test() {
    let plain = "ABC \n\r' abc \\ \u{2028} \u{2029}123";
    let escaped = escape(plain).to_string();
    assert!(escaped == "'ABC \\n\\r\\' abc \\\\ \\u2028 \\u2029123'");
}
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2014, 2019 Oracle and/or its affiliates. All rights reserved. This
 * code is released under a tri EPL/GPL/LGPL license. You can use it,
 * redistribute it and/or modify it under the terms of the:
 *
 * Eclipse Public License version 2.0, or
 * GNU General Public License version 2, or
 * GNU Lesser General Public License version 2.1.
 */
package org.truffleruby.language.methods;

import org.truffleruby.language.RubyContextSourceNode;
import org.truffleruby.language.RubyNode;

import com.oracle.truffle.api.CompilerDirectives;
import com.oracle.truffle.api.frame.VirtualFrame;

/**
 * Wraps a child node's execution so that any {@link Throwable} escaping it is
 * passed through {@link TranslateExceptionNode}, whose translated exception is
 * then rethrown to the caller.
 */
public class ExceptionTranslatingNode extends RubyContextSourceNode {

    private final UnsupportedOperationBehavior unsupportedOperationBehavior;

    @Child private RubyNode child;
    @Child private TranslateExceptionNode translateExceptionNode;

    public ExceptionTranslatingNode(RubyNode child,
            UnsupportedOperationBehavior unsupportedOperationBehavior) {
        this.child = child;
        this.unsupportedOperationBehavior = unsupportedOperationBehavior;
    }

    @Override
    public Object execute(VirtualFrame frame) {
        try {
            return child.execute(frame);
        } catch (Throwable t) {
            // The translation node is created lazily, only on the first
            // exception; transferToInterpreterAndInvalidate() deoptimizes
            // before mutating the node tree via insert().
            if (translateExceptionNode == null) {
                CompilerDirectives.transferToInterpreterAndInvalidate();
                translateExceptionNode = insert(TranslateExceptionNode.create());
            }
            throw translateExceptionNode.executeTranslation(t, unsupportedOperationBehavior);
        }
    }

    @Override
    public RubyNode simplifyAsTailExpression() {
        // Rebuild this wrapper around the simplified child, preserving the
        // original source section.
        return new ExceptionTranslatingNode(child.simplifyAsTailExpression(), unsupportedOperationBehavior)
                .copySourceSection(this);
    }
}
{ "pile_set_name": "Github" }
/**
 * SyntaxHighlighter
 * http://alexgorbatchev.com/SyntaxHighlighter
 *
 * SyntaxHighlighter is donationware. If you are using it, please donate.
 * http://alexgorbatchev.com/SyntaxHighlighter/donate.html
 *
 * @version
 * 3.0.83 (July 02 2010)
 *
 * @copyright
 * Copyright (C) 2004-2010 Alex Gorbatchev.
 *
 * @license
 * Dual licensed under the MIT and GPL licenses.
 */

/* Code area background (normal, alternating and highlighted lines). */
.syntaxhighlighter {
  background-color: black !important;
}
.syntaxhighlighter .line.alt1 {
  background-color: black !important;
}
.syntaxhighlighter .line.alt2 {
  background-color: black !important;
}
.syntaxhighlighter .line.highlighted.alt1,
.syntaxhighlighter .line.highlighted.alt2 {
  background-color: #2a3133 !important;
}
.syntaxhighlighter .line.highlighted.number {
  color: white !important;
}
.syntaxhighlighter table caption {
  color: #d3d3d3 !important;
}

/* Line-number gutter. */
.syntaxhighlighter .gutter {
  color: #d3d3d3 !important;
}
.syntaxhighlighter .gutter .line {
  border-right: 3px solid #990000 !important;
}
.syntaxhighlighter .gutter .line.highlighted {
  background-color: #990000 !important;
  color: black !important;
}

.syntaxhighlighter.printing .line .content {
  border: none !important;
}

/* Collapsed state and toolbar. */
.syntaxhighlighter.collapsed {
  overflow: visible !important;
}
.syntaxhighlighter.collapsed .toolbar {
  color: #ebdb8d !important;
  background: black !important;
  border: 1px solid #990000 !important;
}
.syntaxhighlighter.collapsed .toolbar a {
  color: #ebdb8d !important;
}
.syntaxhighlighter.collapsed .toolbar a:hover {
  color: #ff7d27 !important;
}
.syntaxhighlighter .toolbar {
  color: white !important;
  background: #990000 !important;
  border: none !important;
}
.syntaxhighlighter .toolbar a {
  color: white !important;
}
.syntaxhighlighter .toolbar a:hover {
  color: #9ccff4 !important;
}

/* Token colours. */
.syntaxhighlighter .plain,
.syntaxhighlighter .plain a {
  color: #d3d3d3 !important;
}
.syntaxhighlighter .comments,
.syntaxhighlighter .comments a {
  color: #ff7d27 !important;
}
.syntaxhighlighter .string,
.syntaxhighlighter .string a {
  color: #ff9e7b !important;
}
.syntaxhighlighter .keyword {
  color: aqua !important;
}
.syntaxhighlighter .preprocessor {
  color: #aec4de !important;
}
.syntaxhighlighter .variable {
  color: #ffaa3e !important;
}
.syntaxhighlighter .value {
  color: #009900 !important;
}
.syntaxhighlighter .functions {
  color: #81cef9 !important;
}
.syntaxhighlighter .constants {
  color: #ff9e7b !important;
}
.syntaxhighlighter .script {
  font-weight: bold !important;
  color: aqua !important;
  /* BUG FIX: `background-color: none` is invalid CSS (`none` is not a
   * <color> value) and was ignored by browsers; `background: none`
   * achieves the intended "no background" effect. */
  background: none !important;
}
.syntaxhighlighter .color1,
.syntaxhighlighter .color1 a {
  color: #ebdb8d !important;
}
.syntaxhighlighter .color2,
.syntaxhighlighter .color2 a {
  color: #ff7d27 !important;
}
.syntaxhighlighter .color3,
.syntaxhighlighter .color3 a {
  color: #aec4de !important;
}
{ "pile_set_name": "Github" }
"use strict";

// Jasmine spec for loglevel's persistence layer in environments where
// cookies are available but localStorage is not, forcing the cookie
// fallback path.
define(['test/test-helpers'], function(testHelpers) {
    var describeIf = testHelpers.describeIf;
    var it = testHelpers.itWithFreshLog;

    var originalConsole = window.console;
    // NOTE(review): originalDocument is captured but never restored or read
    // below — presumably a leftover; confirm before removing.
    var originalDocument = window.document;

    describeIf(testHelpers.isCookieStorageAvailable() && !testHelpers.isLocalStorageAvailable(),
               "Cookie-only persistence tests:", function() {

        beforeEach(function() {
            // Replace console.log with a spy so logging can be asserted on
            // without polluting test output.
            window.console = {"log" : jasmine.createSpy("console.log")};
            this.addMatchers({
                "toBeAtLevel" : testHelpers.toBeAtLevel,
                "toBeTheStoredLevel" : testHelpers.toBeTheLevelStoredByCookie
            });
        });

        afterEach(function() {
            window.console = originalConsole;
        });

        describe("If no level is saved", function() {
            beforeEach(function() {
                testHelpers.clearStoredLevels();
            });

            it("log level is set to warn by default", function(log) {
                expect(log).toBeAtLevel("warn");
            });

            it("warn is persisted as the current level", function(log) {
                expect("warn").toBeTheStoredLevel();
            });

            it("log can be set to info level", function(log) {
                log.setLevel("info");
                expect(log).toBeAtLevel("info");
            });

            it("log.setLevel() sets a cookie with the given level", function(log) {
                log.setLevel("debug");
                expect("debug").toBeTheStoredLevel();
            });
        });

        describe("If info level is saved", function() {
            beforeEach(function() {
                testHelpers.setStoredLevel("info");
            });

            it("info is the default log level", function(log) {
                expect(log).toBeAtLevel("info");
            });

            it("log can be changed to warn level", function(log) {
                log.setLevel("warn");
                expect(log).toBeAtLevel("warn");
            });

            it("log.setLevel() overwrites the saved level", function(log) {
                log.setLevel("error");
                expect("error").toBeTheStoredLevel();
                expect("info").not.toBeTheStoredLevel();
            });
        });

        describe("If the level is saved with other data", function() {
            beforeEach(function() {
                // Surround the loglevel cookie with unrelated cookies to
                // verify it is parsed and updated in isolation.
                window.document.cookie = "qwe=asd";
                window.document.cookie = "loglevel=ERROR";
                window.document.cookie = "msg=hello world";
            });

            it("error is the default log level", function(log) {
                expect(log).toBeAtLevel("error");
            });

            it("log can be changed to silent level", function(log) {
                log.setLevel("silent");
                expect(log).toBeAtLevel("silent");
            });

            it("log.setLevel() overrides the saved level only", function(log) {
                log.setLevel("debug");
                expect('debug').toBeTheStoredLevel();
                // Unrelated cookies must survive the update.
                expect(window.document.cookie).toContain("msg=hello world");
            });
        });

        describe("If the level cookie is set incorrectly", function() {
            beforeEach(function() {
                testHelpers.setCookieStoredLevel('gibberish');
            });

            it("warn is the default log level", function(log) {
                expect(log).toBeAtLevel("warn");
            });

            it("warn is persisted as the current level, overriding the invalid cookie", function(log) {
                expect("warn").toBeTheStoredLevel();
            });

            it("log can be changed to info level", function(log) {
                log.setLevel("info");
                expect(log).toBeAtLevel("info");
            });

            it("log.setLevel() overrides the saved level with the new level", function(log) {
                expect('debug').not.toBeTheStoredLevel();
                log.setLevel("debug");
                expect('debug').toBeTheStoredLevel();
            });
        });
    });
});
{ "pile_set_name": "Github" }
// Copyright (c) 2007-2018 Thong Nguyen ([email protected])

using System;
using System.Reflection;
using Platform;

namespace Shaolinq.TypeBuilding
{
	/// <summary>
	/// Holds pre-resolved <see cref="FieldInfo"/> references so they can be
	/// reused without repeated reflection lookups.
	/// </summary>
	public class FieldInfoFastRef
	{
		// FieldInfo for Guid.Empty, resolved once via the TypeUtils expression helper.
		public static readonly FieldInfo GuidEmptyGuid = TypeUtils.GetField(() => Guid.Empty);
	}
}
{ "pile_set_name": "Github" }
紺野ゆみ最新番号 【AVT-006】アナルバイブの虜 6</a>2008-11-22プールクラブ・エンタテインメント$$$F120分钟
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <meta name="generator" content="rustdoc"> <meta name="description" content="API documentation for the Rust `clone` mod in crate `core`."> <meta name="keywords" content="rust, rustlang, rust-lang, clone"> <title>core::clone - Rust</title> <link rel="stylesheet" type="text/css" href="../../rustdoc.css"> <link rel="stylesheet" type="text/css" href="../../main.css"> <link rel="shortcut icon" href="https://doc.rust-lang.org/favicon.ico"> </head> <body class="rustdoc"> <!--[if lte IE 8]> <div class="warning"> This old browser is unsupported and will most likely display funky things. </div> <![endif]--> <nav class="sidebar"> <a href='../../core/index.html'><img src='https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png' alt='' width='100'></a> <p class='location'><a href='../index.html'>core</a></p><script>window.sidebarCurrent = {name: 'clone', ty: 'mod', relpath: '../'};</script><script defer src="../sidebar-items.js"></script> </nav> <nav class="sub"> <form class="search-form js-only"> <div class="search-container"> <input class="search-input" name="search" autocomplete="off" placeholder="Click or press ‘S’ to search, ‘?’ for more options…" type="search"> </div> </form> </nav> <section id='main' class="content mod"> <h1 class='fqn'><span class='in-band'>Module <a href='../index.html'>core</a>::<wbr><a class='mod' href=''>clone</a></span><span class='out-of-band'><span id='render-detail'> <a id="toggle-all-docs" href="javascript:void(0)" title="collapse all docs"> [<span class='inner'>&#x2212;</span>] </a> </span><a id='src-21705' class='srclink' href='../../src/core/clone.rs.html#11-90' title='goto source code'>[src]</a></span></h1> <div class='docblock'><p>The <code>Clone</code> trait for types that cannot be &#39;implicitly copied&#39;</p> <p>In Rust, some simple types are &quot;implicitly copyable&quot; and when you assign them 
or pass them as arguments, the receiver will get a copy, leaving the original value in place. These types do not require allocation to copy and do not have finalizers (i.e. they do not contain owned boxes or implement <code>Drop</code>), so the compiler considers them cheap and safe to copy. For other types copies must be made explicitly, by convention implementing the <code>Clone</code> trait and calling the <code>clone</code> method.</p> </div><h2 id='traits' class='section-header'><a href="#traits">Traits</a></h2> <table> <tr class=' module-item'> <td><a class='trait' href='trait.Clone.html' title='core::clone::Clone'>Clone</a></td> <td class='docblock short'> <p>A common trait for cloning an object.</p> </td> </tr> </table></section> <section id='search' class="content hidden"></section> <section class="footer"></section> <aside id="help" class="hidden"> <div> <h1 class="hidden">Help</h1> <div class="shortcuts"> <h2>Keyboard Shortcuts</h2> <dl> <dt>?</dt> <dd>Show this help dialog</dd> <dt>S</dt> <dd>Focus the search field</dd> <dt>&larrb;</dt> <dd>Move up in search results</dd> <dt>&rarrb;</dt> <dd>Move down in search results</dd> <dt>&#9166;</dt> <dd>Go to active search result</dd> </dl> </div> <div class="infos"> <h2>Search Tricks</h2> <p> Prefix searches with a type followed by a colon (e.g. <code>fn:</code>) to restrict the search to a given type. </p> <p> Accepted types are: <code>fn</code>, <code>mod</code>, <code>struct</code>, <code>enum</code>, <code>trait</code>, <code>type</code>, <code>macro</code>, and <code>const</code>. </p> <p> Search functions by type signature (e.g. <code>vec -> usize</code>) </p> </div> </div> </aside> <script> window.rootPath = "../../"; window.currentCrate = "core"; window.playgroundUrl = "https://play.rust-lang.org/"; </script> <script src="../../jquery.js"></script> <script src="../../main.js"></script> <script src="../../playpen.js"></script> <script defer src="../../search-index.js"></script> </body> </html>
{ "pile_set_name": "Github" }
# This type exists only for testing the dry-run feature; it does nothing useful.
# It emits a remote-side script that fails loudly if it is ever executed.
#
# BUG FIX: the original placed `>&2` INSIDE the generated script's double
# quotes, so the emitted script printed the literal characters `>&2` to
# stdout instead of redirecting the message to stderr.
echo 'echo "this gencode-remote script should never be executed" >&2'
echo 'exit 1'
{ "pile_set_name": "Github" }
/**
 * Copyright © 2002 Instituto Superior Técnico
 *
 * This file is part of FenixEdu Academic.
 *
 * FenixEdu Academic is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Lesser General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * FenixEdu Academic is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public License
 * along with FenixEdu Academic. If not, see <http://www.gnu.org/licenses/>.
 */
package org.fenixedu.academic.ui.renderers.providers;

import java.util.ArrayList;
import java.util.List;

import org.fenixedu.academic.domain.DegreeCurricularPlan;
import org.fenixedu.academic.domain.DegreeCurricularPlanEquivalencePlan;
import org.fenixedu.academic.domain.degree.DegreeType;
import org.fenixedu.academic.ui.struts.action.coordinator.StudentSearchBean;

import pt.ist.fenixWebFramework.rendererExtensions.converters.DomainObjectKeyConverter;
import pt.ist.fenixWebFramework.renderers.DataProvider;
import pt.ist.fenixWebFramework.renderers.components.converters.Converter;

/**
 * Renderer data provider that lists the degree curricular plans a student's
 * equivalencies may target: either every Bologna-type plan, or only the
 * destination plans reachable through the old plan's equivalence plans.
 */
public class DegreeCurricularPlanForStudentEquivalenciesProvider implements DataProvider {

    @Override
    public Object provide(Object source, Object currentValue) {
        final StudentSearchBean studentSearchBean = (StudentSearchBean) source;
        // NOTE(review): this is an identity (==) comparison of the two domain
        // objects, not equals() — presumably intentional (same instance means
        // no distinct "old" plan was chosen), but confirm against callers.
        return studentSearchBean.getDegreeCurricularPlan() == studentSearchBean.getOldDegreeCurricularPlan() ? getAllDegreeCurricularPlans()
                : getDestinationsDegreeCurricularPlans(studentSearchBean
                        .getOldDegreeCurricularPlan());
    }

    // Collects the target plan of every equivalence plan departing from the
    // given (old) curricular plan.
    private List<DegreeCurricularPlan> getDestinationsDegreeCurricularPlans(DegreeCurricularPlan oldDegreeCurricularPlan) {
        final List<DegreeCurricularPlan> result = new ArrayList<DegreeCurricularPlan>();
        for (final DegreeCurricularPlanEquivalencePlan degreeCurricularPlanEquivalencePlan : oldDegreeCurricularPlan
                .getTargetEquivalencePlans()) {
            result.add(degreeCurricularPlanEquivalencePlan.getDegreeCurricularPlan());
        }
        return result;
    }

    // All plans whose degree type is a Bologna degree, Bologna master or
    // integrated master.
    private List<DegreeCurricularPlan> getAllDegreeCurricularPlans() {
        return new ArrayList<DegreeCurricularPlan>(DegreeCurricularPlan.getDegreeCurricularPlans(DegreeType.oneOf(
                DegreeType::isBolonhaDegree, DegreeType::isBolonhaMasterDegree, DegreeType::isIntegratedMasterDegree)));
    }

    @Override
    public Converter getConverter() {
        // Domain objects are exchanged with the UI layer by their OID keys.
        return new DomainObjectKeyConverter();
    }
}
{ "pile_set_name": "Github" }
"""Tests for the low-level _locale module: compares nl_langinfo() and
localeconv() results against each other and against known per-locale
decimal-point / thousands-separator values, across a list of candidate
locales (only locales actually installed on the host are exercised)."""

from _locale import (setlocale, LC_ALL, LC_CTYPE, LC_NUMERIC, localeconv, Error)
try:
    from _locale import (RADIXCHAR, THOUSEP, nl_langinfo)
except ImportError:
    # nl_langinfo is optional (not available on all platforms); tests that
    # need it are skipped via @unittest.skipUnless below.
    nl_langinfo = None

import codecs
import locale
import sys
import unittest
from platform import uname

# Skip the whole module on very old macOS, where locale support is broken.
# NOTE: `min` here shadows the builtin within this module scope.
if uname().system == "Darwin":
    maj, min, mic = [int(part) for part in uname().release.split(".")]
    if (maj, min, mic) < (8, 0, 0):
        raise unittest.SkipTest("locale support broken for OS X < 10.4")

# Locales tried in each test; entries missing from the host are skipped at
# setlocale() time (the Error is caught and the loop continues).
candidate_locales = ['es_UY', 'fr_FR', 'fi_FI', 'es_CO', 'pt_PT', 'it_IT',
    'et_EE', 'es_PY', 'no_NO', 'nl_NL', 'lv_LV', 'el_GR', 'be_BY', 'fr_BE',
    'ro_RO', 'ru_UA', 'ru_RU', 'es_VE', 'ca_ES', 'se_NO', 'es_EC', 'id_ID',
    'ka_GE', 'es_CL', 'wa_BE', 'hu_HU', 'lt_LT', 'sl_SI', 'hr_HR', 'es_AR',
    'es_ES', 'oc_FR', 'gl_ES', 'bg_BG', 'is_IS', 'mk_MK', 'de_AT', 'pt_BR',
    'da_DK', 'nn_NO', 'cs_CZ', 'de_LU', 'es_BO', 'sq_AL', 'sk_SK', 'fr_CH',
    'de_DE', 'sr_YU', 'br_FR', 'nl_BE', 'sv_FI', 'pl_PL', 'fr_CA', 'fo_FO',
    'bs_BA', 'fr_LU', 'kl_GL', 'fa_IR', 'de_BE', 'sv_SE', 'it_CH', 'uk_UA',
    'eu_ES', 'vi_VN', 'af_ZA', 'nb_NO', 'en_DK', 'tg_TJ', 'ps_AF', 'en_US',
    'fr_FR.ISO8859-1', 'fr_FR.UTF-8', 'fr_FR.ISO8859-15@euro',
    'ru_RU.KOI8-R', 'ko_KR.eucKR']


def setUpModule():
    """Prune candidate_locales for platform-specific locale bugs before any
    test runs.  Mutates the module-level list in place (via `global`)."""
    global candidate_locales
    # Issue #13441: Skip some locales (e.g. cs_CZ and hu_HU) on Solaris to
    # workaround a mbstowcs() bug. For example, on Solaris, the hu_HU locale uses
    # the locale encoding ISO-8859-2, the thousauds separator is b'\xA0' and it is
    # decoded as U+30000020 (an invalid character) by mbstowcs().
    if sys.platform == 'sunos5':
        # Remember the current locale so it can be restored even if a probe
        # below raises.
        old_locale = locale.setlocale(locale.LC_ALL)
        try:
            locales = []
            for loc in candidate_locales:
                try:
                    locale.setlocale(locale.LC_ALL, loc)
                except Error:
                    # Locale not installed on this host; drop it.
                    continue
                encoding = locale.getpreferredencoding(False)
                try:
                    # Probe: localeconv() decodes locale data and may fail on
                    # broken locale databases.
                    localeconv()
                except Exception as err:
                    print("WARNING: Skip locale %s (encoding %s): [%s] %s"
                        % (loc, encoding, type(err), err))
                else:
                    locales.append(loc)
            candidate_locales = locales
        finally:
            locale.setlocale(locale.LC_ALL, old_locale)

    # Workaround for MSVC6(debug) crash bug
    if "MSC v.1200" in sys.version:
        def accept(loc):
            # Reject locale names whose codeset suffix is 9+ characters long.
            a = loc.split(".")
            return not(len(a) == 2 and len(a[-1]) >= 9)
        candidate_locales = [loc for loc in candidate_locales if accept(loc)]

# List known locale values to test against when available.
# Dict formatted as ``<locale> : (<decimal_point>, <thousands_sep>)``.  If a
# value is not known, use '' .
known_numerics = {
    'en_US': ('.', ','),
    'de_DE' : (',', '.'),
    'fr_FR.UTF-8' : (',', ' '),
    'ps_AF': ('\u066b', '\u066c'),
}


class _LocaleTests(unittest.TestCase):

    def setUp(self):
        # Save the locale active before the test so tearDown can restore it.
        self.oldlocale = setlocale(LC_ALL)

    def tearDown(self):
        setlocale(LC_ALL, self.oldlocale)

    # Want to know what value was calculated, what it was compared against,
    # what function was used for the calculation, what type of data was used,
    # the locale that was supposedly set, and the actual locale that is set.
    lc_numeric_err_msg = "%s != %s (%s for %s; set to %s, using %s)"

    def numeric_tester(self, calc_type, calc_value, data_type, used_locale):
        """Compare calculation against known value, if available"""
        try:
            set_locale = setlocale(LC_NUMERIC)
        except Error:
            set_locale = "<not able to determine>"
        # Boolean index: False/0 selects decimal_point, True/1 thousands_sep.
        known_value = known_numerics.get(used_locale,
                                    ('', ''))[data_type == 'thousands_sep']
        if known_value and calc_value:
            self.assertEqual(calc_value, known_value,
                                self.lc_numeric_err_msg % (
                                    calc_value, known_value,
                                    calc_type, data_type,
                                    set_locale, used_locale))
        # Always returns True so callers can mark the locale as "tested".
        return True

    @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
    def test_lc_numeric_nl_langinfo(self):
        # Test nl_langinfo against known values
        tested = False
        for loc in candidate_locales:
            try:
                setlocale(LC_NUMERIC, loc)
                setlocale(LC_CTYPE, loc)
            except Error:
                continue
            for li, lc in ((RADIXCHAR, "decimal_point"),
                            (THOUSEP, "thousands_sep")):
                if self.numeric_tester('nl_langinfo', nl_langinfo(li), lc, loc):
                    tested = True
        if not tested:
            self.skipTest('no suitable locales')

    def test_lc_numeric_localeconv(self):
        # Test localeconv against known values
        tested = False
        for loc in candidate_locales:
            try:
                setlocale(LC_NUMERIC, loc)
                setlocale(LC_CTYPE, loc)
            except Error:
                continue
            formatting = localeconv()
            for lc in ("decimal_point", "thousands_sep"):
                if self.numeric_tester('localeconv', formatting[lc], lc, loc):
                    tested = True
        if not tested:
            self.skipTest('no suitable locales')

    @unittest.skipUnless(nl_langinfo, "nl_langinfo is not available")
    def test_lc_numeric_basic(self):
        # Test nl_langinfo against localeconv: the two APIs must agree on the
        # decimal point and thousands separator for every testable locale.
        tested = False
        for loc in candidate_locales:
            try:
                setlocale(LC_NUMERIC, loc)
                setlocale(LC_CTYPE, loc)
            except Error:
                continue
            for li, lc in ((RADIXCHAR, "decimal_point"),
                            (THOUSEP, "thousands_sep")):
                nl_radixchar = nl_langinfo(li)
                li_radixchar = localeconv()[lc]
                try:
                    set_locale = setlocale(LC_NUMERIC)
                except Error:
                    set_locale = "<not able to determine>"
                self.assertEqual(nl_radixchar, li_radixchar,
                                "%s (nl_langinfo) != %s (localeconv) "
                                "(set to %s, using %s)" % (
                                    nl_radixchar, li_radixchar,
                                    loc, set_locale))
                tested = True
        if not tested:
            self.skipTest('no suitable locales')

    def test_float_parsing(self):
        # Bug #1391872: Test whether float parsing is okay on European
        # locales: float()/eval() must keep using '.' regardless of the
        # locale's decimal point.
        tested = False
        for loc in candidate_locales:
            try:
                setlocale(LC_NUMERIC, loc)
                setlocale(LC_CTYPE, loc)
            except Error:
                continue

            # Ignore buggy locale databases. (Mac OS 10.4 and some other BSDs)
            if loc == 'eu_ES' and localeconv()['decimal_point'] == "' ":
                continue

            self.assertEqual(int(eval('3.14') * 100), 314,
                             "using eval('3.14') failed for %s" % loc)
            self.assertEqual(int(float('3.14') * 100), 314,
                             "using float('3.14') failed for %s" % loc)
            # Conversely, the locale's own decimal point must NOT be accepted
            # by float() when it differs from '.'.
            if localeconv()['decimal_point'] != '.':
                self.assertRaises(ValueError, float,
                                  localeconv()['decimal_point'].join(['1', '23']))
            tested = True
        if not tested:
            self.skipTest('no suitable locales')

if __name__ == '__main__':
    unittest.main()
{ "pile_set_name": "Github" }
/*******************************************************************************
* KindEditor - WYSIWYG HTML Editor for Internet
* Copyright (C) 2006-2011 kindsoft.net
*
* @author Roddy <[email protected]>
* @site http://www.kindsoft.net/
* @licence http://www.kindsoft.net/license.php
*******************************************************************************/

// Image plugin: registers an "image" toolbar item that opens a dialog with
// two tabs — insert by remote URL, or upload a local file through a hidden
// iframe — and an edit/delete API for an already-selected image.
KindEditor.plugin('image', function(K) {
	// Editor-level options, all optional; K.undef supplies the default when
	// the editor instance does not define the setting.
	var self = this, name = 'image',
		allowImageUpload = K.undef(self.allowImageUpload, true),
		allowImageRemote = K.undef(self.allowImageRemote, true),
		formatUploadUrl = K.undef(self.formatUploadUrl, true),
		allowFileManager = K.undef(self.allowFileManager, false),
		uploadJson = K.undef(self.uploadJson, self.basePath + 'php/upload_json.php'),
		imageTabIndex = K.undef(self.imageTabIndex, 0),
		imgPath = self.pluginsPath + 'image/images/',
		extraParams = K.undef(self.extraFileUploadParams, {}),
		filePostName = K.undef(self.filePostName, 'imgFile'),
		fillDescAfterUploadImage = K.undef(self.fillDescAfterUploadImage, false),
		lang = self.lang(name + '.');

	// Builds and shows the image dialog.  options.clickFn(url, title, width,
	// height, border, align) is invoked when an image has been chosen.
	// Returns the dialog object.
	self.plugin.imageDialog = function(options) {
		var imageUrl = options.imageUrl,
			imageWidth = K.undef(options.imageWidth, ''),
			imageHeight = K.undef(options.imageHeight, ''),
			imageTitle = K.undef(options.imageTitle, ''),
			imageAlign = K.undef(options.imageAlign, ''),
			showRemote = K.undef(options.showRemote, true),
			showLocal = K.undef(options.showLocal, true),
			tabIndex = K.undef(options.tabIndex, 0),
			clickFn = options.clickFn;
		// Unique iframe name used as the upload form's target, so the page
		// itself does not navigate on submit.
		var target = 'kindeditor_upload_iframe_' + new Date().getTime();
		// Extra upload parameters become hidden form fields.
		var hiddenElements = [];
		for(var k in extraParams){
			hiddenElements.push('<input type="hidden" name="' + k + '" value="' + extraParams[k] + '" />');
		}
		// Dialog markup: a tab strip plus one panel per insertion mode.
		var html = [
			'<div style="padding:20px;">',
			//tabs
			'<div class="tabs"></div>',
			//remote image - start
			'<div class="tab1" style="display:none;">',
			//url
			'<div class="ke-dialog-row">',
			'<label for="remoteUrl" style="width:60px;">' + lang.remoteUrl + '</label>',
			'<input type="text" id="remoteUrl" class="ke-input-text" name="url" value="" style="width:200px;" /> &nbsp;',
			'<span class="ke-button-common ke-button-outer">',
			'<input type="button" class="ke-button-common ke-button" name="viewServer" value="' + lang.viewServer + '" />',
			'</span>',
			'</div>',
			//size
			'<div class="ke-dialog-row">',
			'<label for="remoteWidth" style="width:60px;">' + lang.size + '</label>',
			lang.width + ' <input type="text" id="remoteWidth" class="ke-input-text ke-input-number" name="width" value="" maxlength="4" /> ',
			lang.height + ' <input type="text" class="ke-input-text ke-input-number" name="height" value="" maxlength="4" /> ',
			'<img class="ke-refresh-btn" src="' + imgPath + 'refresh.png" width="16" height="16" alt="" style="cursor:pointer;" title="' + lang.resetSize + '" />',
			'</div>',
			//align
			'<div class="ke-dialog-row">',
			'<label style="width:60px;">' + lang.align + '</label>',
			'<input type="radio" name="align" class="ke-inline-block" value="" checked="checked" /> <img name="defaultImg" src="' + imgPath + 'align_top.gif" width="23" height="25" alt="" />',
			' <input type="radio" name="align" class="ke-inline-block" value="left" /> <img name="leftImg" src="' + imgPath + 'align_left.gif" width="23" height="25" alt="" />',
			' <input type="radio" name="align" class="ke-inline-block" value="right" /> <img name="rightImg" src="' + imgPath + 'align_right.gif" width="23" height="25" alt="" />',
			'</div>',
			//title
			'<div class="ke-dialog-row">',
			'<label for="remoteTitle" style="width:60px;">' + lang.imgTitle + '</label>',
			'<input type="text" id="remoteTitle" class="ke-input-text" name="title" value="" style="width:200px;" />',
			'</div>',
			'</div>',
			//remote image - end
			//local upload - start
			'<div class="tab2" style="display:none;">',
			'<iframe name="' + target + '" style="display:none;"></iframe>',
			'<form class="ke-upload-area ke-form" method="post" enctype="multipart/form-data" target="' + target + '" action="' + K.addParam(uploadJson, 'dir=image') + '">',
			//file
			'<div class="ke-dialog-row">',
			hiddenElements.join(''),
			'<label style="width:60px;">' + lang.localUrl + '</label>',
			'<input type="text" name="localUrl" class="ke-input-text" tabindex="-1" style="width:200px;" readonly="true" /> &nbsp;',
			'<input type="button" class="ke-upload-button" value="' + lang.upload + '" />',
			'</div>',
			'</form>',
			'</div>',
			//local upload - end
			'</div>'
		].join('');
		// Dialog size depends on which panels are shown.
		var dialogWidth = showLocal || allowFileManager ? 450 : 400,
			dialogHeight = showLocal && showRemote ? 300 : 250;
		var dialog = self.createDialog({
			name : name,
			width : dialogWidth,
			height : dialogHeight,
			title : self.lang(name),
			body : html,
			yesBtn : {
				name : self.lang('yes'),
				click : function(e) {
					// Bugfix: http://code.google.com/p/kindeditor/issues/detail?id=319
					if (dialog.isLoading) {
						return;
					}
					// insert local image
					if (showLocal && showRemote && tabs && tabs.selectedIndex === 1 || !showRemote) {
						if (uploadbutton.fileBox.val() == '') {
							alert(self.lang('pleaseSelectFile'));
							return;
						}
						dialog.showLoading(self.lang('uploadLoading'));
						uploadbutton.submit();
						localUrlBox.val('');
						return;
					}
					// insert remote image: validate URL and the (optional,
					// digits-only) width/height before calling back.
					var url = K.trim(urlBox.val()),
						width = widthBox.val(),
						height = heightBox.val(),
						title = titleBox.val(),
						align = '';
					alignBox.each(function() {
						if (this.checked) {
							align = this.value;
							return false;
						}
					});
					if (url == 'http://' || K.invalidUrl(url)) {
						alert(self.lang('invalidUrl'));
						urlBox[0].focus();
						return;
					}
					if (!/^\d*$/.test(width)) {
						alert(self.lang('invalidWidth'));
						widthBox[0].focus();
						return;
					}
					if (!/^\d*$/.test(height)) {
						alert(self.lang('invalidHeight'));
						heightBox[0].focus();
						return;
					}
					clickFn.call(self, url, title, width, height, 0, align);
				}
			},
			beforeRemove : function() {
				// Detach handlers bound below so the dialog can be GC'd.
				viewServerBtn.unbind();
				widthBox.unbind();
				heightBox.unbind();
				refreshBtn.unbind();
			}
		}),
		div = dialog.div;

		// Handles to the dialog's form controls.
		var urlBox = K('[name="url"]', div),
			localUrlBox = K('[name="localUrl"]', div),
			viewServerBtn = K('[name="viewServer"]', div),
			widthBox = K('.tab1 [name="width"]', div),
			heightBox = K('.tab1 [name="height"]', div),
			refreshBtn = K('.ke-refresh-btn', div),
			titleBox = K('.tab1 [name="title"]', div),
			alignBox = K('.tab1 [name="align"]', div);

		// Tab strip is only created when both panels are visible; otherwise
		// the single available panel is shown directly.
		var tabs;
		if (showRemote && showLocal) {
			tabs = K.tabs({
				src : K('.tabs', div),
				afterSelect : function(i) {}
			});
			tabs.add({
				title : lang.remoteImage,
				panel : K('.tab1', div)
			});
			tabs.add({
				title : lang.localImage,
				panel : K('.tab2', div)
			});
			tabs.select(tabIndex);
		} else if (showRemote) {
			K('.tab1', div).show();
		} else if (showLocal) {
			K('.tab2', div).show();
		}

		// Upload widget; the server is expected to answer with JSON carrying
		// error/url (and optionally title/width/height/border/align).
		var uploadbutton = K.uploadbutton({
			button : K('.ke-upload-button', div)[0],
			fieldName : filePostName,
			form : K('.ke-form', div),
			target : target,
			width: 60,
			afterUpload : function(data) {
				dialog.hideLoading();
				if (data.error === 0) {
					var url = data.url;
					if (formatUploadUrl) {
						url = K.formatUrl(url, 'absolute');
					}
					if (self.afterUpload) {
						self.afterUpload.call(self, url, data, name);
					}
					if (!fillDescAfterUploadImage) {
						// Insert immediately.
						clickFn.call(self, url, data.title, data.width, data.height, data.border, data.align);
					} else {
						// Switch to the remote tab pre-filled with the
						// uploaded URL so the user can add a description.
						K(".ke-dialog-row #remoteUrl", div).val(url);
						K(".ke-tabs-li", div)[0].click();
						K(".ke-refresh-btn", div).click();
					}
				} else {
					alert(data.message);
				}
			},
			afterError : function(html) {
				dialog.hideLoading();
				self.errorDialog(html);
			}
		});
		// Mirror the chosen file name into the read-only text box.
		uploadbutton.fileBox.change(function(e) {
			localUrlBox.val(uploadbutton.fileBox.val());
		});
		if (allowFileManager) {
			// "View server" opens the filemanager plugin to pick an existing file.
			viewServerBtn.click(function(e) {
				self.loadPlugin('filemanager', function() {
					self.plugin.filemanagerDialog({
						viewType : 'VIEW',
						dirName : 'image',
						clickFn : function(url, title) {
							if (self.dialogs.length > 1) {
								K('[name="url"]', div).val(url);
								if (self.afterSelectFile) {
									self.afterSelectFile.call(self, url);
								}
								self.hideDialog();
							}
						}
					});
				});
			});
		} else {
			viewServerBtn.hide();
		}
		// Natural size of the current URL's image, used to keep the aspect
		// ratio when the user edits one dimension.
		var originalWidth = 0, originalHeight = 0;
		function setSize(width, height) {
			widthBox.val(width);
			heightBox.val(height);
			originalWidth = width;
			originalHeight = height;
		}
		// "Reset size": load the image off-screen to measure its real size.
		refreshBtn.click(function(e) {
			var tempImg = K('<img src="' + urlBox.val() + '" />', document).css({
				position : 'absolute',
				visibility : 'hidden',
				top : 0,
				left : '-1000px'
			});
			tempImg.bind('load', function() {
				setSize(tempImg.width(), tempImg.height());
				tempImg.remove();
			});
			K(document.body).append(tempImg);
		});
		// Keep aspect ratio: editing one dimension recomputes the other.
		widthBox.change(function(e) {
			if (originalWidth > 0) {
				heightBox.val(Math.round(originalHeight / originalWidth * parseInt(this.value, 10)));
			}
		});
		heightBox.change(function(e) {
			if (originalHeight > 0) {
				widthBox.val(Math.round(originalWidth / originalHeight * parseInt(this.value, 10)));
			}
		});
		// Seed the remote tab with the current image's attributes.
		urlBox.val(options.imageUrl);
		setSize(options.imageWidth, options.imageHeight);
		titleBox.val(options.imageTitle);
		alignBox.each(function() {
			if (this.value === options.imageAlign) {
				this.checked = true;
				return false;
			}
		});
		if (showRemote && tabIndex === 0) {
			urlBox[0].focus();
			urlBox[0].select();
		}
		return dialog;
	};
	self.plugin.image = {
		// Open the dialog; when an image is currently selected its attributes
		// pre-fill the dialog and the callback updates it in place, otherwise
		// a new image is inserted.
		edit : function() {
			var img = self.plugin.getSelectedImage();
			self.plugin.imageDialog({
				imageUrl : img ? img.attr('data-ke-src') : 'http://',
				imageWidth : img ? img.width() : '',
				imageHeight : img ? img.height() : '',
				imageTitle : img ? img.attr('title') : '',
				imageAlign : img ? img.attr('align') : '',
				showRemote : allowImageRemote,
				showLocal : allowImageUpload,
				tabIndex: img ? 0 : imageTabIndex,
				clickFn : function(url, title, width, height, border, align) {
					if (img) {
						img.attr('src', url);
						img.attr('data-ke-src', url);
						img.attr('width', width);
						img.attr('height', height);
						img.attr('title', title);
						img.attr('align', align);
						img.attr('alt', title);
					} else {
						self.exec('insertimage', url, title, width, height, border, align);
					}
					// Bugfix: [Firefox] after uploading an image the loading
					// style would persist, so hideDialog must be deferred.
					setTimeout(function() {
						self.hideDialog().focus();
					}, 0);
				}
			});
		},
		'delete' : function() {
			var target = self.plugin.getSelectedImage();
			// If the image is wrapped in a link, remove the link as well.
			if (target.parent().name == 'a') {
				target = target.parent();
			}
			target.remove();
			// [IE] clicking the image button right after deleting an image
			// raised an error; re-adding the bookmark avoids it.
			self.addBookmark();
		}
	};
	self.clickToolbar(name, self.plugin.image.edit);
});
{ "pile_set_name": "Github" }
/// Copyright (c) 2012 Ecma International. All rights reserved. /** * @path ch15/15.4/15.4.4/15.4.4.17/15.4.4.17-7-9.js * @description Array.prototype.some - modifications to length don't change number of iterations */ function testcase() { var called = 0; function callbackfn(val, idx, obj) { called++; return val > 10; } var obj = { 0: 9, 2: 12, length: 3 }; Object.defineProperty(obj, "1", { get: function () { obj.length = 2; return 8; }, configurable: true }); return Array.prototype.some.call(obj, callbackfn) && called === 3; } runTestCase(testcase);
{ "pile_set_name": "Github" }
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*- */

/*
 * This file contains prototypes for the public SSL functions.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef __sslt_h_
#define __sslt_h_

#include "certt.h"
#include "keyhi.h"
#include "prtypes.h"
#include "secitem.h"

/* Handshake message types; the numeric values are the on-the-wire
 * HandshakeType codepoints. */
typedef enum {
    ssl_hs_hello_request = 0,
    ssl_hs_client_hello = 1,
    ssl_hs_server_hello = 2,
    ssl_hs_hello_verify_request = 3,
    ssl_hs_new_session_ticket = 4,
    ssl_hs_end_of_early_data = 5,
    ssl_hs_hello_retry_request = 6,
    ssl_hs_encrypted_extensions = 8,
    ssl_hs_certificate = 11,
    ssl_hs_server_key_exchange = 12,
    ssl_hs_certificate_request = 13,
    ssl_hs_server_hello_done = 14,
    ssl_hs_certificate_verify = 15,
    ssl_hs_client_key_exchange = 16,
    ssl_hs_finished = 20,
    ssl_hs_certificate_status = 22,
    ssl_hs_key_update = 24,
    ssl_hs_next_proto = 67,
    ssl_hs_message_hash = 254, /* Not a real message. */
} SSLHandshakeType;

/* Record-layer content types. */
typedef enum {
    ssl_ct_change_cipher_spec = 20,
    ssl_ct_alert = 21,
    ssl_ct_handshake = 22,
    ssl_ct_application_data = 23,
    ssl_ct_ack = 25
} SSLContentType;

/* Direction of a traffic secret: reading (decrypting) or writing
 * (encrypting). */
typedef enum {
    ssl_secret_read = 1,
    ssl_secret_write = 2,
} SSLSecretDirection;

/* Session-cache statistics counters. */
typedef struct SSL3StatisticsStr {
    /* statistics from ssl3_SendClientHello (sch) */
    long sch_sid_cache_hits;
    long sch_sid_cache_misses;
    long sch_sid_cache_not_ok;

    /* statistics from ssl3_HandleServerHello (hsh) */
    long hsh_sid_cache_hits;
    long hsh_sid_cache_misses;
    long hsh_sid_cache_not_ok;

    /* statistics from ssl3_HandleClientHello (hch) */
    long hch_sid_cache_hits;
    long hch_sid_cache_misses;
    long hch_sid_cache_not_ok;

    /* statistics related to stateless resume */
    long sch_sid_stateless_resumes;
    long hsh_sid_stateless_resumes;
    long hch_sid_stateless_resumes;
    long hch_sid_ticket_parse_failures;
} SSL3Statistics;

/* Key Exchange algorithm values */
typedef enum {
    ssl_kea_null = 0,
    ssl_kea_rsa = 1,
    ssl_kea_dh = 2,
    ssl_kea_fortezza = 3, /* deprecated, now unused */
    ssl_kea_ecdh = 4,
    ssl_kea_ecdh_psk = 5,
    ssl_kea_dh_psk = 6,
    ssl_kea_tls13_any = 7,
    ssl_kea_size /* number of ssl_kea_ algorithms */
} SSLKEAType;

/* The following defines are for backwards compatibility.
** They will be removed in a forthcoming release to reduce namespace pollution.
** programs that use the kt_ symbols should convert to the ssl_kt_ symbols
** soon. */
#define kt_null ssl_kea_null
#define kt_rsa ssl_kea_rsa
#define kt_dh ssl_kea_dh
#define kt_fortezza ssl_kea_fortezza /* deprecated, now unused */
#define kt_ecdh ssl_kea_ecdh
#define kt_kea_size ssl_kea_size

/* Values of this enum match the SignatureAlgorithm enum from
 * https://tools.ietf.org/html/rfc5246#section-7.4.1.4.1 */
typedef enum {
    ssl_sign_null = 0, /* "anonymous" in TLS */
    ssl_sign_rsa = 1,
    ssl_sign_dsa = 2,
    ssl_sign_ecdsa = 3
} SSLSignType;

/* Values of this enum match the HashAlgorithm enum from
 * https://tools.ietf.org/html/rfc5246#section-7.4.1.4.1 */
typedef enum {
    /* ssl_hash_none is used internally to mean the pre-1.2 combination of MD5
     * and SHA1. The other values are only used in TLS 1.2. */
    ssl_hash_none = 0,
    ssl_hash_md5 = 1,
    ssl_hash_sha1 = 2,
    ssl_hash_sha224 = 3,
    ssl_hash_sha256 = 4,
    ssl_hash_sha384 = 5,
    ssl_hash_sha512 = 6
} SSLHashType;

/* Deprecated */
typedef struct SSLSignatureAndHashAlgStr {
    SSLHashType hashAlg;
    SSLSignType sigAlg;
} SSLSignatureAndHashAlg;

/* Signature scheme identifiers; the hex values are the TLS codepoints. */
typedef enum {
    ssl_sig_none = 0,
    ssl_sig_rsa_pkcs1_sha1 = 0x0201,
    ssl_sig_rsa_pkcs1_sha256 = 0x0401,
    ssl_sig_rsa_pkcs1_sha384 = 0x0501,
    ssl_sig_rsa_pkcs1_sha512 = 0x0601,
    /* For ECDSA, the pairing of the hash with a specific curve is only enforced
     * in TLS 1.3; in TLS 1.2 any curve can be used with each of these. */
    ssl_sig_ecdsa_secp256r1_sha256 = 0x0403,
    ssl_sig_ecdsa_secp384r1_sha384 = 0x0503,
    ssl_sig_ecdsa_secp521r1_sha512 = 0x0603,
    ssl_sig_rsa_pss_rsae_sha256 = 0x0804,
    ssl_sig_rsa_pss_rsae_sha384 = 0x0805,
    ssl_sig_rsa_pss_rsae_sha512 = 0x0806,
    ssl_sig_ed25519 = 0x0807,
    ssl_sig_ed448 = 0x0808,
    ssl_sig_rsa_pss_pss_sha256 = 0x0809,
    ssl_sig_rsa_pss_pss_sha384 = 0x080a,
    ssl_sig_rsa_pss_pss_sha512 = 0x080b,
    ssl_sig_dsa_sha1 = 0x0202,
    ssl_sig_dsa_sha256 = 0x0402,
    ssl_sig_dsa_sha384 = 0x0502,
    ssl_sig_dsa_sha512 = 0x0602,
    ssl_sig_ecdsa_sha1 = 0x0203,

    /* The following value (which can't be used in the protocol), represents
     * the RSA signature using SHA-1 and MD5 that is used in TLS 1.0 and 1.1.
     * This is reported as a signature scheme when TLS 1.0 or 1.1 is used.
     * This should not be passed to SSL_SignatureSchemePrefSet(); this
     * signature scheme is always used and cannot be disabled. */
    ssl_sig_rsa_pkcs1_sha1md5 = 0x10101,
} SSLSignatureScheme;

/* Deprecated names maintained only for source compatibility. */
#define ssl_sig_rsa_pss_sha256 ssl_sig_rsa_pss_rsae_sha256
#define ssl_sig_rsa_pss_sha384 ssl_sig_rsa_pss_rsae_sha384
#define ssl_sig_rsa_pss_sha512 ssl_sig_rsa_pss_rsae_sha512

/*
** SSLAuthType describes the type of key that is used to authenticate a
** connection. That is, the type of key in the end-entity certificate.
*/
typedef enum {
    ssl_auth_null = 0,
    ssl_auth_rsa_decrypt = 1, /* RSA key exchange. */
    ssl_auth_dsa = 2,
    ssl_auth_kea = 3, /* unused */
    ssl_auth_ecdsa = 4,
    ssl_auth_ecdh_rsa = 5,   /* ECDH cert with an RSA signature. */
    ssl_auth_ecdh_ecdsa = 6, /* ECDH cert with an ECDSA signature. */
    ssl_auth_rsa_sign = 7,   /* RSA signing with an rsaEncryption key. */
    ssl_auth_rsa_pss = 8,    /* RSA signing with a PSS key. */
    ssl_auth_psk = 9,
    ssl_auth_tls13_any = 10,
    ssl_auth_size /* number of authentication types */
} SSLAuthType;

/* This is defined for backward compatibility reasons */
#define ssl_auth_rsa ssl_auth_rsa_decrypt

/* Bulk (symmetric) cipher algorithms. */
typedef enum {
    ssl_calg_null = 0,
    ssl_calg_rc4 = 1,
    ssl_calg_rc2 = 2,
    ssl_calg_des = 3,
    ssl_calg_3des = 4,
    ssl_calg_idea = 5,
    ssl_calg_fortezza = 6, /* deprecated, now unused */
    ssl_calg_aes = 7,
    ssl_calg_camellia = 8,
    ssl_calg_seed = 9,
    ssl_calg_aes_gcm = 10,
    ssl_calg_chacha20 = 11
} SSLCipherAlgorithm;

/* Record-protection MAC algorithms. */
typedef enum {
    ssl_mac_null = 0,
    ssl_mac_md5 = 1,
    ssl_mac_sha = 2,
    ssl_hmac_md5 = 3, /* TLS HMAC version of mac_md5 */
    ssl_hmac_sha = 4, /* TLS HMAC version of mac_sha */
    ssl_hmac_sha256 = 5,
    ssl_mac_aead = 6,
    ssl_hmac_sha384 = 7
} SSLMACAlgorithm;

typedef enum {
    ssl_compression_null = 0,
    ssl_compression_deflate = 1 /* RFC 3749 */
} SSLCompressionMethod;

/* Named groups for (EC)DHE key exchange; values are the TLS codepoints. */
typedef enum {
    ssl_grp_ec_sect163k1 = 1,
    ssl_grp_ec_sect163r1 = 2,
    ssl_grp_ec_sect163r2 = 3,
    ssl_grp_ec_sect193r1 = 4,
    ssl_grp_ec_sect193r2 = 5,
    ssl_grp_ec_sect233k1 = 6,
    ssl_grp_ec_sect233r1 = 7,
    ssl_grp_ec_sect239k1 = 8,
    ssl_grp_ec_sect283k1 = 9,
    ssl_grp_ec_sect283r1 = 10,
    ssl_grp_ec_sect409k1 = 11,
    ssl_grp_ec_sect409r1 = 12,
    ssl_grp_ec_sect571k1 = 13,
    ssl_grp_ec_sect571r1 = 14,
    ssl_grp_ec_secp160k1 = 15,
    ssl_grp_ec_secp160r1 = 16,
    ssl_grp_ec_secp160r2 = 17,
    ssl_grp_ec_secp192k1 = 18,
    ssl_grp_ec_secp192r1 = 19,
    ssl_grp_ec_secp224k1 = 20,
    ssl_grp_ec_secp224r1 = 21,
    ssl_grp_ec_secp256k1 = 22,
    ssl_grp_ec_secp256r1 = 23,
    ssl_grp_ec_secp384r1 = 24,
    ssl_grp_ec_secp521r1 = 25,
    ssl_grp_ec_curve25519 = 29, /* RFC4492 */
    ssl_grp_ffdhe_2048 = 256,   /* RFC7919 */
    ssl_grp_ffdhe_3072 = 257,
    ssl_grp_ffdhe_4096 = 258,
    ssl_grp_ffdhe_6144 = 259,
    ssl_grp_ffdhe_8192 = 260,
    ssl_grp_none = 65537,        /* special value */
    ssl_grp_ffdhe_custom = 65538 /* special value */
} SSLNamedGroup;

/* Extra data attached to a configured server certificate. */
typedef struct SSLExtraServerCertDataStr {
    /* When this struct is passed to SSL_ConfigServerCert, and authType is set
     * to a value other than ssl_auth_null, this limits the use of the key to
     * the type defined; otherwise, the certificate is configured for all
     * compatible types. */
    SSLAuthType authType;
    /* The remainder of the certificate chain. */
    const CERTCertificateList* certChain;
    /* A set of one or more stapled OCSP responses for the certificate. This is
     * used to generate the OCSP stapling answer provided by the server. */
    const SECItemArray* stapledOCSPResponses;
    /* A serialized sign_certificate_timestamp extension, used to answer
     * requests from clients for this data. */
    const SECItem* signedCertTimestamps;

    /* Delegated credentials.
     *
     * A serialized delegated credential (DC) to use for authentication to peers
     * who indicate support for this extension (ietf-drafts-tls-subcerts). DCs
     * are used opportunistically if (1) the client indicates support, (2) TLS
     * 1.3 or higher is negotiated, and (3) the selected certificate is
     * configured with a DC.
     *
     * Note that it's the caller's responsibility to ensure that the DC is
     * well-formed. */
    const SECItem* delegCred;

    /* The secret key corresponding to the |delegCred|.
     *
     * Note that it's the caller's responsibility to ensure that this matches
     * the DC public key. */
    const SECKEYPrivateKey* delegCredPrivKey;
} SSLExtraServerCertData;

/* Information about an established connection, filled in by
 * SSL_GetChannelInfo. */
typedef struct SSLChannelInfoStr {
    /* On return, SSL_GetChannelInfo sets |length| to the smaller of
     * the |len| argument and the length of the struct used by NSS.
     * Callers must ensure the application uses a version of NSS that
     * isn't older than the version used at compile time. */
    PRUint32 length;
    PRUint16 protocolVersion;
    PRUint16 cipherSuite;

    /* The strength of the key used to authenticate the peer.  Before
     * interpreting this value, check authType, signatureScheme, and
     * peerDelegCred, to determine the type of the key and how it was used.
     *
     * Typically, this is the length of the key from the peer's end-entity
     * certificate.  If delegated credentials are used (i.e., peerDelegCred is
     * PR_TRUE), then this is the strength of the delegated credential key. */
    PRUint32 authKeyBits;

    /* key exchange algorithm info */
    PRUint32 keaKeyBits;

    /* session info */
    PRUint32 creationTime;    /* seconds since Jan 1, 1970 */
    PRUint32 lastAccessTime;  /* seconds since Jan 1, 1970 */
    PRUint32 expirationTime;  /* seconds since Jan 1, 1970 */
    PRUint32 sessionIDLength; /* up to 32 */
    PRUint8 sessionID[32];

    /* The following fields are added in NSS 3.12.5. */

    /* compression method info */
    const char* compressionMethodName;
    SSLCompressionMethod compressionMethod;

    /* The following fields are added in NSS 3.21.
     * This field only has meaning in TLS < 1.3 and will be set to
     * PR_FALSE in TLS 1.3. */
    PRBool extendedMasterSecretUsed;

    /* The following fields were added in NSS 3.25.
     * This field only has meaning in TLS >= 1.3, and indicates on the
     * client side that the server accepted early (0-RTT) data. */
    PRBool earlyDataAccepted;

    /* The following fields were added in NSS 3.28. */
    /* These fields have the same meaning as in SSLCipherSuiteInfo. */
    SSLKEAType keaType;
    SSLNamedGroup keaGroup;
    SSLCipherAlgorithm symCipher;
    SSLMACAlgorithm macAlgorithm;
    SSLAuthType authType;
    SSLSignatureScheme signatureScheme;

    /* The following fields were added in NSS 3.34. */
    /* When the session was resumed this holds the key exchange group of the
     * original handshake. */
    SSLNamedGroup originalKeaGroup;
    /* This field is PR_TRUE when the session is resumed and PR_FALSE
     * otherwise. */
    PRBool resumed;

    /* Indicates whether the peer used a delegated credential (DC) for
     * authentication. */
    PRBool peerDelegCred;

    /* When adding new fields to this structure, please document the
     * NSS version in which they were added. */
} SSLChannelInfo;

/* Preliminary channel info */
#define ssl_preinfo_version (1U << 0)
#define ssl_preinfo_cipher_suite (1U << 1)
#define ssl_preinfo_0rtt_cipher_suite (1U << 2)
/* ssl_preinfo_peer_auth covers peerDelegCred, authKeyBits, and scheme. Not
 * included in ssl_preinfo_all as it is client-only. */
#define ssl_preinfo_peer_auth (1U << 3)
/* ssl_preinfo_all doesn't contain ssl_preinfo_0rtt_cipher_suite because that
 * field is only set if 0-RTT is sent (client) or accepted (server). */
#define ssl_preinfo_all (ssl_preinfo_version | ssl_preinfo_cipher_suite)

/* Information available before the handshake completes, filled in by
 * SSL_GetPreliminaryChannelInfo; only fields flagged in |valuesSet| are
 * valid. */
typedef struct SSLPreliminaryChannelInfoStr {
    /* On return, SSL_GetPreliminaryChannelInfo sets |length| to the smaller of
     * the |len| argument and the length of the struct used by NSS.
     * Callers must ensure the application uses a version of NSS that
     * isn't older than the version used at compile time. */
    PRUint32 length;
    /* A bitfield over SSLPreliminaryValueSet that describes which
     * preliminary values are set (see ssl_preinfo_*). */
    PRUint32 valuesSet;
    /* Protocol version: test (valuesSet & ssl_preinfo_version) */
    PRUint16 protocolVersion;
    /* Cipher suite: test (valuesSet & ssl_preinfo_cipher_suite) */
    PRUint16 cipherSuite;

    /* The following fields were added in NSS 3.29. */
    /* |canSendEarlyData| is true when a 0-RTT is enabled. This can only be
     * true after sending the ClientHello and before the handshake completes. */
    PRBool canSendEarlyData;

    /* The following fields were added in NSS 3.31. */
    /* The number of early data octets that a client is permitted to send on
     * this connection. The value will be zero if the connection was not
     * resumed or early data is not permitted. For a client, this value only
     * has meaning if |canSendEarlyData| is true. For a server, this indicates
     * the value that was advertised in the session ticket that was used to
     * resume this session. */
    PRUint32 maxEarlyDataSize;

    /* The following fields were added in NSS 3.43. */
    /* This reports the cipher suite used for 0-RTT if it sent or accepted. For
     * a client, this is set earlier than |cipherSuite|, and will match that
     * value if 0-RTT is accepted by the server. The server only sets this
     * after accepting 0-RTT, so this will contain the same value. */
    PRUint16 zeroRttCipherSuite;

    /* The following fields were added in NSS 3.48. */
    /* These fields contain information about the key that will be used in
     * the CertificateVerify message. If Delegated Credentials are being used,
     * this is the DC-contained SPKI, else the EE-cert SPKI. These fields are
     * valid only after the Certificate message is handled. This can be determined
     * by checking the valuesSet field against |ssl_preinfo_peer_auth|. */
    PRBool peerDelegCred;
    PRUint32 authKeyBits;
    SSLSignatureScheme signatureScheme;

    /* When adding new fields to this structure, please document the
     * NSS version in which they were added. */
} SSLPreliminaryChannelInfo;

/* Description of a single cipher suite, filled in by SSL_GetCipherSuiteInfo. */
typedef struct SSLCipherSuiteInfoStr {
    /* On return, SSL_GetCipherSuiteInfo sets |length| to the smaller of
     * the |len| argument and the length of the struct used by NSS.
     * Callers must ensure the application uses a version of NSS that
     * isn't older than the version used at compile time. */
    PRUint16 length;
    PRUint16 cipherSuite;

    /* Cipher Suite Name */
    const char* cipherSuiteName;

    /* server authentication info */
    const char* authAlgorithmName;
    SSLAuthType authAlgorithm; /* deprecated, use |authType| */

    /* key exchange algorithm info */
    const char* keaTypeName;
    SSLKEAType keaType;

    /* symmetric encryption info */
    const char* symCipherName;
    SSLCipherAlgorithm symCipher;
    PRUint16 symKeyBits;
    PRUint16 symKeySpace;
    PRUint16 effectiveKeyBits;

    /* MAC info */
    /* AEAD ciphers don't have a MAC. For an AEAD cipher, macAlgorithmName
     * is "AEAD", macAlgorithm is ssl_mac_aead, and macBits is the length in
     * bits of the authentication tag. */
    const char* macAlgorithmName;
    SSLMACAlgorithm macAlgorithm;
    PRUint16 macBits;

    PRUintn isFIPS : 1;
    PRUintn isExportable : 1; /* deprecated, don't use */
    PRUintn nonStandard : 1;
    PRUintn reservedBits : 29;

    /* The following fields were added in NSS 3.24. */
    /* This reports the correct authentication type for the cipher suite, use
     * this instead of |authAlgorithm|. */
    SSLAuthType authType;

    /* The following fields were added in NSS 3.43. */
    /* This reports the hash function used in the TLS KDF, or HKDF for TLS 1.3.
     * For suites defined for versions of TLS earlier than TLS 1.2, this reports
     * ssl_hash_none. */
    SSLHashType kdfHash;

    /* When adding new fields to this structure, please document the
     * NSS version in which they were added. */
} SSLCipherSuiteInfo;

/* Transport variant: TLS over a stream, or DTLS over datagrams. */
typedef enum {
    ssl_variant_stream = 0,
    ssl_variant_datagram = 1
} SSLProtocolVariant;

/* Inclusive range of enabled protocol versions. */
typedef struct SSLVersionRangeStr {
    PRUint16 min;
    PRUint16 max;
} SSLVersionRange;

typedef enum {
    SSL_sni_host_name = 0,
    SSL_sni_type_total
} SSLSniNameType;

/* Supported extensions. */
/* Update SSL_MAX_EXTENSIONS whenever a new extension type is added. */
typedef enum {
    ssl_server_name_xtn = 0,
    ssl_cert_status_xtn = 5,
    ssl_supported_groups_xtn = 10,
    ssl_ec_point_formats_xtn = 11,
    ssl_signature_algorithms_xtn = 13,
    ssl_use_srtp_xtn = 14,
    ssl_app_layer_protocol_xtn = 16,
    /* signed_certificate_timestamp extension, RFC 6962 */
    ssl_signed_cert_timestamp_xtn = 18,
    ssl_padding_xtn = 21,
    ssl_extended_master_secret_xtn = 23,
    ssl_record_size_limit_xtn = 28,
    ssl_session_ticket_xtn = 35,
    /* 40 was used in draft versions of TLS 1.3; it is now reserved. */
    ssl_tls13_pre_shared_key_xtn = 41,
    ssl_tls13_early_data_xtn = 42,
    ssl_tls13_supported_versions_xtn = 43,
    ssl_tls13_cookie_xtn = 44,
    ssl_tls13_psk_key_exchange_modes_xtn = 45,
    ssl_tls13_ticket_early_data_info_xtn = 46, /* Deprecated. */
    ssl_tls13_certificate_authorities_xtn = 47,
    ssl_tls13_post_handshake_auth_xtn = 49,
    ssl_signature_algorithms_cert_xtn = 50,
    ssl_tls13_key_share_xtn = 51,
    ssl_next_proto_nego_xtn = 13172, /* Deprecated. */
    ssl_renegotiation_info_xtn = 0xff01,
    ssl_delegated_credentials_xtn = 0xff02,
    ssl_tls13_short_header_xtn = 0xff03, /* Deprecated. */
    ssl_tls13_encrypted_sni_xtn = 0xffce,
} SSLExtensionType;

/* This is the old name for the supported_groups extensions. */
#define ssl_elliptic_curves_xtn ssl_supported_groups_xtn

/* SSL_MAX_EXTENSIONS includes the maximum number of extensions that are
 * supported for any single message type. That is, a ClientHello; ServerHello
 * and TLS 1.3 NewSessionTicket and HelloRetryRequest extensions have fewer. */
#define SSL_MAX_EXTENSIONS 21

/* Deprecated */
typedef enum {
    ssl_dhe_group_none = 0,
    ssl_ff_dhe_2048_group = 1,
    ssl_ff_dhe_3072_group = 2,
    ssl_ff_dhe_4096_group = 3,
    ssl_ff_dhe_6144_group = 4,
    ssl_ff_dhe_8192_group = 5,
    ssl_dhe_group_max
} SSLDHEGroupType;

#endif /* __sslt_h_ */
{ "pile_set_name": "Github" }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.rollup.job;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.search.aggregations.Aggregation;
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.xpack.core.rollup.RollupField;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
import org.elasticsearch.xpack.core.rollup.job.RollupIndexerJobStats;
import org.elasticsearch.xpack.rollup.Rollup;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.stream.Collectors;

/**
 * These utilities are used to convert agg responses into a set of rollup documents.
 * They are extracted out as static classes mainly to make testing easier.
 */
class IndexerUtils {
    private static final Logger logger = LogManager.getLogger(IndexerUtils.class);

    /**
     * The only entry point in this class. You hand this method an aggregation and an index
     * pattern, and it returns a list of rolled documents that you can index
     *
     * @param agg          The aggregation response that you want to rollup
     * @param rollupIndex  The index that holds rollups for this job
     * @param stats        The stats accumulator for this job's task
     * @param groupConfig  The grouping configuration for the job
     * @param jobId        The ID for the job
     * @return A list of rolled documents derived from the response
     */
    static List<IndexRequest> processBuckets(CompositeAggregation agg, String rollupIndex, RollupIndexerJobStats stats,
                                             GroupConfig groupConfig, String jobId) {
        logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]");
        return agg.getBuckets().stream().map(b ->{
            stats.incrementNumDocuments(b.getDocCount());

            // Put the composite keys into a treemap so that the key iteration order is consistent
            // TODO would be nice to avoid allocating this treemap in the future
            TreeMap<String, Object> keys = new TreeMap<>(b.getKey());
            List<Aggregation> metrics = b.getAggregations().asList();

            // NOTE: the sequence of idGenerator.add(...) calls (keys first, then the job id)
            // determines the final document ID; do not reorder.
            RollupIDGenerator idGenerator = new RollupIDGenerator(jobId);
            Map<String, Object> doc = new HashMap<>(keys.size() + metrics.size());

            processKeys(keys, doc, b.getDocCount(), groupConfig, idGenerator);
            idGenerator.add(jobId);
            processMetrics(metrics, doc);

            // Tag every rollup document with the rollup version and the owning job's id.
            doc.put(RollupField.ROLLUP_META + "." + RollupField.VERSION_FIELD, Rollup.CURRENT_ROLLUP_VERSION);
            doc.put(RollupField.ROLLUP_META + "." + RollupField.ID.getPreferredName(), jobId);

            IndexRequest request = new IndexRequest(rollupIndex).id(idGenerator.getID());
            request.source(doc);
            return request;
        }).collect(Collectors.toList());
    }

    /**
     * Flattens the composite bucket keys into rollup document fields and folds each key value
     * into the document-ID generator. Key names are dispatched on their agg-type suffix
     * (date_histogram / histogram / terms).
     *
     * @param keys        composite bucket key -> value (iterated in sorted order; see caller)
     * @param doc         destination document, mutated in place
     * @param count       the bucket's doc count, duplicated per key for easier searching
     * @param groupConfig grouping configuration, used for interval/timezone metadata
     * @param idGenerator accumulator for the rollup document ID
     */
    private static void processKeys(Map<String, Object> keys, Map<String, Object> doc, long count,
                                    GroupConfig groupConfig, RollupIDGenerator idGenerator) {
        keys.forEach((k, v) -> {
            // Also add a doc count for each key. This will duplicate data, but makes search easier later
            doc.put(k + "." + RollupField.COUNT_FIELD, count);

            if (k.endsWith("." + DateHistogramAggregationBuilder.NAME)) {
                // Date histogram keys are never null (every bucket has a timestamp).
                assert v != null;
                doc.put(k + "." + RollupField.TIMESTAMP, v);
                doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHistogram().getInterval());
                doc.put(k + "." + DateHistogramGroupConfig.TIME_ZONE, groupConfig.getDateHistogram().getTimeZone());
                idGenerator.add((Long)v);
            } else if (k.endsWith("." + HistogramAggregationBuilder.NAME)) {
                doc.put(k + "." + RollupField.VALUE, v);
                doc.put(k + "." + RollupField.INTERVAL, groupConfig.getHistogram().getInterval());
                // Histogram keys may be null (missing bucket); record that explicitly in the ID.
                if (v == null) {
                    idGenerator.addNull();
                } else {
                    idGenerator.add((Double) v);
                }
            } else if (k.endsWith("." + TermsAggregationBuilder.NAME)) {
                doc.put(k + "." + RollupField.VALUE, v);
                // Terms keys can be null, string, long or double depending on the source field.
                if (v == null) {
                    idGenerator.addNull();
                } else if (v instanceof String) {
                    idGenerator.add((String)v);
                } else if (v instanceof Long) {
                    idGenerator.add((Long)v);
                } else if (v instanceof Double) {
                    idGenerator.add((Double)v);
                } else {
                    throw new RuntimeException("Encountered value of type [" + v.getClass()
                        + "], which was unable to be processed.");
                }
            } else {
                throw new ElasticsearchException("Could not identify key in agg [" + k + "]");
            }
        });
    }

    /**
     * Copies numeric single-value metric aggregations into the rollup document.
     * Infinite values are dropped; zero-valued *.count metrics are collected and their
     * corresponding value fields removed afterwards, so empty buckets don't pollute rollups.
     *
     * @param metrics metric aggregations from the bucket
     * @param doc     destination document, mutated in place
     */
    private static void processMetrics(List<Aggregation> metrics, Map<String, Object> doc) {
        List<String> emptyCounts = new ArrayList<>();
        metrics.forEach(m -> {
            if (m instanceof InternalNumericMetricsAggregation.SingleValue) {
                Double value = ((InternalNumericMetricsAggregation.SingleValue) m).value();
                if (value.isInfinite() == false) {
                    if (m.getName().endsWith(RollupField.COUNT_FIELD) && value == 0) {
                        emptyCounts.add(m.getName());
                    } else {
                        doc.put(m.getName(), value);
                    }
                }
            } else {
                throw new ElasticsearchException("Aggregation [" + m.getName()
                    + "] is of non-supported type [" + m.getType() + "]");
            }
        });

        // Go back through and remove all empty counts
        emptyCounts.forEach(m -> doc.remove(m.replace(RollupField.COUNT_FIELD, RollupField.VALUE)));
    }
}
{ "pile_set_name": "Github" }
<resources> <!-- Default screen margins, per the Android Design guidelines. --> <dimen name="activity_horizontal_margin">16dp</dimen> <dimen name="activity_vertical_margin">16dp</dimen> <dimen name="activity_main_btn_size_top">42dp</dimen> <dimen name="activity_main_btn_size_botton">42dp</dimen> <dimen name="activity_main_btn_textsize">16dp</dimen> <dimen name="activity_main_btn_fairy_padding">6dp</dimen> <dimen name="activity_main_btn_tools_padding">8dp</dimen> <dimen name="activity_text_size">14sp</dimen> <dimen name="view_text_title">20dp</dimen> <dimen name="view_padding">8dp</dimen> <dimen name="fleetview_text_statusarea">14dp</dimen> <dimen name="fleetview_text_moralearea">40dp</dimen> <dimen name="fleetview_text_tbutton">15dp</dimen> <dimen name="fleetview_text_rbutton">20dp</dimen> <dimen name="fleetview_text_ship_large">14dp</dimen> <dimen name="fleetview_text_ship_medium">12dp</dimen> <dimen name="fleetview_text_ship_small">10dp</dimen> <dimen name="questview_tag_size">36dp</dimen> <dimen name="questview_desc_height">32dp</dimen> <dimen name="questview_text_top">16dp</dimen> <dimen name="questview_button_top">18dp</dimen> <dimen name="questview_text_large">14dp</dimen> <dimen name="questview_text_medium">11dp</dimen> <dimen name="questview_text_small">10dp</dimen> <dimen name="battleview_text_label_large">18dp</dimen> <dimen name="battleview_text_label_medium">14dp</dimen> <dimen name="battleview_text_label_small">10dp</dimen> <dimen name="battleview_text_label_width">60dp</dimen> <dimen name="battleview_text_n_large">13dp</dimen> <dimen name="battleview_text_n_medium">11dp</dimen> <dimen name="battleview_text_n_small">10dp</dimen> <dimen name="battleview_text_c_large">10dp</dimen> <dimen name="battleview_text_c_medium">9dp</dimen> <dimen name="battleview_text_c_small">8dp</dimen> <dimen name="battleview_text_c_xsmall">7dp</dimen> <dimen name="battleview_hp_height">4dp</dimen> <dimen name="battleview_padding">8dp</dimen> <dimen 
name="battleview_nodeinfo_minwidth">72dp</dimen> <dimen name="battleview_nodeinfo_margin">20dp</dimen> <dimen name="item_popup_icon_size">14dp</dimen> <dimen name="item_popup_text_size_large">11dp</dimen> <dimen name="item_popup_text_size_small">10dp</dimen> <dimen name="item_popup_xmargin">72dp</dimen> <dimen name="button_size_xsmall">32dp</dimen> <dimen name="button_size_small">48dp</dimen> <dimen name="button_size_normal">72dp</dimen> <dimen name="button_size_large">96dp</dimen> <dimen name="button_size_xlarge">128dp</dimen> <dimen name="button_select_size">48dp</dimen> <dimen name="text_description_medium">15dp</dimen> <dimen name="text_description_small">12dp</dimen> <dimen name="excheckview_text_tbutton">15dp</dimen> <dimen name="excheckview_text_statusarea">14dp</dimen> <dimen name="excheckview_text_cbutton">16dp</dimen> <dimen name="excheckview_popup_width">240dp</dimen> <dimen name="excheckview_label_width">64dp</dimen> <dimen name="popup_text_large">16dp</dimen> <dimen name="popup_text_normal">14dp</dimen> <dimen name="popup_text_small">12dp</dimen> <dimen name="popup_text_xsmall">10dp</dimen> <dimen name="popup_text_xxsmall">8dp</dimen> <dimen name="popup_item_icon_size">22dp</dimen> <dimen name="popup_t2_width">250dp</dimen> <dimen name="popup_t3_width">300dp</dimen> <dimen name="popup_t4_width">350dp</dimen> <dimen name="popup_padding">6dp</dimen> <dimen name="aircombat_text_mid">11dp</dimen> <dimen name="aircombat_mid_width">100dp</dimen> <dimen name="toast_text_size">16dp</dimen> <dimen name="toast_margin_tb">4dp</dimen> <dimen name="toast_margin_se">12dp</dimen> <dimen name="toast_minwidth">72dp</dimen> </resources>
{ "pile_set_name": "Github" }
<?php
/*
 * Language Format: add a new file (.lang.php) named after your module under
 * /phpcms/languages/; translations are stored in the $LANG array.
 */
// --- Module / list labels ---
$LANG['announce'] = 'Announcement';
$LANG['announce_manage'] = 'Announcement manager';
$LANG['announce_list'] = 'List of announcement';
$LANG['announce_add'] = 'Add';
$LANG['announce_title'] = 'title';
// --- Validation / status messages ---
$LANG['title_cannot_empty'] = 'Title is required';
$LANG['announcements_cannot_be_empty'] = 'Content is required';
$LANG['announcement_successful_added'] = 'Announcement added successfully';
$LANG['announced_a'] = 'Announcement edited successfully';
$LANG['announce_passed'] = 'Announcement has already been processed successfully.';
$LANG['announce_deleted'] = 'Batch delete processing is done successfully';
// --- Form fields ---
$LANG['startdate'] = 'Start Date';
$LANG['enddate'] = 'Due Date';
$LANG['announce_content'] = 'Content';
$LANG['announce_status'] = 'Status';
$LANG['edit_announce'] = 'Edit';
$LANG['inputer'] = 'Editor';
$LANG['check_annonuce'] = 'Review'; // NOTE(review): key is misspelled ("annonuce") but is referenced by callers — do not rename without updating them
$LANG['preview'] = 'Preview';
$LANG['index'] = 'Frontend';
// --- Batch actions ---
$LANG['cancel_all_selected'] = 'Cancel all selected';
$LANG['pass_all_selected'] = 'Approve selected items';
$LANG['remove_all_selected'] = 'Remove selected items';
$LANG['overdue_announce'] = 'Expired announcement';
$LANG['no_pass_announce'] = 'Pending announcement';
$LANG['no_exists'] = 'The announcement does not exist. It may have been deleted.';
$LANG['affirm_delete'] = 'Are you sure you want to delete it?';
// --- Client-side validation strings ---
$LANG['input_announce_title'] = 'Please insert announcement title';
// NOTE(review): key says "3" but the message says 6 characters — confirm the intended minimum length.
$LANG['title_min_3_chars'] = 'at least 6 characters';
$LANG['right'] = 'Correct';
$LANG['server_no_data'] = 'Server is busy now, please try again later';
$LANG['announce_exist'] = 'The announcement already exists';
$LANG['checking'] = 'Checking validation...';
$LANG['select_stardate'] = 'Please choose start date'; // NOTE(review): key misspelled ("stardate"); referenced elsewhere
$LANG['right_all'] = 'Correct!';
$LANG['select_downdate'] = 'Please choose due date';
$LANG['select_style'] = 'Please choose a style';
$LANG['available_style'] = 'Available style';
$LANG['lists'] = 'List';
$LANG['sitename'] = 'Site';
?>
{ "pile_set_name": "Github" }
#!/bin/bash
# Copyright 2006-2016 Christian Stigen Larsen
#
# This is a small script to test if jp2a has been correctly built.

## PATH TO EXECUTABLE jp2a
JP=../src/jp2a

UNAME=`uname -a`
UPDATE=no

# On Cygwin the built binary carries a .exe suffix.
if test "`echo ${UNAME} | cut -c1-6`" == "CYGWIN" ; then
	JP=../src/jp2a.exe
fi

# Invoking the script as "run_tests.sh update" regenerates the expected-output
# files instead of comparing against them.
if test "${1}" == "update" ; then
	UPDATE=yes
fi

## INITIALIZE VARS
RESULT_OK=0
RESULT_FAILED=0
FAILED_STR=""

# Print the argument in bold (ANSI SGR 1) without a trailing newline.
function print_intense() {
	echo -e -n "\033[1m${1}\033[0m"
}

# Record a passing test and print "OK".
function test_ok() {
	print_intense "OK"
	RESULT_OK=$((RESULT_OK + 1))
}

# Record a failing test; $1 = expected-output file, $2 = command that failed.
# The reproduction command line is accumulated for the final summary.
function test_failed() {
	print_intense "FAILED"
	RESULT_FAILED=$((RESULT_FAILED + 1))
	FAILED_STR="${FAILED_STR}\n${2} | diff --strip-trailing-cr --brief - ${1}"
}

# Run one test case.
#   $1 = human-readable description, $2 = jp2a arguments, $3 = expected-output file.
# In "update" mode the expected file is rewritten instead of compared.
function test_jp2a() {
	CMD="${JP} ${2}"
	printf "test (%2s) %-32s " "$((RESULT_OK+RESULT_FAILED+1))" "(${1})"
	if [ ! -e "${3}" ] ; then
		print_intense "(missing ${3}) "
		test_failed ${3} "${CMD}"
	else
		if test "${UPDATE}" == "no" ; then
			# Compare command output against the golden file; diff's exit
			# status drives the OK/FAILED branch, so keep this one pipeline.
			eval ${CMD} | diff --strip-trailing-cr --brief - ${3} 1>/dev/null && test_ok || test_failed ${3} "${CMD}"
		else
			eval ${CMD} > ${3}
			test_ok
		fi
	fi
	echo ""
}

# Print the pass/fail totals and, if any test failed, the commands to reproduce.
# NOTE(review): the percentage math divides by the total test count and would
# fail if no tests were run.
function test_results() {
	echo ""
	echo "TEST RESULTS FOR JP2A"
	echo ""
	printf "Tests OK : %2d of %2d (%4s)\n" "$((RESULT_OK))" "$((RESULT_OK+RESULT_FAILED))" "$((100*RESULT_OK/(RESULT_OK+RESULT_FAILED)))%"
	printf "Tests FAILED: %2d of %2d (%4s)\n" "$((RESULT_FAILED))" "$((RESULT_OK+RESULT_FAILED))" "$((100*RESULT_FAILED/(RESULT_OK+RESULT_FAILED)))%"
	echo ""
	if test "x${FAILED_STR}" != "x" ; then
		echo "Summary of failed tests:"
		echo -e "${FAILED_STR}"
	fi
}

echo "-------------------------------------------------------------"
echo " TESTING JP2A BUILD"
echo " "
echo " Note that the output may vary a bit on different platforms,"
echo " so some tests may fail. This does not mean that jp2a is"
echo " completely broken."
echo "-------------------------------------------------------------" echo "" test_jp2a "width" "--width=78 jp2a.jpg" normal.txt test_jp2a "border, width" "-b --width=78 jp2a.jpg" normal-b.txt test_jp2a "size" "--size=160x49 jp2a.jpg" 160x49.txt test_jp2a "height" "--height=10 jp2a.jpg" 10h.txt test_jp2a "size" "--size=40x40 jp2a.jpg" 40x40.txt test_jp2a "size" "--size=1x1 --invert jp2a.jpg" 1x1-inv.txt test_jp2a "invert, border" "-i -b --width=110 --height=30 jp2a.jpg" 110x30-i-b.txt test_jp2a "width, flip, invert" "--width=78 --flipx --flipy --invert jp2a.jpg" flip-xy-invert.txt test_jp2a "width, border" "--width=78 -b jp2a.jpg jp2a.jpg" 2xnormal-b.txt test_jp2a "width, verbose" "--verbose --width=78 jp2a.jpg 2>&1 | tr -d '\r'" normal-verbose.txt TEMPFILE=`mktemp /tmp/jp2a-test-XXXXXX` test_jp2a "width, outfile" "--width=78 jp2a.jpg --output=${TEMPFILE} && cat ${TEMPFILE}" normal.txt rm -f ${TEMPFILE} test_jp2a "width, clear" "--width=78 jp2a.jpg --clear" normal-clear.txt test_jp2a "height, grayscale" "logo-40x25-gray.jpg --height=30" logo-30.txt test_jp2a "size, invert" "grind.jpg -i --size=80x30" grind.txt test_jp2a "size, invert, red channel" "grind.jpg -i --size=80x30 --red=1.0 --green=0.0 --blue=0.0" grind-red.txt test_jp2a "size, invert, blue channel" "grind.jpg -i --size=80x30 --red=0.0 --green=1.0 --blue=0.0" grind-green.txt test_jp2a "size, invert, green channel" "grind.jpg -i --size=80x30 --red=0.0 --green=0.0 --blue=1.0" grind-blue.txt test_jp2a "width, grayscale" "--width=78 dalsnuten-640x480-gray-low.jpg" dalsnuten-normal.txt test_jp2a "invert, width, grayscale" "--invert --width=78 dalsnuten-640x480-gray-low.jpg" dalsnuten-invert.txt test_jp2a "invert, size, grayscale" "--invert --size=80x49 dalsnuten-640x480-gray-low.jpg" dalsnuten-80x49-inv.txt test_jp2a "size, invert, border" "dalsnuten-640x480-gray-low.jpg --size=80x25 --invert --border --size=150x45" dalsnuten-640x480-gray-low.txt test_jp2a "size, html" "--size=80x50 --html --html-fontsize=7 
jp2a.jpg" logo.html test_jp2a "size, color, html, flipx" "--size=80x50 --html --color --html-fontsize=8 --flipx grind.jpg" grind-flipx.html test_jp2a "size, color, html, flipy" "--size=80x50 --html --color --html-fontsize=8 --flipy grind.jpg" grind-flipy.html test_jp2a "size, color, html, flipxy" "--size=80x50 --html --color --html-fontsize=8 --flipx --flipy grind.jpg" grind-flipxy.html test_jp2a "width, html, gray, dark" "dalsnuten-640x480-gray-low.jpg --width=128 --html --html-fontsize=8" dalsnuten-256.html test_jp2a "width, html, gray, light" "dalsnuten-640x480-gray-low.jpg --width=128 --background=light --html --html-fontsize=8" dalsnuten-256-light.html test_jp2a "color, html, dark" "grind.jpg --color --background=dark --width=60 --html --html-fontsize=8" grind-color-dark.html test_jp2a "color, html, dark fill" "grind.jpg --color --fill --background=dark --width=60 --html --html-fontsize=8" grind-color-dark-fill.html test_jp2a "color, html, light" "grind.jpg --color --fill --background=light --width=60 --html --html-fontsize=8" grind-color.html test_jp2a "color, html, grayscale" "dalsnuten-640x480-gray-low.jpg --color --width=78 --html --background=light --fill --html-fontsize=8" dalsnuten-color.html test_jp2a "color, html, --grayscale" "grind.jpg --color --width=78 --html --grayscale" grind-2grayscale.html test_jp2a "color, html, --grayscale, fill" "grind.jpg --color --width=78 --html --grayscale --fill" grind-2grayscale-fill.html test_jp2a "color, fill" "grind.jpg --colors --fill --width=78" grind-fill.txt test_jp2a "color, fill, --grayscale" "grind.jpg --colors --fill --grayscale --width=78" grind-2grayscale-fill.txt test_jp2a "color, html, no-bold" "grind.jpg --colors --html --html-no-bold --width=78" grind-nobold.html test_jp2a "html-title, html" "--width=10 --html --html-title='just testing' jp2a.jpg" html-title.txt test_jp2a "color, html-raw" "--width=10 --color --html-raw jp2a.jpg" html-raw.txt test_jp2a "color" "grind.jpg --color --width=60" 
grind-color.txt test_jp2a "color, grayscale" "dalsnuten-640x480-gray-low.jpg --color --width=78" dalsnuten-color.txt test_jp2a "standard input, width" " 2>/dev/null ; cat jp2a.jpg | ${JP} --width=78 -" normal.txt test_jp2a "standard input, width, height" " 2>/dev/null ; cat jp2a.jpg | ${JP} - --width=40 --height=40" 40x40.txt test_jp2a "big size" "--size=2000x2000 dalsnuten-640x480-gray-low.jpg jp2a.jpg | tr -d '\r' | wc -c | tr -d ' '" dalsnuten-jp2a-2000x2000-md5.txt test_jp2a "size, curl download" "--size=454x207 http://jp2a.sourceforge.net/jp2a.jpg" normal-curl.txt test_jp2a "size, curl download" "--size=454x207 http://jp2a.sf.net/jp2a.jpg" normal-curl.txt test_results
{ "pile_set_name": "Github" }
-- Help-text table keyed by building name.
-- The Atlantean castle currently contributes no extra help entries.
return {
   atlanteans_castle = {},
}
{ "pile_set_name": "Github" }
/* definitions generated by preprocessor, copy into defines.h */
#ifndef PPINC

/* Target device and bootloader section size. */
#define _ATMEGA644P /* device select: _ATMEGAxxxx */
#define _B2048      /* boot size select: _Bxxxx (words), powers of two only */

#ifdef __ICCAVR__
#include "iom644.h"
#endif

#if __GNUC__
#include <avr/io.h>
/* Older avr-libc headers (pre-GCC 4.3) use the EEWE/EEMWE names; newer
 * devices define EEPE/EEMPE instead, so alias them when missing. */
#if (__GNUC__ <= 4) && (__GNUC_MINOR__ < 3)
#if !defined(EEWE) && defined(EEPE)
#define EEWE EEPE
#endif
#if !defined(EEMWE) && defined(EEMPE)
#define EEMWE EEMPE
#endif
#endif
#endif

/* define pin for enter-self-prog-mode */
#define PROGPORT PORTB
#define PROGPIN PINB
#define PROG_NO PB0

/* baud rate register value calculation
 * UBRR = CPU_FREQ / (16 * BAUD_RATE) - 1 = 18430000 / (16 * 115200) - 1 = 9
 * (normal-speed asynchronous mode). */
#define CPU_FREQ 18430000
#define BAUD_RATE 115200
#define BRREG_VALUE 9

/* definitions for UART control (USART1 registers/bits) */
#define BAUD_RATE_LOW_REG UBRR1
#define UART_CONTROL_REG UCSR1B
#define ENABLE_TRANSMITTER_BIT TXEN1
#define ENABLE_RECEIVER_BIT RXEN1
#define UART_STATUS_REG UCSR1A
#define TRANSMIT_COMPLETE_BIT TXC1
#define RECEIVE_COMPLETE_BIT RXC1
#define UART_DATA_REG UDR1

/* definitions for SPM control
 * PAGESIZE is in bytes; APP_END is the first address of the boot section. */
#define SPMCR_REG SPMCSR
#define PAGESIZE 256
#define APP_END 61440
/*#define LARGE_MEMORY */

/* definitions for device recognition (device signature bytes) */
#define PARTCODE 0
#define SIGNATURE_BYTE_1 0x1E
#define SIGNATURE_BYTE_2 0x96
#define SIGNATURE_BYTE_3 0x0A

/* indicate that preprocessor result is included */
#define PPINC
#endif
{ "pile_set_name": "Github" }
package pflag

import (
	"fmt"
	"strconv"
	"strings"
)

// -- intSlice Value
//
// intSliceValue adapts a *[]int to the pflag Value interface. The first call
// to Set replaces any default value; subsequent calls append.
type intSliceValue struct {
	value   *[]int
	changed bool
}

// newIntSliceValue wires p up as the backing slice and installs val as its
// default contents.
func newIntSliceValue(val []int, p *[]int) *intSliceValue {
	isv := new(intSliceValue)
	isv.value = p
	*isv.value = val
	return isv
}

// Set parses a comma-separated list of ints. The first successful call
// overwrites the default; later calls append to the accumulated slice.
func (s *intSliceValue) Set(val string) error {
	ss := strings.Split(val, ",")
	out := make([]int, len(ss))
	for i, d := range ss {
		var err error
		out[i], err = strconv.Atoi(d)
		if err != nil {
			return err
		}
	}
	if !s.changed {
		*s.value = out
	} else {
		*s.value = append(*s.value, out...)
	}
	s.changed = true
	return nil
}

// Type reports the flag's type name as shown in usage text.
func (s *intSliceValue) Type() string {
	return "intSlice"
}

// String renders the slice as "[1,2,3]".
func (s *intSliceValue) String() string {
	out := make([]string, len(*s.value))
	for i, d := range *s.value {
		out[i] = fmt.Sprintf("%d", d)
	}
	return "[" + strings.Join(out, ",") + "]"
}

// intSliceConv converts the String() representation back to a []int.
func intSliceConv(val string) (interface{}, error) {
	val = strings.Trim(val, "[]")
	// Empty string would cause a slice with one (empty) entry
	if len(val) == 0 {
		return []int{}, nil
	}
	ss := strings.Split(val, ",")
	out := make([]int, len(ss))
	for i, d := range ss {
		var err error
		out[i], err = strconv.Atoi(d)
		if err != nil {
			return nil, err
		}
	}
	return out, nil
}

// GetIntSlice returns the []int value of a flag with the given name
func (f *FlagSet) GetIntSlice(name string) ([]int, error) {
	val, err := f.getFlagType(name, "intSlice", intSliceConv)
	if err != nil {
		return []int{}, err
	}
	return val.([]int), nil
}

// IntSliceVar defines an []int flag with specified name, default value, and usage string.
// The argument p points to a []int variable in which to store the value of the flag.
func (f *FlagSet) IntSliceVar(p *[]int, name string, value []int, usage string) {
	f.VarP(newIntSliceValue(value, p), name, "", usage)
}

// IntSliceVarP is like IntSliceVar, but accepts a shorthand letter that can be used after a single dash.
func (f *FlagSet) IntSliceVarP(p *[]int, name, shorthand string, value []int, usage string) {
	f.VarP(newIntSliceValue(value, p), name, shorthand, usage)
}

// IntSliceVar defines an []int flag with specified name, default value, and usage string.
// The argument p points to a []int variable in which to store the value of the flag.
func IntSliceVar(p *[]int, name string, value []int, usage string) {
	CommandLine.VarP(newIntSliceValue(value, p), name, "", usage)
}

// IntSliceVarP is like IntSliceVar, but accepts a shorthand letter that can be used after a single dash.
func IntSliceVarP(p *[]int, name, shorthand string, value []int, usage string) {
	CommandLine.VarP(newIntSliceValue(value, p), name, shorthand, usage)
}

// IntSlice defines a []int flag with specified name, default value, and usage string.
// The return value is the address of a []int variable that stores the value of the flag.
func (f *FlagSet) IntSlice(name string, value []int, usage string) *[]int {
	p := []int{}
	f.IntSliceVarP(&p, name, "", value, usage)
	return &p
}

// IntSliceP is like IntSlice, but accepts a shorthand letter that can be used after a single dash.
func (f *FlagSet) IntSliceP(name, shorthand string, value []int, usage string) *[]int {
	p := []int{}
	f.IntSliceVarP(&p, name, shorthand, value, usage)
	return &p
}

// IntSlice defines a []int flag with specified name, default value, and usage string.
// The return value is the address of a []int variable that stores the value of the flag.
func IntSlice(name string, value []int, usage string) *[]int {
	return CommandLine.IntSliceP(name, "", value, usage)
}

// IntSliceP is like IntSlice, but accepts a shorthand letter that can be used after a single dash.
func IntSliceP(name, shorthand string, value []int, usage string) *[]int {
	return CommandLine.IntSliceP(name, shorthand, value, usage)
}
{ "pile_set_name": "Github" }
<script type="text/javascript"><!-- // These variables are used by the web2py_ajax_init function in web2py_ajax.js (which is loaded below). var w2p_ajax_confirm_message = "{{=T('Are you sure you want to delete this object?')}}"; var w2p_ajax_date_format = "{{=T('%Y-%m-%d')}}"; var w2p_ajax_datetime_format = "{{=T('%Y-%m-%d %H:%M:%S')}}"; //--></script> {{ response.files.insert(0,URL('static','js/jquery.js')) response.files.insert(1,URL('static','css/calendar.css')) response.files.insert(2,URL('static','js/calendar.js')) response.files.insert(3,URL('static','js/web2py.js')) response.include_meta() response.include_files() }}
{ "pile_set_name": "Github" }
package com.glumes.openglbasicshape.multitest.testfragment

import android.content.Context
import android.opengl.GLES20
import android.opengl.GLSurfaceView
import android.os.Bundle
import android.support.v4.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import com.glumes.importobject.BallForControl
import com.glumes.importobject.BallTextureByVertex
import com.glumes.importobject.TextureRect
import com.glumes.openglbasicshape.R
import com.glumes.openglbasicshape.utils.MatrixState
import com.glumes.openglbasicshape.utils.TextureHelper
import javax.microedition.khronos.egl.EGLConfig
import javax.microedition.khronos.opengles.GL10

/**
 * Created by glumes on 25/06/2018
 *
 * Fragment whose only view is a [StencilSurfaceView] demonstrating the
 * OpenGL ES stencil test.
 */
class StencilTestFragment : Fragment() {

    lateinit var surfaceView: StencilSurfaceView

    override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?, savedInstanceState: Bundle?): View? {
        surfaceView = StencilSurfaceView(context!!)
        surfaceView.requestFocus()
        surfaceView.isFocusableInTouchMode = true
        return surfaceView
    }
}

/**
 * GLSurfaceView that draws a floor plus a textured ball and its reflection,
 * using the stencil buffer to confine the mirrored ball to the floor area.
 */
class StencilSurfaceView(var mContext: Context) : GLSurfaceView(mContext) {

    val UNIT_SIZE = 0.8f // ball unit size
    val BALL_SCALE = 1.0f // ball scale factor
    val ANGLE_SPAN = 11.25f // angular step used to subdivide the sphere

    init {
        setEGLContextClientVersion(2)
        setRenderer(StencilRenderer())
        renderMode = GLSurfaceView.RENDERMODE_CONTINUOUSLY
    }

    inner class StencilRenderer : GLSurfaceView.Renderer {

        // Scene objects and texture handles; created in onSurfaceCreated.
        var texureRect: TextureRect? = null
        var ball: BallTextureByVertex? = null
        var bfg: BallForControl? = null
        var textureFloor: Int? = 0
        var textureFloorBTM: Int? = 0
        var textureBallId: Int? = 0

        // NOTE: the draw order below (stencil write -> mirrored ball ->
        // blended floor -> real ball) is what produces the reflection effect;
        // do not reorder these GL calls.
        override fun onDrawFrame(gl: GL10?) {
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT or GLES20.GL_COLOR_BUFFER_BIT)
            MatrixState.pushMatrix()
            MatrixState.translate(0f, -2f, 0f)

            // Pass 1: draw the floor with the stencil test set to ALWAYS,
            // replacing stencil values where the floor is rendered.
            GLES20.glClear(GLES20.GL_STENCIL_BUFFER_BIT)
            GLES20.glEnable(GLES20.GL_STENCIL_TEST)
            GLES20.glStencilFunc(GLES20.GL_ALWAYS, 1, 1)
            GLES20.glStencilOp(GLES20.GL_KEEP, GLES20.GL_KEEP, GLES20.GL_REPLACE)
            texureRect!!.drawSelf(textureFloor!!)

            // Pass 2: draw the mirrored ball while the stencil test is active.
            GLES20.glStencilFunc(GLES20.GL_ALWAYS, 1, 1)
            GLES20.glStencilOp(GLES20.GL_KEEP, GLES20.GL_KEEP, GLES20.GL_REPLACE)
            bfg!!.drawSelfMirror(textureBallId!!)
            GLES20.glDisable(GLES20.GL_STENCIL_TEST)

            // Pass 3: blend the semi-transparent floor texture over the reflection.
            GLES20.glEnable(GLES20.GL_BLEND)
            GLES20.glBlendFunc(GLES20.GL_SRC_ALPHA, GLES20.GL_ONE_MINUS_SRC_ALPHA)
            texureRect!!.drawSelf(textureFloorBTM!!)
            GLES20.glDisable(GLES20.GL_BLEND)

            // Pass 4: draw the actual ball above the floor.
            bfg!!.drawSelf(textureBallId!!)
            MatrixState.popMatrix()
        }

        override fun onSurfaceChanged(gl: GL10?, width: Int, height: Int) {
            GLES20.glViewport(0, 0, width, height)
            // Keep the frustum's aspect ratio in sync with the surface.
            var ratio = width.toFloat() / height.toFloat()
            MatrixState.setProjectFrustum(-ratio, ratio, -1f, 1f, 3f, 100f)
            MatrixState.setCamera(0f, 8f, 8f, 0f, 0f, 0f, 0f, 1f, 0f)
        }

        override fun onSurfaceCreated(gl: GL10?, config: EGLConfig?) {
            GLES20.glClearColor(0f, 0f, 0f, 1f)
            texureRect = TextureRect(mContext.resources, 4f, 2.568f)
            ball = BallTextureByVertex(mContext.resources, BALL_SCALE)
            bfg = BallForControl(ball, 3f)
            GLES20.glEnable(GLES20.GL_DEPTH_TEST)
            // Load the floor, translucent floor overlay and ball textures.
            textureFloor = TextureHelper.loadTexture(mContext, R.drawable.mdb)
            textureFloorBTM = TextureHelper.loadTexture(mContext, R.drawable.mdbtm)
            textureBallId = TextureHelper.loadTexture(mContext, R.drawable.basketball)
            GLES20.glEnable(GLES20.GL_CULL_FACE)
            MatrixState.setInitStack()
        }
    }
}
{ "pile_set_name": "Github" }
################################################################################
#
# findutils
#
################################################################################

FINDUTILS_VERSION = 4.7.0
FINDUTILS_SOURCE = findutils-$(FINDUTILS_VERSION).tar.xz
FINDUTILS_SITE = $(BR2_GNU_MIRROR)/findutils
FINDUTILS_LICENSE = GPL-3.0+
FINDUTILS_LICENSE_FILES = COPYING

# Pre-seed configure/gnulib cache variables that cannot be probed by running
# test programs when cross-compiling.
FINDUTILS_CONF_ENV = \
	gl_cv_func_stdin=yes \
	ac_cv_func_working_mktime=yes \
	gl_cv_func_wcwidth_works=yes

$(eval $(autotools-package))
{ "pile_set_name": "Github" }
from datetime import datetime, timedelta

from django.contrib.auth.models import Group
from django.core import mail
from django.core.management import call_command
from nose.tools import eq_

from kitsune.questions.tests import AnswerFactory, QuestionFactory
from kitsune.sumo.tests import TestCase
from kitsune.users.tests import UserFactory


class TestEmployeeReportCron(TestCase):
    def test_report_employee_answers(self):
        """The report_employee_answers command counts 2-day-old questions and
        answers by tracked users, and emails the result to the metrics group.
        """
        # Note: This depends on two groups that are created in migrations.
        # If we fix the tests to not run migrations, we'll need to create the
        # two groups here: 'Support Forum Tracked', 'Support Forum Metrics'
        tracked_group = Group.objects.get(name="Support Forum Tracked")
        tracked_user = UserFactory()
        tracked_user.groups.add(tracked_group)

        report_group = Group.objects.get(name="Support Forum Metrics")
        report_user = UserFactory()
        report_user.groups.add(report_group)

        # An unanswered question that should get reported
        QuestionFactory(created=datetime.now() - timedelta(days=2))

        # An answered question that should get reported
        q = QuestionFactory(created=datetime.now() - timedelta(days=2))
        AnswerFactory(question=q)

        # A question answered by a tracked user that should get reported
        q = QuestionFactory(created=datetime.now() - timedelta(days=2))
        AnswerFactory(creator=tracked_user, question=q)

        # More questions that shouldn't get reported: too old, too new,
        # or created just now.
        q = QuestionFactory(created=datetime.now() - timedelta(days=3))
        AnswerFactory(creator=tracked_user, question=q)
        q = QuestionFactory(created=datetime.now() - timedelta(days=1))
        AnswerFactory(question=q)
        QuestionFactory()

        call_command("report_employee_answers")

        # Get the last email and verify contents
        email = mail.outbox[-1]
        assert "Number of questions asked: 3" in email.body
        assert "Number of questions answered: 2" in email.body
        assert "{username}: 1".format(username=tracked_user.username) in email.body
        eq_([report_user.email], email.to)
{ "pile_set_name": "Github" }
{{## def.checkMissingProperty:_properties: {{~ _properties:$propertyKey:$i }} {{?$i}} || {{?}} {{ var $prop = it.util.getProperty($propertyKey) , $useData = $data + $prop; }} ( ({{# def.noPropertyInData }}) && (missing{{=$lvl}} = {{= it.util.toQuotedString(it.opts.jsonPointers ? $propertyKey : $prop) }}) ) {{~}} #}} {{## def.errorMissingProperty:_error: {{ var $propertyPath = 'missing' + $lvl , $missingProperty = '\' + ' + $propertyPath + ' + \''; if (it.opts._errorDataPathProperty) { it.errorPath = it.opts.jsonPointers ? it.util.getPathExpr($currentErrorPath, $propertyPath, true) : $currentErrorPath + ' + ' + $propertyPath; } }} {{# def.error:_error }} #}} {{## def.allErrorsMissingProperty:_error: {{ var $prop = it.util.getProperty($propertyKey) , $missingProperty = it.util.escapeQuotes($propertyKey) , $useData = $data + $prop; if (it.opts._errorDataPathProperty) { it.errorPath = it.util.getPath($currentErrorPath, $propertyKey, it.opts.jsonPointers); } }} if ({{# def.noPropertyInData }}) { {{# def.addError:_error }} } #}}
{ "pile_set_name": "Github" }
//
// UIProgressView+Webkit.h
// Pods
//
// Created by 01 on 17/8/25.
//
//

#import <UIKit/UIKit.h>

/// Notifies an interested party (e.g. the hosting view controller) that
/// bar button items should be refreshed when web-view loading state changes.
@protocol LYWebViewProgressDelegate <NSObject>

- (void)updateBarItemStatus;

@end

/// Category that adapts UIProgressView for use as a web-view loading bar.
@interface UIProgressView (WebKit)

/// When YES, the progress view hides itself once progress approaches 1.0.
@property(assign, nonatomic) BOOL ly_hiddenWhenProgressApproachFullSize;

/// Delegate informed of progress/bar-item updates.
/// NOTE(review): declared `assign` (non-zeroing); a deallocated delegate
/// leaves a dangling pointer. `weak` would be safer if the backing
/// associated-object implementation supports it — kept as-is to preserve
/// the existing contract; confirm against the .m file.
@property(assign, nonatomic) id<LYWebViewProgressDelegate> delegate;

@end
{ "pile_set_name": "Github" }
<?xml version="1.0"?> <ctml> <validate reactions="yes" species="yes"/> <!-- phase gas --> <phase dim="3" id="air"> <elementArray datasrc="elements.xml"> O H C Fe Ca N </elementArray> <speciesArray datasrc="#species_data"> O2 H2 H CH2O CHO OH CO2 H2O CH4 CO N2 </speciesArray> <reactionArray datasrc="#reaction_data"/> <state> <temperature units="K">1500.0</temperature> <pressure units="Pa">101325.0</pressure> </state> <thermo model="IdealGas"/> <kinetics model="GasKinetics"/> <transport model="None"/> </phase> <!-- species definitions --> <speciesData id="species_data"> <!-- species O2 --> <species name="O2"> <atomArray>O:2 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> 0.0 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="H2"> <atomArray>H:2 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> 0.0 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="H"> <atomArray>H:1 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> 1.0E6 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="CH2O"> <atomArray>C:1 H:2 O:1 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> -8.611E4 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="CHO"> <atomArray>C:1 H:1 O:1 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> -6.255E4 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="OH"> <atomArray> H:1 O:1 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> 
2.259E4 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="CO2"> <atomArray> C:1 O:2 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> -3.9597E5 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="H2O"> <atomArray> H:2 O:1 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> -1.8987E5 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="CH4"> <atomArray> C:1 H:4 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> 2.485E4 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <species name="CO"> <atomArray> C:1 O:1 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> -2.0464E5 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> <!-- species N2 --> <species name="N2"> <atomArray>N:2 </atomArray> <thermo> <const_cp Tmax="5000.0" Tmin="100.0"> <t0 units="K"> 1500.0 </t0> <h0 units="J/mol"> 0.0 </h0> <s0 units="J/mol/K">0.0</s0> <cp0 units="J/mol/K">0.0</cp0> </const_cp> </thermo> </species> </speciesData> </ctml>
{ "pile_set_name": "Github" }
from __future__ import absolute_import

import decimal

import six

from plotly import exceptions
from plotly.colors import (
    DEFAULT_PLOTLY_COLORS,
    PLOTLY_SCALES,
    color_parser,
    colorscale_to_colors,
    colorscale_to_scale,
    convert_to_RGB_255,
    find_intermediate_color,
    hex_to_rgb,
    label_rgb,
    n_colors,
    unconvert_from_RGB_255,
    unlabel_rgb,
    validate_colors,
    validate_colors_dict,
    validate_colorscale,
    validate_scale_values,
)

try:  # Python 3.3+
    from collections.abc import Sequence
except ImportError:  # Python 2 fallback
    from collections import Sequence


def is_sequence(obj):
    """Return True if ``obj`` is a non-string sequence (list, tuple, ...)."""
    return isinstance(obj, Sequence) and not isinstance(obj, str)


def validate_index(index_vals):
    """
    Validates if a list contains all numbers or all strings

    :raises: (PlotlyError) If there are any two items in the list whose
        types differ
    """
    from numbers import Number

    # The type of the first element decides which homogeneity rule applies.
    if isinstance(index_vals[0], Number):
        if not all(isinstance(item, Number) for item in index_vals):
            raise exceptions.PlotlyError(
                "Error in indexing column. "
                "Make sure all entries of each "
                "column are all numbers or "
                "all strings."
            )
    elif isinstance(index_vals[0], str):
        if not all(isinstance(item, str) for item in index_vals):
            raise exceptions.PlotlyError(
                "Error in indexing column. "
                "Make sure all entries of each "
                "column are all numbers or "
                "all strings."
            )


def validate_dataframe(array):
    """
    Validates all strings or numbers in each dataframe column

    :raises: (PlotlyError) If there are any two items in any list whose
        types differ
    """
    from numbers import Number

    for vector in array:
        # Per column: first element decides number-vs-string homogeneity.
        if isinstance(vector[0], Number):
            if not all(isinstance(item, Number) for item in vector):
                raise exceptions.PlotlyError(
                    "Error in dataframe. "
                    "Make sure all entries of "
                    "each column are either "
                    "numbers or strings."
                )
        elif isinstance(vector[0], str):
            if not all(isinstance(item, str) for item in vector):
                raise exceptions.PlotlyError(
                    "Error in dataframe. "
                    "Make sure all entries of "
                    "each column are either "
                    "numbers or strings."
                )


def validate_equal_length(*args):
    """
    Validates that data lists or ndarrays are the same length.

    :raises: (PlotlyError) If any data lists are not the same length.
    """
    length = len(args[0])
    if any(len(lst) != length for lst in args):
        raise exceptions.PlotlyError(
            "Oops! Your data lists or ndarrays " "should be the same length."
        )


def validate_positive_scalars(**kwargs):
    """
    Validates that all values given in key/val pairs are positive.

    Accepts kwargs to improve Exception messages.

    :raises: (PlotlyError) If any value is < 0 or raises.
    """
    for key, val in kwargs.items():
        try:
            # Non-numeric values raise TypeError on comparison and are
            # converted to a PlotlyError; val <= 0 raises a plain ValueError
            # (preserved upstream behavior).
            if val <= 0:
                raise ValueError("{} must be > 0, got {}".format(key, val))
        except TypeError:
            raise exceptions.PlotlyError("{} must be a number, got {}".format(key, val))


def flatten(array):
    """
    Uses list comprehension to flatten array

    :param (array): An iterable to flatten
    :raises (PlotlyError): If iterable is not nested.
    :rtype (list): The flattened list.
    """
    try:
        return [item for sublist in array for item in sublist]
    except TypeError:
        raise exceptions.PlotlyError(
            "Your data array could not be "
            "flattened! Make sure your data is "
            "entered as lists or ndarrays!"
        )


def endpts_to_intervals(endpts):
    """
    Returns a list of intervals for categorical colormaps

    Accepts a list or tuple of sequentially increasing numbers and returns
    a list representation of the mathematical intervals with these numbers
    as endpoints. For example, [1, 6] returns [[-inf, 1], [1, 6], [6, inf]]

    :raises: (PlotlyError) If input is not a list or tuple
    :raises: (PlotlyError) If the input contains a string
    :raises: (PlotlyError) If any number does not increase after the
        previous one in the sequence
    """
    # Validate the container type before calling len() so that a
    # non-sequence raises the documented PlotlyError, not a TypeError.
    if not isinstance(endpts, (list, tuple)):
        raise exceptions.PlotlyError(
            "The intervals_endpts argument must "
            "be a list or tuple of a sequence "
            "of increasing numbers."
        )
    length = len(endpts)
    # Check if endpts contains only numbers
    for item in endpts:
        if isinstance(item, str):
            raise exceptions.PlotlyError(
                "The intervals_endpts argument "
                "must be a list or tuple of a "
                "sequence of increasing "
                "numbers."
            )
    # Check if numbers in endpts are strictly increasing
    for k in range(length - 1):
        if endpts[k] >= endpts[k + 1]:
            raise exceptions.PlotlyError(
                "The intervals_endpts argument "
                "must be a list or tuple of a "
                "sequence of increasing "
                "numbers."
            )
    # Build [-inf, e0], [e0, e1], ..., [e_last, +inf].
    intervals = [[float("-inf"), endpts[0]]]
    for k in range(length - 1):
        intervals.append([endpts[k], endpts[k + 1]])
    intervals.append([endpts[length - 1], float("inf")])
    return intervals


def annotation_dict_for_label(
    text,
    lane,
    num_of_lanes,
    subplot_spacing,
    row_col="col",
    flipped=True,
    right_side=True,
    text_color="#0f0f0f",
):
    """
    Returns annotation dict for label of n labels of a 1xn or nx1 subplot.

    :param (str) text: the text for a label.
    :param (int) lane: the label number for text. From 1 to n inclusive.
    :param (int) num_of_lanes: the number 'n' of rows or columns in subplot.
    :param (float) subplot_spacing: the value for the horizontal_spacing and
        vertical_spacing params in your plotly.tools.make_subplots() call.
    :param (str) row_col: choose whether labels are placed along rows or
        columns.
    :param (bool) flipped: flips text by 90 degrees. Text is printed
        horizontally if set to True and row_col='row', or if False and
        row_col='col'.
    :param (bool) right_side: only applicable if row_col is set to 'row'.
    :param (str) text_color: color of the text.
    """
    # Fraction of the paper occupied by a single lane.
    lane_length = (1 - (num_of_lanes - 1) * subplot_spacing) / (num_of_lanes)
    if not flipped:
        xanchor = "center"
        yanchor = "middle"
        if row_col == "col":
            x = (lane - 1) * (lane_length + subplot_spacing) + 0.5 * lane_length
            y = 1.03
            textangle = 0
        elif row_col == "row":
            y = (lane - 1) * (lane_length + subplot_spacing) + 0.5 * lane_length
            x = 1.03
            textangle = 90
    else:
        if row_col == "col":
            xanchor = "center"
            yanchor = "bottom"
            x = (lane - 1) * (lane_length + subplot_spacing) + 0.5 * lane_length
            y = 1.0
            textangle = 270
        elif row_col == "row":
            yanchor = "middle"
            y = (lane - 1) * (lane_length + subplot_spacing) + 0.5 * lane_length
            if right_side:
                x = 1.0
                xanchor = "left"
            else:
                x = -0.01
                xanchor = "right"
            textangle = 0

    annotation_dict = dict(
        textangle=textangle,
        xanchor=xanchor,
        yanchor=yanchor,
        x=x,
        y=y,
        showarrow=False,
        xref="paper",
        yref="paper",
        text=text,
        font=dict(size=13, color=text_color),
    )
    return annotation_dict


def list_of_options(iterable, conj="and", period=True):
    """
    Returns an English listing of objects separated by commas ','

    For example, ['foo', 'bar', 'baz'] becomes 'foo, bar and baz'
    if the conjunction 'and' is selected.
    """
    if len(iterable) < 2:
        raise exceptions.PlotlyError(
            "Your list or tuple must contain at least 2 items."
        )
    # "{}, " for all but the last two items, then "item CONJ item[.]".
    template = (len(iterable) - 2) * "{}, " + "{} " + conj + " {}" + period * "."
    return template.format(*iterable)
{ "pile_set_name": "Github" }
// RUN: %dxc -E main -T ps_6_0 %s | FileCheck %s

// Regression test: the [loop] attribute must be lowered to the
// llvm.loop.unroll.disable metadata, i.e. the compiler must not
// unroll this trip-count-unknown loop.
// CHECK: !"llvm.loop.unroll.disable"

float main(float2 a : A, int3 b : B) : SV_Target {
  float s = 0;
  // b.x is runtime-dependent, so the loop cannot be fully unrolled anyway;
  // [loop] makes the no-unroll request explicit.
  [loop]
  for(int i = 0; i < b.x; i++) {
    s += a.x;
  }
  return s;
}
{ "pile_set_name": "Github" }
{ "title": "$:/languages/ko-KR", "name": "ko-KR", "plugin-type": "language", "description": "Korean (Korea Republic)", "author": "Myeongjin", "core-version": ">=5.1.4" }
{ "pile_set_name": "Github" }
# FrontlineSMS Russian translation by the FrontlineSMS team, Nairobi language.name=Русский # General info app.version.label=Версия # Common action imperatives - to be used for button labels and similar action.ok=ok action.close=Закрыть action.cancel=Отменить action.done=Выполнено action.next=Следующий action.prev=Предыдущий action.back=Назад action.create=Создать action.edit=Редактировать action.rename=Переименовать action.save=Сохранить action.save.all=Save Selected action.delete=Удалить action.delete.all=Delete Selected action.send=Отправить action.export=Экспортировать action.view=View content.loading=Loading... # Messages when FrontlineSMS server connection is lost server.connection.fail.title=Соединение с сервером было потеряно. server.connection.fail.info=Пожалуйста, перезагрузите ФронтлайнСМС или закройте это окно. #Connections: connection.creation.failed=Связь не может быть установлена {0} connection.route.disabled=Удалён маршрут от {0} до {1} connection.route.successNotification=Успешно создан маршрут на {0} connection.route.failNotification=Failed to create connection on {1}: {2} [[[edit]](({0}))]. 
connection.route.disableNotification=Удалён маршрут на {0} connection.route.pauseNotification=Paused connection on {0} connection.route.resumeNotification=Resumed connection on {0} connection.test.sent=Тестовое сообщение успешно отправлено {0} используя {1} connection.route.exception={1} # Connection exception messages connection.error.org.smslib.alreadyconnectedexception=Устройство уже подключено connection.error.org.smslib.gsmnetworkregistrationexception=Не удалось соедениться с сетью GSM connection.error.org.smslib.invalidpinexception=Неправильный PIN код connection.error.org.smslib.nopinexception=Необходимо ввести PIN код connection.error.org.smslib.notconnectedexception={0} connection.error.org.smslib.nosuchportexception=Порт не найдён, или не доступны connection.error.java.io.ioexception=Ошибка ввода/вывода: {0} connection.error.frontlinesms2.camel.exception.invalidapiidexception={0} connection.error.frontlinesms2.camel.exception.authenticationexception={0} connection.error.frontlinesms2.camel.exception.insufficientcreditexception={0} connection.error.serial.nosuchportexception=Порт не может быть найден connection.error.org.apache.camel.runtimecamelexception=Не удается подключиться к связи connection.error.onsave={0} connection.header=Settings > Connections connection.list.none=Соединение отсутствует. 
connection.edit=Редактировать настройки соединения connection.delete=Прервать соединение connection.deleted=Соединение было удалено connection.route.enable=Enable connection.route.retryconnection=Попробовать снова connection.add=Создать новое соединение connection.createtest.message.label=Тестовое сообщение connection.route.disable=Удалить маршрут connection.send.test.message=Отправить тестовое сообщение connection.test.message=Поздравление от ФронтлайнСМС \\o/ вы успешно настроили {0} для отправки СМС \\o/ connection.validation.prompt=Пожалуйста, заполните все обязательные поля connection.select=Выберите тип соединения connection.type=Выбрать тип connection.details=Ввести детали connection.confirm=Подтвердить connection.createtest.number=Номер connection.confirm.header=Подтвердить настройки connection.name.autoconfigured=Авто-настроено {0} {1} на порт {2} status.connection.title=Связи status.connection.manage=Manage your connections status.connection.none=У вас нет настроенного соединения. 
status.devises.header=Обнаруженные устройства status.detect.modems=Обнаружения Модемов status.modems.none=Устройство не обнаружено status.header=Usage Statistics connectionstatus.connecting=Подключается connectionstatus.connected=Подключено connectionstatus.disabled=Disabled connectionstatus.failed=Не удалось connectionstatus.not_connected=Не подключен default.doesnt.match.message=Свойство [{0}] класса [{1}] со значением [{2}] не соответствует требуемому образцу [{3}] default.invalid.url.message=Свойство [{0}] класса [{1}] со значением [{2}] не является допустимым URL default.invalid.creditCard.message=Свойство [{0}] класса [{1}] со значением [{2}] не является действительным номером кредитной карточки default.invalid.email.message=Свойство [{0}] класса [{1}] со значением [{2}] не является дейтвителным адресом электронной почты default.invalid.range.message=Свойство [{0}] класса [{1}] со значением [{2}] не входит в допустимый диапазон от [{3}] на [{4}] default.invalid.size.message=Свойство [{0}] класса [{1}] со значением [{2}] не входит в допустимый диапазон размеров от [{3}] на [{4}] default.invalid.max.message=Свойство [{0}] класса [{1}] со значением [{2}] превышает максимальное значение [{3}] default.invalid.min.message=Свойство [{0}] класса [{1}] со значением [{2}] меньше чем минимальное значение [{3}] default.invalid.max.size.message=Свойство [{0}] класса [{1}] со значением [{2}] превышает максимальный размер [{3}] default.invalid.min.size.message=Свойство [{0}] класса [{1}] со значением [{2}] меньше чем минимальное значение [{3}] default.invalid.validator.message=Свойство [{0}] класса [{1}] со значением [{2}] не проходит авторизацию default.not.inlist.message=Свойство [{0}] класса [{1}] со значением [{2}] не содержится в списке [{3}] default.blank.message=Свойство [{0}] класса [{1}] не может быть пустым default.not.equal.message=Свойство [{0}] класса [{1}] со значением [{2}] не может выть равным [{3}] default.null.message=Свойство [{0}] класса [{1}] не может 
быть нулевой default.not.unique.message=Свойство [{0}] класса [{1}] со значением [{2}] должен быть уникальным default.paginate.prev=Предыдущий default.paginate.next=Следующий default.boolean.true=Корректный default.boolean.false=Ложный default.date.format=dd MMMM, yyyy hh:mm default.number.format=0 default.unarchived={0} не архивирован default.unarchive.failed=разархивирование не удалось {0} default.restored={0} востановленно default.restore.failed=Не удалось восстановить {0} с идентификатором {1} default.archived.multiple={0} заархивировано default.created={0} созданный default.created.message={0} {1} было создано default.create.failed=Не удалось создать{0} default.updated={0} был обновлен default.update.failed=не удалось обновить {0} с идентификатором {1} default.updated.multiple={0} были обновлены default.updated.message={0} обновлен default.deleted={0} удаленный default.trashed={0} moved to trash default.trashed.multiple={0} перемещены в корзину default.archived={0} archived default.unarchive.keyword.failed=Архивация не удалась{0}. Ключевое слово уже используеться default.unarchived.multiple={0} не заархивирован default.delete.failed=Невозможно удалить {0} с идентификатором {1} default.notfound=Не найдено {0} с идентификатором {1} default.optimistic.locking.failure=Другой пользователь обновил этот {0} пока вы редактировали default.home.label=Дом default.list.label={0} Лист default.add.label=Добавить{0} default.new.label=Новый {0} default.create.label=Создать {0} default.show.label=Показать {0} default.edit.label=Редактировать {0} search.clear=Очистить поиск default.button.create.label=Создать default.button.edit.label=Редактировать default.button.update.label=Обновить default.button.delete.label=Удалить default.button.search.label=Искать default.button.apply.label=Применить default.button.delete.confirm.message=Вы уверены? default.deleted.message={0} удалено # Data binding errors. 
Use "typeMismatch.$className.$propertyName to customize (eg typeMismatch.Book.author) typeMismatch.java.net.URL=Свойство {0} должен быть действительный URL typeMismatch.java.net.URI=Свойство {0} должен быть действительный URL typeMismatch.java.util.Date=Свойство {0} должна быть действительной датой typeMismatch.java.lang.Double=Свойство{0} должно быть действительным номером typeMismatch.java.lang.Integer=Свойство {0} должно быть действительным номером typeMismatch.java.lang.Long=Свойство {0} должно быть действительным номером typeMismatch.java.lang.Short=Свойство {0} должно быть действительным номером typeMismatch.java.math.BigDecimal=Свойство {0} должно быть действительным номером typeMismatch.java.math.BigInteger=Свойство {0} должно быть действительным номером typeMismatch.int={0} номер должен быть действительным # Application specific messages messages.trash.confirmation=Это очистит корзину и удалит сообщения навсегда. Вы хотите продолжить? default.created.poll=Опрос был создан! default.search.label=Очистить поиск default.search.betweendates.title=Между датами: default.search.moresearchoption.label=Дополнительне параметры поиска default.search.date.format=д/М/гггг default.search.moreoption.label=Дополнительные варианты # SMSLib Fconnection smslib.label=Телефон/Модем smslib.type.label=Тип smslib.name.label=Название smslib.manufacturer.label=Производитель smslib.model.label=Модель smslib.port.label=Порт smslib.baud.label=Скорость передачи данных smslib.pin.label=PIN smslib.imsi.label=SIM IMSI smslib.serial.label=Серия устройства # smslib.sendEnabled.label=Use for sending smslib.receiveEnabled.label=Use for receiving smslibFconnection.sendEnabled.validator.error.send=Modem should be used for sending smslibFconnection.receiveEnabled.validator.error.receive=or receiving messages smslib.description=Подключения к USB, последовательный порт и Bluetooth модемы и телефоны smslib.global.info=ФтонрлайнСМС попытается автоматически настроить любой подключенный модем или 
телефон, но вы можете вручную настроить их здесь # Email Fconnection email.label=Электронная почта email.type.label=Тип email.name.label=Название email.receiveProtocol.label=Протокол email.serverName.label=Имя сервера email.serverPort.label=Порт Сервера email.username.label=Имя пользователя email.password.label=Пароль # CLickatell Fconnection clickatell.label=Акаунт в Clickatell clickatell.type.label=Тип clickatell.name.label=Название clickatell.apiId.label=Идентификатор clickatell.username.label=Имя пользователя clickatell.password.label=Пароль clickatell.sendToUsa.label=Отправить в США clickatell.fromNumber.label=От число clickatell.description=Отправлять и получать сообщения через аккаунт Clickatell clickatell.global.info=Вам нужно будет настроить аккаунт с Clickatell (<a href="http://www.clickatell.com">www.clickatell.com</a>). clickatellFconnection.fromNumber.validator.invalid=B США 'Номер Oт' необходим для отправки сообщений # TODO: Change markup below to markdown clickatell.info-local=In order to set up a Clickatell connection, you must first have a Clickatell account. If you do not have one, please <a href='https://www.clickatell.com/register' target='_blank'>go to the Clickatell site and register for a 'Developer's Central Account'</a>. It is free to sign up for test messages, and the process should take less that 5 minutes.</br></br> Once you have an active Clickatell account, you will need to <a href='https://central.clickatell.com/central/get_connections.php' target='_blank'>'Create a Connection (API ID)'</a> from the front page. First, select 'APIs,' then select 'Set up a new API.' From there, choose 'add HTTP API' with the default settings, then enter the relevant details below. </br></br>The 'Name' field is just for your own reference for your Frontline account, and not related to the Clickatell API, e.g. 'My local message connection'. 
clickatell.info-clickatell=The following details should be copied and pasted directly from the Clickatell HTTP API screen. #Nexmo Fconnection nexmo.label=Nexmo nexmo.type.label=Nexmo connection nexmo.name.label=Название nexmo.api_key.label=ключ API nexmo.api_secret.label=API secret nexmo.fromNumber.label=From number nexmo.description=Send and receive messages through a Nexmo account. nexmo.receiveEnabled.label=Receiving enabled nexmo.sendEnabled.label=Sending enabled # Smssync Fconnection smssync.label=СМС Синхронизация smssync.name.label=Название smssync.type.label=Тип smssync.receiveEnabled.label=Приём овеспечен smssync.sendEnabled.label=Oтправление овеспечено smssync.secret.label=Засекретить smssync.timeout.label=Timeout (mins) smssync.description=Используйте телефон Android с установлено приложение СМС синхронизации для отправки и получения СМС с ФтонрлайнСМС smssync.field.secret.info=Засекрет эту часть программе smssync.global.info=Скачай программю СМС синхронизации от <a href="http://smssync.ushahidi.com">smssync.ushahidi.com</a> smssync.timeout=The Android phone associated with "{0}" has not contacted your Frontline account for {1} minute(s) [<a href="#" onclick="mediumPopup.editConnection({2})">edit</a>] smssync.info-setup=Frontline products enable you to send and receive messages through your Android phone. In order to do this you will need to:\n\n1. Input a 'Secret' and name your connection. A secret is simply a password of your choice.\n2. Download and install [SMSSync from the Android App store](https://play.google.com/store/apps/details?id=org.addhen.smssync&hl=en) to your Android phone\n3. Once you have created this connection, you can create a new Sync URL within SMSSync on your Android phone by entering the connection URL (generated by your Frontline product and displayed on the next page) and your chosen secret. See [The SMSSync Site](http://smssync.ushahidi.com/howto) for more help. 
smssync.info-timeout=If SMSSync does not contact your Frontline product for a certain duration (default 60 minutes), your queued messages will NOT be sent, and you will see a notification that the messages failed to send. Select this duration below: smssync.info-name=Finally, you should name your SMSSync connection with a name of your choice, e.g. 'Bob's work Android'. # Messages Tab message.create.prompt=Введите текст сообщения message.character.count=Осталось символов {0} ({1} СМС сообщение (ия)) message.character.count.warning=Может быть увеличена после проведения изменений message.header.inbox=Входящие message.header.sent=Посланный message.header.pending=В ожидании message.header.trash=Мусор message.header.folder=Папка message.header.activityList=Список Деятельность message.header.folderList=Список Папок announcement.label=Объявление announcement.description=Отправить объявление и упорядочить ответы announcement.info1=Объявление было сохранено и ответы были добавлены в очередь ожидаемых сообщений. announcement.info2=Отправка всех сообщений может занять время в зависимости от количества сообщений и подключения к сети. announcement.info3=Для того чтобы увидеть статус ваших сообщений, откройте папку 'в ожидании'. announcement.info4=Для того чтобы увидеть объявление щелкните по нему в меню слева. announcement.validation.prompt=Пожалуйста, заполните все обязательные поля announcement.select.recipients=Выберите получателей announcement.confirm=Подтвердите announcement.delete.warn=Удалите {0} ВНИМАНИЕ: Это действие не может быть отменено! 
announcement.prompt=Тема объявления announcement.confirm.message=Сообщение announcement.details.label=Подтвердите детали announcement.message.label=Сообщение announcement.message.none=Ничто announcement.recipients.label=Получатели announcement.create.message=Создать сообщение #TODO embed javascript values announcement.recipients.count=контакты выбраны announcement.messages.count=сообщения будут отправлены announcement.moreactions.delete=Удалить объявление announcement.moreactions.rename=Переименовать объявление announcement.moreactions.edit=Редактировать объявление announcement.moreactions.export=Экспортировать объявление frontlinesms2.Announcement.name.unique.error.frontlinesms2.Announcement.name={1} {0} "{2}" должен быть уникальным archive.inbox=Заархивировать ящик archive.sent=Отправить архив archive.activity=Статус архива archive.folder=Папка архива archive.folder.name=Название archive.folder.date=Дата archive.folder.messages=Сообщение archive.folder.none=&nbsp; Нет архивированных папок archive.activity.name=Название archive.activity.type=Тип archive.activity.date=Дата archive.activity.messages=Сообщения archive.activity.list.none=&nbsp; Нет архивированных действий archive.header=Архив autoreply.enter.keyword=Введите ключевое слово autoreply.create.message=Введите текст сообщения activity.autoreply.sort.description=Если люди посылают в сообщениях начиная с определенного ключевого слова, ФтонрлайнСМС может автоматически обрабатывать сообщения в вашей системе. 
activity.autoreply.disable.sorting.description=Сообщения не будут автоматически перемещаются в эту деятельность и получать ответы autoreply.confirm=Подтвердите autoreply.name.label=Сообщение autoreply.details.label=Подтвердите детали autoreply.label=Автоответчик autoreply.keyword.label=Ключевое слово autoreply.description=Автоматически ответить на входящее сообщение autoreply.info=Автоматический ответ был создан, любые соощбения содержащие ключевое слово будут добавлены в Аутоответчик, который можно просмотреть, нажав на нее в правом меню. autoreply.info.warning=Автоответчик без ключевого слова будет отвечать на все входящие сообщения autoreply.info.note=Примечание: Если вы заархивируете Автоответчик, то входяшие сообщения не будут отсортированы. autoreply.validation.prompt=Пожалуйста, заполните все необходимые поля. autoreply.message.title=Сообщение будет отправлено на этот автоответчик: autoreply.keyword.title=Автоматичски сортировать сообщения по ключевым словам: autoreply.name.prompt=Имя автоответчика autoreply.message.count=0 символов (1 СМС сообщение) autoreply.moreactions.delete=Удалить автоответчик autoreply.moreactions.rename=Переименовать автоответчик autoreply.moreactions.edit=Редактировать автоответчик autoreply.moreactions.export=Экспортировать автоответчик autoreply.all.messages=Не использовать ключевые слова (Все входяшии сообщения получат этот автоответ) autoreply.text.none=Пусто frontlinesms2.Autoreply.name.unique.error.frontlinesms2.Autoreply.name={1} {0} "{2}" должен быть уникален frontlinesms2.Autoreply.name.validator.error.frontlinesms2.Autoreply.name=Навание автоответчика должно быть уникальным frontlinesms2.Keyword.value.validator.error.frontlinesms2.Autoreply.keyword.value=Ключевое слово Keyword "{2}" уже используется frontlinesms2.Autoreply.autoreplyText.nullable.error.frontlinesms2.Autoreply.autoreplyText=Сообщение не может быть пустым autoforward.title={0} autoforward.label=Aвтоматическая переадресация 
autoforward.description=Автоматической переадресации входящих сообщений контактам autoforward.recipientcount.current=Tекущей {0} получатель autoforward.create.message=Введите сообщение autoforward.confirm=Подтвердите autoforward.recipients=Получатели autoforward.name.prompt=Даете название автоматическаму сообщению autoforward.details.label=Подтвердите детали autoforward.keyword.label=Ключевое слово(а) autoforward.name.label=Cообщение autoforward.contacts=Kонтакты autoforward.groups=Группы autoforward.info=Автоматическая переадресация была создана, любые сообщения, содержащие ключевое слово будет добавлено в эту автоматическую пересылку деятельности, которые можно просмотреть, нажав на нее в правом меню руку. autoforward.info.warning=Автоматическая переадресация без ключевого слова приведет к все входящие сообщения направляются autoforward.info.note=TODO:Note: If you archive the Autoforward, incoming messages will no longer be sorted for it. autoforward.save=Автоматическая пересылка была спасена! autoforward.save.success={0} Аftomatichiskoye saabshenye saxtoneno!) Автоматическая пересылка была спасена! autoforward.global.keyword=Hичто (все входящие сообщения будут обрабатываться) autoforward.disabled.keyword=Hичто (автоматическая сортировка выключена) autoforward.keyword.none.generic=Hичто autoforward.groups.none=Hичто autoforward.contacts.none=Hичто autoforward.message.format=Cообщение contact.new=Новый контакт contact.list.no.contact=Здесь контактов нет! contact.header=Контакты contact.header.group=Kонтакты >> {0} contact.all.contacts=Все контакты contact.create=Создать новый контакт contact.groups.header=Группы contact.create.group=Создать новую группу contact.smartgroup.header=Смарт - группы contact.create.smartgroup=Создать новую смарт-группу contact.add.to.group=Добавить в группу... contact.remove.from.group=Удалить из группы contact.customfield.addmoreinformation=Добавить дополнительную информацию...A contact.customfield.option.createnew=Создать новую... 
contact.name.label=Название contact.phonenumber.label=Мобильный contact.notes.label=Записи contact.email.label=Электронная почта contact.groups.label=Группы contact.notinanygroup.label=Не принадлежит никакой группе contact.messages.label=Сообещения contact.messages.sent={0} сообщение отравлено contact.received.messages={0} сообщения приняты contact.search.messages=Поиск сообщений contact.select.all=Выбрать все опции contact.search.placeholder=Search your contacts, or enter phone numbers contact.search.contact=Kонтакты contact.search.smartgroup=Смарт - группы contact.search.group=Группы contact.search.address=Добавьте номер телефона: contact.not.found=Контакты не найдены group.not.found=Группа не найдена smartgroup.not.found=Yмная группа не найдена group.rename=Переименовать группу group.edit=Редактировать группу group.delete=Удалить группу group.moreactions=Дополнительные действия... customfield.validation.prompt=Пожалуйста, введите имя customfield.validation.error=Имя уже существует customfield.name.label=Имя export.contact.info=Чтобы экспортировать контакты из ФронтлайнСМС, выберите тип экспорта и информацию которая должна быть экспортирована. export.message.info=Чтобы экспортировать сообщения из ФронтлайнСМС, выберите тип экспорта и иформацию которая должна быть экспортирована. export.selectformat=Выберите формат для экспорта данных export.csv=CSV формат для использования в электронных таблицах export.pdf=PDF формат для печати folder.name.label=Имя group.delete.prompt=Вы уверены что хотите удалить{0}? 
ВНИМАНИЕ: Это не может быть отменено layout.settings.header=Настройки activities.header=Событие activities.create=Создать новое событие folder.header=Папки folder.create=Создать новую папку folder.label=Папка message.folder.header={0} Папка fmessage.trash.actions=Свойства корзины fmessage.trash.empty=Очистить корзину fmessage.to.label=Кому trash.empty.prompt=Все сообщения и деятельность в корзине будут удалены навсегда fmessage.responses.total={0} Количество ответов fmessage.label=Сообщение fmessage.label.multiple={0} сообщения poll.prompt=Назовите этот опрос poll.details.label=Подтвердите детали poll.message.label=Сообещение poll.choice.validation.error.deleting.response=Сохраненный выбор не может иметь пустое значение poll.alias=Псевдонимы poll.keywords=Kлючевые слова poll.aliases.prompt=Введите псевдонимы для соответствующих вариантов. poll.keywords.prompt.details=Верхнего уровня ключевого слова назовешь опрос и направляйтесь в сообщении опроса инструкции. Каждый ответ также может иметь альтернативные короткие ключевые слова разреза. poll.keywords.prompt.more.details=Вы можете ввести несколько ключевых слов, разделенных запятыми для верхнего уровня и ответов. Если нет верхнего уровня ключевые слова не будут введены ниже, то это ответ ключевые слова должны быть уникальны по всем видам деятельности. poll.keywords.response.label=Ключевые слова poll.response.keyword=Устанавливает реакцию ключевые слова poll.set.keyword=Установить верхнего уровня ключевого слова poll.keywords.validation.error=Ключевые слова должны быть уникальными poll.sort.label=Автоматическая сортировка poll.autosort.no.description=Сообщения не будут сортированы автоматически. poll.autosort.description=Сортировать сообщения по ключевым словам. 
poll.sort.keyword=ключевое слово poll.sort.toplevel.keyword.label=Верхнего уровня ключевое слово (а) (опционально) poll.sort.by=Сортировать по poll.autoreply.label=Автоответчик poll.autoreply.none=Ничего poll.recipients.label=Получатели poll.recipients.none=Никого poll.toplevelkeyword=Верхнего уровня ключевых слов poll.sort.example.toplevel=например КОМАНДА poll.sort.example.keywords.A=например A, Поразителяный poll.sort.example.keywords.B=например B, Красивый poll.sort.example.keywords.C=например C, Смелый poll.sort.example.keywords.D=например D, Восхитительный poll.sort.example.keywords.E=например E, Образцовый poll.sort.example.keywords.yn.A=например ДА poll.sort.example.keywords.yn.B=например Нет #TODO embed javascript values poll.recipients.count=выделенные контакты poll.messages.count=сообщения будут отравлены poll.yes=Да poll.no=Нет poll.label=Опрос poll.description=Оправить сообщение и анализироNo, NOPвать ответы poll.messages.sent={0} сообщение отравлено poll.response.enabled=Автоответчик активирован poll.message.edit=Редактировать сообщение для отправки получателям poll.message.prompt=Сообщение будет отправлено получателям опроса poll.message.count=Осталось символов 160 (1 СМС сообщение) poll.moreactions.delete=Удалить опрос poll.moreactions.rename=Переименовать опрос poll.moreactions.edit=Редактировать опрос poll.moreactions.export=Экспортировать опрос folder.moreactions.delete=Удалить директорию folder.moreactions.rename=Переминовать директорию folder.moreactions.export=Перислать директорию #TODO embed javascript values poll.reply.text=Ответ "{0}" на Да , "{1}" на Нет. 
poll.reply.text1={0} "{1}" для {2} poll.reply.text2=Просьба ответить 'Да' или 'Нет' poll.reply.text3=или poll.reply.text5=Ответить poll.reply.text6=Пожалуста, ответьте poll.message.send={0} {1} poll.recipients.validation.error=Выбрать контакты для отправки сообщений frontlinesms2.Poll.name.unique.error.frontlinesms2.Poll.name={1} {0} "{2}" должен быть уникальный frontlinesms2.Poll.responses.validator.error.frontlinesms2.Poll.responses=Варианты ответов не могут быть одинаковыми frontlinesms2.Keyword.value.validator.error.frontlinesms2.Poll.keyword.value=Ключевое "{2}" уже используется wizard.title.new=Новый wizard.fmessage.edit.title=Редактировать {0} popup.title.saved={0} сохранен! popup.activity.create=Выбрать новую деятельность: Выбрать тип popup.smartgroup.create=Создать смарт-группу popup.help.title=Помощь smallpopup.customfield.create.title=Создать специальное поле smallpopup.group.rename.title=Переименовать группу smallpopup.group.edit.title=Редактировать группу smallpopup.group.delete.title=Удалить группу smallpopup.fmessage.rename.title=Переименовать {0} smallpopup.fmessage.delete.title=Удалить{0} smallpopup.fmessage.export.title=Экспортировать smallpopup.delete.prompt=Удалить{0}? smallpopup.delete.many.prompt=Удалить из {0} контактов? smallpopup.empty.trash.prompt=Очистить карзину? smallpopup.messages.export.title=Результат экспорта ({0} messages) smallpopup.test.message.title=Тестовое сообщение smallpopup.recipients.title=Получатели smallpopup.folder.title=Папка smallpopup.contact.export.title=Экспортировать smallpopup.contact.delete.title=Удалить contact.selected.many={0} контакты выбраны group.join.reply.message=Добро пожаловать group.leave.reply.message=Досвидания fmessage.new.info=У вас {0} новых сообщений. 
Нажмите чтобы посмотреть wizard.quickmessage.title=Send Message wizard.messages.replyall.title=Ответить Всем wizard.send.message.title=Отправленые Сообщения wizard.ok=Хорошо wizard.create=Создать wizard.save=Сохранить wizard.send=Отправить common.settings=Настройки common.help=Помощь validation.nospaces.error=Ключевые слова не должны содержать растаяние activity.validation.prompt=Пожалуйста, заполните все обязательные поля validator.invalid.name=Another activity exists with the name {2} autoreply.blank.keyword=Пустое ключевое слово. Ответ будет отправлен на все входящие сообщения poll.type.prompt=Выберите тип опроса poll.question.yes.no=Вопросы с ответом 'Да' или 'Нет' poll.question.multiple=Вопросы с выбором (пример 'Красный', 'Синий', 'Зеленый') poll.question.prompt=Введите вопрос poll.message.none=Не отправлять сообщения для этого опроса (только сбор ответов). poll.replies.header=Ответить автоматически на отклики опроса (по желанию) poll.replies.description=Если входящее сообщение определяется как ответ на опрос, отправьте сообщение отправителю. poll.autoreply.send=Отправить автоматический ответ на отклик опроса poll.responses.prompt=Введите потенциальные ответы (от 2 до 5) poll.sort.header=Сортировка сообщений автоматически по ключевому слову (по желанию) poll.sort.enter.keywords=Введите ключевые слова для опроса и ответов poll.sort.description=Если люди посылают ответы опроса с помощью ключевых слов, ФронтлайнСМС может автоматически сортировать сообщения в вашей системе. poll.no.automatic.sort=Не сортировать сообщения автоматически poll.sort.automatically=Сортировать сообщения автоматически если есть следующие ключевые слова poll.validation.prompt=Пожалуйста, заполните все обязательные поля poll.name.validator.error.name=Названия опроса должны быть уникальными pollResponse.value.blank.value=Значение ответа Опроса не может быть пустым poll.keywords.validation.error.invalid.keyword=Недесвительный ключивоя слово. 
Поброй имя, слова poll.question=Введите Вопрос poll.response=Список откликов poll.sort=Атоматическая сортировка poll.reply=Атоматический ответ poll.edit.message=Редактировать Сообщение poll.recipients=Выбрать получателей poll.confirm=Подтвердить poll.save=Опрос сохранен! poll.save.success={0} Опрос сахранён! poll.messages.queue=Если вы хотите отправить сообщение с опросом, сообщения, добавлены в очереди ожидании сообщений. ???????????????????? poll.messages.queue.status=Отправление сообщений может занять некоторое время, в зависимости от количества сообщений и подключения к сети. poll.pending.messages=Чтобы увидеть статус вашего сообщения, окройте папку 'Очередь'. poll.send.messages.none=Никакие сообщения не будут отправляться quickmessage.details.label=Подтвердите детали quickmessage.message.label=Сообщение quickmessage.message.none=Ничто quickmessage.recipient.label=Получатель quickmessage.recipients.label=Получатели quickmessage.message.count=Осталось символов 160 (1 СМС сообщение) quickmessage.enter.message=Введите сообщение quickmessage.select.recipients=Выберите получателей quickmessage.confirm=Подтвердите #TODO embed javascript values quickmessage.recipients.count=выбранные контакты quickmessage.messages.count=сообщения будут отправлены quickmessage.count.label=Счет Сообщений: quickmessage.messages.label=Введите сообщение quickmessage.phonenumber.label=Добавьте номер телефона: quickmessage.phonenumber.add=Добавить quickmessage.selected.recipients=Получатели выбраны quickmessage.validation.prompt=Пожалуйста, заполните все обязательные поля fmessage.number.error=Non-numeric characters in this field will be removed when saved search.filter.label=Ограничить поиск до search.filter.group=Выбрать группу search.filter.activities=Выбрать действие/папку search.filter.messages.all=Все исходящие и входящие search.filter.inbox=Только входящие сообщения search.filter.sent=Только исходящие сообщения search.filter.archive=Добавить/Включить архив 
search.betweendates.label=Между датами search.header=Поиск search.quickmessage=Send message search.export=Экспортировать результат search.keyword.label=Ключевое слово или фраза search.contact.name.label=Контактное лицо search.contact.name=Контактное лицо search.result.header=Результаты search.moreoptions.label=Дополнительные функции settings.general=Общий settings.porting=Import and Export settings.connections=Телефоны и соединения settings.logs=Система settings.general.header=Настройки > Основные settings.logs.header=Settings > System Logs logs.none=У вас нет журнала. logs.content=Сообщение logs.date=Время logs.filter.label=Показать журнал за logs.filter.anytime=все время logs.filter.days.1=last 24 hours logs.filter.days.3=last 3 days logs.filter.days.7=last 7 days logs.filter.days.14=last 14 days logs.filter.days.28=last 28 days logs.download.label=Скачать журналы системы logs.download.buttontext=Скачать журналы logs.download.title=Скачать журналы для отправки logs.download.continue=Продолжать smartgroup.validation.prompt=Пожалуйста, заполните все необходимые поля. Вы можете выбрать только одно правило на каждое поле. smartgroup.info=Для того чтобы создать Смарт группу, выберите критерий который должен совпадать с контактами в этой группе. smartgroup.contains.label=Содержит smartgroup.startswith.label=начинается с smartgroup.add.anotherrule=Добавьте другое правило smartgroup.name.label=Название modem.port=Порт modem.description=Описание modem.locked=Закрытая? 
traffic.header=Трафик traffic.update.chart=Обновление traffic.filter.2weeks=Показать последние две недели traffic.filter.between.dates=Между датами traffic.filter.reset=Перезагрузить фильтры traffic.allgroups=Показать все группы traffic.all.folders.activities=Показать все действия traffic.sent=Отправлено traffic.received=Принято traffic.total=Итого tab.message=Сообщения tab.archive=Архив tab.contact=Контакты tab.status=Статус tab.search=Поиск help.info=Это бета версия программы, поэтому встроенная помощь отсутствует. Пожалуйста, зайдите на форумы пользователей, чтобы получить помощь на этом этапе. help.notfound=This help file is not yet available, sorry. # IntelliSms Fconnection intellisms.label=IntelliSms Счет intellisms.type.label=Тип intellisms.name.label=Название intellisms.username.label=Пользователь intellisms.password.label=Пароль intellisms.sendEnabled.label=Use for sending intellisms.receiveEnabled.label=Use for receiving intellisms.receiveProtocol.label=Протокол intellisms.serverName.label=Название Сервера intellisms.serverPort.label=Порт Сервера intellisms.emailUserName.label=Пользователь intellisms.emailPassword.label=Пароль intellisms.description=Отправлять и получать сообщения через счета Intellisms intellisms.global.info=Вам нужно будет настроить аккаунт с Intellisms (<a href="http://www.intellisms.co.uk">www.intellisms.co.uk</a>). intelliSmsFconnection.send.validator.invalid=Вы не можете настроить соединение без отправки или получения функциональности intelliSmsFconnection.receive.validator.invalid=Вы не можете настроить соединение без отправки или получения функциональности #Controllers contact.label=Контакт (ы) contact.edited.by.another.user=Другой пользователь обновил этот Контакт пока вы редактировали contact.exists.prompt=Уже существует контакт с таким номером contact.exists.warn=Контакт с этим номером уже существует contact.view.duplicate=Просмотреть дупликат contact.addtogroup.error=Не можете добавлять и удалять из той же группы! 
contact.mobile.label=Мобильный fconnection.label=Fсвязь fconnection.name=Fсвясь fconnection.unknown.type=Неизвестный тип соединения: fconnection.test.message.sent=Тестовое сообщение отправлено! announcement.saved=Объявление было сохранено и сообщение (я) поставлено (ы) в очередь для отправки announcement.not.saved=Не удалось сохранить объявление! announcement.save.success={0} Соовщение сохранён! announcement.id.exist.not=Не удалось найти объявления с идентификатором {0} autoreply.save.success={0} Автоответчик был сохранен! autoreply.not.saved=Не удалось сохранить автоответчик! report.creation.error=Ошибка при создании отчета export.message.title=Экспорт Сообщений ФронтлайнСМС export.database.id=База данных export.message.date.created=Дата создания export.message.text=Текст export.message.destination.name=Название получателя export.message.destination.mobile=Мобильный получателя export.message.source.name=Название источника export.message.source.mobile=Мобильный источника export.contact.title=Экспорт Сообщений ФронтлайнСМС export.contact.name=Название export.contact.mobile=Мобильный export.contact.email=Электронная почта export.contact.notes=Заметки export.contact.groups=Группы export.messages.name1={0} {1} ({2} сообщения) export.messages.name2={0} ({1} сообщения) export.contacts.name1={0} группа ({1} контакты) export.contacts.name2={0} смарт-группа ({1} контакты) export.contacts.name3=Все контакты ({0} контакты) folder.archived.successfully=Папка была успешно архивировонна! folder.unarchived.successfully=Папка успешно извлечена! folder.trashed=Папка в корзине! folder.restored=Папка восстановленна! 
folder.exist.not=Не удалось найти папку с идентификатором {0} folder.renamed=Папка переименована group.label=Группа group.name.label=Название group.update.success=Группа успешно обновлена group.save.fail=Не удалось сохранить группу group.delete.fail=Не удалось удалить группу import.label=Import contacts and messages import.backup.label=You can use the form below to import your contacts. You can also import messages that you have from a previous Frontline project. import.prompt.type=Select the type of data you wish to import before uploading import.messages=Messages in Frontline's CSV format import.prompt=Select the file containing your data to initiate the import import.upload.failed=По не известной причине загрузка файла не удалась. import.contact.save.error=Обнаружена ошибка при сохранении контактов import.contact.complete={0} контактов было импортировано; {1} не удалось import.contact.exist=The imported contacts already exist. import.contact.failed.label=Failed contact imports import.contact.failed.info={0} contact(s) successfully imported.<br>{1} contact(s) could not be imported.<br>{2} import.download.failed.contacts=Download a file containing the failed contacts. 
import.message.save.error=Обнаружена ошибка при сохранении сообщения import.message.complete={0} сообщений было импортировано; {1} не удалось export.label=Export data from your Frontline workspace export.backup.label=You can export your Frontline data as VCF/VCard, CSV or PDF export.prompt.type=Select which data you wish to export export.allcontacts=All of your contacts export.inboxmessages=Your Inbox messages export.submit.label=Export and download data many.selected={0} {1}s выделено flash.message.activity.found.not=Деятельность не может быть обнаружена flash.message.folder.found.not=Папка не можеть быть обнраружена flash.message=Сообщение flash.message.fmessage={0} сообщение (я) flash.message.fmessages.many={0} СМС сообщения flash.message.fmessages.many.one=1 СМС сообщение fmessage.exist.not=Не удается найти сообщение с идентификатором {0} flash.message.poll.queued=Опрос был сохранен и сообщение (я) были поставлены в очередь для отправки flash.message.poll.not.saved=Не удалось сохранить опрос! system.notification.ok=Да system.notification.fail=ОШИБКА flash.smartgroup.delete.unable=Не удается удалить смарт-группу flash.smartgroup.saved=Смарт-группа {0} сохранена flash.smartgroup.save.failed=Не удалось сохранить смарт-группу. Ошибки были {0} smartgroup.id.exist.not=Не удалось найти смарт-группу с идентификатором {0} smartgroup.save.failed=Не удалось сохранить смарт-группы {0} с параметрами {1} {2} ошибки: {3} searchdescriptor.searching=Идет поиск searchdescriptor.all.messages=все сообщения searchdescriptor.archived.messages=, включая архивированные сообщения searchdescriptor.exclude.archived.messages=, без архивированных сообщений searchdescriptor.only=, только only {0} searchdescriptor.between=, между {0} и {1} searchdescriptor.from=, от {0} searchdescriptor.until=, до {0} poll.title={0} announcement.title={0} autoreply.title={0} folder.title={0} frontlinesms.welcome=Добро пожаловать в ФронтлайнСМС! 
\\o/ failed.pending.fmessages={0} Срок ожидания отправки сообщения истёк. Просмотрите папку отложенных сообщений. subscription.title={0} subscription.info.group=группа: {0} subscription.info.groupMemberCount={0} члены subscription.info.keyword=Верхнего уровня ключевые слова: {0} subscription.sorting.disable=Отключить автоматическую сортировку subscription.info.joinKeywords=Присоединиться: {0} subscription.info.leaveKeywords=Уходить: {0} subscription.group.goto=Посмотреть группы subscription.group.required.error=Подписки должны иметь группу subscription.save.success={0} Подписка была сохранена! language.label=Язык language.prompt=Поменять язык интерфейса пользователя ФронтлайнСМС frontlinesms.user.support=Поддержка пользователей ФронтлайнСМС download.logs.info1=ВНИМАНИЕ! Команда ФронтлайнСМС не может непосредственно реагировать на представленные отчёты. Если у вас есть запрос, пожалуйста, проверьте файлы справки поддержки пользователей, чтобы увидеть, можете ли вы найти ответ там. Если нет, сообщите о своей проблеме через наш форум поддержки пользователей: download.logs.info2=Другие пользователи, возможно, тоже сообщили о той же проблеме и возможно уже нашли решение! Чтобы ввести и представить ваши log-файлы, пожалуйста, нажмите "Продолжить" # Configuration location info configuration.location.title=Расположение конфигурации configuration.location.description=These files include your database and other settings, which you may wish to back up elsewhere. configuration.location.instructions=Вы можете найти свои конфигурации приложения на <a href="{0}"> {1} </a>. Эти файлы включают базы данных и другие параметры, которые вы можете создать резервную копию в другом месте. 
dynamicfield.contact_name.label=Имя контакта dynamicfield.contact_number.label=Номер Контакта dynamicfield.keyword.label=Ключевое слово dynamicfield.message_content.label=Содержание сообщения # TextMessage domain fmessage.queued=Сообщение поставлено в очередь для отправки на {0} fmessage.queued.multiple=Сообщение поставлено в очередь для отправки для {0} получателей fmessage.retry.success=Сообщение было назначено для отправки повторно в {0} fmessage.retry.success.multiple={0} сообщение (ий) были назначены для отправки повторно{0} fmessage.displayName.label=Название fmessage.text.label=Сообщение fmessage.date.label=Дата fmessage.to=Кому: {0} fmessage.to.multiple=Кому: {0} получатели fmessage.quickmessage=Send message fmessage.archive=Архив fmessage.activity.archive=Архив {0} fmessage.unarchive=Извлечь из архива {0} fmessage.export=Экспортировать fmessage.rename=Переименовать{0} fmessage.edit=Редактировать {0} fmessage.delete=Delete fmessage.moreactions=Дополнительные действия... fmessage.footer.show=Показать fmessage.footer.show.failed=Не удалось fmessage.footer.show.all=Все fmessage.footer.show.starred=Поменченные fmessage.footer.show.incoming=Bходящий fmessage.footer.show.outgoing=Исходящий fmessage.archive.back=Назад fmessage.activity.sentmessage=({0} сообщений отравлено) fmessage.failed=не удалось fmessage.header=сообщения fmessage.section.inbox=Входящие fmessage.section.sent=Исходящие fmessage.section.pending=В ожидании fmessage.section.trash=Карзина fmessage.addsender=Добавить в контакты fmessage.resend=Переслать fmessage.retry=Попробовать снова fmessage.reply=Ответить fmessage.forward=Перенаправить fmessage.messages.none=Здесь сообщений нет! fmessage.selected.none=Сообщения не выделены fmessage.move.to.header=Переместить сообщения в... 
fmessage.move.to.inbox=Входящие fmessage.archive.many=Archive selected fmessage.count=1 сообщение fmessage.count.many={0} сообщения fmessage.many=сообщения fmessage.delete.many=Delete selected fmessage.reply.many=Reply selected fmessage.restore=Восстановить fmessage.restore.many=Восстановить fmessage.retry.many=Retry selected fmessage.selected.many={0} сообщений выделено fmessage.unarchive.many=Unarchive selected # TODO move to poll.* fmessage.showpolldetails=Показать график fmessage.hidepolldetails=Спрятать график # TODO move to search.* fmessage.search.none=Сообщений не найдено fmessage.search.description=Начать новый поиск слева fmessage.connection.receivedon=Получено: activity.name=Название activity.delete.prompt=Переместить {0} в корзину. Это перенесёт все связанные с ними сообщения в мусорную корзину. activity.label=Деятельность activity.categorize=Классифицировать ответ magicwand.title=Добавить замены выражения folder.create.success=Успешно удалось создать папку folder.create.failed=Не удалось создать папку folder.name.validator.error=Имя папки уже используется folder.name.blank.error=Название папки не может быть пустым poll.name.blank.error=Название опроса не может быть пустым poll.name.validator.error=Название опроса уже используется autoreply.name.blank.error=Название автоответчика не может быть пустым autoreply.name.validator.error=Название автоответчика уже используется announcement.name.blank.error=Название объявления не может быть пустым announcement.name.validator.error=Название объявления уже используется group.name.blank.error=Название группы не может быть пустым group.name.validator.error=Название группы уже используется #Jquery Validation messages jquery.validation.required=Это поле является обязательным. jquery.validation.remote=Пожалуйста, исправьте это поле. jquery.validation.email=Пожалуйста, введите действительный адрес электронной почты. jquery.validation.url=Пожалуйста, введите правильный URL. 
jquery.validation.date=Пожалуйста, введите действительную дату. jquery.validation.dateISO=Пожалуйста, введите действительную дату(ISO). jquery.validation.number=Пожалуйста, введите действительный номер. jquery.validation.digits=Пожалуйста, введите только цифры. jquery.validation.creditcard=Пожалуйста, введите действительный номер кредитной карты. jquery.validation.equalto=Пожалуйста, введите одинаковые значения снова. jquery.validation.accept=Пожалуйста, введите значение с допустимым расширением. jquery.validation.maxlength=Пожалуйста, введите не более {0} символов. jquery.validation.minlength=Пожалуйста, введите по крайней мере, {0} символов. jquery.validation.rangelength=Пожалуйста, введите значение от {0} и {1} символов. jquery.validation.range=Пожалуйста, введите значение от {0} и {1}. jquery.validation.max=Пожалуйста, введите значение меньше или равное {0}. jquery.validation.min=Пожалуйста, введите значение, большее или равное {0}. # Webconnection common webconnection.select.type=Выберите веб-службы или приложения для подключения к: webconnection.type=Выберите тип webconnection.title={0} webconnection.label=TODO:Web Connection webconnection.description=Подключение к веб-службе. webconnection.sorting=Автоматическая сортировка webconnection.configure=Настроить службу webconnection.api=Pазоблачать API webconnection.api.info=ФтонрлайнСМС может быть сконфигурирован для приема входящих запросов от удаленного обслуживания и вызвать исходящих сообщений. Более подробную информацию см. в справке раздела Web Connection. webconnection.api.enable.label=давать возможность API webconnection.api.secret.label=Секретный ключ: webconnection.api.disabled=API disabled webconnection.api.url=TODO:API URL webconnection.moreactions.retryFailed=retry failed uploads webconnection.failed.retried=Failed web connections have been scheduled for resending. 
webconnection.url.error.locahost.invalid.use.ip=Please use 127.0.0.1 instead of "locahost" for localhost urls webconnection.url.error.url.start.with.http=Invalid URL (should start with http:// or https://) # Webconnection - generic webconnection.generic.label=Другие веб-сервис webconnection.generic.description=Отправлять сообщения на другие веб-службы webconnection.generic.subtitle=TODO:HTTP Web Connection # Webconnection - Ushahidi/Crowdmap webconnection.ushahidi.label=TODO:Crowdmap / Ushahidi webconnection.ushahidi.description=Отправлять сообщения CrowdMap или сервер Ushahidi. webconnection.ushahidi.key.description=Ключ API для любой Crowdmap или Ushahidi можно найти в настройках Crowdmap Ushahidi или веб-сайт. webconnection.ushahidi.url.label=Ushahidi deployment address: webconnection.ushahidi.key.label=Ushahidi ключ API: webconnection.crowdmap.url.label=Crowdmap развертывания адресу: webconnection.crowdmap.key.label=Crowdmap ключ API: webconnection.ushahidi.serviceType.label=Выберите службу webconnection.ushahidi.serviceType.crowdmap=Crowdmap webconnection.ushahidi.serviceType.ushahidi=Ushahidi webconnection.crowdmap.url.suffix.label=.crowdmap.com webconnection.ushahidi.subtitle=Подключение к веб {0} webconnection.ushahidi.service.label=Cервис: webconnection.ushahidi.fsmskey.label=ФронтлайнСМС API секрет: webconnection.ushahidi.crowdmapkey.label=Crowdmap/Ushahidi API Key: webconnection.ushahidi.keyword.label=Ключевое слово: url.invalid.url=The URL provided is invalid. 
webconnection.confirm=Подтвердить webconnection.keyword.title=Передача каждого полученного сообщения, содержащие следующие ключевые слова: webconnection.all.messages=Не используйте ключевые слова (все входящие сообщения будут пересылаться на этот Web Connection webconnection.httpMethod.label=Выберите HTTP метод: webconnection.httpMethod.get=Получить webconnection.httpMethod.post=Поместить webconnection.name.prompt=Назовите этот веб-соединение webconnection.details.label=подтвердить детали webconnection.parameters=Настройка информация отправляется на сервер webconnection.parameters.confirm=Сконфигурированные данные отправляются на сервер webconnection.keyword.label=Ключевое слово webconnection.none.label=Ни один webconnection.url.label=Cервер Url: webconnection.param.name=Имя: webconnection.param.value=значение: webconnection.add.anotherparam=Добавить параметр dynamicfield.message_body.label=Текст сообщения dynamicfield.message_body_with_keyword.label=Текст сообщения с ключевым словом dynamicfield.message_src_number.label=Контактный номер dynamicfield.message_src_name.label=Контактное имя dynamicfield.message_timestamp.label=Cообщение Timestamp webconnection.keyword.validation.error=Требуется ключевое слово webconnection.url.validation.error=Tребуется URL webconnection.save=был сохранен! webconnection.saved=сохранен! webconnection.save.success={0} был сохранен! 
webconnection.generic.service.label=Обслуживание webconnection.generic.httpMethod.label=Http метод: webconnection.generic.url.label=Адрес: webconnection.generic.parameters.label=Сконфигурированные данные отправляются на сервер: webconnection.generic.keyword.label=Ключевое слово: webconnection.generic.key.label=ключ API frontlinesms2.Keyword.value.validator.error.frontlinesms2.UshahidiWebconnection.keyword.value=Неверное значение ключевого слова #Subscription i18n subscription.label=подписка subscription.name.prompt=Дайте имя етой подписке subscription.details.label=Подтвердите детали subscription.description=Позвольте людям автоматически присоединиться и оставить контактные группы с использованием ключевого слова сообщения subscription.select.group=Выберите группу для подписки subscription.group.none.selected=Выберите группу subscription.autoreplies=автоответчики subscription.sorting=Автоматическая сортировка subscription.sorting.header=Сообщения процесса автоматически с помощью ключевых слов (не обязательно) subscription.confirm=подтверждить subscription.group.header=Выберите группу subscription.group.description=TODO:Contacts can be added and removed from groups automatically when FrontlineSMS receives a message that includes a special keyword. subscription.keyword.header=TODO:Enter keywords for this subscription subscription.top.keyword.description=TODO:Enter the top-level keywords that users will use to select this group. subscription.top.keyword.more.description=TODO:You may enter multiple top-level keywords for each option, separated with commas. Top-level keywords need to be unique across all activities. subscription.keywords.header=TODO:Enter keywords for joining and leaving this group. subscription.keywords.description=TODO:You may enter multiple keywords separated by commas. If no top-level keywords are entered above, then these join and leave keywords need to be unique across all activities. 
subscription.default.action.header=TODO:Select an action when no keywords sent subscription.default.action.description=TODO:Select the desired action when a message matches the top-level keyword but none of the join or leave keywords: subscription.keywords.leave=TODO:Leave keyword(s) subscription.keywords.join=TODO:Join keyword(s) subscription.default.action.join=TODO:Add the contact to the group subscription.default.action.leave=TODO:Remove the contact from the group subscription.default.action.toggle=TODO:Toggle the contact's group membership subscription.autoreply.join=TODO:Send an automatic reply when a contact joins the group subscription.autoreply.leave=TODO:Send an automatic reply when a contact leaves the group subscription.confirm.group=группа subscription.confirm.keyword=ключевое слово subscription.confirm.join.alias=Регистрация Ключевые слова subscription.confirm.leave.alias=Оставьте Ключевые слова subscription.confirm.default.action=Действие по умолчанию subscription.confirm.join.autoreply=TODO:Join Autoreply subscription.confirm.leave.autoreply=TODO:Leave Autoreply subscription.info1=Подписка была сохранена и сейчас активно subscription.info2=Входящие сообщения, которые соответствуют этим ключевым словом теперь изменить членство в группе контактов, как это определено subscription.info3=Чтобы увидеть подписки, нажмите на нее в меню слева subscription.categorise.title=Категоризовать сообщения subscription.categorise.info=Пожалуйста, выберите действие, которое будет выполняться с отправителями выбранное сообщение, когда они добавляются к {0} subscription.categorise.join.label=Добавить отправителя в {0} subscription.categorise.leave.label=Удалить отправителей от {0} subscription.categorise.toggle.label=TODO:Toggle senders&#39; membership of {0} subscription.join=присоединиться subscription.leave=бросать subscription.sorting.example.toplevel=например РЕШЕНИЕ subscription.sorting.example.join=например ПОДПИСКА, присоединиться 
subscription.sorting.example.leave=например Отказаться от подписки, ОСТАВИТЬ subscription.keyword.required=Требуется ключевое слово subscription.jointext.required=TODO:Please enter join autoreply text subscription.leavetext.required=TODO:Please enter leave autoreply text subscription.moreactions.delete=Удалить подписку subscription.moreactions.rename=Переименовать подписки subscription.moreactions.edit=Редактирование подписки subscription.moreactions.export=Экспорт подписки # Generic activity sorting activity.generic.sorting=Автоматическая обработка activity.generic.sorting.subtitle=Сообщения процесса автоматически с помощью ключевых слов (не обязательно) activity.generic.sort.header=Сообщения процесса автоматически с помощью ключевых слов (не обязательно) activity.generic.sort.description=TODO:If people send in messages beginning with a particular keyword, FrontlineSMS can automatically process the messages on your system. activity.generic.keywords.title=TODO:Enter keywords for activity. You can enter multiple keywords separated by commas: activity.generic.keywords.subtitle=Введите ключевые слова для деятельности activity.generic.keywords.info=TODO:You can enter multiple keywords separated by commas: activity.generic.no.keywords.title=Не используйте ключевые слова activity.generic.no.keywords.description=Все входящие сообщения, которые не соответствуют ни другие ключевые слова будут вызывать эту деятельность activity.generic.disable.sorting=Не автоматической сортировки сообщений activity.generic.disable.sorting.description=Сообщения не будут автоматически обрабатываются этой деятельности activity.generic.enable.sorting=TODO:Process responses containing a keyword automatically activity.generic.sort.validation.unique.error=Ключевые слова должны быть уникальными activity.generic.keyword.in.use=Ключевое слово {0} уже используется деятельности {1} activity.generic.global.keyword.in.use=TODO:Activity {0} is set to receive all messages that do not match other keywords. 
You can only have one active activity with this setting #basic authentication auth.basic.label=Basic Authentication auth.basic.info=Require a username and password for accessing FrontlineSMS across the network auth.basic.enabled.label=Enable Basic Authentication auth.basic.username.label=Username auth.basic.password.label=Password auth.basic.confirmPassword.label=Confirm Password auth.basic.password.mismatch=Passwords don't match newfeatures.popup.title=Новые возможности newfeatures.popup.showinfuture=Показывать это окно в будущеm dynamicfield.message_text.label=текст сообщения dynamicfield.message_text_with_keyword.label=Текст сообщения с ключевым словам dynamicfield.sender_name.label=Имя Отправителя dynamicfield.sender_number.label=номер отправителя dynamicfield.recipient_number.label=номер получателя dynamicfield.recipient_name.label=Имя получателя # Smpp Fconnection smpp.label=SMPP Account smpp.type.label=Тип smpp.name.label=Название smpp.send.label=Use for sending smpp.receive.label=Use for receiving smpp.url.label=SMSC URL smpp.port.label=SMSC Port smpp.username.label=Username smpp.password.label=Password smpp.fromNumber.label=From number smpp.description=Send and receive messages through an SMSC smpp.global.info=You will need to get an account with your phone network of choice. smpp.send.validator.invalid=You cannot configure a connection without SEND or RECEIVE fuctionality. routing.title=Create rules for which phone number is used by outgoing messages. routing.info=These rules will determine how the system selects which connection or phone number to use to send outgoing messages. Remember, the phone number seen by recipients may depend on the rules you set here. Also, changing this configuration may affect the cost of sending messages. 
routing.rules.sending=When sending outgoing messages: routing.rules.not_selected=If none of the above rules match: routing.rules.otherwise=Otherwise: routing.rules.device=Use {0} routing.rule.uselastreceiver=Send through most recent number that the contact messaged routing.rule.useany=Use any available connection's phone number routing.rule.dontsend=Do not send the message routing.notification.no-available-route=Outgoing message(s) not sent due to your routing preferences. routing.rules.none-selected.warning=Warning: You have no rules or phone numbers selected. No messages will be sent. If you wish to send messages, please enable a connection. customactivity.overview=Overview customactivity.title={0} customactivity.confirm=Подтвердить customactivity.label=Custom Activity Builder customactivity.description=Create your own activity from scratch by applying a custom set of actions to your specified keyword customactivity.name.prompt=Name this activity customactivity.moreactions.delete=Delete activity customactivity.moreactions.rename=Rename activity customactivity.moreactions.edit=Edit activity customactivity.moreactions.export=Export activity customactivity.text.none=Пусто customactivity.config=Configure customactivity.config.description=Build and configure a set of actions for this activity. The actions will all be executed when a message matches the criteria you set on the previous step. customactivity.info=Your Custom Activity has been created, and any messages containing your keyword will have the specified actions applied to it. customactivity.info.warning=Without a keyword, all incoming messages will trigger the actions in this Custom Activity. customactivity.info.note=Note: If you archive the Custom Activity, incoming messages will no longer be sorted for it. 
customactivity.save.success={0} activity saved
customactivity.action.steps.label=Action Steps
validation.group.notnull=Please select a group
customactivity.join.description=Joining "{0}" group
customactivity.leave.description=Leaving "{0}" group
customactivity.forward.description=Forwarding with "{0}"
customactivity.webconnectionStep.description=Upload to "{0}"
customactivity.reply.description=Reply with "{0}"
customactivity.step.join.add=Add sender to group
customactivity.step.join.title=Add sender to group*
customactivity.step.leave.add=Удалить отправителей от {0}
customactivity.step.leave.title=Remove sender from group*
customactivity.step.reply.add=Send Autoreply
customactivity.step.reply.title=Enter message to autoreply to sender*
customactivity.step.forward.add=Forward message
customactivity.step.forward.title=Automatically forward a message to one or more contacts
customactivity.manual.sorting=Automatic processing disabled
customactivity.step.webconnectionStep.add=Upload message to a URL
customactivity.step.webconnectionStep.title=Upload message to a URL
customactivity.validation.error.autoreplytext=Reply message is required
customactivity.validation.error.name=Требуется название
customactivity.validation.error.url=Требуется URL
customactivity.validation.error.paramname=Parameter name is required
recipientSelector.keepTyping=Keep typing...
recipientSelector.searching=Идет поиск
validation.recipients.notnull=Please select at least one recipient
localhost.ip.placeholder=your-ip-address
{ "pile_set_name": "Github" }
# 给文字加上下划线 - 《css揭秘》笔记

其实很简单: `text-decoration: underline`

或者

```css
a[href] {
    border-bottom: 1px solid #333;
    text-decoration: none;
}
```

虽然用 border-bottom 模拟下划线可以对颜色、线宽、线型进行控制,但是明显这些下划线和文本之间的间距太大。如图:

![](http://ww3.sinaimg.cn/large/72f96cbagw1f67qdziceej20ve03agm9.jpg)

当然可以给a标签加一个 `display:inline-block;` 再指定一个小一点的 `line-height`:

```css
display: inline-block;
border-bottom: 1px solid #333;
line-height: .9;
```

但是一旦换行就悲剧了,阻止了正常的文本换行:

![](http://ww3.sinaimg.cn/large/72f96cbagw1f67qi4nu5pj20kl04bdgh.jpg)

其实还可以用一层内嵌的 `box-shadow: 0 -1px #333 inset` 让下划线离文本近一些,但是微乎其微只是近了线宽那么一点的距离,不明显。

## 解决方案

最佳的解决方案是用意想不到的 `background-image`

```css
background: linear-gradient(#f00, #f00) repeat-x;
background-size: 100% 1px;
background-position: 0 1em;
```

这样就显得很优雅柔和了

![](http://ww4.sinaimg.cn/large/72f96cbagw1f67qutxbm9j20jx047gm9.jpg)

不过还有问题,字母 **p** 和 **y** 被下划线穿过了,如果遇到字母能自动避开会更好,所以,假如背景是一片实色,即可以设置两层与背景色相同的 `text-shadow` 来模拟这种效果

```css
background: linear-gradient(#f00, #f00) repeat-x;
background-size: 100% 1px;
background-position: 0 1em;
text-shadow: .05em 0 #fff, -.05em 0 #fff;
```

![](http://ww3.sinaimg.cn/large/72f96cbagw1f67qzc2g22j20jr03q3z6.jpg)

使用背景渐变来实现下划线可以做到相当灵活的转换:

比如一条绿色虚线下划线

```css
background: linear-gradient(90deg, #f00 70%, transparent 0) repeat-x;
background-size: .2em 2px;
background-position: 0 1em;
text-shadow: .05em 0 #fff, -.05em 0 #fff;
```

通过色标的百分比调整虚线的虚实比例, 用 background-size 来调整虚线的疏密。

![](http://ww1.sinaimg.cn/large/72f96cbagw1f67r61o72uj20um028gma.jpg)

![](http://ww3.sinaimg.cn/large/72f96cbagw1f67raauuhqj20u0026js4.jpg)

[demo地址](http://ccforward.github.io/css-secrets/underline/index.html)
{ "pile_set_name": "Github" }
--- title: IMPORT (Experimental) summary: Import CSV data into your CockroachDB cluster. toc: true --- The `IMPORT` [statement](sql-statements.html) imports tabular data (e.g., CSVs) into a single table. {{site.data.alerts.callout_danger}}<strong>This is an experimental feature</strong>. To enable it, you must run <a href="set-cluster-setting.html"><code>SET CLUSTER SETTING experimental.importcsv.enabled = true</code></a>{{site.data.alerts.end}} {{site.data.alerts.callout_info}}For details about importing SQL dumps, see <a href="import-data.html">Import Data</a>.{{site.data.alerts.end}} ## Glossary Term | Definition -----|----------- **Import file** | The tabular data file you want to import. **Processing node** | The single node processing the [`IMPORT`](import.html) statement/ **Temp directory** | A location where the processing node can store data from the import file it converts to CockroachDB-compatible key-value data.<br/><br/>This directory *must* be available to all nodes using the same address (i.e., cannot use the processing node's local file storage). ## Functional Overview Because importing data is a complex task, it can be useful to have a high-level understanding of the process. 1. A single node receives the [`IMPORT`](import.html) request, which becomes the processing node. 2. The processing node streams the contents of the import file, converting its contents into CockroachDB-compatible key-value data. 3. As the key-value data is generated, the node stores it in the temp directory. 4. Once the entire import file has been converted to key-value data, relevant nodes import key-value data from the temp directory. After the import has completed, you should also delete the files from your temp directory. ## Preparation Before using [`IMPORT`](import.html), you should have: - The schema of the table you want to import. - The tabular data you want to import (e.g., CSV), preferably hosted on cloud storage. 
- A location to store data before it is fully imported into all your nodes (referred to in this document as a "temp" directory). This location *must* be accessible to all nodes using the same address (i.e., cannot use a node's local file storage). For ease of use, we recommend using cloud storage. However, if that isn't readily available to you, we also have a [guide on easily creating your own file server](create-a-file-server.html). ## Details ### Import Targets Imported tables must not exist and must be created in the [`IMPORT`](import.html) statement. If the table you want to import already exists, you must drop it with [`DROP TABLE`](drop-table.html). You can only import a single table at a time. ### Create Table Your [`IMPORT`](import.html) statement must include a `CREATE TABLE` statement (representing the schema of the data you want to import) using one of the following methods: - A reference to a file that contains a `CREATE TABLE` statement - An inline `CREATE TABLE` statement We also recommend [all secondary indexes you want to use in the `CREATE TABLE` statement](create-table.html#create-a-table-with-secondary-indexes). It is possible to add secondary indexes later, but it is significantly faster to specify them during import. ### Object Dependencies When importing tables, you must be mindful of the following rules because [`IMPORT`](import.html) only creates single tables which must not already exist: - Objects that the imported table depends on must already exist - Objects that depend on the imported table can only be created after the import completes ### Operational Requirements & Concerns Because [`IMPORT`](import.html) has a number of moving parts, there are a number of operational concerns in executing the statement, the most important of which is ensuring that the processing node can execute [`IMPORT`](import.html) successfully. 
#### Choose Node to Process Request Because of [`IMPORT`](import.html)'s current implementation, the entire task is executed on a single node. If your deployment is not entirely symmetric, sending the request to a random node might have undesirable effects. Instead, we recommend bypassing any load balancers, connecting to a machine directly, and running the [`IMPORT`](import.html) statement on it. It's important to note, though, that after the single machine creates the CockroachDB-compatible key-value data, the process of importing the data is distributed among nodes in the cluster. {{site.data.alerts.callout_info}}Future versions of <code>IMPORT</code> will let you distribute the entire process among many nodes.{{site.data.alerts.end}} #### Available Storage Requirements The node's first-listed/default [`store`](start-a-node.html#store) directory must have enough available storage equal to or greater than the size of the file you're importing. On [`cockroach start`](start-a-node.html), if you set `--max-disk-temp-storage`, it must also be greater than the size of the file you're importing. For example, if you're importing approximately 10GiB of data, the node that ends up running the [`IMPORT`](import.html) command must have at least 10GiB of available storage in its `store` directory. ### Import File Location You can store the tabular data you want to import using either a node's local storage or remote cloud storage (Amazon S3, Google Cloud Platform, etc.). For simplicity's sake, we *highly recommend* using cloud/remote storage for the data you want to import. However, if you do want to store the file locally to import it, there are a number of things to understand. 
#### Importing Data From Local Storage {{site.data.alerts.callout_info}}Because you must have remote/cloud storage available to complete the <code>IMPORT</code> process, we recommend using it instead of local file storage.<br/><br/>If you do not have access to cloud storage, you can easily create a file server using <a href="create-a-file-server.html">this guide</a>.{{site.data.alerts.end}} Because CockroachDB is designed as a distributed system, the ergonomics of local file storage require some understanding to use successfully. Though we do not recommend this process, if you do want to use a locally stored file, this procedure is likely to cause you the fewest headaches: 1. Ensure the node you want to use has available storage space at least 2x the size of the data you want to import; 1x for the file itself, and 1x for the converted key-value data. For example, if you want to import 10GiB of data, your node needs 20GiB of available storage. 2. Upload the tabular data file to a single node, and then connect to that node. 3. Execute the [`IMPORT`](import.html) statement, importing to the locally stored file with the `nodelocal` prefix, e.g., `nodelocal://backup.csv`. However, the "temp" directory you choose must use a location available to all nodes in the cluster (i.e., you cannot use local file storage). You will need to use either cloud storage, a custom HTTP server, or NFS connected to all nodes in the cluster. ### Temp Directory To distribute the data you want to import to all nodes in your cluster, the [`IMPORT`](import.html) process requires the CockroachDB-compatible key-value data be stored in a location that is accessible to all nodes in the cluster using the same address. 
To achieve this you can use: - Cloud storage, such as Amazon S3 or Google Cloud Platform - Network file storage mounted to every node - HTTP file server {{site.data.alerts.callout_info}}If you do not currently have any of these options available, you can easily <a href="create-a-file-server.html">create a file server</a>.{{site.data.alerts.end}} The temp directory must have at least as much storage space as the size of the data you want to import. #### Temp Directory Cleanup After completing the [`IMPORT`](import.html) process, you must manually remove the key-value data stored in the temp directory. ### Table Users and Privileges Imported tables are treated as new tables, so you must [`GRANT`](grant.html) privileges to them. ## Performance Currently, [`IMPORT`](import.html) uses a single node to convert your tabular data into key-value data, which means the node's CPU and RAM will be partially consumed by the [`IMPORT`](import.html) task in addition to serving normal traffic. Later steps of the import process distribute work among many nodes and have less impact on the nodes' resources. ## Synopsis {% include {{ page.version.version }}/sql/diagrams/import.html %} ## Required Privileges Only the `root` user can run [`IMPORT`](import.html). ## Parameters | Parameter | Description | |-----------|-------------| | **table_name** | The name of the table you want to import/create. | | **create_table_file** | The URL of a plain text file containing the [`CREATE TABLE`](create-table.html) statement you want to use (see [this example for syntax](#use-create-table-statement-from-a-file)). | | **table_elem_list** | The table definition you want to use (see [this example for syntax](#use-create-table-statement-from-a-statement)). | | **file_to_import** | The URL of the file you want to import.| | `WITH` **kv_option** | Control your import's behavior with [these options](#import-options). 
The **temp** option (which represents the [temp directory](#temp-directory)'s URL) is required. | ### Import File & Temp Directory URLs URLs for the file you want to import and your temp directory must use the following format: {% include {{ page.version.version }}/misc/external-urls.md %} #### Notes [<sup>1</sup>](#import-file-temp-directory-urls) Only supports instance auth. [<sup>2</sup>](#import-file-temp-directory-urls) You can easily create your own HTTP server with [Caddy or nginx](create-a-file-server.html). [<sup>3</sup>](#import-file-temp-directory-urls) If using NFS for your temp directory, each node in the cluster must have access to the NFS using the same URL. ### Import Options You can control the [`IMPORT`](import.html) process's behavior using any of the following key-value pairs as a `kv_option`. #### `temp` A directory accessible by all nodes, which is used to store the CockroachDB-compatible key-value data before all nodes import the data. <table> <tbody> <tr> <td><strong>Required?</strong></td> <td>Yes</td> </tr> <tr> <td><strong>Key</strong></td> <td><code>temp</code></td> </tr> <tr> <td><strong>Value</strong></td> <td>The URL of the temp directory</td> </tr> <tr> <td><strong>Example</strong></td> <td><code>WITH temp = 'azure://acme-co/import-temp?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co'</code></td> </tr> </tbody> </table> #### `delimiter` If not using comma as your column delimiter, you can specify another Unicode character as the delimiter. <table> <tbody> <tr> <td><strong>Required?</strong></td> <td>No</td> </tr> <tr> <td><strong>Key</strong></td> <td><code>delimiter</code></td> </tr> <tr> <td><strong>Value</strong></td> <td>The unicode character that delimits columns in your rows</td> </tr> <tr> <td><strong>Example</strong></td> <td>To use tab-delimited values: <code>WITH temp = '...', delimiter = e'\t'</code></td> </tr> </tbody> </table> #### `comment` Do not import rows that begin with this character. 
<table> <tbody> <tr> <td><strong>Required?</strong></td> <td>No</td> </tr> <tr> <td><strong>Key</strong></td> <td><code>comment</code></td> </tr> <tr> <td><strong>Value</strong></td> <td>The unicode character that identifies rows to skip</td> </tr> <tr> <td><strong>Example</strong></td> <td><code>WITH temp = '...', comment = '#'</code></td> </tr> </tbody> </table> #### `nullif` Convert values to SQL *NULL* if they match the specified string. <table> <tbody> <tr> <td><strong>Required?</strong></td> <td>No</td> </tr> <tr> <td><strong>Key</strong></td> <td><code>nullif</code></td> </tr> <tr> <td><strong>Value</strong></td> <td>The string that should be converted to <em>NULL</em></td> </tr> <tr> <td><strong>Example</strong></td> <td>To use empty columns as <em>NULL</em>: <code>WITH temp = '...', nullif = ''</code></td> </tr> </tbody> </table> ## Examples ### Use Create Table Statement from a File ~~~ sql > IMPORT TABLE customers CREATE USING 'azure://acme-co/customer-create-table.sql?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co' CSV DATA ('azure://acme-co/customer-import-data.csv?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co') WITH temp = 'azure://acme-co/temp/?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co' ; ~~~ ### Use Create Table Statement from a Statement ~~~ sql > IMPORT TABLE customers ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name TEXT, INDEX name_idx (name) ) CSV DATA ('azure://acme-co/customer-import-data.csv?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co') WITH temp = 'azure://acme-co/temp/?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co' ; ~~~ ### Import a Tab-Separated File ~~~ sql > IMPORT TABLE customers ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name TEXT, INDEX name_idx (name) ) CSV DATA ('azure://acme-co/customer-import-data.tsc?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co') WITH temp = 'azure://acme-co/temp/?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co', delimiter = e'\t' ; ~~~ ### Skip Commented Lines ~~~ sql 
> IMPORT TABLE customers ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name TEXT, INDEX name_idx (name) ) CSV DATA ('azure://acme-co/customer-import-data.tsc?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co') WITH temp = 'azure://acme-co/temp/?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co', comment = '#' ; ~~~ ### Use Blank Characters as *NULL* ~~~ sql > IMPORT TABLE customers ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), name TEXT, INDEX name_idx (name) ) CSV DATA ('azure://acme-co/customer-import-data.tsc?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co') WITH temp = 'azure://acme-co/temp/?AZURE_ACCOUNT_KEY=hash&AZURE_ACCOUNT_NAME=acme-co', nullif = '' ; ~~~ ## See Also - [Create a File Server](create-a-file-server.html) - [Importing Data](import-data.html)
{ "pile_set_name": "Github" }
function sF = cos(sF, varargin)
% cosine of a function
%
% Syntax
%   sF = cos(sF)
%   sF = cos(sF, 'bandwidth', bandwidth)
%
% Input
%  sF - @S2FunHarmonic
%
% Output
%  sF - @S2FunHarmonic
%
% Options
%  bandwidth - minimal degree of the spherical harmonic
%

% Pointwise cosine: evaluate sF at quadrature nodes, take cos of those
% values, and build a new S2FunHarmonic from the result via quadrature.
% Any options (e.g. 'bandwidth') are forwarded to the quadrature method.
sF = sF.quadrature(@(v) cos(sF.eval(v)),varargin{:});

end
{ "pile_set_name": "Github" }
// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package impl

import (
	"fmt"
	"reflect"
	"strconv"

	"google.golang.org/protobuf/encoding/prototext"
	"google.golang.org/protobuf/internal/errors"
	"google.golang.org/protobuf/proto"
	pref "google.golang.org/protobuf/reflect/protoreflect"
	piface "google.golang.org/protobuf/runtime/protoiface"
)

// Export is a zero-length named type that exists only to export a set of
// functions that we do not want to appear in godoc.
type Export struct{}

// NewError formats a string according to the format specifier and arguments and
// returns an error that has a "proto" prefix.
func (Export) NewError(f string, x ...interface{}) error {
	return errors.New(f, x...)
}

// enum is any enum type generated by protoc-gen-go
// and must be a named int32 type.
type enum = interface{}

// EnumOf returns the protoreflect.Enum interface over e.
// It returns nil if e is nil.
func (Export) EnumOf(e enum) pref.Enum {
	switch e := e.(type) {
	case nil:
		return nil
	case pref.Enum:
		// Already implements the v2 reflection interface; use it as-is.
		return e
	default:
		// Not a v2 enum; wrap the value via the legacy enum support.
		return legacyWrapEnum(reflect.ValueOf(e))
	}
}

// EnumDescriptorOf returns the protoreflect.EnumDescriptor for e.
// It returns nil if e is nil.
func (Export) EnumDescriptorOf(e enum) pref.EnumDescriptor {
	switch e := e.(type) {
	case nil:
		return nil
	case pref.Enum:
		return e.Descriptor()
	default:
		// Legacy enum: derive the descriptor from its Go type.
		return LegacyLoadEnumDesc(reflect.TypeOf(e))
	}
}

// EnumTypeOf returns the protoreflect.EnumType for e.
// It returns nil if e is nil.
func (Export) EnumTypeOf(e enum) pref.EnumType {
	switch e := e.(type) {
	case nil:
		return nil
	case pref.Enum:
		return e.Type()
	default:
		// Legacy enum: derive the type information from its Go type.
		return legacyLoadEnumType(reflect.TypeOf(e))
	}
}

// EnumStringOf returns the enum value as a string, either as the name if
// the number is resolvable, or the number formatted as a string.
func (Export) EnumStringOf(ed pref.EnumDescriptor, n pref.EnumNumber) string {
	// Prefer the declared value name when the number is known.
	ev := ed.Values().ByNumber(n)
	if ev != nil {
		return string(ev.Name())
	}
	// Unknown number: fall back to its decimal representation.
	return strconv.Itoa(int(n))
}

// message is any message type generated by protoc-gen-go
// and must be a pointer to a named struct type.
type message = interface{}

// legacyMessageWrapper wraps a v2 message as a v1 message.
// The three methods below are what make it satisfy the v1 message
// interface returned by ProtoMessageV1Of.
type legacyMessageWrapper struct{ m pref.ProtoMessage }

func (m legacyMessageWrapper) Reset()         { proto.Reset(m.m) }
func (m legacyMessageWrapper) String() string { return Export{}.MessageStringOf(m.m) }
func (m legacyMessageWrapper) ProtoMessage()  {}

// ProtoMessageV1Of converts either a v1 or v2 message to a v1 message.
// It returns nil if m is nil.
func (Export) ProtoMessageV1Of(m message) piface.MessageV1 {
	switch mv := m.(type) {
	case nil:
		return nil
	case piface.MessageV1:
		return mv
	case unwrapper:
		// Unwrap and retry the conversion on the underlying message.
		return Export{}.ProtoMessageV1Of(mv.protoUnwrap())
	case pref.ProtoMessage:
		return legacyMessageWrapper{mv}
	default:
		panic(fmt.Sprintf("message %T is neither a v1 or v2 Message", m))
	}
}

func (Export) protoMessageV2Of(m message) pref.ProtoMessage {
	switch mv := m.(type) {
	case nil:
		return nil
	case pref.ProtoMessage:
		return mv
	case legacyMessageWrapper:
		// Unwrap our own v1 wrapper back to the v2 message it holds.
		return mv.m
	case piface.MessageV1:
		// A plain v1 message has no v2 form here; callers treat nil as
		// "fall back to legacy wrapping" (see ProtoMessageV2Of below).
		return nil
	default:
		panic(fmt.Sprintf("message %T is neither a v1 or v2 Message", m))
	}
}

// ProtoMessageV2Of converts either a v1 or v2 message to a v2 message.
// It returns nil if m is nil.
func (Export) ProtoMessageV2Of(m message) pref.ProtoMessage {
	if m == nil {
		return nil
	}
	if mv := (Export{}).protoMessageV2Of(m); mv != nil {
		return mv
	}
	// Legacy path: wrap the v1 message value as a v2 message.
	return legacyWrapMessage(reflect.ValueOf(m)).Interface()
}

// MessageOf returns the protoreflect.Message interface over m.
// It returns nil if m is nil.
func (Export) MessageOf(m message) pref.Message {
	if m == nil {
		return nil
	}
	if mv := (Export{}).protoMessageV2Of(m); mv != nil {
		return mv.ProtoReflect()
	}
	// Legacy path: wrap the v1 message value to obtain reflection support.
	return legacyWrapMessage(reflect.ValueOf(m))
}

// MessageDescriptorOf returns the protoreflect.MessageDescriptor for m.
// It returns nil if m is nil.
func (Export) MessageDescriptorOf(m message) pref.MessageDescriptor {
	if m == nil {
		return nil
	}
	if mv := (Export{}).protoMessageV2Of(m); mv != nil {
		return mv.ProtoReflect().Descriptor()
	}
	// Legacy path: derive the descriptor from the message's Go type.
	return LegacyLoadMessageDesc(reflect.TypeOf(m))
}

// MessageTypeOf returns the protoreflect.MessageType for m.
// It returns nil if m is nil.
func (Export) MessageTypeOf(m message) pref.MessageType {
	if m == nil {
		return nil
	}
	if mv := (Export{}).protoMessageV2Of(m); mv != nil {
		return mv.ProtoReflect().Type()
	}
	// Legacy path: load (or build) type information from the Go type.
	return legacyLoadMessageInfo(reflect.TypeOf(m), "")
}

// MessageStringOf returns the message value as a string,
// which is the message serialized in the protobuf text format
// (single-line form, since Multiline is false).
func (Export) MessageStringOf(m pref.ProtoMessage) string {
	return prototext.MarshalOptions{Multiline: false}.Format(m)
}
{ "pile_set_name": "Github" }
//
// Client.swift
//
// Generated by swagger-codegen
// https://github.com/swagger-api/swagger-codegen
//

import Foundation

/// API model holding a single optional client identifier.
public struct Client: Codable {

    /// The wrapped client value; may be `nil`.
    public var client: String?

    /// Explicit mapping of the stored property to its JSON field name.
    /// Identical to the compiler-synthesized keys, spelled out for clarity.
    private enum CodingKeys: String, CodingKey {
        case client = "client"
    }

    /// Creates a new model instance.
    /// - Parameter client: the client value, or `nil`.
    public init(client: String?) {
        self.client = client
    }
}
{ "pile_set_name": "Github" }
[ { "Description": "defender-win2016", "Format": "ova", "UserBucket": { "S3Bucket": "xxxx", "S3Key": "vulnvms/defender-win2016.ova" } } ]
{ "pile_set_name": "Github" }
//===-- LegalizeVectorOps.cpp - Implement SelectionDAG::LegalizeVectors ---===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // // This file implements the SelectionDAG::LegalizeVectors method. // // The vector legalizer looks for vector operations which might need to be // scalarized and legalizes them. This is a separate step from Legalize because // scalarizing can introduce illegal types. For example, suppose we have an // ISD::SDIV of type v2i64 on x86-32. The type is legal (for example, addition // on a v2i64 is legal), but ISD::SDIV isn't legal, so we have to unroll the // operation, which introduces nodes with the illegal type i64 which must be // expanded. Similarly, suppose we have an ISD::SRA of type v16i8 on PowerPC; // the operation must be unrolled, which introduces nodes with the illegal // type i8 which must be promoted. // // This does not legalize vector manipulations like ISD::BUILD_VECTOR, // or operations that happen to take a vector which are custom-lowered; // the legalization for such operations never produces nodes // with illegal types, so it's okay to put off legalizing them until // SelectionDAG::Legalize runs. // //===----------------------------------------------------------------------===// #include "llvm/CodeGen/SelectionDAG.h" #include "llvm/Target/TargetLowering.h" using namespace llvm; namespace { class VectorLegalizer { SelectionDAG& DAG; const TargetLowering &TLI; bool Changed; // Keep track of whether anything changed /// For nodes that are of legal width, and that have more than one use, this /// map indicates what regularized operand to use. This allows us to avoid /// legalizing the same thing more than once. SmallDenseMap<SDValue, SDValue, 64> LegalizedNodes; /// \brief Adds a node to the translation cache. 
void AddLegalizedOperand(SDValue From, SDValue To) { LegalizedNodes.insert(std::make_pair(From, To)); // If someone requests legalization of the new node, return itself. if (From != To) LegalizedNodes.insert(std::make_pair(To, To)); } /// \brief Legalizes the given node. SDValue LegalizeOp(SDValue Op); /// \brief Assuming the node is legal, "legalize" the results. SDValue TranslateLegalizeResults(SDValue Op, SDValue Result); /// \brief Implements unrolling a VSETCC. SDValue UnrollVSETCC(SDValue Op); /// \brief Implement expand-based legalization of vector operations. /// /// This is just a high-level routine to dispatch to specific code paths for /// operations to legalize them. SDValue Expand(SDValue Op); /// \brief Implements expansion for FNEG; falls back to UnrollVectorOp if /// FSUB isn't legal. /// /// Implements expansion for UINT_TO_FLOAT; falls back to UnrollVectorOp if /// SINT_TO_FLOAT and SHR on vectors isn't legal. SDValue ExpandUINT_TO_FLOAT(SDValue Op); /// \brief Implement expansion for SIGN_EXTEND_INREG using SRL and SRA. SDValue ExpandSEXTINREG(SDValue Op); /// \brief Implement expansion for ANY_EXTEND_VECTOR_INREG. /// /// Shuffles the low lanes of the operand into place and bitcasts to the proper /// type. The contents of the bits in the extended part of each element are /// undef. SDValue ExpandANY_EXTEND_VECTOR_INREG(SDValue Op); /// \brief Implement expansion for SIGN_EXTEND_VECTOR_INREG. /// /// Shuffles the low lanes of the operand into place, bitcasts to the proper /// type, then shifts left and arithmetic shifts right to introduce a sign /// extension. SDValue ExpandSIGN_EXTEND_VECTOR_INREG(SDValue Op); /// \brief Implement expansion for ZERO_EXTEND_VECTOR_INREG. /// /// Shuffles the low lanes of the operand into place and blends zeros into /// the remaining lanes, finally bitcasting to the proper type. SDValue ExpandZERO_EXTEND_VECTOR_INREG(SDValue Op); /// \brief Expand bswap of vectors into a shuffle if legal. 
SDValue ExpandBSWAP(SDValue Op); /// \brief Implement vselect in terms of XOR, AND, OR when blend is not /// supported by the target. SDValue ExpandVSELECT(SDValue Op); SDValue ExpandSELECT(SDValue Op); SDValue ExpandLoad(SDValue Op); SDValue ExpandStore(SDValue Op); SDValue ExpandFNEG(SDValue Op); /// \brief Implements vector promotion. /// /// This is essentially just bitcasting the operands to a different type and /// bitcasting the result back to the original type. SDValue Promote(SDValue Op); /// \brief Implements [SU]INT_TO_FP vector promotion. /// /// This is a [zs]ext of the input operand to the next size up. SDValue PromoteINT_TO_FP(SDValue Op); /// \brief Implements FP_TO_[SU]INT vector promotion of the result type. /// /// It is promoted to the next size up integer type. The result is then /// truncated back to the original type. SDValue PromoteFP_TO_INT(SDValue Op, bool isSigned); public: /// \brief Begin legalizer the vector operations in the DAG. bool Run(); VectorLegalizer(SelectionDAG& dag) : DAG(dag), TLI(dag.getTargetLoweringInfo()), Changed(false) {} }; bool VectorLegalizer::Run() { // Before we start legalizing vector nodes, check if there are any vectors. bool HasVectors = false; for (SelectionDAG::allnodes_iterator I = DAG.allnodes_begin(), E = std::prev(DAG.allnodes_end()); I != std::next(E); ++I) { // Check if the values of the nodes contain vectors. We don't need to check // the operands because we are going to check their values at some point. for (SDNode::value_iterator J = I->value_begin(), E = I->value_end(); J != E; ++J) HasVectors |= J->isVector(); // If we found a vector node we can start the legalization. if (HasVectors) break; } // If this basic block has no vectors then no need to legalize vectors. if (!HasVectors) return false; // The legalize process is inherently a bottom-up recursive process (users // legalize their uses before themselves). 
Given infinite stack space, we // could just start legalizing on the root and traverse the whole graph. In // practice however, this causes us to run out of stack space on large basic // blocks. To avoid this problem, compute an ordering of the nodes where each // node is only legalized after all of its operands are legalized. DAG.AssignTopologicalOrder(); for (SelectionDAG::allnodes_iterator I = DAG.allnodes_begin(), E = std::prev(DAG.allnodes_end()); I != std::next(E); ++I) LegalizeOp(SDValue(I, 0)); // Finally, it's possible the root changed. Get the new root. SDValue OldRoot = DAG.getRoot(); assert(LegalizedNodes.count(OldRoot) && "Root didn't get legalized?"); DAG.setRoot(LegalizedNodes[OldRoot]); LegalizedNodes.clear(); // Remove dead nodes now. DAG.RemoveDeadNodes(); return Changed; } SDValue VectorLegalizer::TranslateLegalizeResults(SDValue Op, SDValue Result) { // Generic legalization: just pass the operand through. for (unsigned i = 0, e = Op.getNode()->getNumValues(); i != e; ++i) AddLegalizedOperand(Op.getValue(i), Result.getValue(i)); return Result.getValue(Op.getResNo()); } SDValue VectorLegalizer::LegalizeOp(SDValue Op) { // Note that LegalizeOp may be reentered even from single-use nodes, which // means that we always must cache transformed nodes. 
DenseMap<SDValue, SDValue>::iterator I = LegalizedNodes.find(Op); if (I != LegalizedNodes.end()) return I->second; SDNode* Node = Op.getNode(); // Legalize the operands SmallVector<SDValue, 8> Ops; for (const SDValue &Op : Node->op_values()) Ops.push_back(LegalizeOp(Op)); SDValue Result = SDValue(DAG.UpdateNodeOperands(Op.getNode(), Ops), 0); bool HasVectorValue = false; if (Op.getOpcode() == ISD::LOAD) { LoadSDNode *LD = cast<LoadSDNode>(Op.getNode()); ISD::LoadExtType ExtType = LD->getExtensionType(); if (LD->getMemoryVT().isVector() && ExtType != ISD::NON_EXTLOAD) switch (TLI.getLoadExtAction(LD->getExtensionType(), LD->getValueType(0), LD->getMemoryVT())) { default: llvm_unreachable("This action is not supported yet!"); case TargetLowering::Legal: return TranslateLegalizeResults(Op, Result); case TargetLowering::Custom: if (SDValue Lowered = TLI.LowerOperation(Result, DAG)) { if (Lowered == Result) return TranslateLegalizeResults(Op, Lowered); Changed = true; if (Lowered->getNumValues() != Op->getNumValues()) { // This expanded to something other than the load. Assume the // lowering code took care of any chain values, and just handle the // returned value. 
assert(Result.getValue(1).use_empty() && "There are still live users of the old chain!"); return LegalizeOp(Lowered); } else { return TranslateLegalizeResults(Op, Lowered); } } case TargetLowering::Expand: Changed = true; return LegalizeOp(ExpandLoad(Op)); } } else if (Op.getOpcode() == ISD::STORE) { StoreSDNode *ST = cast<StoreSDNode>(Op.getNode()); EVT StVT = ST->getMemoryVT(); MVT ValVT = ST->getValue().getSimpleValueType(); if (StVT.isVector() && ST->isTruncatingStore()) switch (TLI.getTruncStoreAction(ValVT, StVT.getSimpleVT())) { default: llvm_unreachable("This action is not supported yet!"); case TargetLowering::Legal: return TranslateLegalizeResults(Op, Result); case TargetLowering::Custom: { SDValue Lowered = TLI.LowerOperation(Result, DAG); Changed = Lowered != Result; return TranslateLegalizeResults(Op, Lowered); } case TargetLowering::Expand: Changed = true; return LegalizeOp(ExpandStore(Op)); } } else if (Op.getOpcode() == ISD::MSCATTER) HasVectorValue = true; for (SDNode::value_iterator J = Node->value_begin(), E = Node->value_end(); J != E; ++J) HasVectorValue |= J->isVector(); if (!HasVectorValue) return TranslateLegalizeResults(Op, Result); EVT QueryType; switch (Op.getOpcode()) { default: return TranslateLegalizeResults(Op, Result); case ISD::ADD: case ISD::SUB: case ISD::MUL: case ISD::SDIV: case ISD::UDIV: case ISD::SREM: case ISD::UREM: case ISD::FADD: case ISD::FSUB: case ISD::FMUL: case ISD::FDIV: case ISD::FREM: case ISD::AND: case ISD::OR: case ISD::XOR: case ISD::SHL: case ISD::SRA: case ISD::SRL: case ISD::ROTL: case ISD::ROTR: case ISD::BSWAP: case ISD::CTLZ: case ISD::CTTZ: case ISD::CTLZ_ZERO_UNDEF: case ISD::CTTZ_ZERO_UNDEF: case ISD::CTPOP: case ISD::SELECT: case ISD::VSELECT: case ISD::SELECT_CC: case ISD::SETCC: case ISD::ZERO_EXTEND: case ISD::ANY_EXTEND: case ISD::TRUNCATE: case ISD::SIGN_EXTEND: case ISD::FP_TO_SINT: case ISD::FP_TO_UINT: case ISD::FNEG: case ISD::FABS: case ISD::FMINNUM: case ISD::FMAXNUM: case ISD::FCOPYSIGN: 
case ISD::FSQRT: case ISD::FSIN: case ISD::FCOS: case ISD::FPOWI: case ISD::FPOW: case ISD::FLOG: case ISD::FLOG2: case ISD::FLOG10: case ISD::FEXP: case ISD::FEXP2: case ISD::FCEIL: case ISD::FTRUNC: case ISD::FRINT: case ISD::FNEARBYINT: case ISD::FROUND: case ISD::FFLOOR: case ISD::FP_ROUND: case ISD::FP_EXTEND: case ISD::FMA: case ISD::SIGN_EXTEND_INREG: case ISD::ANY_EXTEND_VECTOR_INREG: case ISD::SIGN_EXTEND_VECTOR_INREG: case ISD::ZERO_EXTEND_VECTOR_INREG: case ISD::SMIN: case ISD::SMAX: case ISD::UMIN: case ISD::UMAX: QueryType = Node->getValueType(0); break; case ISD::FP_ROUND_INREG: QueryType = cast<VTSDNode>(Node->getOperand(1))->getVT(); break; case ISD::SINT_TO_FP: case ISD::UINT_TO_FP: QueryType = Node->getOperand(0).getValueType(); break; case ISD::MSCATTER: QueryType = cast<MaskedScatterSDNode>(Node)->getValue().getValueType(); break; } switch (TLI.getOperationAction(Node->getOpcode(), QueryType)) { case TargetLowering::Promote: Result = Promote(Op); Changed = true; break; case TargetLowering::Legal: break; case TargetLowering::Custom: { SDValue Tmp1 = TLI.LowerOperation(Op, DAG); if (Tmp1.getNode()) { Result = Tmp1; break; } // FALL THROUGH } case TargetLowering::Expand: Result = Expand(Op); } // Make sure that the generated code is itself legal. if (Result != Op) { Result = LegalizeOp(Result); Changed = true; } // Note that LegalizeOp may be reentered even from single-use nodes, which // means that we always must cache transformed nodes. AddLegalizedOperand(Op, Result); return Result; } SDValue VectorLegalizer::Promote(SDValue Op) { // For a few operations there is a specific concept for promotion based on // the operand's type. switch (Op.getOpcode()) { case ISD::SINT_TO_FP: case ISD::UINT_TO_FP: // "Promote" the operation by extending the operand. return PromoteINT_TO_FP(Op); case ISD::FP_TO_UINT: case ISD::FP_TO_SINT: // Promote the operation by extending the operand. 
return PromoteFP_TO_INT(Op, Op->getOpcode() == ISD::FP_TO_SINT); } // There are currently two cases of vector promotion: // 1) Bitcasting a vector of integers to a different type to a vector of the // same overall length. For example, x86 promotes ISD::AND v2i32 to v1i64. // 2) Extending a vector of floats to a vector of the same number of larger // floats. For example, AArch64 promotes ISD::FADD on v4f16 to v4f32. MVT VT = Op.getSimpleValueType(); assert(Op.getNode()->getNumValues() == 1 && "Can't promote a vector with multiple results!"); MVT NVT = TLI.getTypeToPromoteTo(Op.getOpcode(), VT); SDLoc dl(Op); SmallVector<SDValue, 4> Operands(Op.getNumOperands()); for (unsigned j = 0; j != Op.getNumOperands(); ++j) { if (Op.getOperand(j).getValueType().isVector()) if (Op.getOperand(j) .getValueType() .getVectorElementType() .isFloatingPoint() && NVT.isVector() && NVT.getVectorElementType().isFloatingPoint()) Operands[j] = DAG.getNode(ISD::FP_EXTEND, dl, NVT, Op.getOperand(j)); else Operands[j] = DAG.getNode(ISD::BITCAST, dl, NVT, Op.getOperand(j)); else Operands[j] = Op.getOperand(j); } Op = DAG.getNode(Op.getOpcode(), dl, NVT, Operands); if ((VT.isFloatingPoint() && NVT.isFloatingPoint()) || (VT.isVector() && VT.getVectorElementType().isFloatingPoint() && NVT.isVector() && NVT.getVectorElementType().isFloatingPoint())) return DAG.getNode(ISD::FP_ROUND, dl, VT, Op, DAG.getIntPtrConstant(0, dl)); else return DAG.getNode(ISD::BITCAST, dl, VT, Op); } SDValue VectorLegalizer::PromoteINT_TO_FP(SDValue Op) { // INT_TO_FP operations may require the input operand be promoted even // when the type is otherwise legal. EVT VT = Op.getOperand(0).getValueType(); assert(Op.getNode()->getNumValues() == 1 && "Can't promote a vector with multiple results!"); // Normal getTypeToPromoteTo() doesn't work here, as that will promote // by widening the vector w/ the same element width and twice the number // of elements. 
We want the other way around, the same number of elements, // each twice the width. // // Increase the bitwidth of the element to the next pow-of-two // (which is greater than 8 bits). EVT NVT = VT.widenIntegerVectorElementType(*DAG.getContext()); assert(NVT.isSimple() && "Promoting to a non-simple vector type!"); SDLoc dl(Op); SmallVector<SDValue, 4> Operands(Op.getNumOperands()); unsigned Opc = Op.getOpcode() == ISD::UINT_TO_FP ? ISD::ZERO_EXTEND : ISD::SIGN_EXTEND; for (unsigned j = 0; j != Op.getNumOperands(); ++j) { if (Op.getOperand(j).getValueType().isVector()) Operands[j] = DAG.getNode(Opc, dl, NVT, Op.getOperand(j)); else Operands[j] = Op.getOperand(j); } return DAG.getNode(Op.getOpcode(), dl, Op.getValueType(), Operands); } // For FP_TO_INT we promote the result type to a vector type with wider // elements and then truncate the result. This is different from the default // PromoteVector which uses bitcast to promote thus assumning that the // promoted vector type has the same overall size. 
SDValue VectorLegalizer::PromoteFP_TO_INT(SDValue Op, bool isSigned) { assert(Op.getNode()->getNumValues() == 1 && "Can't promote a vector with multiple results!"); EVT VT = Op.getValueType(); EVT NewVT; unsigned NewOpc; while (1) { NewVT = VT.widenIntegerVectorElementType(*DAG.getContext()); assert(NewVT.isSimple() && "Promoting to a non-simple vector type!"); if (TLI.isOperationLegalOrCustom(ISD::FP_TO_SINT, NewVT)) { NewOpc = ISD::FP_TO_SINT; break; } if (!isSigned && TLI.isOperationLegalOrCustom(ISD::FP_TO_UINT, NewVT)) { NewOpc = ISD::FP_TO_UINT; break; } } SDLoc loc(Op); SDValue promoted = DAG.getNode(NewOpc, SDLoc(Op), NewVT, Op.getOperand(0)); return DAG.getNode(ISD::TRUNCATE, SDLoc(Op), VT, promoted); } SDValue VectorLegalizer::ExpandLoad(SDValue Op) { SDLoc dl(Op); LoadSDNode *LD = cast<LoadSDNode>(Op.getNode()); SDValue Chain = LD->getChain(); SDValue BasePTR = LD->getBasePtr(); EVT SrcVT = LD->getMemoryVT(); ISD::LoadExtType ExtType = LD->getExtensionType(); SmallVector<SDValue, 8> Vals; SmallVector<SDValue, 8> LoadChains; unsigned NumElem = SrcVT.getVectorNumElements(); EVT SrcEltVT = SrcVT.getScalarType(); EVT DstEltVT = Op.getNode()->getValueType(0).getScalarType(); if (SrcVT.getVectorNumElements() > 1 && !SrcEltVT.isByteSized()) { // When elements in a vector is not byte-addressable, we cannot directly // load each element by advancing pointer, which could only address bytes. // Instead, we load all significant words, mask bits off, and concatenate // them to form each element. Finally, they are extended to destination // scalar type to build the destination vector. 
EVT WideVT = TLI.getPointerTy(DAG.getDataLayout()); assert(WideVT.isRound() && "Could not handle the sophisticated case when the widest integer is" " not power of 2."); assert(WideVT.bitsGE(SrcEltVT) && "Type is not legalized?"); unsigned WideBytes = WideVT.getStoreSize(); unsigned Offset = 0; unsigned RemainingBytes = SrcVT.getStoreSize(); SmallVector<SDValue, 8> LoadVals; while (RemainingBytes > 0) { SDValue ScalarLoad; unsigned LoadBytes = WideBytes; if (RemainingBytes >= LoadBytes) { ScalarLoad = DAG.getLoad(WideVT, dl, Chain, BasePTR, LD->getPointerInfo().getWithOffset(Offset), LD->isVolatile(), LD->isNonTemporal(), LD->isInvariant(), MinAlign(LD->getAlignment(), Offset), LD->getAAInfo()); } else { EVT LoadVT = WideVT; while (RemainingBytes < LoadBytes) { LoadBytes >>= 1; // Reduce the load size by half. LoadVT = EVT::getIntegerVT(*DAG.getContext(), LoadBytes << 3); } ScalarLoad = DAG.getExtLoad(ISD::EXTLOAD, dl, WideVT, Chain, BasePTR, LD->getPointerInfo().getWithOffset(Offset), LoadVT, LD->isVolatile(), LD->isNonTemporal(), LD->isInvariant(), MinAlign(LD->getAlignment(), Offset), LD->getAAInfo()); } RemainingBytes -= LoadBytes; Offset += LoadBytes; BasePTR = DAG.getNode(ISD::ADD, dl, BasePTR.getValueType(), BasePTR, DAG.getConstant(LoadBytes, dl, BasePTR.getValueType())); LoadVals.push_back(ScalarLoad.getValue(0)); LoadChains.push_back(ScalarLoad.getValue(1)); } // Extract bits, pack and extend/trunc them into destination type. 
unsigned SrcEltBits = SrcEltVT.getSizeInBits(); SDValue SrcEltBitMask = DAG.getConstant((1U << SrcEltBits) - 1, dl, WideVT); unsigned BitOffset = 0; unsigned WideIdx = 0; unsigned WideBits = WideVT.getSizeInBits(); for (unsigned Idx = 0; Idx != NumElem; ++Idx) { SDValue Lo, Hi, ShAmt; if (BitOffset < WideBits) { ShAmt = DAG.getConstant( BitOffset, dl, TLI.getShiftAmountTy(WideVT, DAG.getDataLayout())); Lo = DAG.getNode(ISD::SRL, dl, WideVT, LoadVals[WideIdx], ShAmt); Lo = DAG.getNode(ISD::AND, dl, WideVT, Lo, SrcEltBitMask); } BitOffset += SrcEltBits; if (BitOffset >= WideBits) { WideIdx++; BitOffset -= WideBits; if (BitOffset > 0) { ShAmt = DAG.getConstant( SrcEltBits - BitOffset, dl, TLI.getShiftAmountTy(WideVT, DAG.getDataLayout())); Hi = DAG.getNode(ISD::SHL, dl, WideVT, LoadVals[WideIdx], ShAmt); Hi = DAG.getNode(ISD::AND, dl, WideVT, Hi, SrcEltBitMask); } } if (Hi.getNode()) Lo = DAG.getNode(ISD::OR, dl, WideVT, Lo, Hi); switch (ExtType) { default: llvm_unreachable("Unknown extended-load op!"); case ISD::EXTLOAD: Lo = DAG.getAnyExtOrTrunc(Lo, dl, DstEltVT); break; case ISD::ZEXTLOAD: Lo = DAG.getZExtOrTrunc(Lo, dl, DstEltVT); break; case ISD::SEXTLOAD: ShAmt = DAG.getConstant(WideBits - SrcEltBits, dl, TLI.getShiftAmountTy(WideVT, DAG.getDataLayout())); Lo = DAG.getNode(ISD::SHL, dl, WideVT, Lo, ShAmt); Lo = DAG.getNode(ISD::SRA, dl, WideVT, Lo, ShAmt); Lo = DAG.getSExtOrTrunc(Lo, dl, DstEltVT); break; } Vals.push_back(Lo); } } else { unsigned Stride = SrcVT.getScalarType().getSizeInBits()/8; for (unsigned Idx=0; Idx<NumElem; Idx++) { SDValue ScalarLoad = DAG.getExtLoad(ExtType, dl, Op.getNode()->getValueType(0).getScalarType(), Chain, BasePTR, LD->getPointerInfo().getWithOffset(Idx * Stride), SrcVT.getScalarType(), LD->isVolatile(), LD->isNonTemporal(), LD->isInvariant(), MinAlign(LD->getAlignment(), Idx * Stride), LD->getAAInfo()); BasePTR = DAG.getNode(ISD::ADD, dl, BasePTR.getValueType(), BasePTR, DAG.getConstant(Stride, dl, BasePTR.getValueType())); 
Vals.push_back(ScalarLoad.getValue(0)); LoadChains.push_back(ScalarLoad.getValue(1)); } } SDValue NewChain = DAG.getNode(ISD::TokenFactor, dl, MVT::Other, LoadChains); SDValue Value = DAG.getNode(ISD::BUILD_VECTOR, dl, Op.getNode()->getValueType(0), Vals); AddLegalizedOperand(Op.getValue(0), Value); AddLegalizedOperand(Op.getValue(1), NewChain); return (Op.getResNo() ? NewChain : Value); } SDValue VectorLegalizer::ExpandStore(SDValue Op) { SDLoc dl(Op); StoreSDNode *ST = cast<StoreSDNode>(Op.getNode()); SDValue Chain = ST->getChain(); SDValue BasePTR = ST->getBasePtr(); SDValue Value = ST->getValue(); EVT StVT = ST->getMemoryVT(); unsigned Alignment = ST->getAlignment(); bool isVolatile = ST->isVolatile(); bool isNonTemporal = ST->isNonTemporal(); AAMDNodes AAInfo = ST->getAAInfo(); unsigned NumElem = StVT.getVectorNumElements(); // The type of the data we want to save EVT RegVT = Value.getValueType(); EVT RegSclVT = RegVT.getScalarType(); // The type of data as saved in memory. EVT MemSclVT = StVT.getScalarType(); // Cast floats into integers unsigned ScalarSize = MemSclVT.getSizeInBits(); // Round odd types to the next pow of two. if (!isPowerOf2_32(ScalarSize)) ScalarSize = NextPowerOf2(ScalarSize); // Store Stride in bytes unsigned Stride = ScalarSize/8; // Extract each of the elements from the original vector // and save them into memory individually. SmallVector<SDValue, 8> Stores; for (unsigned Idx = 0; Idx < NumElem; Idx++) { SDValue Ex = DAG.getNode( ISD::EXTRACT_VECTOR_ELT, dl, RegSclVT, Value, DAG.getConstant(Idx, dl, TLI.getVectorIdxTy(DAG.getDataLayout()))); // This scalar TruncStore may be illegal, but we legalize it later. 
SDValue Store = DAG.getTruncStore(Chain, dl, Ex, BasePTR, ST->getPointerInfo().getWithOffset(Idx*Stride), MemSclVT, isVolatile, isNonTemporal, MinAlign(Alignment, Idx*Stride), AAInfo); BasePTR = DAG.getNode(ISD::ADD, dl, BasePTR.getValueType(), BasePTR, DAG.getConstant(Stride, dl, BasePTR.getValueType())); Stores.push_back(Store); } SDValue TF = DAG.getNode(ISD::TokenFactor, dl, MVT::Other, Stores); AddLegalizedOperand(Op, TF); return TF; } SDValue VectorLegalizer::Expand(SDValue Op) { switch (Op->getOpcode()) { case ISD::SIGN_EXTEND_INREG: return ExpandSEXTINREG(Op); case ISD::ANY_EXTEND_VECTOR_INREG: return ExpandANY_EXTEND_VECTOR_INREG(Op); case ISD::SIGN_EXTEND_VECTOR_INREG: return ExpandSIGN_EXTEND_VECTOR_INREG(Op); case ISD::ZERO_EXTEND_VECTOR_INREG: return ExpandZERO_EXTEND_VECTOR_INREG(Op); case ISD::BSWAP: return ExpandBSWAP(Op); case ISD::VSELECT: return ExpandVSELECT(Op); case ISD::SELECT: return ExpandSELECT(Op); case ISD::UINT_TO_FP: return ExpandUINT_TO_FLOAT(Op); case ISD::FNEG: return ExpandFNEG(Op); case ISD::SETCC: return UnrollVSETCC(Op); default: return DAG.UnrollVectorOp(Op.getNode()); } } SDValue VectorLegalizer::ExpandSELECT(SDValue Op) { // Lower a select instruction where the condition is a scalar and the // operands are vectors. Lower this select to VSELECT and implement it // using XOR AND OR. The selector bit is broadcasted. EVT VT = Op.getValueType(); SDLoc DL(Op); SDValue Mask = Op.getOperand(0); SDValue Op1 = Op.getOperand(1); SDValue Op2 = Op.getOperand(2); assert(VT.isVector() && !Mask.getValueType().isVector() && Op1.getValueType() == Op2.getValueType() && "Invalid type"); unsigned NumElem = VT.getVectorNumElements(); // If we can't even use the basic vector operations of // AND,OR,XOR, we will have to scalarize the op. // Notice that the operation may be 'promoted' which means that it is // 'bitcasted' to another type which is handled. // Also, we need to be able to construct a splat vector using BUILD_VECTOR. 
if (TLI.getOperationAction(ISD::AND, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::XOR, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::OR, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::BUILD_VECTOR, VT) == TargetLowering::Expand) return DAG.UnrollVectorOp(Op.getNode()); // Generate a mask operand. EVT MaskTy = VT.changeVectorElementTypeToInteger(); // What is the size of each element in the vector mask. EVT BitTy = MaskTy.getScalarType(); Mask = DAG.getSelect(DL, BitTy, Mask, DAG.getConstant(APInt::getAllOnesValue(BitTy.getSizeInBits()), DL, BitTy), DAG.getConstant(0, DL, BitTy)); // Broadcast the mask so that the entire vector is all-one or all zero. SmallVector<SDValue, 8> Ops(NumElem, Mask); Mask = DAG.getNode(ISD::BUILD_VECTOR, DL, MaskTy, Ops); // Bitcast the operands to be the same type as the mask. // This is needed when we select between FP types because // the mask is a vector of integers. Op1 = DAG.getNode(ISD::BITCAST, DL, MaskTy, Op1); Op2 = DAG.getNode(ISD::BITCAST, DL, MaskTy, Op2); SDValue AllOnes = DAG.getConstant( APInt::getAllOnesValue(BitTy.getSizeInBits()), DL, MaskTy); SDValue NotMask = DAG.getNode(ISD::XOR, DL, MaskTy, Mask, AllOnes); Op1 = DAG.getNode(ISD::AND, DL, MaskTy, Op1, Mask); Op2 = DAG.getNode(ISD::AND, DL, MaskTy, Op2, NotMask); SDValue Val = DAG.getNode(ISD::OR, DL, MaskTy, Op1, Op2); return DAG.getNode(ISD::BITCAST, DL, Op.getValueType(), Val); } SDValue VectorLegalizer::ExpandSEXTINREG(SDValue Op) { EVT VT = Op.getValueType(); // Make sure that the SRA and SHL instructions are available. 
if (TLI.getOperationAction(ISD::SRA, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::SHL, VT) == TargetLowering::Expand) return DAG.UnrollVectorOp(Op.getNode()); SDLoc DL(Op); EVT OrigTy = cast<VTSDNode>(Op->getOperand(1))->getVT(); unsigned BW = VT.getScalarType().getSizeInBits(); unsigned OrigBW = OrigTy.getScalarType().getSizeInBits(); SDValue ShiftSz = DAG.getConstant(BW - OrigBW, DL, VT); Op = Op.getOperand(0); Op = DAG.getNode(ISD::SHL, DL, VT, Op, ShiftSz); return DAG.getNode(ISD::SRA, DL, VT, Op, ShiftSz); } // Generically expand a vector anyext in register to a shuffle of the relevant // lanes into the appropriate locations, with other lanes left undef. SDValue VectorLegalizer::ExpandANY_EXTEND_VECTOR_INREG(SDValue Op) { SDLoc DL(Op); EVT VT = Op.getValueType(); int NumElements = VT.getVectorNumElements(); SDValue Src = Op.getOperand(0); EVT SrcVT = Src.getValueType(); int NumSrcElements = SrcVT.getVectorNumElements(); // Build a base mask of undef shuffles. SmallVector<int, 16> ShuffleMask; ShuffleMask.resize(NumSrcElements, -1); // Place the extended lanes into the correct locations. int ExtLaneScale = NumSrcElements / NumElements; int EndianOffset = DAG.getDataLayout().isBigEndian() ? ExtLaneScale - 1 : 0; for (int i = 0; i < NumElements; ++i) ShuffleMask[i * ExtLaneScale + EndianOffset] = i; return DAG.getNode( ISD::BITCAST, DL, VT, DAG.getVectorShuffle(SrcVT, DL, Src, DAG.getUNDEF(SrcVT), ShuffleMask)); } SDValue VectorLegalizer::ExpandSIGN_EXTEND_VECTOR_INREG(SDValue Op) { SDLoc DL(Op); EVT VT = Op.getValueType(); SDValue Src = Op.getOperand(0); EVT SrcVT = Src.getValueType(); // First build an any-extend node which can be legalized above when we // recurse through it. Op = DAG.getAnyExtendVectorInReg(Src, DL, VT); // Now we need sign extend. Do this by shifting the elements. Even if these // aren't legal operations, they have a better chance of being legalized // without full scalarization than the sign extension does. 
unsigned EltWidth = VT.getVectorElementType().getSizeInBits(); unsigned SrcEltWidth = SrcVT.getVectorElementType().getSizeInBits(); SDValue ShiftAmount = DAG.getConstant(EltWidth - SrcEltWidth, DL, VT); return DAG.getNode(ISD::SRA, DL, VT, DAG.getNode(ISD::SHL, DL, VT, Op, ShiftAmount), ShiftAmount); } // Generically expand a vector zext in register to a shuffle of the relevant // lanes into the appropriate locations, a blend of zero into the high bits, // and a bitcast to the wider element type. SDValue VectorLegalizer::ExpandZERO_EXTEND_VECTOR_INREG(SDValue Op) { SDLoc DL(Op); EVT VT = Op.getValueType(); int NumElements = VT.getVectorNumElements(); SDValue Src = Op.getOperand(0); EVT SrcVT = Src.getValueType(); int NumSrcElements = SrcVT.getVectorNumElements(); // Build up a zero vector to blend into this one. EVT SrcScalarVT = SrcVT.getScalarType(); SDValue ScalarZero = DAG.getTargetConstant(0, DL, SrcScalarVT); SmallVector<SDValue, 4> BuildVectorOperands(NumSrcElements, ScalarZero); SDValue Zero = DAG.getNode(ISD::BUILD_VECTOR, DL, SrcVT, BuildVectorOperands); // Shuffle the incoming lanes into the correct position, and pull all other // lanes from the zero vector. SmallVector<int, 16> ShuffleMask; ShuffleMask.reserve(NumSrcElements); for (int i = 0; i < NumSrcElements; ++i) ShuffleMask.push_back(i); int ExtLaneScale = NumSrcElements / NumElements; int EndianOffset = DAG.getDataLayout().isBigEndian() ? ExtLaneScale - 1 : 0; for (int i = 0; i < NumElements; ++i) ShuffleMask[i * ExtLaneScale + EndianOffset] = NumSrcElements + i; return DAG.getNode(ISD::BITCAST, DL, VT, DAG.getVectorShuffle(SrcVT, DL, Zero, Src, ShuffleMask)); } SDValue VectorLegalizer::ExpandBSWAP(SDValue Op) { EVT VT = Op.getValueType(); // Generate a byte wise shuffle mask for the BSWAP. 
SmallVector<int, 16> ShuffleMask; int ScalarSizeInBytes = VT.getScalarSizeInBits() / 8; for (int I = 0, E = VT.getVectorNumElements(); I != E; ++I) for (int J = ScalarSizeInBytes - 1; J >= 0; --J) ShuffleMask.push_back((I * ScalarSizeInBytes) + J); EVT ByteVT = EVT::getVectorVT(*DAG.getContext(), MVT::i8, ShuffleMask.size()); // Only emit a shuffle if the mask is legal. if (!TLI.isShuffleMaskLegal(ShuffleMask, ByteVT)) return DAG.UnrollVectorOp(Op.getNode()); SDLoc DL(Op); Op = DAG.getNode(ISD::BITCAST, DL, ByteVT, Op.getOperand(0)); Op = DAG.getVectorShuffle(ByteVT, DL, Op, DAG.getUNDEF(ByteVT), ShuffleMask.data()); return DAG.getNode(ISD::BITCAST, DL, VT, Op); } SDValue VectorLegalizer::ExpandVSELECT(SDValue Op) { // Implement VSELECT in terms of XOR, AND, OR // on platforms which do not support blend natively. SDLoc DL(Op); SDValue Mask = Op.getOperand(0); SDValue Op1 = Op.getOperand(1); SDValue Op2 = Op.getOperand(2); EVT VT = Mask.getValueType(); // If we can't even use the basic vector operations of // AND,OR,XOR, we will have to scalarize the op. // Notice that the operation may be 'promoted' which means that it is // 'bitcasted' to another type which is handled. // This operation also isn't safe with AND, OR, XOR when the boolean // type is 0/1 as we need an all ones vector constant to mask with. // FIXME: Sign extend 1 to all ones if thats legal on the target. if (TLI.getOperationAction(ISD::AND, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::XOR, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::OR, VT) == TargetLowering::Expand || TLI.getBooleanContents(Op1.getValueType()) != TargetLowering::ZeroOrNegativeOneBooleanContent) return DAG.UnrollVectorOp(Op.getNode()); // If the mask and the type are different sizes, unroll the vector op. This // can occur when getSetCCResultType returns something that is different in // size from the operand types. For example, v4i8 = select v4i32, v4i8, v4i8. 
if (VT.getSizeInBits() != Op1.getValueType().getSizeInBits()) return DAG.UnrollVectorOp(Op.getNode()); // Bitcast the operands to be the same type as the mask. // This is needed when we select between FP types because // the mask is a vector of integers. Op1 = DAG.getNode(ISD::BITCAST, DL, VT, Op1); Op2 = DAG.getNode(ISD::BITCAST, DL, VT, Op2); SDValue AllOnes = DAG.getConstant( APInt::getAllOnesValue(VT.getScalarType().getSizeInBits()), DL, VT); SDValue NotMask = DAG.getNode(ISD::XOR, DL, VT, Mask, AllOnes); Op1 = DAG.getNode(ISD::AND, DL, VT, Op1, Mask); Op2 = DAG.getNode(ISD::AND, DL, VT, Op2, NotMask); SDValue Val = DAG.getNode(ISD::OR, DL, VT, Op1, Op2); return DAG.getNode(ISD::BITCAST, DL, Op.getValueType(), Val); } SDValue VectorLegalizer::ExpandUINT_TO_FLOAT(SDValue Op) { EVT VT = Op.getOperand(0).getValueType(); SDLoc DL(Op); // Make sure that the SINT_TO_FP and SRL instructions are available. if (TLI.getOperationAction(ISD::SINT_TO_FP, VT) == TargetLowering::Expand || TLI.getOperationAction(ISD::SRL, VT) == TargetLowering::Expand) return DAG.UnrollVectorOp(Op.getNode()); EVT SVT = VT.getScalarType(); assert((SVT.getSizeInBits() == 64 || SVT.getSizeInBits() == 32) && "Elements in vector-UINT_TO_FP must be 32 or 64 bits wide"); unsigned BW = SVT.getSizeInBits(); SDValue HalfWord = DAG.getConstant(BW/2, DL, VT); // Constants to clear the upper part of the word. // Notice that we can also use SHL+SHR, but using a constant is slightly // faster on x86. uint64_t HWMask = (SVT.getSizeInBits()==64)?0x00000000FFFFFFFF:0x0000FFFF; SDValue HalfWordMask = DAG.getConstant(HWMask, DL, VT); // Two to the power of half-word-size. 
SDValue TWOHW = DAG.getConstantFP((((uint64_t)1)<<(BW/2)), DL, Op.getValueType()); // HLSL Change: do the 64-bit conversion before shift not after // Clear upper part of LO, lower HI SDValue HI = DAG.getNode(ISD::SRL, DL, VT, Op.getOperand(0), HalfWord); SDValue LO = DAG.getNode(ISD::AND, DL, VT, Op.getOperand(0), HalfWordMask); // Convert hi and lo to floats // Convert the hi part back to the upper values SDValue fHI = DAG.getNode(ISD::SINT_TO_FP, DL, Op.getValueType(), HI); fHI = DAG.getNode(ISD::FMUL, DL, Op.getValueType(), fHI, TWOHW); SDValue fLO = DAG.getNode(ISD::SINT_TO_FP, DL, Op.getValueType(), LO); // Add the two halves return DAG.getNode(ISD::FADD, DL, Op.getValueType(), fHI, fLO); } SDValue VectorLegalizer::ExpandFNEG(SDValue Op) { if (TLI.isOperationLegalOrCustom(ISD::FSUB, Op.getValueType())) { SDLoc DL(Op); SDValue Zero = DAG.getConstantFP(-0.0, DL, Op.getValueType()); return DAG.getNode(ISD::FSUB, DL, Op.getValueType(), Zero, Op.getOperand(0)); } return DAG.UnrollVectorOp(Op.getNode()); } SDValue VectorLegalizer::UnrollVSETCC(SDValue Op) { EVT VT = Op.getValueType(); unsigned NumElems = VT.getVectorNumElements(); EVT EltVT = VT.getVectorElementType(); SDValue LHS = Op.getOperand(0), RHS = Op.getOperand(1), CC = Op.getOperand(2); EVT TmpEltVT = LHS.getValueType().getVectorElementType(); SDLoc dl(Op); SmallVector<SDValue, 8> Ops(NumElems); for (unsigned i = 0; i < NumElems; ++i) { SDValue LHSElem = DAG.getNode( ISD::EXTRACT_VECTOR_ELT, dl, TmpEltVT, LHS, DAG.getConstant(i, dl, TLI.getVectorIdxTy(DAG.getDataLayout()))); SDValue RHSElem = DAG.getNode( ISD::EXTRACT_VECTOR_ELT, dl, TmpEltVT, RHS, DAG.getConstant(i, dl, TLI.getVectorIdxTy(DAG.getDataLayout()))); Ops[i] = DAG.getNode(ISD::SETCC, dl, TLI.getSetCCResultType(DAG.getDataLayout(), *DAG.getContext(), TmpEltVT), LHSElem, RHSElem, CC); Ops[i] = DAG.getSelect(dl, EltVT, Ops[i], DAG.getConstant(APInt::getAllOnesValue (EltVT.getSizeInBits()), dl, EltVT), DAG.getConstant(0, dl, EltVT)); } return 
DAG.getNode(ISD::BUILD_VECTOR, dl, VT, Ops); } } bool SelectionDAG::LegalizeVectors() { return VectorLegalizer(*this).Run(); }
{ "pile_set_name": "Github" }
using SisoDb.EnsureThat;
using SisoDb.Structures.Schemas;

namespace SisoDb.Querying.Lambdas.Parsers
{
    /// <summary>
    /// Default bundle of lambda-expression parsers used by the query pipeline.
    /// Both parsers are replaceable, but never with null.
    /// </summary>
    public class ExpressionParsers : IExpressionParsers
    {
        private IWhereParser _where;
        private IOrderByParser _orderBy;

        /// <summary>
        /// The parser that translates where-expressions. Null assignments are rejected.
        /// </summary>
        public IWhereParser WhereParser
        {
            get
            {
                return _where;
            }
            set
            {
                Ensure.That(value, "WhereParser").IsNotNull();

                _where = value;
            }
        }

        /// <summary>
        /// The parser that translates order-by expressions. Null assignments are rejected.
        /// </summary>
        public IOrderByParser OrderByParser
        {
            get
            {
                return _orderBy;
            }
            set
            {
                Ensure.That(value, "OrderByParser").IsNotNull();

                _orderBy = value;
            }
        }

        /// <summary>
        /// Wires up the default <see cref="WhereParser"/> and <see cref="OrderByParser"/>
        /// implementations around the supplied data-type converter.
        /// </summary>
        public ExpressionParsers(IDataTypeConverter dataTypeConverter)
        {
            WhereParser = new WhereParser(dataTypeConverter);
            OrderByParser = new OrderByParser(dataTypeConverter);
        }
    }
}
{ "pile_set_name": "Github" }
<?php
/*
 *  $Id: Oracle.php 7490 2010-03-29 19:53:27Z jwage $
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * This software consists of voluntary contributions made by many individuals
 * and is licensed under the LGPL. For more information, see
 * <http://www.doctrine-project.org>.
 */

/**
 * Oracle schema-introspection driver. Reads the Oracle data dictionary
 * (sys.user_* / all_* views) to enumerate databases, tables, columns,
 * indexes, constraints and relations.
 *
 * @package     Doctrine
 * @subpackage  Import
 * @license     http://www.opensource.org/licenses/lgpl-license.php LGPL
 * @author      Konsta Vesterinen <[email protected]>
 * @version     $Revision: 7490 $
 * @link        www.doctrine-project.org
 * @since       1.0
 */
class Doctrine_Import_Oracle extends Doctrine_Import
{
    /**
     * lists all databases
     *
     * Oracle has no native database listing; this only works when the
     * connection emulates databases through schemas ("emulate_database").
     *
     * @throws Doctrine_Import_Exception when database emulation is disabled
     * @return array
     */
    public function listDatabases()
    {
        if ( ! $this->conn->getAttribute(Doctrine_Core::ATTR_EMULATE_DATABASE)) {
            throw new Doctrine_Import_Exception('database listing is only supported if the "emulate_database" option is enabled');
        }

        $query   = 'SELECT username FROM sys.user_users';
        $result2 = $this->conn->standaloneQuery($query);
        $result  = $result2->fetchColumn();

        return $result;
    }

    /**
     * lists all availible database functions
     *
     * @return array
     */
    public function listFunctions()
    {
        // line = 1 restricts the result to one row per stored unit.
        $query = "SELECT name FROM sys.user_source WHERE line = 1 AND type = 'FUNCTION'";

        return $this->conn->fetchColumn($query);
    }

    /**
     * lists all database triggers
     *
     * @param string|null $database  ignored; only the current schema is queried
     * @return array
     */
    public function listTriggers($database = null)
    {
        $query = "SELECT trigger_name FROM sys.user_triggers";

        return $this->conn->fetchColumn($query);
    }

    /**
     * lists all database sequences
     *
     * @param string|null $database  ignored; only the current schema is queried
     * @return array
     */
    public function listSequences($database = null)
    {
        $query      = "SELECT sequence_name FROM sys.user_sequences";
        $tableNames = $this->conn->fetchColumn($query);

        return array_map(array($this->conn->formatter, 'fixSequenceName'), $tableNames);
    }

    /**
     * lists table constraints
     *
     * Matches the table name both as given and upper-cased, since Oracle
     * stores unquoted identifiers in upper case.
     *
     * @param string $table     database table name
     * @return array
     */
    public function listTableConstraints($table)
    {
        $table = $this->conn->quote($table, 'text');

        $query = 'SELECT index_name name FROM user_constraints'
               . ' WHERE table_name = ' . $table . ' OR table_name = ' . strtoupper($table);

        $constraints = $this->conn->fetchColumn($query);

        return array_map(array($this->conn->formatter, 'fixIndexName'), $constraints);
    }

    /**
     * lists table columns
     *
     * @param string $table     database table name
     * @return array            column name => portable column description
     */
    public function listTableColumns($table)
    {
        // The subquery flags primary-key columns; NUMBER columns report their
        // precision as the length (data_length would be the storage size).
        $sql = <<<QEND
SELECT tc.column_name, data_type,
CASE WHEN data_type = 'NUMBER' THEN data_precision ELSE data_length END AS data_length,
nullable, data_default, data_scale, data_precision, pk.primary
FROM all_tab_columns tc
LEFT JOIN (
  select 'primary' primary, cc.table_name, cc.column_name
  from all_constraints cons
  join all_cons_columns cc on cons.constraint_name = cc.constraint_name
  where cons.constraint_type = 'P'
) pk ON pk.column_name = tc.column_name and pk.table_name = tc.table_name
WHERE tc.table_name = :tableName ORDER BY column_id
QEND;

        $result = $this->conn->fetchAssoc($sql, array(':tableName' => $table));

        $descr = array();
        foreach($result as $val) {
            $val  = array_change_key_case($val, CASE_LOWER);
            $decl = $this->conn->dataDict->getPortableDeclaration($val);

            $descr[$val['column_name']] = array(
                'name'     => $val['column_name'],
                'notnull'  => (bool) ($val['nullable'] === 'N'),
                'ntype'    => $val['data_type'],
                'type'     => $decl['type'][0],
                'alltypes' => $decl['type'],
                'fixed'    => (bool) $decl['fixed'],
                'unsigned' => (bool) $decl['unsigned'],
                'default'  => $val['data_default'],
                'length'   => $val['data_length'],
                'primary'  => (bool) $val['primary'],
                // Fixed: the query selects data_scale (lower-cased keys), so the
                // previous lookup of $val['scale'] could never match and the
                // scale was always reported as null.
                'scale'    => isset($val['data_scale']) ? $val['data_scale'] : null,
            );
        }

        return $descr;
    }

    /**
     * lists table indexes
     *
     * @param string $table     database table name
     * @return array
     */
    public function listTableIndexes($table)
    {
        $table = $this->conn->quote($table, 'text');

        // Fixed: AND binds tighter than OR in SQL, so without the parentheses
        // the generated = 'N' filter only applied to the upper-cased branch of
        // the table-name match, letting system-generated indexes leak through.
        $query = 'SELECT index_name name FROM user_indexes'
               . ' WHERE (table_name = ' . $table . ' OR table_name = ' . strtoupper($table) . ')'
               . ' AND generated = ' . $this->conn->quote('N', 'text');

        $indexes = $this->conn->fetchColumn($query);

        return array_map(array($this->conn->formatter, 'fixIndexName'), $indexes);
    }

    /**
     * list table relations
     *
     * Resolves foreign-key constraints ('R') into referenced table /
     * local column / referenced column triples.
     *
     * @param string $table     database table name
     * @return array
     */
    public function listTableRelations($table)
    {
        $relations = array();

        $sql = 'SELECT '
             . 'rcc.table_name AS referenced_table_name, '
             . 'lcc.column_name AS local_column_name, '
             . 'rcc.column_name AS referenced_column_name '
             . 'FROM user_constraints ac '
             . 'JOIN user_cons_columns rcc ON ac.r_constraint_name = rcc.constraint_name '
             . 'JOIN user_cons_columns lcc ON ac.constraint_name = lcc.constraint_name '
             . "WHERE ac.constraint_type = 'R' AND ac.table_name = :tableName";

        $results = $this->conn->fetchAssoc($sql, array(':tableName' => $table));
        foreach ($results as $result) {
            $result = array_change_key_case($result, CASE_LOWER);
            $relations[] = array('table'   => $result['referenced_table_name'],
                                 'local'   => $result['local_column_name'],
                                 'foreign' => $result['referenced_column_name']);
        }

        return $relations;
    }

    /**
     * lists tables
     *
     * @param string|null $database  ignored; only the current schema is queried
     * @return array
     */
    public function listTables($database = null)
    {
        $query = "SELECT * FROM user_objects WHERE object_type = 'TABLE' and object_name in (select table_name from user_tables)";

        return $this->conn->fetchColumn($query);
    }

    /**
     * lists table triggers
     *
     * Not implemented for Oracle.
     *
     * @param string $table     database table name
     * @return array
     */
    public function listTableTriggers($table)
    {
    }

    /**
     * lists table views
     *
     * Not implemented for Oracle.
     *
     * @param string $table     database table name
     * @return array
     */
    public function listTableViews($table)
    {
    }

    /**
     * lists database users
     *
     * @return array
     */
    public function listUsers()
    {
        $query = 'SELECT username FROM sys.all_users';

        return $this->conn->fetchColumn($query);
    }

    /**
     * lists database views
     *
     * @param string|null $database  ignored; only the current schema is queried
     * @return array
     */
    public function listViews($database = null)
    {
        $query = 'SELECT view_name FROM sys.user_views';

        return $this->conn->fetchColumn($query);
    }
}
{ "pile_set_name": "Github" }
import TagsTextField from 'part:@sanity/components/tags/textfield' import {array} from 'part:@sanity/storybook/addons/knobs' import Sanity from 'part:@sanity/storybook/addons/sanity' import React from 'react' const centerStyle = { display: 'flex', alignItems: 'center', justifyContent: 'center', height: '100%', width: '100%', position: 'absolute', top: 0, left: 0 } class DefaultTextFieldTagsImplementation extends React.PureComponent { constructor(...args) { super(...args) this.state = { tags: this.props.tags || [] } } handleChange = tags => { this.setState({ tags: tags }) } render() { return ( <TagsTextField label="Tags" placeholder="This is the placeholder" value={this.state.tags} onChange={this.handleChange} /> ) } } export function TagsTestStory() { const tags = [ 'Test', 'Sanity', 'React', 'Computer', 'Macbook', 'Awesome', 'Windows', 'CPU', 'Moore', 'Intel', 'Ada', 'Enigma' ] return ( <div style={centerStyle}> <Sanity part="part:@sanity/components/tags/textfield" propTables={[TagsTextField]}> <DefaultTextFieldTagsImplementation tags={array('tags', tags, 'props')} /> </Sanity> </div> ) }
{ "pile_set_name": "Github" }
MIT License Copyright (c) Kevin Mårtensson <[email protected]> (github.com/kevva) Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
{ "pile_set_name": "Github" }
// Minder-specific REST action layer. Extends the desktop RestActions with
// support for multipart/form-data uploads (used for mind-map icons).
MWF.xApplication.Minder = MWF.xApplication.Minder || {};
MWF.xApplication.Minder.Actions = MWF.xApplication.Minder.Actions || {};

MWF.require("MWF.xDesktop.Actions.RestActions", null, false);

MWF.xApplication.Minder.Actions.RestActions2 = new Class({
    Extends : MWF.xDesktop.Actions.RestActions,

    // Dispatches a named service action. `option` carries:
    // name, parameter, data, async, success, failure, withCredentials, urlEncode
    invoke: function(option){
        this.getActions(function(){
            //name, parameter, data, async, success, failure, withCredentials, urlEncode
            var action = this.actions[option.name];
            var method = action.method || "GET";
            var uri = action.uri;
            // Substitute {key} placeholders in the action URI with the
            // supplied parameter values (URL-encoded unless disabled).
            if (option.parameter){
                Object.each(option.parameter, function(value, key){
                    var reg = new RegExp("{"+key+"}", "g");
                    if (option.urlEncode===false){
                        uri = uri.replace(reg, value);
                    }else{
                        uri = uri.replace(reg, encodeURIComponent(value));
                    }
                });
            }
            // Lazily resolve the service base address before prefixing it.
            if( !this.address )this.getAddress();
            uri = this.address+uri;
            // Both flags default to true; only an explicit `false` disables them.
            var async = (option.async===false) ? false : true;
            var progress = (option.progress===false) ? false : true; // controls whether a progress indicator is shown
            var callback = new MWF.xDesktop.Actions.RestActions.Callback(option.success, option.failure);
            if (action.enctype && (action.enctype.toLowerCase()=="formdata")){
                // Multipart upload path (file + form fields).
                this.invokeFormData(method, uri, option.data, option.file, callback, async, progress);
            }else{
                // Plain JSON request path.
                var data = (option.data) ? JSON.encode(option.data) : "";
                var credentials = true;
                if (option.withCredentials===false){
                    credentials = false;
                }
                MWF.restful(method, uri, data, callback, async, credentials);
            }
        }.bind(this));
    },

    // Sends `data` (a FormData) carrying `file`. Chooses the progress-aware
    // variant only when the XHR supports upload events and progress is wanted.
    // NOTE(review): invokeFormDataWithProgress is not defined in this chunk --
    // presumably inherited from RestActions; confirm before refactoring.
    invokeFormData: function(method, uri, data, file, callback, async, progress){
        var xhr = new COMMON.Browser.Request();
        data.append('fileName', file.name);
        if (xhr.upload && progress){
            this.invokeFormDataWithProgress(xhr, method, uri, data, file, callback, async);
        }else{
            this.invokeFormDataWithoutProgress(xhr, method, uri, data, file, callback, async);
        }
    },

    // Raw XHR upload without progress reporting; completion is routed
    // through the shared xhrStateChange handler once readyState hits 4.
    invokeFormDataWithoutProgress: function(xhr, method, uri, data, file, callback, async){
        var messageItem = null;
        var currentDate = new Date();
        xhr.addEventListener("readystatechange", function(e){
            if (xhr.readyState == 4){
                //this.transferComplete(e, xhr, messageItem, currentDate, file)
                this.xhrStateChange(e, xhr, messageItem, callback);
            }
        }.bind(this), false);
        xhr.open(method, uri, true);
        xhr.withCredentials = true;
        //messageItem = this.addFormDataMessage(file, true);
        xhr.send(data);
        //this.setMessageText(messageItem, MWF.LP.desktop.action.sendStart);
    }
});

// Action facade registered under the assemble-control key. Generates one
// method per declared service and adds the icon-upload entry point.
MWF.xAction.RestActions.Action["x_mind_assemble_control"] = new Class({
    Extends: MWF.xAction.RestActions.Action,

    initialize: function(root, actions){
        this.action = new MWF.xApplication.Minder.Actions.RestActions2("/xAction/services/"+root+".json", root, "");
        this.action.actions = actions;
        // Create a convenience method per service entry, unless one with
        // the same name already exists on this object.
        Object.each(this.action.actions, function(service, key){
            if (service.uri) if (!this[key]) this.createMethod(service, key);
        }.bind(this));
    },

    // Uploads a mind-map icon image of the given size via multipart form data.
    uploadMindIcon: function(mindId, size, success, failure, formData, file, progress){
        this.action.invoke({"name": "uploadMindIcon", "parameter": {"mindId": mindId, "size" : size},"data": formData,"file": file,"success": success,"failure": failure, "progress" : progress});
    }
});
{ "pile_set_name": "Github" }
package TestApp::Controller::Priorities::loc_vs_index;

use strict;
use base 'Catalyst::Controller';

# Private "index" action: replies with the literal string 'index' so the
# dispatch-priority tests can tell which action handled the request.
sub index :Private {
    my ($self, $c) = @_;
    $c->res->body('index');
}

1;
{ "pile_set_name": "Github" }
/** * @fileoverview Look for useless escapes in strings and regexes * @author Onur Temizkan */ "use strict"; const astUtils = require("../ast-utils"); //------------------------------------------------------------------------------ // Rule Definition //------------------------------------------------------------------------------ /** * Returns the union of two sets. * @param {Set} setA The first set * @param {Set} setB The second set * @returns {Set} The union of the two sets */ function union(setA, setB) { return new Set(function *() { yield* setA; yield* setB; }()); } const VALID_STRING_ESCAPES = union(new Set("\\nrvtbfux"), astUtils.LINEBREAKS); const REGEX_GENERAL_ESCAPES = new Set("\\bcdDfnrsStvwWxu0123456789]"); const REGEX_NON_CHARCLASS_ESCAPES = union(REGEX_GENERAL_ESCAPES, new Set("^/.$*+?[{}|()B")); /** * Parses a regular expression into a list of characters with character class info. * @param {string} regExpText The raw text used to create the regular expression * @returns {Object[]} A list of characters, each with info on escaping and whether they're in a character class. 
* @example * * parseRegExp('a\\b[cd-]') * * returns: * [ * {text: 'a', index: 0, escaped: false, inCharClass: false, startsCharClass: false, endsCharClass: false}, * {text: 'b', index: 2, escaped: true, inCharClass: false, startsCharClass: false, endsCharClass: false}, * {text: 'c', index: 4, escaped: false, inCharClass: true, startsCharClass: true, endsCharClass: false}, * {text: 'd', index: 5, escaped: false, inCharClass: true, startsCharClass: false, endsCharClass: false}, * {text: '-', index: 6, escaped: false, inCharClass: true, startsCharClass: false, endsCharClass: false} * ] */ function parseRegExp(regExpText) { const charList = []; regExpText.split("").reduce((state, char, index) => { if (!state.escapeNextChar) { if (char === "\\") { return Object.assign(state, { escapeNextChar: true }); } if (char === "[" && !state.inCharClass) { return Object.assign(state, { inCharClass: true, startingCharClass: true }); } if (char === "]" && state.inCharClass) { if (charList.length && charList[charList.length - 1].inCharClass) { charList[charList.length - 1].endsCharClass = true; } return Object.assign(state, { inCharClass: false, startingCharClass: false }); } } charList.push({ text: char, index, escaped: state.escapeNextChar, inCharClass: state.inCharClass, startsCharClass: state.startingCharClass, endsCharClass: false }); return Object.assign(state, { escapeNextChar: false, startingCharClass: false }); }, { escapeNextChar: false, inCharClass: false, startingCharClass: false }); return charList; } module.exports = { meta: { docs: { description: "disallow unnecessary escape characters", category: "Best Practices", recommended: false }, schema: [] }, create(context) { const sourceCode = context.getSourceCode(); /** * Reports a node * @param {ASTNode} node The node to report * @param {number} startOffset The backslash's offset from the start of the node * @param {string} character The uselessly escaped character (not including the backslash) * @returns {void} */ function 
report(node, startOffset, character) { context.report({ node, loc: sourceCode.getLocFromIndex(sourceCode.getIndexFromLoc(node.loc.start) + startOffset), message: "Unnecessary escape character: \\{{character}}.", data: { character } }); } /** * Checks if the escape character in given string slice is unnecessary. * * @private * @param {ASTNode} node - node to validate. * @param {string} match - string slice to validate. * @returns {void} */ function validateString(node, match) { const isTemplateElement = node.type === "TemplateElement"; const escapedChar = match[0][1]; let isUnnecessaryEscape = !VALID_STRING_ESCAPES.has(escapedChar); let isQuoteEscape; if (isTemplateElement) { isQuoteEscape = escapedChar === "`"; if (escapedChar === "$") { // Warn if `\$` is not followed by `{` isUnnecessaryEscape = match.input[match.index + 2] !== "{"; } else if (escapedChar === "{") { /* Warn if `\{` is not preceded by `$`. If preceded by `$`, escaping * is necessary and the rule should not warn. If preceded by `/$`, the rule * will warn for the `/$` instead, as it is the first unnecessarily escaped character. */ isUnnecessaryEscape = match.input[match.index - 1] !== "$"; } } else { isQuoteEscape = escapedChar === node.raw[0]; } if (isUnnecessaryEscape && !isQuoteEscape) { report(node, match.index + 1, match[0].slice(1)); } } /** * Checks if a node has an escape. * * @param {ASTNode} node - node to check. * @returns {void} */ function check(node) { const isTemplateElement = node.type === "TemplateElement"; if ( isTemplateElement && node.parent && node.parent.parent && node.parent.parent.type === "TaggedTemplateExpression" && node.parent === node.parent.parent.quasi ) { // Don't report tagged template literals, because the backslash character is accessible to the tag function. return; } if (typeof node.value === "string" || isTemplateElement) { /* * JSXAttribute doesn't have any escape sequence: https://facebook.github.io/jsx/. 
* In addition, backticks are not supported by JSX yet: https://github.com/facebook/jsx/issues/25. */ if (node.parent.type === "JSXAttribute" || node.parent.type === "JSXElement") { return; } const value = isTemplateElement ? node.value.raw : node.raw.slice(1, -1); const pattern = /\\[^\d]/g; let match; while ((match = pattern.exec(value))) { validateString(node, match); } } else if (node.regex) { parseRegExp(node.regex.pattern) /* * The '-' character is a special case, because it's only valid to escape it if it's in a character * class, and is not at either edge of the character class. To account for this, don't consider '-' * characters to be valid in general, and filter out '-' characters that appear in the middle of a * character class. */ .filter(charInfo => !(charInfo.text === "-" && charInfo.inCharClass && !charInfo.startsCharClass && !charInfo.endsCharClass)) /* * The '^' character is also a special case; it must always be escaped outside of character classes, but * it only needs to be escaped in character classes if it's at the beginning of the character class. To * account for this, consider it to be a valid escape character outside of character classes, and filter * out '^' characters that appear at the start of a character class. */ .filter(charInfo => !(charInfo.text === "^" && charInfo.startsCharClass)) // Filter out characters that aren't escaped. .filter(charInfo => charInfo.escaped) // Filter out characters that are valid to escape, based on their position in the regular expression. .filter(charInfo => !(charInfo.inCharClass ? REGEX_GENERAL_ESCAPES : REGEX_NON_CHARCLASS_ESCAPES).has(charInfo.text)) // Report all the remaining characters. .forEach(charInfo => report(node, charInfo.index, charInfo.text)); } } return { Literal: check, TemplateElement: check }; } };
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2008, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package com.sun.javafx.tk.quantum;

import com.sun.glass.events.MouseEvent;
import com.sun.glass.events.ViewEvent;
import com.sun.glass.events.WindowEvent;

/**
 * Maps Glass windowing-toolkit event type codes to their symbolic names,
 * for use in debug and trace output. Package-private, not instantiable.
 */
class GlassEventUtils {

    // Static utility holder only -- prevent instantiation.
    private GlassEventUtils() {
    }

    /**
     * Returns the symbolic name of a Glass {@code MouseEvent} type code,
     * or {@code "UNKNOWN"} for any code not listed here.
     *
     * @param type one of the {@code MouseEvent} type constants
     * @return the constant's name as a string
     */
    public static String getMouseEventString(int type) {
        switch (type) {
            case MouseEvent.BUTTON_NONE: return "BUTTON_NONE";
            case MouseEvent.BUTTON_LEFT: return "BUTTON_LEFT";
            case MouseEvent.BUTTON_RIGHT: return "BUTTON_RIGHT";
            case MouseEvent.BUTTON_OTHER: return "BUTTON_OTHER";
            case MouseEvent.BUTTON_BACK: return "BUTTON_BACK";
            case MouseEvent.BUTTON_FORWARD: return "BUTTON_FORWARD";
            case MouseEvent.DOWN: return "DOWN";
            case MouseEvent.UP: return "UP";
            case MouseEvent.DRAG: return "DRAG";
            case MouseEvent.MOVE: return "MOVE";
            case MouseEvent.ENTER: return "ENTER";
            case MouseEvent.EXIT: return "EXIT";
            case MouseEvent.CLICK: return "CLICK";
            case MouseEvent.WHEEL: return "WHEEL";
            default: return "UNKNOWN";
        }
    }

    /**
     * Returns the symbolic name of a Glass {@code ViewEvent} type code,
     * or {@code "UNKNOWN"} for any code not listed here.
     *
     * @param type one of the {@code ViewEvent} type constants
     * @return the constant's name as a string
     */
    public static String getViewEventString(int type) {
        switch (type) {
            case ViewEvent.ADD: return "ADD";
            case ViewEvent.REMOVE: return "REMOVE";
            case ViewEvent.REPAINT: return "REPAINT";
            case ViewEvent.RESIZE: return "RESIZE";
            case ViewEvent.MOVE: return "MOVE";
            case ViewEvent.FULLSCREEN_ENTER: return "FULLSCREEN_ENTER";
            case ViewEvent.FULLSCREEN_EXIT: return "FULLSCREEN_EXIT";
            default: return "UNKNOWN";
        }
    }

    /**
     * Returns the symbolic name of a Glass {@code WindowEvent} type code,
     * or {@code "UNKNOWN"} for any code not listed here.
     *
     * @param type one of the {@code WindowEvent} type constants
     * @return the constant's name as a string
     */
    public static String getWindowEventString(int type) {
        switch (type) {
            case WindowEvent.RESIZE: return "RESIZE";
            case WindowEvent.MOVE: return "MOVE";
            case WindowEvent.CLOSE: return "CLOSE";
            case WindowEvent.DESTROY: return "DESTROY";
            case WindowEvent.MINIMIZE: return "MINIMIZE";
            case WindowEvent.MAXIMIZE: return "MAXIMIZE";
            case WindowEvent.RESTORE: return "RESTORE";
            case WindowEvent.FOCUS_LOST: return "FOCUS_LOST";
            case WindowEvent.FOCUS_GAINED: return "FOCUS_GAINED";
            case WindowEvent.FOCUS_GAINED_FORWARD: return "FOCUS_GAINED_FORWARD";
            case WindowEvent.FOCUS_GAINED_BACKWARD: return "FOCUS_GAINED_BACKWARD";
            case WindowEvent.FOCUS_DISABLED: return "FOCUS_DISABLED";
            case WindowEvent.FOCUS_UNGRAB: return "FOCUS_UNGRAB";
            default: return "UNKNOWN";
        }
    }
}
{ "pile_set_name": "Github" }
// Distributed under the terms of the MIT license // Test case submitted to project by https://github.com/practicalswift (practicalswift) // Test case found by fuzzing struct d{{{ { } }} func Void{E[Void{}} } let A{ Void{
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <title>Linked Brushing &amp; Panning</title> <script type="text/javascript" src="/static/js/bokeh.js"></script> <script type="text/javascript" src="/static/js/bokeh-api.js"></script> <script type="text/javascript"> var require = Bokeh.require var exports = {} </script> </head> <body> <script type="text/javascript" src="/static/examples/linked/linked.js"></script> </body> </html>
{ "pile_set_name": "Github" }
#TargetFrameworkVersion=v4.0:PlatformToolSet=v120:EnableManagedIncrementalBuild=false:VCToolArchitecture=Native32Bit Debug|Win32|F:\pclsync\lib\mbedtls\visualc\VS2010\|
{ "pile_set_name": "Github" }
// -*- C++ -*- //===------------------------------ span ---------------------------------===// // // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions. // See https://llvm.org/LICENSE.txt for license information. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception // //===---------------------------------------------------------------------===// // UNSUPPORTED: c++03, c++11, c++14, c++17 // <span> // template<class OtherElementType, size_t OtherExtent> // constexpr span(const span<OtherElementType, OtherExtent>& s) noexcept; // // Remarks: This constructor shall not participate in overload resolution unless: // Extent == dynamic_extent || Extent == OtherExtent is true, and // OtherElementType(*)[] is convertible to ElementType(*)[]. #include <span> #include <cassert> #include <string> #include "test_macros.h" template<class T, size_t extent, size_t otherExtent> std::span<T, extent> createImplicitSpan(std::span<T, otherExtent> s) { return {s}; // expected-error {{chosen constructor is explicit in copy-initialization}} } void checkCV () { // std::span< int> sp; std::span<const int> csp; std::span< volatile int> vsp; std::span<const volatile int> cvsp; // std::span< int, 0> sp0; std::span<const int, 0> csp0; std::span< volatile int, 0> vsp0; std::span<const volatile int, 0> cvsp0; // Try to remove const and/or volatile (dynamic -> dynamic) { std::span< int> s1{ csp}; // expected-error {{no matching constructor for initialization of 'std::span<int>'}} std::span< int> s2{ vsp}; // expected-error {{no matching constructor for initialization of 'std::span<int>'}} std::span< int> s3{cvsp}; // expected-error {{no matching constructor for initialization of 'std::span<int>'}} std::span<const int> s4{ vsp}; // expected-error {{no matching constructor for initialization of 'std::span<const int>'}} std::span<const int> s5{cvsp}; // expected-error {{no matching constructor for initialization of 'std::span<const int>'}} std::span< volatile int> 
s6{ csp}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int>'}} std::span< volatile int> s7{cvsp}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int>'}} } // Try to remove const and/or volatile (static -> static) { std::span< int, 0> s1{ csp0}; // expected-error {{no matching constructor for initialization of 'std::span<int, 0>'}} std::span< int, 0> s2{ vsp0}; // expected-error {{no matching constructor for initialization of 'std::span<int, 0>'}} std::span< int, 0> s3{cvsp0}; // expected-error {{no matching constructor for initialization of 'std::span<int, 0>'}} std::span<const int, 0> s4{ vsp0}; // expected-error {{no matching constructor for initialization of 'std::span<const int, 0>'}} std::span<const int, 0> s5{cvsp0}; // expected-error {{no matching constructor for initialization of 'std::span<const int, 0>'}} std::span< volatile int, 0> s6{ csp0}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int, 0>'}} std::span< volatile int, 0> s7{cvsp0}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int, 0>'}} } // Try to remove const and/or volatile (static -> dynamic) { std::span< int> s1{ csp0}; // expected-error {{no matching constructor for initialization of 'std::span<int>'}} std::span< int> s2{ vsp0}; // expected-error {{no matching constructor for initialization of 'std::span<int>'}} std::span< int> s3{cvsp0}; // expected-error {{no matching constructor for initialization of 'std::span<int>'}} std::span<const int> s4{ vsp0}; // expected-error {{no matching constructor for initialization of 'std::span<const int>'}} std::span<const int> s5{cvsp0}; // expected-error {{no matching constructor for initialization of 'std::span<const int>'}} std::span< volatile int> s6{ csp0}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int>'}} std::span< volatile int> s7{cvsp0}; // 
expected-error {{no matching constructor for initialization of 'std::span<volatile int>'}} } // Try to remove const and/or volatile (static -> static) { std::span< int, 0> s1{ csp}; // expected-error {{no matching constructor for initialization of 'std::span<int, 0>'}} std::span< int, 0> s2{ vsp}; // expected-error {{no matching constructor for initialization of 'std::span<int, 0>'}} std::span< int, 0> s3{cvsp}; // expected-error {{no matching constructor for initialization of 'std::span<int, 0>'}} std::span<const int, 0> s4{ vsp}; // expected-error {{no matching constructor for initialization of 'std::span<const int, 0>'}} std::span<const int, 0> s5{cvsp}; // expected-error {{no matching constructor for initialization of 'std::span<const int, 0>'}} std::span< volatile int, 0> s6{ csp}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int, 0>'}} std::span< volatile int, 0> s7{cvsp}; // expected-error {{no matching constructor for initialization of 'std::span<volatile int, 0>'}} } } int main(int, char**) { std::span<int> sp; std::span<int, 0> sp0; std::span<float> s1{sp}; // expected-error {{no matching constructor for initialization of 'std::span<float>'}} std::span<float> s2{sp0}; // expected-error {{no matching constructor for initialization of 'std::span<float>'}} std::span<float, 0> s3{sp}; // expected-error {{no matching constructor for initialization of 'std::span<float, 0>'}} std::span<float, 0> s4{sp0}; // expected-error {{no matching constructor for initialization of 'std::span<float, 0>'}} checkCV(); // explicit constructor necessary { createImplicitSpan<int, 1>(sp); } return 0; }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8"?> <document type="com.apple.InterfaceBuilder3.CocoaTouch.XIB" version="3.0" toolsVersion="12120" systemVersion="16F73" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES"> <device id="retina4_0" orientation="portrait"> <adaptation id="fullscreen"/> </device> <dependencies> <plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="12088"/> <capability name="Constraints to layout margins" minToolsVersion="6.0"/> <capability name="documents saved in the Xcode 8 format" minToolsVersion="8.0"/> </dependencies> <objects> <placeholder placeholderIdentifier="IBFilesOwner" id="-1" userLabel="File's Owner"/> <placeholder placeholderIdentifier="IBFirstResponder" id="-2" customClass="UIResponder"/> <tableViewCell clipsSubviews="YES" contentMode="scaleToFill" selectionStyle="default" indentationWidth="10" reuseIdentifier="CPMesNotificationCellIdentifier" id="uar-vU-mdN" customClass="CPMesNotificationCell" customModule="BeeFun" customModuleProvider="target"> <rect key="frame" x="0.0" y="0.0" width="375" height="55"/> <autoresizingMask key="autoresizingMask"/> <tableViewCellContentView key="contentView" opaque="NO" clipsSubviews="YES" multipleTouchEnabled="YES" contentMode="center" tableViewCell="uar-vU-mdN" id="gdA-iC-d3f"> <rect key="frame" x="0.0" y="0.0" width="375" height="54.5"/> <autoresizingMask key="autoresizingMask"/> <subviews> <imageView userInteractionEnabled="NO" contentMode="scaleToFill" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="octicon_star_35" translatesAutoresizingMaskIntoConstraints="NO" id="Kc8-Aq-56U"> <rect key="frame" x="10" y="15" width="25" height="25"/> <constraints> <constraint firstAttribute="height" constant="25" id="2rI-01-ZL5"/> <constraint firstAttribute="width" constant="25" id="7bV-YY-3m0"/> </constraints> </imageView> <label opaque="NO" userInteractionEnabled="NO" contentMode="left" 
horizontalHuggingPriority="251" verticalHuggingPriority="251" text="starred" textAlignment="natural" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="9xt-gQ-JtV"> <rect key="frame" x="43" y="8" width="324" height="23"/> <constraints> <constraint firstAttribute="height" constant="23" id="zUN-Bm-C4L"/> </constraints> <fontDescription key="fontDescription" type="system" pointSize="17"/> <color key="textColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> <nil key="highlightedColor"/> </label> <label opaque="NO" userInteractionEnabled="NO" contentMode="left" horizontalHuggingPriority="251" verticalHuggingPriority="251" text="1 day ago" textAlignment="right" lineBreakMode="tailTruncation" baselineAdjustment="alignBaselines" adjustsFontSizeToFit="NO" translatesAutoresizingMaskIntoConstraints="NO" id="m7H-xt-xwd"> <rect key="frame" x="255" y="32" width="112" height="16"/> <constraints> <constraint firstAttribute="width" constant="112" id="dud-Iu-x4e"/> <constraint firstAttribute="height" constant="16" id="hy4-FG-kA0"/> </constraints> <fontDescription key="fontDescription" type="system" pointSize="12"/> <color key="textColor" red="0.0" green="0.0" blue="0.0" alpha="1" colorSpace="custom" customColorSpace="sRGB"/> <nil key="highlightedColor"/> </label> <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" buttonType="roundedRect" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="fsm-IG-Pmv"> <rect key="frame" x="43" y="32" width="204" height="16"/> <constraints> <constraint firstAttribute="height" constant="16" id="Ob5-ah-d9P"/> </constraints> <fontDescription key="fontDescription" type="system" pointSize="12"/> <state key="normal" title="Button"/> </button> </subviews> <constraints> <constraint firstItem="Kc8-Aq-56U" firstAttribute="top" 
secondItem="gdA-iC-d3f" secondAttribute="top" constant="15" id="1zw-z2-DWI"/> <constraint firstItem="9xt-gQ-JtV" firstAttribute="leading" secondItem="Kc8-Aq-56U" secondAttribute="trailing" constant="8" id="9Cp-qU-bdd"/> <constraint firstItem="9xt-gQ-JtV" firstAttribute="top" secondItem="gdA-iC-d3f" secondAttribute="topMargin" id="BLc-ey-3h0"/> <constraint firstItem="fsm-IG-Pmv" firstAttribute="top" secondItem="9xt-gQ-JtV" secondAttribute="bottom" constant="1" id="I4S-Ly-AYI"/> <constraint firstItem="m7H-xt-xwd" firstAttribute="leading" secondItem="fsm-IG-Pmv" secondAttribute="trailing" constant="8" id="KY5-bF-CBK"/> <constraint firstItem="9xt-gQ-JtV" firstAttribute="trailing" secondItem="gdA-iC-d3f" secondAttribute="trailingMargin" id="QEs-px-Id3"/> <constraint firstAttribute="trailing" secondItem="m7H-xt-xwd" secondAttribute="trailing" constant="8" id="Spl-pA-LHg"/> <constraint firstItem="Kc8-Aq-56U" firstAttribute="leading" secondItem="gdA-iC-d3f" secondAttribute="leading" constant="10" id="TFR-IW-yDc"/> <constraint firstItem="fsm-IG-Pmv" firstAttribute="leading" secondItem="Kc8-Aq-56U" secondAttribute="trailing" constant="8" id="lXT-wN-YbO"/> <constraint firstItem="m7H-xt-xwd" firstAttribute="top" secondItem="9xt-gQ-JtV" secondAttribute="bottom" constant="1" id="yqa-NI-kWA"/> </constraints> </tableViewCellContentView> <connections> <outlet property="notificationLabel" destination="9xt-gQ-JtV" id="WNo-TB-MQu"/> <outlet property="reposBtn" destination="fsm-IG-Pmv" id="FPU-5S-Eii"/> <outlet property="timeLabel" destination="m7H-xt-xwd" id="MU8-9J-MTN"/> <outlet property="typeImageV" destination="Kc8-Aq-56U" id="0lG-d4-yOh"/> </connections> <point key="canvasLocation" x="327.5" y="137"/> </tableViewCell> </objects> <resources> <image name="octicon_star_35" width="35" height="35"/> </resources> </document>
{ "pile_set_name": "Github" }
## How-To Guides ### Reset a Form Block and All Its Elements In the Action Function, the second argument is the `callbacks` object. By calling `callbacks.reset()` the Form Block will reset every Form Element inside it. ```javascript Template.createNews.helpers({ getAction: function() { return function(els, callbacks, changed) { // Save data. If successful, then... callbacks.success(); callbacks.reset(); } } }); ``` This is the simplest way to completely reset a form. See the [Action Function](../api/ActionFunction.md) section of the API Reference for more information.
{ "pile_set_name": "Github" }
/*
 * (C) Copyright 2003, Psyent Corporation <www.psyent.com>
 * Scott McNutt <[email protected]>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

/* GNU ld linker script producing a 32-bit big-endian SPARC ELF image. */

OUTPUT_FORMAT("elf32-sparc", "elf32-sparc", "elf32-sparc")
OUTPUT_ARCH(sparc)
ENTRY(_start)

SECTIONS
{
	/* Executable code; __text_end marks the first byte past it. */
	.text :
	{
		*(.text)
	}
	__text_end = .;

	. = ALIGN(4);

	/* Read-only data. Input sections are sorted by alignment, then by
	 * name, for a deterministic and padding-minimizing layout. */
	.rodata :
	{
		*(SORT_BY_ALIGNMENT(SORT_BY_NAME(.rodata*)))
	}
	__rodata_end = .;

	. = ALIGN(4);

	/* Initialized read/write data. */
	.data :
	{
		*(.data)
	}
	. = ALIGN(4);
	__data_end = .;

	/* Zero-initialized data. The [__bss_start, __bss_end) symbols bound
	 * the region; presumably the startup code clears it at boot —
	 * confirm against the matching crt/start.S. */
	__bss_start = .;
	. = ALIGN(4);
	.bss :
	{
		*(.bss)
	}
	. = ALIGN(4);
	__bss_end = .;
	_end = .;
}
{ "pile_set_name": "Github" }
///////////////////////////////////////////////////////////////////////////// // Copyright (c) 2009-2014 Alan Wright. All rights reserved. // Distributable under the terms of either the Apache License (Version 2.0) // or the GNU Lesser General Public License. ///////////////////////////////////////////////////////////////////////////// #ifndef BUFFEREDREADER_H #define BUFFEREDREADER_H #include "Reader.h" namespace Lucene { /// Read text from a character-input stream, buffering characters so as to provide /// for the efficient reading of characters, arrays, and lines. class LPPAPI BufferedReader : public Reader { public: /// Create a buffering character-input stream. BufferedReader(const ReaderPtr& reader, int32_t size = READER_BUFFER); virtual ~BufferedReader(); LUCENE_CLASS(BufferedReader); protected: ReaderPtr reader; int32_t bufferSize; int32_t bufferLength; // end of valid bytes int32_t bufferPosition; // next byte to read CharArray buffer; public: static const int32_t READER_BUFFER; public: /// Read a single character. virtual int32_t read(); /// Read characters into a portion of an array. virtual int32_t read(wchar_t* b, int32_t offset, int32_t length); /// Read a line of text. virtual bool readLine(String& line); /// Close the stream. virtual void close(); /// Tell whether this stream supports the mark() operation virtual bool markSupported(); /// Reset the stream. virtual void reset(); protected: /// Refill buffer in preparation for reading. int32_t refill(); /// Read a single character without moving position. int32_t peek(); }; } #endif
{ "pile_set_name": "Github" }
var/log/aeternity/node opt/aeternity/node/log opt/aeternity/node/docs usr/share/doc/aeternity-node/docs
{ "pile_set_name": "Github" }
// // MessagePack for C++ static resolution routine // // Copyright (C) 2015-2016 KONDO Takatoshi // // Distributed under the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // #ifndef MSGPACK_V1_TYPE_BOOST_MSGPACK_VARIANT_HPP #define MSGPACK_V1_TYPE_BOOST_MSGPACK_VARIANT_HPP #if defined(MSGPACK_USE_BOOST) #include "msgpack/v1/adaptor/boost/msgpack_variant_decl.hpp" #include "msgpack/adaptor/check_container_size.hpp" #include "msgpack/adaptor/boost/string_ref.hpp" #include "msgpack/adaptor/nil.hpp" #include "msgpack/adaptor/bool.hpp" #include "msgpack/adaptor/int.hpp" #include "msgpack/adaptor/float.hpp" #include "msgpack/adaptor/string.hpp" #include "msgpack/adaptor/vector_char.hpp" #include "msgpack/adaptor/raw.hpp" #include "msgpack/adaptor/ext.hpp" #include "msgpack/adaptor/vector.hpp" #include "msgpack/adaptor/map.hpp" #include <boost/variant.hpp> #include <boost/operators.hpp> namespace msgpack { /// @cond MSGPACK_API_VERSION_NAMESPACE(v1) { /// @endcond namespace type { template <typename STR, typename BIN, typename EXT> struct basic_variant : boost::variant< nil_t, // NIL bool, // BOOL int64_t, // NEGATIVE_INTEGER uint64_t, // POSITIVE_INTEGER double, // FLOAT32, FLOAT64 std::string, // STR #if (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 boost::string_ref, // STR #endif // (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 std::vector<char>, // BIN msgpack::type::raw_ref, // BIN msgpack::type::ext, // EXT msgpack::type::ext_ref, // EXT boost::recursive_wrapper<std::vector<basic_variant<STR, BIN, EXT> > >, // ARRAY boost::recursive_wrapper<std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >, // MAP boost::recursive_wrapper<std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >// MAP >, private boost::totally_ordered<basic_variant<STR, BIN, EXT> > { typedef boost::variant< nil_t, // NIL bool, 
// BOOL int64_t, // NEGATIVE_INTEGER uint64_t, // POSITIVE_INTEGER double, // FLOAT32, FLOAT64 std::string, // STR #if (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 boost::string_ref, // STR #endif // (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 std::vector<char>, // BIN msgpack::type::raw_ref, // BIN msgpack::type::ext, // EXT msgpack::type::ext_ref, // EXT boost::recursive_wrapper<std::vector<basic_variant<STR, BIN, EXT> > >, // ARRAY boost::recursive_wrapper<std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >, // MAP boost::recursive_wrapper<std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >// MAP > base; basic_variant() {} template <typename T> basic_variant(T const& t):base(t) {} #if defined(_MSC_VER) && _MSC_VER < 1700 // The following redundant functions are required to avoid MSVC // See https://svn.boost.org/trac/boost/ticket/592 basic_variant(basic_variant const& other):base(static_cast<base const&>(other)) {} basic_variant& operator=(basic_variant const& other) { *static_cast<base*>(this) = static_cast<base const&>(other); return *this; } #endif // defined(_MSC_VER) && _MSC_VER < 1700 basic_variant(char const* p):base(std::string(p)) {} basic_variant(char v) { int_init(v); } basic_variant(signed char v) { int_init(v); } basic_variant(unsigned char v):base(uint64_t(v)) {} basic_variant(signed int v) { int_init(v); } basic_variant(unsigned int v):base(uint64_t(v)) {} basic_variant(signed long v) { int_init(v); } basic_variant(unsigned long v):base(uint64_t(v)) {} basic_variant(signed long long v) { int_init(v); } basic_variant(unsigned long long v):base(uint64_t(v)) {} bool is_nil() const { return boost::get<msgpack::type::nil_t>(this) != MSGPACK_NULLPTR; } bool is_bool() const { return boost::get<bool>(this) != MSGPACK_NULLPTR; } bool is_int64_t() const { return boost::get<int64_t>(this) != MSGPACK_NULLPTR; } bool is_uint64_t() const { return 
boost::get<uint64_t>(this) != MSGPACK_NULLPTR; } bool is_double() const { return boost::get<double>(this) != MSGPACK_NULLPTR; } bool is_string() const { return boost::get<std::string>(this) != MSGPACK_NULLPTR; } #if (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 bool is_boost_string_ref() const { return boost::get<boost::string_ref>(this) != MSGPACK_NULLPTR; } #endif // (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 bool is_vector_char() const { return boost::get<std::vector<char> >(this) != MSGPACK_NULLPTR; } bool is_vector_char() { return boost::get<std::vector<char> >(this) != MSGPACK_NULLPTR; } bool is_raw_ref() const { return boost::get<raw_ref>(this) != MSGPACK_NULLPTR; } bool is_ext() const { return boost::get<ext>(this) != MSGPACK_NULLPTR; } bool is_ext_ref() const { return boost::get<ext_ref>(this) != MSGPACK_NULLPTR; } bool is_vector() const { return boost::get<std::vector<basic_variant<STR, BIN, EXT> > >(this) != MSGPACK_NULLPTR; } bool is_map() const { return boost::get<std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >(this) != MSGPACK_NULLPTR; } bool is_multimap() const { return boost::get<std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >(this) != MSGPACK_NULLPTR; } bool as_bool() const { return boost::get<bool>(*this); } int64_t as_int64_t() const { return boost::get<int64_t>(*this); } int64_t& as_int64_t() { return boost::get<int64_t>(*this); } uint64_t as_uint64_t() const { return boost::get<uint64_t>(*this); } uint64_t& as_uint64_t() { return boost::get<uint64_t>(*this); } double as_double() const { return boost::get<double>(*this); } double& as_double() { return boost::get<double>(*this); } std::string const& as_string() const { return boost::get<std::string>(*this); } std::string& as_string() { return boost::get<std::string>(*this); } #if (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 boost::string_ref const& as_boost_string_ref() 
const { return boost::get<boost::string_ref>(*this); } boost::string_ref& as_boost_string_ref() { return boost::get<boost::string_ref>(*this); } #endif // (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 std::vector<char> const& as_vector_char() const { return boost::get<std::vector<char> >(*this); } std::vector<char>& as_vector_char() { return boost::get<std::vector<char> >(*this); } raw_ref const& as_raw_ref() const { return boost::get<raw_ref>(*this); } ext const& as_ext() const { return boost::get<ext>(*this); } ext& as_ext() { return boost::get<ext>(*this); } ext_ref const& as_ext_ref() const { return boost::get<ext_ref>(*this); } std::vector<basic_variant<STR, BIN, EXT> > const& as_vector() const { return boost::get<std::vector<basic_variant<STR, BIN, EXT> > >(*this); } std::vector<basic_variant<STR, BIN, EXT> >& as_vector() { return boost::get<std::vector<basic_variant<STR, BIN, EXT> > >(*this); } std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > const& as_map() const { return boost::get<std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >(*this); } std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> >& as_map() { return boost::get<std::map<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >(*this); } std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > const& as_multimap() const { return boost::get<std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >(*this); } std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> >& as_multimap() { return boost::get<std::multimap<basic_variant<STR, BIN, EXT>, basic_variant<STR, BIN, EXT> > >(*this); } private: template <typename T> void int_init(T v) { if (v < 0) { static_cast<base&>(*this) = int64_t(v); } else { static_cast<base&>(*this) = uint64_t(v); } } }; template <typename STR, typename BIN, typename EXT> inline bool operator<(basic_variant<STR, BIN, EXT> const& lhs, 
basic_variant<STR, BIN, EXT> const& rhs) { return static_cast<typename basic_variant<STR, BIN, EXT>::base const&>(lhs) < static_cast<typename basic_variant<STR, BIN, EXT>::base const&>(rhs); } template <typename STR, typename BIN, typename EXT> inline bool operator==(basic_variant<STR, BIN, EXT> const& lhs, basic_variant<STR, BIN, EXT> const& rhs) { return static_cast<typename basic_variant<STR, BIN, EXT>::base const&>(lhs) == static_cast<typename basic_variant<STR, BIN, EXT>::base const&>(rhs); } typedef basic_variant<std::string, std::vector<char>, ext> variant; typedef basic_variant< #if (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 boost::string_ref, #else // (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 std::string, #endif // (BOOST_VERSION / 100000) >= 1 && ((BOOST_VERSION / 100) % 1000) >= 53 raw_ref, ext_ref> variant_ref; } // namespace type namespace adaptor { #if !defined (MSGPACK_USE_CPP03) template <typename STR, typename BIN, typename EXT> struct as<type::basic_variant<STR, BIN, EXT> > { type::basic_variant<STR, BIN, EXT> operator()(msgpack::object const& o) const { switch(o.type) { case type::NIL: return o.as<msgpack::type::nil_t>(); case type::BOOLEAN: return o.as<bool>(); case type::POSITIVE_INTEGER: return o.as<uint64_t>(); case type::NEGATIVE_INTEGER: return o.as<int64_t>(); case type::FLOAT32: case type::FLOAT64: return o.as<double>(); case type::STR: return o.as<STR>(); case type::BIN: return o.as<BIN>(); case type::EXT: return o.as<EXT>(); case type::ARRAY: return o.as<std::vector<type::basic_variant<STR, BIN, EXT> > >(); case type::MAP: return o.as<std::multimap<type::basic_variant<STR, BIN, EXT>, type::basic_variant<STR, BIN, EXT> > >(); default: break; } return type::basic_variant<STR, BIN, EXT>(); } }; #endif // !defined (MSGPACK_USE_CPP03) template <typename STR, typename BIN, typename EXT> struct convert<type::basic_variant<STR, BIN, EXT> > { msgpack::object const& operator()( 
msgpack::object const& o, type::basic_variant<STR, BIN, EXT>& v) const { switch(o.type) { case type::NIL: v = o.as<msgpack::type::nil_t>(); break; case type::BOOLEAN: v = o.as<bool>(); break; case type::POSITIVE_INTEGER: v = o.as<uint64_t>(); break; case type::NEGATIVE_INTEGER: v = o.as<int64_t>(); break; case type::FLOAT32: case type::FLOAT64: v = o.as<double>(); break; case type::STR: v = o.as<STR>(); break; case type::BIN: v = o.as<BIN>(); break; case type::EXT: v = o.as<EXT>(); break; case type::ARRAY: v = o.as<std::vector<type::basic_variant<STR, BIN, EXT> > >(); break; case type::MAP: v = o.as<std::multimap<type::basic_variant<STR, BIN, EXT>, type::basic_variant<STR, BIN, EXT> > >(); break; default: break; } return o; } }; namespace detail { template <typename Stream> struct pack_imp : boost::static_visitor<void> { template <typename T> void operator()(T const& value) const { pack<T>()(o_, value); } pack_imp(packer<Stream>& o):o_(o) {} packer<Stream>& o_; }; } // namespace detail template <typename STR, typename BIN, typename EXT> struct pack<type::basic_variant<STR, BIN, EXT> > { template <typename Stream> msgpack::packer<Stream>& operator()(msgpack::packer<Stream>& o, const type::basic_variant<STR, BIN, EXT>& v) const { boost::apply_visitor(detail::pack_imp<Stream>(o), v); return o; } }; namespace detail { struct object_imp : boost::static_visitor<void> { void operator()(msgpack::type::nil_t const& v) const { object<msgpack::type::nil_t>()(o_, v); } void operator()(bool const& v) const { object<bool>()(o_, v); } void operator()(uint64_t const& v) const { object<uint64_t>()(o_, v); } void operator()(int64_t const& v) const { object<int64_t>()(o_, v); } void operator()(double const& v) const { object<double>()(o_, v); } template <typename T> void operator()(T const&) const { throw msgpack::type_error(); } object_imp(msgpack::object& o):o_(o) {} msgpack::object& o_; }; } // namespace detail template <typename STR, typename BIN, typename EXT> struct 
object<type::basic_variant<STR, BIN, EXT> > { void operator()(msgpack::object& o, const type::basic_variant<STR, BIN, EXT>& v) const { boost::apply_visitor(detail::object_imp(o), v); } }; namespace detail { struct object_with_zone_imp : boost::static_visitor<void> { template <typename T> void operator()(T const& v) const { object_with_zone<T>()(o_, v); } object_with_zone_imp(msgpack::object::with_zone& o):o_(o) {} msgpack::object::with_zone& o_; }; } // namespace detail template <typename STR, typename BIN, typename EXT> struct object_with_zone<type::basic_variant<STR, BIN, EXT> > { void operator()(msgpack::object::with_zone& o, const type::basic_variant<STR, BIN, EXT>& v) const { boost::apply_visitor(detail::object_with_zone_imp(o), v); } }; } // namespace adaptor /// @cond } // MSGPACK_API_VERSION_NAMESPACE(v1) /// @endcond } // namespace msgpack #endif // MSGPACK_USE_BOOST #endif // MSGPACK_V1_TYPE_BOOST_MSGPACK_VARIANT_HPP
{ "pile_set_name": "Github" }
//
// Generated by class-dump 3.5 (64 bit) (Debug version compiled Oct 15 2018 10:31:50).
//
// class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//
// NOTE: This interface was reverse-engineered from a compiled binary by
// class-dump; it declares a private Apple NSValueTransformer subclass and
// carries no implementation. Method semantics below are inferred from the
// NSValueTransformer contract — confirm against runtime behavior.

#import <Foundation/NSValueTransformer.h>

@interface INIntentRentalCarReservationSlotValueTransformer : NSValueTransformer
{
}

// Private Intents-framework type tag for the transformed value (semantics
// not visible here — presumably an INIntent slot-value type code).
+ (long long)_intents_valueType;
// Standard NSValueTransformer class-level contract.
+ (BOOL)allowsReverseTransformation;
+ (Class)reverseTransformedValueClass;
+ (Class)transformedValueClass;
// Standard NSValueTransformer instance-level contract.
- (id)reverseTransformedValue:(id)arg1;
- (id)transformedValue:(id)arg1;

@end
{ "pile_set_name": "Github" }
[metadata] description-file = README.rst [bdist_wheel] universal=1 [zest.releaser] create-wheel = yes
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <link rel="shortcut icon" type="image/ico" href="http://www.datatables.net/favicon.ico"> <meta name="viewport" content="initial-scale=1.0, maximum-scale=2.0"> <title>TableTools example - Button collections</title> <link rel="stylesheet" type="text/css" href="../../../media/css/jquery.dataTables.css"> <link rel="stylesheet" type="text/css" href="../css/dataTables.tableTools.css"> <link rel="stylesheet" type="text/css" href="../../../examples/resources/syntax/shCore.css"> <link rel="stylesheet" type="text/css" href="../../../examples/resources/demo.css"> <style type="text/css" class="init"> </style> <script type="text/javascript" language="javascript" src="../../../media/js/jquery.js"></script> <script type="text/javascript" language="javascript" src="../../../media/js/jquery.dataTables.js"></script> <script type="text/javascript" language="javascript" src="../js/dataTables.tableTools.js"></script> <script type="text/javascript" language="javascript" src="../../../examples/resources/syntax/shCore.js"></script> <script type="text/javascript" language="javascript" src="../../../examples/resources/demo.js"></script> <script type="text/javascript" language="javascript" class="init"> $(document).ready(function() { $('#example').DataTable( { "dom": 'T<"clear">lfrtip', "tableTools": { "aButtons": [ "copy", "print", { "sExtends": "collection", "sButtonText": "Save", "aButtons": [ "csv", "xls", "pdf" ] } ] } } ); } ); </script> </head> <body class="dt-example"> <div class="container"> <section> <h1>TableTools example <span>Button collections</span></h1> <div class="info"> <p>TableTools provides the ability to group buttons into a hidden drop down list, which is activated by clicking on a top-level button. 
This is achieved by extending the 'collection' predefined button type and setting it's <code>aButtons</code> parameter with the same options as the top level buttons (note that you cannot currently use a collection within a collection).</p> <p>The example below shows the file save buttons grouped into a collection, while the copy and print buttons are left on the top level.</p> </div> <table id="example" class="display" cellspacing="0" width="100%"> <thead> <tr> <th>Name</th> <th>Position</th> <th>Office</th> <th>Age</th> <th>Start date</th> <th>Salary</th> </tr> </thead> <tfoot> <tr> <th>Name</th> <th>Position</th> <th>Office</th> <th>Age</th> <th>Start date</th> <th>Salary</th> </tr> </tfoot> <tbody> <tr> <td>Tiger Nixon</td> <td>System Architect</td> <td>Edinburgh</td> <td>61</td> <td>2011/04/25</td> <td>$320,800</td> </tr> <tr> <td>Garrett Winters</td> <td>Accountant</td> <td>Tokyo</td> <td>63</td> <td>2011/07/25</td> <td>$170,750</td> </tr> <tr> <td>Ashton Cox</td> <td>Junior Technical Author</td> <td>San Francisco</td> <td>66</td> <td>2009/01/12</td> <td>$86,000</td> </tr> <tr> <td>Cedric Kelly</td> <td>Senior Javascript Developer</td> <td>Edinburgh</td> <td>22</td> <td>2012/03/29</td> <td>$433,060</td> </tr> <tr> <td>Airi Satou</td> <td>Accountant</td> <td>Tokyo</td> <td>33</td> <td>2008/11/28</td> <td>$162,700</td> </tr> <tr> <td>Brielle Williamson</td> <td>Integration Specialist</td> <td>New York</td> <td>61</td> <td>2012/12/02</td> <td>$372,000</td> </tr> <tr> <td>Herrod Chandler</td> <td>Sales Assistant</td> <td>San Francisco</td> <td>59</td> <td>2012/08/06</td> <td>$137,500</td> </tr> <tr> <td>Rhona Davidson</td> <td>Integration Specialist</td> <td>Tokyo</td> <td>55</td> <td>2010/10/14</td> <td>$327,900</td> </tr> <tr> <td>Colleen Hurst</td> <td>Javascript Developer</td> <td>San Francisco</td> <td>39</td> <td>2009/09/15</td> <td>$205,500</td> </tr> <tr> <td>Sonya Frost</td> <td>Software Engineer</td> <td>Edinburgh</td> <td>23</td> <td>2008/12/13</td> 
<td>$103,600</td> </tr> <tr> <td>Jena Gaines</td> <td>Office Manager</td> <td>London</td> <td>30</td> <td>2008/12/19</td> <td>$90,560</td> </tr> <tr> <td>Quinn Flynn</td> <td>Support Lead</td> <td>Edinburgh</td> <td>22</td> <td>2013/03/03</td> <td>$342,000</td> </tr> <tr> <td>Charde Marshall</td> <td>Regional Director</td> <td>San Francisco</td> <td>36</td> <td>2008/10/16</td> <td>$470,600</td> </tr> <tr> <td>Haley Kennedy</td> <td>Senior Marketing Designer</td> <td>London</td> <td>43</td> <td>2012/12/18</td> <td>$313,500</td> </tr> <tr> <td>Tatyana Fitzpatrick</td> <td>Regional Director</td> <td>London</td> <td>19</td> <td>2010/03/17</td> <td>$385,750</td> </tr> <tr> <td>Michael Silva</td> <td>Marketing Designer</td> <td>London</td> <td>66</td> <td>2012/11/27</td> <td>$198,500</td> </tr> <tr> <td>Paul Byrd</td> <td>Chief Financial Officer (CFO)</td> <td>New York</td> <td>64</td> <td>2010/06/09</td> <td>$725,000</td> </tr> <tr> <td>Gloria Little</td> <td>Systems Administrator</td> <td>New York</td> <td>59</td> <td>2009/04/10</td> <td>$237,500</td> </tr> <tr> <td>Bradley Greer</td> <td>Software Engineer</td> <td>London</td> <td>41</td> <td>2012/10/13</td> <td>$132,000</td> </tr> <tr> <td>Dai Rios</td> <td>Personnel Lead</td> <td>Edinburgh</td> <td>35</td> <td>2012/09/26</td> <td>$217,500</td> </tr> <tr> <td>Jenette Caldwell</td> <td>Development Lead</td> <td>New York</td> <td>30</td> <td>2011/09/03</td> <td>$345,000</td> </tr> <tr> <td>Yuri Berry</td> <td>Chief Marketing Officer (CMO)</td> <td>New York</td> <td>40</td> <td>2009/06/25</td> <td>$675,000</td> </tr> <tr> <td>Caesar Vance</td> <td>Pre-Sales Support</td> <td>New York</td> <td>21</td> <td>2011/12/12</td> <td>$106,450</td> </tr> <tr> <td>Doris Wilder</td> <td>Sales Assistant</td> <td>Sidney</td> <td>23</td> <td>2010/09/20</td> <td>$85,600</td> </tr> <tr> <td>Angelica Ramos</td> <td>Chief Executive Officer (CEO)</td> <td>London</td> <td>47</td> <td>2009/10/09</td> <td>$1,200,000</td> </tr> <tr> <td>Gavin 
Joyce</td> <td>Developer</td> <td>Edinburgh</td> <td>42</td> <td>2010/12/22</td> <td>$92,575</td> </tr> <tr> <td>Jennifer Chang</td> <td>Regional Director</td> <td>Singapore</td> <td>28</td> <td>2010/11/14</td> <td>$357,650</td> </tr> <tr> <td>Brenden Wagner</td> <td>Software Engineer</td> <td>San Francisco</td> <td>28</td> <td>2011/06/07</td> <td>$206,850</td> </tr> <tr> <td>Fiona Green</td> <td>Chief Operating Officer (COO)</td> <td>San Francisco</td> <td>48</td> <td>2010/03/11</td> <td>$850,000</td> </tr> <tr> <td>Shou Itou</td> <td>Regional Marketing</td> <td>Tokyo</td> <td>20</td> <td>2011/08/14</td> <td>$163,000</td> </tr> <tr> <td>Michelle House</td> <td>Integration Specialist</td> <td>Sidney</td> <td>37</td> <td>2011/06/02</td> <td>$95,400</td> </tr> <tr> <td>Suki Burks</td> <td>Developer</td> <td>London</td> <td>53</td> <td>2009/10/22</td> <td>$114,500</td> </tr> <tr> <td>Prescott Bartlett</td> <td>Technical Author</td> <td>London</td> <td>27</td> <td>2011/05/07</td> <td>$145,000</td> </tr> <tr> <td>Gavin Cortez</td> <td>Team Leader</td> <td>San Francisco</td> <td>22</td> <td>2008/10/26</td> <td>$235,500</td> </tr> <tr> <td>Martena Mccray</td> <td>Post-Sales support</td> <td>Edinburgh</td> <td>46</td> <td>2011/03/09</td> <td>$324,050</td> </tr> <tr> <td>Unity Butler</td> <td>Marketing Designer</td> <td>San Francisco</td> <td>47</td> <td>2009/12/09</td> <td>$85,675</td> </tr> <tr> <td>Howard Hatfield</td> <td>Office Manager</td> <td>San Francisco</td> <td>51</td> <td>2008/12/16</td> <td>$164,500</td> </tr> <tr> <td>Hope Fuentes</td> <td>Secretary</td> <td>San Francisco</td> <td>41</td> <td>2010/02/12</td> <td>$109,850</td> </tr> <tr> <td>Vivian Harrell</td> <td>Financial Controller</td> <td>San Francisco</td> <td>62</td> <td>2009/02/14</td> <td>$452,500</td> </tr> <tr> <td>Timothy Mooney</td> <td>Office Manager</td> <td>London</td> <td>37</td> <td>2008/12/11</td> <td>$136,200</td> </tr> <tr> <td>Jackson Bradshaw</td> <td>Director</td> <td>New York</td> 
<td>65</td> <td>2008/09/26</td> <td>$645,750</td> </tr> <tr> <td>Olivia Liang</td> <td>Support Engineer</td> <td>Singapore</td> <td>64</td> <td>2011/02/03</td> <td>$234,500</td> </tr> <tr> <td>Bruno Nash</td> <td>Software Engineer</td> <td>London</td> <td>38</td> <td>2011/05/03</td> <td>$163,500</td> </tr> <tr> <td>Sakura Yamamoto</td> <td>Support Engineer</td> <td>Tokyo</td> <td>37</td> <td>2009/08/19</td> <td>$139,575</td> </tr> <tr> <td>Thor Walton</td> <td>Developer</td> <td>New York</td> <td>61</td> <td>2013/08/11</td> <td>$98,540</td> </tr> <tr> <td>Finn Camacho</td> <td>Support Engineer</td> <td>San Francisco</td> <td>47</td> <td>2009/07/07</td> <td>$87,500</td> </tr> <tr> <td>Serge Baldwin</td> <td>Data Coordinator</td> <td>Singapore</td> <td>64</td> <td>2012/04/09</td> <td>$138,575</td> </tr> <tr> <td>Zenaida Frank</td> <td>Software Engineer</td> <td>New York</td> <td>63</td> <td>2010/01/04</td> <td>$125,250</td> </tr> <tr> <td>Zorita Serrano</td> <td>Software Engineer</td> <td>San Francisco</td> <td>56</td> <td>2012/06/01</td> <td>$115,000</td> </tr> <tr> <td>Jennifer Acosta</td> <td>Junior Javascript Developer</td> <td>Edinburgh</td> <td>43</td> <td>2013/02/01</td> <td>$75,650</td> </tr> <tr> <td>Cara Stevens</td> <td>Sales Assistant</td> <td>New York</td> <td>46</td> <td>2011/12/06</td> <td>$145,600</td> </tr> <tr> <td>Hermione Butler</td> <td>Regional Director</td> <td>London</td> <td>47</td> <td>2011/03/21</td> <td>$356,250</td> </tr> <tr> <td>Lael Greer</td> <td>Systems Administrator</td> <td>London</td> <td>21</td> <td>2009/02/27</td> <td>$103,500</td> </tr> <tr> <td>Jonas Alexander</td> <td>Developer</td> <td>San Francisco</td> <td>30</td> <td>2010/07/14</td> <td>$86,500</td> </tr> <tr> <td>Shad Decker</td> <td>Regional Director</td> <td>Edinburgh</td> <td>51</td> <td>2008/11/13</td> <td>$183,000</td> </tr> <tr> <td>Michael Bruce</td> <td>Javascript Developer</td> <td>Singapore</td> <td>29</td> <td>2011/06/27</td> <td>$183,000</td> </tr> <tr> 
<td>Donna Snider</td> <td>Customer Support</td> <td>New York</td> <td>27</td> <td>2011/01/25</td> <td>$112,000</td> </tr> </tbody> </table> <ul class="tabs"> <li class="active">Javascript</li> <li>HTML</li> <li>CSS</li> <li>Ajax</li> <li>Server-side script</li> </ul> <div class="tabs"> <div class="js"> <p>The Javascript shown below is used to initialise the table shown in this example:</p><code class="multiline language-js">$(document).ready(function() { $('#example').DataTable( { &quot;dom&quot;: 'T&lt;&quot;clear&quot;&gt;lfrtip', &quot;tableTools&quot;: { &quot;aButtons&quot;: [ &quot;copy&quot;, &quot;print&quot;, { &quot;sExtends&quot;: &quot;collection&quot;, &quot;sButtonText&quot;: &quot;Save&quot;, &quot;aButtons&quot;: [ &quot;csv&quot;, &quot;xls&quot;, &quot;pdf&quot; ] } ] } } ); } );</code> <p>In addition to the above code, the following Javascript library files are loaded for use in this example:</p> <ul> <li><a href="../../../media/js/jquery.js">../../../media/js/jquery.js</a></li> <li><a href="../../../media/js/jquery.dataTables.js">../../../media/js/jquery.dataTables.js</a></li> <li><a href="../js/dataTables.tableTools.js">../js/dataTables.tableTools.js</a></li> </ul> </div> <div class="table"> <p>The HTML shown below is the raw HTML table element, before it has been enhanced by DataTables:</p> </div> <div class="css"> <div> <p>This example uses a little bit of additional CSS beyond what is loaded from the library files (below), in order to correctly display the table. The additional CSS used is shown below:</p><code class="multiline language-css"></code> </div> <p>The following CSS library files are loaded for use in this example to provide the styling of the table:</p> <ul> <li><a href="../../../media/css/jquery.dataTables.css">../../../media/css/jquery.dataTables.css</a></li> <li><a href="../css/dataTables.tableTools.css">../css/dataTables.tableTools.css</a></li> </ul> </div> <div class="ajax"> <p>This table loads data by Ajax. 
The latest data that has been loaded is shown below. This data will update automatically as any additional data is loaded.</p> </div> <div class="php"> <p>The script used to perform the server-side processing for this table is shown below. Please note that this is just an example script using PHP. Server-side processing scripts can be written in any language, using <a href="//datatables.net/manual/server-side">the protocol described in the DataTables documentation</a>.</p> </div> </div> </section> </div> <section> <div class="footer"> <div class="gradient"></div> <div class="liner"> <h2>Other examples</h2> <div class="toc"> <div class="toc-group"> <h3><a href="./index.html">Examples</a></h3> <ul class="toc active"> <li><a href="./simple.html">Basic initialisation</a></li> <li><a href="./swf_path.html">Setting the SWF path</a></li> <li><a href="./new_init.html">Initialisation with `new`</a></li> <li><a href="./defaults.html">Defaults</a></li> <li><a href="./select_single.html">Row selection - single row select</a></li> <li><a href="./select_multi.html">Row selection - multi-row select</a></li> <li><a href="./select_os.html">Row selection - operating system style</a></li> <li><a href="./select_column.html">Row selection - row selector on specific cells</a></li> <li><a href="./multiple_tables.html">Multiple tables</a></li> <li><a href="./multi_instance.html">Multiple toolbars</a></li> <li class="active"><a href="./collection.html">Button collections</a></li> <li><a href="./plug-in.html">Plug-in button types</a></li> <li><a href="./button_text.html">Custom button text</a></li> <li><a href="./alter_buttons.html">Button arrangement</a></li> <li><a href="./ajax.html">Ajax loaded data</a></li> <li><a href="./pdf_message.html">PDF message</a></li> <li><a href="./bootstrap.html">Bootstrap styling</a></li> <li><a href="./jqueryui.html">jQuery UI styling</a></li> </ul> </div> </div> <div class="epilogue"> <p>Please refer to the <a href="http://www.datatables.net">DataTables 
documentation</a> for full information about its API properties and methods.<br> Additionally, there are a wide range of <a href="http://www.datatables.net/extras">extras</a> and <a href="http://www.datatables.net/plug-ins">plug-ins</a> which extend the capabilities of DataTables.</p> <p class="copyright">DataTables designed and created by <a href="http://www.sprymedia.co.uk">SpryMedia Ltd</a> &#169; 2007-2015<br> DataTables is licensed under the <a href="http://www.datatables.net/mit">MIT license</a>.</p> </div> </div> </div> </section> </body> </html>
{ "pile_set_name": "Github" }
require_relative '../../../spec_helper' require 'csv' describe "CSV::IOBuf#terminate" do it "needs to be reviewed for spec completeness" end
{ "pile_set_name": "Github" }
framework module Pods_RecoilApp { umbrella header "Pods-RecoilApp-umbrella.h" export * module * { export * } }
{ "pile_set_name": "Github" }
# Decision Tree A Ruby library which implements [ID3 (information gain)](https://en.wikipedia.org/wiki/ID3_algorithm) algorithm for decision tree learning. Currently, continuous and discrete datasets can be learned. - Discrete model assumes unique labels & can be graphed and converted into a png for visual analysis - Continuous looks at all possible values for a variable and iteratively chooses the best threshold between all possible assignments. This results in a binary tree which is partitioned by the threshold at every step. (e.g. temperate > 20C) ## Features - ID3 algorithms for continuous and discrete cases, with support for inconsistent datasets. - [Graphviz component](http://rockit.sourceforge.net/subprojects/graphr/) to visualize the learned tree - Support for multiple, and symbolic outputs and graphing of continuous trees. - Returns default value when no branches are suitable for input ## Implementation - Ruleset is a class that trains an ID3Tree with 2/3 of the training data, converts it into set of rules and prunes the rules with the remaining 1/3 of the training data (in a [C4.5](https://en.wikipedia.org/wiki/C4.5_algorithm) way). - Bagging is a bagging-based trainer (quite obvious), which trains 10 Ruleset trainers and when predicting chooses the best output based on voting. [Blog post with explanation & examples](http://www.igvita.com/2007/04/16/decision-tree-learning-in-ruby/) ## Example ```ruby require 'decisiontree' attributes = ['Temperature'] training = [ [36.6, 'healthy'], [37, 'sick'], [38, 'sick'], [36.7, 'healthy'], [40, 'sick'], [50, 'really sick'], ] # Instantiate the tree, and train it based on the data (set default to '1') dec_tree = DecisionTree::ID3Tree.new(attributes, training, 'sick', :continuous) dec_tree.train test = [37, 'sick'] decision = dec_tree.predict(test) puts "Predicted: #{decision} ... True decision: #{test.last}" # => Predicted: sick ... 
True decision: sick # Specify type ("discrete" or "continuous") in the training data labels = ["hunger", "color"] training = [ [8, "red", "angry"], [6, "red", "angry"], [7, "red", "angry"], [7, "blue", "not angry"], [2, "red", "not angry"], [3, "blue", "not angry"], [2, "blue", "not angry"], [1, "red", "not angry"] ] dec_tree = DecisionTree::ID3Tree.new(labels, training, "not angry", color: :discrete, hunger: :continuous) dec_tree.train test = [7, "red", "angry"] decision = dec_tree.predict(test) puts "Predicted: #{decision} ... True decision: #{test.last}" # => Predicted: angry ... True decision: angry ``` ## License The [MIT License](https://opensource.org/licenses/MIT) - Copyright (c) 2006 Ilya Grigorik
{ "pile_set_name": "Github" }
> 下载地址:https://c-t.work/s/f7dbad9e43e145
{ "pile_set_name": "Github" }
import 'package:hive/src/binary/frame.dart'; import 'package:test/test.dart'; import '../common.dart'; void main() { group('Frame', () { group('constructors verifies', () { test('int keys', () { Frame(0, null); Frame.lazy(0); Frame.deleted(0); Frame(4294967295, null); Frame.lazy(4294967295); Frame.deleted(4294967295); expect(() => Frame(-1, null), throwsHiveError()); expect(() => Frame.lazy(-1), throwsHiveError()); expect(() => Frame.deleted(-1), throwsHiveError()); expect(() => Frame(4294967296, null), throwsHiveError()); expect(() => Frame.lazy(4294967296), throwsHiveError()); expect(() => Frame.deleted(4294967296), throwsHiveError()); }); test('string keys', () { Frame('', null); Frame.lazy(''); Frame.deleted(''); Frame('a' * 255, null); Frame.lazy('a' * 255); Frame.deleted('a' * 255); expect(() => Frame('hellö', null), throwsHiveError()); expect(() => Frame.lazy('hellö'), throwsHiveError()); expect(() => Frame.deleted('hellö'), throwsHiveError()); expect(() => Frame('a' * 256, null), throwsHiveError()); expect(() => Frame.lazy('a' * 256), throwsHiveError()); expect(() => Frame.deleted('a' * 256), throwsHiveError()); }); }); test('.toString()', () { expect(Frame('key', 'val', offset: 1, length: 2).toString(), 'Frame(key: key, value: val, length: 2, offset: 1)'); expect(Frame.lazy('key', offset: 1, length: 2).toString(), 'Frame.lazy(key: key, length: 2, offset: 1)'); expect(Frame.deleted('key', length: 2).toString(), 'Frame.deleted(key: key, length: 2)'); }); }); }
{ "pile_set_name": "Github" }
function results = vl_test_whistc(varargin) % VL_TEST_WHISTC vl_test_init ; function test_acc() x = ones(1, 10) ; e = 1 ; o = 1:10 ; vl_assert_equal(vl_whistc(x, o, e), 55) ; function test_basic() x = 1:10 ; e = 1:10 ; o = ones(1, 10) ; vl_assert_equal(histc(x, e), vl_whistc(x, o, e)) ; x = linspace(-1,11,100) ; o = ones(size(x)) ; vl_assert_equal(histc(x, e), vl_whistc(x, o, e)) ; function test_multidim() x = rand(10, 20, 30) ; e = linspace(0,1,10) ; o = ones(size(x)) ; vl_assert_equal(histc(x, e), vl_whistc(x, o, e)) ; vl_assert_equal(histc(x, e, 1), vl_whistc(x, o, e, 1)) ; vl_assert_equal(histc(x, e, 2), vl_whistc(x, o, e, 2)) ; vl_assert_equal(histc(x, e, 3), vl_whistc(x, o, e, 3)) ; function test_nan() x = rand(10, 20, 30) ; e = linspace(0,1,10) ; o = ones(size(x)) ; x(1:7:end) = NaN ; vl_assert_equal(histc(x, e), vl_whistc(x, o, e)) ; vl_assert_equal(histc(x, e, 1), vl_whistc(x, o, e, 1)) ; vl_assert_equal(histc(x, e, 2), vl_whistc(x, o, e, 2)) ; vl_assert_equal(histc(x, e, 3), vl_whistc(x, o, e, 3)) ; function test_no_edges() x = rand(10, 20, 30) ; o = ones(size(x)) ; vl_assert_equal(histc(1, []), vl_whistc(1, 1, [])) ; vl_assert_equal(histc(x, []), vl_whistc(x, o, [])) ; vl_assert_equal(histc(x, [], 1), vl_whistc(x, o, [], 1)) ; vl_assert_equal(histc(x, [], 2), vl_whistc(x, o, [], 2)) ; vl_assert_equal(histc(x, [], 3), vl_whistc(x, o, [], 3)) ;
{ "pile_set_name": "Github" }
issuerepo: golang/go
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8" ?> <configuration> <startup> <supportedRuntime version="v4.0" sku=".NETFramework,Version=v4.5" /> </startup> </configuration>
{ "pile_set_name": "Github" }
/** @format */ import { By } from 'selenium-webdriver'; import AsyncBaseContainer from '../../async-base-container'; import * as driverHelper from '../../driver-helper'; export default class BusinessAddressPage extends AsyncBaseContainer { constructor( driver ) { super( driver, By.css( '.jetpack-onboarding' ) ); } async selectAddBusinessAddress() { const businessAddressSelector = By.css( '.card[data-e2e-type="business-address"] button' ); await driverHelper.waitTillPresentAndDisplayed( this.driver, businessAddressSelector ); return await driverHelper.clickWhenClickable( this.driver, businessAddressSelector ); } async selectContinue() { const continueSelector = By.css( '.card[data-e2e-type="continue"] button' ); await driverHelper.waitTillPresentAndDisplayed( this.driver, continueSelector ); return await driverHelper.clickWhenClickable( this.driver, continueSelector ); } async enterBusinessAddressAndSubmit( name, street, city, state, zip, country ) { await driverHelper.waitTillPresentAndDisplayed( this.driver, By.css( '#name' ) ); await driverHelper.setWhenSettable( this.driver, By.css( '#name' ), name ); await driverHelper.setWhenSettable( this.driver, By.css( '#street' ), street ); await driverHelper.setWhenSettable( this.driver, By.css( '#city' ), city ); await driverHelper.setWhenSettable( this.driver, By.css( '#state' ), state ); await driverHelper.setWhenSettable( this.driver, By.css( '#zip' ), zip ); await driverHelper.setWhenSettable( this.driver, By.css( '#country' ), country ); return await driverHelper.clickWhenClickable( this.driver, By.css( 'button.is-primary' ) ); } }
{ "pile_set_name": "Github" }
// © 2016 and later: Unicode, Inc. and others. // License & terms of use: http://www.unicode.org/copyright.html#License pa_PK{ "%%ALIAS"{"pa_Arab_PK"} }
{ "pile_set_name": "Github" }
<?php /** * PHPExcel * * Copyright (c) 2006 - 2013 PHPExcel * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * @category PHPExcel * @package PHPExcel_Writer_Excel2007 * @copyright Copyright (c) 2006 - 2013 PHPExcel (http://www.codeplex.com/PHPExcel) * @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL * @version ##VERSION##, ##DATE## */ /** * PHPExcel_Writer_Excel2007_Drawing * * @category PHPExcel * @package PHPExcel_Writer_Excel2007 * @copyright Copyright (c) 2006 - 2013 PHPExcel (http://www.codeplex.com/PHPExcel) */ class PHPExcel_Writer_Excel2007_Drawing extends PHPExcel_Writer_Excel2007_WriterPart { /** * Write drawings to XML format * * @param PHPExcel_Worksheet $pWorksheet * @param int &$chartRef Chart ID * @param boolean $includeCharts Flag indicating if we should include drawing details for charts * @return string XML Output * @throws PHPExcel_Writer_Exception */ public function writeDrawings(PHPExcel_Worksheet $pWorksheet = null, &$chartRef, $includeCharts = FALSE) { // Create XML writer $objWriter = null; if ($this->getParentWriter()->getUseDiskCaching()) { $objWriter = new PHPExcel_Shared_XMLWriter(PHPExcel_Shared_XMLWriter::STORAGE_DISK, $this->getParentWriter()->getDiskCachingDirectory()); } else { $objWriter = new 
PHPExcel_Shared_XMLWriter(PHPExcel_Shared_XMLWriter::STORAGE_MEMORY); } // XML header $objWriter->startDocument('1.0','UTF-8','yes'); // xdr:wsDr $objWriter->startElement('xdr:wsDr'); $objWriter->writeAttribute('xmlns:xdr', 'http://schemas.openxmlformats.org/drawingml/2006/spreadsheetDrawing'); $objWriter->writeAttribute('xmlns:a', 'http://schemas.openxmlformats.org/drawingml/2006/main'); // Loop through images and write drawings $i = 1; $iterator = $pWorksheet->getDrawingCollection()->getIterator(); while ($iterator->valid()) { $this->_writeDrawing($objWriter, $iterator->current(), $i); $iterator->next(); ++$i; } if ($includeCharts) { $chartCount = $pWorksheet->getChartCount(); // Loop through charts and write the chart position if ($chartCount > 0) { for ($c = 0; $c < $chartCount; ++$c) { $this->_writeChart($objWriter, $pWorksheet->getChartByIndex($c), $c+$i); } } } $objWriter->endElement(); // Return return $objWriter->getData(); } /** * Write drawings to XML format * * @param PHPExcel_Shared_XMLWriter $objWriter XML Writer * @param PHPExcel_Chart $pChart * @param int $pRelationId * @throws PHPExcel_Writer_Exception */ public function _writeChart(PHPExcel_Shared_XMLWriter $objWriter = null, PHPExcel_Chart $pChart = null, $pRelationId = -1) { $tl = $pChart->getTopLeftPosition(); $tl['colRow'] = PHPExcel_Cell::coordinateFromString($tl['cell']); $br = $pChart->getBottomRightPosition(); $br['colRow'] = PHPExcel_Cell::coordinateFromString($br['cell']); $objWriter->startElement('xdr:twoCellAnchor'); $objWriter->startElement('xdr:from'); $objWriter->writeElement('xdr:col', PHPExcel_Cell::columnIndexFromString($tl['colRow'][0]) - 1); $objWriter->writeElement('xdr:colOff', PHPExcel_Shared_Drawing::pixelsToEMU($tl['xOffset'])); $objWriter->writeElement('xdr:row', $tl['colRow'][1] - 1); $objWriter->writeElement('xdr:rowOff', PHPExcel_Shared_Drawing::pixelsToEMU($tl['yOffset'])); $objWriter->endElement(); $objWriter->startElement('xdr:to'); 
$objWriter->writeElement('xdr:col', PHPExcel_Cell::columnIndexFromString($br['colRow'][0]) - 1); $objWriter->writeElement('xdr:colOff', PHPExcel_Shared_Drawing::pixelsToEMU($br['xOffset'])); $objWriter->writeElement('xdr:row', $br['colRow'][1] - 1); $objWriter->writeElement('xdr:rowOff', PHPExcel_Shared_Drawing::pixelsToEMU($br['yOffset'])); $objWriter->endElement(); $objWriter->startElement('xdr:graphicFrame'); $objWriter->writeAttribute('macro', ''); $objWriter->startElement('xdr:nvGraphicFramePr'); $objWriter->startElement('xdr:cNvPr'); $objWriter->writeAttribute('name', 'Chart '.$pRelationId); $objWriter->writeAttribute('id', 1025 * $pRelationId); $objWriter->endElement(); $objWriter->startElement('xdr:cNvGraphicFramePr'); $objWriter->startElement('a:graphicFrameLocks'); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); $objWriter->startElement('xdr:xfrm'); $objWriter->startElement('a:off'); $objWriter->writeAttribute('x', '0'); $objWriter->writeAttribute('y', '0'); $objWriter->endElement(); $objWriter->startElement('a:ext'); $objWriter->writeAttribute('cx', '0'); $objWriter->writeAttribute('cy', '0'); $objWriter->endElement(); $objWriter->endElement(); $objWriter->startElement('a:graphic'); $objWriter->startElement('a:graphicData'); $objWriter->writeAttribute('uri', 'http://schemas.openxmlformats.org/drawingml/2006/chart'); $objWriter->startElement('c:chart'); $objWriter->writeAttribute('xmlns:c', 'http://schemas.openxmlformats.org/drawingml/2006/chart'); $objWriter->writeAttribute('xmlns:r', 'http://schemas.openxmlformats.org/officeDocument/2006/relationships'); $objWriter->writeAttribute('r:id', 'rId'.$pRelationId); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); $objWriter->startElement('xdr:clientData'); $objWriter->endElement(); $objWriter->endElement(); } /** * Write drawings to XML format * * @param PHPExcel_Shared_XMLWriter $objWriter XML Writer * @param 
PHPExcel_Worksheet_BaseDrawing $pDrawing * @param int $pRelationId * @throws PHPExcel_Writer_Exception */ public function _writeDrawing(PHPExcel_Shared_XMLWriter $objWriter = null, PHPExcel_Worksheet_BaseDrawing $pDrawing = null, $pRelationId = -1) { if ($pRelationId >= 0) { // xdr:oneCellAnchor $objWriter->startElement('xdr:oneCellAnchor'); // Image location $aCoordinates = PHPExcel_Cell::coordinateFromString($pDrawing->getCoordinates()); $aCoordinates[0] = PHPExcel_Cell::columnIndexFromString($aCoordinates[0]); // xdr:from $objWriter->startElement('xdr:from'); $objWriter->writeElement('xdr:col', $aCoordinates[0] - 1); $objWriter->writeElement('xdr:colOff', PHPExcel_Shared_Drawing::pixelsToEMU($pDrawing->getOffsetX())); $objWriter->writeElement('xdr:row', $aCoordinates[1] - 1); $objWriter->writeElement('xdr:rowOff', PHPExcel_Shared_Drawing::pixelsToEMU($pDrawing->getOffsetY())); $objWriter->endElement(); // xdr:ext $objWriter->startElement('xdr:ext'); $objWriter->writeAttribute('cx', PHPExcel_Shared_Drawing::pixelsToEMU($pDrawing->getWidth())); $objWriter->writeAttribute('cy', PHPExcel_Shared_Drawing::pixelsToEMU($pDrawing->getHeight())); $objWriter->endElement(); // xdr:pic $objWriter->startElement('xdr:pic'); // xdr:nvPicPr $objWriter->startElement('xdr:nvPicPr'); // xdr:cNvPr $objWriter->startElement('xdr:cNvPr'); $objWriter->writeAttribute('id', $pRelationId); $objWriter->writeAttribute('name', $pDrawing->getName()); $objWriter->writeAttribute('descr', $pDrawing->getDescription()); $objWriter->endElement(); // xdr:cNvPicPr $objWriter->startElement('xdr:cNvPicPr'); // a:picLocks $objWriter->startElement('a:picLocks'); $objWriter->writeAttribute('noChangeAspect', '1'); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); // xdr:blipFill $objWriter->startElement('xdr:blipFill'); // a:blip $objWriter->startElement('a:blip'); $objWriter->writeAttribute('xmlns:r', 'http://schemas.openxmlformats.org/officeDocument/2006/relationships'); 
$objWriter->writeAttribute('r:embed', 'rId' . $pRelationId); $objWriter->endElement(); // a:stretch $objWriter->startElement('a:stretch'); $objWriter->writeElement('a:fillRect', null); $objWriter->endElement(); $objWriter->endElement(); // xdr:spPr $objWriter->startElement('xdr:spPr'); // a:xfrm $objWriter->startElement('a:xfrm'); $objWriter->writeAttribute('rot', PHPExcel_Shared_Drawing::degreesToAngle($pDrawing->getRotation())); $objWriter->endElement(); // a:prstGeom $objWriter->startElement('a:prstGeom'); $objWriter->writeAttribute('prst', 'rect'); // a:avLst $objWriter->writeElement('a:avLst', null); $objWriter->endElement(); // // a:solidFill // $objWriter->startElement('a:solidFill'); // // a:srgbClr // $objWriter->startElement('a:srgbClr'); // $objWriter->writeAttribute('val', 'FFFFFF'); ///* SHADE // // a:shade // $objWriter->startElement('a:shade'); // $objWriter->writeAttribute('val', '85000'); // $objWriter->endElement(); //*/ // $objWriter->endElement(); // $objWriter->endElement(); /* // a:ln $objWriter->startElement('a:ln'); $objWriter->writeAttribute('w', '88900'); $objWriter->writeAttribute('cap', 'sq'); // a:solidFill $objWriter->startElement('a:solidFill'); // a:srgbClr $objWriter->startElement('a:srgbClr'); $objWriter->writeAttribute('val', 'FFFFFF'); $objWriter->endElement(); $objWriter->endElement(); // a:miter $objWriter->startElement('a:miter'); $objWriter->writeAttribute('lim', '800000'); $objWriter->endElement(); $objWriter->endElement(); */ if ($pDrawing->getShadow()->getVisible()) { // a:effectLst $objWriter->startElement('a:effectLst'); // a:outerShdw $objWriter->startElement('a:outerShdw'); $objWriter->writeAttribute('blurRad', PHPExcel_Shared_Drawing::pixelsToEMU($pDrawing->getShadow()->getBlurRadius())); $objWriter->writeAttribute('dist', PHPExcel_Shared_Drawing::pixelsToEMU($pDrawing->getShadow()->getDistance())); $objWriter->writeAttribute('dir', PHPExcel_Shared_Drawing::degreesToAngle($pDrawing->getShadow()->getDirection())); 
$objWriter->writeAttribute('algn', $pDrawing->getShadow()->getAlignment()); $objWriter->writeAttribute('rotWithShape', '0'); // a:srgbClr $objWriter->startElement('a:srgbClr'); $objWriter->writeAttribute('val', $pDrawing->getShadow()->getColor()->getRGB()); // a:alpha $objWriter->startElement('a:alpha'); $objWriter->writeAttribute('val', $pDrawing->getShadow()->getAlpha() * 1000); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); } /* // a:scene3d $objWriter->startElement('a:scene3d'); // a:camera $objWriter->startElement('a:camera'); $objWriter->writeAttribute('prst', 'orthographicFront'); $objWriter->endElement(); // a:lightRig $objWriter->startElement('a:lightRig'); $objWriter->writeAttribute('rig', 'twoPt'); $objWriter->writeAttribute('dir', 't'); // a:rot $objWriter->startElement('a:rot'); $objWriter->writeAttribute('lat', '0'); $objWriter->writeAttribute('lon', '0'); $objWriter->writeAttribute('rev', '0'); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); */ /* // a:sp3d $objWriter->startElement('a:sp3d'); // a:bevelT $objWriter->startElement('a:bevelT'); $objWriter->writeAttribute('w', '25400'); $objWriter->writeAttribute('h', '19050'); $objWriter->endElement(); // a:contourClr $objWriter->startElement('a:contourClr'); // a:srgbClr $objWriter->startElement('a:srgbClr'); $objWriter->writeAttribute('val', 'FFFFFF'); $objWriter->endElement(); $objWriter->endElement(); $objWriter->endElement(); */ $objWriter->endElement(); $objWriter->endElement(); // xdr:clientData $objWriter->writeElement('xdr:clientData', null); $objWriter->endElement(); } else { throw new PHPExcel_Writer_Exception("Invalid parameters passed."); } } /** * Write VML header/footer images to XML format * * @param PHPExcel_Worksheet $pWorksheet * @return string XML Output * @throws PHPExcel_Writer_Exception */ public function writeVMLHeaderFooterImages(PHPExcel_Worksheet $pWorksheet = null) { // Create XML writer 
$objWriter = null; if ($this->getParentWriter()->getUseDiskCaching()) { $objWriter = new PHPExcel_Shared_XMLWriter(PHPExcel_Shared_XMLWriter::STORAGE_DISK, $this->getParentWriter()->getDiskCachingDirectory()); } else { $objWriter = new PHPExcel_Shared_XMLWriter(PHPExcel_Shared_XMLWriter::STORAGE_MEMORY); } // XML header $objWriter->startDocument('1.0','UTF-8','yes'); // Header/footer images $images = $pWorksheet->getHeaderFooter()->getImages(); // xml $objWriter->startElement('xml'); $objWriter->writeAttribute('xmlns:v', 'urn:schemas-microsoft-com:vml'); $objWriter->writeAttribute('xmlns:o', 'urn:schemas-microsoft-com:office:office'); $objWriter->writeAttribute('xmlns:x', 'urn:schemas-microsoft-com:office:excel'); // o:shapelayout $objWriter->startElement('o:shapelayout'); $objWriter->writeAttribute('v:ext', 'edit'); // o:idmap $objWriter->startElement('o:idmap'); $objWriter->writeAttribute('v:ext', 'edit'); $objWriter->writeAttribute('data', '1'); $objWriter->endElement(); $objWriter->endElement(); // v:shapetype $objWriter->startElement('v:shapetype'); $objWriter->writeAttribute('id', '_x0000_t75'); $objWriter->writeAttribute('coordsize', '21600,21600'); $objWriter->writeAttribute('o:spt', '75'); $objWriter->writeAttribute('o:preferrelative', 't'); $objWriter->writeAttribute('path', 'm@4@5l@4@11@9@11@9@5xe'); $objWriter->writeAttribute('filled', 'f'); $objWriter->writeAttribute('stroked', 'f'); // v:stroke $objWriter->startElement('v:stroke'); $objWriter->writeAttribute('joinstyle', 'miter'); $objWriter->endElement(); // v:formulas $objWriter->startElement('v:formulas'); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'if lineDrawn pixelLineWidth 0'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'sum @0 1 0'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'sum 0 0 @1'); $objWriter->endElement(); // v:f 
$objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'prod @2 1 2'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'prod @3 21600 pixelWidth'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'prod @3 21600 pixelHeight'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'sum @0 0 1'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'prod @6 1 2'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'prod @7 21600 pixelWidth'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'sum @8 21600 0'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'prod @7 21600 pixelHeight'); $objWriter->endElement(); // v:f $objWriter->startElement('v:f'); $objWriter->writeAttribute('eqn', 'sum @10 21600 0'); $objWriter->endElement(); $objWriter->endElement(); // v:path $objWriter->startElement('v:path'); $objWriter->writeAttribute('o:extrusionok', 'f'); $objWriter->writeAttribute('gradientshapeok', 't'); $objWriter->writeAttribute('o:connecttype', 'rect'); $objWriter->endElement(); // o:lock $objWriter->startElement('o:lock'); $objWriter->writeAttribute('v:ext', 'edit'); $objWriter->writeAttribute('aspectratio', 't'); $objWriter->endElement(); $objWriter->endElement(); // Loop through images foreach ($images as $key => $value) { $this->_writeVMLHeaderFooterImage($objWriter, $key, $value); } $objWriter->endElement(); // Return return $objWriter->getData(); } /** * Write VML comment to XML format * * @param PHPExcel_Shared_XMLWriter $objWriter XML Writer * @param string $pReference Reference * @param PHPExcel_Worksheet_HeaderFooterDrawing $pImage Image * @throws PHPExcel_Writer_Exception */ public function 
_writeVMLHeaderFooterImage(PHPExcel_Shared_XMLWriter $objWriter = null, $pReference = '', PHPExcel_Worksheet_HeaderFooterDrawing $pImage = null) { // Calculate object id preg_match('{(\d+)}', md5($pReference), $m); $id = 1500 + (substr($m[1], 0, 2) * 1); // Calculate offset $width = $pImage->getWidth(); $height = $pImage->getHeight(); $marginLeft = $pImage->getOffsetX(); $marginTop = $pImage->getOffsetY(); // v:shape $objWriter->startElement('v:shape'); $objWriter->writeAttribute('id', $pReference); $objWriter->writeAttribute('o:spid', '_x0000_s' . $id); $objWriter->writeAttribute('type', '#_x0000_t75'); $objWriter->writeAttribute('style', "position:absolute;margin-left:{$marginLeft}px;margin-top:{$marginTop}px;width:{$width}px;height:{$height}px;z-index:1"); // v:imagedata $objWriter->startElement('v:imagedata'); $objWriter->writeAttribute('o:relid', 'rId' . $pReference); $objWriter->writeAttribute('o:title', $pImage->getName()); $objWriter->endElement(); // o:lock $objWriter->startElement('o:lock'); $objWriter->writeAttribute('v:ext', 'edit'); $objWriter->writeAttribute('rotation', 't'); $objWriter->endElement(); $objWriter->endElement(); } /** * Get an array of all drawings * * @param PHPExcel $pPHPExcel * @return PHPExcel_Worksheet_Drawing[] All drawings in PHPExcel * @throws PHPExcel_Writer_Exception */ public function allDrawings(PHPExcel $pPHPExcel = null) { // Get an array of all drawings $aDrawings = array(); // Loop through PHPExcel $sheetCount = $pPHPExcel->getSheetCount(); for ($i = 0; $i < $sheetCount; ++$i) { // Loop through images and add to array $iterator = $pPHPExcel->getSheet($i)->getDrawingCollection()->getIterator(); while ($iterator->valid()) { $aDrawings[] = $iterator->current(); $iterator->next(); } } return $aDrawings; } }
{ "pile_set_name": "Github" }
<link rel="import" href="../polymer/polymer.html"> <link rel="import" href="chops-user-dropdown.html"> <link rel="import" href="chops-user-id.html"> <link rel="import" href="../paper-input/paper-input.html"> <link rel="import" href="../iron-behaviors/iron-control-state.html"> <dom-module id="chops-user-input"> <template> <style> </style> <paper-input id="input" label="[[label]]" value="{{inputValue}}" on-input="_inputChanged"> <template is="dom-repeat" items="[[selectedUsers]]"> <chops-user-id slot="prefix" user-id="[[item.userId]]" email="[[item.email]]" profile-link="[[item.profileLink]]" full-name="[[item.fullName]]" on-remove-user="_removeUser" removeable> </chops-user-id> </template> </paper-input> <chops-user-dropdown id="dropdown" on-user-selected="_saveUser" suggestions="[[suggestions]]"> </chops-user-dropdown> </template> <script> 'use strict'; /** * * `<chops-user-input>` as an input element for choosing users with * dropdown suggestions and autocomplete. * * It is up to the developer using this element to provide and update * the users listed in the `suggestions` property. The properties, * `inputValue` and `selectedUsers` are available for two-way binding * for this purpose. * * customElement * @polymer * @demo /demo/chops-user-input_demo.html * */ class ChopsUserInput extends Polymer.mixinBehaviors( [Polymer.IronControlState], Polymer.Element) { static get is() { return 'chops-user-input'; } static get properties() { return { /** * The current input value. * * @type String */ inputValue: { type: String, value: '', notify: true, }, /** * The display label. * * @type String */ label: { type: String, value: 'place holder', }, /** * List of suggestions displayed in the dropdown. * * @type Array<Object{userId, email(opt), profileLink(opt), fullName(opt)}> */ suggestions: { type: Array, value: () => { return []; }, notify: true, }, /** * List of users selected, so far. 
* * @type Array<Object{userId, email(opt), profileLink(opt), fullName(opt)}> */ selectedUsers: { type: Array, value: () => { return []; }, notify: true, }, /** * If true, multiple users can be selected. * * @type Boolean */ multiple: { type: Boolean, value: false, }, /** * If true, additional users cannot be selected. * * @type Boolean */ _inputDisabled: { type: Boolean, value: false, }, } } static get observers() { return [ '_updateInputDisabled(selectedUsers.length)' ] } ready() { super.ready(); this.addEventListener('focused-changed', e => this._toggleDropdown(e)); } /** Updates inputDisabled. */ _updateInputDisabled(length) { this._inputDisabled = Boolean(!this.multiple && length); } /** Toggles the visibility of the dropdown element. */ _toggleDropdown(e) { if (!this._inputDisabled) { if (!this.focused) { this.$.dropdown.close(); } else { this.$.dropdown.open(); } } } /** * Pushes a newly selected user to the selectedUser property * * @param {Event} e event that triggered the function with selectedUser * in the detail{}. */ _saveUser(e) { this.push('selectedUsers', e.detail.selectedUser); this.$.dropdown.close(); this.inputValue = ''; this.dispatchEvent( new CustomEvent('user-selected', {detail: e.detail})); } /** * Removes a selected user from the selectedUser property. * * @param {Event} e event that triggered the function with removedUser * in the detail{}. */ _removeUser(e) { let userId = e.detail.removedUser.userId; let index = -1; this.selectedUsers.forEach((user, i) => { if (user.userId === userId) { index = i; } }); if (index != -1) { this.splice('selectedUsers', index, 1); } } _inputChanged(e) { this.dispatchEvent(new CustomEvent('input', {detail: e.detail})); } } customElements.define(ChopsUserInput.is, ChopsUserInput); </script> </dom-moduel>
{ "pile_set_name": "Github" }
/* * Copyright 2009-2012, Plutext Pty Ltd. * * This file is part of pptx4j, a component of docx4j. docx4j is licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.docx4j.math; import javax.xml.bind.Unmarshaller; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlTransient; import javax.xml.bind.annotation.XmlType; import org.jvnet.jaxb2_commons.ppp.Child; /** * <p>Java class for CT_FuncPr complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="CT_FuncPr"> * &lt;complexContent> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"> * &lt;sequence> * &lt;element name="ctrlPr" type="{http://schemas.openxmlformats.org/officeDocument/2006/math}CT_CtrlPr" minOccurs="0"/> * &lt;/sequence> * &lt;/restriction> * &lt;/complexContent> * &lt;/complexType> * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "CT_FuncPr", propOrder = { "ctrlPr" }) public class CTFuncPr implements Child { protected CTCtrlPr ctrlPr; @XmlTransient private Object parent; /** * Gets the value of the ctrlPr property. * * @return * possible object is * {@link CTCtrlPr } * */ public CTCtrlPr getCtrlPr() { return ctrlPr; } /** * Sets the value of the ctrlPr property. 
* * @param value * allowed object is * {@link CTCtrlPr } * */ public void setCtrlPr(CTCtrlPr value) { this.ctrlPr = value; } /** * Gets the parent object in the object tree representing the unmarshalled xml document. * * @return * The parent object. */ public Object getParent() { return this.parent; } public void setParent(Object parent) { this.parent = parent; } /** * This method is invoked by the JAXB implementation on each instance when unmarshalling completes. * * @param parent * The parent object in the object tree. * @param unmarshaller * The unmarshaller that generated the instance. */ public void afterUnmarshal(Unmarshaller unmarshaller, Object parent) { setParent(parent); } }
{ "pile_set_name": "Github" }
eclipse.preferences.version=1 formatter_profile=_Space Indent & Long Lines formatter_settings_version=12 org.eclipse.jdt.ui.ignorelowercasenames=true org.eclipse.jdt.ui.importorder=java;javax;org;com; org.eclipse.jdt.ui.ondemandthreshold=99 org.eclipse.jdt.ui.staticondemandthreshold=1
{ "pile_set_name": "Github" }
<?php

declare(strict_types=1);

namespace Tests\Commands\Upgrade\Databases\V6_5_0\Seeds;

use Illuminate\Database\Seeder;
use Illuminate\Support\Facades\DB;

/**
 * Seeds the `configs` key/value table with the settings this blog expects
 * (site metadata, watermark, QQ group, mail, OSS storage, backup, social
 * share plugins, Baidu services), as fixture data for the v6.5.0 upgrade
 * command tests.
 */
class ConfigsTableSeeder extends Seeder
{
    /**
     * Auto generated seed file
     *
     * @return void
     */
    public function run()
    {
        // Wipe any existing rows first so the seed is repeatable and the
        // fixed `id` values below never collide. NOTE(review): truncate()
        // will fail if foreign keys reference this table — appears not to
        // be the case here, but confirm against the schema.
        DB::table('configs')->truncate();

        // Each row is one dotted config key with its (string) value plus
        // audit timestamps; `deleted_at` is null for every seeded row.
        DB::table('configs')->insert([
            ['id' => 101, 'name' => 'app.name', 'value' => '白俊遥博客', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 102, 'name' => 'bjyblog.head.keywords', 'value' => '个人博客,博客模板,thinkphp,laravel博客,php博客,技术博客,白俊遥', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 103, 'name' => 'bjyblog.head.description', 'value' => '白俊遥的php博客,个人技术博客,bjyblog,bjyadmin官方网站', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            // Image watermark settings (text, font size, colour).
            ['id' => 107, 'name' => 'bjyblog.water.text', 'value' => 'baijunyao.com', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 109, 'name' => 'bjyblog.water.size', 'value' => '15', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 110, 'name' => 'bjyblog.water.color', 'value' => '#008CBA', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 117, 'name' => 'bjyblog.icp', 'value' => '豫ICP备14009546号-3', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            // Value below is as redacted in the source data; kept verbatim.
            ['id' => 118, 'name' => 'bjyblog.admin_email', 'value' => '[email protected]', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 119, 'name' => 'bjyblog.copyright_word', 'value' => '本文为白俊遥原创文章,转载无需和我联系,但请注明来自<a href="http://baijunyao.com">白俊遥博客</a>http://baijunyao.com', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 123, 'name' => 'bjyblog.statistics', 'value' => '', 'created_at' => '2018-08-25 17:04:02', 'updated_at' => '2018-08-25 17:04:02', 'deleted_at' => null],
            ['id' => 125, 'name' => 'bjyblog.author', 'value' => '白俊遥', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 128, 'name' => 'bjyblog.baidu_site_url', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 141, 'name' => 'bjyblog.alt_word', 'value' => '白俊遥博客', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            // Mail transport settings; values intentionally left blank in the fixture.
            ['id' => 142, 'name' => 'mail.host', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 143, 'name' => 'mail.username', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 144, 'name' => 'mail.password', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 145, 'name' => 'mail.from.name', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 148, 'name' => 'bjyblog.notification_email', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 149, 'name' => 'bjyblog.head.title', 'value' => '白俊遥博客,技术博客,个人博客模板, php博客系统', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            // QQ group widget (article id, group number, embed code, QR-code image path).
            ['id' => 150, 'name' => 'bjyblog.qq_qun.article_id', 'value' => '1', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 151, 'name' => 'bjyblog.qq_qun.number', 'value' => '88199093', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 152, 'name' => 'bjyblog.qq_qun.code', 'value' => '<a target="_blank" href="//shang.qq.com/wpa/qunwpa?idkey=bba3fea90444ee6caf1fb1366027873fe14e86bada254d41ce67768fadd729ee"><img border="0" src="//pub.idqqimg.com/wpa/images/group.png" alt="白俊遥博客群" title="白俊遥博客群"></a>', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 153, 'name' => 'bjyblog.qq_qun.or_code', 'value' => '/uploads/images/default.png', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 154, 'name' => 'mail.driver', 'value' => 'smtp', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 155, 'name' => 'mail.port', 'value' => '465', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 156, 'name' => 'mail.encryption', 'value' => 'ssl', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 157, 'name' => 'mail.from.address', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 158, 'name' => 'sentry.dsn', 'value' => '', 'created_at' => '2018-08-22 21:03:01', 'updated_at' => '2018-08-22 21:03:01', 'deleted_at' => null],
            ['id' => 159, 'name' => 'database.connections.mysql.dump.dump_binary_path', 'value' => '/bin/', 'created_at' => '2018-12-03 21:39:22', 'updated_at' => '2018-12-03 21:39:22', 'deleted_at' => null],
            // Aliyun OSS filesystem credentials; blank placeholders in the fixture.
            ['id' => 160, 'name' => 'filesystems.disks.oss.access_id', 'value' => '', 'created_at' => '2018-12-04 22:29:52', 'updated_at' => '2018-12-04 22:29:52', 'deleted_at' => null],
            ['id' => 161, 'name' => 'filesystems.disks.oss.access_key', 'value' => '', 'created_at' => '2018-12-04 22:29:52', 'updated_at' => '2018-12-04 22:29:52', 'deleted_at' => null],
            ['id' => 162, 'name' => 'filesystems.disks.oss.bucket', 'value' => '', 'created_at' => '2018-12-04 22:29:52', 'updated_at' => '2018-12-04 22:29:52', 'deleted_at' => null],
            ['id' => 163, 'name' => 'filesystems.disks.oss.endpoint', 'value' => '', 'created_at' => '2018-12-04 22:29:52', 'updated_at' => '2018-12-04 22:29:52', 'deleted_at' => null],
            // Backup destinations stored as a JSON-encoded array string.
            ['id' => 164, 'name' => 'backup.backup.destination.disks', 'value' => '[]', 'created_at' => '2018-12-04 22:29:52', 'updated_at' => '2018-12-04 22:29:52', 'deleted_at' => null],
            ['id' => 165, 'name' => 'backup.notifications.mail.to', 'value' => '', 'created_at' => '2018-12-04 22:29:52', 'updated_at' => '2018-12-04 22:29:52', 'deleted_at' => null],
            ['id' => 166, 'name' => 'app.locale', 'value' => 'en', 'created_at' => '2019-02-26 21:10:52', 'updated_at' => '2019-02-26 21:10:52', 'deleted_at' => null],
            // Booleans are stored as the strings 'true'/'false', not real booleans.
            ['id' => 167, 'name' => 'bjyblog.seo.use_slug', 'value' => 'false', 'created_at' => '2019-05-19 19:43:00', 'updated_at' => '2019-05-19 19:43:00', 'deleted_at' => null],
            ['id' => 168, 'name' => 'bjyblog.social_share.select_plugin', 'value' => 'sharejs', 'created_at' => '2019-05-27 22:22:00', 'updated_at' => '2019-05-27 22:22:00', 'deleted_at' => null],
            // Raw JS option objects embedded verbatim into the page for each share plugin.
            ['id' => 169, 'name' => 'bjyblog.social_share.jssocials_config', 'value' => '{ shares: ["email", "twitter", "facebook", "googleplus", "linkedin", "pinterest", "stumbleupon", "pocket", "whatsapp", "messenger", "vkontakte", "telegram", "line"], showLabel: false, showCount: false, shareIn: "popup" }', 'created_at' => '2019-05-27 22:22:00', 'updated_at' => '2019-05-27 22:22:00', 'deleted_at' => null],
            ['id' => 170, 'name' => 'bjyblog.social_share.sharejs_config', 'value' => '{ sites: ["weibo", "qq", "wechat", "douban", "qzone", "linkedin", "facebook", "twitter", "google"] }', 'created_at' => '2019-05-27 22:22:00', 'updated_at' => '2019-05-27 22:22:00', 'deleted_at' => null],
            ['id' => 171, 'name' => 'bjyblog.logo_with_php_tag', 'value' => 'true', 'created_at' => '2019-05-28 23:15:00', 'updated_at' => '2019-05-28 23:15:00', 'deleted_at' => null],
            ['id' => 172, 'name' => 'bjyblog.cdn_domain', 'value' => '', 'created_at' => '2019-08-05 22:15:00', 'updated_at' => '2019-08-05 22:15:00', 'deleted_at' => null],
            ['id' => 173, 'name' => 'bjyblog.comment_audit', 'value' => 'false', 'created_at' => '2019-10-21 22:45:00', 'updated_at' => '2019-10-21 22:45:00', 'deleted_at' => null],
            // Baidu translate/service credentials; blank placeholders in the fixture.
            ['id' => 174, 'name' => 'services.baidu.appid', 'value' => '', 'created_at' => '2019-10-21 22:45:00', 'updated_at' => '2019-10-21 22:45:00', 'deleted_at' => null],
            ['id' => 175, 'name' => 'services.baidu.appkey', 'value' => '', 'created_at' => '2019-10-21 22:45:00', 'updated_at' => '2019-10-21 22:45:00', 'deleted_at' => null],
            ['id' => 176, 'name' => 'services.baidu.secret', 'value' => '', 'created_at' => '2019-10-21 22:45:00', 'updated_at' => '2019-10-21 22:45:00', 'deleted_at' => null],
        ]);
    }
}
{ "pile_set_name": "Github" }
package cn.dblearn.blog.mapper.book;

import cn.dblearn.blog.entity.book.BookNote;
import cn.dblearn.blog.entity.book.vo.BookNoteVO;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import com.baomidou.mybatisplus.extension.plugins.pagination.Page;
import org.apache.ibatis.annotations.Mapper;
import org.apache.ibatis.annotations.Param;

import java.util.List;
import java.util.Map;

/**
 * <p>
 * MyBatis-Plus mapper for book notes; custom statements live in the
 * corresponding XML mapper file.
 * </p>
 *
 * @author bobbi
 * @since 2019-02-13
 */
@Mapper
public interface BookNoteMapper extends BaseMapper<BookNote> {

    /**
     * Queries a page of note view objects.
     *
     * @param page   pagination state (page number / size)
     * @param params filter criteria forwarded to the XML statement
     * @return the notes of the requested page
     */
    List<BookNoteVO> listBookNoteVo(Page<BookNoteVO> page, @Param("params") Map<String, Object> params);

    /**
     * Queries a page of note view objects filtered by category conditions.
     *
     * @param page   pagination state (page number / size)
     * @param params filter criteria forwarded to the XML statement
     * @return the notes of the requested page
     */
    List<BookNoteVO> queryPageCondition(Page<BookNoteVO> page, @Param("params") Map<String, Object> params);

    /**
     * Increments the read counter of a note.
     *
     * @param id the note id
     */
    void updateReadNum(Integer id);

    /**
     * Loads a lightweight view object for a single note.
     *
     * @param bookNoteId the note id
     * @return the simple view object
     */
    BookNoteVO getSimpleBookNoteVo(Integer bookNoteId);

    /**
     * Lists lightweight note entities belonging to one book.
     *
     * @param bookId the book id
     * @return the book's notes
     */
    List<BookNote> listSimpleBookNote(Integer bookId);

    /**
     * Increments the like counter of a note.
     *
     * @param parseInt the note id
     *                 NOTE(review): parameter is misnamed after the call site's
     *                 Integer.parseInt(...); renaming is left alone here because
     *                 the XML statement may bind by this name — verify before changing.
     */
    void updateLikeNum(int parseInt);

    /**
     * Counts the notes attached to a category (used to block deleting
     * non-empty categories).
     *
     * @param categoryId the category id
     * @return the number of notes in that category
     */
    int checkByCategory(Integer categoryId);
}
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------
// <auto-generated>
//     This code was generated from a template.
//
//     Manual changes to this file may cause unexpected behavior in your application.
//     Manual changes to this file will be overwritten if the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Runtime.CompilerServices;

namespace Testing
{
    public partial class Master
    {
        // Hook for partial-class extensions; runs at the end of construction.
        partial void Init();

        /// <summary>
        /// Default constructor. Starts with an empty child collection.
        /// </summary>
        public Master()
        {
            Children = new HashSet<global::Testing.Child>();
            Init();
        }

        /*************************************************************************
         * Properties
         *************************************************************************/

        /// <summary>
        /// Backing field for Id
        /// </summary>
        internal int _Id;

        /// <summary>
        /// When provided in a partial class, allows value of Id to be changed before setting.
        /// </summary>
        partial void SetId(int oldValue, ref int newValue);

        /// <summary>
        /// When provided in a partial class, allows value of Id to be changed before returning.
        /// </summary>
        partial void GetId(ref int result);

        /// <summary>
        /// Identity, Indexed, Required
        /// </summary>
        [Key]
        [Required]
        public int Id
        {
            get
            {
                // Let a partial-class hook adjust the value, then persist the
                // (possibly adjusted) result back into the backing field.
                var current = _Id;
                GetId(ref current);
                _Id = current;
                return _Id;
            }
            protected set
            {
                // Give the partial-class hook a chance to veto/alter the new
                // value before it is stored; skip the write when unchanged.
                var previous = _Id;
                SetId(previous, ref value);
                if (previous != value)
                {
                    _Id = value;
                }
            }
        }

        /*************************************************************************
         * Navigation properties
         *************************************************************************/

        // Children of this master row; populated by the ORM, mutated internally.
        public virtual ICollection<global::Testing.Child> Children { get; protected set; }
    }
}
{ "pile_set_name": "Github" }