(function($K) { $K.add('module', 'autocomplete', { init: function(app, context) { this.app = app; this.$doc = app.$doc; this.$win = app.$win; this.$body = app.$body; this.animate = app.animate; // defaults var defaults = { url: false, min: 2, labelClass: false, target: false, param: false }; // context this.context = context; this.params = context.getParams(defaults); this.$element = context.getElement(); this.$target = context.getTarget(); }, start: function() { this._build(); this.timeout = null; this.$element.on('keyup.kube.autocomplete', this._open.bind(this)); }, stop: function() { this.$box.remove(); this.$element.off('.kube.autocomplete'); this.$doc.off('.kube.autocomplete'); this.$win.off('.kube.autocomplete'); }, // private _build: function() { this.$box = $K.dom('<div />'); this.$box.addClass('autocomplete'); this.$box.addClass('is-hidden'); this.$body.append(this.$box); if (this.$target && !this._isInputTarget()) { this.$target.addClass('autocomplete-labels'); var $closes = this.$target.find('.close'); $closes.on('click', this._removeLabel.bind(this)); } }, _open: function(e) { if (e) e.preventDefault(); clearTimeout(this.timeout); var value = this.$element.val(); if (value.length >= this.params.min) { this._resize(); this.$win.on('resize.kube.autocomplete', this._resize.bind(this)); this.$doc.on('click.kube.autocomplete', this._close.bind(this)); this.$box.addClass('is-open'); this._listen(e); } else { this._close(e); } }, _close: function(e) { if (e) e.preventDefault(); this.$box.removeClass('is-open'); this.$box.addClass('is-hidden'); this.$doc.off('.kube.autocomplete'); this.$win.off('.kube.autocomplete'); }, _getPlacement: function(pos, height) { return ((this.$doc.height() - (pos.top + height)) < this.$box.height()) ? 'top' : 'bottom'; }, _resize: function() { this.$box.width(this.$element.width()); }, _getParamName: function() { return (this.params.param) ? this.params.param : this.$element.attr('name'); }, _getTargetName: function() { var name = this.$target.attr('data-name'); return (name) ? name : this.$target.attr('id'); }, _lookup: function() { var data = this._getParamName() + '=' + this.$element.val(); $K.ajax.post({ url: this.params.url, data: data, success: this._complete.bind(this) }); }, _complete: function(json) { this.$box.html(''); if (json.length === 0) return this._close(); for (var i = 0; i < json.length; i++) { var $item = $K.dom('<a>'); $item.attr('href', '#'); $item.attr('rel', json[i].id); $item.html(json[i].name); $item.on('click', this._set.bind(this)); this.$box.append($item); } var pos = this.$element.offset(); var height = this.$element.height(); var width = this.$element.width(); var placement = this._getPlacement(pos, height); var top = (placement === 'top') ? 
(pos.top - this.$box.height() - height) : (pos.top + height); this.$box.css({ width: width + 'px', top: top + 'px', left: pos.left + 'px' }); this.$box.removeClass('is-hidden'); }, _listen: function(e) { switch(e.which) { case 40: // down e.preventDefault(); this._select('next'); break; case 38: // up e.preventDefault(); this._select('prev'); break; case 13: // enter e.preventDefault(); this._set(); break; case 27: // esc this._close(e); break; default: this.timeout = setTimeout(this._lookup.bind(this), 300); break; } }, _select: function(type) { var $links = this.$box.find('a'); var $active = this.$box.find('.is-active'); $links.removeClass('is-active'); var $item = this._selectItem($active, $links, type); $item.addClass('is-active'); }, _selectItem: function($active, $links, type) { var $item; var isActive = ($active.length !== 0); var size = (type === 'next') ? 0 : ($links.length - 1); if (isActive) { $item = $active[type](); } if (!isActive || !$item || $item.length === 0) { $item = $links.eq(size); } return $item; }, _set: function(e) { var $active = this.$box.find('.is-active'); if (e) { e.preventDefault(); $active = $K.dom(e.target); } var id = $active.attr('rel'); var value = $active.html(); if (this.$target.length !== 0) { if (this._isInputTarget()) { this.$target.val(value); } else { var $added = this.$target.find('[data-id="' + id + '"]'); if ($added.length === 0) { this._addLabel(id, value); } } this.$element.val(''); } else { this.$element.val(value); } this.$element.focus(); this.app.broadcast('autocomplete.set', this, value); this._close(); }, _addLabel: function(id, name) { var $label = $K.dom('<span>'); $label.addClass('label'); $label.attr('data-id', id); $label.text(name + ' '); if (this.params.labelClass) { $label.addClass(this.params.labelClass); } var $close = $K.dom('<span>'); $close.addClass('close'); $close.on('click', this._removeLabel.bind(this)); var $input = $K.dom('<input>'); $input.attr('type', 'hidden'); $input.attr('name', this._getTargetName() + '[]'); $input.val(name); $label.append($close); $label.append($input); this.$target.append($label); }, _isInputTarget: function() { return (this.$target.get().tagName === 'INPUT'); }, _removeLabel: function(e) { e.preventDefault(); var $el = $K.dom(e.target); var $label = $el.closest('.label'); this.animate.run($label, 'fadeOut', function() { $label.remove(); }.bind(this)) } }); })(Kube);
/* [auto_generated] boost/numeric/odeint/util/ublas_wrapper.hpp [begin_description] Resizing for ublas::vector and ublas::matrix [end_description] Copyright 2011-2013 Mario Mulansky Copyright 2011-2013 Karsten Ahnert Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) */ #ifndef BOOST_NUMERIC_ODEINT_UTIL_UBLAS_WRAPPER_HPP_INCLUDED #define BOOST_NUMERIC_ODEINT_UTIL_UBLAS_WRAPPER_HPP_INCLUDED #include <boost/type_traits/integral_constant.hpp> #include <boost/numeric/ublas/vector.hpp> #include <boost/numeric/ublas/matrix.hpp> #include <boost/numeric/ublas/lu.hpp> #include <boost/numeric/ublas/vector_expression.hpp> #include <boost/numeric/ublas/matrix_expression.hpp> #include <boost/numeric/odeint/algebra/vector_space_algebra.hpp> #include <boost/numeric/odeint/algebra/default_operations.hpp> #include <boost/numeric/odeint/util/is_resizeable.hpp> #include <boost/numeric/odeint/util/state_wrapper.hpp> /* extend ublas by a few operations */ /* map norm_inf onto reduce( v , default_operations::maximum ) */ namespace boost { namespace numeric { namespace odeint { template< typename T , typename A > struct vector_space_norm_inf< boost::numeric::ublas::vector<T,A> > { typedef T result_type; result_type operator()( const boost::numeric::ublas::vector<T,A> &x ) const { return boost::numeric::ublas::norm_inf( x ); } }; template< class T , class L , class A > struct vector_space_norm_inf< boost::numeric::ublas::matrix<T,L,A> > { typedef T result_type; result_type operator()( const boost::numeric::ublas::matrix<T,L,A> &x ) const { return boost::numeric::ublas::norm_inf( x ); } }; } } } /* additional operations: * abs( v ) * v / w * a + v */ namespace boost { namespace numeric { namespace ublas { // elementwise abs - calculates absolute values of the elements template<class T> struct scalar_abs: public scalar_unary_functor<T> { typedef typename scalar_unary_functor<T>::value_type value_type; typedef typename scalar_unary_functor<T>::argument_type argument_type; typedef typename scalar_unary_functor<T>::result_type result_type; static BOOST_UBLAS_INLINE result_type apply (argument_type t) { using std::abs; return abs (t); } }; // (abs v) [i] = abs (v [i]) template<class E> BOOST_UBLAS_INLINE typename vector_unary_traits<E, scalar_abs<typename E::value_type> >::result_type abs (const vector_expression<E> &e) { typedef typename vector_unary_traits<E, scalar_abs<typename E::value_type> >::expression_type expression_type; return expression_type (e ()); } // (abs m) [i] = abs (m [i]) template<class E> BOOST_UBLAS_INLINE typename matrix_unary1_traits<E, scalar_abs<typename E::value_type> >::result_type abs (const matrix_expression<E> &e) { typedef typename matrix_unary1_traits<E, scalar_abs<typename E::value_type> >::expression_type expression_type; return expression_type (e ()); } // elementwise division (v1 / v2) [i] = v1 [i] / v2 [i] template<class E1, class E2> BOOST_UBLAS_INLINE typename vector_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::result_type operator / (const vector_expression<E1> &e1, const vector_expression<E2> &e2) { typedef typename vector_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::expression_type expression_type; return expression_type (e1 (), e2 ()); } // elementwise division (m1 / m2) [i] = m1 [i] / m2 [i] template<class E1, class E2> BOOST_UBLAS_INLINE typename matrix_binary_traits<E1, E2, 
scalar_divides<typename E1::value_type, typename E2::value_type> >::result_type operator / (const matrix_expression<E1> &e1, const matrix_expression<E2> &e2) { typedef typename matrix_binary_traits<E1, E2, scalar_divides<typename E1::value_type, typename E2::value_type> >::expression_type expression_type; return expression_type (e1 (), e2 ()); } // addition with scalar // (t + v) [i] = t + v [i] template<class T1, class E2> BOOST_UBLAS_INLINE typename enable_if< is_convertible<T1, typename E2::value_type >, typename vector_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::result_type >::type operator + (const T1 &e1, const vector_expression<E2> &e2) { typedef typename vector_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::expression_type expression_type; return expression_type (e1, e2 ()); } // addition with scalar // (t + m) [i] = t + m [i] template<class T1, class E2> BOOST_UBLAS_INLINE typename enable_if< is_convertible<T1, typename E2::value_type >, typename matrix_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::result_type >::type operator + (const T1 &e1, const matrix_expression<E2> &e2) { typedef typename matrix_binary_scalar1_traits<const T1, E2, scalar_plus<T1, typename E2::value_type> >::expression_type expression_type; return expression_type (e1, e2 ()); } } } } /* add resize functionality */ namespace boost { namespace numeric { namespace odeint { /* * resizeable specialization for boost::numeric::ublas::vector */ template< class T , class A > struct is_resizeable< boost::numeric::ublas::vector< T , A > > { typedef boost::true_type type; const static bool value = type::value; }; /* * resizeable specialization for boost::numeric::ublas::matrix */ template< class T , class L , class A > struct is_resizeable< boost::numeric::ublas::matrix< T , L , A > > { typedef boost::true_type type; const static bool value = type::value; }; /* * resizeable specialization for boost::numeric::ublas::permutation_matrix */ template< class T , class A > struct is_resizeable< boost::numeric::ublas::permutation_matrix< T , A > > { typedef boost::true_type type; const static bool value = type::value; }; // specialization for ublas::matrix // same size and resize specialization for matrix-matrix resizing template< class T , class L , class A , class T2 , class L2 , class A2 > struct same_size_impl< boost::numeric::ublas::matrix< T , L , A > , boost::numeric::ublas::matrix< T2 , L2 , A2 > > { static bool same_size( const boost::numeric::ublas::matrix< T , L , A > &m1 , const boost::numeric::ublas::matrix< T2 , L2 , A2 > &m2 ) { return ( ( m1.size1() == m2.size1() ) && ( m1.size2() == m2.size2() ) ); } }; template< class T , class L , class A , class T2 , class L2 , class A2 > struct resize_impl< boost::numeric::ublas::matrix< T , L , A > , boost::numeric::ublas::matrix< T2 , L2 , A2 > > { static void resize( boost::numeric::ublas::matrix< T , L , A > &m1 , const boost::numeric::ublas::matrix< T2 , L2 , A2 > &m2 ) { m1.resize( m2.size1() , m2.size2() ); } }; // same size and resize specialization for matrix-vector resizing template< class T , class L , class A , class T_V , class A_V > struct same_size_impl< boost::numeric::ublas::matrix< T , L , A > , boost::numeric::ublas::vector< T_V , A_V > > { static bool same_size( const boost::numeric::ublas::matrix< T , L , A > &m , const boost::numeric::ublas::vector< T_V , A_V > &v ) { return ( ( m.size1() == v.size() ) && ( m.size2() == v.size() ) ); } }; template< 
class T , class L , class A , class T_V , class A_V > struct resize_impl< boost::numeric::ublas::matrix< T , L , A > , boost::numeric::ublas::vector< T_V , A_V > > { static void resize( boost::numeric::ublas::matrix< T , L , A > &m , const boost::numeric::ublas::vector< T_V , A_V > &v ) { m.resize( v.size() , v.size() ); } }; // specialization for ublas::permutation_matrix // same size and resize specialization for matrix-vector resizing template< class T , class A , class T_V , class A_V > struct same_size_impl< boost::numeric::ublas::permutation_matrix< T , A > , boost::numeric::ublas::vector< T_V , A_V > > { static bool same_size( const boost::numeric::ublas::permutation_matrix< T , A > &m , const boost::numeric::ublas::vector< T_V , A_V > &v ) { return ( m.size() == v.size() ); // && ( m.size2() == v.size() ) ); } }; template< class T , class A , class T_V , class A_V > struct resize_impl< boost::numeric::ublas::vector< T_V , A_V > , boost::numeric::ublas::permutation_matrix< T , A > > { static void resize( const boost::numeric::ublas::vector< T_V , A_V > &v, boost::numeric::ublas::permutation_matrix< T , A > &m ) { m.resize( v.size() , v.size() ); } }; template< class T , class A > struct state_wrapper< boost::numeric::ublas::permutation_matrix< T , A > > // with resizing { typedef boost::numeric::ublas::permutation_matrix< T , A > state_type; typedef state_wrapper< state_type > state_wrapper_type; state_type m_v; state_wrapper() : m_v( 1 ) // permutation matrix constructor requires a size, choose 1 as default { } }; } } } #endif // BOOST_NUMERIC_ODEINT_UTIL_UBLAS_WRAPPER_HPP_INCLUDED
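/*
 * Hedged usage sketch (not part of the header above): with the norm_inf and
 * resize specializations provided by ublas_wrapper.hpp, a ublas::vector can be
 * used directly as the state type of an odeint stepper. The ODE below (simple
 * exponential decay), the decay rate and the step sizes are illustrative
 * assumptions only.
 */
#include <boost/numeric/odeint.hpp>
#include <boost/numeric/ublas/vector.hpp>

typedef boost::numeric::ublas::vector< double > state_type;

void decay( const state_type &x , state_type &dxdt , double /* t */ )
{
    for( std::size_t i = 0 ; i < x.size() ; ++i )
        dxdt[i] = -0.5 * x[i];
}

int main()
{
    state_type x( 3 );
    x[0] = 1.0; x[1] = 2.0; x[2] = 3.0;

    // classical fourth-order Runge-Kutta with the default (range) algebra
    boost::numeric::odeint::runge_kutta4< state_type > stepper;
    boost::numeric::odeint::integrate_const( stepper , decay , x , 0.0 , 10.0 , 0.01 );
    return 0;
}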
require "backup/config/dsl" require "backup/config/helpers" module Backup module Config class Error < Backup::Error; end DEFAULTS = { config_file: "config.rb", data_path: ".data", tmp_path: ".tmp" } class << self include Utilities::Helpers attr_reader :user, :root_path, :config_file, :data_path, :tmp_path # Loads the user's +config.rb+ and all model files. def load(options = {}) update(options) # from the command line unless File.exist?(config_file) raise Error, "Could not find configuration file: '#{config_file}'." end config = File.read(config_file) version = Backup::VERSION.split(".").first unless config =~ /^# Backup v#{ version }\.x Configuration$/ raise Error, <<-EOS Invalid Configuration File The configuration file at '#{config_file}' does not appear to be a Backup v#{version}.x configuration file. If you have upgraded to v#{version}.x from a previous version, you need to upgrade your configuration file. Please see the instructions for upgrading in the Backup documentation. EOS end dsl = DSL.new dsl.instance_eval(config, config_file) update(dsl._config_options) # from config.rb update(options) # command line takes precedence Dir[File.join(File.dirname(config_file), "models", "*.rb")].each do |model| dsl.instance_eval(File.read(model), model) end end def hostname @hostname ||= run(utility(:hostname)) end private # If :root_path is set in the options, all paths will be updated. # Otherwise, only the paths given will be updated. def update(options = {}) root_path = options[:root_path].to_s.strip new_root = root_path.empty? ? false : set_root_path(root_path) DEFAULTS.each do |name, ending| set_path_variable(name, options[name], ending, new_root) end end # Sets the @root_path to the given +path+ and returns it. # Raises an error if the given +path+ does not exist. def set_root_path(path) # allows #reset! to set the default @root_path, # then use #update to set all other paths, # without requiring that @root_path exist. return @root_path if path == @root_path path = File.expand_path(path) unless File.directory?(path) raise Error, <<-EOS Root Path Not Found When specifying a --root-path, the path must exist. Path was: #{path} EOS end @root_path = path end def set_path_variable(name, path, ending, root_path) # strip any trailing '/' in case the user supplied this as part of # an absolute path, so we can match it against File.expand_path() path = path.to_s.sub(/\/\s*$/, "").lstrip new_path = false # If no path is given, the variable will not be set/updated # unless a root_path was given. In which case the value will # be updated with our default ending. if path.empty? new_path = File.join(root_path, ending) if root_path else # When a path is given, the variable will be set/updated. # If the path is relative, it will be joined with root_path (if given), # or expanded relative to PWD. new_path = File.expand_path(path) unless path == new_path new_path = File.join(root_path, path) if root_path end end instance_variable_set(:"@#{name}", new_path) if new_path end def reset! @user = ENV["USER"] || Etc.getpwuid.name @root_path = File.join(File.expand_path(ENV["HOME"] || ""), "Backup") update(root_path: @root_path) end end reset! # set defaults on load end end
// CodeContracts // // Copyright (c) Microsoft Corporation // // All rights reserved. // // MIT License // // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. using System; using System.Collections.Generic; using System.Text; using System.Diagnostics.Contracts; namespace Tests.Sources { [ContractClass(typeof(GenericAbstractClassContracts<,>))] public abstract class GenericAbstractClass<A,B> where A: class,B { public abstract bool IsMatch(B b, A a); public abstract B ReturnFirst(B[] args, A match, bool behave); public abstract A[][] Collection(int x, int y); public abstract A FirstNonNullMatch(bool behave, A[] elems); public abstract C[] GenericMethod<C>(A[] elems); } [ContractClassFor(typeof(GenericAbstractClass<,>))] internal abstract class GenericAbstractClassContracts<A,B> : GenericAbstractClass<A,B> where A : class, B { public override bool IsMatch(B b, A a) { throw new NotImplementedException(); } public override B ReturnFirst(B[] args, A match, bool behave) { Contract.Requires(args != null); Contract.Requires(args.Length > 0); Contract.Ensures(Contract.Exists(0, args.Length, i => args[i].Equals(Contract.Result<B>()) && IsMatch(args[i], match))); return default(B); } public override A[][] Collection(int x, int y) { Contract.Ensures(Contract.ForAll(Contract.Result<A[][]>(), nested => nested != null && nested.Length == y && Contract.ForAll(nested, elem => elem != null))); Contract.Ensures(Contract.ForAll(0, x, index => Contract.Result<A[][]>()[index] != null)); throw new NotImplementedException(); } public override A FirstNonNullMatch(bool behave, A[] elems) { // meaningless, but testing our closures, in particular inner one with a static closure referring to result. Contract.Ensures(Contract.Exists(0, elems.Length, index => elems[index] != null && elems[index] == Contract.Result<A>() && Contract.ForAll(0, index, prior => Contract.Result<A>() != null))); // See if we are properly sharing fields. Contract.Ensures(Contract.Exists(0, elems.Length, index => elems[index] != null && elems[index] == Contract.Result<A>() && Contract.ForAll(0, index, prior => Contract.Result<A>() != null))); // See if we are properly sharing fields. 
Contract.Ensures(Contract.Exists(0, elems.Length, index => elems[index] != null && Contract.ForAll(0, index, prior => Contract.Result<A>() != null))); throw new NotImplementedException(); } public override C[] GenericMethod<C>(A[] elems) { Contract.Requires(elems != null); Contract.Ensures(Contract.Result<C[]>() != null); Contract.Ensures(Contract.ForAll(Contract.Result<C[]>(), resultElem => Contract.Exists(elems, orig => resultElem.Equals(orig)))); throw new NotImplementedException(); } } public class ImplForGenericAbstractClass : GenericAbstractClass<string, string> { public override bool IsMatch(string b, string a) { return b == a; } public override string ReturnFirst(string[] args, string match, bool behave) { for (int i = 0; i < args.Length; i++) { if (IsMatch(args[i], match)) return args[i]; } return default(string); } public override string[][] Collection(int x, int y) { var result = new string[x][]; for (int i=0; i<result.Length; i++) { result[i] = new string[y]; for (int j = 0; j < y; j++) { if (x == 5 && y == 5 && i == 4 && j == 4) { // behave badly continue; } result[i][j] = "Foo"; } } return result; } public override string FirstNonNullMatch(bool behave, string[] elems) { if (!behave) return "foobar"; for (int i = 0; i < elems.Length; i++) { if (elems[i] != null) return elems[i]; } return null; } public override C[] GenericMethod<C>(string[] elems) { List<C> result = new List<C>(); foreach (var elem in elems) { if (elem is C) { result.Add((C)(object)elem); } } if (typeof(C) == typeof(int)) { // behave badly result.Add((C)(object)55); } return result.ToArray(); } } partial class TestMain { partial void Run() { var i = new ImplForGenericAbstractClass(); i.FirstNonNullMatch(behave, new string[]{null, "a",null,"b"}); } public ContractFailureKind NegativeExpectedKind = ContractFailureKind.Postcondition; public string NegativeExpectedCondition = "Contract.Exists(0, elems.Length, index => elems[index] != null && elems[index] == Contract.Result<A>() && Contract.ForAll(0, index, prior => Contract.Result<A>() != null))"; } }
<textarea name={{ UEditor.name }} id=id_{{ UEditor.name }} style="display:inline-block;width:{{ UEditor.width }}px;{{ UEditor.css }}">{{UEditor.value}}</textarea>
<script type="text/javascript">
    var id_{{ UEditor.name }} = new baidu.editor.ui.Editor({
        "UEDITOR_HOME_URL": "{{ STATIC_URL }}ueditor/",
        {% ifnotequal UEditor.toolbars None %}"toolbars": {{ UEditor.toolbars|safe }},{% endifnotequal %}
        "imageUrl": "/ueditor/ImageUp/{{ UEditor.imagePath }}",
        "imagePath": "{{ MEDIA_URL }}{{ UEditor.imagePath }}",
        "scrawlUrl": "/ueditor/scrawlUp/{{ UEditor.scrawlPath }}",
        "scrawlPath": "{{ MEDIA_URL }}{{ UEditor.scrawlPath }}",
        "imageManagerUrl": "/ueditor/ImageManager/{{ UEditor.imageManagerPath }}",
        "imageManagerPath": "{{ MEDIA_URL }}{{ UEditor.imageManagerPath }}",
        "catcherUrl": "/ueditor/RemoteCatchImage/{{ UEditor.imagePath }}",
        "catcherPath": "{{ MEDIA_URL }}{{ UEditor.imagePath }}",
        "fileUrl": "/ueditor/FileUp/{{ UEditor.filePath }}",
        "filePath": "{{ MEDIA_URL }}{{ UEditor.filePath }}",
        "getMovieUrl": "/ueditor/SearchMovie/",
        "sourceEditorFirst": {{ UEditor.sourceEditorFirst }}
        {% ifnotequal UEditor.options '' %},{{ UEditor.options|safe }}{% endifnotequal %}
    });
    id_{{ UEditor.name }}.render('id_{{ UEditor.name }}');
    id_{{ UEditor.name }}.addListener('ready', function () {
        id_{{ UEditor.name }}.setHeight({{ UEditor.height }});
    });
</script>
package problem0958

import (
	"testing"

	"github.com/aQuaYi/LeetCode-in-Go/kit"
	"github.com/stretchr/testify/assert"
)

// tcs is the test case slice
var tcs = []struct {
	root []int
	ans  bool
}{
	{
		[]int{1, 2, 3, 4, 5, 6},
		true,
	},
	{
		[]int{1, 2, 3, 4, 5, kit.NULL, 7},
		false,
	},
	// more test cases can be added here
}

func Test_isCompleteTree(t *testing.T) {
	ast := assert.New(t)
	for _, tc := range tcs {
		root := kit.Ints2TreeNode(tc.root)
		ast.Equal(tc.ans, isCompleteTree(root), "input: %v", tc)
	}
}

func Benchmark_isCompleteTree(b *testing.B) {
	for i := 0; i < b.N; i++ {
		for _, tc := range tcs {
			root := kit.Ints2TreeNode(tc.root)
			isCompleteTree(root)
		}
	}
}
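// Hedged sketch of the function under test (not necessarily the repository's
// actual solution): a level-order traversal that fails as soon as a non-nil
// node appears after the first empty slot. It assumes kit.TreeNode has the
// usual Val/Left/Right layout used by kit.Ints2TreeNode.
package problem0958

import "github.com/aQuaYi/LeetCode-in-Go/kit"

func isCompleteTree(root *kit.TreeNode) bool {
	queue := []*kit.TreeNode{root}
	seenNil := false

	for len(queue) > 0 {
		node := queue[0]
		queue = queue[1:]

		if node == nil {
			seenNil = true
			continue
		}
		if seenNil {
			// A real node after an empty slot means the tree is not complete.
			return false
		}
		queue = append(queue, node.Left, node.Right)
	}

	return true
}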
require('../../modules/es6.number.is-finite');
module.exports = require('../../modules/$.core').Number.isFinite;
/*
 * CDDL HEADER START
 *
 * The contents of this file are subject to the terms of the
 * Common Development and Distribution License, Version 1.0 only
 * (the "License"). You may not use this file except in compliance
 * with the License.
 *
 * You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
 * or http://www.opensolaris.org/os/licensing.
 * See the License for the specific language governing permissions
 * and limitations under the License.
 *
 * When distributing Covered Code, include this CDDL HEADER in each
 * file and include the License file at usr/src/OPENSOLARIS.LICENSE.
 * If applicable, add the following below this CDDL HEADER, with the
 * fields enclosed by brackets "[]" replaced with your own identifying
 * information: Portions Copyright [yyyy] [name of copyright owner]
 *
 * CDDL HEADER END
 */
/*
 * Copyright 2004 Sun Microsystems, Inc. All rights reserved.
 * Use is subject to license terms.
 */

#ifndef _MACH_SDT_H
#define _MACH_SDT_H

#include <mach/machine/sdt.h>

#endif /* _MACH_SDT_H */
// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #ifndef BASE_LOCATION_H_ #define BASE_LOCATION_H_ #include <stddef.h> #include <cassert> #include <functional> #include <string> #include "base/base_export.h" #include "base/debug/debugging_buildflags.h" #include "base/hash/hash.h" #include "build/build_config.h" namespace base { #if defined(__has_builtin) // Clang allows detection of these builtins. #define SUPPORTS_LOCATION_BUILTINS \ (__has_builtin(__builtin_FUNCTION) && __has_builtin(__builtin_FILE) && \ __has_builtin(__builtin_LINE)) #elif defined(COMPILER_GCC) && __GNUC__ >= 7 // GCC has supported these for a long time, but they point at the function // declaration in the case of default arguments, rather than at the call site. #define SUPPORTS_LOCATION_BUILTINS 1 #else #define SUPPORTS_LOCATION_BUILTINS 0 #endif // Location provides basic info where of an object was constructed, or was // significantly brought to life. class BASE_EXPORT Location { public: Location(); Location(const Location& other); // Only initializes the file name and program counter, the source information // will be null for the strings, and -1 for the line number. // TODO(http://crbug.com/760702) remove file name from this constructor. Location(const char* file_name, const void* program_counter); // Constructor should be called with a long-lived char*, such as __FILE__. // It assumes the provided value will persist as a global constant, and it // will not make a copy of it. Location(const char* function_name, const char* file_name, int line_number, const void* program_counter); // Comparator for hash map insertion. The program counter should uniquely // identify a location. bool operator==(const Location& other) const { return program_counter_ == other.program_counter_; } // Returns true if there is source code location info. If this is false, // the Location object only contains a program counter or is // default-initialized (the program counter is also null). bool has_source_info() const { return function_name_ && file_name_; } // Will be nullptr for default initialized Location objects and when source // names are disabled. const char* function_name() const { return function_name_; } // Will be nullptr for default initialized Location objects and when source // names are disabled. const char* file_name() const { return file_name_; } // Will be -1 for default initialized Location objects and when source names // are disabled. int line_number() const { return line_number_; } // The address of the code generating this Location object. Should always be // valid except for default initialized Location objects, which will be // nullptr. const void* program_counter() const { return program_counter_; } // Converts to the most user-readable form possible. If function and filename // are not available, this will return "pc:<hex address>". 
std::string ToString() const; static Location CreateFromHere(const char* file_name); static Location CreateFromHere(const char* function_name, const char* file_name, int line_number); #if SUPPORTS_LOCATION_BUILTINS && BUILDFLAG(ENABLE_LOCATION_SOURCE) static Location Current(const char* function_name = __builtin_FUNCTION(), const char* file_name = __builtin_FILE(), int line_number = __builtin_LINE()); #elif SUPPORTS_LOCATION_BUILTINS static Location Current(const char* file_name = __builtin_FILE()); #else static Location Current(); #endif private: const char* function_name_ = nullptr; const char* file_name_ = nullptr; int line_number_ = -1; const void* program_counter_ = nullptr; }; BASE_EXPORT const void* GetProgramCounter(); // The macros defined here will expand to the current function. #if BUILDFLAG(ENABLE_LOCATION_SOURCE) // Full source information should be included. #define FROM_HERE FROM_HERE_WITH_EXPLICIT_FUNCTION(__func__) #define FROM_HERE_WITH_EXPLICIT_FUNCTION(function_name) \ ::base::Location::CreateFromHere(function_name, __FILE__, __LINE__) #else // TODO(http://crbug.com/760702) remove the __FILE__ argument from these calls. #define FROM_HERE ::base::Location::CreateFromHere(__FILE__) #define FROM_HERE_WITH_EXPLICIT_FUNCTION(function_name) \ ::base::Location::CreateFromHere(function_name, __FILE__, -1) #endif } // namespace base namespace std { // Specialization for using Location in hash tables. template <> struct hash<::base::Location> { std::size_t operator()(const ::base::Location& loc) const { const void* program_counter = loc.program_counter(); return base::FastHash(base::as_bytes(base::make_span(&program_counter, 1))); } }; } // namespace std #endif // BASE_LOCATION_H_
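// Hedged usage sketch (not part of the header above): how a call site is
// typically captured and printed. The LOG macro is assumed to come from
// base/logging.h; only Location::Current() and ToString() are taken from the
// header itself.
#include "base/location.h"
#include "base/logging.h"

void LogCallSite(const base::Location& from_here = base::Location::Current()) {
  // With source info enabled this prints function, file and line; otherwise it
  // falls back to "pc:<hex address>" as documented above.
  LOG(INFO) << "called from " << from_here.ToString();
}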
// Copyright 2008-2018 Yolo Technologies, Inc. All Rights Reserved. https://www.comblockengine.com

#include "db_exception.h"
#include "db_interface_redis.h"
#include "db_interface/db_interface.h"

namespace KBEngine
{

//-------------------------------------------------------------------------------------
DBException::DBException(DBInterface* pdbi) :
	errStr_(static_cast<DBInterfaceRedis*>(pdbi)->getstrerror()),
	errNum_(static_cast<DBInterfaceRedis*>(pdbi)->getlasterror())
{
}

//-------------------------------------------------------------------------------------
DBException::~DBException() throw()
{
}

//-------------------------------------------------------------------------------------
bool DBException::shouldRetry() const
{
	return (errNum_ == REDIS_ERR_OOM) || (errNum_ == REDIS_ERR_OTHER);
}

//-------------------------------------------------------------------------------------
bool DBException::isLostConnection() const
{
	return (errNum_ == REDIS_ERR_IO) || (errNum_ == REDIS_ERR_EOF);
}

//-------------------------------------------------------------------------------------
}

// db_exception.cpp
#!/bin/bash
# Script for running all tests in this directory.
# This script has to be run from its own directory, as the usage message shows.

# main ------------------------------------------------------------------------

if test "$#" -ne 0; then
    echo "Usage: ./test_all.sh"
    exit 1
fi

mkdir -p log
TIMESTAMP=`date +'%Y-%m-%d-%H-%M-%S'`
LOGFILE=log/$TIMESTAMP-test_all.sh

GITVERSION=`git version`
if [[ "$GITVERSION" ]]; then
    echo 'Git is available in the working directory:' >> $LOGFILE 2>&1
    echo '  Merlin version: ' "`git describe --tags --always`" >> $LOGFILE 2>&1
    echo '  branch: ' "`git rev-parse --abbrev-ref HEAD`" >> $LOGFILE 2>&1
    echo '  status: ' >> ${LOGFILE}.gitstatus 2>&1
    git status >> ${LOGFILE}.gitstatus 2>&1
    echo '  diff to Merlin version: ' >> ${LOGFILE}.gitdiff 2>&1
    git diff >> ${LOGFILE}.gitdiff 2>&1
    echo ' '
fi

bash ./test_install.sh >> $LOGFILE 2>&1
source ../src/setup_env.sh
python ./test_classes.py >> $LOGFILE 2>&1
bash ./test_training.sh >> $LOGFILE 2>&1
<?xml version="1.0"?> <ZopeData> <record id="1" aka="AAAAAAAAAAE="> <pickle> <global name="ProxyField" module="Products.ERP5Form.ProxyField"/> </pickle> <pickle> <dictionary> <item> <key> <string>id</string> </key> <value> <string>my_description</string> </value> </item> <item> <key> <string>message_values</string> </key> <value> <dictionary> <item> <key> <string>external_validator_failed</string> </key> <value> <string>The input failed the external validator.</string> </value> </item> </dictionary> </value> </item> <item> <key> <string>overrides</string> </key> <value> <dictionary> <item> <key> <string>extra_context</string> </key> <value> <string></string> </value> </item> <item> <key> <string>field_id</string> </key> <value> <string></string> </value> </item> <item> <key> <string>form_id</string> </key> <value> <string></string> </value> </item> </dictionary> </value> </item> <item> <key> <string>tales</string> </key> <value> <dictionary> <item> <key> <string>extra_context</string> </key> <value> <string></string> </value> </item> <item> <key> <string>field_id</string> </key> <value> <string></string> </value> </item> <item> <key> <string>form_id</string> </key> <value> <string></string> </value> </item> </dictionary> </value> </item> <item> <key> <string>values</string> </key> <value> <dictionary> <item> <key> <string>extra_context</string> </key> <value> <list/> </value> </item> <item> <key> <string>field_id</string> </key> <value> <string>my_description</string> </value> </item> <item> <key> <string>form_id</string> </key> <value> <string>BaseConsulting_FieldLibrary</string> </value> </item> </dictionary> </value> </item> </dictionary> </pickle> </record> </ZopeData>
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!126 &1
NavMeshProjectSettings:
  m_ObjectHideFlags: 0
  serializedVersion: 2
  areas:
  - name: Walkable
    cost: 1
  - name: Not Walkable
    cost: 1
  - name: Jump
    cost: 2
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  - name:
    cost: 1
  m_LastAgentTypeID: -887442657
  m_Settings:
  - serializedVersion: 2
    agentTypeID: 0
    agentRadius: 0.5
    agentHeight: 2
    agentSlope: 45
    agentClimb: 0.75
    ledgeDropHeight: 0
    maxJumpAcrossDistance: 0
    minRegionArea: 2
    manualCellSize: 0
    cellSize: 0.16666667
    manualTileSize: 0
    tileSize: 256
    accuratePlacement: 0
    debug:
      m_Flags: 0
  m_SettingNames:
  - Humanoid
-- Loads pre-trained word embeddings from either Word2Vec or Glove assert(get_id_from_word) assert(common_w2v_freq_words) assert(total_num_words) word_vecs_size = 300 -- Loads pre-trained glove or word2vec embeddings: if opt.word_vecs == 'glove' then -- Glove downloaded from: http://nlp.stanford.edu/projects/glove/ w2v_txtfilename = default_path .. 'Glove/glove.840B.300d.txt' w2v_t7filename = opt.root_data_dir .. 'generated/glove.840B.300d.t7' w2v_reader = 'words/w2v/glove_reader.lua' elseif opt.word_vecs == 'w2v' then -- Word2Vec downloaded from: https://code.google.com/archive/p/word2vec/ w2v_binfilename = default_path .. 'Word2Vec/GoogleNews-vectors-negative300.bin' w2v_t7filename = opt.root_data_dir .. 'generated/GoogleNews-vectors-negative300.t7' w2v_reader = 'words/w2v/word2vec_reader.lua' end ---------------------- Code: ----------------------- w2vutils = {} print('==> Loading ' .. opt.word_vecs .. ' vectors') if not paths.filep(w2v_t7filename) then print(' ---> t7 file NOT found. Loading w2v from the bin/txt file instead (slower).') w2vutils.M = require(w2v_reader) print('Writing t7 File for future usage. Next time Word2Vec loading will be faster!') torch.save(w2v_t7filename, w2vutils.M) else print(' ---> from t7 file.') w2vutils.M = torch.load(w2v_t7filename) end -- Move the word embedding matrix on the GPU if we do some training. -- In this way we can perform word embedding lookup much faster. if opt and string.find(opt.type, 'cuda') then w2vutils.M = w2vutils.M:cuda() end ---------- Define additional functions ----------------- -- word -> vec w2vutils.get_w_vec = function (self,word) local w_id = get_id_from_word(word) return w2vutils.M[w_id]:clone() end -- word_id -> vec w2vutils.get_w_vec_from_id = function (self,w_id) return w2vutils.M[w_id]:clone() end w2vutils.lookup_w_vecs = function (self,word_id_tensor) assert(word_id_tensor:dim() <= 2, 'Only word id tensors w/ 1 or 2 dimensions are supported.') local output = torch.FloatTensor() local word_ids = word_id_tensor:long() if opt and string.find(opt.type, 'cuda') then output = output:cuda() word_ids = word_ids:cuda() end if word_ids:dim() == 2 then output:index(w2vutils.M, 1, word_ids:view(-1)) output = output:view(word_ids:size(1), word_ids:size(2), w2vutils.M:size(2)) elseif word_ids:dim() == 1 then output:index(w2vutils.M, 1, word_ids) output = output:view(word_ids:size(1), w2vutils.M:size(2)) end return output end -- Normalize word vectors to have norm 1 . w2vutils.renormalize = function (self) w2vutils.M[unk_w_id]:mul(0) w2vutils.M[unk_w_id]:add(1) w2vutils.M:cdiv(w2vutils.M:norm(2,2):expand(w2vutils.M:size())) local x = w2vutils.M:norm(2,2):view(-1) - 1 assert(x:norm() < 0.1, x:norm()) assert(w2vutils.M[100]:norm() < 1.001 and w2vutils.M[100]:norm() > 0.99) w2vutils.M[unk_w_id]:mul(0) end w2vutils:renormalize() print(' Done reading w2v data. Word vocab size = ' .. 
w2vutils.M:size(1)) -- Phrase embedding using average of vectors of words in the phrase w2vutils.phrase_avg_vec = function(self, phrase) local words = split_in_words(phrase) local num_words = table_len(words) local num_existent_words = 0 local vec = torch.zeros(word_vecs_size) for i = 1,num_words do local w = words[i] local w_id = get_id_from_word(w) if w_id ~= unk_w_id then vec:add(w2vutils:get_w_vec_from_id(w_id)) num_existent_words = num_existent_words + 1 end end if (num_existent_words > 0) then vec:div(num_existent_words) end return vec end w2vutils.top_k_closest_words = function (self,vec, k, mat) local k = k or 1 vec = vec:float() local distances = torch.mv(mat, vec) local best_scores, best_word_ids = topk(distances, k) local returnwords = {} local returndistances = {} for i = 1,k do local w = get_word_from_id(best_word_ids[i]) if is_stop_word_or_number(w) then table.insert(returnwords, red(w)) else table.insert(returnwords, w) end assert(best_scores[i] == distances[best_word_ids[i]], best_scores[i] .. ' ' .. distances[best_word_ids[i]]) table.insert(returndistances, distances[best_word_ids[i]]) end return returnwords, returndistances end w2vutils.most_similar2word = function(self, word, k) local k = k or 1 local v = w2vutils:get_w_vec(word) neighbors, scores = w2vutils:top_k_closest_words(v, k, w2vutils.M) print('To word ' .. skyblue(word) .. ' : ' .. list_with_scores_to_str(neighbors, scores)) end w2vutils.most_similar2vec = function(self, vec, k) local k = k or 1 neighbors, scores = w2vutils:top_k_closest_words(vec, k, w2vutils.M) print(list_with_scores_to_str(neighbors, scores)) end --------------------- Unit tests ---------------------------------------- local unit_tests = opt.unit_tests or false if (unit_tests) then print('\nWord to word similarity test:') w2vutils:most_similar2word('nice', 5) w2vutils:most_similar2word('france', 5) w2vutils:most_similar2word('hello', 5) end -- Computes for each word w : \sum_v exp(<v,w>) and \sum_v <v,w> w2vutils.total_word_correlation = function(self, k, j) local exp_Z = torch.zeros(w2vutils.M:narrow(1, 1, j):size(1)) local sum_t = w2vutils.M:narrow(1, 1, j):sum(1) -- 1 x d local sum_Z = (w2vutils.M:narrow(1, 1, j) * sum_t:t()):view(-1) -- num_w print(red('Top words by sum_Z:')) best_sum_Z, best_word_ids = topk(sum_Z, k) for i = 1,k do local w = get_word_from_id(best_word_ids[i]) assert(best_sum_Z[i] == sum_Z[best_word_ids[i]]) print(w .. ' [' .. best_sum_Z[i] .. ']; ') end print('\n' .. red('Bottom words by sum_Z:')) best_sum_Z, best_word_ids = topk(- sum_Z, k) for i = 1,k do local w = get_word_from_id(best_word_ids[i]) assert(best_sum_Z[i] == - sum_Z[best_word_ids[i]]) print(w .. ' [' .. sum_Z[best_word_ids[i]] .. 
']; ') end end -- Plot with gnuplot: -- set palette model RGB defined ( 0 'white', 1 'pink', 2 'green' , 3 'blue', 4 'red' ) -- plot 'tsne-w2v-vecs.txt_1000' using 1:2:3 with labels offset 0,1, '' using 1:2:4 w points pt 7 ps 2 palette w2vutils.tsne = function(self, num_rand_words) local topic1 = {'japan', 'china', 'france', 'switzerland', 'romania', 'india', 'australia', 'country', 'city', 'tokyo', 'nation', 'capital', 'continent', 'europe', 'asia', 'earth', 'america'} local topic2 = {'football', 'striker', 'goalkeeper', 'basketball', 'coach', 'championship', 'cup', 'soccer', 'player', 'captain', 'qualifier', 'goal', 'under-21', 'halftime', 'standings', 'basketball', 'games', 'league', 'rugby', 'hockey', 'fifa', 'fans', 'maradona', 'mutu', 'hagi', 'beckham', 'injury', 'game', 'kick', 'penalty'} local topic_avg = {'japan national football team', 'germany national football team', 'china national football team', 'brazil soccer', 'japan soccer', 'germany soccer', 'china soccer', 'fc barcelona', 'real madrid'} local stop_words_array = {} for w,_ in pairs(stop_words) do table.insert(stop_words_array, w) end local topic1_len = table_len(topic1) local topic2_len = table_len(topic2) local topic_avg_len = table_len(topic_avg) local stop_words_len = table_len(stop_words_array) torch.setdefaulttensortype('torch.DoubleTensor') w2vutils.M = w2vutils.M:double() local tensor = torch.zeros(num_rand_words + stop_words_len + topic1_len + topic2_len + topic_avg_len, word_vecs_size) local tensor_w_ids = torch.zeros(num_rand_words) local tensor_colors = torch.zeros(tensor:size(1)) for i = 1,num_rand_words do tensor_w_ids[i] = math.random(1,25000) tensor_colors[i] = 0 tensor[i]:copy(w2vutils.M[tensor_w_ids[i]]) end for i = 1, stop_words_len do tensor_colors[num_rand_words + i] = 1 tensor[num_rand_words + i]:copy(w2vutils:phrase_avg_vec(stop_words_array[i])) end for i = 1, topic1_len do tensor_colors[num_rand_words + stop_words_len + i] = 2 tensor[num_rand_words + stop_words_len + i]:copy(w2vutils:phrase_avg_vec(topic1[i])) end for i = 1, topic2_len do tensor_colors[num_rand_words + stop_words_len + topic1_len + i] = 3 tensor[num_rand_words + stop_words_len + topic1_len + i]:copy(w2vutils:phrase_avg_vec(topic2[i])) end for i = 1, topic_avg_len do tensor_colors[num_rand_words + stop_words_len + topic1_len + topic2_len + i] = 4 tensor[num_rand_words + stop_words_len + topic1_len + topic2_len + i]:copy(w2vutils:phrase_avg_vec(topic_avg[i])) end local manifold = require 'manifold' opts = {ndims = 2, perplexity = 30, pca = 50, use_bh = false} mapped_x1 = manifold.embedding.tsne(tensor, opts) assert(mapped_x1:size(1) == tensor:size(1) and mapped_x1:size(2) == 2) ouf_vecs = assert(io.open('tsne-w2v-vecs.txt_' .. num_rand_words, "w")) for i = 1,mapped_x1:size(1) do local w = nil if tensor_colors[i] == 0 then w = get_word_from_id(tensor_w_ids[i]) elseif tensor_colors[i] == 1 then w = stop_words_array[i - num_rand_words]:gsub(' ', '-') elseif tensor_colors[i] == 2 then w = topic1[i - num_rand_words - stop_words_len]:gsub(' ', '-') elseif tensor_colors[i] == 3 then w = topic2[i - num_rand_words - stop_words_len - topic1_len]:gsub(' ', '-') elseif tensor_colors[i] == 4 then w = topic_avg[i - num_rand_words - stop_words_len - topic1_len - topic2_len]:gsub(' ', '-') end assert(w) local v = mapped_x1[i] for j = 1,2 do ouf_vecs:write(v[j] .. ' ') end ouf_vecs:write(w .. ' ' .. tensor_colors[i] .. '\n') end io.close(ouf_vecs) print(' DONE') end
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<databaseChangeLog xmlns="http://www.liquibase.org/xml/ns/dbchangelog"
                   xmlns:ext="http://www.liquibase.org/xml/ns/dbchangelog-ext"
                   xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
                   xsi:schemaLocation="http://www.liquibase.org/xml/ns/dbchangelog-ext http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-ext.xsd
                                       http://www.liquibase.org/xml/ns/dbchangelog http://www.liquibase.org/xml/ns/dbchangelog/dbchangelog-3.3.xsd">

    <changeSet author="toja" id="1-indices">
        <createIndex tableName="LOGRECORD" indexName="LOGRECORD_TIMESTAMPRECORD_fkey">
            <column name="TIMESTAMPRECORD"/>
        </createIndex>
    </changeSet>

</databaseChangeLog>
import {MigrationInterface, QueryRunner} from 'typeorm'; export class tipsAndBitsMessagesToText1573942908160 implements MigrationInterface { name = 'tipsAndBitsMessagesToText1573942908160'; public async up(queryRunner: QueryRunner): Promise<any> { await queryRunner.query(`CREATE TABLE "temporary_user_tip" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" float NOT NULL, "currency" varchar NOT NULL, "message" varchar NOT NULL DEFAULT (''), "tippedAt" bigint NOT NULL DEFAULT (0), "sortAmount" float NOT NULL, "userUserId" integer, CONSTRAINT "FK_36683fb221201263b38344a9880" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_tip"("id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId") SELECT "id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId" FROM "user_tip"`, undefined); await queryRunner.query(`DROP TABLE "user_tip"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_tip" RENAME TO "user_tip"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" varchar NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "user_bit"`, undefined); await queryRunner.query(`DROP TABLE "user_bit"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_bit" RENAME TO "user_bit"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_user_tip" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" float NOT NULL, "currency" varchar NOT NULL, "message" text NOT NULL DEFAULT (''), "tippedAt" bigint NOT NULL DEFAULT (0), "sortAmount" float NOT NULL, "userUserId" integer, CONSTRAINT "FK_36683fb221201263b38344a9880" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_tip"("id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId") SELECT "id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId" FROM "user_tip"`, undefined); await queryRunner.query(`DROP TABLE "user_tip"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_tip" RENAME TO "user_tip"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" text NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "user_bit"`, undefined); await queryRunner.query(`DROP TABLE "user_bit"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_user_bit" RENAME TO "user_bit"`, undefined); await queryRunner.query(`DROP INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3"`, undefined); await queryRunner.query(`CREATE 
TABLE "temporary_twitch_tag_localization_description" ("id" varchar PRIMARY KEY NOT NULL, "locale" varchar NOT NULL, "value" varchar NOT NULL, "tagId" varchar, CONSTRAINT "FK_4d8108fc3e8dcbe5c112f53dd3f" FOREIGN KEY ("tagId") REFERENCES "twitch_tag" ("tag_id") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_twitch_tag_localization_description"("id", "locale", "value", "tagId") SELECT "id", "locale", "value", "tagId" FROM "twitch_tag_localization_description"`, undefined); await queryRunner.query(`DROP TABLE "twitch_tag_localization_description"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_twitch_tag_localization_description" RENAME TO "twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3" ON "twitch_tag_localization_description" ("tagId") `, undefined); await queryRunner.query(`DROP INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3"`, undefined); await queryRunner.query(`CREATE TABLE "temporary_twitch_tag_localization_description" ("id" varchar PRIMARY KEY NOT NULL, "locale" varchar NOT NULL, "value" text NOT NULL, "tagId" varchar, CONSTRAINT "FK_4d8108fc3e8dcbe5c112f53dd3f" FOREIGN KEY ("tagId") REFERENCES "twitch_tag" ("tag_id") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "temporary_twitch_tag_localization_description"("id", "locale", "value", "tagId") SELECT "id", "locale", "value", "tagId" FROM "twitch_tag_localization_description"`, undefined); await queryRunner.query(`DROP TABLE "twitch_tag_localization_description"`, undefined); await queryRunner.query(`ALTER TABLE "temporary_twitch_tag_localization_description" RENAME TO "twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3" ON "twitch_tag_localization_description" ("tagId") `, undefined); } public async down(queryRunner: QueryRunner): Promise<any> { await queryRunner.query(`ALTER TABLE "user_bit" RENAME TO "temporary_user_bit"`, undefined); await queryRunner.query(`CREATE TABLE "user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" varchar NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "temporary_user_bit"`, undefined); await queryRunner.query(`DROP TABLE "temporary_user_bit"`, undefined); await queryRunner.query(`ALTER TABLE "user_tip" RENAME TO "temporary_user_tip"`, undefined); await queryRunner.query(`CREATE TABLE "user_tip" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" float NOT NULL, "currency" varchar NOT NULL, "message" varchar NOT NULL DEFAULT (''), "tippedAt" bigint NOT NULL DEFAULT (0), "sortAmount" float NOT NULL, "userUserId" integer, CONSTRAINT "FK_36683fb221201263b38344a9880" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "user_tip"("id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId") SELECT "id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId" FROM "temporary_user_tip"`, undefined); await queryRunner.query(`DROP TABLE 
"temporary_user_tip"`, undefined); await queryRunner.query(`ALTER TABLE "user_bit" RENAME TO "temporary_user_bit"`, undefined); await queryRunner.query(`CREATE TABLE "user_bit" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" bigint NOT NULL, "message" varchar NOT NULL DEFAULT (''), "cheeredAt" bigint NOT NULL DEFAULT (0), "userUserId" integer, CONSTRAINT "FK_cca96526faa532e7d20a0f775b0" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "user_bit"("id", "amount", "message", "cheeredAt", "userUserId") SELECT "id", "amount", "message", "cheeredAt", "userUserId" FROM "temporary_user_bit"`, undefined); await queryRunner.query(`DROP TABLE "temporary_user_bit"`, undefined); await queryRunner.query(`ALTER TABLE "user_tip" RENAME TO "temporary_user_tip"`, undefined); await queryRunner.query(`CREATE TABLE "user_tip" ("id" integer PRIMARY KEY AUTOINCREMENT NOT NULL, "amount" float NOT NULL, "currency" varchar NOT NULL, "message" varchar NOT NULL DEFAULT (''), "tippedAt" bigint NOT NULL DEFAULT (0), "sortAmount" float NOT NULL, "userUserId" integer, CONSTRAINT "FK_36683fb221201263b38344a9880" FOREIGN KEY ("userUserId") REFERENCES "user" ("userId") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "user_tip"("id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId") SELECT "id", "amount", "currency", "message", "tippedAt", "sortAmount", "userUserId" FROM "temporary_user_tip"`, undefined); await queryRunner.query(`DROP TABLE "temporary_user_tip"`, undefined); await queryRunner.query(`DROP INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3"`, undefined); await queryRunner.query(`ALTER TABLE "twitch_tag_localization_description" RENAME TO "temporary_twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE TABLE "twitch_tag_localization_description" ("id" varchar PRIMARY KEY NOT NULL, "locale" varchar NOT NULL, "value" varchar NOT NULL, "tagId" varchar, CONSTRAINT "FK_4d8108fc3e8dcbe5c112f53dd3f" FOREIGN KEY ("tagId") REFERENCES "twitch_tag" ("tag_id") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "twitch_tag_localization_description"("id", "locale", "value", "tagId") SELECT "id", "locale", "value", "tagId" FROM "temporary_twitch_tag_localization_description"`, undefined); await queryRunner.query(`DROP TABLE "temporary_twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3" ON "twitch_tag_localization_description" ("tagId") `, undefined); await queryRunner.query(`DROP INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3"`, undefined); await queryRunner.query(`ALTER TABLE "twitch_tag_localization_description" RENAME TO "temporary_twitch_tag_localization_description"`, undefined); await queryRunner.query(`CREATE TABLE "twitch_tag_localization_description" ("id" varchar PRIMARY KEY NOT NULL, "locale" varchar NOT NULL, "value" varchar NOT NULL, "tagId" varchar, CONSTRAINT "FK_4d8108fc3e8dcbe5c112f53dd3f" FOREIGN KEY ("tagId") REFERENCES "twitch_tag" ("tag_id") ON DELETE CASCADE ON UPDATE CASCADE)`, undefined); await queryRunner.query(`INSERT INTO "twitch_tag_localization_description"("id", "locale", "value", "tagId") SELECT "id", "locale", "value", "tagId" FROM "temporary_twitch_tag_localization_description"`, undefined); await queryRunner.query(`DROP TABLE "temporary_twitch_tag_localization_description"`, undefined); await 
queryRunner.query(`CREATE INDEX "IDX_4d8108fc3e8dcbe5c112f53dd3" ON "twitch_tag_localization_description" ("tagId") `, undefined); } }
# This file is a Tcl script to test the code in the file tkTextIndex.c. # This file is organized in the standard fashion for Tcl tests. # # Copyright (c) 1994 The Regents of the University of California. # Copyright (c) 1994 Sun Microsystems, Inc. # Copyright (c) 1998-1999 by Scriptics Corporation. # All rights reserved. package require tcltest 2.1 eval tcltest::configure $argv tcltest::loadTestedCommands namespace import -force tcltest::test catch {destroy .t} text .t -font {Courier -12} -width 20 -height 10 pack append . .t {top expand fill} update .t debug on wm geometry . {} # The statements below reset the main window; it's needed if the window # manager is mwm to make mwm forget about a previous minimum size setting. wm withdraw . wm minsize . 1 1 wm positionfrom . user wm deiconify . .t insert 1.0 "Line 1 abcdefghijklm 12345 Line 4 b\u4e4fy GIrl .#@? x_yz !@#$% Line 7" image create photo textimage -width 10 -height 10 textimage put red -to 0 0 9 9 test textIndex-1.1 {TkTextMakeByteIndex} {testtext} { # (lineIndex < 0) testtext .t byteindex -1 3 } {1.0 0} test textIndex-1.2 {TkTextMakeByteIndex} {testtext} { # (lineIndex < 0), because lineIndex == strtol(argv[2]) - 1 testtext .t byteindex 0 3 } {1.0 0} test textIndex-1.3 {TkTextMakeByteIndex} {testtext} { # not (lineIndex < 0) testtext .t byteindex 1 3 } {1.3 3} test textIndex-1.4 {TkTextMakeByteIndex} {testtext} { # (byteIndex < 0) testtext .t byteindex 3 -1 } {3.0 0} test textIndex-1.5 {TkTextMakeByteIndex} {testtext} { # not (byteIndex < 0) testtext .t byteindex 3 3 } {3.3 3} test textIndex-1.6 {TkTextMakeByteIndex} {testtext} { # (indexPtr->linePtr == NULL) testtext .t byteindex 9 2 } {8.0 0} test textIndex-1.7 {TkTextMakeByteIndex} {testtext} { # not (indexPtr->linePtr == NULL) testtext .t byteindex 7 2 } {7.2 2} test textIndex-1.8 {TkTextMakeByteIndex: shortcut for 0} {testtext} { # (byteIndex == 0) testtext .t byteindex 1 0 } {1.0 0} test textIndex-1.9 {TkTextMakeByteIndex: shortcut for 0} {testtext} { # not (byteIndex == 0) testtext .t byteindex 3 80 } {3.5 5} test textIndex-1.10 {TkTextMakeByteIndex: verify index is in range} {testtext} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # one segment testtext .t byteindex 3 5 } {3.5 5} test textIndex-1.11 {TkTextMakeByteIndex: verify index is in range} {testtext} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # index += segPtr->size # Multiple segments, make sure add segment size to index. .t mark set foo 3.2 set x [testtext .t byteindex 3 7] .t mark unset foo set x } {3.5 5} test textIndex-1.12 {TkTextMakeByteIndex: verify index is in range} {testtext} { # (segPtr == NULL) testtext .t byteindex 3 7 } {3.5 5} test textIndex-1.13 {TkTextMakeByteIndex: verify index is in range} {testtext} { # not (segPtr == NULL) testtext .t byteindex 3 4 } {3.4 4} test textIndex-1.14 {TkTextMakeByteIndex: verify index is in range} {testtext} { # (index + segPtr->size > byteIndex) # in this segment. testtext .t byteindex 3 4 } {3.4 4} test textIndex-1.15 {TkTextMakeByteIndex: verify index is in range} {testtext} { # (index + segPtr->size > byteIndex), index != 0 # in this segment. 
.t mark set foo 3.2 set x [testtext .t byteindex 3 4] .t mark unset foo set x } {3.4 4} test textIndex-1.16 {TkTextMakeByteIndex: UTF-8 characters} {testtext} { testtext .t byteindex 5 100 } {5.18 20} test textIndex-1.17 {TkTextMakeByteIndex: prevent splitting UTF-8 character} \ {testtext} { # ((byteIndex > index) && (segPtr->typePtr == &tkTextCharType)) # Wrong answer would be \xb9 (the 2nd byte of UTF rep of 0x4e4f). set x [testtext .t byteindex 5 2] list $x [.t get insert] } {{5.2 4} y} test textIndex-1.18 {TkTextMakeByteIndex: prevent splitting UTF-8 character} \ {testtext} { # ((byteIndex > index) && (segPtr->typePtr == &tkTextCharType)) testtext .t byteindex 5 1 .t get insert } "\u4e4f" test textIndex-2.1 {TkTextMakeCharIndex} { # (lineIndex < 0) .t index -1.3 } 1.0 test textIndex-2.2 {TkTextMakeCharIndex} { # (lineIndex < 0), because lineIndex == strtol(argv[2]) - 1 .t index 0.3 } 1.0 test textIndex-2.3 {TkTextMakeCharIndex} { # not (lineIndex < 0) .t index 1.3 } 1.3 test textIndex-2.4 {TkTextMakeCharIndex} { # (charIndex < 0) .t index 3.-1 } 3.0 test textIndex-2.5 {TkTextMakeCharIndex} { # (charIndex < 0) .t index 3.3 } 3.3 test textIndex-2.6 {TkTextMakeCharIndex} { # (indexPtr->linePtr == NULL) .t index 9.2 } 8.0 test textIndex-2.7 {TkTextMakeCharIndex} { # not (indexPtr->linePtr == NULL) .t index 7.2 } 7.2 test textIndex-2.8 {TkTextMakeCharIndex: verify index is in range} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # one segment .t index 3.5 } 3.5 test textIndex-2.9 {TkTextMakeCharIndex: verify index is in range} { # for (segPtr = indexPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # Multiple segments, make sure add segment size to index. .t mark set foo 3.2 set x [.t index 3.7] .t mark unset foo set x } 3.5 test textIndex-2.10 {TkTextMakeCharIndex: verify index is in range} { # (segPtr == NULL) .t index 3.7 } 3.5 test textIndex-2.11 {TkTextMakeCharIndex: verify index is in range} { # not (segPtr == NULL) .t index 3.4 } 3.4 test textIndex-2.12 {TkTextMakeCharIndex: verify index is in range} { # (segPtr->typePtr == &tkTextCharType) # Wrong answer would be \xb9 (the 2nd byte of UTF rep of 0x4e4f). .t mark set insert 5.2 .t get insert } y test textIndex-2.13 {TkTextMakeCharIndex: verify index is in range} { # not (segPtr->typePtr == &tkTextCharType) .t image create 5.2 -image textimage .t mark set insert 5.5 set x [.t get insert] .t delete 5.2 set x } "G" test textIndex-2.14 {TkTextMakeCharIndex: verify index is in range} { # (charIndex < segPtr->size) .t image create 5.0 -image textimage set x [.t index 5.0] .t delete 5.0 set x } 5.0 .t mark set foo 3.2 .t tag add x 2.8 2.11 .t tag add x 6.0 6.2 set weirdTag "funny . 
+- 22.1\n\t{" .t tag add $weirdTag 2.1 2.6 set weirdMark "asdf \n{-+ 66.2\t" .t mark set $weirdMark 4.0 .t tag config y -relief raised set weirdImage "foo-1" .t image create 2.1 -image [image create photo $weirdImage] set weirdEmbWin ".t.bar-1" entry $weirdEmbWin .t window create 3.1 -window $weirdEmbWin test textIndex-3.1 {TkTextGetIndex, weird mark names} { list [catch {.t index $weirdMark} msg] $msg } {0 4.0} test textIndex-3.2 {TkTextGetIndex, weird mark names} knownBug { list [catch {.t index "$weirdMark -1char"} msg] $msg } {0 4.0} test textIndex-3.3 {TkTextGetIndex, weird embedded window names} { list [catch {.t index $weirdEmbWin} msg] $msg } {0 3.1} test textIndex-3.4 {TkTextGetIndex, weird embedded window names} knownBug { list [catch {.t index "$weirdEmbWin -1char"} msg] $msg } {0 3.0} test textIndex-3.5 {TkTextGetIndex, weird image names} { list [catch {.t index $weirdImage} msg] $msg } {0 2.1} test textIndex-3.6 {TkTextGetIndex, weird image names} knownBug { list [catch {.t index "$weirdImage -1char"} msg] $msg } {0 2.0} .t delete 3.1 ; # remove the weirdEmbWin .t delete 2.1 ; # remove the weirdImage test textIndex-4.1 {TkTextGetIndex, tags} { list [catch {.t index x.first} msg] $msg } {0 2.8} test textIndex-4.2 {TkTextGetIndex, tags} { list [catch {.t index x.last} msg] $msg } {0 6.2} test textIndex-4.3 {TkTextGetIndex, weird tags} { list [.t index $weirdTag.first+1c] [.t index $weirdTag.last+2c] } {2.2 2.8} test textIndex-4.4 {TkTextGetIndex, tags} { list [catch {.t index x.gorp} msg] $msg } {1 {bad text index "x.gorp"}} test textIndex-4.5 {TkTextGetIndex, tags} { list [catch {.t index foo.last} msg] $msg } {1 {bad text index "foo.last"}} test textIndex-4.6 {TkTextGetIndex, tags} { list [catch {.t index y.first} msg] $msg } {1 {text doesn't contain any characters tagged with "y"}} test textIndex-4.7 {TkTextGetIndex, tags} { list [catch {.t index x.last,} msg] $msg } {1 {bad text index "x.last,"}} test textIndex-4.8 {TkTextGetIndex, tags} { .t tag add z 1.0 set result [list [.t index z.first] [.t index z.last]] .t tag delete z set result } {1.0 1.1} test textIndex-5.1 {TkTextGetIndex, "@"} {nonPortable fonts} { .t index @12,9 } 1.1 test textIndex-5.2 {TkTextGetIndex, "@"} {fonts} { .t index @-2,7 } 1.0 test textIndex-5.3 {TkTextGetIndex, "@"} {fonts} { .t index @10,-7 } 1.0 test textIndex-5.4 {TkTextGetIndex, "@"} {fonts} { list [catch {.t index @x} msg] $msg } {1 {bad text index "@x"}} test textIndex-5.5 {TkTextGetIndex, "@"} {fonts} { list [catch {.t index @10q} msg] $msg } {1 {bad text index "@10q"}} test textIndex-5.6 {TkTextGetIndex, "@"} {fonts} { list [catch {.t index @10,} msg] $msg } {1 {bad text index "@10,"}} test textIndex-5.7 {TkTextGetIndex, "@"} {fonts} { list [catch {.t index @10,a} msg] $msg } {1 {bad text index "@10,a"}} test textIndex-5.8 {TkTextGetIndex, "@"} {fonts} { list [catch {.t index @10,9,} msg] $msg } {1 {bad text index "@10,9,"}} test textIndex-6.1 {TkTextGetIndex, numeric} { list [catch {.t index 2.3} msg] $msg } {0 2.3} test textIndex-6.2 {TkTextGetIndex, numeric} { list [catch {.t index -} msg] $msg } {1 {bad text index "-"}} test textIndex-6.3 {TkTextGetIndex, numeric} { list [catch {.t index 2.end} msg] $msg } {0 2.13} test textIndex-6.4 {TkTextGetIndex, numeric} { list [catch {.t index 2.x} msg] $msg } {1 {bad text index "2.x"}} test textIndex-6.5 {TkTextGetIndex, numeric} { list [catch {.t index 2.3x} msg] $msg } {1 {bad text index "2.3x"}} test textIndex-7.1 {TkTextGetIndex, miscellaneous other bases} { list [catch {.t index end} msg] 
$msg } {0 8.0} test textIndex-7.2 {TkTextGetIndex, miscellaneous other bases} { list [catch {.t index foo} msg] $msg } {0 3.2} test textIndex-7.3 {TkTextGetIndex, miscellaneous other bases} { list [catch {.t index foo+1c} msg] $msg } {0 3.3} test textIndex-8.1 {TkTextGetIndex, modifiers} { list [catch {.t index 2.1+1char} msg] $msg } {0 2.2} test textIndex-8.2 {TkTextGetIndex, modifiers} { list [catch {.t index "2.1 +1char"} msg] $msg } {0 2.2} test textIndex-8.3 {TkTextGetIndex, modifiers} { list [catch {.t index 2.1-1char} msg] $msg } {0 2.0} test textIndex-8.4 {TkTextGetIndex, modifiers} { list [catch {.t index {2.1 }} msg] $msg } {0 2.1} test textIndex-8.5 {TkTextGetIndex, modifiers} { list [catch {.t index {2.1+foo bar}} msg] $msg } {1 {bad text index "2.1+foo bar"}} test textIndex-8.6 {TkTextGetIndex, modifiers} { list [catch {.t index {2.1 foo bar}} msg] $msg } {1 {bad text index "2.1 foo bar"}} test textIndex-9.1 {TkTextIndexCmp} { list [.t compare 3.1 < 3.2] [.t compare 3.1 == 3.2] } {1 0} test textIndex-9.2 {TkTextIndexCmp} { list [.t compare 3.2 < 3.2] [.t compare 3.2 == 3.2] } {0 1} test textIndex-9.3 {TkTextIndexCmp} { list [.t compare 3.3 < 3.2] [.t compare 3.3 == 3.2] } {0 0} test textIndex-9.4 {TkTextIndexCmp} { list [.t compare 2.1 < 3.2] [.t compare 2.1 == 3.2] } {1 0} test textIndex-9.5 {TkTextIndexCmp} { list [.t compare 4.1 < 3.2] [.t compare 4.1 == 3.2] } {0 0} test textIndex-10.1 {ForwBack} { list [catch {.t index {2.3 + x}} msg] $msg } {1 {bad text index "2.3 + x"}} test textIndex-10.2 {ForwBack} { list [catch {.t index {2.3 + 2 chars}} msg] $msg } {0 2.5} test textIndex-10.3 {ForwBack} { list [catch {.t index {2.3 + 2c}} msg] $msg } {0 2.5} test textIndex-10.4 {ForwBack} { list [catch {.t index {2.3 - 3ch}} msg] $msg } {0 2.0} test textIndex-10.5 {ForwBack} { list [catch {.t index {1.3 + 3 lines}} msg] $msg } {0 4.3} test textIndex-10.6 {ForwBack} { list [catch {.t index {2.3 -1l}} msg] $msg } {0 1.3} test textIndex-10.7 {ForwBack} { list [catch {.t index {2.3 -1 gorp}} msg] $msg } {1 {bad text index "2.3 -1 gorp"}} test textIndex-10.8 {ForwBack} { list [catch {.t index {2.3 - 4 lines}} msg] $msg } {0 1.3} test textIndex-10.9 {ForwBack} { .t mark set insert 2.0 list [catch {.t index {insert -0 chars}} msg] $msg } {0 2.0} test textIndex-10.10 {ForwBack} { .t mark set insert 2.end list [catch {.t index {insert +0 chars}} msg] $msg } {0 2.13} test textIndex-11.1 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 -7 } {1.3 3} test textIndex-11.2 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 5 } {2.8 8} test textIndex-11.3 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 10 } {2.13 13} test textIndex-11.4 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 11 } {3.0 0} test textIndex-11.5 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 57 } {7.6 6} test textIndex-11.6 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 58 } {8.0 0} test textIndex-11.7 {TkTextIndexForwBytes} {testtext} { testtext .t forwbytes 2.3 59 } {8.0 0} test textIndex-12.1 {TkTextIndexForwChars} { # (charCount < 0) .t index {2.3 + -7 chars} } 1.3 test textIndex-12.2 {TkTextIndexForwChars} { # not (charCount < 0) .t index {2.3 + 5 chars} } 2.8 test textIndex-12.3 {TkTextIndexForwChars: find index} { # for ( ; segPtr != NULL; segPtr = segPtr->nextPtr) # one loop .t index {2.3 + 9 chars} } 2.12 test textIndex-12.4 {TkTextIndexForwChars: find index} { # for ( ; segPtr != NULL; segPtr = segPtr->nextPtr) # multiple loops .t mark 
set foo 2.5 set x [.t index {2.3 + 9 chars}] .t mark unset foo set x } 2.12 test textIndex-12.5 {TkTextIndexForwChars: find index} { # for ( ; segPtr != NULL; segPtr = segPtr->nextPtr) # border condition: last char .t index {2.3 + 10 chars} } 2.13 test textIndex-12.6 {TkTextIndexForwChars: find index} { # for ( ; segPtr != NULL; segPtr = segPtr->nextPtr) # border condition: segPtr == NULL -> beginning of next line .t index {2.3 + 11 chars} } 3.0 test textIndex-12.7 {TkTextIndexForwChars: find index} { # (segPtr->typePtr == &tkTextCharType) .t index {2.3 + 2 chars} } 2.5 test textIndex-12.8 {TkTextIndexForwChars: find index} { # (charCount == 0) # No more chars, so we found byte offset. .t index {2.3 + 2 chars} } 2.5 test textIndex-12.9 {TkTextIndexForwChars: find index} { # not (segPtr->typePtr == &tkTextCharType) .t image create 2.4 -image textimage set x [.t get {2.3 + 3 chars}] .t delete 2.4 set x } "f" test textIndex-12.10 {TkTextIndexForwChars: find index} { # dstPtr->byteIndex += segPtr->size - byteOffset # When moving to next segment, account for bytes in last segment. # Wrong answer would be 2.4 .t mark set foo 2.4 set x [.t index {2.3 + 5 chars}] .t mark unset foo set x } 2.8 test textIndex-12.11 {TkTextIndexForwChars: go to next line} { # (linePtr == NULL) .t index {7.6 + 3 chars} } 8.0 test textIndex-12.12 {TkTextIndexForwChars: go to next line} { # Reset byteIndex to 0 now that we are on a new line. # Wrong answer would be 2.9 .t index {1.3 + 6 chars} } 2.2 test textIndex-12.13 {TkTextIndexForwChars} { # right to end .t index {2.3 + 56 chars} } 8.0 test textIndex-12.14 {TkTextIndexForwChars} { # try to go past end .t index {2.3 + 57 chars} } 8.0 test textIndex-13.1 {TkTextIndexBackBytes} {testtext} { testtext .t backbytes 3.2 -10 } {4.6 6} test textIndex-13.2 {TkTextIndexBackBytes} {testtext} { testtext .t backbytes 3.2 2 } {3.0 0} test textIndex-13.3 {TkTextIndexBackBytes} {testtext} { testtext .t backbytes 3.2 3 } {2.13 13} test textIndex-13.4 {TkTextIndexBackBytes} {testtext} { testtext .t backbytes 3.2 22 } {1.1 1} test textIndex-13.5 {TkTextIndexBackBytes} {testtext} { testtext .t backbytes 3.2 23 } {1.0 0} test textIndex-13.6 {TkTextIndexBackBytes} {testtext} { testtext .t backbytes 3.2 24 } {1.0 0} test textIndex-14.1 {TkTextIndexBackChars} { # (charCount < 0) .t index {3.2 - -10 chars} } 4.6 test textIndex-14.2 {TkTextIndexBackChars} { # not (charCount < 0) .t index {3.2 - 2 chars} } 3.0 test textIndex-14.3 {TkTextIndexBackChars: find starting segment} { # for (segPtr = dstPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # single loop .t index {3.2 - 3 chars} } 2.13 test textIndex-14.4 {TkTextIndexBackChars: find starting segment} { # for (segPtr = dstPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # multiple loop .t mark set foo1 2.5 .t mark set foo2 2.7 .t mark set foo3 2.10 set x [.t index {2.9 - 1 chars}] .t mark unset foo1 foo2 foo3 set x } 2.8 test textIndex-14.5 {TkTextIndexBackChars: find starting seg and offset} { # for (segPtr = dstPtr->linePtr->segPtr; ; segPtr = segPtr->nextPtr) # Make sure segSize was decremented. Wrong answer would be 2.10 .t mark set foo 2.2 set x [.t index {2.9 - 1 char}] .t mark unset foo set x } 2.8 test textIndex-14.6 {TkTextIndexBackChars: back over characters} { # (segPtr->typePtr == &tkTextCharType) .t index {3.2 - 22 chars} } 1.1 test textIndex-14.7 {TkTextIndexBackChars: loop backwards over chars} { # (charCount == 0) # No more chars, so we found byte offset. 
.t index {3.4 - 2 chars} } 3.2 test textIndex-14.8 {TkTextIndexBackChars: loop backwards over chars} { # (p == start) # Still more chars, but we reached beginning of segment .t image create 5.6 -image textimage set x [.t index {5.8 - 3 chars}] .t delete 5.6 set x } 5.5 test textIndex-14.9 {TkTextIndexBackChars: back over image} { # not (segPtr->typePtr == &tkTextCharType) .t image create 5.6 -image textimage set x [.t get {5.8 - 4 chars}] .t delete 5.6 set x } "G" test textIndex-14.10 {TkTextIndexBackChars: move to previous segment} { # (segPtr != oldPtr) # More segments to go .t mark set foo 3.4 set x [.t index {3.5 - 2 chars}] .t mark unset foo set x } 3.3 test textIndex-14.11 {TkTextIndexBackChars: move to previous segment} { # not (segPtr != oldPtr) # At beginning of line. .t mark set foo 3.4 set x [.t index {3.5 - 10 chars}] .t mark unset foo set x } 2.9 test textIndex-14.12 {TkTextIndexBackChars: move to previous line} { # (lineIndex == 0) .t index {1.5 - 10 chars} } 1.0 test textIndex-14.13 {TkTextIndexBackChars: move to previous line} { # not (lineIndex == 0) .t index {2.5 - 10 chars} } 1.2 test textIndex-14.14 {TkTextIndexBackChars: move to previous line} { # for (segPtr = oldPtr; segPtr != NULL; segPtr = segPtr->nextPtr) # Set byteIndex to end of previous line so we can subtract more # bytes from it. Otherwise we get an TkTextIndex with a negative # byteIndex. .t index {2.5 - 6 chars} } 1.6 test textIndex-14.15 {TkTextIndexBackChars: UTF} { .t get {5.3 - 1 chars} } y test textIndex-14.16 {TkTextIndexBackChars: UTF} { .t get {5.3 - 2 chars} } \u4e4f test textIndex-14.17 {TkTextIndexBackChars: UTF} { .t get {5.3 - 3 chars} } b proc getword index { .t get [.t index "$index wordstart"] [.t index "$index wordend"] } test textIndex-15.1 {StartEnd} { list [catch {.t index {2.3 lineend}} msg] $msg } {0 2.13} test textIndex-15.2 {StartEnd} { list [catch {.t index {2.3 linee}} msg] $msg } {0 2.13} test textIndex-15.3 {StartEnd} { list [catch {.t index {2.3 line}} msg] $msg } {1 {bad text index "2.3 line"}} test textIndex-15.4 {StartEnd} { list [catch {.t index {2.3 linestart}} msg] $msg } {0 2.0} test textIndex-15.5 {StartEnd} { list [catch {.t index {2.3 lines}} msg] $msg } {0 2.0} test textIndex-15.6 {StartEnd} { getword 5.3 } { } test textIndex-15.7 {StartEnd} { getword 5.4 } GIrl test textIndex-15.8 {StartEnd} { getword 5.7 } GIrl test textIndex-15.9 {StartEnd} { getword 5.8 } { } test textIndex-15.10 {StartEnd} { getword 5.14 } x_yz test textIndex-15.11 {StartEnd} { getword 6.2 } # test textIndex-15.12 {StartEnd} { getword 3.4 } 12345 .t tag add x 2.8 2.11 test textIndex-15.13 {StartEnd} { list [catch {.t index {2.2 worde}} msg] $msg } {0 2.13} test textIndex-15.14 {StartEnd} { list [catch {.t index {2.12 words}} msg] $msg } {0 2.0} test textIndex-15.15 {StartEnd} { list [catch {.t index {2.12 word}} msg] $msg } {1 {bad text index "2.12 word"}} test textIndex-16.1 {TkTextPrintIndex} { set t [text .t2] $t insert end \n $t window create end -window [button $t.b] set result [$t index end-2c] pack $t catch {destroy $t} } 0 test textIndex-16.2 {TkTextPrintIndex} { set t [text .t2] $t insert end \n $t window create end -window [button $t.b] set result [$t tag add {} end-2c] pack $t catch {destroy $t} } 0 test textIndex-17.1 {Object indices} { set res {} set t [text .t2 -height 20] for {set i 0} {$i < 100} {incr i} { $t insert end $i\n } pack $t update set idx @0,0 lappend res $idx [$t index $idx] $t yview scroll 2 pages lappend res $idx [$t index $idx] catch {destroy $t} unset i unset idx 
list $res } {{@0,0 1.0 @0,0 37.0}} test textIndex-18.1 {Object indices don't cache mark names} { set res {} text .t2 .t2 insert 1.0 1234\n1234\n1234 set pos "insert" lappend res [.t2 index $pos] .t2 mark set $pos 3.0 lappend res [.t2 index $pos] .t2 mark set $pos 1.0 lappend res [.t2 index $pos] catch {destroy .t2} set res } {3.4 3.0 1.0} frame .f -width 100 -height 20 pack append . .f left set fixedFont {Courier -12} set fixedHeight [font metrics $fixedFont -linespace] set fixedWidth [font measure $fixedFont m] set varFont {Times -14} set bigFont {Helvetica -24} destroy .t text .t -font $fixedFont -width 20 -height 10 -wrap char pack append . .t {top expand fill} .t tag configure big -font $bigFont .t debug on wm geometry . {} # The statements below reset the main window; it's needed if the window # manager is mwm to make mwm forget about a previous minimum size setting. wm withdraw . wm minsize . 1 1 wm positionfrom . user wm deiconify . update # Some window managers (like olwm under SunOS 4.1.3) misbehave in a way # that tends to march windows off the top and left of the screen. If # this happens, some tests will fail because parts of the window will # not need to be displayed (because they're off-screen). To keep this # from happening, move the window if it's getting near the left or top # edges of the screen. if {([winfo rooty .] < 50) || ([winfo rootx .] < 50)} { wm geom . +50+50 } set str [string repeat "hello " 20] .t insert end "$str one two three four five six seven height nine ten\n" .t insert end "$str one two three four five six seven height nine ten\n" .t insert end "$str one two three four five six seven height nine ten\n" test textIndex-19.1 {Display lines} { .t index "2.7 displaylinestart" } {2.0} test textIndex-19.2 {Display lines} { .t index "2.7 displaylineend" } {2.19} test textIndex-19.3 {Display lines} { .t index "2.30 displaylinestart" } {2.20} test textIndex-19.4 {Display lines} { .t index "2.30 displaylineend" } {2.39} test textIndex-19.5 {Display lines} { .t index "2.40 displaylinestart" } {2.40} test textIndex-19.6 {Display lines} { .t index "2.40 displaylineend" } {2.59} test textIndex-19.7 {Display lines} { .t index "2.7 +1displaylines" } {2.27} test textIndex-19.8 {Display lines} { .t index "2.7 -1displaylines" } {1.167} test textIndex-19.9 {Display lines} { .t index "2.30 +1displaylines" } {2.50} test textIndex-19.10 {Display lines} { .t index "2.30 -1displaylines" } {2.10} test textIndex-19.11 {Display lines} { .t index "2.40 +1displaylines" } {2.60} test textIndex-19.12 {Display lines} { .t index "2.40 -1displaylines" } {2.20} test textIndex-19.13 {Display lines} { destroy {*}[pack slaves .] text .txt -height 1 -wrap word -yscroll ".sbar set" -width 400 scrollbar .sbar -command ".txt yview" grid .txt .sbar -sticky news grid configure .sbar -sticky ns grid rowconfigure . 0 -weight 1 grid columnconfigure . 
0 -weight 1 .txt configure -width 10 .txt tag config STAMP -elide 1 .txt tag config NICK-tick -elide 0 .txt insert end "+++++ Loading History ++++++++++++++++\n" .txt mark set HISTORY {2.0 - 1 line} .txt insert HISTORY { } STAMP .txt insert HISTORY {tick } {NICK NICK-tick} .txt insert HISTORY "\n" {NICK NICK-tick} .txt insert HISTORY {[23:51] } STAMP .txt insert HISTORY "\n" {NICK NICK-tick} # Must not crash .txt index "2.0 - 2 display lines" destroy .txt .sbar } {} proc text_test_word {startend chars start} { destroy .t text .t .t insert end $chars if {[regexp {end} $start]} { set start [.t index "${start}chars -2c"] } else { set start [.t index "1.0 + ${start}chars"] } if {[.t compare $start >= "end-1c"]} { set start "end-2c" } set res [.t index "$start $startend"] .t count 1.0 $res } # Following tests copied from tests from string wordstart/end in Tcl test textIndex-21.4 {text index wordend} { text_test_word wordend abc. -1 } 3 test textIndex-21.5 {text index wordend} { text_test_word wordend abc. 100 } 4 test textIndex-21.6 {text index wordend} { text_test_word wordend "word_one two three" 2 } 8 test textIndex-21.7 {text index wordend} { text_test_word wordend "one .&# three" 5 } 6 test textIndex-21.8 {text index wordend} { text_test_word worde "x.y" 0 } 1 test textIndex-21.9 {text index wordend} { text_test_word worde "x.y" end-1 } 2 test textIndex-21.10 {text index wordend, unicode} { text_test_word wordend "xyz\u00c7de fg" 0 } 6 test textIndex-21.11 {text index wordend, unicode} { text_test_word wordend "xyz\uc700de fg" 0 } 6 test textIndex-21.12 {text index wordend, unicode} { text_test_word wordend "xyz\u203fde fg" 0 } 6 test textIndex-21.13 {text index wordend, unicode} { text_test_word wordend "xyz\u2045de fg" 0 } 3 test textIndex-21.14 {text index wordend, unicode} { text_test_word wordend "\uc700\uc700 abc" 8 } 6 test textIndex-22.5 {text index wordstart} { text_test_word wordstart "one two three_words" 400 } 8 test textIndex-22.6 {text index wordstart} { text_test_word wordstart "one two three_words" 2 } 0 test textIndex-22.7 {text index wordstart} { text_test_word wordstart "one two three_words" -2 } 0 test textIndex-22.8 {text index wordstart} { text_test_word wordstart "one .*&^ three" 6 } 6 test textIndex-22.9 {text index wordstart} { text_test_word wordstart "one two three" 4 } 4 test textIndex-22.10 {text index wordstart} { text_test_word wordstart "one two three" end-5 } 7 test textIndex-22.11 {text index wordstart, unicode} { text_test_word wordstart "one tw\u00c7o three" 7 } 4 test textIndex-22.12 {text index wordstart, unicode} { text_test_word wordstart "ab\uc700\uc700 cdef ghi" 12 } 10 test textIndex-22.13 {text index wordstart, unicode} { text_test_word wordstart "\uc700\uc700 abc" 8 } 3 test textIndex-22.14 {text index wordstart, unicode, start index at internal segment start} { catch {destroy .t} text .t .t insert end "C'est du texte en fran\u00e7ais\n" .t insert end "\u042D\u0442\u043E\u0020\u0442\u0435\u043A\u0441\u0442\u0020\u043D\u0430\u0020\u0440\u0443\u0441\u0441\u043A\u043E\u043C" .t mark set insert 1.23 set res [.t index "1.23 wordstart"] .t mark set insert 2.16 lappend res [.t index "2.16 wordstart"] [.t index "2.15 wordstart"] } {1.18 2.13 2.13} test textIndex-22.15 {text index display wordstart} { catch {destroy .t} text .t .t index "1.0 display wordstart" ; # used to crash } 1.0 test textIndex-23.1 {text paragraph start} { pack [text .t2] .t2 insert end " Text" set res 2.0 for {set i 0} {$i < 2} {incr i} { lappend res [::tk::TextPrevPara .t2 [lindex 
$res end]] } destroy .t2 set res } {2.0 1.1 1.1} test textIndex-24.1 {text mark prev} { pack [text .t2] .t2 insert end [string repeat "1 2 3 4 5 6 7 8 9 0\n" 12] .t2 mark set 1.0 10.0 update # then this crash Tk: set res [.t2 mark previous 10.10] destroy .t2 set res } {1.0} test textIndex-25.1 {IndexCountBytesOrdered, bug [3f1f79abcf]} { pack [text .t2] .t2 tag configure elided -elide 1 .t2 insert end "01\n02\n03\n04\n05\n06\n07\n08\n09\n10\n" .t2 insert end "11\n12\n13\n14\n15\n16\n17\n18\n19\n20\n" .t2 insert end "21\n22\n23\n25\n26\n27\n28\n29\n30\n31" .t2 insert end "32\n33\n34\n36\n37\n38\n39" elided # then this used to crash Tk: .t2 see end focus -force .t2 ; # to see the cursor blink destroy .t2 } {} # cleanup rename textimage {} catch {destroy .t} cleanupTests return
{ "pile_set_name": "Github" }
package me.coley.recaf.ui.controls;

import javafx.scene.control.TextField;

/**
 * TextField with a numeric text parser.
 *
 * @author Matt
 */
public class NumericText extends TextField {
	/**
	 * @return Generic number, {@code null} if the text does not represent any supported number format.
	 */
	public Number get() {
		String text = getText();
		if(text.matches("\\d+"))
			return Integer.parseInt(text);
		else if(text.matches("\\d+\\.?\\d*[dD]?")) {
			if(text.toLowerCase().contains("d"))
				return Double.parseDouble(text.substring(0, text.length() - 1));
			else
				return Double.parseDouble(text);
		} else if(text.matches("\\d+\\.?\\d*[fF]"))
			return Float.parseFloat(text.substring(0, text.length() - 1));
		else if(text.matches("\\d+\\.?\\d*[lL]"))
			return Long.parseLong(text.substring(0, text.length() - 1));
		return null;
	}
}
{ "pile_set_name": "Github" }
//********************************************************* // // Copyright (c) Microsoft. All rights reserved. // This code is licensed under the MIT License (MIT). // THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF // ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY // IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR // PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT. // //********************************************************* using System; using System.Collections.Generic; using System.Threading.Tasks; using Windows.UI.Xaml; using Windows.UI.Xaml.Controls; using Windows.UI.Xaml.Navigation; using Windows.ApplicationModel.Contacts; namespace SDKTemplate { /// <summary> /// An empty page that can be used on its own or navigated to within a Frame. /// </summary> public sealed partial class Scenario1_CreateContacts : Page { private MainPage rootPage = MainPage.Current; public Scenario1_CreateContacts() { this.InitializeComponent(); } private async Task<ContactList> _GetContactList() { ContactStore store = await ContactManager.RequestStoreAsync(ContactStoreAccessType.AppContactsReadWrite); if (null == store) { rootPage.NotifyUser("Unable to get a contacts store.", NotifyType.ErrorMessage); return null; } ContactList contactList; IReadOnlyList<ContactList> contactLists = await store.FindContactListsAsync(); if (0 == contactLists.Count) { contactList = await store.CreateContactListAsync("TestContactList"); } else { contactList = contactLists[0]; } return contactList; } private async Task<ContactAnnotationList> _GetContactAnnotationList() { ContactAnnotationStore annotationStore = await ContactManager.RequestAnnotationStoreAsync(ContactAnnotationStoreAccessType.AppAnnotationsReadWrite); if (null == annotationStore) { rootPage.NotifyUser("Unable to get an annotations store.", NotifyType.ErrorMessage); return null; } ContactAnnotationList annotationList; IReadOnlyList<ContactAnnotationList> annotationLists = await annotationStore.FindAnnotationListsAsync(); if (0 == annotationLists.Count) { annotationList = await annotationStore.CreateAnnotationListAsync(); } else { annotationList = annotationLists[0]; } return annotationList; } private async void CreateTestContacts() { // // Creating two test contacts with email address and phone number. // Contact contact1 = new Contact(); contact1.FirstName = "TestContact1"; ContactEmail email1 = new ContactEmail(); email1.Address = "[email protected]"; contact1.Emails.Add(email1); ContactPhone phone1 = new ContactPhone(); phone1.Number = "4255550100"; contact1.Phones.Add(phone1); Contact contact2 = new Contact(); contact2.FirstName = "TestContact2"; ContactEmail email2 = new ContactEmail(); email2.Address = "[email protected]"; email2.Kind = ContactEmailKind.Other; contact2.Emails.Add(email2); ContactPhone phone2 = new ContactPhone(); phone2.Number = "4255550101"; phone2.Kind = ContactPhoneKind.Mobile; contact2.Phones.Add(phone2); // Save the contacts ContactList contactList = await _GetContactList(); if (null == contactList) { return; } await contactList.SaveContactAsync(contact1); await contactList.SaveContactAsync(contact2); // // Create annotations for those test contacts. // Annotation is the contact meta data that allows People App to generate deep links // in the contact card that takes the user back into this app. 
// ContactAnnotationList annotationList = await _GetContactAnnotationList(); if (null == annotationList) { return; } ContactAnnotation annotation = new ContactAnnotation(); annotation.ContactId = contact1.Id; // Remote ID: The identifier of the user relevant for this app. When this app is // launched into from the People App, this id will be provided as context on which user // the operation (e.g. ContactProfile) is for. annotation.RemoteId = "user12"; // The supported operations flags indicate that this app can fulfill these operations // for this contact. These flags are read by apps such as the People App to create deep // links back into this app. This app must also be registered for the relevant // protocols in the Package.appxmanifest (in this case, ms-contact-profile). annotation.SupportedOperations = ContactAnnotationOperations.ContactProfile; if (!await annotationList.TrySaveAnnotationAsync(annotation)) { rootPage.NotifyUser("Failed to save annotation for TestContact1 to the store.", NotifyType.ErrorMessage); return; } annotation = new ContactAnnotation(); annotation.ContactId = contact2.Id; annotation.RemoteId = "user22"; // You can also specify multiple supported operations for a contact in a single // annotation. In this case, this annotation indicates that the user can be // communicated via VOIP call, Video Call, or IM via this application. annotation.SupportedOperations = ContactAnnotationOperations.Message | ContactAnnotationOperations.AudioCall | ContactAnnotationOperations.VideoCall; if (!await annotationList.TrySaveAnnotationAsync(annotation)) { rootPage.NotifyUser("Failed to save annotation for TestContact2 to the store.", NotifyType.ErrorMessage); return; } rootPage.NotifyUser("Sample data created successfully.", NotifyType.StatusMessage); } private async void DeleteTestContacts() { ContactList contactList = null; ContactStore store = await ContactManager.RequestStoreAsync(ContactStoreAccessType.AppContactsReadWrite); if (null != store) { IReadOnlyList<ContactList> contactLists = await store.FindContactListsAsync(); if (0 < contactLists.Count) { contactList = contactLists[0]; } } if (null != contactList) { await contactList.DeleteAsync(); rootPage.NotifyUser("Sample data deleted.", NotifyType.StatusMessage); } else { rootPage.NotifyUser("Could not delete sample data.", NotifyType.ErrorMessage); } } } }
{ "pile_set_name": "Github" }
/** @file Contains the global variables used in LabelMe. */ // Parsed LabelMe XML file. Manipulate this variable with jquery. var LM_xml; // URL of CGI script to submit XML annotation: var SubmitXmlUrl = 'annotationTools/perl/submit.cgi'; // LabelMe username: var username = 'anonymous'; // Boolean indicating whether user is currently signing in (this should be abstracted into class): var username_flag = 0; // Boolean indicating if we will use attributes. This should be read from the URL and set to 0 by default. var use_attributes = 1; // if this is 0, then it will remove all the attributes from the bubble. var use_parts = 1; // if this is 0 disapears the message from the bubble // for now, let's remove the attributes in MT mode. Just in case anybody is trying this. if (getQueryVariable('mode')=='mt'){ //use_attributes=0; //use_parts = 0; } // Boolean indicating whether the control points were edited: var editedControlPoints = 0; // Scalar indicating which polygon is selected; -1 means no polygon is selected var selected_poly = -1; // Class with functions to handle actions/events. var main_handler; // Canvas that renders polygons at rest state. var main_canvas; // Holds image. var main_media; // URL of XHTML namespace. This is needed for generating SVG elements. var xhtmlNS = 'http://www.w3.org/1999/xhtml'; // Website that refers to LabelMe: var ref; // Indicates whether we are in segmentation or polygon mode var drawing_mode = 0; var showImgName = false; // Scribble mode: var scribble_mode = true; var threed_mode = false; var video_mode = false; var bounding_box = false; var bbox_mode = true; var autocomplete_mode = false; var wait_for_input; var edit_popup_open = 0; var num_orig_anno; var global_count = 0; var req_submit; // Indicates if polygon has been edited. var submission_edited = 0; // Allowable user actions: var action_CreatePolygon = 1; var action_RenameExistingObjects = 0; var action_ModifyControlExistingObjects = 0; var action_DeleteExistingObjects = 0; // Which polygons are visible: var view_Existing = 1; var view_Deleted = 0; // Flag for right-hand object list: var view_ObjList = true; // Mechanical Turk variables: var LMbaseurl = 'http://' + window.location.host + window.location.pathname; var MThelpPage = 'annotationTools/html/mt_instructions.html'; var externalSubmitURL = 'https://www.mturk.com/mturk/externalSubmit'; var externalSubmitURLsandbox = 'https://workersandbox.mturk.com/mturk/externalSubmit'; var mt_N = 'inf'; var object_choices = '...'; var loaded_once = false;
{ "pile_set_name": "Github" }
// Copyright 2019 ETH Zurich // Copyright 2020 ETH Zurich, Anapaya Systems // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package json_test import ( "encoding/json" "flag" "io/ioutil" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" "github.com/scionproto/scion/go/lib/common" jsontopo "github.com/scionproto/scion/go/lib/topology/json" ) var ( update = flag.Bool("update", false, "set to true to update golden files") ) func TestLoadRawFromFile(t *testing.T) { referenceTopology := &jsontopo.Topology{ Timestamp: 168562800, TimestampHuman: "May 6 00:00:00 CET 1975", IA: "6-ff00:0:362", MTU: 1472, Attributes: []jsontopo.Attribute{jsontopo.Authoritative, jsontopo.AttrCore, jsontopo.Issuing, jsontopo.Voting}, BorderRouters: map[string]*jsontopo.BRInfo{ "borderrouter6-f00:0:362-1": { InternalAddr: "10.1.0.1:0", CtrlAddr: "10.1.0.1:30098", Interfaces: map[common.IFIDType]*jsontopo.BRInterface{ 91: { Underlay: jsontopo.Underlay{ Public: "192.0.2.1:4997", Remote: "192.0.2.2:4998", Bind: "10.0.0.1", }, Bandwidth: 100000, IA: "6-ff00:0:363", LinkTo: "CORE", MTU: 1472, }, }, }, "borderrouter6-f00:0:362-9": { InternalAddr: "[2001:db8:a0b:12f0::2]:0", CtrlAddr: "[2001:db8:a0b:12f0::2300]:30098", Interfaces: map[common.IFIDType]*jsontopo.BRInterface{ 32: { Underlay: jsontopo.Underlay{ Public: "[2001:db8:a0b:12f0::1]:4997", Remote: "[2001:db8:a0b:12f0::2]:4998", Bind: "2001:db8:a0b:12f0::8", }, Bandwidth: 5000, IA: "6-ff00:0:364", LinkTo: "CHILD", MTU: 4430, }, }, }, }, } if *update { b, err := json.MarshalIndent(referenceTopology, "", " ") require.NoError(t, err) b = append(b, []byte("\n")...) err = ioutil.WriteFile("testdata/topology.json", b, 0644) require.NoError(t, err) } t.Run("unmarshaled struct matches", func(t *testing.T) { loadedTopology, err := jsontopo.LoadFromFile("testdata/topology.json") assert.NoError(t, err) assert.Equal(t, referenceTopology, loadedTopology) }) t.Run("marshaled bytes match", func(t *testing.T) { referenceTopologyBytes, err := ioutil.ReadFile("testdata/topology.json") require.NoError(t, err) topologyBytes, err := json.MarshalIndent(referenceTopology, "", " ") require.NoError(t, err) assert.Equal(t, strings.TrimSpace(string(referenceTopologyBytes)), strings.TrimSpace(string(topologyBytes)), ) }) }
{ "pile_set_name": "Github" }
/* Header describing `ar' archive file format.
   Copyright (C) 1996 Free Software Foundation, Inc.
   This file is part of the GNU C Library.

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */

#ifndef _AR_H
#define _AR_H 1

#include <sys/cdefs.h>

/* Archive files start with the ARMAG identifying string.  Then follows a
   `struct ar_hdr', and as many bytes of member file data as its `ar_size'
   member indicates, for each member file.  */

#define ARMAG	"!<arch>\n"	/* String that begins an archive file.  */
#define SARMAG	8		/* Size of that string.  */

#define ARFMAG	"`\n"		/* String in ar_fmag at end of each header.  */

__BEGIN_DECLS

struct ar_hdr
  {
    char ar_name[16];		/* Member file name, sometimes / terminated. */
    char ar_date[12];		/* File date, decimal seconds since Epoch.  */
    char ar_uid[6], ar_gid[6];	/* User and group IDs, in ASCII decimal.  */
    char ar_mode[8];		/* File mode, in ASCII octal.  */
    char ar_size[10];		/* File size, in ASCII decimal.  */
    char ar_fmag[2];		/* Always contains ARFMAG.  */
  };

__END_DECLS

#endif /* ar.h */
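The header above fully specifies the on-disk layout of an `ar' archive: an 8-byte "!<arch>\n" magic string, then a fixed-width, ASCII-encoded 60-byte header per member. As an illustration only (not part of the original file), here is a minimal Go sketch that reads the first member header and decodes its name and size using the field widths from `struct ar_hdr`; the archive path `libfoo.a` is a hypothetical example.

```go
// Minimal sketch: read the global magic and the first member header of a
// Unix `ar' archive. Field widths follow ar.h: 16+12+6+6+8+10+2 = 60 bytes.
package main

import (
	"fmt"
	"io"
	"os"
	"strconv"
	"strings"
)

func main() {
	f, err := os.Open("libfoo.a") // hypothetical archive path
	if err != nil {
		panic(err)
	}
	defer f.Close()

	magic := make([]byte, 8) // SARMAG bytes of ARMAG
	if _, err := io.ReadFull(f, magic); err != nil || string(magic) != "!<arch>\n" {
		panic("not an ar archive")
	}

	hdr := make([]byte, 60) // sizeof(struct ar_hdr)
	if _, err := io.ReadFull(f, hdr); err != nil {
		panic(err)
	}

	// ar_name occupies bytes 0..16 and is often '/'-terminated;
	// ar_size occupies bytes 48..58 as ASCII decimal.
	name := strings.TrimRight(string(hdr[0:16]), " /")
	size, _ := strconv.Atoi(strings.TrimSpace(string(hdr[48:58])))
	fmt.Printf("first member %q, %d bytes of data follow\n", name, size)
}
```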
{ "pile_set_name": "Github" }
/* * @brief LPC11xx ROM API declarations and functions * * @note * Copyright(C) NXP Semiconductors, 2012 * All rights reserved. * * @par * Software that is described herein is for illustrative purposes only * which provides customers with programming information regarding the * LPC products. This software is supplied "AS IS" without any warranties of * any kind, and NXP Semiconductors and its licensor disclaim any and * all warranties, express or implied, including all implied warranties of * merchantability, fitness for a particular purpose and non-infringement of * intellectual property rights. NXP Semiconductors assumes no responsibility * or liability for the use of the software, conveys no license or rights under any * patent, copyright, mask work right, or any other intellectual property rights in * or to any products. NXP Semiconductors reserves the right to make changes * in the software without notification. NXP Semiconductors also makes no * representation or warranty that such application will be suitable for the * specified use without further testing or modification. * * @par * Permission to use, copy, modify, and distribute this software and its * documentation is hereby granted, under NXP Semiconductors' and its * licensor's relevant copyrights in the software, without fee, provided that it * is used in conjunction with NXP Semiconductors microcontrollers. This * copyright, permission, and disclaimer notice must appear in all copies of * this code. */ #ifndef __ROMAPI_11XX_H_ #define __ROMAPI_11XX_H_ #include "error.h" #ifdef __cplusplus extern "C" { #endif /** @defgroup ROMAPI_11XX CHIP: LPC11XX ROM API declarations and functions * @ingroup CHIP_11XX_Drivers * @{ */ /** * @brief LPC11XX High level ROM API structure */ typedef struct { const uint32_t usbdApiBase; /*!< USBD API function table base address */ const uint32_t reserved0; /*!< Reserved */ const uint32_t candApiBase; /*!< CAN API function table base address */ const uint32_t pwrApiBase; /*!< Power API function table base address */ const uint32_t reserved1; /*!< Reserved */ const uint32_t reserved2; /*!< Reserved */ const uint32_t reserved3; /*!< Reserved */ const uint32_t reserved4; /*!< Reserved */ } LPC_ROM_API_T; /** * @brief LPC11XX IAP_ENTRY API function type */ typedef void (*IAP_ENTRY_T)(unsigned int[], unsigned int[]); static INLINE void iap_entry(unsigned int cmd_param[], unsigned int status_result[]) { ((IAP_ENTRY_T) IAP_ENTRY_LOCATION)(cmd_param, status_result); } /** * @} */ #ifdef __cplusplus } #endif #endif /* __ROMAPI_11XX_H_ */
{ "pile_set_name": "Github" }
<?xml version="1.0"?>
<window xmlns:html="http://www.w3.org/1999/xhtml"
        class="reftest-wait"
        xmlns="http://www.mozilla.org/keymaster/gatekeeper/there.is.only.xul"
        title="Testcase bug 311661 - Evil xul testcase, using display:table-row causes crash [@ nsTableRowGroupFrame::GetFirstRow]">

<html:script><![CDATA[
function doe() {
  document.documentElement.getElementsByTagName('*')[1].style.display='table-row';
  setTimeout(doe2,20);
}

function doe2(){
  document.documentElement.getElementsByTagName('*')[1].style.display='';
  setTimeout(doe,20);
}
]]></html:script>

<button id="button" onclick="doe()" label="Mozilla should not crash, when clicking this button"/>
<div style="display:table-row"/>

<html:script>
function clickbutton() {
  var ev = document.createEvent('MouseEvents');
  ev.initMouseEvent("click", true, true, window, 0, 0, 0, 0, 0,
                    false, false, false, false, 0, null);
  var button = document.getElementById('button');
  button.dispatchEvent(ev);
  setTimeout(function() { document.documentElement.className = "" }, 500);
}

window.addEventListener("load", clickbutton, false);
</html:script>
</window>
{ "pile_set_name": "Github" }
config BR2_PACKAGE_LIBVA_UTILS
	bool "libva-utils"
	depends on BR2_INSTALL_LIBSTDCPP
	depends on !BR2_STATIC_LIBS # libva
	depends on BR2_TOOLCHAIN_HAS_THREADS # libva
	select BR2_PACKAGE_LIBVA
	help
	  Libva-utils is a collection of tests for VA-API
	  (Video Acceleration API)

	  https://01.org/vaapi

comment "libva-utils needs a toolchain w/ C++, threads, dynamic library"
	depends on !BR2_INSTALL_LIBSTDCPP || \
		BR2_STATIC_LIBS || !BR2_TOOLCHAIN_HAS_THREADS
{ "pile_set_name": "Github" }
/*
 * libjingle
 * Copyright 2013, Google Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  1. Redistributions of source code must retain the above copyright notice,
 *     this list of conditions and the following disclaimer.
 *  2. Redistributions in binary form must reproduce the above copyright notice,
 *     this list of conditions and the following disclaimer in the documentation
 *     and/or other materials provided with the distribution.
 *  3. The name of the author may not be used to endorse or promote products
 *     derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
 * EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
 * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
 * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
 * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
 * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

//downloaded from https://code.google.com/p/libjingle/source/browse/trunk/talk/base/?r=273

#ifndef TALK_BASE_IFADDRS_ANDROID_H_
#define TALK_BASE_IFADDRS_ANDROID_H_

#include <stdio.h>
#include <sys/socket.h>

// Implementation of getifaddrs for Android.
// Fills out a list of ifaddr structs (see below) which contain information
// about every network interface available on the host.
// See 'man getifaddrs' on Linux or OS X (nb: it is not a POSIX function).
struct ifaddrs {
  struct ifaddrs* ifa_next;
  char* ifa_name;
  unsigned int ifa_flags;
  struct sockaddr* ifa_addr;
  struct sockaddr* ifa_netmask;
  // Real ifaddrs has broadcast, point to point and data members.
  // We don't need them (yet?).
};

int getifaddrs(struct ifaddrs** result);
void freeifaddrs(struct ifaddrs* addrs);

#endif  // TALK_BASE_IFADDRS_ANDROID_H_
{ "pile_set_name": "Github" }
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ipv6

import (
	"encoding/binary"
	"errors"
	"net"
	"unsafe"
)

var (
	errMissingAddress  = errors.New("missing address")
	errHeaderTooShort  = errors.New("header too short")
	errInvalidConnType = errors.New("invalid conn type")
	errOpNoSupport     = errors.New("operation not supported")
	errNoSuchInterface = errors.New("no such interface")

	nativeEndian binary.ByteOrder
)

func init() {
	i := uint32(1)
	b := (*[4]byte)(unsafe.Pointer(&i))
	if b[0] == 1 {
		nativeEndian = binary.LittleEndian
	} else {
		nativeEndian = binary.BigEndian
	}
}

func boolint(b bool) int {
	if b {
		return 1
	}
	return 0
}

func netAddrToIP16(a net.Addr) net.IP {
	switch v := a.(type) {
	case *net.UDPAddr:
		if ip := v.IP.To16(); ip != nil && ip.To4() == nil {
			return ip
		}
	case *net.IPAddr:
		if ip := v.IP.To16(); ip != nil && ip.To4() == nil {
			return ip
		}
	}
	return nil
}
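The init function above detects host byte order by storing 1 in a uint32 and inspecting the lowest-addressed byte through an unsafe pointer. Below is a standalone sketch of the same probe, separate from the ipv6 package; the helper name `hostEndian` is made up for the example.

```go
// Standalone sketch of the byte-order probe used in the helper file above:
// write 1 into a uint32 and check which byte it lands in.
package main

import (
	"encoding/binary"
	"fmt"
	"unsafe"
)

func hostEndian() binary.ByteOrder { // hypothetical helper name
	i := uint32(1)
	b := (*[4]byte)(unsafe.Pointer(&i))
	if b[0] == 1 {
		return binary.LittleEndian // least significant byte has the lowest address
	}
	return binary.BigEndian
}

func main() {
	// Encode a known value with the detected order to see the byte layout.
	var buf [4]byte
	hostEndian().PutUint32(buf[:], 0x01020304)
	fmt.Println(hostEndian(), buf)
}
```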
{ "pile_set_name": "Github" }
package unused import ( "fmt" "go/ast" "go/token" "go/types" "io" "strings" "sync" "sync/atomic" "golang.org/x/tools/go/analysis" "honnef.co/go/tools/code" "honnef.co/go/tools/go/types/typeutil" "honnef.co/go/tools/internal/passes/buildir" "honnef.co/go/tools/ir" "honnef.co/go/tools/lint" ) // The graph we construct omits nodes along a path that do not // contribute any new information to the solution. For example, the // full graph for a function with a receiver would be Func -> // Signature -> Var -> Type. However, since signatures cannot be // unused, and receivers are always considered used, we can compact // the graph down to Func -> Type. This makes the graph smaller, but // harder to debug. // TODO(dh): conversions between structs mark fields as used, but the // conversion itself isn't part of that subgraph. even if the function // containing the conversion is unused, the fields will be marked as // used. // TODO(dh): we cannot observe function calls in assembly files. /* - packages use: - (1.1) exported named types (unless in package main) - (1.2) exported functions (unless in package main) - (1.3) exported variables (unless in package main) - (1.4) exported constants (unless in package main) - (1.5) init functions - (1.6) functions exported to cgo - (1.7) the main function iff in the main package - (1.8) symbols linked via go:linkname - named types use: - (2.1) exported methods - (2.2) the type they're based on - (2.3) all their aliases. we can't easily track uses of aliases because go/types turns them into uses of the aliased types. assume that if a type is used, so are all of its aliases. - (2.4) the pointer type. this aids with eagerly implementing interfaces. if a method that implements an interface is defined on a pointer receiver, and the pointer type is never used, but the named type is, then we still want to mark the method as used. - variables and constants use: - their types - functions use: - (4.1) all their arguments, return parameters and receivers - (4.2) anonymous functions defined beneath them - (4.3) closures and bound methods. this implements a simplified model where a function is used merely by being referenced, even if it is never called. that way we don't have to keep track of closures escaping functions. - (4.4) functions they return. we assume that someone else will call the returned function - (4.5) functions/interface methods they call - types they instantiate or convert to - (4.7) fields they access - (4.8) types of all instructions - (4.9) package-level variables they assign to iff in tests (sinks for benchmarks) - conversions use: - (5.1) when converting between two equivalent structs, the fields in either struct use each other. the fields are relevant for the conversion, but only if the fields are also accessed outside the conversion. - (5.2) when converting to or from unsafe.Pointer, mark all fields as used. - structs use: - (6.1) fields of type NoCopy sentinel - (6.2) exported fields - (6.3) embedded fields that help implement interfaces (either fully implements it, or contributes required methods) (recursively) - (6.4) embedded fields that have exported methods (recursively) - (6.5) embedded structs that have exported fields (recursively) - (7.1) field accesses use fields - (7.2) fields use their types - (8.0) How we handle interfaces: - (8.1) We do not technically care about interfaces that only consist of exported methods. Exported methods on concrete types are always marked as used. - Any concrete type implements all known interfaces. 
Even if it isn't assigned to any interfaces in our code, the user may receive a value of the type and expect to pass it back to us through an interface. Concrete types use their methods that implement interfaces. If the type is used, it uses those methods. Otherwise, it doesn't. This way, types aren't incorrectly marked reachable through the edge from method to type. - (8.3) All interface methods are marked as used, even if they never get called. This is to accommodate sum types (unexported interface method that must exist but never gets called.) - (8.4) All embedded interfaces are marked as used. This is an extension of 8.3, but we have to explicitly track embedded interfaces because in a chain C->B->A, B wouldn't be marked as used by 8.3 just because it contributes A's methods to C. - Inherent uses: - thunks and other generated wrappers call the real function - (9.2) variables use their types - (9.3) types use their underlying and element types - (9.4) conversions use the type they convert to - (9.5) instructions use their operands - (9.6) instructions use their operands' types - (9.7) variable _reads_ use variables, writes do not, except in tests - (9.8) runtime functions that may be called from user code via the compiler - const groups: (10.1) if one constant out of a block of constants is used, mark all of them used. a lot of the time, unused constants exist for the sake of completeness. See also https://github.com/dominikh/go-tools/issues/365 - (11.1) anonymous struct types use all their fields. we cannot deduplicate struct types, as that leads to order-dependent reportings. we can't not deduplicate struct types while still tracking fields, because then each instance of the unnamed type in the data flow chain will get its own fields, causing false positives. Thus, we only accurately track fields of named struct types, and assume that unnamed struct types use all their fields. - Differences in whole program mode: - (e2) types aim to implement all exported interfaces from all packages - (e3) exported identifiers aren't automatically used. for fields and methods this poses extra issues due to reflection. We assume that all exported fields are used. We also maintain a list of known reflection-based method callers. */ func assert(b bool) { if !b { panic("failed assertion") } } func typString(obj types.Object) string { switch obj := obj.(type) { case *types.Func: return "func" case *types.Var: if obj.IsField() { return "field" } return "var" case *types.Const: return "const" case *types.TypeName: return "type" default: return "identifier" } } // /usr/lib/go/src/runtime/proc.go:433:6: func badmorestackg0 is unused (U1000) // Functions defined in the Go runtime that may be called through // compiler magic or via assembly. 
var runtimeFuncs = map[string]bool{ // The first part of the list is copied from // cmd/compile/internal/gc/builtin.go, var runtimeDecls "newobject": true, "panicindex": true, "panicslice": true, "panicdivide": true, "panicmakeslicelen": true, "throwinit": true, "panicwrap": true, "gopanic": true, "gorecover": true, "goschedguarded": true, "printbool": true, "printfloat": true, "printint": true, "printhex": true, "printuint": true, "printcomplex": true, "printstring": true, "printpointer": true, "printiface": true, "printeface": true, "printslice": true, "printnl": true, "printsp": true, "printlock": true, "printunlock": true, "concatstring2": true, "concatstring3": true, "concatstring4": true, "concatstring5": true, "concatstrings": true, "cmpstring": true, "intstring": true, "slicebytetostring": true, "slicebytetostringtmp": true, "slicerunetostring": true, "stringtoslicebyte": true, "stringtoslicerune": true, "slicecopy": true, "slicestringcopy": true, "decoderune": true, "countrunes": true, "convI2I": true, "convT16": true, "convT32": true, "convT64": true, "convTstring": true, "convTslice": true, "convT2E": true, "convT2Enoptr": true, "convT2I": true, "convT2Inoptr": true, "assertE2I": true, "assertE2I2": true, "assertI2I": true, "assertI2I2": true, "panicdottypeE": true, "panicdottypeI": true, "panicnildottype": true, "ifaceeq": true, "efaceeq": true, "fastrand": true, "makemap64": true, "makemap": true, "makemap_small": true, "mapaccess1": true, "mapaccess1_fast32": true, "mapaccess1_fast64": true, "mapaccess1_faststr": true, "mapaccess1_fat": true, "mapaccess2": true, "mapaccess2_fast32": true, "mapaccess2_fast64": true, "mapaccess2_faststr": true, "mapaccess2_fat": true, "mapassign": true, "mapassign_fast32": true, "mapassign_fast32ptr": true, "mapassign_fast64": true, "mapassign_fast64ptr": true, "mapassign_faststr": true, "mapiterinit": true, "mapdelete": true, "mapdelete_fast32": true, "mapdelete_fast64": true, "mapdelete_faststr": true, "mapiternext": true, "mapclear": true, "makechan64": true, "makechan": true, "chanrecv1": true, "chanrecv2": true, "chansend1": true, "closechan": true, "writeBarrier": true, "typedmemmove": true, "typedmemclr": true, "typedslicecopy": true, "selectnbsend": true, "selectnbrecv": true, "selectnbrecv2": true, "selectsetpc": true, "selectgo": true, "block": true, "makeslice": true, "makeslice64": true, "growslice": true, "memmove": true, "memclrNoHeapPointers": true, "memclrHasPointers": true, "memequal": true, "memequal8": true, "memequal16": true, "memequal32": true, "memequal64": true, "memequal128": true, "int64div": true, "uint64div": true, "int64mod": true, "uint64mod": true, "float64toint64": true, "float64touint64": true, "float64touint32": true, "int64tofloat64": true, "uint64tofloat64": true, "uint32tofloat64": true, "complex128div": true, "racefuncenter": true, "racefuncenterfp": true, "racefuncexit": true, "raceread": true, "racewrite": true, "racereadrange": true, "racewriterange": true, "msanread": true, "msanwrite": true, "x86HasPOPCNT": true, "x86HasSSE41": true, "arm64HasATOMICS": true, // The second part of the list is extracted from assembly code in // the standard library, with the exception of the runtime package itself "abort": true, "aeshashbody": true, "args": true, "asminit": true, "badctxt": true, "badmcall2": true, "badmcall": true, "badmorestackg0": true, "badmorestackgsignal": true, "badsignal2": true, "callbackasm1": true, "callCfunction": true, "cgocallback_gofunc": true, "cgocallbackg": true, "checkgoarm": true, 
"check": true, "debugCallCheck": true, "debugCallWrap": true, "emptyfunc": true, "entersyscall": true, "exit": true, "exits": true, "exitsyscall": true, "externalthreadhandler": true, "findnull": true, "goexit1": true, "gostring": true, "i386_set_ldt": true, "_initcgo": true, "init_thread_tls": true, "ldt0setup": true, "libpreinit": true, "load_g": true, "morestack": true, "mstart": true, "nacl_sysinfo": true, "nanotimeQPC": true, "nanotime": true, "newosproc0": true, "newproc": true, "newstack": true, "noted": true, "nowQPC": true, "osinit": true, "printf": true, "racecallback": true, "reflectcallmove": true, "reginit": true, "rt0_go": true, "save_g": true, "schedinit": true, "setldt": true, "settls": true, "sighandler": true, "sigprofNonGo": true, "sigtrampgo": true, "_sigtramp": true, "sigtramp": true, "stackcheck": true, "syscall_chdir": true, "syscall_chroot": true, "syscall_close": true, "syscall_dup2": true, "syscall_execve": true, "syscall_exit": true, "syscall_fcntl": true, "syscall_forkx": true, "syscall_gethostname": true, "syscall_getpid": true, "syscall_ioctl": true, "syscall_pipe": true, "syscall_rawsyscall6": true, "syscall_rawSyscall6": true, "syscall_rawsyscall": true, "syscall_RawSyscall": true, "syscall_rawsysvicall6": true, "syscall_setgid": true, "syscall_setgroups": true, "syscall_setpgid": true, "syscall_setsid": true, "syscall_setuid": true, "syscall_syscall6": true, "syscall_syscall": true, "syscall_Syscall": true, "syscall_sysvicall6": true, "syscall_wait4": true, "syscall_write": true, "traceback": true, "tstart": true, "usplitR0": true, "wbBufFlush": true, "write": true, } type pkg struct { Fset *token.FileSet Files []*ast.File Pkg *types.Package TypesInfo *types.Info TypesSizes types.Sizes IR *ir.Package SrcFuncs []*ir.Function } type Checker struct { WholeProgram bool Debug io.Writer mu sync.Mutex initialPackages map[*types.Package]struct{} allPackages map[*types.Package]struct{} graph *Graph } func NewChecker(wholeProgram bool) *Checker { return &Checker{ initialPackages: map[*types.Package]struct{}{}, allPackages: map[*types.Package]struct{}{}, WholeProgram: wholeProgram, } } func (c *Checker) Analyzer() *analysis.Analyzer { name := "U1000" if c.WholeProgram { name = "U1001" } return &analysis.Analyzer{ Name: name, Doc: "Unused code", Run: c.Run, Requires: []*analysis.Analyzer{buildir.Analyzer}, } } func (c *Checker) Run(pass *analysis.Pass) (interface{}, error) { c.mu.Lock() if c.graph == nil { c.graph = NewGraph() c.graph.wholeProgram = c.WholeProgram c.graph.fset = pass.Fset } var visit func(pkg *types.Package) visit = func(pkg *types.Package) { if _, ok := c.allPackages[pkg]; ok { return } c.allPackages[pkg] = struct{}{} for _, imp := range pkg.Imports() { visit(imp) } } visit(pass.Pkg) c.initialPackages[pass.Pkg] = struct{}{} c.mu.Unlock() irpkg := pass.ResultOf[buildir.Analyzer].(*buildir.IR) pkg := &pkg{ Fset: pass.Fset, Files: pass.Files, Pkg: pass.Pkg, TypesInfo: pass.TypesInfo, TypesSizes: pass.TypesSizes, IR: irpkg.Pkg, SrcFuncs: irpkg.SrcFuncs, } c.processPkg(c.graph, pkg) return nil, nil } func (c *Checker) ProblemObject(fset *token.FileSet, obj types.Object) lint.Problem { name := obj.Name() if sig, ok := obj.Type().(*types.Signature); ok && sig.Recv() != nil { switch sig.Recv().Type().(type) { case *types.Named, *types.Pointer: typ := types.TypeString(sig.Recv().Type(), func(*types.Package) string { return "" }) if len(typ) > 0 && typ[0] == '*' { name = fmt.Sprintf("(%s).%s", typ, obj.Name()) } else if len(typ) > 0 { name = 
fmt.Sprintf("%s.%s", typ, obj.Name()) } } } checkName := "U1000" if c.WholeProgram { checkName = "U1001" } return lint.Problem{ Pos: lint.DisplayPosition(fset, obj.Pos()), Message: fmt.Sprintf("%s %s is unused", typString(obj), name), Check: checkName, } } func (c *Checker) Result() []types.Object { out := c.results() out2 := make([]types.Object, 0, len(out)) for _, v := range out { if _, ok := c.initialPackages[v.Pkg()]; !ok { continue } out2 = append(out2, v) } return out2 } func (c *Checker) debugf(f string, v ...interface{}) { if c.Debug != nil { fmt.Fprintf(c.Debug, f, v...) } } func (graph *Graph) quieten(node *Node) { if node.seen { return } switch obj := node.obj.(type) { case *types.Named: for i := 0; i < obj.NumMethods(); i++ { m := obj.Method(i) if node, ok := graph.nodeMaybe(m); ok { node.quiet = true } } case *types.Struct: for i := 0; i < obj.NumFields(); i++ { if node, ok := graph.nodeMaybe(obj.Field(i)); ok { node.quiet = true } } case *types.Interface: for i := 0; i < obj.NumExplicitMethods(); i++ { m := obj.ExplicitMethod(i) if node, ok := graph.nodeMaybe(m); ok { node.quiet = true } } } } func (c *Checker) results() []types.Object { if c.graph == nil { // We never analyzed any packages return nil } var out []types.Object if c.WholeProgram { var ifaces []*types.Interface var notIfaces []types.Type // implement as many interfaces as possible c.graph.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: if t.NumMethods() > 0 { ifaces = append(ifaces, t) } default: if _, ok := t.Underlying().(*types.Interface); !ok { notIfaces = append(notIfaces, t) } } }) for pkg := range c.allPackages { for _, iface := range interfacesFromExportData(pkg) { if iface.NumMethods() > 0 { ifaces = append(ifaces, iface) } } } ctx := &context{ g: c.graph, seenTypes: &c.graph.seenTypes, } // (8.0) handle interfaces // (e2) types aim to implement all exported interfaces from all packages for _, t := range notIfaces { // OPT(dh): it is unfortunate that we do not have access // to a populated method set at this point. ms := types.NewMethodSet(t) for _, iface := range ifaces { if sels, ok := c.graph.implements(t, iface, ms); ok { for _, sel := range sels { c.graph.useMethod(ctx, t, sel, t, edgeImplements) } } } } } if c.Debug != nil { debugNode := func(node *Node) { if node.obj == nil { c.debugf("n%d [label=\"Root\"];\n", node.id) } else { c.debugf("n%d [label=%q];\n", node.id, fmt.Sprintf("(%T) %s", node.obj, node.obj)) } for _, e := range node.used { for i := edgeKind(1); i < 64; i++ { if e.kind.is(1 << i) { c.debugf("n%d -> n%d [label=%q];\n", node.id, e.node.id, edgeKind(1<<i)) } } } } c.debugf("digraph{\n") debugNode(c.graph.Root) for _, v := range c.graph.Nodes { debugNode(v) } c.graph.TypeNodes.Iterate(func(key types.Type, value interface{}) { debugNode(value.(*Node)) }) c.debugf("}\n") } c.graph.color(c.graph.Root) // if a node is unused, don't report any of the node's // children as unused. for example, if a function is unused, // don't flag its receiver. if a named type is unused, don't // flag its methods. 
for _, v := range c.graph.Nodes { c.graph.quieten(v) } c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { c.graph.quieten(value.(*Node)) }) report := func(node *Node) { if node.seen { return } if node.quiet { c.debugf("n%d [color=purple];\n", node.id) return } c.debugf("n%d [color=red];\n", node.id) switch obj := node.obj.(type) { case *types.Var: // don't report unnamed variables (interface embedding) if obj.Name() != "" || obj.IsField() { out = append(out, obj) } return case types.Object: if obj.Name() != "_" { out = append(out, obj) } return } c.debugf("n%d [color=gray];\n", node.id) } for _, v := range c.graph.Nodes { report(v) } c.graph.TypeNodes.Iterate(func(_ types.Type, value interface{}) { report(value.(*Node)) }) return out } func (c *Checker) processPkg(graph *Graph, pkg *pkg) { if pkg.Pkg.Path() == "unsafe" { return } graph.entry(pkg) } func objNodeKeyFor(fset *token.FileSet, obj types.Object) objNodeKey { var kind objType switch obj.(type) { case *types.PkgName: kind = otPkgName case *types.Const: kind = otConst case *types.TypeName: kind = otTypeName case *types.Var: kind = otVar case *types.Func: kind = otFunc case *types.Label: kind = otLabel case *types.Builtin: kind = otBuiltin case *types.Nil: kind = otNil default: panic(fmt.Sprintf("unreachable: %T", obj)) } position := fset.PositionFor(obj.Pos(), false) position.Column = 0 position.Offset = 0 return objNodeKey{ position: position, kind: kind, name: obj.Name(), } } type objType uint8 const ( otPkgName objType = iota otConst otTypeName otVar otFunc otLabel otBuiltin otNil ) // An objNodeKey describes a types.Object node in the graph. // // Due to test variants we may end up with multiple instances of the // same object, which is why we have to deduplicate based on their // source position. And because export data lacks column information, // we also have to incorporate the object's string representation in // the key. // // Previously we used the object's full string representation // (types.ObjectString), but that causes a significant amount of // allocations. Currently we're using the object's type and name, in // the hope that it is impossible for two objects to have the same // type, name and file position. type objNodeKey struct { position token.Position kind objType name string } type Graph struct { // accessed atomically nodeOffset uint64 // Safe for concurrent use fset *token.FileSet Root *Node seenTypes typeutil.Map // read-only wholeProgram bool // need synchronisation mu sync.Mutex TypeNodes typeutil.Map Nodes map[interface{}]*Node objNodes map[objNodeKey]*Node } type context struct { g *Graph pkg *pkg seenFns map[string]struct{} seenTypes *typeutil.Map nodeCounter uint64 } func NewGraph() *Graph { g := &Graph{ Nodes: map[interface{}]*Node{}, objNodes: map[objNodeKey]*Node{}, } g.Root = g.newNode(&context{}, nil) return g } func (g *Graph) color(root *Node) { if root.seen { return } root.seen = true for _, e := range root.used { g.color(e.node) } } type ConstGroup struct { // give the struct a size to get unique pointers _ byte } func (ConstGroup) String() string { return "const group" } type edge struct { node *Node kind edgeKind } type Node struct { obj interface{} id uint64 mu sync.Mutex used []edge // set during final graph walk if node is reachable seen bool // a parent node (e.g. 
the struct type containing a field) is // already unused, don't report children quiet bool } func (g *Graph) nodeMaybe(obj types.Object) (*Node, bool) { g.mu.Lock() defer g.mu.Unlock() if node, ok := g.Nodes[obj]; ok { return node, true } return nil, false } func (g *Graph) node(ctx *context, obj interface{}) (node *Node, new bool) { g.mu.Lock() defer g.mu.Unlock() switch obj := obj.(type) { case types.Type: if v := g.TypeNodes.At(obj); v != nil { return v.(*Node), false } node := g.newNode(ctx, obj) g.TypeNodes.Set(obj, node) return node, true case types.Object: if node, ok := g.Nodes[obj]; ok { return node, false } key := objNodeKeyFor(g.fset, obj) if onode, ok := g.objNodes[key]; ok { return onode, false } node = g.newNode(ctx, obj) g.Nodes[obj] = node g.objNodes[key] = node return node, true default: if node, ok := g.Nodes[obj]; ok { return node, false } node = g.newNode(ctx, obj) g.Nodes[obj] = node return node, true } } func (g *Graph) newNode(ctx *context, obj interface{}) *Node { ctx.nodeCounter++ return &Node{ obj: obj, id: ctx.nodeCounter, } } func (n *Node) use(node *Node, kind edgeKind) { n.mu.Lock() defer n.mu.Unlock() assert(node != nil) n.used = append(n.used, edge{node: node, kind: kind}) } // isIrrelevant reports whether an object's presence in the graph is // of any relevance. A lot of objects will never have outgoing edges, // nor meaningful incoming ones. Examples are basic types and empty // signatures, among many others. // // Dropping these objects should have no effect on correctness, but // may improve performance. It also helps with debugging, as it // greatly reduces the size of the graph. func isIrrelevant(obj interface{}) bool { if obj, ok := obj.(types.Object); ok { switch obj := obj.(type) { case *types.Var: if obj.IsField() { // We need to track package fields return false } if obj.Pkg() != nil && obj.Parent() == obj.Pkg().Scope() { // We need to track package-level variables return false } return isIrrelevant(obj.Type()) default: return false } } if T, ok := obj.(types.Type); ok { switch T := T.(type) { case *types.Array: return isIrrelevant(T.Elem()) case *types.Slice: return isIrrelevant(T.Elem()) case *types.Basic: return true case *types.Tuple: for i := 0; i < T.Len(); i++ { if !isIrrelevant(T.At(i).Type()) { return false } } return true case *types.Signature: if T.Recv() != nil { return false } for i := 0; i < T.Params().Len(); i++ { if !isIrrelevant(T.Params().At(i)) { return false } } for i := 0; i < T.Results().Len(); i++ { if !isIrrelevant(T.Results().At(i)) { return false } } return true case *types.Interface: return T.NumMethods() == 0 && T.NumEmbeddeds() == 0 case *types.Pointer: return isIrrelevant(T.Elem()) case *types.Map: return isIrrelevant(T.Key()) && isIrrelevant(T.Elem()) case *types.Struct: return T.NumFields() == 0 case *types.Chan: return isIrrelevant(T.Elem()) default: return false } } return false } func (ctx *context) see(obj interface{}) *Node { if isIrrelevant(obj) { return nil } assert(obj != nil) // add new node to graph node, _ := ctx.g.node(ctx, obj) return node } func (ctx *context) use(used, by interface{}, kind edgeKind) { if isIrrelevant(used) { return } assert(used != nil) if obj, ok := by.(types.Object); ok && obj.Pkg() != nil { if !ctx.g.wholeProgram && obj.Pkg() != ctx.pkg.Pkg { return } } usedNode, new := ctx.g.node(ctx, used) assert(!new) if by == nil { ctx.g.Root.use(usedNode, kind) } else { byNode, new := ctx.g.node(ctx, by) assert(!new) byNode.use(usedNode, kind) } } func (ctx *context) seeAndUse(used, by 
interface{}, kind edgeKind) *Node { node := ctx.see(used) ctx.use(used, by, kind) return node } // trackExportedIdentifier reports whether obj should be considered // used due to being exported, checking various conditions that affect // the decision. func (g *Graph) trackExportedIdentifier(ctx *context, obj types.Object) bool { if !obj.Exported() { // object isn't exported, the question is moot return false } path := g.fset.Position(obj.Pos()).Filename if g.wholeProgram { // Example functions without "Output:" comments aren't being // run and thus don't show up in the graph. if strings.HasSuffix(path, "_test.go") && strings.HasPrefix(obj.Name(), "Example") { return true } // whole program mode tracks exported identifiers accurately return false } if ctx.pkg.Pkg.Name() == "main" && !strings.HasSuffix(path, "_test.go") { // exported identifiers in package main can't be imported. // However, test functions can be called, and xtest packages // even have access to exported identifiers. return false } if strings.HasSuffix(path, "_test.go") { if strings.HasPrefix(obj.Name(), "Test") || strings.HasPrefix(obj.Name(), "Benchmark") || strings.HasPrefix(obj.Name(), "Example") { return true } return false } return true } func (g *Graph) entry(pkg *pkg) { no := atomic.AddUint64(&g.nodeOffset, 1) ctx := &context{ g: g, pkg: pkg, nodeCounter: no * 1e9, seenFns: map[string]struct{}{}, } if g.wholeProgram { ctx.seenTypes = &g.seenTypes } else { ctx.seenTypes = &typeutil.Map{} } scopes := map[*types.Scope]*ir.Function{} for _, fn := range pkg.SrcFuncs { if fn.Object() != nil { scope := fn.Object().(*types.Func).Scope() scopes[scope] = fn } } for _, f := range pkg.Files { for _, cg := range f.Comments { for _, c := range cg.List { if strings.HasPrefix(c.Text, "//go:linkname ") { // FIXME(dh): we're looking at all comments. The // compiler only looks at comments in the // left-most column. The intention probably is to // only look at top-level comments. // (1.8) packages use symbols linked via go:linkname fields := strings.Fields(c.Text) if len(fields) == 3 { if m, ok := pkg.IR.Members[fields[1]]; ok { var obj types.Object switch m := m.(type) { case *ir.Global: obj = m.Object() case *ir.Function: obj = m.Object() default: panic(fmt.Sprintf("unhandled type: %T", m)) } assert(obj != nil) ctx.seeAndUse(obj, nil, edgeLinkname) } } } } } } surroundingFunc := func(obj types.Object) *ir.Function { scope := obj.Parent() for scope != nil { if fn := scopes[scope]; fn != nil { return fn } scope = scope.Parent() } return nil } // IR form won't tell us about locally scoped types that aren't // being used. Walk the list of Defs to get all named types. // // IR form also won't tell us about constants; use Defs and Uses // to determine which constants exist and which are being used. 
for _, obj := range pkg.TypesInfo.Defs { switch obj := obj.(type) { case *types.TypeName: // types are being handled by walking the AST case *types.Const: ctx.see(obj) fn := surroundingFunc(obj) if fn == nil && g.trackExportedIdentifier(ctx, obj) { // (1.4) packages use exported constants (unless in package main) ctx.use(obj, nil, edgeExportedConstant) } g.typ(ctx, obj.Type(), nil) ctx.seeAndUse(obj.Type(), obj, edgeType) } } // Find constants being used inside functions, find sinks in tests for _, fn := range pkg.SrcFuncs { if fn.Object() != nil { ctx.see(fn.Object()) } node := fn.Source() if node == nil { continue } ast.Inspect(node, func(node ast.Node) bool { switch node := node.(type) { case *ast.Ident: obj, ok := pkg.TypesInfo.Uses[node] if !ok { return true } switch obj := obj.(type) { case *types.Const: ctx.seeAndUse(obj, owningObject(fn), edgeUsedConstant) } case *ast.AssignStmt: for _, expr := range node.Lhs { ident, ok := expr.(*ast.Ident) if !ok { continue } obj := pkg.TypesInfo.ObjectOf(ident) if obj == nil { continue } path := g.fset.File(obj.Pos()).Name() if strings.HasSuffix(path, "_test.go") { if obj.Parent() != nil && obj.Parent().Parent() != nil && obj.Parent().Parent().Parent() == nil { // object's scope is the package, whose // parent is the file, whose parent is nil // (4.9) functions use package-level variables they assign to iff in tests (sinks for benchmarks) // (9.7) variable _reads_ use variables, writes do not, except in tests ctx.seeAndUse(obj, owningObject(fn), edgeTestSink) } } } } return true }) } // Find constants being used in non-function contexts for _, obj := range pkg.TypesInfo.Uses { _, ok := obj.(*types.Const) if !ok { continue } ctx.seeAndUse(obj, nil, edgeUsedConstant) } var fns []*types.Func var fn *types.Func var stack []ast.Node for _, f := range pkg.Files { ast.Inspect(f, func(n ast.Node) bool { if n == nil { pop := stack[len(stack)-1] stack = stack[:len(stack)-1] if _, ok := pop.(*ast.FuncDecl); ok { fns = fns[:len(fns)-1] if len(fns) == 0 { fn = nil } else { fn = fns[len(fns)-1] } } return true } stack = append(stack, n) switch n := n.(type) { case *ast.FuncDecl: fn = pkg.TypesInfo.ObjectOf(n.Name).(*types.Func) fns = append(fns, fn) ctx.see(fn) case *ast.GenDecl: switch n.Tok { case token.CONST: groups := code.GroupSpecs(pkg.Fset, n.Specs) for _, specs := range groups { if len(specs) > 1 { cg := &ConstGroup{} ctx.see(cg) for _, spec := range specs { for _, name := range spec.(*ast.ValueSpec).Names { obj := pkg.TypesInfo.ObjectOf(name) // (10.1) const groups ctx.seeAndUse(obj, cg, edgeConstGroup) ctx.use(cg, obj, edgeConstGroup) } } } } case token.VAR: for _, spec := range n.Specs { v := spec.(*ast.ValueSpec) for _, name := range v.Names { T := pkg.TypesInfo.TypeOf(name) if fn != nil { ctx.seeAndUse(T, fn, edgeVarDecl) } else { // TODO(dh): we likely want to make // the type used by the variable, not // the package containing the // variable. But then we have to take // special care of blank identifiers. ctx.seeAndUse(T, nil, edgeVarDecl) } g.typ(ctx, T, nil) } } case token.TYPE: for _, spec := range n.Specs { // go/types doesn't provide a way to go from a // types.Named to the named type it was based on // (the t1 in type t2 t1). Therefore we walk the // AST and process GenDecls. 
// // (2.2) named types use the type they're based on v := spec.(*ast.TypeSpec) T := pkg.TypesInfo.TypeOf(v.Type) obj := pkg.TypesInfo.ObjectOf(v.Name) ctx.see(obj) ctx.see(T) ctx.use(T, obj, edgeType) g.typ(ctx, obj.Type(), nil) g.typ(ctx, T, nil) if v.Assign != 0 { aliasFor := obj.(*types.TypeName).Type() // (2.3) named types use all their aliases. we can't easily track uses of aliases if isIrrelevant(aliasFor) { // We do not track the type this is an // alias for (for example builtins), so // just mark the alias used. // // FIXME(dh): what about aliases declared inside functions? ctx.use(obj, nil, edgeAlias) } else { ctx.see(aliasFor) ctx.seeAndUse(obj, aliasFor, edgeAlias) } } } } } return true }) } for _, m := range pkg.IR.Members { switch m := m.(type) { case *ir.NamedConst: // nothing to do, we collect all constants from Defs case *ir.Global: if m.Object() != nil { ctx.see(m.Object()) if g.trackExportedIdentifier(ctx, m.Object()) { // (1.3) packages use exported variables (unless in package main) ctx.use(m.Object(), nil, edgeExportedVariable) } } case *ir.Function: mObj := owningObject(m) if mObj != nil { ctx.see(mObj) } //lint:ignore SA9003 handled implicitly if m.Name() == "init" { // (1.5) packages use init functions // // This is handled implicitly. The generated init // function has no object, thus everything in it will // be owned by the package. } // This branch catches top-level functions, not methods. if m.Object() != nil && g.trackExportedIdentifier(ctx, m.Object()) { // (1.2) packages use exported functions (unless in package main) ctx.use(mObj, nil, edgeExportedFunction) } if m.Name() == "main" && pkg.Pkg.Name() == "main" { // (1.7) packages use the main function iff in the main package ctx.use(mObj, nil, edgeMainFunction) } if pkg.Pkg.Path() == "runtime" && runtimeFuncs[m.Name()] { // (9.8) runtime functions that may be called from user code via the compiler ctx.use(mObj, nil, edgeRuntimeFunction) } if m.Source() != nil { doc := m.Source().(*ast.FuncDecl).Doc if doc != nil { for _, cmt := range doc.List { if strings.HasPrefix(cmt.Text, "//go:cgo_export_") { // (1.6) packages use functions exported to cgo ctx.use(mObj, nil, edgeCgoExported) } } } } g.function(ctx, m) case *ir.Type: if m.Object() != nil { ctx.see(m.Object()) if g.trackExportedIdentifier(ctx, m.Object()) { // (1.1) packages use exported named types (unless in package main) ctx.use(m.Object(), nil, edgeExportedType) } } g.typ(ctx, m.Type(), nil) default: panic(fmt.Sprintf("unreachable: %T", m)) } } if !g.wholeProgram { // When not in whole program mode we reset seenTypes after each package, // which means g.seenTypes only contains types of // interest to us. In whole program mode, we're better off // processing all interfaces at once, globally, both for // performance reasons and because in whole program mode we // actually care about all interfaces, not just the subset // that has unexported methods. 
var ifaces []*types.Interface var notIfaces []types.Type ctx.seenTypes.Iterate(func(t types.Type, _ interface{}) { switch t := t.(type) { case *types.Interface: // OPT(dh): (8.1) we only need interfaces that have unexported methods ifaces = append(ifaces, t) default: if _, ok := t.Underlying().(*types.Interface); !ok { notIfaces = append(notIfaces, t) } } }) // (8.0) handle interfaces for _, t := range notIfaces { ms := pkg.IR.Prog.MethodSets.MethodSet(t) for _, iface := range ifaces { if sels, ok := g.implements(t, iface, ms); ok { for _, sel := range sels { g.useMethod(ctx, t, sel, t, edgeImplements) } } } } } } func (g *Graph) useMethod(ctx *context, t types.Type, sel *types.Selection, by interface{}, kind edgeKind) { obj := sel.Obj() path := sel.Index() assert(obj != nil) if len(path) > 1 { base := code.Dereference(t).Underlying().(*types.Struct) for _, idx := range path[:len(path)-1] { next := base.Field(idx) // (6.3) structs use embedded fields that help implement interfaces ctx.see(base) ctx.seeAndUse(next, base, edgeProvidesMethod) base, _ = code.Dereference(next.Type()).Underlying().(*types.Struct) } } ctx.seeAndUse(obj, by, kind) } func owningObject(fn *ir.Function) types.Object { if fn.Object() != nil { return fn.Object() } if fn.Parent() != nil { return owningObject(fn.Parent()) } return nil } func (g *Graph) function(ctx *context, fn *ir.Function) { if fn.Package() != nil && fn.Package() != ctx.pkg.IR { return } name := fn.RelString(nil) if _, ok := ctx.seenFns[name]; ok { return } ctx.seenFns[name] = struct{}{} // (4.1) functions use all their arguments, return parameters and receivers g.signature(ctx, fn.Signature, owningObject(fn)) g.instructions(ctx, fn) for _, anon := range fn.AnonFuncs { // (4.2) functions use anonymous functions defined beneath them // // This fact is expressed implicitly. Anonymous functions have // no types.Object, so their owner is the surrounding // function. g.function(ctx, anon) } } func (g *Graph) typ(ctx *context, t types.Type, parent types.Type) { if g.wholeProgram { g.mu.Lock() } if ctx.seenTypes.At(t) != nil { if g.wholeProgram { g.mu.Unlock() } return } if g.wholeProgram { g.mu.Unlock() } if t, ok := t.(*types.Named); ok && t.Obj().Pkg() != nil { if t.Obj().Pkg() != ctx.pkg.Pkg { return } } if g.wholeProgram { g.mu.Lock() } ctx.seenTypes.Set(t, struct{}{}) if g.wholeProgram { g.mu.Unlock() } if isIrrelevant(t) { return } ctx.see(t) switch t := t.(type) { case *types.Struct: for i := 0; i < t.NumFields(); i++ { ctx.see(t.Field(i)) if t.Field(i).Exported() { // (6.2) structs use exported fields ctx.use(t.Field(i), t, edgeExportedField) } else if t.Field(i).Name() == "_" { ctx.use(t.Field(i), t, edgeBlankField) } else if isNoCopyType(t.Field(i).Type()) { // (6.1) structs use fields of type NoCopy sentinel ctx.use(t.Field(i), t, edgeNoCopySentinel) } else if parent == nil { // (11.1) anonymous struct types use all their fields. ctx.use(t.Field(i), t, edgeAnonymousStruct) } if t.Field(i).Anonymous() { // (e3) exported identifiers aren't automatically used. if !g.wholeProgram { // does the embedded field contribute exported methods to the method set? 
T := t.Field(i).Type() if _, ok := T.Underlying().(*types.Pointer); !ok { // An embedded field is addressable, so check // the pointer type to get the full method set T = types.NewPointer(T) } ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(T) for j := 0; j < ms.Len(); j++ { if ms.At(j).Obj().Exported() { // (6.4) structs use embedded fields that have exported methods (recursively) ctx.use(t.Field(i), t, edgeExtendsExportedMethodSet) break } } } seen := map[*types.Struct]struct{}{} var hasExportedField func(t types.Type) bool hasExportedField = func(T types.Type) bool { t, ok := code.Dereference(T).Underlying().(*types.Struct) if !ok { return false } if _, ok := seen[t]; ok { return false } seen[t] = struct{}{} for i := 0; i < t.NumFields(); i++ { field := t.Field(i) if field.Exported() { return true } if field.Embedded() && hasExportedField(field.Type()) { return true } } return false } // does the embedded field contribute exported fields? if hasExportedField(t.Field(i).Type()) { // (6.5) structs use embedded structs that have exported fields (recursively) ctx.use(t.Field(i), t, edgeExtendsExportedFields) } } g.variable(ctx, t.Field(i)) } case *types.Basic: // Nothing to do case *types.Named: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Underlying(), t, edgeUnderlyingType) ctx.seeAndUse(t.Obj(), t, edgeTypeName) ctx.seeAndUse(t, t.Obj(), edgeNamedType) // (2.4) named types use the pointer type if _, ok := t.Underlying().(*types.Interface); !ok && t.NumMethods() > 0 { ctx.seeAndUse(types.NewPointer(t), t, edgePointerType) } for i := 0; i < t.NumMethods(); i++ { ctx.see(t.Method(i)) // don't use trackExportedIdentifier here, we care about // all exported methods, even in package main or in tests. if t.Method(i).Exported() && !g.wholeProgram { // (2.1) named types use exported methods ctx.use(t.Method(i), t, edgeExportedMethod) } g.function(ctx, ctx.pkg.IR.Prog.FuncValue(t.Method(i))) } g.typ(ctx, t.Underlying(), t) case *types.Slice: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) g.typ(ctx, t.Elem(), nil) case *types.Map: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) // (9.3) types use their underlying and element types ctx.seeAndUse(t.Key(), t, edgeKeyType) g.typ(ctx, t.Elem(), nil) g.typ(ctx, t.Key(), nil) case *types.Signature: g.signature(ctx, t, nil) case *types.Interface: for i := 0; i < t.NumMethods(); i++ { m := t.Method(i) // (8.3) All interface methods are marked as used ctx.seeAndUse(m, t, edgeInterfaceMethod) ctx.seeAndUse(m.Type().(*types.Signature), m, edgeSignature) g.signature(ctx, m.Type().(*types.Signature), nil) } for i := 0; i < t.NumEmbeddeds(); i++ { tt := t.EmbeddedType(i) // (8.4) All embedded interfaces are marked as used ctx.seeAndUse(tt, t, edgeEmbeddedInterface) } case *types.Array: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) g.typ(ctx, t.Elem(), nil) case *types.Pointer: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) g.typ(ctx, t.Elem(), nil) case *types.Chan: // (9.3) types use their underlying and element types ctx.seeAndUse(t.Elem(), t, edgeElementType) g.typ(ctx, t.Elem(), nil) case *types.Tuple: for i := 0; i < t.Len(); i++ { // (9.3) types use their underlying and element types ctx.seeAndUse(t.At(i).Type(), t, edgeTupleElement|edgeType) g.typ(ctx, t.At(i).Type(), nil) } default: panic(fmt.Sprintf("unreachable: %T", t)) } } func (g 
*Graph) variable(ctx *context, v *types.Var) { // (9.2) variables use their types ctx.seeAndUse(v.Type(), v, edgeType) g.typ(ctx, v.Type(), nil) } func (g *Graph) signature(ctx *context, sig *types.Signature, fn types.Object) { var user interface{} = fn if fn == nil { user = sig ctx.see(sig) } if sig.Recv() != nil { ctx.seeAndUse(sig.Recv().Type(), user, edgeReceiver|edgeType) g.typ(ctx, sig.Recv().Type(), nil) } for i := 0; i < sig.Params().Len(); i++ { param := sig.Params().At(i) ctx.seeAndUse(param.Type(), user, edgeFunctionArgument|edgeType) g.typ(ctx, param.Type(), nil) } for i := 0; i < sig.Results().Len(); i++ { param := sig.Results().At(i) ctx.seeAndUse(param.Type(), user, edgeFunctionResult|edgeType) g.typ(ctx, param.Type(), nil) } } func (g *Graph) instructions(ctx *context, fn *ir.Function) { fnObj := owningObject(fn) for _, b := range fn.Blocks { for _, instr := range b.Instrs { ops := instr.Operands(nil) switch instr.(type) { case *ir.Store: // (9.7) variable _reads_ use variables, writes do not ops = ops[1:] case *ir.DebugRef: ops = nil } for _, arg := range ops { walkPhi(*arg, func(v ir.Value) { switch v := v.(type) { case *ir.Function: // (4.3) functions use closures and bound methods. // (4.5) functions use functions they call // (9.5) instructions use their operands // (4.4) functions use functions they return. we assume that someone else will call the returned function if owningObject(v) != nil { ctx.seeAndUse(owningObject(v), fnObj, edgeInstructionOperand) } g.function(ctx, v) case *ir.Const: // (9.6) instructions use their operands' types ctx.seeAndUse(v.Type(), fnObj, edgeType) g.typ(ctx, v.Type(), nil) case *ir.Global: if v.Object() != nil { // (9.5) instructions use their operands ctx.seeAndUse(v.Object(), fnObj, edgeInstructionOperand) } } }) } if v, ok := instr.(ir.Value); ok { if _, ok := v.(*ir.Range); !ok { // See https://github.com/golang/go/issues/19670 // (4.8) instructions use their types // (9.4) conversions use the type they convert to ctx.seeAndUse(v.Type(), fnObj, edgeType) g.typ(ctx, v.Type(), nil) } } switch instr := instr.(type) { case *ir.Field: st := instr.X.Type().Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access ctx.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.FieldAddr: st := code.Dereference(instr.X.Type()).Underlying().(*types.Struct) field := st.Field(instr.Field) // (4.7) functions use fields they access ctx.seeAndUse(field, fnObj, edgeFieldAccess) case *ir.Store: // nothing to do, handled generically by operands case *ir.Call: c := instr.Common() if !c.IsInvoke() { // handled generically as an instruction operand if g.wholeProgram { // (e3) special case known reflection-based method callers switch code.CallName(c) { case "net/rpc.Register", "net/rpc.RegisterName", "(*net/rpc.Server).Register", "(*net/rpc.Server).RegisterName": var arg ir.Value switch code.CallName(c) { case "net/rpc.Register": arg = c.Args[0] case "net/rpc.RegisterName": arg = c.Args[1] case "(*net/rpc.Server).Register": arg = c.Args[1] case "(*net/rpc.Server).RegisterName": arg = c.Args[2] } walkPhi(arg, func(v ir.Value) { if v, ok := v.(*ir.MakeInterface); ok { walkPhi(v.X, func(vv ir.Value) { ms := ctx.pkg.IR.Prog.MethodSets.MethodSet(vv.Type()) for i := 0; i < ms.Len(); i++ { if ms.At(i).Obj().Exported() { g.useMethod(ctx, vv.Type(), ms.At(i), fnObj, edgeNetRPCRegister) } } }) } }) } } } else { // (4.5) functions use functions/interface methods they call ctx.seeAndUse(c.Method, fnObj, edgeInterfaceCall) } case 
*ir.Return: // nothing to do, handled generically by operands case *ir.ChangeType: // conversion type handled generically s1, ok1 := code.Dereference(instr.Type()).Underlying().(*types.Struct) s2, ok2 := code.Dereference(instr.X.Type()).Underlying().(*types.Struct) if ok1 && ok2 { // Converting between two structs. The fields are // relevant for the conversion, but only if the // fields are also used outside of the conversion. // Mark fields as used by each other. assert(s1.NumFields() == s2.NumFields()) for i := 0; i < s1.NumFields(); i++ { ctx.see(s1.Field(i)) ctx.see(s2.Field(i)) // (5.1) when converting between two equivalent structs, the fields in // either struct use each other. the fields are relevant for the // conversion, but only if the fields are also accessed outside the // conversion. ctx.seeAndUse(s1.Field(i), s2.Field(i), edgeStructConversion) ctx.seeAndUse(s2.Field(i), s1.Field(i), edgeStructConversion) } } case *ir.MakeInterface: // nothing to do, handled generically by operands case *ir.Slice: // nothing to do, handled generically by operands case *ir.RunDefers: // nothing to do, the deferred functions are already marked use by defering them. case *ir.Convert: // to unsafe.Pointer if typ, ok := instr.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { if ptr, ok := instr.X.Type().Underlying().(*types.Pointer); ok { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. ctx.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) } } } } // from unsafe.Pointer if typ, ok := instr.X.Type().(*types.Basic); ok && typ.Kind() == types.UnsafePointer { if ptr, ok := instr.Type().Underlying().(*types.Pointer); ok { if st, ok := ptr.Elem().Underlying().(*types.Struct); ok { for i := 0; i < st.NumFields(); i++ { // (5.2) when converting to or from unsafe.Pointer, mark all fields as used. ctx.seeAndUse(st.Field(i), fnObj, edgeUnsafeConversion) } } } } case *ir.TypeAssert: // nothing to do, handled generically by instruction // type (possibly a tuple, which contains the asserted // to type). 
redundantly handled by the type of // ir.Extract, too case *ir.MakeClosure: // nothing to do, handled generically by operands case *ir.Alloc: // nothing to do case *ir.UnOp: // nothing to do case *ir.BinOp: // nothing to do case *ir.If: // nothing to do case *ir.Jump: // nothing to do case *ir.Unreachable: // nothing to do case *ir.IndexAddr: // nothing to do case *ir.Extract: // nothing to do case *ir.Panic: // nothing to do case *ir.DebugRef: // nothing to do case *ir.BlankStore: // nothing to do case *ir.Phi: // nothing to do case *ir.Sigma: // nothing to do case *ir.MakeMap: // nothing to do case *ir.MapUpdate: // nothing to do case *ir.MapLookup: // nothing to do case *ir.StringLookup: // nothing to do case *ir.MakeSlice: // nothing to do case *ir.Send: // nothing to do case *ir.MakeChan: // nothing to do case *ir.Range: // nothing to do case *ir.Next: // nothing to do case *ir.Index: // nothing to do case *ir.Select: // nothing to do case *ir.ChangeInterface: // nothing to do case *ir.Load: // nothing to do case *ir.Go: // nothing to do case *ir.Defer: // nothing to do case *ir.Parameter: // nothing to do case *ir.Const: // nothing to do case *ir.Recv: // nothing to do case *ir.TypeSwitch: // nothing to do case *ir.ConstantSwitch: // nothing to do default: panic(fmt.Sprintf("unreachable: %T", instr)) } } } } // isNoCopyType reports whether a type represents the NoCopy sentinel // type. The NoCopy type is a named struct with no fields and exactly // one method `func Lock()` that is empty. // // FIXME(dh): currently we're not checking that the function body is // empty. func isNoCopyType(typ types.Type) bool { st, ok := typ.Underlying().(*types.Struct) if !ok { return false } if st.NumFields() != 0 { return false } named, ok := typ.(*types.Named) if !ok { return false } if named.NumMethods() != 1 { return false } meth := named.Method(0) if meth.Name() != "Lock" { return false } sig := meth.Type().(*types.Signature) if sig.Params().Len() != 0 || sig.Results().Len() != 0 { return false } return true } func walkPhi(v ir.Value, fn func(v ir.Value)) { phi, ok := v.(*ir.Phi) if !ok { fn(v) return } seen := map[ir.Value]struct{}{} var impl func(v *ir.Phi) impl = func(v *ir.Phi) { if _, ok := seen[v]; ok { return } seen[v] = struct{}{} for _, e := range v.Edges { if ev, ok := e.(*ir.Phi); ok { impl(ev) } else { fn(e) } } } impl(phi) } func interfacesFromExportData(pkg *types.Package) []*types.Interface { var out []*types.Interface scope := pkg.Scope() for _, name := range scope.Names() { obj := scope.Lookup(name) out = append(out, interfacesFromObject(obj)...) } return out } func interfacesFromObject(obj types.Object) []*types.Interface { var out []*types.Interface switch obj := obj.(type) { case *types.Func: sig := obj.Type().(*types.Signature) for i := 0; i < sig.Results().Len(); i++ { out = append(out, interfacesFromObject(sig.Results().At(i))...) } for i := 0; i < sig.Params().Len(); i++ { out = append(out, interfacesFromObject(sig.Params().At(i))...) } case *types.TypeName: if named, ok := obj.Type().(*types.Named); ok { for i := 0; i < named.NumMethods(); i++ { out = append(out, interfacesFromObject(named.Method(i))...) } if iface, ok := named.Underlying().(*types.Interface); ok { out = append(out, iface) } } case *types.Var: // No call to Underlying here. We want unnamed interfaces // only. Named interfaces are gotten directly from the // package's scope. 
if iface, ok := obj.Type().(*types.Interface); ok { out = append(out, iface) } case *types.Const: case *types.Builtin: default: panic(fmt.Sprintf("unhandled type: %T", obj)) } return out }
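The checker above builds a use graph (see/seeAndUse create nodes and edges), colors everything reachable from the root, and then reports whatever the walk never touched. Below is a minimal, self-contained sketch of that mark-and-report idea; the node and graph types and all names are simplified stand-ins for illustration, not the checker's actual Graph/Node API.

package main

import "fmt"

// node is a simplified stand-in for the checker's *Node: an object plus its
// outgoing "uses" edges and a seen flag set during the final walk.
type node struct {
	name string
	used []*node
	seen bool
}

type graph struct {
	root  *node
	nodes []*node
}

// color marks every node reachable from n, mirroring Graph.color above.
func (g *graph) color(n *node) {
	if n.seen {
		return
	}
	n.seen = true
	for _, e := range n.used {
		g.color(e)
	}
}

// unused returns the names of all nodes the walk never reached.
func (g *graph) unused() []string {
	var out []string
	for _, n := range g.nodes {
		if !n.seen {
			out = append(out, n.name)
		}
	}
	return out
}

func main() {
	exported := &node{name: "ExportedFunc"}
	helper := &node{name: "helper"}
	dead := &node{name: "deadHelper"}
	exported.used = append(exported.used, helper) // ExportedFunc calls helper

	// The root uses exported identifiers, analogous to fact (1.2) above.
	g := &graph{
		root:  &node{name: "root", used: []*node{exported}},
		nodes: []*node{exported, helper, dead},
	}
	g.color(g.root)
	fmt.Println(g.unused()) // [deadHelper]
}

The real checker additionally "quietens" children of unused parents (an unused type's methods, an unused struct's fields) so that only the outermost dead object is reported.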
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ // This file was autogenerated by go-to-protobuf. Do not edit it manually! syntax = 'proto2'; package k8s.io.apimachinery.pkg.apis.meta.v1; import "k8s.io/apimachinery/pkg/runtime/generated.proto"; import "k8s.io/apimachinery/pkg/runtime/schema/generated.proto"; import "k8s.io/apimachinery/pkg/util/intstr/generated.proto"; // Package-wide variables from generator "generated". option go_package = "v1"; // APIGroup contains the name, the supported versions, and the preferred version // of a group. message APIGroup { // name is the name of the group. optional string name = 1; // versions are the versions supported in this group. repeated GroupVersionForDiscovery versions = 2; // preferredVersion is the version preferred by the API server, which // probably is the storage version. // +optional optional GroupVersionForDiscovery preferredVersion = 3; // a map of client CIDR to server address that is serving this group. // This is to help clients reach servers in the most network-efficient way possible. // Clients can use the appropriate server address as per the CIDR that they match. // In case of multiple matches, clients should use the longest matching CIDR. // The server returns only those CIDRs that it thinks that the client can match. // For example: the master will return an internal IP CIDR only, if the client reaches the server using an internal IP. // Server looks at X-Forwarded-For header or X-Real-Ip header or request.RemoteAddr (in that order) to get the client IP. repeated ServerAddressByClientCIDR serverAddressByClientCIDRs = 4; } // APIGroupList is a list of APIGroup, to allow clients to discover the API at // /apis. message APIGroupList { // groups is a list of APIGroup. repeated APIGroup groups = 1; } // APIResource specifies the name of a resource and whether it is namespaced. message APIResource { // name is the name of the resource. optional string name = 1; // namespaced indicates if a resource is namespaced or not. optional bool namespaced = 2; // kind is the kind for the resource (e.g. 'Foo' is the kind for a resource 'foo') optional string kind = 3; // verbs is a list of supported kube verbs (this includes get, list, watch, create, // update, patch, delete, deletecollection, and proxy) optional Verbs verbs = 4; // shortNames is a list of suggested short names of the resource. repeated string shortNames = 5; } // APIResourceList is a list of APIResource, it is used to expose the name of the // resources supported in a specific group and version, and if the resource // is namespaced. message APIResourceList { // groupVersion is the group and version this APIResourceList is for. optional string groupVersion = 1; // resources contains the name of the resources and if they are namespaced. repeated APIResource resources = 2; } // APIVersions lists the versions that are available, to allow clients to // discover the API at /api, which is the root path of the legacy v1 API. 
// // +protobuf.options.(gogoproto.goproto_stringer)=false message APIVersions { // versions are the api versions that are available. repeated string versions = 1; // a map of client CIDR to server address that is serving this group. // This is to help clients reach servers in the most network-efficient way possible. // Clients can use the appropriate server address as per the CIDR that they match. // In case of multiple matches, clients should use the longest matching CIDR. // The server returns only those CIDRs that it thinks that the client can match. // For example: the master will return an internal IP CIDR only, if the client reaches the server using an internal IP. // Server looks at X-Forwarded-For header or X-Real-Ip header or request.RemoteAddr (in that order) to get the client IP. repeated ServerAddressByClientCIDR serverAddressByClientCIDRs = 2; } // DeleteOptions may be provided when deleting an API object. message DeleteOptions { // The duration in seconds before the object should be deleted. Value must be non-negative integer. // The value zero indicates delete immediately. If this value is nil, the default grace period for the // specified type will be used. // Defaults to a per object value if not specified. zero means delete immediately. // +optional optional int64 gracePeriodSeconds = 1; // Must be fulfilled before a deletion is carried out. If not possible, a 409 Conflict status will be // returned. // +optional optional Preconditions preconditions = 2; // Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. // Should the dependent objects be orphaned. If true/false, the "orphan" // finalizer will be added to/removed from the object's finalizers list. // Either this field or PropagationPolicy may be set, but not both. // +optional optional bool orphanDependents = 3; // Whether and how garbage collection will be performed. // Either this field or OrphanDependents may be set, but not both. // The default policy is decided by the existing finalizer set in the // metadata.finalizers and the resource-specific default policy. // +optional optional string propagationPolicy = 4; } // Duration is a wrapper around time.Duration which supports correct // marshaling to YAML and JSON. In particular, it marshals into strings, which // can be used as map keys in json. message Duration { optional int64 duration = 1; } // ExportOptions is the query options to the standard REST get call. message ExportOptions { // Should this value be exported. Export strips fields that a user can not specify. optional bool export = 1; // Should the export be exact. Exact export maintains cluster-specific fields like 'Namespace'. optional bool exact = 2; } // GetOptions is the standard query options to the standard REST get call. message GetOptions { // When specified: // - if unset, then the result is returned from remote storage based on quorum-read flag; // - if it's 0, then we simply return what we currently have in cache, no guarantee; // - if set to non zero, then the result is at least as fresh as given rv. optional string resourceVersion = 1; } // GroupKind specifies a Group and a Kind, but does not force a version. This is useful for identifying // concepts during lookup stages without having partially valid types // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupKind { optional string group = 1; optional string kind = 2; } // GroupResource specifies a Group and a Resource, but does not force a version. 
This is useful for identifying // concepts during lookup stages without having partially valid types // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupResource { optional string group = 1; optional string resource = 2; } // GroupVersion contains the "group" and the "version", which uniquely identifies the API. // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupVersion { optional string group = 1; optional string version = 2; } // GroupVersion contains the "group/version" and "version" string of a version. // It is made a struct to keep extensibility. message GroupVersionForDiscovery { // groupVersion specifies the API group and version in the form "group/version" optional string groupVersion = 1; // version specifies the version in the form of "version". This is to save // the clients the trouble of splitting the GroupVersion. optional string version = 2; } // GroupVersionKind unambiguously identifies a kind. It doesn't anonymously include GroupVersion // to avoid automatic coersion. It doesn't use a GroupVersion to avoid custom marshalling // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupVersionKind { optional string group = 1; optional string version = 2; optional string kind = 3; } // GroupVersionResource unambiguously identifies a resource. It doesn't anonymously include GroupVersion // to avoid automatic coersion. It doesn't use a GroupVersion to avoid custom marshalling // // +protobuf.options.(gogoproto.goproto_stringer)=false message GroupVersionResource { optional string group = 1; optional string version = 2; optional string resource = 3; } // A label selector is a label query over a set of resources. The result of matchLabels and // matchExpressions are ANDed. An empty label selector matches all objects. A null // label selector matches no objects. message LabelSelector { // matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels // map is equivalent to an element of matchExpressions, whose key field is "key", the // operator is "In", and the values array contains only "value". The requirements are ANDed. // +optional map<string, string> matchLabels = 1; // matchExpressions is a list of label selector requirements. The requirements are ANDed. // +optional repeated LabelSelectorRequirement matchExpressions = 2; } // A label selector requirement is a selector that contains values, a key, and an operator that // relates the key and values. message LabelSelectorRequirement { // key is the label key that the selector applies to. optional string key = 1; // operator represents a key's relationship to a set of values. // Valid operators ard In, NotIn, Exists and DoesNotExist. optional string operator = 2; // values is an array of string values. If the operator is In or NotIn, // the values array must be non-empty. If the operator is Exists or DoesNotExist, // the values array must be empty. This array is replaced during a strategic // merge patch. // +optional repeated string values = 3; } // ListMeta describes metadata that synthetic resources must have, including lists and // various status objects. A resource may have only one of {ObjectMeta, ListMeta}. message ListMeta { // SelfLink is a URL representing this object. // Populated by the system. // Read-only. // +optional optional string selfLink = 1; // String that identifies the server's internal version of this object that // can be used by clients to determine when objects have changed. 
// Value must be treated as opaque by clients and passed unmodified back to the server. // Populated by the system. // Read-only. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#concurrency-control-and-consistency // +optional optional string resourceVersion = 2; } // ListOptions is the query options to a standard REST list call. message ListOptions { // A selector to restrict the list of returned objects by their labels. // Defaults to everything. // +optional optional string labelSelector = 1; // A selector to restrict the list of returned objects by their fields. // Defaults to everything. // +optional optional string fieldSelector = 2; // Watch for changes to the described resources and return them as a stream of // add, update, and remove notifications. Specify resourceVersion. // +optional optional bool watch = 3; // When specified with a watch call, shows changes that occur after that particular version of a resource. // Defaults to changes from the beginning of history. // When specified for list: // - if unset, then the result is returned from remote storage based on quorum-read flag; // - if it's 0, then we simply return what we currently have in cache, no guarantee; // - if set to non zero, then the result is at least as fresh as given rv. // +optional optional string resourceVersion = 4; // Timeout for the list/watch call. // +optional optional int64 timeoutSeconds = 5; } // ObjectMeta is metadata that all persisted resources must have, which includes all objects // users must create. message ObjectMeta { // Name must be unique within a namespace. Is required when creating resources, although // some resources may allow a client to request the generation of an appropriate name // automatically. Name is primarily intended for creation idempotence and configuration // definition. // Cannot be updated. // More info: http://kubernetes.io/docs/user-guide/identifiers#names // +optional optional string name = 1; // GenerateName is an optional prefix, used by the server, to generate a unique // name ONLY IF the Name field has not been provided. // If this field is used, the name returned to the client will be different // than the name passed. This value will also be combined with a unique suffix. // The provided value has the same validation rules as the Name field, // and may be truncated by the length of the suffix required to make the value // unique on the server. // // If this field is specified and the generated name exists, the server will // NOT return a 409 - instead, it will either return 201 Created or 500 with Reason // ServerTimeout indicating a unique name could not be found in the time allotted, and the client // should retry (optionally after the time indicated in the Retry-After header). // // Applied only if Name is not specified. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#idempotency // +optional optional string generateName = 2; // Namespace defines the space within each name must be unique. An empty namespace is // equivalent to the "default" namespace, but "default" is the canonical representation. // Not all objects are required to be scoped to a namespace - the value of this field for // those objects will be empty. // // Must be a DNS_LABEL. // Cannot be updated. // More info: http://kubernetes.io/docs/user-guide/namespaces // +optional optional string namespace = 3; // SelfLink is a URL representing this object. // Populated by the system. // Read-only. 
// +optional optional string selfLink = 4; // UID is the unique in time and space value for this object. It is typically generated by // the server on successful creation of a resource and is not allowed to change on PUT // operations. // // Populated by the system. // Read-only. // More info: http://kubernetes.io/docs/user-guide/identifiers#uids // +optional optional string uid = 5; // An opaque value that represents the internal version of this object that can // be used by clients to determine when objects have changed. May be used for optimistic // concurrency, change detection, and the watch operation on a resource or set of resources. // Clients must treat these values as opaque and passed unmodified back to the server. // They may only be valid for a particular resource or set of resources. // // Populated by the system. // Read-only. // Value must be treated as opaque by clients and . // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#concurrency-control-and-consistency // +optional optional string resourceVersion = 6; // A sequence number representing a specific generation of the desired state. // Populated by the system. Read-only. // +optional optional int64 generation = 7; // CreationTimestamp is a timestamp representing the server time when this object was // created. It is not guaranteed to be set in happens-before order across separate operations. // Clients may not set this value. It is represented in RFC3339 form and is in UTC. // // Populated by the system. // Read-only. // Null for lists. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata // +optional optional Time creationTimestamp = 8; // DeletionTimestamp is RFC 3339 date and time at which this resource will be deleted. This // field is set by the server when a graceful deletion is requested by the user, and is not // directly settable by a client. The resource is expected to be deleted (no longer visible // from resource lists, and not reachable by name) after the time in this field. Once set, // this value may not be unset or be set further into the future, although it may be shortened // or the resource may be deleted prior to this time. For example, a user may request that // a pod is deleted in 30 seconds. The Kubelet will react by sending a graceful termination // signal to the containers in the pod. After that 30 seconds, the Kubelet will send a hard // termination signal (SIGKILL) to the container and after cleanup, remove the pod from the // API. In the presence of network partitions, this object may still exist after this // timestamp, until an administrator or automated process can determine the resource is // fully terminated. // If not set, graceful deletion of the object has not been requested. // // Populated by the system when a graceful deletion is requested. // Read-only. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#metadata // +optional optional Time deletionTimestamp = 9; // Number of seconds allowed for this object to gracefully terminate before // it will be removed from the system. Only set when deletionTimestamp is also set. // May only be shortened. // Read-only. // +optional optional int64 deletionGracePeriodSeconds = 10; // Map of string keys and values that can be used to organize and categorize // (scope and select) objects. May match selectors of replication controllers // and services. 
// More info: http://kubernetes.io/docs/user-guide/labels // +optional map<string, string> labels = 11; // Annotations is an unstructured key value map stored with a resource that may be // set by external tools to store and retrieve arbitrary metadata. They are not // queryable and should be preserved when modifying objects. // More info: http://kubernetes.io/docs/user-guide/annotations // +optional map<string, string> annotations = 12; // List of objects depended by this object. If ALL objects in the list have // been deleted, this object will be garbage collected. If this object is managed by a controller, // then an entry in this list will point to this controller, with the controller field set to true. // There cannot be more than one managing controller. // +optional repeated OwnerReference ownerReferences = 13; // Must be empty before the object is deleted from the registry. Each entry // is an identifier for the responsible component that will remove the entry // from the list. If the deletionTimestamp of the object is non-nil, entries // in this list can only be removed. // +optional repeated string finalizers = 14; // The name of the cluster which the object belongs to. // This is used to distinguish resources with same name and namespace in different clusters. // This field is not set anywhere right now and apiserver is going to ignore it if set in create or update request. // +optional optional string clusterName = 15; } // OwnerReference contains enough information to let you identify an owning // object. Currently, an owning object must be in the same namespace, so there // is no namespace field. message OwnerReference { // API version of the referent. optional string apiVersion = 5; // Kind of the referent. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds optional string kind = 1; // Name of the referent. // More info: http://kubernetes.io/docs/user-guide/identifiers#names optional string name = 3; // UID of the referent. // More info: http://kubernetes.io/docs/user-guide/identifiers#uids optional string uid = 4; // If true, this reference points to the managing controller. // +optional optional bool controller = 6; // If true, AND if the owner has the "foregroundDeletion" finalizer, then // the owner cannot be deleted from the key-value store until this // reference is removed. // Defaults to false. // To set this field, a user needs "delete" permission of the owner, // otherwise 422 (Unprocessable Entity) will be returned. // +optional optional bool blockOwnerDeletion = 7; } // Preconditions must be fulfilled before an operation (update, delete, etc.) is carried out. message Preconditions { // Specifies the target UID. // +optional optional string uid = 1; } // RootPaths lists the paths available at root. // For example: "/healthz", "/apis". message RootPaths { // paths are the paths available at root. repeated string paths = 1; } // ServerAddressByClientCIDR helps the client to determine the server address that they should use, depending on the clientCIDR that they match. message ServerAddressByClientCIDR { // The CIDR with which clients can match their IP to figure out the server address that they should use. optional string clientCIDR = 1; // Address of this server, suitable for a client that matches the above CIDR. // This can be a hostname, hostname:port, IP or IP:port. optional string serverAddress = 2; } // Status is a return value for calls that don't return other objects. message Status { // Standard list metadata. 
// More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds // +optional optional ListMeta metadata = 1; // Status of the operation. // One of: "Success" or "Failure". // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#spec-and-status // +optional optional string status = 2; // A human-readable description of the status of this operation. // +optional optional string message = 3; // A machine-readable description of why this operation is in the // "Failure" status. If this value is empty there // is no information available. A Reason clarifies an HTTP status // code but does not override it. // +optional optional string reason = 4; // Extended data associated with the reason. Each reason may define its // own extended details. This field is optional and the data returned // is not guaranteed to conform to any schema except that defined by // the reason type. // +optional optional StatusDetails details = 5; // Suggested HTTP return code for this status, 0 if not set. // +optional optional int32 code = 6; } // StatusCause provides more information about an api.Status failure, including // cases when multiple errors are encountered. message StatusCause { // A machine-readable description of the cause of the error. If this value is // empty there is no information available. // +optional optional string reason = 1; // A human-readable description of the cause of the error. This field may be // presented as-is to a reader. // +optional optional string message = 2; // The field of the resource that has caused this error, as named by its JSON // serialization. May include dot and postfix notation for nested attributes. // Arrays are zero-indexed. Fields may appear more than once in an array of // causes due to fields having multiple errors. // Optional. // // Examples: // "name" - the field "name" on the current resource // "items[0].name" - the field "name" on the first array entry in "items" // +optional optional string field = 3; } // StatusDetails is a set of additional properties that MAY be set by the // server to provide additional information about a response. The Reason // field of a Status object defines what attributes will be set. Clients // must ignore fields that do not match the defined type of each attribute, // and should assume that any attribute may be empty, invalid, or under // defined. message StatusDetails { // The name attribute of the resource associated with the status StatusReason // (when there is a single name which can be described). // +optional optional string name = 1; // The group attribute of the resource associated with the status StatusReason. // +optional optional string group = 2; // The kind attribute of the resource associated with the status StatusReason. // On some operations may differ from the requested resource Kind. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds // +optional optional string kind = 3; // The Causes array includes more details associated with the StatusReason // failure. Not all StatusReasons may provide detailed causes. // +optional repeated StatusCause causes = 4; // If specified, the time in seconds before the operation should be retried. // +optional optional int32 retryAfterSeconds = 5; } // Time is a wrapper around time.Time which supports correct // marshaling to YAML and JSON. Wrappers are provided for many // of the factory methods that the time package offers. 
// // +protobuf.options.marshal=false // +protobuf.as=Timestamp // +protobuf.options.(gogoproto.goproto_stringer)=false message Time { // Represents seconds of UTC time since Unix epoch // 1970-01-01T00:00:00Z. Must be from from 0001-01-01T00:00:00Z to // 9999-12-31T23:59:59Z inclusive. optional int64 seconds = 1; // Non-negative fractions of a second at nanosecond resolution. Negative // second values with fractions must still have non-negative nanos values // that count forward in time. Must be from 0 to 999,999,999 // inclusive. This field may be limited in precision depending on context. optional int32 nanos = 2; } // Timestamp is a struct that is equivalent to Time, but intended for // protobuf marshalling/unmarshalling. It is generated into a serialization // that matches Time. Do not use in Go structs. message Timestamp { // Represents seconds of UTC time since Unix epoch // 1970-01-01T00:00:00Z. Must be from from 0001-01-01T00:00:00Z to // 9999-12-31T23:59:59Z inclusive. optional int64 seconds = 1; // Non-negative fractions of a second at nanosecond resolution. Negative // second values with fractions must still have non-negative nanos values // that count forward in time. Must be from 0 to 999,999,999 // inclusive. This field may be limited in precision depending on context. optional int32 nanos = 2; } // TypeMeta describes an individual object in an API response or request // with strings representing the type of the object and its API schema version. // Structures that are versioned or persisted should inline TypeMeta. message TypeMeta { // Kind is a string value representing the REST resource this object represents. // Servers may infer this from the endpoint the client submits requests to. // Cannot be updated. // In CamelCase. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#types-kinds // +optional optional string kind = 1; // APIVersion defines the versioned schema of this representation of an object. // Servers should convert recognized schemas to the latest internal value, and // may reject unrecognized values. // More info: http://releases.k8s.io/HEAD/docs/devel/api-conventions.md#resources // +optional optional string apiVersion = 2; } // Verbs masks the value so protobuf can generate // // +protobuf.nullable=true // +protobuf.options.(gogoproto.goproto_stringer)=false message Verbs { // items, if empty, will result in an empty slice repeated string items = 1; } // Event represents a single event to a watched resource. // // +protobuf=true message WatchEvent { optional string type = 1; // Object is: // * If Type is Added or Modified: the new state of the object. // * If Type is Deleted: the state of the object immediately before deletion. // * If Type is Error: *Status is recommended; other types may make sense // depending on context. optional k8s.io.apimachinery.pkg.runtime.RawExtension object = 2; }
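The ObjectMeta / OwnerReference comments above are easier to follow with a concrete object in front of you. Below is a hedged TypeScript sketch of the fields those comments describe: the interface shapes mirror the proto field names, while the concrete values (names, UID, finalizer string) are invented purely for illustration and are not part of the API definition.

```ts
// Shapes follow the proto comments above; values are made up for illustration.
interface OwnerReference {
  apiVersion: string;
  kind: string;
  name: string;
  uid: string;
  controller?: boolean;         // true => this reference is the managing controller
  blockOwnerDeletion?: boolean; // true => owner cannot be deleted until this ref is removed
}

interface ObjectMetaSubset {
  labels?: Record<string, string>;      // queryable key/value pairs
  annotations?: Record<string, string>; // non-queryable, tool-owned metadata
  ownerReferences?: OwnerReference[];   // at most one entry may have controller: true
  finalizers?: string[];                // must be empty before deletion from the registry
}

const example: ObjectMetaSubset = {
  labels: { app: "demo" },
  annotations: { "example.com/note": "set by an external tool" },
  ownerReferences: [
    {
      apiVersion: "apps/v1",
      kind: "ReplicaSet",
      name: "demo-7d4b9c",
      uid: "5a4f2c2e-0000-0000-0000-000000000000",
      controller: true,
      blockOwnerDeletion: true,
    },
  ],
  finalizers: ["example.com/cleanup"],
};
```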
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard. // #import "CBaseContact.h" #import "WCDBCoding.h" @class EnterpriseRoomData, NSString; @interface CEnterpriseContact : CBaseContact <WCDBCoding> { _Bool m_bHeadImageUpdateFlag; _Bool m_bUpdateFlag; unsigned int m_uiUserFlag; unsigned int m_uiContactType; NSString *m_nsContactDisplayName; unsigned long long m_uiContactVer; NSString *m_nsProfileJumpUrl; NSString *m_nsAddMemberUrl; EnterpriseRoomData *m_oRoomData; NSString *m_nsBrandUserName; long long m___rowID; } + (id)contactFromBizChatUser:(id)arg1 brandUserName:(id)arg2; + (const basic_string_a490aa4c *)getWCDBPrimaryColumnName; + (const struct WCDBIndexHelper *)getWCDBIndexArray; + (unsigned long long)getWCDBIndexArrayCount; + (const map_0e718273 *)getFileValueTagIndexMap; + (id)getFileValueTypeTable; + (const map_0e718273 *)getPackedValueTagIndexMap; + (id)getPackedValueTypeTable; + (const map_7a576766 *)getValueNameIndexMap; + (id)getValueTable; + (id)dummyObject; @property(nonatomic) long long __rowID; // @synthesize __rowID=m___rowID; @property(nonatomic) _Bool m_bUpdateFlag; // @synthesize m_bUpdateFlag; @property(retain, nonatomic) NSString *m_nsBrandUserName; // @synthesize m_nsBrandUserName; @property(nonatomic) _Bool m_bHeadImageUpdateFlag; // @synthesize m_bHeadImageUpdateFlag; @property(retain, nonatomic) EnterpriseRoomData *m_oRoomData; // @synthesize m_oRoomData; @property(nonatomic) unsigned int m_uiContactType; // @synthesize m_uiContactType; @property(nonatomic) unsigned int m_uiUserFlag; // @synthesize m_uiUserFlag; @property(retain, nonatomic) NSString *m_nsAddMemberUrl; // @synthesize m_nsAddMemberUrl; @property(retain, nonatomic) NSString *m_nsProfileJumpUrl; // @synthesize m_nsProfileJumpUrl; @property(nonatomic) unsigned long long m_uiContactVer; // @synthesize m_uiContactVer; @property(retain, nonatomic) NSString *m_nsContactDisplayName; // @synthesize m_nsContactDisplayName; - (void).cxx_destruct; - (_Bool)isContactTop; - (_Bool)isFavorite; - (_Bool)isSelf; - (_Bool)isChatStatusNotifyOpen; - (_Bool)isChatroom; - (const map_0e718273 *)getValueTagIndexMap; - (id)getValueTypeTable; - (const WCDBCondition_d7690721 *)db_m_bUpdateFlag; - (const WCDBCondition_c6db074e *)db_m_uiDraftTime; - (const WCDBCondition_22fabacd *)db_m_nsDraft; - (const WCDBCondition_22fabacd *)db_m_nsAtUserList; - (const WCDBCondition_22fabacd *)db_m_nsBrandUserName; - (const WCDBCondition_d7690721 *)db_m_bHeadImageUpdateFlag; - (const WCDBCondition_8dd2b00c *)db_m_oRoomData; - (const WCDBCondition_c6db074e *)db_m_uiContactType; - (const WCDBCondition_c6db074e *)db_m_uiUserFlag; - (const WCDBCondition_22fabacd *)db_m_nsAddMemberUrl; - (const WCDBCondition_22fabacd *)db_m_nsHeadHDImgUrl; - (const WCDBCondition_22fabacd *)db_m_nsProfileJumpUrl; - (const WCDBCondition_7786cbb5 *)db_m_uiContactVer; - (const WCDBCondition_22fabacd *)db_m_nsContactDisplayName; - (const WCDBCondition_22fabacd *)db_m_nsUsrName; // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(retain, nonatomic) NSString *m_nsAtUserList; @property(retain, nonatomic) NSString *m_nsDraft; @property(retain, nonatomic) NSString *m_nsHeadHDImgUrl; @property(retain, nonatomic) NSString *m_nsUsrName; @property(nonatomic) unsigned int m_uiDraftTime; @property(readonly) Class superclass; @end
{ "pile_set_name": "Github" }
var rules = require('../../../../lib/rules/util').rules;

module.exports = function(req, res) {
  rules.disableDefault();
  res.json({
    ec: 0,
    em: 'success',
    defaultRulesIsDisabled: rules.defaultRulesIsDisabled(),
    list: rules.getSelectedList()
  });
};
{ "pile_set_name": "Github" }
<?php /** * Magento * * NOTICE OF LICENSE * * This source file is subject to the Open Software License (OSL 3.0) * that is bundled with this package in the file LICENSE.txt. * It is also available through the world-wide-web at this URL: * http://opensource.org/licenses/osl-3.0.php * If you did not receive a copy of the license and are unable to * obtain it through the world-wide-web, please send an email * to [email protected] so we can send you a copy immediately. * * DISCLAIMER * * Do not edit or add to this file if you wish to upgrade Magento to newer * versions in the future. If you wish to customize Magento for your * needs please refer to http://www.magento.com for more information. * * @category Mage * @package Mage_Api * @copyright Copyright (c) 2006-2020 Magento, Inc. (http://www.magento.com) * @license http://opensource.org/licenses/osl-3.0.php Open Software License (OSL 3.0) */ /** * Enter description here ... * * @method Mage_Api_Model_Resource_Rules _getResource() * @method Mage_Api_Model_Resource_Rules getResource() * @method int getRoleId() * @method Mage_Api_Model_Rules setRoleId(int $value) * @method string getResourceId() * @method Mage_Api_Model_Rules setResourceId(string $value) * @method string getPrivileges() * @method Mage_Api_Model_Rules setPrivileges(string $value) * @method int getAssertId() * @method Mage_Api_Model_Rules setAssertId(int $value) * @method string getRoleType() * @method Mage_Api_Model_Rules setRoleType(string $value) * @method string getPermission() * @method Mage_Api_Model_Rules setPermission(string $value) * * @category Mage * @package Mage_Api * @author Magento Core Team <[email protected]> */ class Mage_Api_Model_Rules extends Mage_Core_Model_Abstract { protected function _construct() { $this->_init('api/rules'); } public function update() { $this->getResource()->update($this); return $this; } public function getCollection() { return Mage::getResourceModel('api/permissions_collection'); } public function saveRel() { $this->getResource()->saveRel($this); return $this; } }
{ "pile_set_name": "Github" }
function [improvedRxns, intermediateSlns] = analyzeGCdesign(modelRed, selectedRxns, target, deletions, maxKOs, objFunction, delPenalty, intermediateSlns) % Analyzes results with replacement knockouts % should get closer to local maxima. Must have num `KOs` > 1. % % USAGE: % % [improvedRxns, intermediateSlns] = analyzeGCdesign(modelRed, selectedRxns, target, deletions, maxKOs, objFunction, delPenalty, intermediateSlns) % % INPUTS: % modelRed: reduced model % selectedRxns: selected reaction list from the reduced model % target: exchange `rxn` to optimize % deletions: initial set of `KO` `rxns` (must have at least 1 `rxn`) % % OPTIONAL INPUTS: % maxKOs: maximum number of `rxn` `KOs` to allow (Default = 10) % objFunction: pick an objective function to use (Default = 1): % % 1. `obj = maxRate` (yield) % 2. `obj = growth*maxRate` (SSP) % 3. `obj = maxRate*(delPenalty^numDels)` (yield with KO penalty) % 4. `obj = growth*maxRate*(delPenalty^numDels)` (SSP with KO penalty) % 5. `obj = maxRate*(slope^(-1))` (GC_yield) % 6. `obj = growth*maxRate*(slope^(-1))` (GC_SSP) % 7. `obj = maxRate*(delPenalty^numDels)*(slope^(-1))` (GC_yield with KO penalty) % 8. `obj = growth*maxRate*(delPenalty^numDels)*(slope^(-1))` (GC_SSP with KO penalty) % delPenalty: penalty on extra `rxn` deletions (Default = .99) % intermediateSlns: Previous set of solutions (Default = deletions) % % OUTPUTS: % improvedRxns: the `KO` `rxns` for an improved strain % intermediateSlns: all the sets of best `KO` `rxns` that are picked before the % final set is reached % .. Authors: % - Jeff Orth 7/25/07 % - Richard Que 1/19/10 Replaced try/catch blocks if (nargin < 5) maxKOs = 10; end if (nargin < 6) objFunction = 1; end if (nargin < 7) delPenalty = .99; end if (nargin < 8) intermediateSlns = {deletions}; end %set the objective function switch objFunction case 1 objectiveFunction = 'maxRate'; hasSlope = false; case 2 objectiveFunction = 'growth*maxRate'; hasSlope = false; case 3 objectiveFunction = 'maxRate*(delPenalty^numDels)'; hasSlope = false; case 4 objectiveFunction = 'growth*maxRate*(delPenalty^numDels)'; hasSlope = false; case 5 objectiveFunction = 'maxRate*(slope^(-1))'; hasSlope = true; case 6 objectiveFunction = 'growth*maxRate*(slope^(-1))'; hasSlope = true; case 7 objectiveFunction = 'maxRate*(delPenalty^numDels)*(slope^(-1))'; hasSlope = true; case 8 objectiveFunction = 'growth*maxRate*(delPenalty^numDels)*(slope^(-1))'; hasSlope = true; end if isempty(deletions) error('no knockout reactions defined') end delArraySize = size(deletions); %make sure deletions list is horizontal if delArraySize(1) > 1 rxns = deletions'; else rxns = deletions; end BOF = modelRed.rxns(modelRed.c==1); %get biomass objective function modelKO = changeRxnBounds(modelRed,rxns,0,'b'); FBAsol1 = optimizeCbModel(modelKO,'max',0,true); %find max growth rate of strain if FBAsol1.stat>0 modelKOfixed = changeRxnBounds(modelKO,BOF,FBAsol1.f-1e-6,'l'); %fix the growth rate modelKOfixed = changeObjective(modelKOfixed,target); %set target as the objective FBAsol2 = optimizeCbModel(modelKOfixed,'min',0,true); %find minimum target rate at this growth rate growth = FBAsol1.f; maxRate = FBAsol2.f; numDels = length(rxns); if hasSlope %only calculate these if the obj function includes slope modelTarget = changeObjective(modelKO,target); %set target as the objective FBAsol4 = optimizeCbModel(modelTarget,'min',0,true); %find min production rate modelTargetFixed = changeRxnBounds(modelKO,target,FBAsol4.f,'b'); %fix production to minimum FBAsol5 = 
optimizeCbModel(modelTargetFixed,'max',0,true); %find max growth at min production minProdRate = FBAsol4.f; maxGrowthMinRate = FBAsol5.f; if growth ~= maxGrowthMinRate slope = (maxRate-minProdRate)/(growth-maxGrowthMinRate); else slope = 1; %don't consider slope if div by 0 end end objective = eval(objectiveFunction); bestObjective = objective bestRxns = rxns; % if the initial reactions are lethal else bestObjective = 0 bestRxns = rxns; end % loop through each KO rxn and replace with every rxn from selectedRxns to % search for a possible improvement showprogress(0, 'improving knockout design'); for i = 1:length(rxns)+1 bestObjective2 = bestObjective; bestRxns2 = bestRxns; for j = 1:length(selectedRxns)+1 showprogress((j+(i-1)*length(selectedRxns))/((length(rxns)+1)*(length(selectedRxns)+1))); newRxns = rxns; if (i==length(rxns)+1)&&(j==length(selectedRxns)+1) %don't do anything at the very end elseif j ~= length(selectedRxns)+1 newRxns{i} = selectedRxns{j}; %replace rxn with different one elseif i == 1 %or else remove one of the rxns newRxns = rxns(2:length(rxns)); elseif i == length(rxns) newRxns = rxns(1:length(rxns)-1); else newRxns = cat(2,rxns(1:i-1),rxns(i+1:length(rxns))); end if length(newRxns) <= maxKOs %limit the total number of knockouts modelKO = changeRxnBounds(modelRed,newRxns,0,'b'); FBAsol1 = optimizeCbModel(modelKO,'max',0,true); %find max growth rate of strain if FBAsol1.stat>0 modelKOfixed = changeRxnBounds(modelKO,BOF,FBAsol1.f-1e-6,'l'); %fix the growth rate modelKOfixed = changeObjective(modelKOfixed,target); %set target as the objective FBAsol2 = optimizeCbModel(modelKOfixed,'min',0,true); %find minimum target rate at this growth rate FBAsol3 = optimizeCbModel(modelKOfixed,'max',0,true); %find maximum target rate at this growth rate growth = FBAsol1.f; maxRate = FBAsol2.f; numDels = length(newRxns); if hasSlope %only calculate these if the obj function includes slope modelTarget = changeObjective(modelKO,target); %set target as the objective FBAsol4 = optimizeCbModel(modelTarget,'min',0,true); %find min production rate modelTargetFixed = changeRxnBounds(modelKO,target,FBAsol4.f,'b'); %fix production to minimum FBAsol5 = optimizeCbModel(modelTargetFixed,'max',0,true); %find max growth at min production minProdRate = FBAsol4.f; maxGrowthMinRate = FBAsol5.f; if growth ~= maxGrowthMinRate slope = (maxRate-minProdRate)/(growth-maxGrowthMinRate); else slope = 1; %don't consider slope if div by 0 end end newObjective = eval(objectiveFunction); %see if objective is increased by this new gene if newObjective > bestObjective2 bestObjective2 = newObjective bestRxns2 = newRxns intermediateSlns{length(intermediateSlns)+1} = bestRxns2; %add new intermediateSln to the list end end end end if bestObjective2 > bestObjective bestObjective = bestObjective2 bestRxns = bestRxns2 end end bestObjective bestRxns % recursively call analyzeGCdesign again until no improvement is found if length(bestRxns) ~= length(rxns) [bestRxns,intermediateSlns] = analyzeGCdesign(modelRed,selectedRxns,target,bestRxns,maxKOs,objFunction,delPenalty,intermediateSlns); elseif length(find(strcmp(bestRxns,rxns)))~=length(rxns) [bestRxns,intermediateSlns] = analyzeGCdesign(modelRed,selectedRxns,target,bestRxns,maxKOs,objFunction,delPenalty,intermediateSlns); end % print final results improvedRxns = sort(bestRxns)
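To make the objective-function table in the analyzeGCdesign header easier to follow, here is a hedged TypeScript sketch of the same eight scoring formulas. It is not part of the COBRA toolbox; the parameter names simply mirror the MATLAB variables (growth, maxRate, numDels, delPenalty, slope), and the numbering matches the list in the docstring.

```ts
// Scoring formulas 1..8 as listed in the analyzeGCdesign header comment.
function gcObjective(
  objFunction: number, // 1..8, same numbering as the MATLAB switch statement
  growth: number,      // max growth rate of the knockout strain
  maxRate: number,     // minimum target rate at that growth rate
  numDels: number,     // number of reaction deletions
  delPenalty = 0.99,   // penalty applied per deletion
  slope = 1,           // (maxRate - minProdRate) / (growth - maxGrowthMinRate)
): number {
  switch (objFunction) {
    case 1: return maxRate;
    case 2: return growth * maxRate;
    case 3: return maxRate * Math.pow(delPenalty, numDels);
    case 4: return growth * maxRate * Math.pow(delPenalty, numDels);
    case 5: return maxRate / slope;
    case 6: return (growth * maxRate) / slope;
    case 7: return (maxRate * Math.pow(delPenalty, numDels)) / slope;
    case 8: return (growth * maxRate * Math.pow(delPenalty, numDels)) / slope;
    default: throw new Error(`unknown objective function ${objFunction}`);
  }
}

// Example: objective 4 with growth 0.35, target rate 12, and 3 deletions
// gives 0.35 * 12 * 0.99^3, roughly 4.08.
```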
{ "pile_set_name": "Github" }
UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 50 WHERE (ID = 52223); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 50 WHERE (ID = 52225); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 100 WHERE (ID = 52227); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52226); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52228); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 48752); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 48764); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48767); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48770); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 48752); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48761); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 51568); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 48768); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52044); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52049); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52050); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 51380); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 52050); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48754); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48755); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 48756); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48757); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 48769); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51016); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51313); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51361); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51365); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 51962); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 51964); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 51965); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 50 WHERE (ID = 52014); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 135 WHERE (ID = 52015); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 52016); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52017); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 
52055); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 51971); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 130 WHERE (ID = 52046); UPDATE quest_template_addon SET RequiredSkillID = 2565, RequiredSkillPoints = 150 WHERE (ID = 52053); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 60 WHERE (ID = 51575); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 100 WHERE (ID = 52213); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 100 WHERE (ID = 52216); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52217); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 60 WHERE (ID = 52214); UPDATE quest_template_addon SET RequiredSkillID = 2557, RequiredSkillPoints = 150 WHERE (ID = 52215); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51230); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51243); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51448); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51452); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51464); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51478); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51481); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51482); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 50 WHERE (ID = 51498); UPDATE quest_template_addon SET RequiredSkillID = 2549, RequiredSkillPoints = 150 WHERE (ID = 51503);
{ "pile_set_name": "Github" }
/// @ref simd /// @file glm/simd/integer.h #pragma once #if GLM_ARCH & GLM_ARCH_SSE2_BIT GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave(glm_uvec4 x) { glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF); glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF); glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F); glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333); glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555); glm_uvec4 Reg1; glm_uvec4 Reg2; // REG1 = x; // REG2 = y; //Reg1 = _mm_unpacklo_epi64(x, y); Reg1 = x; //REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF); //REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF); Reg2 = _mm_slli_si128(Reg1, 2); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask4); //REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF); //REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF); Reg2 = _mm_slli_si128(Reg1, 1); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask3); //REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F); //REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F); Reg2 = _mm_slli_epi32(Reg1, 4); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask2); //REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333); //REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333); Reg2 = _mm_slli_epi32(Reg1, 2); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask1); //REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555); //REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555); Reg2 = _mm_slli_epi32(Reg1, 1); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask0); //return REG1 | (REG2 << 1); Reg2 = _mm_slli_epi32(Reg1, 1); Reg2 = _mm_srli_si128(Reg2, 8); Reg1 = _mm_or_si128(Reg1, Reg2); return Reg1; } GLM_FUNC_QUALIFIER glm_uvec4 glm_i128_interleave2(glm_uvec4 x, glm_uvec4 y) { glm_uvec4 const Mask4 = _mm_set1_epi32(0x0000FFFF); glm_uvec4 const Mask3 = _mm_set1_epi32(0x00FF00FF); glm_uvec4 const Mask2 = _mm_set1_epi32(0x0F0F0F0F); glm_uvec4 const Mask1 = _mm_set1_epi32(0x33333333); glm_uvec4 const Mask0 = _mm_set1_epi32(0x55555555); glm_uvec4 Reg1; glm_uvec4 Reg2; // REG1 = x; // REG2 = y; Reg1 = _mm_unpacklo_epi64(x, y); //REG1 = ((REG1 << 16) | REG1) & glm::uint64(0x0000FFFF0000FFFF); //REG2 = ((REG2 << 16) | REG2) & glm::uint64(0x0000FFFF0000FFFF); Reg2 = _mm_slli_si128(Reg1, 2); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask4); //REG1 = ((REG1 << 8) | REG1) & glm::uint64(0x00FF00FF00FF00FF); //REG2 = ((REG2 << 8) | REG2) & glm::uint64(0x00FF00FF00FF00FF); Reg2 = _mm_slli_si128(Reg1, 1); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask3); //REG1 = ((REG1 << 4) | REG1) & glm::uint64(0x0F0F0F0F0F0F0F0F); //REG2 = ((REG2 << 4) | REG2) & glm::uint64(0x0F0F0F0F0F0F0F0F); Reg2 = _mm_slli_epi32(Reg1, 4); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask2); //REG1 = ((REG1 << 2) | REG1) & glm::uint64(0x3333333333333333); //REG2 = ((REG2 << 2) | REG2) & glm::uint64(0x3333333333333333); Reg2 = _mm_slli_epi32(Reg1, 2); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask1); //REG1 = ((REG1 << 1) | REG1) & glm::uint64(0x5555555555555555); //REG2 = ((REG2 << 1) | REG2) & glm::uint64(0x5555555555555555); Reg2 = _mm_slli_epi32(Reg1, 1); Reg1 = _mm_or_si128(Reg2, Reg1); Reg1 = _mm_and_si128(Reg1, Mask0); //return REG1 | (REG2 << 1); Reg2 = _mm_slli_epi32(Reg1, 1); Reg2 = _mm_srli_si128(Reg2, 8); Reg1 = _mm_or_si128(Reg1, Reg2); return Reg1; } #endif//GLM_ARCH & 
GLM_ARCH_SSE2_BIT
{ "pile_set_name": "Github" }
// File
// Sometimes, remember the good that others do for you
class IO {
}
{ "pile_set_name": "Github" }
/* * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ #import "EC2Response.h" /** * Authorize Security Group Ingress */ @interface EC2AuthorizeSecurityGroupIngressResponse:EC2Response { } @end
{ "pile_set_name": "Github" }
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; /** * Tests for {@link RenameLabels}. */ public class RenameLabelsTest extends CompilerTestCase { @Override protected CompilerPass getProcessor(Compiler compiler) { return new RenameLabels(compiler); } public void testRenameInFunction() { test("function x(){ Foo:a(); }", "function x(){ a(); }"); test("function x(){ Foo:{ a(); break Foo; } }", "function x(){ a:{ a(); break a; } }"); test("function x() { " + "Foo:{ " + "function goo() {" + "Foo: {" + "a(); " + "break Foo; " + "}" + "}" + "}" + "}", "function x(){function goo(){a:{ a(); break a; }}}"); test("function x() { " + "Foo:{ " + "function goo() {" + "Foo: {" + "a(); " + "break Foo; " + "}" + "}" + "break Foo;" + "}" + "}", "function x(){a:{function goo(){a:{ a(); break a; }} break a;}}"); } public void testRenameGlobals() { test("Foo:{a();}", "a();"); test("Foo:{a(); break Foo;}", "a:{a(); break a;}"); test("Foo:{Goo:a(); break Foo;}", "a:{a(); break a;}"); test("Foo:{Goo:while(1){a(); continue Goo; break Foo;}}", "a:{b:while(1){a(); continue b;break a;}}"); test("Foo:Goo:while(1){a(); continue Goo; break Foo;}", "a:b:while(1){a(); continue b;break a;}"); test("Foo:Bar:X:{ break Bar; }", "a:{ break a; }"); test("Foo:Bar:X:{ break Bar; break X; }", "a:b:{ break a; break b;}"); test("Foo:Bar:X:{ break Bar; break Foo; }", "a:b:{ break b; break a;}"); test("Foo:while (1){a(); break;}", "while (1){a(); break;}"); // Remove label that is not referenced. test("Foo:{a(); while (1) break;}", "a(); while (1) break;"); } public void testRenameReused() { test("foo:{break foo}; foo:{break foo}", "a:{break a};a:{break a}"); } }
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- # # This file is part of EventGhost. # Copyright © 2005-2020 EventGhost Project <http://www.eventghost.net/> # # EventGhost is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free # Software Foundation, either version 2 of the License, or (at your option) # any later version. # # EventGhost is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along # with EventGhost. If not, see <http://www.gnu.org/licenses/>. import errno import os import time from docutils.core import publish_parts from jinja2 import Environment, FileSystemLoader from os.path import abspath, join # Local imports import builder class BuildWebsite(builder.Task): description = "Build website" def Setup(self): if self.buildSetup.showGui: self.activated = False else: self.activated = bool(self.buildSetup.args.sync) def DoTask(self): buildSetup = self.buildSetup menuTabs = (HomePage, DocsPage, WikiPage, ForumPage, DownloadPage) env = Environment( loader=FileSystemLoader( abspath(join(buildSetup.dataDir, 'templates')) ), trim_blocks=True ) env.globals = { "files": GetSetupFiles(join(buildSetup.websiteDir, "downloads")), "MENU_TABS": menuTabs, } env.filters = {'rst2html': rst2html} for page in menuTabs: path = os.path.abspath(join(buildSetup.websiteDir, page.outfile)) try: os.makedirs(os.path.dirname(path)) except os.error, exc: if exc.errno != errno.EEXIST: raise env.get_template(page.template).stream(CURRENT=page).dump(path) class FileData(object): def __init__(self, path): self.path = path self.target = os.path.basename(path) parts = self.target.split("_") self.name = " ".join(parts[:2]) fileStat = os.stat(path) self.time = time.strftime("%b %d %Y", time.gmtime(fileStat.st_mtime)) self.size = "%0.1f MB" % (fileStat.st_size / 1024.0 / 1024) class Page(object): def __init__(self): pass class HomePage(Page): name = "Home" target = "/" outfile = "index.html" template = "home.tmpl" class DocsPage(Page): name = "Documentation" target = "/docs/" outfile = "css/header_docs.html" template = "header_only.tmpl" class DownloadPage(Page): name = "Downloads" target = "/downloads/" outfile = "downloads/index.html" template = "download.tmpl" class ForumPage(Page): name = "Forum" target = "/forum/" outfile = "css/header_forum.html" template = "header_only.tmpl" class WikiPage(Page): name = "Wiki" target = "/mediawiki/" outfile = "css/header_wiki.html" template = "header_only.tmpl" def GetSetupFiles(srcDir): if not os.path.exists(srcDir): return [] files = [] for name in os.listdir(srcDir): if name.lower().startswith("eventghost_"): if name.lower().endswith("_setup.exe"): path = join(srcDir, name) fileData = FileData(path) files.append(fileData) def Cmp(x, y): x = x.target.split("_")[1].replace("r", "").split(".") y = y.target.split("_")[1].replace("r", "").split(".") x = [int(s) for s in x] y = [int(s) for s in y] return cmp(x, y) return list(reversed(sorted(files, cmp=Cmp))) def rst2html(rst): return publish_parts(rst, writer_name="html")["fragment"]
{ "pile_set_name": "Github" }
/* * **************************************************************************** * Cloud Foundry * Copyright (c) [2009-2016] Pivotal Software, Inc. All Rights Reserved. * * This product is licensed to you under the Apache License, Version 2.0 (the "License"). * You may not use this product except in compliance with the License. * * This product includes a number of subcomponents with * separate copyright notices and license terms. Your use of these * subcomponents is subject to the terms and conditions of the * subcomponent's license, as noted in the LICENSE file. * **************************************************************************** */ package org.cloudfoundry.identity.uaa.oauth.jwt; import org.cloudfoundry.identity.uaa.oauth.jwk.JsonWebKey; import org.cloudfoundry.identity.uaa.oauth.jwk.JsonWebKeySet; import org.springframework.security.jwt.crypto.sign.InvalidSignatureException; import org.springframework.security.jwt.crypto.sign.SignatureVerifier; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class ChainedSignatureVerifier implements SignatureVerifier { private final List<SignatureVerifier> delegates; public ChainedSignatureVerifier(JsonWebKeySet<? extends JsonWebKey> keys) { if(keys == null || keys.getKeys() == null || keys.getKeys().isEmpty()) { throw new IllegalArgumentException("keys cannot be null or empty"); } List<SignatureVerifier> ds = new ArrayList<>(keys.getKeys().size()); for (JsonWebKey key : keys.getKeys()) { ds.add(new CommonSignatureVerifier(key.getValue())); } delegates = Collections.unmodifiableList(ds); } public ChainedSignatureVerifier(List<SignatureVerifier> delegates) { this.delegates = delegates; } @Override public void verify(byte[] content, byte[] signature) { Exception last = new InvalidSignatureException("No matching keys found."); for (SignatureVerifier delegate : delegates) { try { delegate.verify(content, signature); //success return; } catch (Exception e) { last = e; } } throw (last instanceof RuntimeException) ? (RuntimeException) last : new RuntimeException(last); } @Override public String algorithm() { return null; } }
{ "pile_set_name": "Github" }
/* * @(#)DeclarationScanner.java 1.5 04/04/20 * * Copyright (c) 2004, Sun Microsystems, Inc. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the Sun Microsystems, Inc. nor the names of * its contributors may be used to endorse or promote products derived from * this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A * PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER * OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.sun.mirror.util; import com.sun.mirror.declaration.*; /** * A visitor for declarations that scans declarations contained within * the given declaration. For example, when visiting a class, the * methods, fields, constructors, and nested types of the class are * also visited. * * <p> To control the processing done on a declaration, users of this * class pass in their own visitors for pre and post processing. The * preprocessing visitor is called before the contained declarations * are scanned; the postprocessing visitor is called after the * contained declarations are scanned. * * @author Joseph D. 
Darcy * @author Scott Seligman * @version 1.5 04/04/20 * @since 1.5 */ class DeclarationScanner implements DeclarationVisitor { protected DeclarationVisitor pre; protected DeclarationVisitor post; DeclarationScanner(DeclarationVisitor pre, DeclarationVisitor post) { this.pre = pre; this.post = post; } @Override public void visitDeclaration(Declaration d) { d.accept(pre); d.accept(post); } @Override public void visitPackageDeclaration(PackageDeclaration d) { d.accept(pre); for(ClassDeclaration classDecl: d.getClasses()) { classDecl.accept(this); } for(InterfaceDeclaration interfaceDecl: d.getInterfaces()) { interfaceDecl.accept(this); } d.accept(post); } @Override public void visitMemberDeclaration(MemberDeclaration d) { visitDeclaration(d); } @Override public void visitTypeDeclaration(TypeDeclaration d) { d.accept(pre); for(TypeParameterDeclaration tpDecl: d.getFormalTypeParameters()) { tpDecl.accept(this); } for(FieldDeclaration fieldDecl: d.getFields()) { fieldDecl.accept(this); } for(MethodDeclaration methodDecl: d.getMethods()) { methodDecl.accept(this); } for(TypeDeclaration typeDecl: d.getNestedTypes()) { typeDecl.accept(this); } d.accept(post); } @Override public void visitClassDeclaration(ClassDeclaration d) { d.accept(pre); for(TypeParameterDeclaration tpDecl: d.getFormalTypeParameters()) { tpDecl.accept(this); } for(FieldDeclaration fieldDecl: d.getFields()) { fieldDecl.accept(this); } for(MethodDeclaration methodDecl: d.getMethods()) { methodDecl.accept(this); } for(TypeDeclaration typeDecl: d.getNestedTypes()) { typeDecl.accept(this); } for(ConstructorDeclaration ctorDecl: d.getConstructors()) { ctorDecl.accept(this); } d.accept(post); } @Override public void visitEnumDeclaration(EnumDeclaration d) { visitClassDeclaration(d); } @Override public void visitInterfaceDeclaration(InterfaceDeclaration d) { visitTypeDeclaration(d); } @Override public void visitAnnotationTypeDeclaration(AnnotationTypeDeclaration d) { visitInterfaceDeclaration(d); } @Override public void visitFieldDeclaration(FieldDeclaration d) { visitMemberDeclaration(d); } @Override public void visitEnumConstantDeclaration(EnumConstantDeclaration d) { visitFieldDeclaration(d); } @Override public void visitExecutableDeclaration(ExecutableDeclaration d) { d.accept(pre); for(TypeParameterDeclaration tpDecl: d.getFormalTypeParameters()) { tpDecl.accept(this); } for(ParameterDeclaration pDecl: d.getParameters()) { pDecl.accept(this); } d.accept(post); } @Override public void visitConstructorDeclaration(ConstructorDeclaration d) { visitExecutableDeclaration(d); } @Override public void visitMethodDeclaration(MethodDeclaration d) { visitExecutableDeclaration(d); } @Override public void visitAnnotationTypeElementDeclaration( AnnotationTypeElementDeclaration d) { visitMethodDeclaration(d); } @Override public void visitParameterDeclaration(ParameterDeclaration d) { visitDeclaration(d); } @Override public void visitTypeParameterDeclaration(TypeParameterDeclaration d) { visitDeclaration(d); } }
{ "pile_set_name": "Github" }
#ifndef OPENTISSUE_CORE_CONTAINERS_GRID_UTIL_GRID_BISECTION_LINE_SEARCH_H #define OPENTISSUE_CORE_CONTAINERS_GRID_UTIL_GRID_BISECTION_LINE_SEARCH_H // // OpenTissue Template Library // - A generic toolbox for physics-based modeling and simulation. // Copyright (C) 2008 Department of Computer Science, University of Copenhagen. // // OTTL is licensed under zlib: http://opensource.org/licenses/zlib-license.php // #include <OpenTissue/configuration.h> #include <OpenTissue/core/containers/grid/util/grid_gradient_at_point.h> namespace OpenTissue { namespace grid { /** * Grid Bisection Line Search * * @param q_a * @param q_b * @param phi * @param maximize If true the bisection method tries to find the maximimum value between q_a and q_b otherwise it tries to find the minimum value. * * @return The point that maximizes the value of phi on the line between q_a and q_b. */ template<typename vector3_type,typename grid_type> inline vector3_type bisection_line_search(vector3_type q_a, vector3_type q_b, grid_type & phi, bool maximize = true) { using std::fabs; typedef typename vector3_type::value_type real_type; real_type const precision = 10e-5;//OpenTissue::math::working_precision<real_type>(100); real_type const too_small_interval = sqr_length(q_b-q_a)*0.0001; //--- 1/100'th of distance! vector3_type n = unit(gradient_at_point(phi,q_a)); vector3_type r; real_type const sign = maximize? 1.0 : -1.0; bool forever = true; do { vector3_type q_c = (q_a + q_b)*.5; if( sqr_length(q_a - q_b) < too_small_interval ) { r = q_c; break; } vector3_type dir = unit(gradient_at_point(phi,q_c)); real_type n_dot_dir = inner_prod(n , dir)*sign; if(fabs(n_dot_dir) < precision) { r = q_c; break; } if(n_dot_dir > 0) { q_a = q_c; } if(n_dot_dir < 0) { q_b = q_c; } } while (forever); return r; } } // namespace grid } // namespace OpenTissue // OPENTISSUE_CORE_CONTAINERS_GRID_UTIL_GRID_BISECTION_LINE_SEARCH_H #endif
{ "pile_set_name": "Github" }
package org.javaee8.jsonp.merge; import javax.json.Json; import javax.json.JsonMergePatch; import javax.json.JsonObject; import javax.json.JsonValue; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.arquillian.junit.Arquillian; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; import static org.junit.Assert.assertTrue; import org.junit.Test; import org.junit.runner.RunWith; /** * Class that tests and demonstrates the JSON-P 1.1 Merge Operations. * @author Andrew Pielage */ @RunWith(Arquillian.class) public class JsonpMergeTest { // Create a JsonObject with some values to be used in each test private static final JsonObject json = Json.createObjectBuilder() .add("Wibbly", "Wobbly") .add("Replaced", false) .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .add("Wobbles") .build()) .add("Nested", Json.createObjectBuilder() .add("Birdie", "Wordie") .add("Bestiary", Json.createArrayBuilder() .add("Drowner") .add("Werewolf") .add("Chimera") .build()) .build()) .build(); @Deployment public static JavaArchive createDeployment() { // Create a JavaArchive to deploy JavaArchive jar = ShrinkWrap.create(JavaArchive.class); // Print out directory contents System.out.println(jar.toString(true)); // Return Arquillian Test Archive for application server return jar; } /** * Test that the JSON Merge operation replaces values as intended. */ @Test public void replaceTest() { // Create a JSON object that we'll merge into the class variable, replacing object members and array values JsonObject jsonToMerge = Json.createObjectBuilder() .add("Wibbly", "Bibbly") .add("Replaced", "Yes") .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .add("Bibbles") .build()) .add("Nested", Json.createObjectBuilder() .add("Bestiary", Json.createArrayBuilder() .add("Slyzard") .add("Dragon") .add("Ekimmara") .build()) .build()) .build(); // Create a merge patch and apply it JsonMergePatch mergePatch = Json.createMergePatch(jsonToMerge); JsonValue mergedJson = mergePatch.apply(json); // Print out to more easily see what we've done System.out.println("JsonpMergeTest.replaceTest: Before Merge: " + json); System.out.println("JsonpMergeTest.replaceTest: JSON to Merge: " + jsonToMerge); System.out.println("JsonpMergeTest.replaceTest: After Merge: " + mergedJson); // Test that everything is as it should be JsonObject mergedJsonObject = mergedJson.asJsonObject(); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Wibbly").equals("Bibbly")); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Replaced").equals("Yes")); assertTrue("JSON Array didn't merge correctly!", mergedJsonObject.getJsonArray("Lexicon").getString(0).equals("Wibbles") && mergedJsonObject.getJsonArray("Lexicon").getString(1).equals("Bibbles")); assertTrue("Nested JSON didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getString("Birdie").equals("Wordie")); assertTrue("Nested JSON Array didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(0).equals("Slyzard") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(1).equals("Dragon") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(2).equals("Ekimmara")); } /** * Test that the JSON Merge operation adds values as intended. 
*/ @Test public void addTest() { // Create a JSON object that we'll merge into the class variable, adding object members and array values JsonObject jsonToMerge = Json.createObjectBuilder() .add("Bibbly", "Bobbly") .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .add("Wobbles") .add("Bibbles") .add("Bobbles") .build()) .build(); // Create a merge patch and apply it JsonMergePatch mergePatch = Json.createMergePatch(jsonToMerge); JsonValue mergedJson = mergePatch.apply(json); // Print out to more easily see what we've done System.out.println("JsonpMergeTest.addTest: Before Merge: " + json); System.out.println("JsonpMergeTest.addTest: JSON to Merge: " + jsonToMerge); System.out.println("JsonpMergeTest.addTest: After Merge: " + mergedJson); // Test that everything is as it should be JsonObject mergedJsonObject = mergedJson.asJsonObject(); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Wibbly").equals("Wobbly")); assertTrue("Merged JSON didn't merge correctly!", mergedJsonObject.getString("Bibbly").equals("Bobbly")); assertTrue("Merged JSON didn't merge correctly!", !mergedJsonObject.getBoolean("Replaced")); assertTrue("JSON Array didn't merge correctly!", mergedJsonObject.getJsonArray("Lexicon").getString(0).equals("Wibbles") && mergedJsonObject.getJsonArray("Lexicon").getString(1).equals("Wobbles") && mergedJsonObject.getJsonArray("Lexicon").getString(2).equals("Bibbles") && mergedJsonObject.getJsonArray("Lexicon").getString(3).equals("Bobbles")); assertTrue("Nested JSON didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getString("Birdie").equals("Wordie")); assertTrue("Nested JSON Array didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(0).equals("Drowner") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(1).equals("Werewolf") && mergedJsonObject.getJsonObject("Nested").getJsonArray("Bestiary").getString(2).equals("Chimera")); } /** * Test that the JSON Merge operation removes values as intended. */ @Test public void removeTest() { // Create a JSON object that we'll merge into the class variable, removing object members and array values JsonObject jsonToMerge = Json.createObjectBuilder() .addNull("Wibbly") .add("Lexicon", Json.createArrayBuilder() .add("Wibbles") .build()) .add("Nested", Json.createObjectBuilder() .addNull("Bestiary") .build()) .build(); // Create a merge patch and apply it JsonMergePatch mergePatch = Json.createMergePatch(jsonToMerge); JsonValue mergedJson = mergePatch.apply(json); // Print out to more easily see what we've done System.out.println("JsonpMergeTest.removeTest: Before Merge: " + json); System.out.println("JsonpMergeTest.removeTest: JSON to Merge: " + jsonToMerge); System.out.println("JsonpMergeTest.removeTest: After Merge: " + mergedJson); // Test that everything is as it should be JsonObject mergedJsonObject = mergedJson.asJsonObject(); assertTrue("Merged JSON didn't merge correctly!", !mergedJsonObject.containsKey("Wibbly")); assertTrue("Merged JSON didn't merge correctly!", !mergedJsonObject.getBoolean("Replaced")); assertTrue("JSON Array didn't merge correctly!", mergedJsonObject.getJsonArray("Lexicon").getString(0).equals("Wibbles")); assertTrue("Nested JSON didn't merge correctly!", mergedJsonObject.getJsonObject("Nested").getString("Birdie").equals("Wordie")); assertTrue("Nested JSON Array didn't merge correctly!", !mergedJsonObject.getJsonObject("Nested").containsKey("Bestiary")); } }
{ "pile_set_name": "Github" }
---
title: How to package a RegularJS component as an npm package
date: 2017-07-04
---

Based on the RegularJS hot-zone component, this post walks through the whole flow of packaging and publishing a component, along with the main problems encountered on the way.

## Contents

1. Project initialization
2. Preparing the dev environment, installing base dependencies
3. Bundling the component for multiple module formats (!important)
4. Development
5. Packaging
6. Publishing

<!-- more -->

## 1. Project initialization

#### 1. Create the repository on GitHub, add a README and License

Nothing special here. The License is usually MIT (long live open source); for a detailed comparison of licenses see [HELP](https://choosealicense.com/).

#### 2. Clone it locally and set up git config

The global git config is usually set to your company email and username. To avoid leaking that information, set a project-level config right after initialization:

```shell
$ git config user.name "GitHub username"
$ git config user.email "GitHub email"
```

Commits will then carry this username and email, which you can verify with:

```shell
$ cat .git/config
[user]
    name = GitHub username
    email = GitHub email
```

#### 3. Run npm init to generate package.json

Just follow the prompts to complete the configuration.

## 2. Preparing the dev environment, installing base dependencies

I took a shortcut here and reused the webpack.config.js and package.json generated by vue-cli's [webpack-simple](https://github.com/vuejs-templates/webpack-simple) template, then adjusted them to what the project actually needs.

The configuration is fairly simple; you can look at it directly [**here**](https://github.com/Deol/regular-hotzone/blob/master/webpack.config.js) (for what each option means, see the [docs](https://webpack.js.org/configuration/output/#output-librarytarget)).

## 3. Bundling the component for multiple module formats (!important)

Since this is a RegularJS component, the bundled output should be usable whether it is pulled in directly via a `<script>` tag or imported through AMD / CommonJS.

### Part one: webpack configuration

Three options matter here:

- output.library && output.libraryTarget

  The `library` option exposes the bundled component as a global variable. To reduce naming pollution and collisions, give `library` a fairly distinctive value such as `regularHotZone`.

  To make the component runnable under multiple module systems, set `libraryTarget` to **umd**, which means the component works under CommonJS, AMD, and as a global:

```
output: {
    library: 'regularHotZone',
    libraryTarget: 'umd'
}
```

- externals

  This option excludes external dependencies so they are not bundled in. A RegularJS component has no need to bundle RegularJS itself, and this is exactly what externals is for.

  The configuration is written like this:

```
externals: {
    regularjs: {
        root: 'Regular',
        commonjs: 'regularjs',
        commonjs2: 'regularjs',
        amd: 'regularjs'
    }
}
```

  Because `libraryTarget` is set to umd, externals must be written in this form so that RegularJS is resolved as `regularjs` under AMD and CommonJS, but as the global `Regular` when used as a global variable.

### Part two: package configuration

On the other side, the component's package.json must declare RegularJS as a peer dependency (`peerDependencies`):

```
// Tip: unlike normal dependencies, peer dependencies should use a looser version range.
// Do not pin a peer dependency to one specific version.
"peerDependencies": {
    "regularjs": "^0.4.3"
}
```

A RegularJS component is an extension of the RegularJS framework; it cannot exist independently of it.

In other words, if the component is consumed as an npm package, the RegularJS framework must also be brought in, either as an npm package or via a `script` tag combined with the externals configuration.

**Note**: if RegularJS cannot be found when the component package is installed, or its version **does not satisfy the peer dependency requirement**, the terminal will print a warning:

```
`-- UNMET PEER DEPENDENCY regularjs@^0.4.3
npm WARN [email protected] requires a peer of regularjs@^0.4.3 but none was installed.
```

The version rules npm uses are described "[**here**](https://docs.npmjs.com/misc/semver)".

So the RegularJS range set above, `^0.4.3`, is equivalent to version >= 0.4.3 && version < 0.5.0.

## 4. Development

Run `npm run startdev` and get to work...

## 5. Packaging

1. Once development is finished, bump the version in package.json (versioning rules are "[**here**](http://semver.org/lang/zh-CN/)"; the version must change on every publish, otherwise publishing fails), then run `npm run build` to produce the dist folder.
2. Write the Readme; "[How to write a good GitHub readme? - Zhihu](https://www.zhihu.com/question/29100816/answer/68750410)" is a useful reference.

## 6. Publishing

For the final step, register at https://www.npmjs.com/ and run:

```
$ npm publish
```

Publishing may still fail after logging in, because the npm registry may have been switched to the Taobao mirror; in that case add `//` to comment that line out temporarily:

```
$ vi ~/.npmrc
//registry=https://registry.npm.taobao.org
```

Save and publish again.

You can then search for the package name on npms.io, for example (please ignore the score):

![npms](https://user-images.githubusercontent.com/4961878/27834960-9bd1e728-610b-11e7-9de6-2e64a1c110e3.png)

Using its [**analysis**](https://npms.io/about) you can improve the quality of the npm package. The simplest steps include:

- Complete the Readme, license, .gitignore, and so on;
- Hook up [Travis CI](https://travis-ci.org/) or similar, and keep coverage up;
- Remove outdated dependencies to reduce dependency fragility;
- Add a dedicated site and badges to the Readme;
- Add ESLint for static code checks;
- ...
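To round this off, here is a hedged sketch of what consuming the published UMD bundle could look like under the two module styles discussed above. Only the global name `regularHotZone` and the externals mapping (`Regular` as the global, `regularjs` under CommonJS/AMD) come from the post; the package name `regular-hotzone`, its default export, the script paths, and the `Regular.component` registration call are assumptions for illustration, so check the component's own README before copying.

```ts
// Sketch only: package name, default export, file paths, and the registration
// API are assumptions; adjust to the component's actual documentation.

// CommonJS / bundler usage: regularjs must be installed to satisfy the peer dependency.
import Regular from "regularjs";
import HotZone from "regular-hotzone";

// Register the component globally (assumed RegularJS registration call).
Regular.component("hot-zone", HotZone);

// Browser-global usage: load regularjs first, then the UMD bundle; webpack's
// `library: 'regularHotZone'` exposes the component on window:
//
//   <script src="regular.min.js"></script>
//   <script src="regular-hotzone.js"></script>
//   <script>
//     Regular.component('hot-zone', window.regularHotZone);
//   </script>
```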
{ "pile_set_name": "Github" }
if TARGET_TQM834X

config SYS_BOARD
	default "tqm834x"

config SYS_VENDOR
	default "tqc"

config SYS_CONFIG_NAME
	default "TQM834x"

endif
{ "pile_set_name": "Github" }
---
title: Some thoughts on shared services
date: 2016-12-07 21:12:53
tags: []
author: xizhibei
issue_link: https://github.com/xizhibei/blog/issues/32
---

A few days ago I talked with a peer in the industry, and working through things one by one I discovered what "complacent isolation" really means: talk to the outside world too little and many things become unclear, to the point of drifting away from the mainstream.

For example, I have been doing infrastructure work for the team lately, but it can feel like a thankless job: there is a real sense of achievement once something is set up, yet the maintenance cost that follows is a constant headache.

Indeed, **"if you can spend money, don't spend time."**

I wanted to push back on that, but thinking it over, the reason I did not argue on the spot is that I agree with it. The rebuttal I had in mind was this: shared services are like buses. They really are convenient at times, but the moment you want more freedom they become awkward, and that is when you need your own car. True, a car costs more and takes time and money to maintain, but it is simply more convenient than the bus.

So for a startup you can absolutely ride the bus, that is, use shared services: code hosting, document management, project management, cloud servers, monitoring, CI & CD. In fact there are more and more shared services, so the cost of starting up keeps dropping; it may eventually reach the point where users just snap building blocks together and every service is public and ready-made.

That said, I think it is the high-cost services that have a hard case for becoming shared services, and where the value is highest: cloud hosting, security, APM, big data, and so on.

Back to the point. Here is why we do not use shared services for some parts.

Two words: **cost and benefit**.

#### Cost
- Money: mostly machines, whether cloud servers or hardware you buy yourself
- Time: setup and maintenance
- People: someone has to be dedicated to maintaining it
- Security: data does not leak out

#### Benefit
- Time: feedback time, forming an efficient positive/negative feedback loop
- Usability: it meets the requirements, and can even be better than the public offering

One caveat up front: many domestic shared services are not that good, and they invite suspicion, even though they repeatedly declare they will never inspect or leak user data. There are plenty of excellent services abroad, but because of network issues you have to pay the VPN cost; besides, many of them bill in US dollars, which is expensive once converted to RMB, though their own costs are high to begin with. (T_T life abroad looks comfortable...)

For example, we originally picked GitLab partly because GitHub was sometimes too slow, especially when deploying code to production; after switching to Coding it felt less usable than GitHub. So we ran GitLab ourselves, and by then GitLab was already fairly new and shipped with pipelines, so it could serve directly as CI & CD.

Then there is the logging system. There are few good domestic services right now; abroad, loggy is decent, but for our needs it would cost at least $5000+, so we might as well build it ourselves. Also, since the data would have to be uploaded to servers abroad, the VPN cost would not be low, not to mention the latency. (And, a little selfishly, it is a chance to get familiar with ELK.)

Well, everyone makes their own choices, but speaking for myself: **while there is life, there is tinkering**.

### PS

Of the products I have used so far, ping++ is great; it at least feels good to use and the documentation is excellent, though my current company has no use for it due to data concerns. And a complaint about Getui: the documentation is a real headache...

### PPS

Domestic services are still at a development stage. Building a good product is expensive, and providers and startups make each other successful, so if you can, do support the domestic players...

### Reference

1. https://github.com/qinghuaiorg/free-for-dev-zh
2. https://github.com/ripienaar/free-for-dev

***

First published on GitHub issues: https://github.com/xizhibei/blog/issues/32 , Stars and Watches welcome

{% post_link footer %}

***
{ "pile_set_name": "Github" }
/* Copyright (C) 1995-1998 Eric Young ([email protected]) * All rights reserved. * * This package is an SSL implementation written * by Eric Young ([email protected]). * The implementation was written so as to conform with Netscapes SSL. * * This library is free for commercial and non-commercial use as long as * the following conditions are aheared to. The following conditions * apply to all code found in this distribution, be it the RC4, RSA, * lhash, DES, etc., code; not just the SSL code. The SSL documentation * included with this distribution is covered by the same copyright terms * except that the holder is Tim Hudson ([email protected]). * * Copyright remains Eric Young's, and as such any Copyright notices in * the code are not to be removed. * If this package is used in a product, Eric Young should be given attribution * as the author of the parts of the library used. * This can be in the form of a textual message at program startup or * in documentation (online or textual) provided with the package. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. All advertising materials mentioning features or use of this software * must display the following acknowledgement: * "This product includes cryptographic software written by * Eric Young ([email protected])" * The word 'cryptographic' can be left out if the rouines from the library * being used are not cryptographic related :-). * 4. If you include any Windows specific code (or a derivative thereof) from * the apps directory (application code) you must include an acknowledgement: * "This product includes software written by Tim Hudson ([email protected])" * * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * * The licence and distribution terms for any publically available version or * derivative of this code cannot be changed. i.e. this code cannot simply be * copied and put under another distribution licence * [including the GNU Public Licence.] 
*/ #include <openssl/cipher.h> #include <assert.h> #include <string.h> #include <openssl/err.h> #include <openssl/mem.h> #include <openssl/nid.h> #include "internal.h" #include "../internal.h" const EVP_CIPHER *EVP_get_cipherbynid(int nid) { switch (nid) { case NID_rc2_cbc: return EVP_rc2_cbc(); case NID_rc2_40_cbc: return EVP_rc2_40_cbc(); case NID_des_ede3_cbc: return EVP_des_ede3_cbc(); case NID_des_ede_cbc: return EVP_des_cbc(); case NID_aes_128_cbc: return EVP_aes_128_cbc(); case NID_aes_192_cbc: return EVP_aes_192_cbc(); case NID_aes_256_cbc: return EVP_aes_256_cbc(); default: return NULL; } } void EVP_CIPHER_CTX_init(EVP_CIPHER_CTX *ctx) { OPENSSL_memset(ctx, 0, sizeof(EVP_CIPHER_CTX)); } EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void) { EVP_CIPHER_CTX *ctx = OPENSSL_malloc(sizeof(EVP_CIPHER_CTX)); if (ctx) { EVP_CIPHER_CTX_init(ctx); } return ctx; } int EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *c) { if (c->cipher != NULL) { if (c->cipher->cleanup) { c->cipher->cleanup(c); } OPENSSL_cleanse(c->cipher_data, c->cipher->ctx_size); } OPENSSL_free(c->cipher_data); OPENSSL_memset(c, 0, sizeof(EVP_CIPHER_CTX)); return 1; } void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx) { if (ctx) { EVP_CIPHER_CTX_cleanup(ctx); OPENSSL_free(ctx); } } int EVP_CIPHER_CTX_copy(EVP_CIPHER_CTX *out, const EVP_CIPHER_CTX *in) { if (in == NULL || in->cipher == NULL) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INPUT_NOT_INITIALIZED); return 0; } EVP_CIPHER_CTX_cleanup(out); OPENSSL_memcpy(out, in, sizeof(EVP_CIPHER_CTX)); if (in->cipher_data && in->cipher->ctx_size) { out->cipher_data = OPENSSL_malloc(in->cipher->ctx_size); if (!out->cipher_data) { out->cipher = NULL; OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE); return 0; } OPENSSL_memcpy(out->cipher_data, in->cipher_data, in->cipher->ctx_size); } if (in->cipher->flags & EVP_CIPH_CUSTOM_COPY) { if (!in->cipher->ctrl((EVP_CIPHER_CTX *)in, EVP_CTRL_COPY, 0, out)) { out->cipher = NULL; return 0; } } return 1; } int EVP_CipherInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *engine, const uint8_t *key, const uint8_t *iv, int enc) { if (enc == -1) { enc = ctx->encrypt; } else { if (enc) { enc = 1; } ctx->encrypt = enc; } if (cipher) { /* Ensure a context left from last time is cleared (the previous check * attempted to avoid this if the same ENGINE and EVP_CIPHER could be * used). 
*/ if (ctx->cipher) { EVP_CIPHER_CTX_cleanup(ctx); /* Restore encrypt and flags */ ctx->encrypt = enc; } ctx->cipher = cipher; if (ctx->cipher->ctx_size) { ctx->cipher_data = OPENSSL_malloc(ctx->cipher->ctx_size); if (!ctx->cipher_data) { ctx->cipher = NULL; OPENSSL_PUT_ERROR(CIPHER, ERR_R_MALLOC_FAILURE); return 0; } } else { ctx->cipher_data = NULL; } ctx->key_len = cipher->key_len; ctx->flags = 0; if (ctx->cipher->flags & EVP_CIPH_CTRL_INIT) { if (!EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_INIT, 0, NULL)) { ctx->cipher = NULL; OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INITIALIZATION_ERROR); return 0; } } } else if (!ctx->cipher) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_NO_CIPHER_SET); return 0; } /* we assume block size is a power of 2 in *cryptUpdate */ assert(ctx->cipher->block_size == 1 || ctx->cipher->block_size == 8 || ctx->cipher->block_size == 16); if (!(EVP_CIPHER_CTX_flags(ctx) & EVP_CIPH_CUSTOM_IV)) { switch (EVP_CIPHER_CTX_mode(ctx)) { case EVP_CIPH_STREAM_CIPHER: case EVP_CIPH_ECB_MODE: break; case EVP_CIPH_CFB_MODE: ctx->num = 0; /* fall-through */ case EVP_CIPH_CBC_MODE: assert(EVP_CIPHER_CTX_iv_length(ctx) <= sizeof(ctx->iv)); if (iv) { OPENSSL_memcpy(ctx->oiv, iv, EVP_CIPHER_CTX_iv_length(ctx)); } OPENSSL_memcpy(ctx->iv, ctx->oiv, EVP_CIPHER_CTX_iv_length(ctx)); break; case EVP_CIPH_CTR_MODE: case EVP_CIPH_OFB_MODE: ctx->num = 0; /* Don't reuse IV for CTR mode */ if (iv) { OPENSSL_memcpy(ctx->iv, iv, EVP_CIPHER_CTX_iv_length(ctx)); } break; default: return 0; } } if (key || (ctx->cipher->flags & EVP_CIPH_ALWAYS_CALL_INIT)) { if (!ctx->cipher->init(ctx, key, iv, enc)) { return 0; } } ctx->buf_len = 0; ctx->final_used = 0; ctx->block_mask = ctx->cipher->block_size - 1; return 1; } int EVP_EncryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, const uint8_t *key, const uint8_t *iv) { return EVP_CipherInit_ex(ctx, cipher, impl, key, iv, 1); } int EVP_DecryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, ENGINE *impl, const uint8_t *key, const uint8_t *iv) { return EVP_CipherInit_ex(ctx, cipher, impl, key, iv, 0); } int EVP_EncryptUpdate(EVP_CIPHER_CTX *ctx, uint8_t *out, int *out_len, const uint8_t *in, int in_len) { int i, j, bl; if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { i = ctx->cipher->cipher(ctx, out, in, in_len); if (i < 0) { return 0; } else { *out_len = i; } return 1; } if (in_len <= 0) { *out_len = 0; return in_len == 0; } if (ctx->buf_len == 0 && (in_len & ctx->block_mask) == 0) { if (ctx->cipher->cipher(ctx, out, in, in_len)) { *out_len = in_len; return 1; } else { *out_len = 0; return 0; } } i = ctx->buf_len; bl = ctx->cipher->block_size; assert(bl <= (int)sizeof(ctx->buf)); if (i != 0) { if (bl - i > in_len) { OPENSSL_memcpy(&ctx->buf[i], in, in_len); ctx->buf_len += in_len; *out_len = 0; return 1; } else { j = bl - i; OPENSSL_memcpy(&ctx->buf[i], in, j); if (!ctx->cipher->cipher(ctx, out, ctx->buf, bl)) { return 0; } in_len -= j; in += j; out += bl; *out_len = bl; } } else { *out_len = 0; } i = in_len & ctx->block_mask; in_len -= i; if (in_len > 0) { if (!ctx->cipher->cipher(ctx, out, in, in_len)) { return 0; } *out_len += in_len; } if (i != 0) { OPENSSL_memcpy(ctx->buf, &in[in_len], i); } ctx->buf_len = i; return 1; } int EVP_EncryptFinal_ex(EVP_CIPHER_CTX *ctx, uint8_t *out, int *out_len) { int n, ret; unsigned int i, b, bl; if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { ret = ctx->cipher->cipher(ctx, out, NULL, 0); if (ret < 0) { return 0; } else { *out_len = ret; } return 1; } b = ctx->cipher->block_size; assert(b <= 
sizeof(ctx->buf)); if (b == 1) { *out_len = 0; return 1; } bl = ctx->buf_len; if (ctx->flags & EVP_CIPH_NO_PADDING) { if (bl) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH); return 0; } *out_len = 0; return 1; } n = b - bl; for (i = bl; i < b; i++) { ctx->buf[i] = n; } ret = ctx->cipher->cipher(ctx, out, ctx->buf, b); if (ret) { *out_len = b; } return ret; } int EVP_DecryptUpdate(EVP_CIPHER_CTX *ctx, uint8_t *out, int *out_len, const uint8_t *in, int in_len) { int fix_len; unsigned int b; if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { int r = ctx->cipher->cipher(ctx, out, in, in_len); if (r < 0) { *out_len = 0; return 0; } else { *out_len = r; } return 1; } if (in_len <= 0) { *out_len = 0; return in_len == 0; } if (ctx->flags & EVP_CIPH_NO_PADDING) { return EVP_EncryptUpdate(ctx, out, out_len, in, in_len); } b = ctx->cipher->block_size; assert(b <= sizeof(ctx->final)); if (ctx->final_used) { OPENSSL_memcpy(out, ctx->final, b); out += b; fix_len = 1; } else { fix_len = 0; } if (!EVP_EncryptUpdate(ctx, out, out_len, in, in_len)) { return 0; } /* if we have 'decrypted' a multiple of block size, make sure * we have a copy of this last block */ if (b > 1 && !ctx->buf_len) { *out_len -= b; ctx->final_used = 1; OPENSSL_memcpy(ctx->final, &out[*out_len], b); } else { ctx->final_used = 0; } if (fix_len) { *out_len += b; } return 1; } int EVP_DecryptFinal_ex(EVP_CIPHER_CTX *ctx, unsigned char *out, int *out_len) { int i, n; unsigned int b; *out_len = 0; if (ctx->cipher->flags & EVP_CIPH_FLAG_CUSTOM_CIPHER) { i = ctx->cipher->cipher(ctx, out, NULL, 0); if (i < 0) { return 0; } else { *out_len = i; } return 1; } b = ctx->cipher->block_size; if (ctx->flags & EVP_CIPH_NO_PADDING) { if (ctx->buf_len) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH); return 0; } *out_len = 0; return 1; } if (b > 1) { if (ctx->buf_len || !ctx->final_used) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_WRONG_FINAL_BLOCK_LENGTH); return 0; } assert(b <= sizeof(ctx->final)); /* The following assumes that the ciphertext has been authenticated. * Otherwise it provides a padding oracle. 
*/ n = ctx->final[b - 1]; if (n == 0 || n > (int)b) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT); return 0; } for (i = 0; i < n; i++) { if (ctx->final[--b] != n) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT); return 0; } } n = ctx->cipher->block_size - n; for (i = 0; i < n; i++) { out[i] = ctx->final[i]; } *out_len = n; } else { *out_len = 0; } return 1; } int EVP_Cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in, size_t in_len) { return ctx->cipher->cipher(ctx, out, in, in_len); } int EVP_CipherUpdate(EVP_CIPHER_CTX *ctx, uint8_t *out, int *out_len, const uint8_t *in, int in_len) { if (ctx->encrypt) { return EVP_EncryptUpdate(ctx, out, out_len, in, in_len); } else { return EVP_DecryptUpdate(ctx, out, out_len, in, in_len); } } int EVP_CipherFinal_ex(EVP_CIPHER_CTX *ctx, uint8_t *out, int *out_len) { if (ctx->encrypt) { return EVP_EncryptFinal_ex(ctx, out, out_len); } else { return EVP_DecryptFinal_ex(ctx, out, out_len); } } const EVP_CIPHER *EVP_CIPHER_CTX_cipher(const EVP_CIPHER_CTX *ctx) { return ctx->cipher; } int EVP_CIPHER_CTX_nid(const EVP_CIPHER_CTX *ctx) { return ctx->cipher->nid; } unsigned EVP_CIPHER_CTX_block_size(const EVP_CIPHER_CTX *ctx) { return ctx->cipher->block_size; } unsigned EVP_CIPHER_CTX_key_length(const EVP_CIPHER_CTX *ctx) { return ctx->key_len; } unsigned EVP_CIPHER_CTX_iv_length(const EVP_CIPHER_CTX *ctx) { return ctx->cipher->iv_len; } void *EVP_CIPHER_CTX_get_app_data(const EVP_CIPHER_CTX *ctx) { return ctx->app_data; } void EVP_CIPHER_CTX_set_app_data(EVP_CIPHER_CTX *ctx, void *data) { ctx->app_data = data; } uint32_t EVP_CIPHER_CTX_flags(const EVP_CIPHER_CTX *ctx) { return ctx->cipher->flags & ~EVP_CIPH_MODE_MASK; } uint32_t EVP_CIPHER_CTX_mode(const EVP_CIPHER_CTX *ctx) { return ctx->cipher->flags & EVP_CIPH_MODE_MASK; } int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *ctx, int command, int arg, void *ptr) { int ret; if (!ctx->cipher) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_NO_CIPHER_SET); return 0; } if (!ctx->cipher->ctrl) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_CTRL_NOT_IMPLEMENTED); return 0; } ret = ctx->cipher->ctrl(ctx, command, arg, ptr); if (ret == -1) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_CTRL_OPERATION_NOT_IMPLEMENTED); return 0; } return ret; } int EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *ctx, int pad) { if (pad) { ctx->flags &= ~EVP_CIPH_NO_PADDING; } else { ctx->flags |= EVP_CIPH_NO_PADDING; } return 1; } int EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *c, unsigned key_len) { if (c->key_len == key_len) { return 1; } if (key_len == 0 || !(c->cipher->flags & EVP_CIPH_VARIABLE_LENGTH)) { OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_KEY_LENGTH); return 0; } c->key_len = key_len; return 1; } int EVP_CIPHER_nid(const EVP_CIPHER *cipher) { return cipher->nid; } unsigned EVP_CIPHER_block_size(const EVP_CIPHER *cipher) { return cipher->block_size; } unsigned EVP_CIPHER_key_length(const EVP_CIPHER *cipher) { return cipher->key_len; } unsigned EVP_CIPHER_iv_length(const EVP_CIPHER *cipher) { return cipher->iv_len; } uint32_t EVP_CIPHER_flags(const EVP_CIPHER *cipher) { return cipher->flags & ~EVP_CIPH_MODE_MASK; } uint32_t EVP_CIPHER_mode(const EVP_CIPHER *cipher) { return cipher->flags & EVP_CIPH_MODE_MASK; } int EVP_CipherInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, const uint8_t *key, const uint8_t *iv, int enc) { if (cipher) { EVP_CIPHER_CTX_init(ctx); } return EVP_CipherInit_ex(ctx, cipher, NULL, key, iv, enc); } int EVP_EncryptInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, const uint8_t *key, const uint8_t *iv) { return 
EVP_CipherInit(ctx, cipher, key, iv, 1); } int EVP_DecryptInit(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, const uint8_t *key, const uint8_t *iv) { return EVP_CipherInit(ctx, cipher, key, iv, 0); } int EVP_add_cipher_alias(const char *a, const char *b) { return 1; } const EVP_CIPHER *EVP_get_cipherbyname(const char *name) { if (OPENSSL_strcasecmp(name, "rc4") == 0) { return EVP_rc4(); } else if (OPENSSL_strcasecmp(name, "des-cbc") == 0) { return EVP_des_cbc(); } else if (OPENSSL_strcasecmp(name, "des-ede3-cbc") == 0 || OPENSSL_strcasecmp(name, "3des") == 0) { return EVP_des_ede3_cbc(); } else if (OPENSSL_strcasecmp(name, "aes-128-cbc") == 0) { return EVP_aes_128_cbc(); } else if (OPENSSL_strcasecmp(name, "aes-256-cbc") == 0) { return EVP_aes_256_cbc(); } else if (OPENSSL_strcasecmp(name, "aes-128-ctr") == 0) { return EVP_aes_128_ctr(); } else if (OPENSSL_strcasecmp(name, "aes-256-ctr") == 0) { return EVP_aes_256_ctr(); } else if (OPENSSL_strcasecmp(name, "aes-128-ecb") == 0) { return EVP_aes_128_ecb(); } else if (OPENSSL_strcasecmp(name, "aes-256-ecb") == 0) { return EVP_aes_256_ecb(); } return NULL; }
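The EVP_* entry points above compose into a simple init/update/final pattern. The following is a minimal sketch of one-shot CBC encryption using only functions defined in this file; the all-zero key and IV are illustrative placeholders, not values to use in practice.

#include <openssl/cipher.h>
#include <stdint.h>
#include <stdio.h>

/* Placeholder 16-byte key and IV for illustration only. */
static const uint8_t kKey[16] = {0};
static const uint8_t kIV[16] = {0};

int encrypt_example(void) {
  const uint8_t plaintext[] = "hello, world";
  uint8_t ciphertext[sizeof(plaintext) + 16]; /* room for one block of padding */
  int len = 0, total = 0;

  EVP_CIPHER_CTX ctx;
  EVP_CIPHER_CTX_init(&ctx);

  /* Initialize for AES-128-CBC encryption, then feed the data in. */
  if (!EVP_EncryptInit_ex(&ctx, EVP_aes_128_cbc(), NULL, kKey, kIV) ||
      !EVP_EncryptUpdate(&ctx, ciphertext, &len, plaintext,
                         (int)sizeof(plaintext))) {
    EVP_CIPHER_CTX_cleanup(&ctx);
    return 0;
  }
  total = len;

  /* Flush the final (padded) block. */
  if (!EVP_EncryptFinal_ex(&ctx, ciphertext + total, &len)) {
    EVP_CIPHER_CTX_cleanup(&ctx);
    return 0;
  }
  total += len;

  printf("ciphertext length: %d\n", total);
  EVP_CIPHER_CTX_cleanup(&ctx);
  return 1;
}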
{ "name": "bluetoothconnector", "full_name": "bluetoothconnector", "oldname": null, "aliases": [ ], "versioned_formulae": [ ], "desc": "Connect and disconnect Bluetooth devices", "license": "MIT", "homepage": "https://github.com/lapfelix/BluetoothConnector", "versions": { "stable": "2.0.0", "head": "HEAD", "bottle": true }, "urls": { "stable": { "url": "https://github.com/lapfelix/BluetoothConnector/archive/2.0.0.tar.gz", "tag": null, "revision": null } }, "revision": 0, "version_scheme": 0, "bottle": { "stable": { "rebuild": 0, "cellar": ":any_skip_relocation", "prefix": "/home/linuxbrew/.linuxbrew", "root_url": "https://linuxbrew.bintray.com/bottles", "files": { "catalina": { "url": "https://linuxbrew.bintray.com/bottles/bluetoothconnector-2.0.0.catalina.bottle.tar.gz", "sha256": "38d8b5c89fd8fee4a746eadaceb399d5b7e1148db2cee896381b6e093aef56e3" }, "mojave": { "url": "https://linuxbrew.bintray.com/bottles/bluetoothconnector-2.0.0.mojave.bottle.tar.gz", "sha256": "1a0c1e83b5640a35c48ba982f1b7cf5b1bebdda6fd4957368262c3e001c740e3" } } } }, "keg_only": false, "bottle_disabled": false, "options": [ ], "build_dependencies": [ ], "dependencies": [ ], "recommended_dependencies": [ ], "optional_dependencies": [ ], "uses_from_macos": [ ], "requirements": [ { "name": "xcode", "cask": null, "download": null, "version": "11.0", "contexts": [ "build" ] } ], "conflicts_with": [ ], "caveats": null, "installed": [ ], "linked_keg": null, "pinned": false, "outdated": false, "deprecated": false, "disabled": false }
<?xml version='1.0'?> <xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" xmlns:fo="http://www.w3.org/1999/XSL/Format" xmlns:exsl="http://exslt.org/common" exclude-result-prefixes="exsl" version='1.0'> <!-- ******************************************************************** $Id: xref.xsl,v 1.1 2005/08/28 00:35:05 cbauer Exp $ ******************************************************************** This file is part of the XSL DocBook Stylesheet distribution. See ../README or http://nwalsh.com/docbook/xsl/ for copyright and other information. ******************************************************************** --> <!-- Create keys for quickly looking up olink targets --> <xsl:key name="targetdoc-key" match="document" use="@targetdoc" /> <xsl:key name="targetptr-key" match="div|obj" use="concat(ancestor::document/@targetdoc, '/', @targetptr)" /> <!-- ==================================================================== --> <xsl:template match="anchor"> <fo:wrapper id="{@id}"/> </xsl:template> <!-- ==================================================================== --> <xsl:template match="xref" name="xref"> <xsl:variable name="targets" select="key('id',@linkend)"/> <xsl:variable name="target" select="$targets[1]"/> <xsl:variable name="refelem" select="local-name($target)"/> <xsl:call-template name="check.id.unique"> <xsl:with-param name="linkend" select="@linkend"/> </xsl:call-template> <xsl:choose> <xsl:when test="$refelem=''"> <xsl:message> <xsl:text>XRef to nonexistent id: </xsl:text> <xsl:value-of select="@linkend"/> </xsl:message> <xsl:text>???</xsl:text> </xsl:when> <xsl:when test="@endterm"> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:variable name="etargets" select="key('id',@endterm)"/> <xsl:variable name="etarget" select="$etargets[1]"/> <xsl:choose> <xsl:when test="count($etarget) = 0"> <xsl:message> <xsl:value-of select="count($etargets)"/> <xsl:text>Endterm points to nonexistent ID: </xsl:text> <xsl:value-of select="@endterm"/> </xsl:message> <xsl:text>???</xsl:text> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="$etarget" mode="endterm"/> </xsl:otherwise> </xsl:choose> </fo:basic-link> </xsl:when> <xsl:when test="$target/@xreflabel"> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:call-template name="xref.xreflabel"> <xsl:with-param name="target" select="$target"/> </xsl:call-template> </fo:basic-link> </xsl:when> <xsl:otherwise> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:apply-templates select="$target" mode="xref-to"> <xsl:with-param name="referrer" select="."/> <xsl:with-param name="xrefstyle"> <xsl:choose> <xsl:when test="@role and not(@xrefstyle) and $use.role.as.xrefstyle != 0"> <xsl:value-of select="@role"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="@xrefstyle"/> </xsl:otherwise> </xsl:choose> </xsl:with-param> </xsl:apply-templates> </fo:basic-link> </xsl:otherwise> </xsl:choose> <!-- Add standard page reference? 
--> <xsl:if test="not(starts-with(normalize-space(@xrefstyle), 'select:') != '' and (contains(@xrefstyle, 'page') or contains(@xrefstyle, 'Page'))) and ( $insert.xref.page.number = 'yes' or $insert.xref.page.number = '1') or local-name($target) = 'para'"> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:apply-templates select="$target" mode="page.citation"> <xsl:with-param name="id" select="@linkend"/> </xsl:apply-templates> </fo:basic-link> </xsl:if> </xsl:template> <!-- ==================================================================== --> <xsl:template match="*" mode="endterm"> <!-- Process the children of the endterm element --> <xsl:variable name="endterm"> <xsl:apply-templates select="child::node()"/> </xsl:variable> <xsl:choose> <xsl:when test="function-available('exsl:node-set')"> <xsl:apply-templates select="exsl:node-set($endterm)" mode="remove-ids"/> </xsl:when> <xsl:otherwise> <xsl:copy-of select="$endterm"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="*" mode="remove-ids"> <xsl:copy> <xsl:for-each select="@*"> <xsl:choose> <xsl:when test="name(.) != 'id'"> <xsl:copy/> </xsl:when> <xsl:otherwise> <xsl:message>removing <xsl:value-of select="name(.)"/></xsl:message> </xsl:otherwise> </xsl:choose> </xsl:for-each> <xsl:apply-templates mode="remove-ids"/> </xsl:copy> </xsl:template> <!--- ==================================================================== --> <xsl:template match="*" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:message> <xsl:text>Don't know what gentext to create for xref to: "</xsl:text> <xsl:value-of select="name(.)"/> <xsl:text>"</xsl:text> </xsl:message> <xsl:text>???</xsl:text> </xsl:template> <xsl:template match="title" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <!-- if you xref to a title, xref to the parent... --> <xsl:choose> <!-- FIXME: how reliable is this? --> <xsl:when test="contains(local-name(parent::*), 'info')"> <xsl:apply-templates select="parent::*[2]" mode="xref-to"> <xsl:with-param name="referrer" select="$referrer"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> </xsl:apply-templates> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="parent::*" mode="xref-to"> <xsl:with-param name="referrer" select="$referrer"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> </xsl:apply-templates> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="abstract|article|authorblurb|bibliodiv|bibliomset |biblioset|blockquote|calloutlist|caution|colophon |constraintdef|formalpara|glossdiv|important|indexdiv |itemizedlist|legalnotice|lot|msg|msgexplan|msgmain |msgrel|msgset|msgsub|note|orderedlist|partintro |productionset|qandadiv|refsynopsisdiv|segmentedlist |set|setindex|sidebar|tip|toc|variablelist|warning" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <!-- catch-all for things with (possibly optional) titles --> <xsl:apply-templates select="." 
mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="author|editor|othercredit|personname" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:call-template name="person.name"/> </xsl:template> <xsl:template match="authorgroup" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:call-template name="person.name.list"/> </xsl:template> <xsl:template match="figure|example|table|equation" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="procedure" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="cmdsynopsis" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="(.//command)[1]" mode="xref"/> </xsl:template> <xsl:template match="funcsynopsis" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="(.//function)[1]" mode="xref"/> </xsl:template> <xsl:template match="dedication|preface|chapter|appendix" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <fo:inline text-decoration="underline" color="blue"> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </fo:inline> </xsl:template> <xsl:template match="bibliography" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="biblioentry|bibliomixed" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <!-- handles both biblioentry and bibliomixed --> <xsl:text>[</xsl:text> <xsl:choose> <xsl:when test="string(.) 
= ''"> <xsl:variable name="bib" select="document($bibliography.collection,.)"/> <xsl:variable name="id" select="@id"/> <xsl:variable name="entry" select="$bib/bibliography/*[@id=$id][1]"/> <xsl:choose> <xsl:when test="$entry"> <xsl:choose> <xsl:when test="$bibliography.numbered != 0"> <xsl:number from="bibliography" count="biblioentry|bibliomixed" level="any" format="1"/> </xsl:when> <xsl:when test="local-name($entry/*[1]) = 'abbrev'"> <xsl:apply-templates select="$entry/*[1]"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="@id"/> </xsl:otherwise> </xsl:choose> </xsl:when> <xsl:otherwise> <xsl:message> <xsl:text>No bibliography entry: </xsl:text> <xsl:value-of select="$id"/> <xsl:text> found in </xsl:text> <xsl:value-of select="$bibliography.collection"/> </xsl:message> <xsl:value-of select="@id"/> </xsl:otherwise> </xsl:choose> </xsl:when> <xsl:otherwise> <xsl:choose> <xsl:when test="$bibliography.numbered != 0"> <xsl:number from="bibliography" count="biblioentry|bibliomixed" level="any" format="1"/> </xsl:when> <xsl:when test="local-name(*[1]) = 'abbrev'"> <xsl:apply-templates select="*[1]"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="@id"/> </xsl:otherwise> </xsl:choose> </xsl:otherwise> </xsl:choose> <xsl:text>]</xsl:text> </xsl:template> <xsl:template match="glossary" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="glossentry" mode="xref-to"> <xsl:choose> <xsl:when test="$glossentry.show.acronym = 'primary'"> <xsl:choose> <xsl:when test="acronym|abbrev"> <xsl:apply-templates select="(acronym|abbrev)[1]"/> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="glossterm[1]" mode="xref-to"/> </xsl:otherwise> </xsl:choose> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="glossterm[1]" mode="xref-to"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="glossterm" mode="xref-to"> <xsl:apply-templates/> </xsl:template> <xsl:template match="index" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="listitem" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="section|simplesect |sect1|sect2|sect3|sect4|sect5 |refsect1|refsect2|refsect3|refsection" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <fo:inline text-decoration="underline" color="blue"> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </fo:inline> <!-- What about "in Chapter X"? 
--> </xsl:template> <xsl:template match="bridgehead" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> <!-- What about "in Chapter X"? --> </xsl:template> <xsl:template match="qandaset" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="qandadiv" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="qandaentry" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="question[1]" mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="question|answer" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="part|reference" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="refentry" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:choose> <xsl:when test="refmeta/refentrytitle"> <xsl:apply-templates select="refmeta/refentrytitle"/> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="refnamediv/refname[1]"/> </xsl:otherwise> </xsl:choose> <xsl:apply-templates select="refmeta/manvolnum"/> </xsl:template> <xsl:template match="refnamediv" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="refname[1]" mode="xref-to"> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="refname" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates mode="xref-to"> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="step" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:call-template name="gentext"> <xsl:with-param name="key" select="'Step'"/> </xsl:call-template> <xsl:text> </xsl:text> <xsl:apply-templates select="." 
mode="number"/> </xsl:template> <xsl:template match="varlistentry" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="term[1]" mode="xref-to"> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="varlistentry/term" mode="xref-to"> <!-- to avoid the comma that will be generated if there are several terms --> <xsl:apply-templates/> </xsl:template> <xsl:template match="co" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="callout-bug"/> </xsl:template> <xsl:template match="book" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> </xsl:template> <xsl:template match="para" mode="xref-to"> <xsl:param name="referrer"/> <xsl:param name="xrefstyle"/> <xsl:variable name="context" select="(ancestor::simplesect |ancestor::section |ancestor::sect1 |ancestor::sect2 |ancestor::sect3 |ancestor::sect4 |ancestor::sect5 |ancestor::refsection |ancestor::refsect1 |ancestor::refsect2 |ancestor::refsect3 |ancestor::chapter |ancestor::appendix |ancestor::preface |ancestor::partintro |ancestor::dedication |ancestor::colophon |ancestor::bibliography |ancestor::index |ancestor::glossary |ancestor::glossentry |ancestor::listitem |ancestor::varlistentry)[last()]"/> <xsl:apply-templates select="$context" mode="xref-to"/> <!-- <xsl:apply-templates select="." mode="object.xref.markup"> <xsl:with-param name="purpose" select="'xref'"/> <xsl:with-param name="xrefstyle" select="$xrefstyle"/> <xsl:with-param name="referrer" select="$referrer"/> </xsl:apply-templates> --> </xsl:template> <!-- ==================================================================== --> <xsl:template match="link" name="link"> <xsl:variable name="targets" select="key('id',@linkend)"/> <xsl:variable name="target" select="$targets[1]"/> <xsl:call-template name="check.id.unique"> <xsl:with-param name="linkend" select="@linkend"/> </xsl:call-template> <fo:basic-link internal-destination="{@linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:choose> <xsl:when test="count(child::node()) &gt; 0"> <!-- If it has content, use it --> <xsl:apply-templates/> </xsl:when> <xsl:otherwise> <!-- else look for an endterm --> <xsl:choose> <xsl:when test="@endterm"> <xsl:variable name="etargets" select="key('id',@endterm)"/> <xsl:variable name="etarget" select="$etargets[1]"/> <xsl:choose> <xsl:when test="count($etarget) = 0"> <xsl:message> <xsl:value-of select="count($etargets)"/> <xsl:text>Endterm points to nonexistent ID: </xsl:text> <xsl:value-of select="@endterm"/> </xsl:message> <xsl:text>???</xsl:text> </xsl:when> <xsl:otherwise> <xsl:apply-templates select="$etarget" mode="endterm"/> </xsl:otherwise> </xsl:choose> </xsl:when> <xsl:otherwise> <xsl:message> <xsl:text>Link element has no content and no Endterm. 
</xsl:text> <xsl:text>Nothing to show in the link to </xsl:text> <xsl:value-of select="$target"/> </xsl:message> <xsl:text>???</xsl:text> </xsl:otherwise> </xsl:choose> </xsl:otherwise> </xsl:choose> </fo:basic-link> </xsl:template> <xsl:template match="ulink" name="ulink"> <fo:basic-link xsl:use-attribute-sets="xref.properties"> <xsl:attribute name="external-destination"> <xsl:call-template name="fo-external-image"> <xsl:with-param name="filename" select="@url"/> </xsl:call-template> </xsl:attribute> <xsl:choose> <xsl:when test="count(child::node())=0"> <xsl:call-template name="hyphenate-url"> <xsl:with-param name="url" select="@url"/> </xsl:call-template> </xsl:when> <xsl:otherwise> <xsl:apply-templates/> </xsl:otherwise> </xsl:choose> </fo:basic-link> <xsl:if test="count(child::node()) != 0 and string(.) != @url and $ulink.show != 0"> <!-- yes, show the URI --> <xsl:choose> <xsl:when test="$ulink.footnotes != 0 and not(ancestor::footnote)"> <xsl:text>&#xA0;</xsl:text> <fo:footnote> <xsl:call-template name="ulink.footnote.number"/> <fo:footnote-body font-family="{$body.fontset}" font-size="{$footnote.font.size}"> <fo:block> <xsl:call-template name="ulink.footnote.number"/> <xsl:text> </xsl:text> <fo:inline> <xsl:value-of select="@url"/> </fo:inline> </fo:block> </fo:footnote-body> </fo:footnote> </xsl:when> <xsl:otherwise> <fo:inline hyphenate="false"> <xsl:text> [</xsl:text> <xsl:call-template name="hyphenate-url"> <xsl:with-param name="url" select="@url"/> </xsl:call-template> <xsl:text>]</xsl:text> </fo:inline> </xsl:otherwise> </xsl:choose> </xsl:if> </xsl:template> <xsl:template name="ulink.footnote.number"> <fo:inline font-size="90%"> <!-- FIXME: this isn't going to be perfect! --> <xsl:text>[</xsl:text> <xsl:number level="any" from="chapter|appendix|preface|article|refentry" format="{$ulink.footnote.number.format}"/> <xsl:text>]</xsl:text> </fo:inline> </xsl:template> <xsl:template name="hyphenate-url"> <xsl:param name="url" select="''"/> <xsl:choose> <xsl:when test="$ulink.hyphenate = ''"> <xsl:value-of select="$url"/> </xsl:when> <xsl:when test="contains($url, '/')"> <xsl:value-of select="substring-before($url, '/')"/> <xsl:text>/</xsl:text> <xsl:copy-of select="$ulink.hyphenate"/> <xsl:call-template name="hyphenate-url"> <xsl:with-param name="url" select="substring-after($url, '/')"/> </xsl:call-template> </xsl:when> <xsl:otherwise> <xsl:value-of select="$url"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="olink"> <xsl:param name="target.database" select="document($target.database.document, /)"/> <xsl:variable name="localinfo" select="@localinfo"/> <!-- Olink that points to internal id can be a link --> <xsl:variable name="linkend"> <xsl:choose> <xsl:when test="@targetdoc and not(@targetptr)" > <xsl:message>Olink missing @targetptr attribute value</xsl:message> </xsl:when> <xsl:when test="not(@targetdoc) and @targetptr" > <xsl:message>Olink missing @targetdoc attribute value</xsl:message> </xsl:when> <xsl:when test="@targetdoc and @targetptr"> <xsl:if test="$current.docid = @targetdoc"> <xsl:if test="id(@targetptr)"> <xsl:value-of select="@targetptr"/> </xsl:if> </xsl:if> </xsl:when> </xsl:choose> </xsl:variable> <xsl:choose> <xsl:when test="$linkend != ''"> <fo:basic-link internal-destination="{$linkend}" xsl:use-attribute-sets="xref.properties"> <xsl:call-template name="olink.hottext"> <xsl:with-param name="target.database" select="$target.database"/> </xsl:call-template> </fo:basic-link> </xsl:when> <xsl:otherwise> <xsl:call-template 
name="olink.hottext"> <xsl:with-param name="target.database" select="$target.database"/> </xsl:call-template> <!-- Append other document title if appropriate --> <xsl:if test="@targetdoc and @targetptr and $olink.doctitle != 0 and $current.docid != '' and $current.docid != @targetdoc"> <xsl:variable name="doctitle"> <xsl:variable name="seek.targetdoc" select="@targetdoc"/> <xsl:for-each select="$target.database" > <xsl:value-of select="key('targetdoc-key', $seek.targetdoc)/div[1]/ttl" /> </xsl:for-each> </xsl:variable> <xsl:if test="$doctitle != ''"> <xsl:text> (</xsl:text><xsl:value-of select="$doctitle"/><xsl:text>)</xsl:text> </xsl:if> </xsl:if> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template name="olink.hottext"> <xsl:param name="target.database"/> <xsl:choose> <!-- If it has elements or text (not just PI or comment) --> <xsl:when test="child::text() or child::*"> <xsl:apply-templates/> </xsl:when> <xsl:when test="@targetdoc and @targetptr"> <!-- Get the xref text for this record --> <xsl:variable name="seek.targetdoc" select="@targetdoc"/> <xsl:variable name="seek.targetptr" select="@targetptr"/> <xsl:variable name="xref.text" > <xsl:for-each select="$target.database" > <xsl:value-of select="key('targetptr-key', concat($seek.targetdoc, '/', $seek.targetptr))/xreftext"/> </xsl:for-each> </xsl:variable> <xsl:choose> <xsl:when test="$use.local.olink.style != 0"> <!-- Get the element name and lang for this targetptr --> <xsl:variable name="element" > <xsl:for-each select="$target.database" > <xsl:value-of select="key('targetptr-key', concat($seek.targetdoc, '/', $seek.targetptr))/@element"/> </xsl:for-each> </xsl:variable> <xsl:variable name="lang"> <xsl:variable name="candidate"> <xsl:for-each select="$target.database" > <xsl:value-of select="key('targetptr-key', concat($seek.targetdoc, '/', $seek.targetptr))/@lang"/> </xsl:for-each> </xsl:variable> <xsl:choose> <xsl:when test="$candidate != ''"> <xsl:value-of select="$candidate"/> </xsl:when> <xsl:otherwise> <xsl:value-of select="'en'"/> </xsl:otherwise> </xsl:choose> </xsl:variable> <xsl:variable name="template"> <xsl:call-template name="gentext.template"> <xsl:with-param name="context" select="'title'"/> <xsl:with-param name="name" select="$element"/> <xsl:with-param name="lang" select="$lang"/> </xsl:call-template> </xsl:variable> <xsl:call-template name="substitute-markup"> <xsl:with-param name="template" select="$template"/> <xsl:with-param name="title"> <xsl:for-each select="$target.database" > <xsl:value-of select="key('targetptr-key', concat($seek.targetdoc, '/', $seek.targetptr))/ttl"/> </xsl:for-each> </xsl:with-param> <xsl:with-param name="label"> <xsl:for-each select="$target.database" > <xsl:value-of select="key('targetptr-key', concat($seek.targetdoc, '/', $seek.targetptr))/@number"/> </xsl:for-each> </xsl:with-param> </xsl:call-template> </xsl:when> <xsl:when test="$xref.text !=''"> <xsl:value-of select="$xref.text"/> </xsl:when> <xsl:otherwise> <xsl:message>Olink error: no generated text for targetdoc/targetptr = <xsl:value-of select="@targetdoc"/>/<xsl:value-of select="@targetptr"/></xsl:message> <xsl:text>????</xsl:text> </xsl:otherwise> </xsl:choose> </xsl:when> <xsl:otherwise> <xsl:text>????</xsl:text> <!-- <xsl:call-template name="olink.outline"> <xsl:with-param name="outline.base.uri" select="unparsed-entity-uri(@targetdocent)"/> <xsl:with-param name="localinfo" select="@localinfo"/> <xsl:with-param name="return" select="'xreftext'"/> </xsl:call-template> --> </xsl:otherwise> </xsl:choose> 
</xsl:template> <xsl:template name="olink.outline"> <xsl:message terminate="yes">Fatal error: what is this supposed to do?</xsl:message> </xsl:template> <!-- ==================================================================== --> <xsl:template name="title.xref"> <xsl:param name="target" select="."/> <xsl:choose> <xsl:when test="local-name($target) = 'figure' or local-name($target) = 'example' or local-name($target) = 'equation' or local-name($target) = 'table' or local-name($target) = 'dedication' or local-name($target) = 'preface' or local-name($target) = 'bibliography' or local-name($target) = 'glossary' or local-name($target) = 'index' or local-name($target) = 'setindex' or local-name($target) = 'colophon'"> <xsl:call-template name="gentext.startquote"/> <xsl:apply-templates select="$target" mode="title.markup"/> <xsl:call-template name="gentext.endquote"/> </xsl:when> <xsl:otherwise> <fo:inline font-style="italic"> <xsl:apply-templates select="$target" mode="title.markup"/> </fo:inline> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template name="number.xref"> <xsl:param name="target" select="."/> <xsl:apply-templates select="$target" mode="label.markup"/> </xsl:template> <!-- ==================================================================== --> <xsl:template name="xref.xreflabel"> <!-- called to process an xreflabel...you might use this to make --> <!-- xreflabels come out in the right font for different targets, --> <!-- for example. --> <xsl:param name="target" select="."/> <xsl:value-of select="$target/@xreflabel"/> </xsl:template> <!-- ==================================================================== --> <xsl:template match="title" mode="xref"> <xsl:apply-templates/> </xsl:template> <xsl:template match="command" mode="xref"> <xsl:call-template name="inline.boldseq"/> </xsl:template> <xsl:template match="function" mode="xref"> <xsl:call-template name="inline.monoseq"/> </xsl:template> <xsl:template match="*" mode="page.citation"> <xsl:param name="id" select="'???'"/> <fo:inline keep-together.within-line="always"> <xsl:call-template name="substitute-markup"> <xsl:with-param name="template"> <xsl:call-template name="gentext.template"> <xsl:with-param name="name" select="'page.citation'"/> <xsl:with-param name="context" select="'xref'"/> </xsl:call-template> </xsl:with-param> </xsl:call-template> </fo:inline> </xsl:template> <xsl:template match="*" mode="pagenumber.markup"> <fo:page-number-citation ref-id="{@id}"/> </xsl:template> <!-- ==================================================================== --> <xsl:template match="*" mode="insert.title.markup"> <xsl:param name="purpose"/> <xsl:param name="xrefstyle"/> <xsl:param name="title"/> <xsl:choose> <!-- FIXME: what about the case where titleabbrev is inside the info? --> <xsl:when test="$purpose = 'xref' and titleabbrev"> <xsl:apply-templates select="." 
mode="titleabbrev.markup"/> </xsl:when> <xsl:otherwise> <xsl:copy-of select="$title"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="chapter|appendix" mode="insert.title.markup"> <xsl:param name="purpose"/> <xsl:param name="xrefstyle"/> <xsl:param name="title"/> <xsl:choose> <xsl:when test="$purpose = 'xref'"> <fo:inline font-style="italic"> <xsl:copy-of select="$title"/> </fo:inline> </xsl:when> <xsl:otherwise> <xsl:copy-of select="$title"/> </xsl:otherwise> </xsl:choose> </xsl:template> <xsl:template match="*" mode="insert.subtitle.markup"> <xsl:param name="purpose"/> <xsl:param name="xrefstyle"/> <xsl:param name="subtitle"/> <xsl:copy-of select="$subtitle"/> </xsl:template> <xsl:template match="*" mode="insert.label.markup"> <xsl:param name="purpose"/> <xsl:param name="xrefstyle"/> <xsl:param name="label"/> <xsl:copy-of select="$label"/> </xsl:template> <xsl:template match="*" mode="insert.pagenumber.markup"> <xsl:param name="purpose"/> <xsl:param name="xrefstyle"/> <xsl:param name="pagenumber"/> <xsl:copy-of select="$pagenumber"/> </xsl:template> <xsl:template match="*" mode="insert.direction.markup"> <xsl:param name="purpose"/> <xsl:param name="xrefstyle"/> <xsl:param name="direction"/> <xsl:copy-of select="$direction"/> </xsl:template> </xsl:stylesheet>
package cdn //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. // // Code generated by Alibaba Cloud SDK Code Generator. // Changes may cause incorrect behavior and will be lost if the code is regenerated. // HttpCodeData is a nested struct in cdn response type HttpCodeData struct { UsageData []UsageDataInDescribeDomainHttpCodeData `json:"UsageData" xml:"UsageData"` }
/* * Copyright 2018 StreamSets Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.streamsets.pipeline.stage.processor.parser.sql; import com.streamsets.pipeline.api.base.BaseEnumChooserValues; public class JDBCTypeChooserValues extends BaseEnumChooserValues<JDBCTypes> { public JDBCTypeChooserValues() { super(JDBCTypes.class); } }
--- id: i18n title: Internationalization --- ## Add a language ### Edit your bot configs In the Admin section > Your bots > Configs ![Bot Config](assets/i18n-configs.png) ### Switch language Go back to Studio and switch the language ![Switch Language](assets/i18n-switch-lang.png) You'll see a "missing translation" notification on your content ![Missing Translation](assets/i18n-missing-translation.png) ### Translate your content Edit the content and add a translation ![Edit Content](assets/i18n-edit-content.png) ![Edited Content](assets/i18n-edited-content.png) ## Change the language Botpress uses the browser language to detect the user's language. This is stored in the `language` field of the user attributes. It is possible to change the language of a user by modifying this field. See [updateAttributes](https://botpress.com/reference/modules/_botpress_sdk_.users.html#updateattributes) Example usage: ```js await bp.users.updateAttributes('web', 'someId', { language: 'fr' }) ```
package cli import ( "fmt" "io" "os" "strings" "text/tabwriter" "text/template" ) // AppHelpTemplate is the text template for the Default help topic. // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. var AppHelpTemplate = `NAME: {{.Name}}{{if .Usage}} - {{.Usage}}{{end}} USAGE: {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} {{if .VisibleFlags}}[global options]{{end}}{{if .Commands}} command [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Version}}{{if not .HideVersion}} VERSION: {{.Version}}{{end}}{{end}}{{if .Description}} DESCRIPTION: {{.Description}}{{end}}{{if len .Authors}} AUTHOR{{with $length := len .Authors}}{{if ne 1 $length}}S{{end}}{{end}}: {{range $index, $author := .Authors}}{{if $index}} {{end}}{{$author}}{{end}}{{end}}{{if .VisibleCommands}} COMMANDS:{{range .VisibleCategories}}{{if .Name}} {{.Name}}:{{end}}{{range .VisibleCommands}} {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}}{{end}}{{end}}{{if .VisibleFlags}} GLOBAL OPTIONS: {{range $index, $option := .VisibleFlags}}{{if $index}} {{end}}{{$option}}{{end}}{{end}}{{if .Copyright}} COPYRIGHT: {{.Copyright}}{{end}} ` // CommandHelpTemplate is the text template for the command help topic. // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. var CommandHelpTemplate = `NAME: {{.HelpName}} - {{.Usage}} USAGE: {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}}{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}}{{if .Category}} CATEGORY: {{.Category}}{{end}}{{if .Description}} DESCRIPTION: {{.Description}}{{end}}{{if .VisibleFlags}} OPTIONS: {{range .VisibleFlags}}{{.}} {{end}}{{end}} ` // SubcommandHelpTemplate is the text template for the subcommand help topic. // cli.go uses text/template to render templates. You can // render custom help text by setting this variable. var SubcommandHelpTemplate = `NAME: {{.HelpName}} - {{if .Description}}{{.Description}}{{else}}{{.Usage}}{{end}} USAGE: {{if .UsageText}}{{.UsageText}}{{else}}{{.HelpName}} command{{if .VisibleFlags}} [command options]{{end}} {{if .ArgsUsage}}{{.ArgsUsage}}{{else}}[arguments...]{{end}}{{end}} COMMANDS:{{range .VisibleCategories}}{{if .Name}} {{.Name}}:{{end}}{{range .VisibleCommands}} {{join .Names ", "}}{{"\t"}}{{.Usage}}{{end}} {{end}}{{if .VisibleFlags}} OPTIONS: {{range .VisibleFlags}}{{.}} {{end}}{{end}} ` var helpCommand = Command{ Name: "help", Aliases: []string{"h"}, Usage: "Shows a list of commands or help for one command", ArgsUsage: "[command]", Action: func(c *Context) error { args := c.Args() if args.Present() { return ShowCommandHelp(c, args.First()) } ShowAppHelp(c) return nil }, } var helpSubcommand = Command{ Name: "help", Aliases: []string{"h"}, Usage: "Shows a list of commands or help for one command", ArgsUsage: "[command]", Action: func(c *Context) error { args := c.Args() if args.Present() { return ShowCommandHelp(c, args.First()) } return ShowSubcommandHelp(c) }, } // Prints help for the App or Command type helpPrinter func(w io.Writer, templ string, data interface{}) // Prints help for the App or Command with custom template function. type helpPrinterCustom func(w io.Writer, templ string, data interface{}, customFunc map[string]interface{}) // HelpPrinter is a function that writes the help output. If not set a default // is used. 
The function signature is: // func(w io.Writer, templ string, data interface{}) var HelpPrinter helpPrinter = printHelp // HelpPrinterCustom is same as HelpPrinter but // takes a custom function for template function map. var HelpPrinterCustom helpPrinterCustom = printHelpCustom // VersionPrinter prints the version for the App var VersionPrinter = printVersion // ShowAppHelpAndExit - Prints the list of subcommands for the app and exits with exit code. func ShowAppHelpAndExit(c *Context, exitCode int) { ShowAppHelp(c) os.Exit(exitCode) } // ShowAppHelp is an action that displays the help. func ShowAppHelp(c *Context) (err error) { if c.App.CustomAppHelpTemplate == "" { HelpPrinter(c.App.Writer, AppHelpTemplate, c.App) return } customAppData := func() map[string]interface{} { if c.App.ExtraInfo == nil { return nil } return map[string]interface{}{ "ExtraInfo": c.App.ExtraInfo, } } HelpPrinterCustom(c.App.Writer, c.App.CustomAppHelpTemplate, c.App, customAppData()) return nil } // DefaultAppComplete prints the list of subcommands as the default app completion method func DefaultAppComplete(c *Context) { for _, command := range c.App.Commands { if command.Hidden { continue } for _, name := range command.Names() { fmt.Fprintln(c.App.Writer, name) } } } // ShowCommandHelpAndExit - exits with code after showing help func ShowCommandHelpAndExit(c *Context, command string, code int) { ShowCommandHelp(c, command) os.Exit(code) } // ShowCommandHelp prints help for the given command func ShowCommandHelp(ctx *Context, command string) error { // show the subcommand help for a command with subcommands if command == "" { HelpPrinter(ctx.App.Writer, SubcommandHelpTemplate, ctx.App) return nil } for _, c := range ctx.App.Commands { if c.HasName(command) { if c.CustomHelpTemplate != "" { HelpPrinterCustom(ctx.App.Writer, c.CustomHelpTemplate, c, nil) } else { HelpPrinter(ctx.App.Writer, CommandHelpTemplate, c) } return nil } } if ctx.App.CommandNotFound == nil { return NewExitError(fmt.Sprintf("No help topic for '%v'", command), 3) } ctx.App.CommandNotFound(ctx, command) return nil } // ShowSubcommandHelp prints help for the given subcommand func ShowSubcommandHelp(c *Context) error { return ShowCommandHelp(c, c.Command.Name) } // ShowVersion prints the version number of the App func ShowVersion(c *Context) { VersionPrinter(c) } func printVersion(c *Context) { fmt.Fprintf(c.App.Writer, "%v version %v\n", c.App.Name, c.App.Version) } // ShowCompletions prints the lists of commands within a given context func ShowCompletions(c *Context) { a := c.App if a != nil && a.BashComplete != nil { a.BashComplete(c) } } // ShowCommandCompletions prints the custom completions for a given command func ShowCommandCompletions(ctx *Context, command string) { c := ctx.App.Command(command) if c != nil && c.BashComplete != nil { c.BashComplete(ctx) } } func printHelpCustom(out io.Writer, templ string, data interface{}, customFunc map[string]interface{}) { funcMap := template.FuncMap{ "join": strings.Join, } if customFunc != nil { for key, value := range customFunc { funcMap[key] = value } } w := tabwriter.NewWriter(out, 1, 8, 2, ' ', 0) t := template.Must(template.New("help").Funcs(funcMap).Parse(templ)) err := t.Execute(w, data) if err != nil { // If the writer is closed, t.Execute will fail, and there's nothing // we can do to recover. 
if os.Getenv("CLI_TEMPLATE_ERROR_DEBUG") != "" { fmt.Fprintf(ErrWriter, "CLI TEMPLATE ERROR: %#v\n", err) } return } w.Flush() } func printHelp(out io.Writer, templ string, data interface{}) { printHelpCustom(out, templ, data, nil) } func checkVersion(c *Context) bool { found := false if VersionFlag.GetName() != "" { eachName(VersionFlag.GetName(), func(name string) { if c.GlobalBool(name) || c.Bool(name) { found = true } }) } return found } func checkHelp(c *Context) bool { found := false if HelpFlag.GetName() != "" { eachName(HelpFlag.GetName(), func(name string) { if c.GlobalBool(name) || c.Bool(name) { found = true } }) } return found } func checkCommandHelp(c *Context, name string) bool { if c.Bool("h") || c.Bool("help") { ShowCommandHelp(c, name) return true } return false } func checkSubcommandHelp(c *Context) bool { if c.Bool("h") || c.Bool("help") { ShowSubcommandHelp(c) return true } return false } func checkShellCompleteFlag(a *App, arguments []string) (bool, []string) { if !a.EnableBashCompletion { return false, arguments } pos := len(arguments) - 1 lastArg := arguments[pos] if lastArg != "--"+BashCompletionFlag.GetName() { return false, arguments } return true, arguments[:pos] } func checkCompletions(c *Context) bool { if !c.shellComplete { return false } if args := c.Args(); args.Present() { name := args.First() if cmd := c.App.Command(name); cmd != nil { // let the command handle the completion return false } } ShowCompletions(c) return true } func checkCommandCompletions(c *Context, name string) bool { if !c.shellComplete { return false } ShowCommandCompletions(c, name) return true }
/* * Copyright (c) 2015, 2016, Oracle and/or its affiliates. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * - Neither the name of Oracle nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef LIBJIMAGE_INTTYPES_HPP #define LIBJIMAGE_INTTYPES_HPP typedef unsigned char u1; typedef char s1; typedef unsigned short u2; typedef short s2; typedef unsigned int u4; typedef int s4; #ifdef LP64 typedef unsigned long u8; typedef long s8; #else typedef unsigned long long u8; typedef long long s8; #endif #endif // LIBJIMAGE_INTTYPES_HPP
-- -- User: mike -- Date: 03.06.2018 -- Time: 22:51 -- This file is part of Remixed Pixel Dungeon. -- local RPD = require "scripts/lib/commonClasses" local spell = require "scripts/lib/spell" local mob = require "scripts/lib/mob" local storage = require "scripts/lib/storage" local latest_kill_index = "__latest_dead_mob" local function updateLatestDeadMob(mob) local mobClass = mob:getMobClassName() if mob:canBePet() and mobClass ~= "MirrorImage" then storage.put(latest_kill_index, {class = mob:getMobClassName(), pos = mob:getPos()}) end end mob.installOnDieCallback(updateLatestDeadMob) return spell.init{ desc = function () return { image = 2, imageFile = "spellsIcons/necromancy.png", name = "RaiseDead_Name", info = "RaiseDead_Info", magicAffinity = "Necromancy", targetingType = "none", spellCost = 15, castTime = 3, level = 4 } end, cast = function(self, spell, chr) local latestDeadMob = storage.get(latest_kill_index) or {} if latestDeadMob.class ~= nil then local mob = RPD.MobFactory:mobByName(latestDeadMob.class) storage.put(latest_kill_index, {}) local level = RPD.Dungeon.level local mobPos = latestDeadMob.pos if level:cellValid(mobPos) then mob:setPos(mobPos) mob:loot(RPD.ItemFactory:itemByName("Gold")) RPD.Mob:makePet(mob, chr) level:spawnMob(mob) chr:getSprite():emitter():burst( RPD.Sfx.ShadowParticle.CURSE, 6 ) mob:getSprite():emitter():burst( RPD.Sfx.ShadowParticle.CURSE, 6 ) RPD.playSound( "snd_cursed" ) return true else RPD.glog("RaiseDead_NoSpace") return false end end RPD.glog("RaiseDead_NoKill") return false end }
{ "pile_set_name": "Github" }
#ifndef NUMBA_PY_MODULE_H_
#define NUMBA_PY_MODULE_H_

#define PY_SSIZE_T_CLEAN

#include <Python.h>
#include <structmember.h>
#include <frameobject.h>

#if PY_MAJOR_VERSION >= 3
    #define MOD_ERROR_VAL NULL
    #define MOD_SUCCESS_VAL(val) val
    #define MOD_INIT(name) PyMODINIT_FUNC PyInit_##name(void)
    #define MOD_DEF(ob, name, doc, methods) { \
        static struct PyModuleDef moduledef = { \
            PyModuleDef_HEAD_INIT, name, doc, -1, methods, }; \
        ob = PyModule_Create(&moduledef); }
    #define MOD_INIT_EXEC(name) PyInit_##name();
#else
    #define MOD_ERROR_VAL
    #define MOD_SUCCESS_VAL(val)
    #define MOD_INIT(name) PyMODINIT_FUNC init##name(void)
    #define MOD_DEF(ob, name, doc, methods) \
        ob = Py_InitModule3(name, methods, doc);
    #define MOD_INIT_EXEC(name) init##name();
#endif

#if PY_MAJOR_VERSION >= 3
    #define PyString_AsString PyUnicode_AsUTF8
    #define PyString_Check PyUnicode_Check
    #define PyString_FromFormat PyUnicode_FromFormat
    #define PyString_FromString PyUnicode_FromString
    #define PyString_InternFromString PyUnicode_InternFromString
    #define PyInt_Type PyLong_Type
    #define PyInt_Check PyLong_Check
    #define PyInt_CheckExact PyLong_CheckExact
#else
    #define Py_hash_t long
    #define Py_uhash_t unsigned long
#endif

#if PY_MAJOR_VERSION < 3 || (PY_MAJOR_VERSION == 3 && PY_MINOR_VERSION < 4)
    #define PyMem_RawMalloc malloc
    #define PyMem_RawRealloc realloc
    #define PyMem_RawFree free
#endif

#ifndef Py_MIN
#define Py_MIN(x, y) (((x) > (y)) ? (y) : (x))
#endif

#ifndef Py_MAX
#define Py_MAX(x, y) (((x) < (y)) ? (y) : (x))
#endif

#endif /* NUMBA_PY_MODULE_H_ */
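As a usage illustration (not taken from the Numba sources), the `MOD_*` compatibility macros above let one init-function body serve both Python 2 and Python 3; the module name `example` and its empty method table below are made up:

```c
/* Hypothetical extension module built with the MOD_* macros above. */
#include "py_module.h"

static PyMethodDef example_methods[] = {
    { NULL, NULL, 0, NULL }   /* sentinel */
};

MOD_INIT(example) {
    PyObject *m;
    /* Expands to PyModule_Create on Python 3, Py_InitModule3 on Python 2. */
    MOD_DEF(m, "example", "Demo module", example_methods)
    if (m == NULL)
        return MOD_ERROR_VAL;
    return MOD_SUCCESS_VAL(m);
}
```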
{ "pile_set_name": "Github" }
<manifest package="com.eighteengray.imageprocesslibrary"
          xmlns:android="http://schemas.android.com/apk/res/android"
          android:installLocation="auto">

    <application
        android:allowBackup="true"
        android:label="@string/app_name"
        android:supportsRtl="true">
        <activity android:name=".cvdemo.camera.DisplayModeActivity"/>
        <activity android:name=".cvdemo.camera.CameraViewActivity"/>
        <activity android:name=".cvdemo.CVTestActivity"/>
    </application>

</manifest>
{ "pile_set_name": "Github" }
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // https://developers.google.com/protocol-buffers/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include <ext/spl/spl_iterators.h> #include <Zend/zend_API.h> #include <Zend/zend_interfaces.h> #include "protobuf.h" #include "utf8.h" ZEND_BEGIN_ARG_INFO_EX(arginfo_offsetGet, 0, 0, 1) ZEND_ARG_INFO(0, index) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO_EX(arginfo_offsetSet, 0, 0, 2) ZEND_ARG_INFO(0, index) ZEND_ARG_INFO(0, newval) ZEND_END_ARG_INFO() ZEND_BEGIN_ARG_INFO(arginfo_void, 0) ZEND_END_ARG_INFO() // Utilities void* upb_value_memory(upb_value* v) { return (void*)(&v->val); } // ----------------------------------------------------------------------------- // Basic map operations on top of upb's strtable. // // Note that we roll our own `Map` container here because, as for // `RepeatedField`, we want a strongly-typed container. This is so that any user // errors due to incorrect map key or value types are raised as close as // possible to the error site, rather than at some deferred point (e.g., // serialization). // // We build our `Map` on top of upb_strtable so that we're able to take // advantage of the native_slot storage abstraction, as RepeatedField does. // (This is not quite a perfect mapping -- see the key conversions below -- but // gives us full support and error-checking for all value types for free.) // ----------------------------------------------------------------------------- // Map values are stored using the native_slot abstraction (as with repeated // field values), but keys are a bit special. Since we use a strtable, we need // to store keys as sequences of bytes such that equality of those bytes maps // one-to-one to equality of keys. We store strings directly (i.e., they map to // their own bytes) and integers as native integers (using the native_slot // abstraction). 
// Note that there is another tradeoff here in keeping string keys as native // strings rather than PHP strings: traversing the Map requires conversion to // PHP string values on every traversal, potentially creating more garbage. We // should consider ways to cache a PHP version of the key if this becomes an // issue later. // Forms a key to use with the underlying strtable from a PHP key value. |buf| // must point to TABLE_KEY_BUF_LENGTH bytes of temporary space, used to // construct a key byte sequence if needed. |out_key| and |out_length| provide // the resulting key data/length. #define TABLE_KEY_BUF_LENGTH 8 // sizeof(uint64_t) static bool table_key(Map* self, zval* key, char* buf, const char** out_key, size_t* out_length TSRMLS_DC) { switch (self->key_type) { case UPB_TYPE_STRING: if (!protobuf_convert_to_string(key)) { return false; } if (!is_structurally_valid_utf8(Z_STRVAL_P(key), Z_STRLEN_P(key))) { zend_error(E_USER_ERROR, "Given key is not UTF8 encoded."); return false; } *out_key = Z_STRVAL_P(key); *out_length = Z_STRLEN_P(key); break; #define CASE_TYPE(upb_type, type, c_type, php_type) \ case UPB_TYPE_##upb_type: { \ c_type type##_value; \ if (!protobuf_convert_to_##type(key, &type##_value)) { \ return false; \ } \ native_slot_set_by_array(self->key_type, NULL, buf, key TSRMLS_CC); \ *out_key = buf; \ *out_length = native_slot_size(self->key_type); \ break; \ } CASE_TYPE(BOOL, bool, int8_t, BOOL) CASE_TYPE(INT32, int32, int32_t, LONG) CASE_TYPE(INT64, int64, int64_t, LONG) CASE_TYPE(UINT32, uint32, uint32_t, LONG) CASE_TYPE(UINT64, uint64, uint64_t, LONG) #undef CASE_TYPE default: // Map constructor should not allow a Map with another key type to be // constructed. assert(false); break; } return true; } // ----------------------------------------------------------------------------- // MapField methods // ----------------------------------------------------------------------------- static zend_function_entry map_field_methods[] = { PHP_ME(MapField, __construct, NULL, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetExists, arginfo_offsetGet, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetGet, arginfo_offsetGet, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetSet, arginfo_offsetSet, ZEND_ACC_PUBLIC) PHP_ME(MapField, offsetUnset, arginfo_offsetGet, ZEND_ACC_PUBLIC) PHP_ME(MapField, count, arginfo_void, ZEND_ACC_PUBLIC) PHP_ME(MapField, getIterator, arginfo_void, ZEND_ACC_PUBLIC) ZEND_FE_END }; // Forward declare static functions. static void map_field_write_dimension(zval *object, zval *key, zval *value TSRMLS_DC); // ----------------------------------------------------------------------------- // MapField creation/desctruction // ----------------------------------------------------------------------------- zend_class_entry* map_field_type; zend_class_entry* map_field_iter_type; zend_object_handlers* map_field_handlers; zend_object_handlers* map_field_iter_handlers; static void map_begin_internal(Map *map, MapIter *iter) { iter->self = map; upb_strtable_begin(&iter->it, &map->table); } static HashTable *map_field_get_gc(zval *object, CACHED_VALUE **table, int *n) { // TODO(teboring): Unfortunately, zend engine does not support garbage // collection for custom array. We have to use zend engine's native array // instead. *table = NULL; *n = 0; return NULL; } // Define map value element free function. 
#if PHP_MAJOR_VERSION < 7 static inline void php_proto_map_string_release(void *value) { zval_ptr_dtor(value); } static inline void php_proto_map_object_release(void *value) { zval_ptr_dtor(value); } #else static inline void php_proto_map_string_release(void *value) { zend_string* object = *(zend_string**)value; zend_string_release(object); } static inline void php_proto_map_object_release(void *value) { zend_object* object = *(zend_object**)value; if(--GC_REFCOUNT(object) == 0) { zend_objects_store_del(object); } } #endif // Define object free method. PHP_PROTO_OBJECT_FREE_START(Map, map_field) MapIter it; int len; for (map_begin_internal(intern, &it); !map_done(&it); map_next(&it)) { upb_value value = map_iter_value(&it, &len); void *mem = upb_value_memory(&value); switch (intern->value_type) { case UPB_TYPE_MESSAGE: php_proto_map_object_release(mem); break; case UPB_TYPE_STRING: case UPB_TYPE_BYTES: php_proto_map_string_release(mem); break; default: break; } } upb_strtable_uninit(&intern->table); PHP_PROTO_OBJECT_FREE_END PHP_PROTO_OBJECT_DTOR_START(Map, map_field) PHP_PROTO_OBJECT_DTOR_END // Define object create method. PHP_PROTO_OBJECT_CREATE_START(Map, map_field) // Table value type is always UINT64: this ensures enough space to store the // native_slot value. if (!upb_strtable_init(&intern->table, UPB_CTYPE_UINT64)) { zend_error(E_USER_ERROR, "Could not allocate table."); } PHP_PROTO_OBJECT_CREATE_END(Map, map_field) // Init class entry. PHP_PROTO_INIT_CLASS_START("Google\\Protobuf\\Internal\\MapField", Map, map_field) zend_class_implements(map_field_type TSRMLS_CC, 3, spl_ce_ArrayAccess, zend_ce_aggregate, spl_ce_Countable); map_field_handlers->write_dimension = map_field_write_dimension; map_field_handlers->get_gc = map_field_get_gc; PHP_PROTO_INIT_CLASS_END void map_field_create_with_field(const zend_class_entry *ce, const upb_fielddef *field, CACHED_VALUE *map_field PHP_PROTO_TSRMLS_DC) { const upb_fielddef *key_field = map_field_key(field); const upb_fielddef *value_field = map_field_value(field); map_field_create_with_type( ce, upb_fielddef_type(key_field), upb_fielddef_type(value_field), field_type_class(value_field TSRMLS_CC), map_field PHP_PROTO_TSRMLS_CC); } void map_field_create_with_type(const zend_class_entry *ce, upb_fieldtype_t key_type, upb_fieldtype_t value_type, const zend_class_entry *msg_ce, CACHED_VALUE *map_field PHP_PROTO_TSRMLS_DC) { CREATE_OBJ_ON_ALLOCATED_ZVAL_PTR(CACHED_PTR_TO_ZVAL_PTR(map_field), map_field_type); Map *intern = UNBOX(Map, CACHED_TO_ZVAL_PTR(*map_field)); intern->key_type = key_type; intern->value_type = value_type; intern->msg_ce = msg_ce; } // ----------------------------------------------------------------------------- // MapField Handlers // ----------------------------------------------------------------------------- static bool map_field_read_dimension(zval *object, zval *key, int type, CACHED_VALUE *retval TSRMLS_DC) { Map *intern = UNBOX(Map, object); char keybuf[TABLE_KEY_BUF_LENGTH]; const char* keyval = NULL; size_t length = 0; upb_value v; #ifndef NDEBUG v.ctype = UPB_CTYPE_UINT64; #endif if (!table_key(intern, key, keybuf, &keyval, &length TSRMLS_CC)) { return false; } if (upb_strtable_lookup2(&intern->table, keyval, length, &v)) { void* mem = upb_value_memory(&v); native_slot_get_by_map_value(intern->value_type, mem, retval TSRMLS_CC); return true; } else { zend_error(E_USER_ERROR, "Given key doesn't exist."); return false; } } bool map_index_set(Map *intern, const char* keyval, int length, upb_value v) { // Replace any 
existing value by issuing a 'remove' operation first. upb_strtable_remove2(&intern->table, keyval, length, NULL); if (!upb_strtable_insert2(&intern->table, keyval, length, v)) { zend_error(E_USER_ERROR, "Could not insert into table"); return false; } return true; } static void map_field_write_dimension(zval *object, zval *key, zval *value TSRMLS_DC) { Map *intern = UNBOX(Map, object); char keybuf[TABLE_KEY_BUF_LENGTH]; const char* keyval = NULL; size_t length = 0; upb_value v; void* mem; if (!table_key(intern, key, keybuf, &keyval, &length TSRMLS_CC)) { return; } mem = upb_value_memory(&v); memset(mem, 0, native_slot_size(intern->value_type)); if (!native_slot_set_by_map(intern->value_type, intern->msg_ce, mem, value TSRMLS_CC)) { return; } #ifndef NDEBUG v.ctype = UPB_CTYPE_UINT64; #endif // Replace any existing value by issuing a 'remove' operation first. upb_strtable_remove2(&intern->table, keyval, length, NULL); if (!upb_strtable_insert2(&intern->table, keyval, length, v)) { zend_error(E_USER_ERROR, "Could not insert into table"); return; } } static bool map_field_unset_dimension(zval *object, zval *key TSRMLS_DC) { Map *intern = UNBOX(Map, object); char keybuf[TABLE_KEY_BUF_LENGTH]; const char* keyval = NULL; size_t length = 0; upb_value v; if (!table_key(intern, key, keybuf, &keyval, &length TSRMLS_CC)) { return false; } #ifndef NDEBUG v.ctype = UPB_CTYPE_UINT64; #endif upb_strtable_remove2(&intern->table, keyval, length, &v); return true; } // ----------------------------------------------------------------------------- // PHP MapField Methods // ----------------------------------------------------------------------------- PHP_METHOD(MapField, __construct) { long key_type, value_type; zend_class_entry* klass = NULL; if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "ll|C", &key_type, &value_type, &klass) == FAILURE) { return; } Map *intern = UNBOX(Map, getThis()); intern->key_type = to_fieldtype(key_type); intern->value_type = to_fieldtype(value_type); intern->msg_ce = klass; // Check that the key type is an allowed type. switch (intern->key_type) { case UPB_TYPE_INT32: case UPB_TYPE_INT64: case UPB_TYPE_UINT32: case UPB_TYPE_UINT64: case UPB_TYPE_BOOL: case UPB_TYPE_STRING: case UPB_TYPE_BYTES: // These are OK. 
break; default: zend_error(E_USER_ERROR, "Invalid key type for map."); } } PHP_METHOD(MapField, offsetExists) { zval *key; if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "z", &key) == FAILURE) { return; } Map *intern = UNBOX(Map, getThis()); char keybuf[TABLE_KEY_BUF_LENGTH]; const char* keyval = NULL; size_t length = 0; upb_value v; #ifndef NDEBUG v.ctype = UPB_CTYPE_UINT64; #endif if (!table_key(intern, key, keybuf, &keyval, &length TSRMLS_CC)) { RETURN_BOOL(false); } RETURN_BOOL(upb_strtable_lookup2(&intern->table, keyval, length, &v)); } PHP_METHOD(MapField, offsetGet) { zval *index, *value; if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "z", &index) == FAILURE) { return; } map_field_read_dimension(getThis(), index, BP_VAR_R, ZVAL_PTR_TO_CACHED_PTR(return_value) TSRMLS_CC); } PHP_METHOD(MapField, offsetSet) { zval *index, *value; if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "zz", &index, &value) == FAILURE) { return; } map_field_write_dimension(getThis(), index, value TSRMLS_CC); } PHP_METHOD(MapField, offsetUnset) { zval *index; if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "z", &index) == FAILURE) { return; } map_field_unset_dimension(getThis(), index TSRMLS_CC); } PHP_METHOD(MapField, count) { Map *intern = UNBOX(Map, getThis()); if (zend_parse_parameters_none() == FAILURE) { return; } RETURN_LONG(upb_strtable_count(&intern->table)); } PHP_METHOD(MapField, getIterator) { CREATE_OBJ_ON_ALLOCATED_ZVAL_PTR(return_value, map_field_iter_type); Map *intern = UNBOX(Map, getThis()); MapIter *iter = UNBOX(MapIter, return_value); map_begin(getThis(), iter TSRMLS_CC); } // ----------------------------------------------------------------------------- // Map Iterator // ----------------------------------------------------------------------------- void map_begin(zval *map_php, MapIter *iter TSRMLS_DC) { Map *self = UNBOX(Map, map_php); map_begin_internal(self, iter); } void map_next(MapIter *iter) { upb_strtable_next(&iter->it); } bool map_done(MapIter *iter) { return upb_strtable_done(&iter->it); } const char *map_iter_key(MapIter *iter, int *len) { *len = upb_strtable_iter_keylength(&iter->it); return upb_strtable_iter_key(&iter->it); } upb_value map_iter_value(MapIter *iter, int *len) { *len = native_slot_size(iter->self->value_type); return upb_strtable_iter_value(&iter->it); } // ----------------------------------------------------------------------------- // MapFieldIter methods // ----------------------------------------------------------------------------- static zend_function_entry map_field_iter_methods[] = { PHP_ME(MapFieldIter, rewind, arginfo_void, ZEND_ACC_PUBLIC) PHP_ME(MapFieldIter, current, arginfo_void, ZEND_ACC_PUBLIC) PHP_ME(MapFieldIter, key, arginfo_void, ZEND_ACC_PUBLIC) PHP_ME(MapFieldIter, next, arginfo_void, ZEND_ACC_PUBLIC) PHP_ME(MapFieldIter, valid, arginfo_void, ZEND_ACC_PUBLIC) ZEND_FE_END }; // ----------------------------------------------------------------------------- // MapFieldIter creation/desctruction // ----------------------------------------------------------------------------- // Define object free method. PHP_PROTO_OBJECT_FREE_START(MapIter, map_field_iter) PHP_PROTO_OBJECT_FREE_END PHP_PROTO_OBJECT_DTOR_START(MapIter, map_field_iter) PHP_PROTO_OBJECT_DTOR_END // Define object create method. PHP_PROTO_OBJECT_CREATE_START(MapIter, map_field_iter) intern->self = NULL; PHP_PROTO_OBJECT_CREATE_END(MapIter, map_field_iter) // Init class entry. 
PHP_PROTO_INIT_CLASS_START("Google\\Protobuf\\Internal\\MapFieldIter", MapIter, map_field_iter) zend_class_implements(map_field_iter_type TSRMLS_CC, 1, zend_ce_iterator); PHP_PROTO_INIT_CLASS_END // ----------------------------------------------------------------------------- // PHP MapFieldIter Methods // ----------------------------------------------------------------------------- PHP_METHOD(MapFieldIter, rewind) { MapIter *intern = UNBOX(MapIter, getThis()); map_begin_internal(intern->self, intern); } PHP_METHOD(MapFieldIter, current) { MapIter *intern = UNBOX(MapIter, getThis()); Map *map_field = intern->self; int value_length = 0; upb_value value = map_iter_value(intern, &value_length); void* mem = upb_value_memory(&value); native_slot_get_by_map_value(map_field->value_type, mem, ZVAL_PTR_TO_CACHED_PTR(return_value) TSRMLS_CC); } PHP_METHOD(MapFieldIter, key) { MapIter *intern = UNBOX(MapIter, getThis()); Map *map_field = intern->self; int key_length = 0; const char* key = map_iter_key(intern, &key_length); native_slot_get_by_map_key(map_field->key_type, key, key_length, ZVAL_PTR_TO_CACHED_PTR(return_value) TSRMLS_CC); } PHP_METHOD(MapFieldIter, next) { MapIter *intern = UNBOX(MapIter, getThis()); map_next(intern); } PHP_METHOD(MapFieldIter, valid) { MapIter *intern = UNBOX(MapIter, getThis()); RETURN_BOOL(!map_done(intern)); }
{ "pile_set_name": "Github" }
//------------------------------------------------------------------------------
// <copyright file="DetailsViewMode.cs" company="Microsoft">
//     Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------

namespace System.Web.UI.WebControls {

    using System;

    /// <devdoc>
    ///    <para>Specifies the DetailsView edit/view mode.</para>
    /// </devdoc>
    public enum DetailsViewMode {

        /// <devdoc>
        ///    <para>The control is in read-only mode.</para>
        /// </devdoc>
        ReadOnly = 0,

        /// <devdoc>
        ///    <para>The control is editing an existing record for update.</para>
        /// </devdoc>
        Edit = 1,

        /// <devdoc>
        ///    <para>The control is editing a new record for insert.</para>
        /// </devdoc>
        Insert = 2
    }
}
{ "pile_set_name": "Github" }
/* Esperanto initialisation for the jQuery UI date picker plugin. */
/* Written by Olivier M. ([email protected]). */
jQuery(function($){
	$.datepicker.regional['eo'] = {
		closeText: 'Fermi',
		prevText: '&lt;Anta',
		nextText: 'Sekv&gt;',
		currentText: 'Nuna',
		monthNames: ['Januaro','Februaro','Marto','Aprilo','Majo','Junio',
			'Julio','Aŭgusto','Septembro','Oktobro','Novembro','Decembro'],
		monthNamesShort: ['Jan','Feb','Mar','Apr','Maj','Jun',
			'Jul','Aŭg','Sep','Okt','Nov','Dec'],
		dayNames: ['Dimanĉo','Lundo','Mardo','Merkredo','Ĵaŭdo','Vendredo','Sabato'],
		dayNamesShort: ['Dim','Lun','Mar','Mer','Ĵaŭ','Ven','Sab'],
		dayNamesMin: ['Di','Lu','Ma','Me','Ĵa','Ve','Sa'],
		weekHeader: 'Sb',
		dateFormat: 'dd/mm/yy',
		firstDay: 0,
		isRTL: false,
		showMonthAfterYear: false,
		yearSuffix: ''};
	$.datepicker.setDefaults($.datepicker.regional['eo']);
});
{ "pile_set_name": "Github" }
require_relative '../spec_helper'
require_relative '../fixtures/classes'

describe "UNIXSocket#addr" do
  platform_is_not :windows do
    before :each do
      @path = SocketSpecs.socket_path
      @server = UNIXServer.open(@path)
      @client = UNIXSocket.open(@path)
    end

    after :each do
      @client.close
      @server.close
      SocketSpecs.rm_socket @path
    end

    it "returns an array" do
      @client.addr.should be_kind_of(Array)
    end

    it "returns the address family of this socket in an array" do
      @client.addr[0].should == "AF_UNIX"
      @server.addr[0].should == "AF_UNIX"
    end

    it "returns the path of the socket in an array if it's a server" do
      @server.addr[1].should == @path
    end

    it "returns an empty string for path if it's a client" do
      @client.addr[1].should == ""
    end
  end
end
{ "pile_set_name": "Github" }
<!-- Generated by pkgdown: do not edit by hand --> <!DOCTYPE html> <html lang="en"> <head> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <title>Create a numeric input control — numericInput • SHINY.SEMANTIC</title> <!-- jquery --> <script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.4.1/jquery.min.js" integrity="sha256-CSXorXvZcTkaix6Yvo6HppcZGetbYMGWSFlBw8HfCJo=" crossorigin="anonymous"></script> <!-- Bootstrap --> <link href="https://cdnjs.cloudflare.com/ajax/libs/bootswatch/3.4.0/yeti/bootstrap.min.css" rel="stylesheet" crossorigin="anonymous" /> <script src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.4.1/js/bootstrap.min.js" integrity="sha256-nuL8/2cJ5NDSSwnKD8VqreErSWHtnEP9E7AySL+1ev4=" crossorigin="anonymous"></script> <!-- bootstrap-toc --> <link rel="stylesheet" href="../bootstrap-toc.css"> <script src="../bootstrap-toc.js"></script> <!-- Font Awesome icons --> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/all.min.css" integrity="sha256-mmgLkCYLUQbXn0B1SRqzHar6dCnv9oZFPEC1g1cwlkk=" crossorigin="anonymous" /> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.12.1/css/v4-shims.min.css" integrity="sha256-wZjR52fzng1pJHwx4aV2AO3yyTOXrcDW7jBpJtTwVxw=" crossorigin="anonymous" /> <!-- clipboard.js --> <script src="https://cdnjs.cloudflare.com/ajax/libs/clipboard.js/2.0.6/clipboard.min.js" integrity="sha256-inc5kl9MA1hkeYUt+EC3BhlIgyp/2jDIyBLS6k3UxPI=" crossorigin="anonymous"></script> <!-- headroom.js --> <script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/headroom.min.js" integrity="sha256-AsUX4SJE1+yuDu5+mAVzJbuYNPHj/WroHuZ8Ir/CkE0=" crossorigin="anonymous"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/headroom/0.11.0/jQuery.headroom.min.js" integrity="sha256-ZX/yNShbjqsohH1k95liqY9Gd8uOiE1S4vZc+9KQ1K4=" crossorigin="anonymous"></script> <!-- pkgdown --> <link href="../pkgdown.css" rel="stylesheet"> <script src="../pkgdown.js"></script> <meta property="og:title" content="Create a numeric input control — numericInput" /> <meta property="og:description" content="Create a numeric input control" /> <!-- mathjax --> <script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/MathJax.js" integrity="sha256-nvJJv9wWKEm88qvoQl9ekL2J+k/RWIsaSScxxlsrv8k=" crossorigin="anonymous"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.5/config/TeX-AMS-MML_HTMLorMML.js" integrity="sha256-84DKXVJXs0/F8OTMzX4UR909+jtl4G7SPypPavF+GfA=" crossorigin="anonymous"></script> <!--[if lt IE 9]> <script src="https://oss.maxcdn.com/html5shiv/3.7.3/html5shiv.min.js"></script> <script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script> <![endif]--> </head> <body data-spy="scroll" data-target="#toc"> <div class="container template-reference-topic"> <header> <div class="navbar navbar-inverse navbar-fixed-top" role="navigation"> <div class="container"> <div class="navbar-header"> <button type="button" class="navbar-toggle collapsed" data-toggle="collapse" data-target="#navbar" aria-expanded="false"> <span class="sr-only">Toggle navigation</span> <span class="icon-bar"></span> <span class="icon-bar"></span> <span class="icon-bar"></span> </button> <span class="navbar-brand"> <a class="navbar-link" href="../index.html">SHINY.SEMANTIC</a> <span class="version label label-default" data-toggle="tooltip" data-placement="bottom" title="Released 
version">0.3.1</span> </span> </div> <div id="navbar" class="navbar-collapse collapse"> <ul class="nav navbar-nav"> <li> <a href="../index.html"> <span class="fa fa-home"></span> Start </a> </li> </ul> <ul class="nav navbar-nav navbar-right"> <li> <a href="../reference/index.html"> <span class="fa fa-file-code-o"></span> Functions </a> </li> <li> <a href="../CHANGELOG.html"> <span class="fa fa-newspaper-o"></span> Changes </a> </li> <li> <a href="../CODE_OF_CONDUCT.html"> <span class="fa fa-user-o"></span> CoC </a> </li> <li> <a href="https://github.com/Appsilon/shiny.semantic"> <span class="fa fa-github fa-lg"></span> </a> </li> <li> <a href="https://twitter.com/Appsilon"> <span class="fa fa-twitter fa-lg"></span> </a> </li> </ul> </div><!--/.nav-collapse --> </div><!--/.container --> </div><!--/.navbar --> </header> <div class="row"> <div class="col-md-9 contents"> <div class="page-header"> <h1>Create a numeric input control</h1> <small class="dont-index">Source: <a href='https://github.com/Appsilon/shiny.semantic/blob/master/R/input.R'><code>R/input.R</code></a></small> <div class="hidden name"><code>numericInput.Rd</code></div> </div> <div class="ref-description"> <p>Create a numeric input control</p> </div> <pre class="usage"><span class='fu'>numericInput</span>( <span class='no'>inputId</span>, <span class='no'>label</span>, <span class='no'>value</span>, <span class='kw'>min</span> <span class='kw'>=</span> <span class='fl'>NA</span>, <span class='kw'>max</span> <span class='kw'>=</span> <span class='fl'>NA</span>, <span class='kw'>step</span> <span class='kw'>=</span> <span class='fl'>NA</span>, <span class='kw'>width</span> <span class='kw'>=</span> <span class='kw'>NULL</span>, <span class='no'>...</span> )</pre> <h2 class="hasAnchor" id="arguments"><a class="anchor" href="#arguments"></a>Arguments</h2> <table class="ref-arguments"> <colgroup><col class="name" /><col class="desc" /></colgroup> <tr> <th>inputId</th> <td><p>The input slot that will be used to access the value.</p></td> </tr> <tr> <th>label</th> <td><p>Display label for the control, or NULL for no label.</p></td> </tr> <tr> <th>value</th> <td><p>Initial value of the numeric input.</p></td> </tr> <tr> <th>min</th> <td><p>Minimum allowed value.</p></td> </tr> <tr> <th>max</th> <td><p>Maximum allowed value.</p></td> </tr> <tr> <th>step</th> <td><p>Interval to use when stepping between min and max.</p></td> </tr> <tr> <th>width</th> <td><p>The width of the input.</p></td> </tr> <tr> <th>...</th> <td><p>Other parameters passed to <code><a href='numeric_input.html'>numeric_input</a></code> like <code>type</code> or <code>icon</code>.</p></td> </tr> </table> </div> <div class="col-md-3 hidden-xs hidden-sm" id="pkgdown-sidebar"> <nav id="toc" data-toggle="toc" class="sticky-top"> <h2 data-toc-skip>Contents</h2> </nav> </div> </div> <footer> <div class="copyright"> <p>Developed by Filip Stachura, Krystian Igras, Adam Forys, Dominik Krzeminski.</p> </div> <div class="pkgdown"> <p>Site built with <a href="https://pkgdown.r-lib.org/">pkgdown</a> 1.5.1.</p> </div> </footer> </div> </body> </html>
{ "pile_set_name": "Github" }
var mkdirp = require('../').mkdirp;
var path = require('path');
var fs = require('fs');
var test = require('tap').test;
var _0777 = parseInt('0777', 8);
var _0755 = parseInt('0755', 8);
var _0744 = parseInt('0744', 8);

var ps = [ '', 'tmp' ];

for (var i = 0; i < 25; i++) {
    var dir = Math.floor(Math.random() * Math.pow(16,4)).toString(16);
    ps.push(dir);
}

var file = ps.join('/');

test('chmod-pre', function (t) {
    var mode = _0744;
    mkdirp(file, mode, function (er) {
        t.ifError(er, 'should not error');
        fs.stat(file, function (er, stat) {
            t.ifError(er, 'should exist');
            t.ok(stat && stat.isDirectory(), 'should be directory');
            t.equal(stat && stat.mode & _0777, mode, 'should be 0744');
            t.end();
        });
    });
});

test('chmod', function (t) {
    var mode = _0755;
    mkdirp(file, mode, function (er) {
        t.ifError(er, 'should not error');
        fs.stat(file, function (er, stat) {
            t.ifError(er, 'should exist');
            t.ok(stat && stat.isDirectory(), 'should be directory');
            t.end();
        });
    });
});
{ "pile_set_name": "Github" }
/*! * # Semantic UI - Nag * http://github.com/semantic-org/semantic-ui/ * * * Released under the MIT license * http://opensource.org/licenses/MIT * */ ;(function ($, window, document, undefined) { "use strict"; window = (typeof window != 'undefined' && window.Math == Math) ? window : (typeof self != 'undefined' && self.Math == Math) ? self : Function('return this')() ; $.fn.nag = function(parameters) { var $allModules = $(this), moduleSelector = $allModules.selector || '', time = new Date().getTime(), performance = [], query = arguments[0], methodInvoked = (typeof query == 'string'), queryArguments = [].slice.call(arguments, 1), returnedValue ; $allModules .each(function() { var settings = ( $.isPlainObject(parameters) ) ? $.extend(true, {}, $.fn.nag.settings, parameters) : $.extend({}, $.fn.nag.settings), className = settings.className, selector = settings.selector, error = settings.error, namespace = settings.namespace, eventNamespace = '.' + namespace, moduleNamespace = namespace + '-module', $module = $(this), $close = $module.find(selector.close), $context = (settings.context) ? $(settings.context) : $('body'), element = this, instance = $module.data(moduleNamespace), moduleOffset, moduleHeight, contextWidth, contextHeight, contextOffset, yOffset, yPosition, timer, module, requestAnimationFrame = window.requestAnimationFrame || window.mozRequestAnimationFrame || window.webkitRequestAnimationFrame || window.msRequestAnimationFrame || function(callback) { setTimeout(callback, 0); } ; module = { initialize: function() { module.verbose('Initializing element'); $module .on('click' + eventNamespace, selector.close, module.dismiss) .data(moduleNamespace, module) ; if(settings.detachable && $module.parent()[0] !== $context[0]) { $module .detach() .prependTo($context) ; } if(settings.displayTime > 0) { setTimeout(module.hide, settings.displayTime); } module.show(); }, destroy: function() { module.verbose('Destroying instance'); $module .removeData(moduleNamespace) .off(eventNamespace) ; }, show: function() { if( module.should.show() && !$module.is(':visible') ) { module.debug('Showing nag', settings.animation.show); if(settings.animation.show == 'fade') { $module .fadeIn(settings.duration, settings.easing) ; } else { $module .slideDown(settings.duration, settings.easing) ; } } }, hide: function() { module.debug('Showing nag', settings.animation.hide); if(settings.animation.show == 'fade') { $module .fadeIn(settings.duration, settings.easing) ; } else { $module .slideUp(settings.duration, settings.easing) ; } }, onHide: function() { module.debug('Removing nag', settings.animation.hide); $module.remove(); if (settings.onHide) { settings.onHide(); } }, dismiss: function(event) { if(settings.storageMethod) { module.storage.set(settings.key, settings.value); } module.hide(); event.stopImmediatePropagation(); event.preventDefault(); }, should: { show: function() { if(settings.persist) { module.debug('Persistent nag is set, can show nag'); return true; } if( module.storage.get(settings.key) != settings.value.toString() ) { module.debug('Stored value is not set, can show nag', module.storage.get(settings.key)); return true; } module.debug('Stored value is set, cannot show nag', module.storage.get(settings.key)); return false; } }, get: { storageOptions: function() { var options = {} ; if(settings.expires) { options.expires = settings.expires; } if(settings.domain) { options.domain = settings.domain; } if(settings.path) { options.path = settings.path; } return options; } }, clear: function() { 
module.storage.remove(settings.key); }, storage: { set: function(key, value) { var options = module.get.storageOptions() ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { window.localStorage.setItem(key, value); module.debug('Value stored using local storage', key, value); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { window.sessionStorage.setItem(key, value); module.debug('Value stored using session storage', key, value); } else if($.cookie !== undefined) { $.cookie(key, value, options); module.debug('Value stored using cookie', key, value, options); } else { module.error(error.noCookieStorage); return; } }, get: function(key, value) { var storedValue ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { storedValue = window.localStorage.getItem(key); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { storedValue = window.sessionStorage.getItem(key); } // get by cookie else if($.cookie !== undefined) { storedValue = $.cookie(key); } else { module.error(error.noCookieStorage); } if(storedValue == 'undefined' || storedValue == 'null' || storedValue === undefined || storedValue === null) { storedValue = undefined; } return storedValue; }, remove: function(key) { var options = module.get.storageOptions() ; if(settings.storageMethod == 'localstorage' && window.localStorage !== undefined) { window.localStorage.removeItem(key); } else if(settings.storageMethod == 'sessionstorage' && window.sessionStorage !== undefined) { window.sessionStorage.removeItem(key); } // store by cookie else if($.cookie !== undefined) { $.removeCookie(key, options); } else { module.error(error.noStorage); } } }, setting: function(name, value) { module.debug('Changing setting', name, value); if( $.isPlainObject(name) ) { $.extend(true, settings, name); } else if(value !== undefined) { if($.isPlainObject(settings[name])) { $.extend(true, settings[name], value); } else { settings[name] = value; } } else { return settings[name]; } }, internal: function(name, value) { if( $.isPlainObject(name) ) { $.extend(true, module, name); } else if(value !== undefined) { module[name] = value; } else { return module[name]; } }, debug: function() { if(!settings.silent && settings.debug) { if(settings.performance) { module.performance.log(arguments); } else { module.debug = Function.prototype.bind.call(console.info, console, settings.name + ':'); module.debug.apply(console, arguments); } } }, verbose: function() { if(!settings.silent && settings.verbose && settings.debug) { if(settings.performance) { module.performance.log(arguments); } else { module.verbose = Function.prototype.bind.call(console.info, console, settings.name + ':'); module.verbose.apply(console, arguments); } } }, error: function() { if(!settings.silent) { module.error = Function.prototype.bind.call(console.error, console, settings.name + ':'); module.error.apply(console, arguments); } }, performance: { log: function(message) { var currentTime, executionTime, previousTime ; if(settings.performance) { currentTime = new Date().getTime(); previousTime = time || currentTime; executionTime = currentTime - previousTime; time = currentTime; performance.push({ 'Name' : message[0], 'Arguments' : [].slice.call(message, 1) || '', 'Element' : element, 'Execution Time' : executionTime }); } clearTimeout(module.performance.timer); module.performance.timer = setTimeout(module.performance.display, 500); }, display: function() { 
var title = settings.name + ':', totalTime = 0 ; time = false; clearTimeout(module.performance.timer); $.each(performance, function(index, data) { totalTime += data['Execution Time']; }); title += ' ' + totalTime + 'ms'; if(moduleSelector) { title += ' \'' + moduleSelector + '\''; } if( (console.group !== undefined || console.table !== undefined) && performance.length > 0) { console.groupCollapsed(title); if(console.table) { console.table(performance); } else { $.each(performance, function(index, data) { console.log(data['Name'] + ': ' + data['Execution Time']+'ms'); }); } console.groupEnd(); } performance = []; } }, invoke: function(query, passedArguments, context) { var object = instance, maxDepth, found, response ; passedArguments = passedArguments || queryArguments; context = element || context; if(typeof query == 'string' && object !== undefined) { query = query.split(/[\. ]/); maxDepth = query.length - 1; $.each(query, function(depth, value) { var camelCaseValue = (depth != maxDepth) ? value + query[depth + 1].charAt(0).toUpperCase() + query[depth + 1].slice(1) : query ; if( $.isPlainObject( object[camelCaseValue] ) && (depth != maxDepth) ) { object = object[camelCaseValue]; } else if( object[camelCaseValue] !== undefined ) { found = object[camelCaseValue]; return false; } else if( $.isPlainObject( object[value] ) && (depth != maxDepth) ) { object = object[value]; } else if( object[value] !== undefined ) { found = object[value]; return false; } else { module.error(error.method, query); return false; } }); } if ( $.isFunction( found ) ) { response = found.apply(context, passedArguments); } else if(found !== undefined) { response = found; } if($.isArray(returnedValue)) { returnedValue.push(response); } else if(returnedValue !== undefined) { returnedValue = [returnedValue, response]; } else if(response !== undefined) { returnedValue = response; } return found; } }; if(methodInvoked) { if(instance === undefined) { module.initialize(); } module.invoke(query); } else { if(instance !== undefined) { instance.invoke('destroy'); } module.initialize(); } }) ; return (returnedValue !== undefined) ? returnedValue : this ; }; $.fn.nag.settings = { name : 'Nag', silent : false, debug : false, verbose : false, performance : true, namespace : 'Nag', // allows cookie to be overridden persist : false, // set to zero to require manually dismissal, otherwise hides on its own displayTime : 0, animation : { show : 'slide', hide : 'slide' }, context : false, detachable : false, expires : 30, domain : false, path : '/', // type of storage to use storageMethod : 'cookie', // value to store in dismissed localstorage/cookie key : 'nag', value : 'dismiss', error: { noCookieStorage : '$.cookie is not included. A storage solution is required.', noStorage : 'Neither $.cookie or store is defined. A storage solution is required for storing state', method : 'The method you called is not defined.' }, className : { bottom : 'bottom', fixed : 'fixed' }, selector : { close : '.close.icon' }, speed : 500, easing : 'easeOutQuad', onHide: function() {} }; // Adds easing $.extend( $.easing, { easeOutQuad: function (x, t, b, c, d) { return -c *(t/=d)*(t-2) + b; } }); })( jQuery, window, document );
{ "pile_set_name": "Github" }
/* * Copyright © 2003 Keith Packard * * Permission to use, copy, modify, distribute, and sell this software and its * documentation for any purpose is hereby granted without fee, provided that * the above copyright notice appear in all copies and that both that * copyright notice and this permission notice appear in supporting * documentation, and that the name of Keith Packard not be used in * advertising or publicity pertaining to distribution of the software without * specific, written prior permission. Keith Packard makes no * representations about the suitability of this software for any purpose. It * is provided "as is" without express or implied warranty. * * KEITH PACKARD DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, * INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO * EVENT SHALL KEITH PACKARD BE LIABLE FOR ANY SPECIAL, INDIRECT OR * CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, * DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER * TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR * PERFORMANCE OF THIS SOFTWARE. */ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include <limits.h> #include "Xfixesint.h" XserverRegion XFixesCreateRegion (Display *dpy, XRectangle *rectangles, int nrectangles) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionReq *req; long len; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegion; region = req->region = XAllocID (dpy); len = ((long) nrectangles) << 1; SetReqLen (req, len, len); len <<= 2; Data16 (dpy, (short *) rectangles, len); UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromBitmap (Display *dpy, Pixmap bitmap) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromBitmapReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromBitmap, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromBitmap; region = req->region = XAllocID (dpy); req->bitmap = bitmap; UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromWindow (Display *dpy, Window window, int kind) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromWindowReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromWindow, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromWindow; region = req->region = XAllocID (dpy); req->window = window; req->kind = kind; UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromGC (Display *dpy, GC gc) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromGCReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); GetReq (XFixesCreateRegionFromGC, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromGC; region = req->region = XAllocID (dpy); req->gc = gc->gid; UnlockDisplay (dpy); SyncHandle(); return region; } XserverRegion XFixesCreateRegionFromPicture (Display *dpy, XID picture) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCreateRegionFromPictureReq *req; XserverRegion region; XFixesCheckExtension (dpy, info, 0); LockDisplay (dpy); 
GetReq (XFixesCreateRegionFromPicture, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCreateRegionFromPicture; region = req->region = XAllocID (dpy); req->picture = picture; UnlockDisplay (dpy); SyncHandle(); return region; } void XFixesDestroyRegion (Display *dpy, XserverRegion region) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesDestroyRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesDestroyRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesDestroyRegion; req->region = region; UnlockDisplay (dpy); SyncHandle(); } void XFixesSetRegion (Display *dpy, XserverRegion region, XRectangle *rectangles, int nrectangles) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesSetRegionReq *req; long len; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesSetRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesSetRegion; req->region = region; len = ((long) nrectangles) << 1; SetReqLen (req, len, len); len <<= 2; Data16 (dpy, (short *) rectangles, len); UnlockDisplay (dpy); SyncHandle(); } void XFixesCopyRegion (Display *dpy, XserverRegion dst, XserverRegion src) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesCopyRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesCopyRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesCopyRegion; req->source = src; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } void XFixesUnionRegion (Display *dpy, XserverRegion dst, XserverRegion src1, XserverRegion src2) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesUnionRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesUnionRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesUnionRegion; req->source1 = src1; req->source2 = src2; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } void XFixesIntersectRegion (Display *dpy, XserverRegion dst, XserverRegion src1, XserverRegion src2) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesIntersectRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesIntersectRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesIntersectRegion; req->source1 = src1; req->source2 = src2; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } void XFixesSubtractRegion (Display *dpy, XserverRegion dst, XserverRegion src1, XserverRegion src2) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesSubtractRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesSubtractRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesSubtractRegion; req->source1 = src1; req->source2 = src2; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } void XFixesInvertRegion (Display *dpy, XserverRegion dst, XRectangle *rect, XserverRegion src) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesInvertRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesInvertRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesInvertRegion; req->x = rect->x; req->y = rect->y; req->width = rect->width; req->height = rect->height; req->source = src; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } void XFixesTranslateRegion 
(Display *dpy, XserverRegion region, int dx, int dy) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesTranslateRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesTranslateRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesTranslateRegion; req->region = region; req->dx = dx; req->dy = dy; UnlockDisplay (dpy); SyncHandle(); } void XFixesRegionExtents (Display *dpy, XserverRegion dst, XserverRegion src) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesRegionExtentsReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesRegionExtents, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesRegionExtents; req->source = src; req->destination = dst; UnlockDisplay (dpy); SyncHandle(); } XRectangle * XFixesFetchRegion (Display *dpy, XserverRegion region, int *nrectanglesRet) { XRectangle bounds; return XFixesFetchRegionAndBounds (dpy, region, nrectanglesRet, &bounds); } XRectangle * XFixesFetchRegionAndBounds (Display *dpy, XserverRegion region, int *nrectanglesRet, XRectangle *bounds) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesFetchRegionReq *req; xXFixesFetchRegionReply rep; XRectangle *rects; int nrects; long nbytes; long nread; XFixesCheckExtension (dpy, info, NULL); LockDisplay (dpy); GetReq (XFixesFetchRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesFetchRegion; req->region = region; *nrectanglesRet = 0; if (!_XReply (dpy, (xReply *) &rep, 0, xFalse)) { UnlockDisplay (dpy); SyncHandle (); return NULL; } bounds->x = rep.x; bounds->y = rep.y; bounds->width = rep.width; bounds->height = rep.height; if (rep.length < (INT_MAX >> 2)) { nbytes = (long) rep.length << 2; nrects = rep.length >> 1; rects = Xmalloc (nrects * sizeof (XRectangle)); } else { nbytes = 0; nrects = 0; rects = NULL; } if (!rects) { _XEatDataWords(dpy, rep.length); UnlockDisplay (dpy); SyncHandle (); return NULL; } nread = nrects << 3; _XRead16 (dpy, (short *) rects, nread); /* skip any padding */ if(nbytes > nread) { _XEatData (dpy, (unsigned long) (nbytes - nread)); } UnlockDisplay (dpy); SyncHandle(); *nrectanglesRet = nrects; return rects; } void XFixesSetGCClipRegion (Display *dpy, GC gc, int clip_x_origin, int clip_y_origin, XserverRegion region) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesSetGCClipRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesSetGCClipRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesSetGCClipRegion; req->gc = gc->gid; req->region = region; req->xOrigin = clip_x_origin; req->yOrigin = clip_y_origin; UnlockDisplay (dpy); SyncHandle(); } void XFixesSetWindowShapeRegion (Display *dpy, Window win, int shape_kind, int x_off, int y_off, XserverRegion region) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesSetWindowShapeRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesSetWindowShapeRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesSetWindowShapeRegion; req->dest = win; req->destKind = shape_kind; req->xOff = x_off; req->yOff = y_off; req->region = region; UnlockDisplay (dpy); SyncHandle(); } void XFixesSetPictureClipRegion (Display *dpy, XID picture, int clip_x_origin, int clip_y_origin, XserverRegion region) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesSetPictureClipRegionReq *req; XFixesSimpleCheckExtension 
(dpy, info); LockDisplay (dpy); GetReq (XFixesSetPictureClipRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesSetPictureClipRegion; req->picture = picture; req->region = region; req->xOrigin = clip_x_origin; req->yOrigin = clip_y_origin; UnlockDisplay (dpy); SyncHandle(); } void XFixesExpandRegion (Display *dpy, XserverRegion dst, XserverRegion src, unsigned left, unsigned right, unsigned top, unsigned bottom) { XFixesExtDisplayInfo *info = XFixesFindDisplay (dpy); xXFixesExpandRegionReq *req; XFixesSimpleCheckExtension (dpy, info); LockDisplay (dpy); GetReq (XFixesExpandRegion, req); req->reqType = info->codes->major_opcode; req->xfixesReqType = X_XFixesExpandRegion; req->source = src; req->destination = dst; req->left = left; req->right = right; req->top = top; req->bottom = bottom; UnlockDisplay (dpy); SyncHandle(); }
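A short client-side sketch of the XFixes region calls above may help; it is not from the library itself, and the function name, geometry, and picture handle are made up for illustration:

```c
/* Hypothetical usage of the XFixes region API: build a region from two
 * rectangles and use it to clip rendering on an XRender picture. */
#include <X11/Xlib.h>
#include <X11/extensions/Xfixes.h>

void clip_picture_to_two_rects(Display *dpy, XID picture)
{
    XRectangle rects[2] = {
        {  0,  0, 100, 100 },
        { 50, 50, 100, 100 },
    };

    /* Server-side region covering the union of the two rectangles. */
    XserverRegion region = XFixesCreateRegion(dpy, rects, 2);

    /* Clip subsequent rendering on `picture`, with clip origin (0, 0). */
    XFixesSetPictureClipRegion(dpy, picture, 0, 0, region);

    /* The server keeps the clip; the region object itself can be freed. */
    XFixesDestroyRegion(dpy, region);
}
```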
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <meta name="generator" content="Doxygen 1.8.6"/> <title>libguac: Data Fields</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="search/search.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="search/search.js"></script> <script type="text/javascript"> $(document).ready(function() { searchBox.OnSelectItem(0); }); </script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> </head> <body> <div id="top"><!-- do not remove this div, it is closed by doxygen! --> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td style="padding-left: 0.5em;"> <div id="projectname">libguac &#160;<span id="projectnumber">0.9.3</span> </div> </td> </tr> </tbody> </table> </div> <!-- end header part --> <!-- Generated by Doxygen 1.8.6 --> <script type="text/javascript"> var searchBox = new SearchBox("searchBox", "search",false,'Search'); </script> <div id="navrow1" class="tabs"> <ul class="tablist"> <li><a href="index.html"><span>Main&#160;Page</span></a></li> <li class="current"><a href="annotated.html"><span>Data&#160;Structures</span></a></li> <li><a href="files.html"><span>Files</span></a></li> <li> <div id="MSearchBox" class="MSearchBoxInactive"> <span class="left"> <img id="MSearchSelect" src="search/mag_sel.png" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" alt=""/> <input type="text" id="MSearchField" value="Search" accesskey="S" onfocus="searchBox.OnSearchFieldFocus(true)" onblur="searchBox.OnSearchFieldFocus(false)" onkeyup="searchBox.OnSearchFieldChange(event)"/> </span><span class="right"> <a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a> </span> </div> </li> </ul> </div> <div id="navrow2" class="tabs2"> <ul class="tablist"> <li><a href="annotated.html"><span>Data&#160;Structures</span></a></li> <li class="current"><a href="functions.html"><span>Data&#160;Fields</span></a></li> </ul> </div> <div id="navrow3" class="tabs2"> <ul class="tablist"> <li class="current"><a href="functions.html"><span>All</span></a></li> <li><a href="functions_vars.html"><span>Variables</span></a></li> </ul> </div> <div id="navrow4" class="tabs3"> <ul class="tablist"> <li><a href="#index_a"><span>a</span></a></li> <li><a href="#index_b"><span>b</span></a></li> <li><a href="#index_c"><span>c</span></a></li> <li><a href="#index_d"><span>d</span></a></li> <li><a href="#index_e"><span>e</span></a></li> <li><a href="#index_f"><span>f</span></a></li> <li><a href="#index_h"><span>h</span></a></li> <li><a href="#index_i"><span>i</span></a></li> <li><a href="#index_k"><span>k</span></a></li> <li><a href="#index_l"><span>l</span></a></li> <li><a href="#index_m"><span>m</span></a></li> <li><a href="#index_o"><span>o</span></a></li> <li><a href="#index_p"><span>p</span></a></li> <li><a href="#index_r"><span>r</span></a></li> <li><a href="#index_s"><span>s</span></a></li> <li><a href="#index_u"><span>u</span></a></li> <li><a href="#index_v"><span>v</span></a></li> <li 
class="current"><a href="#index_w"><span>w</span></a></li> </ul> </div> </div><!-- top --> <!-- window showing the filter options --> <div id="MSearchSelectWindow" onmouseover="return searchBox.OnSearchSelectShow()" onmouseout="return searchBox.OnSearchSelectHide()" onkeydown="return searchBox.OnSearchSelectKey(event)"> <a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(0)"><span class="SelectionMark">&#160;</span>All</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(1)"><span class="SelectionMark">&#160;</span>Data Structures</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(2)"><span class="SelectionMark">&#160;</span>Files</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(3)"><span class="SelectionMark">&#160;</span>Functions</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(4)"><span class="SelectionMark">&#160;</span>Variables</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(5)"><span class="SelectionMark">&#160;</span>Typedefs</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(6)"><span class="SelectionMark">&#160;</span>Enumerations</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(7)"><span class="SelectionMark">&#160;</span>Enumerator</a><a class="SelectItem" href="javascript:void(0)" onclick="searchBox.OnSelectItem(8)"><span class="SelectionMark">&#160;</span>Macros</a></div> <!-- iframe showing the search results (closed by default) --> <div id="MSearchResultsWindow"> <iframe src="javascript:void(0)" frameborder="0" name="MSearchResults" id="MSearchResults"> </iframe> </div> <div class="contents"> <div class="textblock">Here is a list of all documented struct and union fields with links to the struct/union documentation for each field:</div> <h3><a class="anchor" id="index_a"></a>- a -</h3><ul> <li>ack_handler : <a class="el" href="structguac__client.html#ada98af16d05a257165080441ad13309a">guac_client</a> , <a class="el" href="structguac__stream.html#a677e30a2082b332285b723c12c03cbda">guac_stream</a> </li> <li>active : <a class="el" href="structguac__pool.html#ac22871d5c476ca9a6dd25a2446fa0fd3">guac_pool</a> </li> <li>argc : <a class="el" href="structguac__instruction.html#a0745b07a905cd06a37b5e7614db32ec8">guac_instruction</a> </li> <li>args : <a class="el" href="structguac__client__plugin.html#afdcc20662bdf6e448ebe55967331b79a">guac_client_plugin</a> </li> <li>argv : <a class="el" href="structguac__instruction.html#ac3b766830d0b97dce75c5b488aca436c">guac_instruction</a> </li> <li>audio_mimetypes : <a class="el" href="structguac__client__info.html#a662fb7d39af7bddcb2f5c83a6b54f923">guac_client_info</a> </li> </ul> <h3><a class="anchor" id="index_b"></a>- b -</h3><ul> <li>begin_handler : <a class="el" href="structguac__audio__encoder.html#a43d161556d3d55031ea486cc8b4ce3f6">guac_audio_encoder</a> </li> <li>blob_handler : <a class="el" href="structguac__client.html#ae394480f2509d5e2f3fa93a57db59ef6">guac_client</a> , <a class="el" href="structguac__stream.html#a3392ef3fe5c37e2ae95140a1e171cee9">guac_stream</a> </li> <li>bps : <a class="el" href="structguac__audio__stream.html#a1de1be8874fb844002b92ac1e3813866">guac_audio_stream</a> </li> </ul> <h3><a class="anchor" id="index_c"></a>- c -</h3><ul> <li>channels : <a class="el" href="structguac__audio__stream.html#af9d1ad90194e24c2967e2f9f18de0ad6">guac_audio_stream</a> </li> 
<li>client : <a class="el" href="structguac__audio__stream.html#a1771fa5ff88b8f5d4ca4cd5e77a1ffba">guac_audio_stream</a> </li> <li>clipboard_handler : <a class="el" href="structguac__client.html#addf8b9aeb379e6132797f2b7000fb0d1">guac_client</a> </li> <li>connection_id : <a class="el" href="structguac__client.html#ad38b9ded1f04bfe8eb235e551fc08094">guac_client</a> </li> </ul> <h3><a class="anchor" id="index_d"></a>- d -</h3><ul> <li>data : <a class="el" href="structguac__audio__stream.html#a9ca03221b02d1206eb698f8ee936c4b7">guac_audio_stream</a> , <a class="el" href="structguac__client.html#a0569bc7eea8d37b5bd95bcd3821ca0a3">guac_client</a> , <a class="el" href="structguac__socket.html#ac8340e5627b43138bb50aac6c4368c0d">guac_socket</a> , <a class="el" href="structguac__stream.html#acdf20063ca408431862d901c9445b429">guac_stream</a> </li> </ul> <h3><a class="anchor" id="index_e"></a>- e -</h3><ul> <li>encoded_data : <a class="el" href="structguac__audio__stream.html#a4b58881fe297d3ccac941225e4ff2dd0">guac_audio_stream</a> </li> <li>encoded_data_length : <a class="el" href="structguac__audio__stream.html#a91977a758f703f5b2a8e635f455a3407">guac_audio_stream</a> </li> <li>encoded_data_used : <a class="el" href="structguac__audio__stream.html#ab0648dbc9e47b1d93939087acd0b5394">guac_audio_stream</a> </li> <li>encoder : <a class="el" href="structguac__audio__stream.html#af3b4811ba5a4f1a337d338777fe9049d">guac_audio_stream</a> </li> <li>end_handler : <a class="el" href="structguac__audio__encoder.html#a6fbc4f8efe14c9a119fdfaaef78f7e92">guac_audio_encoder</a> , <a class="el" href="structguac__client.html#aae4631fe1d35e9ee2b494303921145dc">guac_client</a> , <a class="el" href="structguac__stream.html#aabe2ae7142cb29070b5021dd3d4255f3">guac_stream</a> </li> </ul> <h3><a class="anchor" id="index_f"></a>- f -</h3><ul> <li>file_handler : <a class="el" href="structguac__client.html#abc56a0e12d564843eee4723f82f3d2fc">guac_client</a> </li> <li>free_handler : <a class="el" href="structguac__client.html#a79e29c931fd448cf51f7153fea064169">guac_client</a> , <a class="el" href="structguac__socket.html#a1044cde496cfe11c914c623dc1e29678">guac_socket</a> </li> </ul> <h3><a class="anchor" id="index_h"></a>- h -</h3><ul> <li>handle_messages : <a class="el" href="structguac__client.html#a75a6618618ff90b6ade14c750f7d3343">guac_client</a> </li> </ul> <h3><a class="anchor" id="index_i"></a>- i -</h3><ul> <li>index : <a class="el" href="structguac__layer.html#a686a85d9d758ba5f2f4426a4c4ae13ba">guac_layer</a> , <a class="el" href="structguac__stream.html#a487951c2cc77b2e1637e3aa0b5faaa60">guac_stream</a> </li> <li>info : <a class="el" href="structguac__client.html#ab67a82a9f940360dac0ec0ca96d446cf">guac_client</a> </li> <li>init_handler : <a class="el" href="structguac__client__plugin.html#af650493e50e6abd9b0bb33c917bd0b95">guac_client_plugin</a> </li> </ul> <h3><a class="anchor" id="index_k"></a>- k -</h3><ul> <li>key_handler : <a class="el" href="structguac__client.html#ac0ed2c84d5aa62c435e8024310812259">guac_client</a> </li> </ul> <h3><a class="anchor" id="index_l"></a>- l -</h3><ul> <li>last_received_timestamp : <a class="el" href="structguac__client.html#a3c8e4b4a92bdeaace0fa2dd32f956ab2">guac_client</a> </li> <li>last_sent_timestamp : <a class="el" href="structguac__client.html#a0f3069566c7a5ca76ce4369715589dad">guac_client</a> </li> <li>last_write_timestamp : <a class="el" href="structguac__socket.html#adf00e9c13ff3808fceb4c71c1172fb51">guac_socket</a> </li> <li>length : <a class="el" 
href="structguac__audio__stream.html#ae5d8ebcf53b63fc827654fe4c970ff2d">guac_audio_stream</a> </li> <li>log_error_handler : <a class="el" href="structguac__client.html#a59c74b12d77ead505b4e94ba169b2fdf">guac_client</a> </li> <li>log_info_handler : <a class="el" href="structguac__client.html#a055d63be9770e614c7abb2dfc5a8fb0c">guac_client</a> </li> </ul> <h3><a class="anchor" id="index_m"></a>- m -</h3><ul> <li>mimetype : <a class="el" href="structguac__audio__encoder.html#a51b8bfeae3709c1859710cbf1039e01b">guac_audio_encoder</a> </li> <li>min_size : <a class="el" href="structguac__pool.html#a71ed274c2481dbcd5e06bf7d8d74d71e">guac_pool</a> </li> <li>mouse_handler : <a class="el" href="structguac__client.html#a13f56570676f29a6f463e1542e156416">guac_client</a> </li> </ul> <h3><a class="anchor" id="index_o"></a>- o -</h3><ul> <li>opcode : <a class="el" href="structguac__instruction.html#afe89ba323779cb1c6a68cf3fe9df73cc">guac_instruction</a> </li> <li>optimal_height : <a class="el" href="structguac__client__info.html#a58c3084b5e6b40ad43956562764d8732">guac_client_info</a> </li> <li>optimal_resolution : <a class="el" href="structguac__client__info.html#ae4ab77c47b095cf284971b0fe0fe69a5">guac_client_info</a> </li> <li>optimal_width : <a class="el" href="structguac__client__info.html#aed4ddf74fa55fe039f38225c3373f25b">guac_client_info</a> </li> </ul> <h3><a class="anchor" id="index_p"></a>- p -</h3><ul> <li>pcm_bytes_written : <a class="el" href="structguac__audio__stream.html#a3db3ad0af5385d4ea20daece3e4e6d05">guac_audio_stream</a> </li> <li>pcm_data : <a class="el" href="structguac__audio__stream.html#a7cfb92e31f72f1748aa0b3611ffdb0c8">guac_audio_stream</a> </li> <li>pipe_handler : <a class="el" href="structguac__client.html#acd62220caeae9c5da0291a30e29d5dfa">guac_client</a> </li> </ul> <h3><a class="anchor" id="index_r"></a>- r -</h3><ul> <li>rate : <a class="el" href="structguac__audio__stream.html#a6810293a6fa915e497d2dd1a643993d9">guac_audio_stream</a> </li> <li>read_handler : <a class="el" href="structguac__socket.html#a10042a8e591737aec8d4db5a3423b250">guac_socket</a> </li> </ul> <h3><a class="anchor" id="index_s"></a>- s -</h3><ul> <li>select_handler : <a class="el" href="structguac__socket.html#ad29e7f8a6819ca7bb541e8a947d5bc4f">guac_socket</a> </li> <li>size_handler : <a class="el" href="structguac__client.html#a9768d046d71ad0e81746fa0bffc08092">guac_client</a> </li> <li>socket : <a class="el" href="structguac__client.html#aa634856fd3c522377ba7345f3308000e">guac_client</a> </li> <li>state : <a class="el" href="structguac__client.html#acb3e3f5d0ce5726cdde768f97b473f53">guac_client</a> , <a class="el" href="structguac__instruction.html#aad671813a07d6d94e9ca14123b7c985c">guac_instruction</a> , <a class="el" href="structguac__socket.html#ac2d5236c7e570dfee207511241edaa9b">guac_socket</a> </li> <li>stream : <a class="el" href="structguac__audio__stream.html#a4d8f5cfc70ec54b97ba0c4eea596f894">guac_audio_stream</a> </li> </ul> <h3><a class="anchor" id="index_u"></a>- u -</h3><ul> <li>used : <a class="el" href="structguac__audio__stream.html#aa9580dff19ff2fd8bf3ac1069fdd8ca6">guac_audio_stream</a> </li> </ul> <h3><a class="anchor" id="index_v"></a>- v -</h3><ul> <li>value : <a class="el" href="structguac__pool__int.html#af76ff5f21c6e0f69d95cdd1385ea24a4">guac_pool_int</a> </li> <li>video_mimetypes : <a class="el" href="structguac__client__info.html#a8b5c9d593d6a6a5deaeadbe2973b9fe8">guac_client_info</a> </li> </ul> <h3><a class="anchor" id="index_w"></a>- w -</h3><ul> <li>write_handler : <a 
class="el" href="structguac__audio__encoder.html#a1b74c3dd402b0a6afa5f5b9000d02c74">guac_audio_encoder</a> , <a class="el" href="structguac__socket.html#a0f5f24200b0dad4ef1443b863d1bfc7d">guac_socket</a> </li> </ul> </div><!-- contents --> <!-- start footer part --> <hr class="footer"/><address class="footer"><small> Generated on Tue Sep 30 2014 00:05:02 for libguac by &#160;<a href="http://www.doxygen.org/index.html"> <img class="footer" src="doxygen.png" alt="doxygen"/> </a> 1.8.6 </small></address> <!-- Google Analytics --> <script type="text/javascript"> (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){ (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o), m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m) })(window,document,'script','//www.google-analytics.com/analytics.js','ga'); ga('create', 'UA-75289145-1', 'auto'); ga('send', 'pageview'); </script> <!-- End Google Analytics --> </body> </html>
{ "pile_set_name": "Github" }
/////////////////////////////////////////////////////////////////////////////////// /// OpenGL Mathematics (glm.g-truc.net) /// /// Copyright (c) 2005 - 2014 G-Truc Creation (www.g-truc.net) /// Permission is hereby granted, free of charge, to any person obtaining a copy /// of this software and associated documentation files (the "Software"), to deal /// in the Software without restriction, including without limitation the rights /// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell /// copies of the Software, and to permit persons to whom the Software is /// furnished to do so, subject to the following conditions: /// /// The above copyright notice and this permission notice shall be included in /// all copies or substantial portions of the Software. /// /// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR /// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, /// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE /// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER /// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, /// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN /// THE SOFTWARE. /// /// @ref core /// @file glm/core/func_vector_relational.inl /// @date 2008-08-03 / 2011-09-09 /// @author Christophe Riccio /////////////////////////////////////////////////////////////////////////////////// #include <limits> namespace glm { template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type lessThan ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'lessThan', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] < y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type lessThanEqual ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'lessThanEqual', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] <= y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type greaterThan ( vecType<T, P> const & x, vecType<T, P> const & y ) { GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'greaterThan', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] > y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type greaterThanEqual ( vecType<T, P> const & x, vecType<T, P> const & y ) { 
GLM_STATIC_ASSERT(std::numeric_limits<T>::is_iec559 || std::numeric_limits<T>::is_integer, "Invalid template instantiation of 'greaterThanEqual', GLM vector types required floating-point or integer value types vectors"); assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] >= y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type equal ( vecType<T, P> const & x, vecType<T, P> const & y ) { assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] == y[i]; return Result; } template <typename T, precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER typename vecType<T, P>::bool_type notEqual ( vecType<T, P> const & x, vecType<T, P> const & y ) { assert(x.length() == y.length()); typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < x.length(); ++i) Result[i] = x[i] != y[i]; return Result; } template <precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER bool any(vecType<bool, P> const & v) { bool Result = false; for(int i = 0; i < v.length(); ++i) Result = Result || v[i]; return Result; } template <precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER bool all(vecType<bool, P> const & v) { bool Result = true; for(int i = 0; i < v.length(); ++i) Result = Result && v[i]; return Result; } template <precision P, template <typename, precision> class vecType> GLM_FUNC_QUALIFIER vecType<bool, P> not_(vecType<bool, P> const & v) { typename vecType<bool, P>::bool_type Result(vecType<bool, P>::_null); for(int i = 0; i < v.length(); ++i) Result[i] = !v[i]; return Result; } }//namespace glm
{ "pile_set_name": "Github" }
# script for stm32

interface ft2232
ft2232_device_desc "Olimex OpenOCD JTAG"
ft2232_layout olimex-jtag
ft2232_vid_pid 0x15ba 0x0003

if { [info exists CHIPNAME] } {
   set _CHIPNAME $CHIPNAME
} else {
   set _CHIPNAME stm32
}

if { [info exists ENDIAN] } {
   set _ENDIAN $ENDIAN
} else {
   set _ENDIAN little
}

# jtag speed
jtag_khz 600

#use combined on interfaces or targets that can't set TRST/SRST separately
reset_config trst_and_srst

#jtag scan chain
if { [info exists CPUTAPID ] } {
   set _CPUTAPID $CPUTAPID
} else {
   # See STM Document RM0008
   # Section 26.6.3
   set _CPUTAPID 0x3ba00477
}
jtag newtap $_CHIPNAME cpu -irlen 4 -ircapture 0x1 -irmask 0xf -expected-id $_CPUTAPID

if { [info exists BSTAPID ] } {
   set _BSTAPID $BSTAPID
} else {
   # See STM Document RM0008
   # Section 26.6.2
   # Medium Density RevA
   set _BSTAPID 0x06410041
   # Rev B and Rev Z
   set _BSTAPID 0x16410041
   # High Density Devices, Rev A
   #set _BSTAPID 0x06414041
}
jtag newtap $_CHIPNAME bs -irlen 5 -ircapture 0x1 -irmask 0x1 -expected-id $_BSTAPID

set _TARGETNAME [format "%s.cpu" $_CHIPNAME]
target create $_TARGETNAME cortex_m3 -endian $_ENDIAN -chain-position $_TARGETNAME

$_TARGETNAME configure -work-area-virt 0 -work-area-phys 0x20000000 -work-area-size 0x5000 -work-area-backup 0
#$_TARGETNAME configure -event halted halt_handle

#flash bank stm32x 0 0 0 0 0
#target create cortex_m3 -endian little
#run_and_halt_time 0 30
#working_area 0 0x20000000 0x4000 nobackup
flash bank stm32x 0x08000000 0x00010000 0 0 0

# For more information about the configuration files, take a look at:
# openocd.texi

#script flash.script

proc halt_handle {} {
    resume
}

proc flash_test {} {
    puts "Trying to flash"
    sleep 100
    halt
    sleep 300
    stm32x mass_erase 0
    sleep 20
    flash write_bank 0 tmpflash.bin 0
    sleep 50
#   reset run
#   sleep 500
    reset run
    shutdown
}

init
flash_test
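The flash_test procedure above runs once at init; the same command sequence can also be replayed against an already-running OpenOCD server. Below is a minimal Python sketch of that, assuming OpenOCD's default telnet port 4444 on localhost and a tmpflash.bin image in the working directory; host, port, and the prompt-detection detail are assumptions, not taken from the config.

# Sketch: replay the flash_test command sequence over OpenOCD's telnet server.
# HOST/PORT are assumed defaults; the "> " prompt check is a simplification.
import socket

HOST, PORT = "127.0.0.1", 4444   # assumed OpenOCD telnet defaults
IMAGE = "tmpflash.bin"           # same image name the flash_test proc uses

def send(sock, command):
    """Send one OpenOCD command and return the raw reply text."""
    sock.sendall((command + "\n").encode("ascii"))
    data = b""
    while not data.endswith(b"> "):          # wait for the next prompt
        chunk = sock.recv(4096)
        if not chunk:
            break
        data += chunk
    return data.decode("ascii", errors="replace")

with socket.create_connection((HOST, PORT), timeout=10) as sock:
    sock.recv(4096)                           # discard the greeting banner
    send(sock, "halt")
    send(sock, "stm32x mass_erase 0")
    print(send(sock, f"flash write_bank 0 {IMAGE} 0"))
    send(sock, "reset run")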
{ "pile_set_name": "Github" }
# Blender v2.67 (sub 0) OBJ File: 'base.blend'
# www.blender.org
g base
v -0.500000 0.000000 0.500000
v -0.500000 0.000000 -0.500000
v 0.500000 0.000000 -0.500000
v 0.500000 0.000000 0.500000
v -0.500000 0.125000 0.500000
v -0.500000 0.125000 -0.500000
v 0.500000 0.125000 -0.500000
v 0.500000 0.125000 0.500000
vt 0.000000 0.875000
vt 1.000000 0.875000
vt 1.000000 1.000000
vt 0.000000 1.000000
vt 0.000000 0.000000
vt 1.000000 0.000000
vn -1.000000 0.000000 0.000000
vn 0.000000 0.000000 -1.000000
vn 1.000000 -0.000000 0.000000
vn 0.000000 -0.000000 1.000000
vn -0.000000 -1.000000 0.000000
vn -0.000000 1.000000 0.000000
s off
f 5/1/1 6/2/1 2/3/1 1/4/1
f 6/1/2 7/2/2 3/3/2 2/4/2
f 7/1/3 8/2/3 4/3/3 3/4/3
f 8/1/4 5/2/4 1/3/4 4/4/4
f 1/5/5 2/6/5 3/3/5 4/4/5
f 8/5/6 7/6/6 6/3/6 5/4/6
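The OBJ above stores 8 vertices and 6 quad faces, with each face reference in v/vt/vn form using 1-based indices. A minimal Python reader covering only the record types present in this file follows; the filename base.obj is an assumption.

# Minimal Wavefront OBJ reader for the record types used above (v, vt, vn, f).
# The filename "base.obj" is illustrative.
def load_obj(path):
    vertices, texcoords, normals, faces = [], [], [], []
    with open(path) as fh:
        for line in fh:
            parts = line.split()
            if not parts or parts[0].startswith("#"):
                continue
            if parts[0] == "v":
                vertices.append(tuple(float(x) for x in parts[1:4]))
            elif parts[0] == "vt":
                texcoords.append(tuple(float(x) for x in parts[1:3]))
            elif parts[0] == "vn":
                normals.append(tuple(float(x) for x in parts[1:4]))
            elif parts[0] == "f":
                # each vertex reference is v/vt/vn with 1-based indices
                faces.append([tuple(int(i) for i in ref.split("/"))
                              for ref in parts[1:]])
    return vertices, texcoords, normals, faces

verts, uvs, norms, faces = load_obj("base.obj")
print(len(verts), "vertices,", len(faces), "quads")  # expect 8 and 6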
{ "pile_set_name": "Github" }
/* RetroArch - A frontend for libretro. * Copyright (C) 2011-2017 - Daniel De Matteis * * RetroArch is free software: you can redistribute it and/or modify it under the terms * of the GNU General Public License as published by the Free Software Found- * ation, either version 3 of the License, or (at your option) any later version. * * RetroArch is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; * without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR * PURPOSE. See the GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along with RetroArch. * If not, see <http://www.gnu.org/licenses/>. */ #include <stdint.h> #include <boolean.h> #include <stddef.h> #include <stdlib.h> #include <string.h> #include "../../gfx/common/win32_common.h" #include <windows.h> #include <commdlg.h> #include <commctrl.h> #include "../../ui_companion_driver.h" #include "../../configuration.h" static bool ui_browser_window_win32_core( ui_browser_window_state_t *state, bool save) { OPENFILENAME ofn; bool okay = false; settings_t *settings = config_get_ptr(); bool video_fullscreen = settings->bools.video_fullscreen; ofn.lStructSize = sizeof(OPENFILENAME); ofn.hwndOwner = (HWND)state->window; ofn.hInstance = NULL; ofn.lpstrFilter = state->filters; /* actually const */ ofn.lpstrCustomFilter = NULL; ofn.nMaxCustFilter = 0; ofn.nFilterIndex = 0; ofn.lpstrFile = state->path; ofn.nMaxFile = PATH_MAX; ofn.lpstrFileTitle = NULL; ofn.nMaxFileTitle = 0; ofn.lpstrInitialDir = state->startdir; ofn.lpstrTitle = state->title; ofn.Flags = OFN_FILEMUSTEXIST | OFN_HIDEREADONLY | OFN_NOCHANGEDIR; ofn.nFileOffset = 0; ofn.nFileExtension = 0; ofn.lpstrDefExt = ""; ofn.lCustData = 0; ofn.lpfnHook = NULL; ofn.lpTemplateName = NULL; #if (_WIN32_WINNT >= 0x0500) ofn.pvReserved = NULL; ofn.dwReserved = 0; ofn.FlagsEx = 0; #endif /* Full Screen: Show mouse for the file dialog */ if (video_fullscreen) video_driver_show_mouse(); okay = true; if (!save && !GetOpenFileName(&ofn)) okay = false; if (save && !GetSaveFileName(&ofn)) okay = false; /* Full screen: Hide mouse after the file dialog */ if (video_fullscreen) video_driver_hide_mouse(); return okay; } static bool ui_browser_window_win32_open(ui_browser_window_state_t *state) { return ui_browser_window_win32_core(state, false); } static bool ui_browser_window_win32_save(ui_browser_window_state_t *state) { return ui_browser_window_win32_core(state, true); } ui_browser_window_t ui_browser_window_win32 = { ui_browser_window_win32_open, ui_browser_window_win32_save, "win32" };
{ "pile_set_name": "Github" }
""" Use this module directly: import xarray.plot as xplt Or use the methods on a DataArray or Dataset: DataArray.plot._____ Dataset.plot._____ """ import functools import numpy as np import pandas as pd from .facetgrid import _easy_facetgrid from .utils import ( _add_colorbar, _assert_valid_xy, _ensure_plottable, _infer_interval_breaks, _infer_xy_labels, _process_cmap_cbar_kwargs, _rescale_imshow_rgb, _resolve_intervals_1dplot, _resolve_intervals_2dplot, _update_axes, get_axis, import_matplotlib_pyplot, label_from_attrs, ) def _infer_line_data(darray, x, y, hue): ndims = len(darray.dims) if x is not None and y is not None: raise ValueError("Cannot specify both x and y kwargs for line plots.") if x is not None: _assert_valid_xy(darray, x, "x") if y is not None: _assert_valid_xy(darray, y, "y") if ndims == 1: huename = None hueplt = None huelabel = "" if x is not None: xplt = darray[x] yplt = darray elif y is not None: xplt = darray yplt = darray[y] else: # Both x & y are None dim = darray.dims[0] xplt = darray[dim] yplt = darray else: if x is None and y is None and hue is None: raise ValueError("For 2D inputs, please specify either hue, x or y.") if y is None: xname, huename = _infer_xy_labels(darray=darray, x=x, y=hue) xplt = darray[xname] if xplt.ndim > 1: if huename in darray.dims: otherindex = 1 if darray.dims.index(huename) == 0 else 0 otherdim = darray.dims[otherindex] yplt = darray.transpose(otherdim, huename, transpose_coords=False) xplt = xplt.transpose(otherdim, huename, transpose_coords=False) else: raise ValueError( "For 2D inputs, hue must be a dimension" " i.e. one of " + repr(darray.dims) ) else: (xdim,) = darray[xname].dims (huedim,) = darray[huename].dims yplt = darray.transpose(xdim, huedim) else: yname, huename = _infer_xy_labels(darray=darray, x=y, y=hue) yplt = darray[yname] if yplt.ndim > 1: if huename in darray.dims: otherindex = 1 if darray.dims.index(huename) == 0 else 0 otherdim = darray.dims[otherindex] xplt = darray.transpose(otherdim, huename, transpose_coords=False) yplt = yplt.transpose(otherdim, huename, transpose_coords=False) else: raise ValueError( "For 2D inputs, hue must be a dimension" " i.e. one of " + repr(darray.dims) ) else: (ydim,) = darray[yname].dims (huedim,) = darray[huename].dims xplt = darray.transpose(ydim, huedim) huelabel = label_from_attrs(darray[huename]) hueplt = darray[huename] xlabel = label_from_attrs(xplt) ylabel = label_from_attrs(yplt) return xplt, yplt, hueplt, xlabel, ylabel, huelabel def plot( darray, row=None, col=None, col_wrap=None, ax=None, hue=None, rtol=0.01, subplot_kws=None, **kwargs, ): """ Default plot of DataArray using matplotlib.pyplot. Calls xarray plotting function based on the dimensions of darray.squeeze() =============== =========================== Dimensions Plotting function --------------- --------------------------- 1 :py:func:`xarray.plot.line` 2 :py:func:`xarray.plot.pcolormesh` Anything else :py:func:`xarray.plot.hist` =============== =========================== Parameters ---------- darray : DataArray row : str, optional If passed, make row faceted plots on this dimension name col : str, optional If passed, make column faceted plots on this dimension name hue : str, optional If passed, make faceted line plots with hue on this dimension name col_wrap : int, optional Use together with ``col`` to wrap faceted plots ax : matplotlib.axes.Axes, optional If None, uses the current axis. Not applicable when using facets. 
rtol : float, optional Relative tolerance used to determine if the indexes are uniformly spaced. Usually a small positive number. subplot_kws : dict, optional Dictionary of keyword arguments for matplotlib subplots. **kwargs : optional Additional keyword arguments to matplotlib """ darray = darray.squeeze().compute() plot_dims = set(darray.dims) plot_dims.discard(row) plot_dims.discard(col) plot_dims.discard(hue) ndims = len(plot_dims) error_msg = ( "Only 1d and 2d plots are supported for facets in xarray. " "See the package `Seaborn` for more options." ) if ndims in [1, 2]: if row or col: kwargs["subplot_kws"] = subplot_kws kwargs["row"] = row kwargs["col"] = col kwargs["col_wrap"] = col_wrap if ndims == 1: plotfunc = line kwargs["hue"] = hue elif ndims == 2: if hue: plotfunc = line kwargs["hue"] = hue else: plotfunc = pcolormesh kwargs["subplot_kws"] = subplot_kws else: if row or col or hue: raise ValueError(error_msg) plotfunc = hist kwargs["ax"] = ax return plotfunc(darray, **kwargs) # This function signature should not change so that it can use # matplotlib format strings def line( darray, *args, row=None, col=None, figsize=None, aspect=None, size=None, ax=None, hue=None, x=None, y=None, xincrease=None, yincrease=None, xscale=None, yscale=None, xticks=None, yticks=None, xlim=None, ylim=None, add_legend=True, _labels=True, **kwargs, ): """ Line plot of DataArray index against values Wraps :func:`matplotlib:matplotlib.pyplot.plot` Parameters ---------- darray : DataArray Must be 1 dimensional figsize : tuple, optional A tuple (width, height) of the figure in inches. Mutually exclusive with ``size`` and ``ax``. aspect : scalar, optional Aspect ratio of plot, so that ``aspect * size`` gives the width in inches. Only used if a ``size`` is provided. size : scalar, optional If provided, create a new figure for the plot with the given size. Height (in inches) of each plot. See also: ``aspect``. ax : matplotlib axes object, optional Axis on which to plot this figure. By default, use the current axis. Mutually exclusive with ``size`` and ``figsize``. hue : string, optional Dimension or coordinate for which you want multiple lines plotted. If plotting against a 2D coordinate, ``hue`` must be a dimension. x, y : string, optional Dimension, coordinate or MultiIndex level for x, y axis. Only one of these may be specified. The other coordinate plots values from the DataArray on which this plot method is called. xscale, yscale : 'linear', 'symlog', 'log', 'logit', optional Specifies scaling for the x- and y-axes respectively xticks, yticks : Specify tick locations for x- and y-axes xlim, ylim : Specify x- and y-axes limits xincrease : None, True, or False, optional Should the values on the x axes be increasing from left to right? if None, use the default for the matplotlib function. yincrease : None, True, or False, optional Should the values on the y axes be increasing from top to bottom? if None, use the default for the matplotlib function. add_legend : bool, optional Add legend with y axis coordinates (2D inputs only). *args, **kwargs : optional Additional arguments to matplotlib.pyplot.plot """ # Handle facetgrids first if row or col: allargs = locals().copy() allargs.update(allargs.pop("kwargs")) allargs.pop("darray") return _easy_facetgrid(darray, line, kind="line", **allargs) ndims = len(darray.dims) if ndims > 2: raise ValueError( "Line plots are for 1- or 2-dimensional DataArrays. 
" "Passed DataArray has {ndims} " "dimensions".format(ndims=ndims) ) # The allargs dict passed to _easy_facetgrid above contains args if args == (): args = kwargs.pop("args", ()) else: assert "args" not in kwargs ax = get_axis(figsize, size, aspect, ax) xplt, yplt, hueplt, xlabel, ylabel, hue_label = _infer_line_data(darray, x, y, hue) # Remove pd.Intervals if contained in xplt.values and/or yplt.values. xplt_val, yplt_val, xlabel, ylabel, kwargs = _resolve_intervals_1dplot( xplt.values, yplt.values, xlabel, ylabel, kwargs ) _ensure_plottable(xplt_val, yplt_val) primitive = ax.plot(xplt_val, yplt_val, *args, **kwargs) if _labels: if xlabel is not None: ax.set_xlabel(xlabel) if ylabel is not None: ax.set_ylabel(ylabel) ax.set_title(darray._title_for_slice()) if darray.ndim == 2 and add_legend: ax.legend(handles=primitive, labels=list(hueplt.values), title=hue_label) # Rotate dates on xlabels # Do this without calling autofmt_xdate so that x-axes ticks # on other subplots (if any) are not deleted. # https://stackoverflow.com/questions/17430105/autofmt-xdate-deletes-x-axis-labels-of-all-subplots if np.issubdtype(xplt.dtype, np.datetime64): for xlabels in ax.get_xticklabels(): xlabels.set_rotation(30) xlabels.set_ha("right") _update_axes(ax, xincrease, yincrease, xscale, yscale, xticks, yticks, xlim, ylim) return primitive def step(darray, *args, where="pre", drawstyle=None, ds=None, **kwargs): """ Step plot of DataArray index against values Similar to :func:`matplotlib:matplotlib.pyplot.step` Parameters ---------- where : {"pre", "post", "mid"}, default: "pre" Define where the steps should be placed: - "pre": The y value is continued constantly to the left from every *x* position, i.e. the interval ``(x[i-1], x[i]]`` has the value ``y[i]``. - "post": The y value is continued constantly to the right from every *x* position, i.e. the interval ``[x[i], x[i+1])`` has the value ``y[i]``. - "mid": Steps occur half-way between the *x* positions. Note that this parameter is ignored if one coordinate consists of :py:func:`pandas.Interval` values, e.g. as a result of :py:func:`xarray.Dataset.groupby_bins`. In this case, the actual boundaries of the interval are used. *args, **kwargs : optional Additional arguments following :py:func:`xarray.plot.line` """ if where not in {"pre", "post", "mid"}: raise ValueError("'where' argument to step must be " "'pre', 'post' or 'mid'") if ds is not None: if drawstyle is None: drawstyle = ds else: raise TypeError("ds and drawstyle are mutually exclusive") if drawstyle is None: drawstyle = "" drawstyle = "steps-" + where + drawstyle return line(darray, *args, drawstyle=drawstyle, **kwargs) def hist( darray, figsize=None, size=None, aspect=None, ax=None, xincrease=None, yincrease=None, xscale=None, yscale=None, xticks=None, yticks=None, xlim=None, ylim=None, **kwargs, ): """ Histogram of DataArray Wraps :func:`matplotlib:matplotlib.pyplot.hist` Plots N dimensional arrays by first flattening the array. Parameters ---------- darray : DataArray Can be any dimension figsize : tuple, optional A tuple (width, height) of the figure in inches. Mutually exclusive with ``size`` and ``ax``. aspect : scalar, optional Aspect ratio of plot, so that ``aspect * size`` gives the width in inches. Only used if a ``size`` is provided. size : scalar, optional If provided, create a new figure for the plot with the given size. Height (in inches) of each plot. See also: ``aspect``. ax : matplotlib.axes.Axes, optional Axis on which to plot this figure. By default, use the current axis. 
Mutually exclusive with ``size`` and ``figsize``. **kwargs : optional Additional keyword arguments to matplotlib.pyplot.hist """ ax = get_axis(figsize, size, aspect, ax) no_nan = np.ravel(darray.values) no_nan = no_nan[pd.notnull(no_nan)] primitive = ax.hist(no_nan, **kwargs) ax.set_title("Histogram") ax.set_xlabel(label_from_attrs(darray)) _update_axes(ax, xincrease, yincrease, xscale, yscale, xticks, yticks, xlim, ylim) return primitive # MUST run before any 2d plotting functions are defined since # _plot2d decorator adds them as methods here. class _PlotMethods: """ Enables use of xarray.plot functions as attributes on a DataArray. For example, DataArray.plot.imshow """ __slots__ = ("_da",) def __init__(self, darray): self._da = darray def __call__(self, **kwargs): return plot(self._da, **kwargs) # we can't use functools.wraps here since that also modifies the name / qualname __doc__ = __call__.__doc__ = plot.__doc__ __call__.__wrapped__ = plot # type: ignore __call__.__annotations__ = plot.__annotations__ @functools.wraps(hist) def hist(self, ax=None, **kwargs): return hist(self._da, ax=ax, **kwargs) @functools.wraps(line) def line(self, *args, **kwargs): return line(self._da, *args, **kwargs) @functools.wraps(step) def step(self, *args, **kwargs): return step(self._da, *args, **kwargs) def override_signature(f): def wrapper(func): func.__wrapped__ = f return func return wrapper def _plot2d(plotfunc): """ Decorator for common 2d plotting logic Also adds the 2d plot method to class _PlotMethods """ commondoc = """ Parameters ---------- darray : DataArray Must be 2 dimensional, unless creating faceted plots x : string, optional Coordinate for x axis. If None use darray.dims[1] y : string, optional Coordinate for y axis. If None use darray.dims[0] figsize : tuple, optional A tuple (width, height) of the figure in inches. Mutually exclusive with ``size`` and ``ax``. aspect : scalar, optional Aspect ratio of plot, so that ``aspect * size`` gives the width in inches. Only used if a ``size`` is provided. size : scalar, optional If provided, create a new figure for the plot with the given size. Height (in inches) of each plot. See also: ``aspect``. ax : matplotlib axes object, optional Axis on which to plot this figure. By default, use the current axis. Mutually exclusive with ``size`` and ``figsize``. row : string, optional If passed, make row faceted plots on this dimension name col : string, optional If passed, make column faceted plots on this dimension name col_wrap : int, optional Use together with ``col`` to wrap faceted plots xscale, yscale : 'linear', 'symlog', 'log', 'logit', optional Specifies scaling for the x- and y-axes respectively xticks, yticks : Specify tick locations for x- and y-axes xlim, ylim : Specify x- and y-axes limits xincrease : None, True, or False, optional Should the values on the x axes be increasing from left to right? if None, use the default for the matplotlib function. yincrease : None, True, or False, optional Should the values on the y axes be increasing from top to bottom? if None, use the default for the matplotlib function. add_colorbar : bool, optional Adds colorbar to axis add_labels : bool, optional Use xarray metadata to label axes norm : ``matplotlib.colors.Normalize`` instance, optional If the ``norm`` has vmin or vmax specified, the corresponding kwarg must be None. vmin, vmax : floats, optional Values to anchor the colormap, otherwise they are inferred from the data and other keyword arguments. 
When a diverging dataset is inferred, setting one of these values will fix the other by symmetry around ``center``. Setting both values prevents use of a diverging colormap. If discrete levels are provided as an explicit list, both of these values are ignored. cmap : matplotlib colormap name or object, optional The mapping from data values to color space. If not provided, this will be either be ``viridis`` (if the function infers a sequential dataset) or ``RdBu_r`` (if the function infers a diverging dataset). When `Seaborn` is installed, ``cmap`` may also be a `seaborn` color palette. If ``cmap`` is seaborn color palette and the plot type is not ``contour`` or ``contourf``, ``levels`` must also be specified. colors : discrete colors to plot, optional A single color or a list of colors. If the plot type is not ``contour`` or ``contourf``, the ``levels`` argument is required. center : float, optional The value at which to center the colormap. Passing this value implies use of a diverging colormap. Setting it to ``False`` prevents use of a diverging colormap. robust : bool, optional If True and ``vmin`` or ``vmax`` are absent, the colormap range is computed with 2nd and 98th percentiles instead of the extreme values. extend : {"neither", "both", "min", "max"}, optional How to draw arrows extending the colorbar beyond its limits. If not provided, extend is inferred from vmin, vmax and the data limits. levels : int or list-like object, optional Split the colormap (cmap) into discrete color intervals. If an integer is provided, "nice" levels are chosen based on the data range: this can imply that the final number of levels is not exactly the expected one. Setting ``vmin`` and/or ``vmax`` with ``levels=N`` is equivalent to setting ``levels=np.linspace(vmin, vmax, N)``. infer_intervals : bool, optional Only applies to pcolormesh. If True, the coordinate intervals are passed to pcolormesh. If False, the original coordinates are used (this can be useful for certain map projections). The default is to always infer intervals, unless the mesh is irregular and plotted on a map projection. subplot_kws : dict, optional Dictionary of keyword arguments for matplotlib subplots. Only used for 2D and FacetGrid plots. cbar_ax : matplotlib Axes, optional Axes in which to draw the colorbar. cbar_kwargs : dict, optional Dictionary of keyword arguments to pass to the colorbar. **kwargs : optional Additional arguments to wrapped matplotlib function Returns ------- artist : The same type of primitive artist that the wrapped matplotlib function returns """ # Build on the original docstring plotfunc.__doc__ = f"{plotfunc.__doc__}\n{commondoc}" # plotfunc and newplotfunc have different signatures: # - plotfunc: (x, y, z, ax, **kwargs) # - newplotfunc: (darray, x, y, **kwargs) # where plotfunc accepts numpy arrays, while newplotfunc accepts a DataArray # and variable names. 
newplotfunc also explicitly lists most kwargs, so we # need to shorten it def signature(darray, x, y, **kwargs): pass @override_signature(signature) @functools.wraps(plotfunc) def newplotfunc( darray, x=None, y=None, figsize=None, size=None, aspect=None, ax=None, row=None, col=None, col_wrap=None, xincrease=True, yincrease=True, add_colorbar=None, add_labels=True, vmin=None, vmax=None, cmap=None, center=None, robust=False, extend=None, levels=None, infer_intervals=None, colors=None, subplot_kws=None, cbar_ax=None, cbar_kwargs=None, xscale=None, yscale=None, xticks=None, yticks=None, xlim=None, ylim=None, norm=None, **kwargs, ): # All 2d plots in xarray share this function signature. # Method signature below should be consistent. # Decide on a default for the colorbar before facetgrids if add_colorbar is None: add_colorbar = plotfunc.__name__ != "contour" imshow_rgb = plotfunc.__name__ == "imshow" and darray.ndim == ( 3 + (row is not None) + (col is not None) ) if imshow_rgb: # Don't add a colorbar when showing an image with explicit colors add_colorbar = False # Matplotlib does not support normalising RGB data, so do it here. # See eg. https://github.com/matplotlib/matplotlib/pull/10220 if robust or vmax is not None or vmin is not None: darray = _rescale_imshow_rgb(darray, vmin, vmax, robust) vmin, vmax, robust = None, None, False # Handle facetgrids first if row or col: allargs = locals().copy() del allargs["darray"] del allargs["imshow_rgb"] allargs.update(allargs.pop("kwargs")) # Need the decorated plotting function allargs["plotfunc"] = globals()[plotfunc.__name__] return _easy_facetgrid(darray, kind="dataarray", **allargs) plt = import_matplotlib_pyplot() rgb = kwargs.pop("rgb", None) if rgb is not None and plotfunc.__name__ != "imshow": raise ValueError('The "rgb" keyword is only valid for imshow()') elif rgb is not None and not imshow_rgb: raise ValueError( 'The "rgb" keyword is only valid for imshow()' "with a three-dimensional array (per facet)" ) xlab, ylab = _infer_xy_labels( darray=darray, x=x, y=y, imshow=imshow_rgb, rgb=rgb ) # better to pass the ndarrays directly to plotting functions xval = darray[xlab].values yval = darray[ylab].values # check if we need to broadcast one dimension if xval.ndim < yval.ndim: dims = darray[ylab].dims if xval.shape[0] == yval.shape[0]: xval = np.broadcast_to(xval[:, np.newaxis], yval.shape) else: xval = np.broadcast_to(xval[np.newaxis, :], yval.shape) elif yval.ndim < xval.ndim: dims = darray[xlab].dims if yval.shape[0] == xval.shape[0]: yval = np.broadcast_to(yval[:, np.newaxis], xval.shape) else: yval = np.broadcast_to(yval[np.newaxis, :], xval.shape) elif xval.ndim == 2: dims = darray[xlab].dims else: dims = (darray[ylab].dims[0], darray[xlab].dims[0]) # May need to transpose for correct x, y labels # xlab may be the name of a coord, we have to check for dim names if imshow_rgb: # For RGB[A] images, matplotlib requires the color dimension # to be last. In Xarray the order should be unimportant, so # we transpose to (y, x, color) to make this work. yx_dims = (ylab, xlab) dims = yx_dims + tuple(d for d in darray.dims if d not in yx_dims) if dims != darray.dims: darray = darray.transpose(*dims, transpose_coords=True) # Pass the data as a masked ndarray too zval = darray.to_masked_array(copy=False) # Replace pd.Intervals if contained in xval or yval. 
xplt, xlab_extra = _resolve_intervals_2dplot(xval, plotfunc.__name__) yplt, ylab_extra = _resolve_intervals_2dplot(yval, plotfunc.__name__) _ensure_plottable(xplt, yplt, zval) cmap_params, cbar_kwargs = _process_cmap_cbar_kwargs( plotfunc, zval.data, **locals(), _is_facetgrid=kwargs.pop("_is_facetgrid", False), ) if "contour" in plotfunc.__name__: # extend is a keyword argument only for contour and contourf, but # passing it to the colorbar is sufficient for imshow and # pcolormesh kwargs["extend"] = cmap_params["extend"] kwargs["levels"] = cmap_params["levels"] # if colors == a single color, matplotlib draws dashed negative # contours. we lose this feature if we pass cmap and not colors if isinstance(colors, str): cmap_params["cmap"] = None kwargs["colors"] = colors if "pcolormesh" == plotfunc.__name__: kwargs["infer_intervals"] = infer_intervals if "imshow" == plotfunc.__name__ and isinstance(aspect, str): # forbid usage of mpl strings raise ValueError("plt.imshow's `aspect` kwarg is not available in xarray") if subplot_kws is None: subplot_kws = dict() ax = get_axis(figsize, size, aspect, ax, **subplot_kws) primitive = plotfunc( xplt, yplt, zval, ax=ax, cmap=cmap_params["cmap"], vmin=cmap_params["vmin"], vmax=cmap_params["vmax"], norm=cmap_params["norm"], **kwargs, ) # Label the plot with metadata if add_labels: ax.set_xlabel(label_from_attrs(darray[xlab], xlab_extra)) ax.set_ylabel(label_from_attrs(darray[ylab], ylab_extra)) ax.set_title(darray._title_for_slice()) if add_colorbar: if add_labels and "label" not in cbar_kwargs: cbar_kwargs["label"] = label_from_attrs(darray) cbar = _add_colorbar(primitive, ax, cbar_ax, cbar_kwargs, cmap_params) elif cbar_ax is not None or cbar_kwargs: # inform the user about keywords which aren't used raise ValueError( "cbar_ax and cbar_kwargs can't be used with add_colorbar=False." ) # origin kwarg overrides yincrease if "origin" in kwargs: yincrease = None _update_axes( ax, xincrease, yincrease, xscale, yscale, xticks, yticks, xlim, ylim ) # Rotate dates on xlabels # Do this without calling autofmt_xdate so that x-axes ticks # on other subplots (if any) are not deleted. # https://stackoverflow.com/questions/17430105/autofmt-xdate-deletes-x-axis-labels-of-all-subplots if np.issubdtype(xplt.dtype, np.datetime64): for xlabels in ax.get_xticklabels(): xlabels.set_rotation(30) xlabels.set_ha("right") return primitive # For use as DataArray.plot.plotmethod @functools.wraps(newplotfunc) def plotmethod( _PlotMethods_obj, x=None, y=None, figsize=None, size=None, aspect=None, ax=None, row=None, col=None, col_wrap=None, xincrease=True, yincrease=True, add_colorbar=None, add_labels=True, vmin=None, vmax=None, cmap=None, colors=None, center=None, robust=False, extend=None, levels=None, infer_intervals=None, subplot_kws=None, cbar_ax=None, cbar_kwargs=None, xscale=None, yscale=None, xticks=None, yticks=None, xlim=None, ylim=None, norm=None, **kwargs, ): """ The method should have the same signature as the function. This just makes the method work on Plotmethods objects, and passes all the other arguments straight through. 
""" allargs = locals() allargs["darray"] = _PlotMethods_obj._da allargs.update(kwargs) for arg in ["_PlotMethods_obj", "newplotfunc", "kwargs"]: del allargs[arg] return newplotfunc(**allargs) # Add to class _PlotMethods setattr(_PlotMethods, plotmethod.__name__, plotmethod) return newplotfunc @_plot2d def imshow(x, y, z, ax, **kwargs): """ Image plot of 2d DataArray using matplotlib.pyplot Wraps :func:`matplotlib:matplotlib.pyplot.imshow` While other plot methods require the DataArray to be strictly two-dimensional, ``imshow`` also accepts a 3D array where some dimension can be interpreted as RGB or RGBA color channels and allows this dimension to be specified via the kwarg ``rgb=``. Unlike matplotlib, Xarray can apply ``vmin`` and ``vmax`` to RGB or RGBA data, by applying a single scaling factor and offset to all bands. Passing ``robust=True`` infers ``vmin`` and ``vmax`` :ref:`in the usual way <robust-plotting>`. .. note:: This function needs uniformly spaced coordinates to properly label the axes. Call DataArray.plot() to check. The pixels are centered on the coordinates values. Ie, if the coordinate value is 3.2 then the pixels for those coordinates will be centered on 3.2. """ if x.ndim != 1 or y.ndim != 1: raise ValueError( "imshow requires 1D coordinates, try using " "pcolormesh or contour(f)" ) # Centering the pixels- Assumes uniform spacing try: xstep = (x[1] - x[0]) / 2.0 except IndexError: # Arbitrary default value, similar to matplotlib behaviour xstep = 0.1 try: ystep = (y[1] - y[0]) / 2.0 except IndexError: ystep = 0.1 left, right = x[0] - xstep, x[-1] + xstep bottom, top = y[-1] + ystep, y[0] - ystep defaults = {"origin": "upper", "interpolation": "nearest"} if not hasattr(ax, "projection"): # not for cartopy geoaxes defaults["aspect"] = "auto" # Allow user to override these defaults defaults.update(kwargs) if defaults["origin"] == "upper": defaults["extent"] = [left, right, bottom, top] else: defaults["extent"] = [left, right, top, bottom] if z.ndim == 3: # matplotlib imshow uses black for missing data, but Xarray makes # missing data transparent. We therefore add an alpha channel if # there isn't one, and set it to transparent where data is masked. 
if z.shape[-1] == 3: alpha = np.ma.ones(z.shape[:2] + (1,), dtype=z.dtype) if np.issubdtype(z.dtype, np.integer): alpha *= 255 z = np.ma.concatenate((z, alpha), axis=2) else: z = z.copy() z[np.any(z.mask, axis=-1), -1] = 0 primitive = ax.imshow(z, **defaults) return primitive @_plot2d def contour(x, y, z, ax, **kwargs): """ Contour plot of 2d DataArray Wraps :func:`matplotlib:matplotlib.pyplot.contour` """ primitive = ax.contour(x, y, z, **kwargs) return primitive @_plot2d def contourf(x, y, z, ax, **kwargs): """ Filled contour plot of 2d DataArray Wraps :func:`matplotlib:matplotlib.pyplot.contourf` """ primitive = ax.contourf(x, y, z, **kwargs) return primitive @_plot2d def pcolormesh(x, y, z, ax, infer_intervals=None, **kwargs): """ Pseudocolor plot of 2d DataArray Wraps :func:`matplotlib:matplotlib.pyplot.pcolormesh` """ # decide on a default for infer_intervals (GH781) x = np.asarray(x) if infer_intervals is None: if hasattr(ax, "projection"): if len(x.shape) == 1: infer_intervals = True else: infer_intervals = False else: infer_intervals = True if infer_intervals and ( (np.shape(x)[0] == np.shape(z)[1]) or ((x.ndim > 1) and (np.shape(x)[1] == np.shape(z)[1])) ): if len(x.shape) == 1: x = _infer_interval_breaks(x, check_monotonic=True) else: # we have to infer the intervals on both axes x = _infer_interval_breaks(x, axis=1) x = _infer_interval_breaks(x, axis=0) if infer_intervals and (np.shape(y)[0] == np.shape(z)[0]): if len(y.shape) == 1: y = _infer_interval_breaks(y, check_monotonic=True) else: # we have to infer the intervals on both axes y = _infer_interval_breaks(y, axis=1) y = _infer_interval_breaks(y, axis=0) primitive = ax.pcolormesh(x, y, z, **kwargs) # by default, pcolormesh picks "round" values for bounds # this results in ugly looking plots with lots of surrounding whitespace if not hasattr(ax, "projection") and x.ndim == 1 and y.ndim == 1: # not a cartopy geoaxis ax.set_xlim(x[0], x[-1]) ax.set_ylim(y[0], y[-1]) return primitive
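The module above dispatches DataArray.plot() to line, hist, or pcolormesh based on dimensionality, with the 2d wrappers (imshow, contour, contourf, pcolormesh) sharing the _plot2d machinery. A short usage sketch follows; the data, coordinate names, and keyword choices are made up for illustration, and xarray plus a matplotlib backend are assumed to be installed.

# Usage sketch for the plotting entry points defined above.
import numpy as np
import xarray as xr

da = xr.DataArray(
    np.random.rand(24, 10),
    dims=("time", "x"),
    coords={"time": np.arange(24), "x": np.linspace(0.0, 1.0, 10)},
    name="temperature",
)

da.plot()                         # 2d input -> dispatched to pcolormesh
da.isel(x=0).plot.line()          # 1d input -> line plot
da.plot.line(x="time", hue="x")   # 2d line plot, one line per "x" value
da.plot.hist(bins=20)             # flattens the array into a histogram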
{ "pile_set_name": "Github" }
/* * Minio Cloud Storage (C) 2018 Minio, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import Moment from "moment" import storage from "local-storage-fallback" import * as alertActions from "../alert/actions" import * as objectsActions from "../objects/actions" import { getCurrentBucket } from "../buckets/selectors" import { getCurrentPrefix } from "../objects/selectors" import { minioBrowserPrefix } from "../constants" export const ADD = "uploads/ADD" export const UPDATE_PROGRESS = "uploads/UPDATE_PROGRESS" export const STOP = "uploads/STOP" export const SHOW_ABORT_MODAL = "uploads/SHOW_ABORT_MODAL" export const add = (slug, size, name) => ({ type: ADD, slug, size, name }) export const updateProgress = (slug, loaded) => ({ type: UPDATE_PROGRESS, slug, loaded }) export const stop = slug => ({ type: STOP, slug }) export const showAbortModal = () => ({ type: SHOW_ABORT_MODAL, show: true }) export const hideAbortModal = () => ({ type: SHOW_ABORT_MODAL, show: false }) let requests = {} export const addUpload = (xhr, slug, size, name) => { return function(dispatch) { requests[slug] = xhr dispatch(add(slug, size, name)) } } export const abortUpload = slug => { return function(dispatch) { const xhr = requests[slug] if (xhr) { xhr.abort() } dispatch(stop(slug)) dispatch(hideAbortModal()) } } export const uploadFile = file => { return function(dispatch, getState) { const state = getState() const currentBucket = getCurrentBucket(state) if (!currentBucket) { dispatch( alertActions.set({ type: "danger", message: "Please choose a bucket before trying to upload files." }) ) return } const currentPrefix = getCurrentPrefix(state) const objectName = `${currentPrefix}${file.name}` const uploadUrl = `${ window.location.origin }${minioBrowserPrefix}/upload/${currentBucket}/${objectName}` const slug = `${currentBucket}-${currentPrefix}-${file.name}` let xhr = new XMLHttpRequest() xhr.open("PUT", uploadUrl, true) xhr.withCredentials = false const token = storage.getItem("token") if (token) { xhr.setRequestHeader( "Authorization", "Bearer " + storage.getItem("token") ) } xhr.setRequestHeader( "x-amz-date", Moment() .utc() .format("YYYYMMDDTHHmmss") + "Z" ) dispatch(addUpload(xhr, slug, file.size, file.name)) xhr.onload = function(event) { if (xhr.status == 401 || xhr.status == 403) { dispatch(hideAbortModal()) dispatch(stop(slug)) dispatch( alertActions.set({ type: "danger", message: "Unauthorized request." }) ) } if (xhr.status == 500) { dispatch(hideAbortModal()) dispatch(stop(slug)) dispatch( alertActions.set({ type: "danger", message: xhr.responseText }) ) } if (xhr.status == 200) { dispatch(hideAbortModal()) dispatch(stop(slug)) dispatch( alertActions.set({ type: "success", message: "File '" + file.name + "' uploaded successfully." }) ) dispatch(objectsActions.selectPrefix(currentPrefix)) } } xhr.upload.addEventListener("error", event => { dispatch(stop(slug)) dispatch( alertActions.set({ type: "danger", message: "Error occurred uploading '" + file.name + "'." 
}) ) }) xhr.upload.addEventListener("progress", event => { if (event.lengthComputable) { let loaded = event.loaded let total = event.total // Update the counter dispatch(updateProgress(slug, loaded)) } }) xhr.send(file) } }
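The uploadFile action above issues a PUT to `${origin}${minioBrowserPrefix}/upload/${bucket}/${object}` with a Bearer token and an x-amz-date header in `YYYYMMDDTHHmmss` + "Z" form. The sketch below mirrors that request outside the browser; the "/minio" prefix standing in for minioBrowserPrefix, the endpoint URL, and the token source are assumptions.

# Sketch of the same upload request issued with the requests library.
# The "/minio" prefix and the example endpoint/token are assumptions.
import datetime
import requests

def upload(base_url, token, bucket, object_name, path, prefix="/minio"):
    url = f"{base_url}{prefix}/upload/{bucket}/{object_name}"
    amz_date = datetime.datetime.now(datetime.timezone.utc).strftime("%Y%m%dT%H%M%S") + "Z"
    headers = {
        "Authorization": f"Bearer {token}",   # JWT from the browser login API
        "x-amz-date": amz_date,
    }
    with open(path, "rb") as fh:
        response = requests.put(url, data=fh, headers=headers)
    response.raise_for_status()
    return response

# upload("http://localhost:9000", token, "mybucket", "notes.txt", "notes.txt")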
{ "pile_set_name": "Github" }
/* **************************************************************************** * * Copyright (c) Microsoft Corporation. * * This source code is subject to terms and conditions of the Apache License, Version 2.0. A * copy of the license can be found in the License.html file at the root of this distribution. If * you cannot locate the Apache License, Version 2.0, please send an email to * [email protected]. By using this source code in any fashion, you are agreeing to be bound * by the terms of the Apache License, Version 2.0. * * You must not remove this notice, or any other, from this software. * * * ***************************************************************************/ using System.Diagnostics; namespace System.Management.Automation.Interpreter { internal abstract class GreaterThanInstruction : Instruction { private static Instruction s_SByte, s_int16, s_char, s_int32, s_int64, s_byte, s_UInt16, s_UInt32, s_UInt64, s_single, s_double; public override int ConsumedStack { get { return 2; } } public override int ProducedStack { get { return 1; } } private GreaterThanInstruction() { } internal sealed class GreaterThanSByte : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { sbyte right = (sbyte)frame.Pop(); frame.Push(((sbyte)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanInt16 : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { Int16 right = (Int16)frame.Pop(); frame.Push(((Int16)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanChar : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { char right = (char)frame.Pop(); frame.Push(((char)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanInt32 : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { Int32 right = (Int32)frame.Pop(); frame.Push(((Int32)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanInt64 : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { Int64 right = (Int64)frame.Pop(); frame.Push(((Int64)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanByte : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { byte right = (byte)frame.Pop(); frame.Push(((byte)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanUInt16 : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { UInt16 right = (UInt16)frame.Pop(); frame.Push(((UInt16)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanUInt32 : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { UInt32 right = (UInt32)frame.Pop(); frame.Push(((UInt32)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanUInt64 : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { UInt64 right = (UInt64)frame.Pop(); frame.Push(((UInt64)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanSingle : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { Single right = (Single)frame.Pop(); frame.Push(((Single)frame.Pop()) > right); return +1; } } internal sealed class GreaterThanDouble : GreaterThanInstruction { public override int Run(InterpretedFrame frame) { double right = (double)frame.Pop(); frame.Push(((double)frame.Pop()) > right); return +1; } } public static Instruction Create(Type type) { Debug.Assert(!type.IsEnum); switch (type.GetTypeCode()) { case TypeCode.SByte: return s_SByte ?? 
(s_SByte = new GreaterThanSByte()); case TypeCode.Byte: return s_byte ?? (s_byte = new GreaterThanByte()); case TypeCode.Char: return s_char ?? (s_char = new GreaterThanChar()); case TypeCode.Int16: return s_int16 ?? (s_int16 = new GreaterThanInt16()); case TypeCode.Int32: return s_int32 ?? (s_int32 = new GreaterThanInt32()); case TypeCode.Int64: return s_int64 ?? (s_int64 = new GreaterThanInt64()); case TypeCode.UInt16: return s_UInt16 ?? (s_UInt16 = new GreaterThanUInt16()); case TypeCode.UInt32: return s_UInt32 ?? (s_UInt32 = new GreaterThanUInt32()); case TypeCode.UInt64: return s_UInt64 ?? (s_UInt64 = new GreaterThanUInt64()); case TypeCode.Single: return s_single ?? (s_single = new GreaterThanSingle()); case TypeCode.Double: return s_double ?? (s_double = new GreaterThanDouble()); default: throw Assert.Unreachable; } } public override string ToString() { return "GreaterThan()"; } } }
{ "pile_set_name": "Github" }
/****************************************************************************/ // Eclipse SUMO, Simulation of Urban MObility; see https://eclipse.org/sumo // Copyright (C) 2001-2020 German Aerospace Center (DLR) and others. // This program and the accompanying materials are made available under the // terms of the Eclipse Public License 2.0 which is available at // https://www.eclipse.org/legal/epl-2.0/ // This Source Code may also be made available under the following Secondary // Licenses when the conditions for such availability set forth in the Eclipse // Public License 2.0 are satisfied: GNU General Public License, version 2 // or later which is available at // https://www.gnu.org/licenses/old-licenses/gpl-2.0-standalone.html // SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later /****************************************************************************/ /// @file RailEdge.h /// @author Jakob Erdmann /// @date 26.02.2020 /// // The RailEdge is a wrapper around a ROEdge or a MSEdge used for railway routing /****************************************************************************/ #pragma once #include <config.h> #include <cassert> //#define RailEdge_DEBUG_TURNS //#define RailEdge_DEBUG_INIT //#define RailEdge_DEBUG_SUCCESSORS #define RailEdge_DEBUGID "" //#define RailEdge_DEBUG_COND(obj) ((obj != 0 && (obj)->getID() == RailEdge_DEBUGID)) #define RailEdge_DEBUG_COND(obj) (true) // =========================================================================== // class definitions // =========================================================================== /// @brief the edge type representing backward edges template<class E, class V> class RailEdge { public: typedef RailEdge<E, V> _RailEdge; typedef std::vector<std::pair<const _RailEdge*, const _RailEdge*> > ConstEdgePairVector; RailEdge(const E* orig) : myNumericalID(orig->getNumericalID()), myOriginal(orig), myTurnaround(nullptr), myIsVirtual(true) { } RailEdge(const E* turnStart, const E* turnEnd, int numericalID) : myNumericalID(numericalID), myID("TrainReversal!" 
+ turnStart->getID() + "->" + turnEnd->getID()), myOriginal(nullptr), myTurnaround(nullptr), myIsVirtual(true), myMaxLength(turnStart->getLength()), myStartLength(turnStart->getLength()) { myViaSuccessors.push_back(std::make_pair(turnEnd->getRailwayRoutingEdge(), nullptr)); } void update(double maxTrainLength, const std::vector<const E*>& replacementEdges) { if (maxTrainLength > myMaxLength) { myMaxLength = maxTrainLength; myReplacementEdges = replacementEdges; #ifdef RailEdge_DEBUG_INIT std::cout << " update RailEdge " << getID() << " myMaxLength=" << myMaxLength << " repl=" << toString(myReplacementEdges) << "\n"; #endif } } void addVirtualTurns(const E* forward, const E* backward, std::vector<_RailEdge*>& railEdges, int& numericalID, double dist, double maxTrainLength, const std::vector<const E*>& replacementEdges) { // search backwards until dist and add virtual turnaround edges with // replacement edges up to the real turnaround #ifdef RailEdge_DEBUG_INIT std::cout << "addVirtualTurns forward=" << forward->getID() << " backward=" << backward->getID() << " dist=" << dist << " maxLength=" << maxTrainLength << " repl=" << toString(replacementEdges) << "\n"; #endif if (dist <= 0) { return; } for (const E* prev : forward->getPredecessors()) { if (prev == backward) { continue; } const E* bidi = prev->getBidiEdge(); if (backward->isConnectedTo(*bidi, SVC_IGNORING)) { _RailEdge* prevRailEdge = prev->getRailwayRoutingEdge(); if (prevRailEdge->myTurnaround == nullptr) { prevRailEdge->myTurnaround = new _RailEdge(prev, bidi, numericalID++); prevRailEdge->myViaSuccessors.push_back(std::make_pair(prevRailEdge->myTurnaround, nullptr)); railEdges.push_back(prevRailEdge->myTurnaround); #ifdef RailEdge_DEBUG_INIT std::cout << " RailEdge " << prevRailEdge->getID() << " virtual turnaround " << prevRailEdge->myTurnaround->getID() << "\n"; #endif } prevRailEdge->myTurnaround->update(prev->getLength() + maxTrainLength, replacementEdges); std::vector<const E*> replacementEdges2; replacementEdges2.push_back(prev); replacementEdges2.insert(replacementEdges2.end(), replacementEdges.begin(), replacementEdges.end()); addVirtualTurns(prev, bidi, railEdges, numericalID, dist - prev->getLength(), maxTrainLength + prev->getLength(), replacementEdges2); } } } void init(std::vector<_RailEdge*>& railEdges, int& numericalID, double maxTrainLength) { // replace turnaround-via with an explicit RailEdge that checks length for (const auto& viaPair : myOriginal->getViaSuccessors()) { if (viaPair.first == myOriginal->getBidiEdge()) { // direction reversal if (myTurnaround == nullptr) { myTurnaround = new _RailEdge(myOriginal, viaPair.first, numericalID++); myViaSuccessors.push_back(std::make_pair(myTurnaround, nullptr)); railEdges.push_back(myTurnaround); #ifdef RailEdge_DEBUG_INIT std::cout << "RailEdge " << getID() << " actual turnaround " << myTurnaround->getID() << "\n"; #endif } myTurnaround->myIsVirtual = false; addVirtualTurns(myOriginal, viaPair.first, railEdges, numericalID, maxTrainLength - getLength(), getLength(), std::vector<const E*> {myOriginal}); } else { myViaSuccessors.push_back(std::make_pair(viaPair.first->getRailwayRoutingEdge(), viaPair.second == nullptr ? 
nullptr : viaPair.second->getRailwayRoutingEdge())); } } #ifdef RailEdge_DEBUG_SUCCESSORS std::cout << "RailEdge " << getID() << " successors=" << myViaSuccessors.size() << " orig=" << myOriginal->getViaSuccessors().size() << "\n"; for (const auto& viaPair : myViaSuccessors) { std::cout << " " << viaPair.first->getID() << "\n"; } #endif } /// @brief Returns the index (numeric id) of the edge inline int getNumericalID() const { return myNumericalID; } /// @brief Returns the original edge const E* getOriginal() const { return myOriginal; } /** @brief Returns the id of the edge * @return The original edge's id */ const std::string& getID() const { return myOriginal != nullptr ? myOriginal->getID() : myID; } void insertOriginalEdges(double length, std::vector<const E*>& into) const { if (myOriginal != nullptr) { into.push_back(myOriginal); } else { double seen = myStartLength; int nPushed = 0; if (seen >= length && !myIsVirtual) { return; } // we need to find a replacement edge that has a real turn for (const E* edge : myReplacementEdges) { into.push_back(edge); nPushed++; seen += edge->getLength(); if (seen >= length && edge->isConnectedTo(*edge->getBidiEdge(), SVC_IGNORING)) { break; } //std::cout << "insertOriginalEdges length=" << length << " seen=" << seen << " into=" << toString(into) << "\n"; } const int last = (int)into.size() - 1; for (int i = 0; i < nPushed; i++) { into.push_back(into[last - i]->getBidiEdge()); } } } /** @brief Returns the length of the edge * @return The original edge's length */ double getLength() const { return myOriginal == nullptr ? 0 : myOriginal->getLength(); } //const RailEdge* getBidiEdge() const { // return myOriginal->getBidiEdge()->getRailwayRoutingEdge(); //} bool isInternal() const { return myOriginal->isInternal(); } inline bool prohibits(const V* const vehicle) const { #ifdef RailEdge_DEBUG_TURNS if (myOriginal == nullptr && RailEdge_DEBUG_COND(vehicle)) { std::cout << getID() << " maxLength=" << myMaxLength << " veh=" << vehicle->getID() << " length=" << vehicle->getLength() << "\n"; } #endif return vehicle->getLength() > myMaxLength || (myOriginal != nullptr && myOriginal->prohibits(vehicle)); } inline bool restricts(const V* const vehicle) const { return myOriginal != nullptr && myOriginal->restricts(vehicle); } const ConstEdgePairVector& getViaSuccessors(SUMOVehicleClass vClass = SVC_IGNORING) const { if (vClass == SVC_IGNORING || myOriginal == nullptr || myOriginal->isTazConnector()) { // || !MSNet::getInstance()->hasPermissions()) { return myViaSuccessors; } #ifdef HAVE_FOX FXMutexLock lock(mySuccessorMutex); #endif auto i = myClassesViaSuccessorMap.find(vClass); if (i != myClassesViaSuccessorMap.end()) { // can use cached value return i->second; } // instantiate vector ConstEdgePairVector& result = myClassesViaSuccessorMap[vClass]; // this vClass is requested for the first time. 
rebuild all successors for (const auto& viaPair : myViaSuccessors) { if (viaPair.first->myOriginal == nullptr || viaPair.first->myOriginal->isTazConnector() || myOriginal->isConnectedTo(*viaPair.first->myOriginal, vClass)) { result.push_back(viaPair); } } return result; } bool isVirtual() const { return myIsVirtual; } private: const int myNumericalID; const std::string myID; const E* myOriginal; _RailEdge* myTurnaround; bool myIsVirtual; /// @brief actual edges to return when passing this (turnaround) edge - only forward std::vector<const E*> myReplacementEdges; /// @brief maximum train length for passing this (turnaround) edge double myMaxLength = std::numeric_limits<double>::max(); /// @brief length of the edge where this turn starts double myStartLength = 0; /// @brief The successors available for a given vClass mutable std::map<SUMOVehicleClass, ConstEdgePairVector> myClassesViaSuccessorMap; mutable ConstEdgePairVector myViaSuccessors; #ifdef HAVE_FOX /// @brief Mutex for accessing successor edges mutable FXMutex mySuccessorMutex; #endif };
{ "pile_set_name": "Github" }
# readable-stream

***Node-core streams for userland***

[![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream)

[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/)
[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/)

[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream)

```bash
npm install --save readable-stream
```

***Node-core streams for userland***

This package is a mirror of the Streams2 and Streams3 implementations in Node-core, including [documentation](doc/stream.markdown).

If you want to guarantee a stable streams base, regardless of what version of Node you (or the users of your libraries) are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core; for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html).

As of version 2.0.0 **readable-stream** uses semantic versioning.

# Streams WG Team Members

* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) &lt;[email protected]&gt;
  - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B
* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) &lt;[email protected]&gt;
  - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242
* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) &lt;[email protected]&gt;
  - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D
* **Sam Newman** ([@sonewman](https://github.com/sonewman)) &lt;[email protected]&gt;
* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) &lt;[email protected]&gt;
* **Domenic Denicola** ([@domenic](https://github.com/domenic)) &lt;[email protected]&gt;
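A minimal usage sketch (hedged: the stream contents are illustrative; the API mirrors Node core's `stream` module, which is the whole point of the package):

```js
// Drop-in replacement for require('stream')
var Readable = require('readable-stream').Readable;

var rs = new Readable({
  read: function () {} // no-op: all data is pushed up front
});

rs.push('beep ');
rs.push('boop\n');
rs.push(null); // signal end-of-stream

rs.pipe(process.stdout); // prints "beep boop"
```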
{ "pile_set_name": "Github" }
/** * Loads a Wavefront .mtl file specifying materials * * @author angelxuanchang */ THREE.MTLLoader = function( baseUrl, options, crossOrigin ) { this.baseUrl = baseUrl; this.options = options; this.crossOrigin = crossOrigin; }; THREE.MTLLoader.prototype = { constructor: THREE.MTLLoader, load: function ( url, onLoad, onProgress, onError ) { var scope = this; var loader = new THREE.XHRLoader(); loader.setCrossOrigin( this.crossOrigin ); loader.load( url, function ( text ) { onLoad( scope.parse( text ) ); }, onProgress, onError ); }, /** * Parses loaded MTL file * @param text - Content of MTL file * @return {THREE.MTLLoader.MaterialCreator} */ parse: function ( text ) { var lines = text.split( "\n" ); var info = {}; var delimiter_pattern = /\s+/; var materialsInfo = {}; for ( var i = 0; i < lines.length; i ++ ) { var line = lines[ i ]; line = line.trim(); if ( line.length === 0 || line.charAt( 0 ) === '#' ) { // Blank line or comment ignore continue; } var pos = line.indexOf( ' ' ); var key = ( pos >= 0 ) ? line.substring( 0, pos ) : line; key = key.toLowerCase(); var value = ( pos >= 0 ) ? line.substring( pos + 1 ) : ""; value = value.trim(); if ( key === "newmtl" ) { // New material info = { name: value }; materialsInfo[ value ] = info; } else if ( info ) { if ( key === "ka" || key === "kd" || key === "ks" ) { var ss = value.split( delimiter_pattern, 3 ); info[ key ] = [ parseFloat( ss[0] ), parseFloat( ss[1] ), parseFloat( ss[2] ) ]; } else { info[ key ] = value; } } } var materialCreator = new THREE.MTLLoader.MaterialCreator( this.baseUrl, this.options ); materialCreator.setMaterials( materialsInfo ); return materialCreator; } }; /** * Create a new THREE-MTLLoader.MaterialCreator * @param baseUrl - Url relative to which textures are loaded * @param options - Set of options on how to construct the materials * side: Which side to apply the material * THREE.FrontSide (default), THREE.BackSide, THREE.DoubleSide * wrap: What type of wrapping to apply for textures * THREE.RepeatWrapping (default), THREE.ClampToEdgeWrapping, THREE.MirroredRepeatWrapping * normalizeRGB: RGBs need to be normalized to 0-1 from 0-255 * Default: false, assumed to be already normalized * ignoreZeroRGBs: Ignore values of RGBs (Ka,Kd,Ks) that are all 0's * Default: false * invertTransparency: If transparency need to be inverted (inversion is needed if d = 0 is fully opaque) * Default: false (d = 1 is fully opaque) * @constructor */ THREE.MTLLoader.MaterialCreator = function( baseUrl, options ) { this.baseUrl = baseUrl; this.options = options; this.materialsInfo = {}; this.materials = {}; this.materialsArray = []; this.nameLookup = {}; this.side = ( this.options && this.options.side )? this.options.side: THREE.FrontSide; this.wrap = ( this.options && this.options.wrap )? 
this.options.wrap: THREE.RepeatWrapping; }; THREE.MTLLoader.MaterialCreator.prototype = { constructor: THREE.MTLLoader.MaterialCreator, setMaterials: function( materialsInfo ) { this.materialsInfo = this.convert( materialsInfo ); this.materials = {}; this.materialsArray = []; this.nameLookup = {}; }, convert: function( materialsInfo ) { if ( !this.options ) return materialsInfo; var converted = {}; for ( var mn in materialsInfo ) { // Convert materials info into normalized form based on options var mat = materialsInfo[ mn ]; var covmat = {}; converted[ mn ] = covmat; for ( var prop in mat ) { var save = true; var value = mat[ prop ]; var lprop = prop.toLowerCase(); switch ( lprop ) { case 'kd': case 'ka': case 'ks': // Diffuse color (color under white light) using RGB values if ( this.options && this.options.normalizeRGB ) { value = [ value[ 0 ] / 255, value[ 1 ] / 255, value[ 2 ] / 255 ]; } if ( this.options && this.options.ignoreZeroRGBs ) { if ( value[ 0 ] === 0 && value[ 1 ] === 0 && value[ 1 ] === 0 ) { // ignore save = false; } } break; case 'd': // According to MTL format (http://paulbourke.net/dataformats/mtl/): // d is dissolve for current material // factor of 1.0 is fully opaque, a factor of 0 is fully dissolved (completely transparent) if ( this.options && this.options.invertTransparency ) { value = 1 - value; } break; default: break; } if ( save ) { covmat[ lprop ] = value; } } } return converted; }, preload: function () { for ( var mn in this.materialsInfo ) { this.create( mn ); } }, getIndex: function( materialName ) { return this.nameLookup[ materialName ]; }, getAsArray: function() { var index = 0; for ( var mn in this.materialsInfo ) { this.materialsArray[ index ] = this.create( mn ); this.nameLookup[ mn ] = index; index ++; } return this.materialsArray; }, create: function ( materialName ) { if ( this.materials[ materialName ] === undefined ) { this.createMaterial_( materialName ); } return this.materials[ materialName ]; }, createMaterial_: function ( materialName ) { // Create material var mat = this.materialsInfo[ materialName ]; var params = { name: materialName, side: this.side }; for ( var prop in mat ) { var value = mat[ prop ]; switch ( prop.toLowerCase() ) { // Ns is material specular exponent case 'kd': // Diffuse color (color under white light) using RGB values params[ 'diffuse' ] = new THREE.Color().fromArray( value ); break; case 'ka': // Ambient color (color under shadow) using RGB values params[ 'ambient' ] = new THREE.Color().fromArray( value ); break; case 'ks': // Specular color (color when light is reflected from shiny surface) using RGB values params[ 'specular' ] = new THREE.Color().fromArray( value ); break; case 'map_kd': // Diffuse texture map params[ 'map' ] = this.loadTexture( this.baseUrl + value ); params[ 'map' ].wrapS = this.wrap; params[ 'map' ].wrapT = this.wrap; break; case 'ns': // The specular exponent (defines the focus of the specular highlight) // A high exponent results in a tight, concentrated highlight. Ns values normally range from 0 to 1000. 
params['shininess'] = value; break; case 'd': // According to MTL format (http://paulbourke.net/dataformats/mtl/): // d is dissolve for current material // factor of 1.0 is fully opaque, a factor of 0 is fully dissolved (completely transparent) if ( value < 1 ) { params['transparent'] = true; params['opacity'] = value; } break; default: break; } } if ( params[ 'diffuse' ] ) { if ( !params[ 'ambient' ]) params[ 'ambient' ] = params[ 'diffuse' ]; params[ 'color' ] = params[ 'diffuse' ]; } this.materials[ materialName ] = new THREE.MeshPhongMaterial( params ); return this.materials[ materialName ]; }, loadTexture: function ( url, mapping, onLoad, onError ) { var texture; var loader = THREE.Loader.Handlers.get( url ); if ( loader !== null ) { texture = loader.load( url, onLoad ); } else { texture = new THREE.Texture(); loader = new THREE.ImageLoader(); loader.crossOrigin = this.crossOrigin; loader.load( url, function ( image ) { texture.image = THREE.MTLLoader.ensurePowerOfTwo_( image ); texture.needsUpdate = true; if ( onLoad ) onLoad( texture ); } ); } texture.mapping = mapping; return texture; } }; THREE.MTLLoader.ensurePowerOfTwo_ = function ( image ) { if ( ! THREE.Math.isPowerOfTwo( image.width ) || ! THREE.Math.isPowerOfTwo( image.height ) ) { var canvas = document.createElement( "canvas" ); canvas.width = THREE.MTLLoader.nextHighestPowerOfTwo_( image.width ); canvas.height = THREE.MTLLoader.nextHighestPowerOfTwo_( image.height ); var ctx = canvas.getContext("2d"); ctx.drawImage( image, 0, 0, image.width, image.height, 0, 0, canvas.width, canvas.height ); return canvas; } return image; }; THREE.MTLLoader.nextHighestPowerOfTwo_ = function( x ) { --x; for ( var i = 1; i < 32; i <<= 1 ) { x = x | x >> i; } return x + 1; }; THREE.EventDispatcher.prototype.apply( THREE.MTLLoader.prototype );
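A minimal usage sketch of the loader defined above (hedged: the base URL, `.mtl` file name, and material name are placeholders; the callback shape follows `MTLLoader.load` / `MaterialCreator` as written here):

```js
var mtlLoader = new THREE.MTLLoader( 'models/' );

mtlLoader.load( 'example.mtl', function ( materialCreator ) {

	// resolve every material described in the .mtl file up front
	materialCreator.preload();

	// fetch one THREE.MeshPhongMaterial by its "newmtl" name
	var material = materialCreator.create( 'ExampleMaterial' );

	// ...assign `material` to a mesh, or pass `materialCreator` to an OBJ loader

} );
```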
{ "pile_set_name": "Github" }
var lodash = require('./lodash'); /** * Creates a `lodash` object that wraps `value` with explicit method * chaining enabled. * * @static * @memberOf _ * @category Chain * @param {*} value The value to wrap. * @returns {Object} Returns the new `lodash` wrapper instance. * @example * * var users = [ * { 'user': 'barney', 'age': 36 }, * { 'user': 'fred', 'age': 40 }, * { 'user': 'pebbles', 'age': 1 } * ]; * * var youngest = _.chain(users) * .sortBy('age') * .map(function(chr) { * return chr.user + ' is ' + chr.age; * }) * .first() * .value(); * // => 'pebbles is 1' */ function chain(value) { var result = lodash(value); result.__chain__ = true; return result; } module.exports = chain;
{ "pile_set_name": "Github" }
<div {!! $attributes !!}> @if ($visibled) @if (!empty($value)) <a href="{{ $value }}" data-toggle="lightbox"> <img class="thumbnail" src="{{ $value }}"> </a> @endif {!! $append !!} @if($small) <small class="clearfix">{!! $small !!}</small> @endif @endif </div>
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2013 by Steve Nygard. // #pragma mark - // // File: /Applications/Xcode-7GM.app/Contents/Developer/Platforms/WatchSimulator.platform/Developer/SDKs/WatchSimulator.sdk/System/Library/PrivateFrameworks/GraphicsServices.framework/GraphicsServices // UUID: BC90061B-82E0-3083-B8ED-645846C96184 // // Arch: i386 // Current version: 14.0.0 // Compatibility version: 1.0.0 // Source version: 622.0.0.0.0 // // // This file does not contain any Objective-C runtime information. //
{ "pile_set_name": "Github" }
"use strict"; var isImplemented = require("../../../../string/#/@@iterator/is-implemented"); module.exports = function (a) { a(isImplemented(), true); };
{ "pile_set_name": "Github" }
import React, { useState } from 'react';
import { useHistory } from 'react-router-dom';
import { Card, PageSection } from '@patternfly/react-core';
import { CardBody } from '../../../components/Card';
import ProjectForm from '../shared/ProjectForm';
import { ProjectsAPI } from '../../../api';

function ProjectAdd() {
  const [formSubmitError, setFormSubmitError] = useState(null);
  const history = useHistory();

  const handleSubmit = async values => {
    if (values.scm_type === 'manual') {
      values.scm_type = '';
    }
    if (!values.credential) {
      // Depending on the permissions of the user submitting the form,
      // the API might throw an unexpected error if our creation request
      // has a zero-length string as its credential field. As a work-around,
      // normalize falsey credential fields by deleting them.
      delete values.credential;
    }
    setFormSubmitError(null);
    try {
      const {
        data: { id },
      } = await ProjectsAPI.create({
        ...values,
        organization: values.organization.id,
      });
      history.push(`/projects/${id}/details`);
    } catch (error) {
      setFormSubmitError(error);
    }
  };

  const handleCancel = () => {
    history.push(`/projects`);
  };

  return (
    <PageSection>
      <Card>
        <CardBody>
          <ProjectForm
            handleCancel={handleCancel}
            handleSubmit={handleSubmit}
            submitError={formSubmitError}
          />
        </CardBody>
      </Card>
    </PageSection>
  );
}

export default ProjectAdd;
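A hedged sketch of how a screen like this is typically mounted with react-router v5 (the route path and surrounding switch are assumptions for illustration, not taken from this repository):

```jsx
import React from 'react';
import { Route, Switch } from 'react-router-dom';
import ProjectAdd from './ProjectAdd';

function ProjectsRoutes() {
  return (
    <Switch>
      {/* hypothetical path; the component itself navigates to /projects/... on submit/cancel */}
      <Route path="/projects/add">
        <ProjectAdd />
      </Route>
    </Switch>
  );
}

export default ProjectsRoutes;
```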
{ "pile_set_name": "Github" }
package web

// FieldError is used to indicate an error with a specific request field.
type FieldError struct {
	Field string `json:"field"`
	Error string `json:"error"`
}

// ErrorResponse is the form used for API responses from failures in the API.
type ErrorResponse struct {
	Error  string       `json:"error"`
	Fields []FieldError `json:"fields,omitempty"`
}

// Error is used to pass an error during the request through the
// application with web specific context.
type Error struct {
	Err    error
	Status int
	Fields []FieldError
}

// NewRequestError wraps a provided error with an HTTP status code. This
// function should be used when handlers encounter expected errors.
func NewRequestError(err error, status int) error {
	return &Error{err, status, nil}
}

// Error implements the error interface. It uses the default message of the
// wrapped error. This is what will be shown in the services' logs.
func (err *Error) Error() string {
	return err.Err.Error()
}
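A hedged sketch of how a handler might use `NewRequestError`; the handler signature, import path, and the middleware that eventually renders an `ErrorResponse` are assumptions about the surrounding framework, not part of this package:

```go
package handlers

import (
	"encoding/json"
	"net/http"

	"example.com/project/internal/web" // hypothetical import path for the package above
)

// createUser decodes the request body and reports decode failures as
// 400-level request errors that upstream middleware can unwrap and render.
func createUser(w http.ResponseWriter, r *http.Request) error {
	var payload struct {
		Name string `json:"name"`
	}
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		return web.NewRequestError(err, http.StatusBadRequest)
	}
	w.WriteHeader(http.StatusCreated)
	return nil
}
```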
{ "pile_set_name": "Github" }
# Developing with Minikube

This guide walks you through deploying and running your locally built Nuclio sources on a local Kubernetes cluster in your Minikube VM. This is helpful when you're developing new functionality in the Kubernetes platform and need to test it against a real Kubernetes cluster.

## Working assumptions

This guide assumes that:

- You set up your Minikube VM as described in the [Minikube getting started guide](/docs/setup/minikube/getting-started-minikube.md)
- You have previously deployed a _released_ Nuclio version on top of it and interacted with it
- You have a working Nuclio development environment and you're on a branch containing changes you made which pertain to the Kubernetes platform

## Get your local images onto Minikube

When you install Nuclio's services onto Minikube (using `kubectl apply`), Kubernetes examines the given resource specification to determine which images to use for Nuclio's controller and dashboard services. To make it use your images, you must first push them onto the local Docker registry running inside the Minikube VM. To do this:

- Make sure you've built container images with your changes (`make build`)
- Push them by running the script located at `hack/minikube/scripts/push_images.py`. Keep in mind that the script assumes the local Docker registry is listening on port 5000 of the Minikube VM. It does the following:
  - Iterates over the existing Nuclio container images on the host machine
  - For each such image:
    - Tags it locally as `$(minikube ip):5000/<image>` (e.g., `192.168.64.4:5000/processor:latest-amd64`)
    - Pushes it to the Docker registry. Since the image's tag refers to a registry, it's pushed to the Minikube registry.
    - Untags it locally
    - _(in the Minikube VM)_ Pulls the image, specifying the local Docker registry (e.g., `docker pull localhost:5000/processor:latest-amd64`)
    - _(in the Minikube VM)_ Tags it with the `nuclio/` prefix (e.g., `nuclio/processor:latest-amd64`)
    - _(in the Minikube VM)_ Untags the Minikube-specific tag

This will make the latest versions of your locally built images available from the Docker registry in the Minikube VM.

## Deploy a custom version of the Nuclio services

The `nuclio.yaml` resource specification that we feed `kubectl apply` with when deploying a released Nuclio version always points to controller and dashboard images fixed to that version. In our case, we must use a modified version:

```sh
kubectl apply -f https://raw.githubusercontent.com/nuclio/nuclio/development/hack/minikube/resources/devel/nuclio.yaml
```

It differs from the usual `nuclio.yaml` in that:

1) Controller/dashboard images are "latest", resulting in the images you pushed in the last step being used
2) Controller/dashboard images are never pulled from Docker Hub
3) The dashboard is told (via an environment variable) not to pull base images when deploying functions (it'll use the images you pushed)

You should now have a functional Kubernetes cluster using images built from your local changes, and can test against it to make sure they work as expected. Keep in mind that when using a locally built latest `nuctl`, you should specify `--no-pull` so that the base images you pushed are used.
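Putting the steps together, a typical iteration loop looks roughly like this (the commands are the ones described above; your checkout location and shell may differ):

```sh
# build images from your local changes, push them into the Minikube registry,
# then (re)deploy the development resource spec
make build
hack/minikube/scripts/push_images.py
kubectl apply -f https://raw.githubusercontent.com/nuclio/nuclio/development/hack/minikube/resources/devel/nuclio.yaml
```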
{ "pile_set_name": "Github" }
smallrye.messaging.worker.my-pool.max-concurrency=2
smallrye.messaging.worker.another-pool.max-concurrency=5
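These worker-pool names are typically referenced from a `@Blocking` annotation on a SmallRye Reactive Messaging method; a hedged Java sketch (channel and class names are illustrative):

```java
import io.smallrye.reactive.messaging.annotations.Blocking;
import org.eclipse.microprofile.reactive.messaging.Incoming;

public class OrderProcessor {

    @Incoming("orders")   // hypothetical channel
    @Blocking("my-pool")  // runs on the worker pool capped at max-concurrency=2 above
    public void process(String order) {
        // long-running, blocking work goes here
    }
}
```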
{ "pile_set_name": "Github" }
FieldType name = "FLD_SUBSPACE_RIFT" description = "FLD_SUBSPACE_RIFT_DESC" stealth = 0 effectsgroups = [ EffectsGroup // pull in objects scope = And [ System Not Contains Source WithinDistance distance = Source.Size condition = Source ] effects = MoveTowards speed = 5 target = Source EffectsGroup // destroy close objects scope = And [ Not Source Not Contains Source WithinDistance distance = 10 condition = Source ] effects = Destroy EffectsGroup // shrink at same speed objects are pulled scope = Source effects = SetSize value = Target.Size - 5 EffectsGroup // collapse upon self when small enough scope = Source activation = Size high = 5 effects = Destroy ] graphic = "nebulae/nebula9.png"
{ "pile_set_name": "Github" }
{\rtf1\ansi\ansicpg1252\cocoartf1038\cocoasubrtf320 {\fonttbl\f0\fnil\fcharset0 Verdana;\f1\fnil\fcharset0 LucidaGrande;} {\colortbl;\red255\green255\blue255;\red73\green73\blue73;} {\*\listtable{\list\listtemplateid1\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid1\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid1}} {\*\listoverridetable{\listoverride\listid1\listoverridecount0\ls1}} \vieww9000\viewh8400\viewkind0 \deftab720 \pard\pardeftab720\sl400\sa280\ql\qnatural \f0\fs24 \cf2 Copyright (c) 2010, Oomph Inc. \f1 \uc0\u8232 \f0 All rights reserved.\ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\ \pard\tx220\tx720\pardeftab720\li720\fi-720\sl400\sa20\ql\qnatural \ls1\ilvl0\cf2 {\listtext \'95 }Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\ {\listtext \'95 }Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\ {\listtext \'95 }Neither the name of Oomph Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\ \pard\pardeftab720\sl400\sa280\ql\qnatural \cf2 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.}
{ "pile_set_name": "Github" }
c() => null;
{ "pile_set_name": "Github" }
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Windows system calls. package syscall import ( errorspkg "errors" "internal/race" "sync" "unicode/utf16" "unsafe" ) //go:generate go run mksyscall_windows.go -systemdll -output zsyscall_windows.go syscall_windows.go security_windows.go type Handle uintptr const InvalidHandle = ^Handle(0) // StringToUTF16 returns the UTF-16 encoding of the UTF-8 string s, // with a terminating NUL added. If s contains a NUL byte this // function panics instead of returning an error. // // Deprecated: Use UTF16FromString instead. func StringToUTF16(s string) []uint16 { a, err := UTF16FromString(s) if err != nil { panic("syscall: string with NUL passed to StringToUTF16") } return a } // UTF16FromString returns the UTF-16 encoding of the UTF-8 string // s, with a terminating NUL added. If s contains a NUL byte at any // location, it returns (nil, EINVAL). func UTF16FromString(s string) ([]uint16, error) { for i := 0; i < len(s); i++ { if s[i] == 0 { return nil, EINVAL } } return utf16.Encode([]rune(s + "\x00")), nil } // UTF16ToString returns the UTF-8 encoding of the UTF-16 sequence s, // with a terminating NUL removed. func UTF16ToString(s []uint16) string { for i, v := range s { if v == 0 { s = s[0:i] break } } return string(utf16.Decode(s)) } // StringToUTF16Ptr returns pointer to the UTF-16 encoding of // the UTF-8 string s, with a terminating NUL added. If s // If s contains a NUL byte this function panics instead of // returning an error. // // Deprecated: Use UTF16PtrFromString instead. func StringToUTF16Ptr(s string) *uint16 { return &StringToUTF16(s)[0] } // UTF16PtrFromString returns pointer to the UTF-16 encoding of // the UTF-8 string s, with a terminating NUL added. If s // contains a NUL byte at any location, it returns (nil, EINVAL). func UTF16PtrFromString(s string) (*uint16, error) { a, err := UTF16FromString(s) if err != nil { return nil, err } return &a[0], nil } func Getpagesize() int { return 4096 } // Errno is the Windows error number. type Errno uintptr func langid(pri, sub uint16) uint32 { return uint32(sub)<<10 | uint32(pri) } // FormatMessage is deprecated (msgsrc should be uintptr, not uint32, but can // not be changed due to the Go 1 compatibility guarantee). // // Deprecated: Use FormatMessage from golang.org/x/sys/windows instead. func FormatMessage(flags uint32, msgsrc uint32, msgid uint32, langid uint32, buf []uint16, args *byte) (n uint32, err error) { return formatMessage(flags, uintptr(msgsrc), msgid, langid, buf, args) } func (e Errno) Error() string { // deal with special go errors idx := int(e - APPLICATION_ERROR) if 0 <= idx && idx < len(errors) { return errors[idx] } // ask windows for the remaining errors var flags uint32 = FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_ARGUMENT_ARRAY | FORMAT_MESSAGE_IGNORE_INSERTS b := make([]uint16, 300) n, err := formatMessage(flags, 0, uint32(e), langid(LANG_ENGLISH, SUBLANG_ENGLISH_US), b, nil) if err != nil { n, err = formatMessage(flags, 0, uint32(e), 0, b, nil) if err != nil { return "winapi error #" + itoa(int(e)) } } // trim terminating \r and \n for ; n > 0 && (b[n-1] == '\n' || b[n-1] == '\r'); n-- { } return string(utf16.Decode(b[:n])) } func (e Errno) Temporary() bool { return e == EINTR || e == EMFILE || e.Timeout() } func (e Errno) Timeout() bool { return e == EAGAIN || e == EWOULDBLOCK || e == ETIMEDOUT } // Implemented in runtime/syscall_windows.go. 
func compileCallback(fn interface{}, cleanstack bool) uintptr // Converts a Go function to a function pointer conforming // to the stdcall calling convention. This is useful when // interoperating with Windows code requiring callbacks. func NewCallback(fn interface{}) uintptr { return compileCallback(fn, true) } // Converts a Go function to a function pointer conforming // to the cdecl calling convention. This is useful when // interoperating with Windows code requiring callbacks. func NewCallbackCDecl(fn interface{}) uintptr { return compileCallback(fn, false) } // windows api calls //sys GetLastError() (lasterr error) //sys LoadLibrary(libname string) (handle Handle, err error) = LoadLibraryW //sys FreeLibrary(handle Handle) (err error) //sys GetProcAddress(module Handle, procname string) (proc uintptr, err error) //sys GetVersion() (ver uint32, err error) //sys formatMessage(flags uint32, msgsrc uintptr, msgid uint32, langid uint32, buf []uint16, args *byte) (n uint32, err error) = FormatMessageW //sys ExitProcess(exitcode uint32) //sys CreateFile(name *uint16, access uint32, mode uint32, sa *SecurityAttributes, createmode uint32, attrs uint32, templatefile int32) (handle Handle, err error) [failretval==InvalidHandle] = CreateFileW //sys ReadFile(handle Handle, buf []byte, done *uint32, overlapped *Overlapped) (err error) //sys WriteFile(handle Handle, buf []byte, done *uint32, overlapped *Overlapped) (err error) //sys SetFilePointer(handle Handle, lowoffset int32, highoffsetptr *int32, whence uint32) (newlowoffset uint32, err error) [failretval==0xffffffff] //sys CloseHandle(handle Handle) (err error) //sys GetStdHandle(stdhandle int) (handle Handle, err error) [failretval==InvalidHandle] //sys findFirstFile1(name *uint16, data *win32finddata1) (handle Handle, err error) [failretval==InvalidHandle] = FindFirstFileW //sys findNextFile1(handle Handle, data *win32finddata1) (err error) = FindNextFileW //sys FindClose(handle Handle) (err error) //sys GetFileInformationByHandle(handle Handle, data *ByHandleFileInformation) (err error) //sys GetCurrentDirectory(buflen uint32, buf *uint16) (n uint32, err error) = GetCurrentDirectoryW //sys SetCurrentDirectory(path *uint16) (err error) = SetCurrentDirectoryW //sys CreateDirectory(path *uint16, sa *SecurityAttributes) (err error) = CreateDirectoryW //sys RemoveDirectory(path *uint16) (err error) = RemoveDirectoryW //sys DeleteFile(path *uint16) (err error) = DeleteFileW //sys MoveFile(from *uint16, to *uint16) (err error) = MoveFileW //sys GetComputerName(buf *uint16, n *uint32) (err error) = GetComputerNameW //sys SetEndOfFile(handle Handle) (err error) //sys GetSystemTimeAsFileTime(time *Filetime) //sys GetTimeZoneInformation(tzi *Timezoneinformation) (rc uint32, err error) [failretval==0xffffffff] //sys CreateIoCompletionPort(filehandle Handle, cphandle Handle, key uint32, threadcnt uint32) (handle Handle, err error) //sys GetQueuedCompletionStatus(cphandle Handle, qty *uint32, key *uint32, overlapped **Overlapped, timeout uint32) (err error) //sys PostQueuedCompletionStatus(cphandle Handle, qty uint32, key uint32, overlapped *Overlapped) (err error) //sys CancelIo(s Handle) (err error) //sys CancelIoEx(s Handle, o *Overlapped) (err error) //sys CreateProcess(appName *uint16, commandLine *uint16, procSecurity *SecurityAttributes, threadSecurity *SecurityAttributes, inheritHandles bool, creationFlags uint32, env *uint16, currentDir *uint16, startupInfo *StartupInfo, outProcInfo *ProcessInformation) (err error) = CreateProcessW //sys 
OpenProcess(da uint32, inheritHandle bool, pid uint32) (handle Handle, err error) //sys TerminateProcess(handle Handle, exitcode uint32) (err error) //sys GetExitCodeProcess(handle Handle, exitcode *uint32) (err error) //sys GetStartupInfo(startupInfo *StartupInfo) (err error) = GetStartupInfoW //sys GetCurrentProcess() (pseudoHandle Handle, err error) //sys GetProcessTimes(handle Handle, creationTime *Filetime, exitTime *Filetime, kernelTime *Filetime, userTime *Filetime) (err error) //sys DuplicateHandle(hSourceProcessHandle Handle, hSourceHandle Handle, hTargetProcessHandle Handle, lpTargetHandle *Handle, dwDesiredAccess uint32, bInheritHandle bool, dwOptions uint32) (err error) //sys WaitForSingleObject(handle Handle, waitMilliseconds uint32) (event uint32, err error) [failretval==0xffffffff] //sys GetTempPath(buflen uint32, buf *uint16) (n uint32, err error) = GetTempPathW //sys CreatePipe(readhandle *Handle, writehandle *Handle, sa *SecurityAttributes, size uint32) (err error) //sys GetFileType(filehandle Handle) (n uint32, err error) //sys CryptAcquireContext(provhandle *Handle, container *uint16, provider *uint16, provtype uint32, flags uint32) (err error) = advapi32.CryptAcquireContextW //sys CryptReleaseContext(provhandle Handle, flags uint32) (err error) = advapi32.CryptReleaseContext //sys CryptGenRandom(provhandle Handle, buflen uint32, buf *byte) (err error) = advapi32.CryptGenRandom //sys GetEnvironmentStrings() (envs *uint16, err error) [failretval==nil] = kernel32.GetEnvironmentStringsW //sys FreeEnvironmentStrings(envs *uint16) (err error) = kernel32.FreeEnvironmentStringsW //sys GetEnvironmentVariable(name *uint16, buffer *uint16, size uint32) (n uint32, err error) = kernel32.GetEnvironmentVariableW //sys SetEnvironmentVariable(name *uint16, value *uint16) (err error) = kernel32.SetEnvironmentVariableW //sys SetFileTime(handle Handle, ctime *Filetime, atime *Filetime, wtime *Filetime) (err error) //sys GetFileAttributes(name *uint16) (attrs uint32, err error) [failretval==INVALID_FILE_ATTRIBUTES] = kernel32.GetFileAttributesW //sys SetFileAttributes(name *uint16, attrs uint32) (err error) = kernel32.SetFileAttributesW //sys GetFileAttributesEx(name *uint16, level uint32, info *byte) (err error) = kernel32.GetFileAttributesExW //sys GetCommandLine() (cmd *uint16) = kernel32.GetCommandLineW //sys CommandLineToArgv(cmd *uint16, argc *int32) (argv *[8192]*[8192]uint16, err error) [failretval==nil] = shell32.CommandLineToArgvW //sys LocalFree(hmem Handle) (handle Handle, err error) [failretval!=0] //sys SetHandleInformation(handle Handle, mask uint32, flags uint32) (err error) //sys FlushFileBuffers(handle Handle) (err error) //sys GetFullPathName(path *uint16, buflen uint32, buf *uint16, fname **uint16) (n uint32, err error) = kernel32.GetFullPathNameW //sys GetLongPathName(path *uint16, buf *uint16, buflen uint32) (n uint32, err error) = kernel32.GetLongPathNameW //sys GetShortPathName(longpath *uint16, shortpath *uint16, buflen uint32) (n uint32, err error) = kernel32.GetShortPathNameW //sys CreateFileMapping(fhandle Handle, sa *SecurityAttributes, prot uint32, maxSizeHigh uint32, maxSizeLow uint32, name *uint16) (handle Handle, err error) = kernel32.CreateFileMappingW //sys MapViewOfFile(handle Handle, access uint32, offsetHigh uint32, offsetLow uint32, length uintptr) (addr uintptr, err error) //sys UnmapViewOfFile(addr uintptr) (err error) //sys FlushViewOfFile(addr uintptr, length uintptr) (err error) //sys VirtualLock(addr uintptr, length uintptr) (err error) //sys 
VirtualUnlock(addr uintptr, length uintptr) (err error) //sys TransmitFile(s Handle, handle Handle, bytesToWrite uint32, bytsPerSend uint32, overlapped *Overlapped, transmitFileBuf *TransmitFileBuffers, flags uint32) (err error) = mswsock.TransmitFile //sys ReadDirectoryChanges(handle Handle, buf *byte, buflen uint32, watchSubTree bool, mask uint32, retlen *uint32, overlapped *Overlapped, completionRoutine uintptr) (err error) = kernel32.ReadDirectoryChangesW //sys CertOpenSystemStore(hprov Handle, name *uint16) (store Handle, err error) = crypt32.CertOpenSystemStoreW //sys CertOpenStore(storeProvider uintptr, msgAndCertEncodingType uint32, cryptProv uintptr, flags uint32, para uintptr) (handle Handle, err error) [failretval==InvalidHandle] = crypt32.CertOpenStore //sys CertEnumCertificatesInStore(store Handle, prevContext *CertContext) (context *CertContext, err error) [failretval==nil] = crypt32.CertEnumCertificatesInStore //sys CertAddCertificateContextToStore(store Handle, certContext *CertContext, addDisposition uint32, storeContext **CertContext) (err error) = crypt32.CertAddCertificateContextToStore //sys CertCloseStore(store Handle, flags uint32) (err error) = crypt32.CertCloseStore //sys CertGetCertificateChain(engine Handle, leaf *CertContext, time *Filetime, additionalStore Handle, para *CertChainPara, flags uint32, reserved uintptr, chainCtx **CertChainContext) (err error) = crypt32.CertGetCertificateChain //sys CertFreeCertificateChain(ctx *CertChainContext) = crypt32.CertFreeCertificateChain //sys CertCreateCertificateContext(certEncodingType uint32, certEncoded *byte, encodedLen uint32) (context *CertContext, err error) [failretval==nil] = crypt32.CertCreateCertificateContext //sys CertFreeCertificateContext(ctx *CertContext) (err error) = crypt32.CertFreeCertificateContext //sys CertVerifyCertificateChainPolicy(policyOID uintptr, chain *CertChainContext, para *CertChainPolicyPara, status *CertChainPolicyStatus) (err error) = crypt32.CertVerifyCertificateChainPolicy //sys RegOpenKeyEx(key Handle, subkey *uint16, options uint32, desiredAccess uint32, result *Handle) (regerrno error) = advapi32.RegOpenKeyExW //sys RegCloseKey(key Handle) (regerrno error) = advapi32.RegCloseKey //sys RegQueryInfoKey(key Handle, class *uint16, classLen *uint32, reserved *uint32, subkeysLen *uint32, maxSubkeyLen *uint32, maxClassLen *uint32, valuesLen *uint32, maxValueNameLen *uint32, maxValueLen *uint32, saLen *uint32, lastWriteTime *Filetime) (regerrno error) = advapi32.RegQueryInfoKeyW //sys RegEnumKeyEx(key Handle, index uint32, name *uint16, nameLen *uint32, reserved *uint32, class *uint16, classLen *uint32, lastWriteTime *Filetime) (regerrno error) = advapi32.RegEnumKeyExW //sys RegQueryValueEx(key Handle, name *uint16, reserved *uint32, valtype *uint32, buf *byte, buflen *uint32) (regerrno error) = advapi32.RegQueryValueExW //sys getCurrentProcessId() (pid uint32) = kernel32.GetCurrentProcessId //sys GetConsoleMode(console Handle, mode *uint32) (err error) = kernel32.GetConsoleMode //sys WriteConsole(console Handle, buf *uint16, towrite uint32, written *uint32, reserved *byte) (err error) = kernel32.WriteConsoleW //sys ReadConsole(console Handle, buf *uint16, toread uint32, read *uint32, inputControl *byte) (err error) = kernel32.ReadConsoleW //sys CreateToolhelp32Snapshot(flags uint32, processId uint32) (handle Handle, err error) [failretval==InvalidHandle] = kernel32.CreateToolhelp32Snapshot //sys Process32First(snapshot Handle, procEntry *ProcessEntry32) (err error) = 
kernel32.Process32FirstW //sys Process32Next(snapshot Handle, procEntry *ProcessEntry32) (err error) = kernel32.Process32NextW //sys DeviceIoControl(handle Handle, ioControlCode uint32, inBuffer *byte, inBufferSize uint32, outBuffer *byte, outBufferSize uint32, bytesReturned *uint32, overlapped *Overlapped) (err error) // This function returns 1 byte BOOLEAN rather than the 4 byte BOOL. //sys CreateSymbolicLink(symlinkfilename *uint16, targetfilename *uint16, flags uint32) (err error) [failretval&0xff==0] = CreateSymbolicLinkW //sys CreateHardLink(filename *uint16, existingfilename *uint16, reserved uintptr) (err error) [failretval&0xff==0] = CreateHardLinkW // syscall interface implementation for other packages func Exit(code int) { ExitProcess(uint32(code)) } func makeInheritSa() *SecurityAttributes { var sa SecurityAttributes sa.Length = uint32(unsafe.Sizeof(sa)) sa.InheritHandle = 1 return &sa } func Open(path string, mode int, perm uint32) (fd Handle, err error) { if len(path) == 0 { return InvalidHandle, ERROR_FILE_NOT_FOUND } pathp, err := UTF16PtrFromString(path) if err != nil { return InvalidHandle, err } var access uint32 switch mode & (O_RDONLY | O_WRONLY | O_RDWR) { case O_RDONLY: access = GENERIC_READ case O_WRONLY: access = GENERIC_WRITE case O_RDWR: access = GENERIC_READ | GENERIC_WRITE } if mode&O_CREAT != 0 { access |= GENERIC_WRITE } if mode&O_APPEND != 0 { access &^= GENERIC_WRITE access |= FILE_APPEND_DATA } sharemode := uint32(FILE_SHARE_READ | FILE_SHARE_WRITE) var sa *SecurityAttributes if mode&O_CLOEXEC == 0 { sa = makeInheritSa() } var createmode uint32 switch { case mode&(O_CREAT|O_EXCL) == (O_CREAT | O_EXCL): createmode = CREATE_NEW case mode&(O_CREAT|O_TRUNC) == (O_CREAT | O_TRUNC): createmode = CREATE_ALWAYS case mode&O_CREAT == O_CREAT: createmode = OPEN_ALWAYS case mode&O_TRUNC == O_TRUNC: createmode = TRUNCATE_EXISTING default: createmode = OPEN_EXISTING } h, e := CreateFile(pathp, access, sharemode, sa, createmode, FILE_ATTRIBUTE_NORMAL, 0) return h, e } func Read(fd Handle, p []byte) (n int, err error) { var done uint32 e := ReadFile(fd, p, &done, nil) if e != nil { if e == ERROR_BROKEN_PIPE { // NOTE(brainman): work around ERROR_BROKEN_PIPE is returned on reading EOF from stdin return 0, nil } return 0, e } if race.Enabled { if done > 0 { race.WriteRange(unsafe.Pointer(&p[0]), int(done)) } race.Acquire(unsafe.Pointer(&ioSync)) } if msanenabled && done > 0 { msanWrite(unsafe.Pointer(&p[0]), int(done)) } return int(done), nil } func Write(fd Handle, p []byte) (n int, err error) { if race.Enabled { race.ReleaseMerge(unsafe.Pointer(&ioSync)) } var done uint32 e := WriteFile(fd, p, &done, nil) if e != nil { return 0, e } if race.Enabled && done > 0 { race.ReadRange(unsafe.Pointer(&p[0]), int(done)) } if msanenabled && done > 0 { msanRead(unsafe.Pointer(&p[0]), int(done)) } return int(done), nil } var ioSync int64 func Seek(fd Handle, offset int64, whence int) (newoffset int64, err error) { var w uint32 switch whence { case 0: w = FILE_BEGIN case 1: w = FILE_CURRENT case 2: w = FILE_END } hi := int32(offset >> 32) lo := int32(offset) // use GetFileType to check pipe, pipe can't do seek ft, _ := GetFileType(fd) if ft == FILE_TYPE_PIPE { return 0, EPIPE } rlo, e := SetFilePointer(fd, lo, &hi, w) if e != nil { return 0, e } return int64(hi)<<32 + int64(rlo), nil } func Close(fd Handle) (err error) { return CloseHandle(fd) } var ( Stdin = getStdHandle(STD_INPUT_HANDLE) Stdout = getStdHandle(STD_OUTPUT_HANDLE) Stderr = getStdHandle(STD_ERROR_HANDLE) ) func 
getStdHandle(h int) (fd Handle) { r, _ := GetStdHandle(h) CloseOnExec(r) return r } const ImplementsGetwd = true func Getwd() (wd string, err error) { b := make([]uint16, 300) n, e := GetCurrentDirectory(uint32(len(b)), &b[0]) if e != nil { return "", e } return string(utf16.Decode(b[0:n])), nil } func Chdir(path string) (err error) { pathp, err := UTF16PtrFromString(path) if err != nil { return err } return SetCurrentDirectory(pathp) } func Mkdir(path string, mode uint32) (err error) { pathp, err := UTF16PtrFromString(path) if err != nil { return err } return CreateDirectory(pathp, nil) } func Rmdir(path string) (err error) { pathp, err := UTF16PtrFromString(path) if err != nil { return err } return RemoveDirectory(pathp) } func Unlink(path string) (err error) { pathp, err := UTF16PtrFromString(path) if err != nil { return err } return DeleteFile(pathp) } func Rename(oldpath, newpath string) (err error) { from, err := UTF16PtrFromString(oldpath) if err != nil { return err } to, err := UTF16PtrFromString(newpath) if err != nil { return err } return MoveFile(from, to) } func ComputerName() (name string, err error) { var n uint32 = MAX_COMPUTERNAME_LENGTH + 1 b := make([]uint16, n) e := GetComputerName(&b[0], &n) if e != nil { return "", e } return string(utf16.Decode(b[0:n])), nil } func Ftruncate(fd Handle, length int64) (err error) { curoffset, e := Seek(fd, 0, 1) if e != nil { return e } defer Seek(fd, curoffset, 0) _, e = Seek(fd, length, 0) if e != nil { return e } e = SetEndOfFile(fd) if e != nil { return e } return nil } func Gettimeofday(tv *Timeval) (err error) { var ft Filetime GetSystemTimeAsFileTime(&ft) *tv = NsecToTimeval(ft.Nanoseconds()) return nil } func Pipe(p []Handle) (err error) { if len(p) != 2 { return EINVAL } var r, w Handle e := CreatePipe(&r, &w, makeInheritSa(), 0) if e != nil { return e } p[0] = r p[1] = w return nil } func Utimes(path string, tv []Timeval) (err error) { if len(tv) != 2 { return EINVAL } pathp, e := UTF16PtrFromString(path) if e != nil { return e } h, e := CreateFile(pathp, FILE_WRITE_ATTRIBUTES, FILE_SHARE_WRITE, nil, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, 0) if e != nil { return e } defer Close(h) a := NsecToFiletime(tv[0].Nanoseconds()) w := NsecToFiletime(tv[1].Nanoseconds()) return SetFileTime(h, nil, &a, &w) } func UtimesNano(path string, ts []Timespec) (err error) { if len(ts) != 2 { return EINVAL } pathp, e := UTF16PtrFromString(path) if e != nil { return e } h, e := CreateFile(pathp, FILE_WRITE_ATTRIBUTES, FILE_SHARE_WRITE, nil, OPEN_EXISTING, FILE_FLAG_BACKUP_SEMANTICS, 0) if e != nil { return e } defer Close(h) a := NsecToFiletime(TimespecToNsec(ts[0])) w := NsecToFiletime(TimespecToNsec(ts[1])) return SetFileTime(h, nil, &a, &w) } func Fsync(fd Handle) (err error) { return FlushFileBuffers(fd) } func Chmod(path string, mode uint32) (err error) { if mode == 0 { return EINVAL } p, e := UTF16PtrFromString(path) if e != nil { return e } attrs, e := GetFileAttributes(p) if e != nil { return e } if mode&S_IWRITE != 0 { attrs &^= FILE_ATTRIBUTE_READONLY } else { attrs |= FILE_ATTRIBUTE_READONLY } return SetFileAttributes(p, attrs) } func LoadCancelIoEx() error { return procCancelIoEx.Find() } func LoadSetFileCompletionNotificationModes() error { return procSetFileCompletionNotificationModes.Find() } // net api calls const socket_error = uintptr(^uint32(0)) //sys WSAStartup(verreq uint32, data *WSAData) (sockerr error) = ws2_32.WSAStartup //sys WSACleanup() (err error) [failretval==socket_error] = ws2_32.WSACleanup //sys WSAIoctl(s 
Handle, iocc uint32, inbuf *byte, cbif uint32, outbuf *byte, cbob uint32, cbbr *uint32, overlapped *Overlapped, completionRoutine uintptr) (err error) [failretval==socket_error] = ws2_32.WSAIoctl //sys socket(af int32, typ int32, protocol int32) (handle Handle, err error) [failretval==InvalidHandle] = ws2_32.socket //sys Setsockopt(s Handle, level int32, optname int32, optval *byte, optlen int32) (err error) [failretval==socket_error] = ws2_32.setsockopt //sys Getsockopt(s Handle, level int32, optname int32, optval *byte, optlen *int32) (err error) [failretval==socket_error] = ws2_32.getsockopt //sys bind(s Handle, name unsafe.Pointer, namelen int32) (err error) [failretval==socket_error] = ws2_32.bind //sys connect(s Handle, name unsafe.Pointer, namelen int32) (err error) [failretval==socket_error] = ws2_32.connect //sys getsockname(s Handle, rsa *RawSockaddrAny, addrlen *int32) (err error) [failretval==socket_error] = ws2_32.getsockname //sys getpeername(s Handle, rsa *RawSockaddrAny, addrlen *int32) (err error) [failretval==socket_error] = ws2_32.getpeername //sys listen(s Handle, backlog int32) (err error) [failretval==socket_error] = ws2_32.listen //sys shutdown(s Handle, how int32) (err error) [failretval==socket_error] = ws2_32.shutdown //sys Closesocket(s Handle) (err error) [failretval==socket_error] = ws2_32.closesocket //sys AcceptEx(ls Handle, as Handle, buf *byte, rxdatalen uint32, laddrlen uint32, raddrlen uint32, recvd *uint32, overlapped *Overlapped) (err error) = mswsock.AcceptEx //sys GetAcceptExSockaddrs(buf *byte, rxdatalen uint32, laddrlen uint32, raddrlen uint32, lrsa **RawSockaddrAny, lrsalen *int32, rrsa **RawSockaddrAny, rrsalen *int32) = mswsock.GetAcceptExSockaddrs //sys WSARecv(s Handle, bufs *WSABuf, bufcnt uint32, recvd *uint32, flags *uint32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSARecv //sys WSASend(s Handle, bufs *WSABuf, bufcnt uint32, sent *uint32, flags uint32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSASend //sys WSARecvFrom(s Handle, bufs *WSABuf, bufcnt uint32, recvd *uint32, flags *uint32, from *RawSockaddrAny, fromlen *int32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSARecvFrom //sys WSASendTo(s Handle, bufs *WSABuf, bufcnt uint32, sent *uint32, flags uint32, to *RawSockaddrAny, tolen int32, overlapped *Overlapped, croutine *byte) (err error) [failretval==socket_error] = ws2_32.WSASendTo //sys GetHostByName(name string) (h *Hostent, err error) [failretval==nil] = ws2_32.gethostbyname //sys GetServByName(name string, proto string) (s *Servent, err error) [failretval==nil] = ws2_32.getservbyname //sys Ntohs(netshort uint16) (u uint16) = ws2_32.ntohs //sys GetProtoByName(name string) (p *Protoent, err error) [failretval==nil] = ws2_32.getprotobyname //sys DnsQuery(name string, qtype uint16, options uint32, extra *byte, qrs **DNSRecord, pr *byte) (status error) = dnsapi.DnsQuery_W //sys DnsRecordListFree(rl *DNSRecord, freetype uint32) = dnsapi.DnsRecordListFree //sys DnsNameCompare(name1 *uint16, name2 *uint16) (same bool) = dnsapi.DnsNameCompare_W //sys GetAddrInfoW(nodename *uint16, servicename *uint16, hints *AddrinfoW, result **AddrinfoW) (sockerr error) = ws2_32.GetAddrInfoW //sys FreeAddrInfoW(addrinfo *AddrinfoW) = ws2_32.FreeAddrInfoW //sys GetIfEntry(pIfRow *MibIfRow) (errcode error) = iphlpapi.GetIfEntry //sys GetAdaptersInfo(ai *IpAdapterInfo, ol *uint32) (errcode error) = iphlpapi.GetAdaptersInfo 
//sys SetFileCompletionNotificationModes(handle Handle, flags uint8) (err error) = kernel32.SetFileCompletionNotificationModes //sys WSAEnumProtocols(protocols *int32, protocolBuffer *WSAProtocolInfo, bufferLength *uint32) (n int32, err error) [failretval==-1] = ws2_32.WSAEnumProtocolsW // For testing: clients can set this flag to force // creation of IPv6 sockets to return EAFNOSUPPORT. var SocketDisableIPv6 bool type RawSockaddrInet4 struct { Family uint16 Port uint16 Addr [4]byte /* in_addr */ Zero [8]uint8 } type RawSockaddrInet6 struct { Family uint16 Port uint16 Flowinfo uint32 Addr [16]byte /* in6_addr */ Scope_id uint32 } type RawSockaddr struct { Family uint16 Data [14]int8 } type RawSockaddrAny struct { Addr RawSockaddr Pad [96]int8 } type Sockaddr interface { sockaddr() (ptr unsafe.Pointer, len int32, err error) // lowercase; only we can define Sockaddrs } type SockaddrInet4 struct { Port int Addr [4]byte raw RawSockaddrInet4 } func (sa *SockaddrInet4) sockaddr() (unsafe.Pointer, int32, error) { if sa.Port < 0 || sa.Port > 0xFFFF { return nil, 0, EINVAL } sa.raw.Family = AF_INET p := (*[2]byte)(unsafe.Pointer(&sa.raw.Port)) p[0] = byte(sa.Port >> 8) p[1] = byte(sa.Port) for i := 0; i < len(sa.Addr); i++ { sa.raw.Addr[i] = sa.Addr[i] } return unsafe.Pointer(&sa.raw), int32(unsafe.Sizeof(sa.raw)), nil } type SockaddrInet6 struct { Port int ZoneId uint32 Addr [16]byte raw RawSockaddrInet6 } func (sa *SockaddrInet6) sockaddr() (unsafe.Pointer, int32, error) { if sa.Port < 0 || sa.Port > 0xFFFF { return nil, 0, EINVAL } sa.raw.Family = AF_INET6 p := (*[2]byte)(unsafe.Pointer(&sa.raw.Port)) p[0] = byte(sa.Port >> 8) p[1] = byte(sa.Port) sa.raw.Scope_id = sa.ZoneId for i := 0; i < len(sa.Addr); i++ { sa.raw.Addr[i] = sa.Addr[i] } return unsafe.Pointer(&sa.raw), int32(unsafe.Sizeof(sa.raw)), nil } type SockaddrUnix struct { Name string } func (sa *SockaddrUnix) sockaddr() (unsafe.Pointer, int32, error) { // TODO(brainman): implement SockaddrUnix.sockaddr() return nil, 0, EWINDOWS } func (rsa *RawSockaddrAny) Sockaddr() (Sockaddr, error) { switch rsa.Addr.Family { case AF_UNIX: return nil, EWINDOWS case AF_INET: pp := (*RawSockaddrInet4)(unsafe.Pointer(rsa)) sa := new(SockaddrInet4) p := (*[2]byte)(unsafe.Pointer(&pp.Port)) sa.Port = int(p[0])<<8 + int(p[1]) for i := 0; i < len(sa.Addr); i++ { sa.Addr[i] = pp.Addr[i] } return sa, nil case AF_INET6: pp := (*RawSockaddrInet6)(unsafe.Pointer(rsa)) sa := new(SockaddrInet6) p := (*[2]byte)(unsafe.Pointer(&pp.Port)) sa.Port = int(p[0])<<8 + int(p[1]) sa.ZoneId = pp.Scope_id for i := 0; i < len(sa.Addr); i++ { sa.Addr[i] = pp.Addr[i] } return sa, nil } return nil, EAFNOSUPPORT } func Socket(domain, typ, proto int) (fd Handle, err error) { if domain == AF_INET6 && SocketDisableIPv6 { return InvalidHandle, EAFNOSUPPORT } return socket(int32(domain), int32(typ), int32(proto)) } func SetsockoptInt(fd Handle, level, opt int, value int) (err error) { v := int32(value) return Setsockopt(fd, int32(level), int32(opt), (*byte)(unsafe.Pointer(&v)), int32(unsafe.Sizeof(v))) } func Bind(fd Handle, sa Sockaddr) (err error) { ptr, n, err := sa.sockaddr() if err != nil { return err } return bind(fd, ptr, n) } func Connect(fd Handle, sa Sockaddr) (err error) { ptr, n, err := sa.sockaddr() if err != nil { return err } return connect(fd, ptr, n) } func Getsockname(fd Handle) (sa Sockaddr, err error) { var rsa RawSockaddrAny l := int32(unsafe.Sizeof(rsa)) if err = getsockname(fd, &rsa, &l); err != nil { return } return rsa.Sockaddr() } func Getpeername(fd Handle) 
(sa Sockaddr, err error) { var rsa RawSockaddrAny l := int32(unsafe.Sizeof(rsa)) if err = getpeername(fd, &rsa, &l); err != nil { return } return rsa.Sockaddr() } func Listen(s Handle, n int) (err error) { return listen(s, int32(n)) } func Shutdown(fd Handle, how int) (err error) { return shutdown(fd, int32(how)) } func WSASendto(s Handle, bufs *WSABuf, bufcnt uint32, sent *uint32, flags uint32, to Sockaddr, overlapped *Overlapped, croutine *byte) (err error) { rsa, l, err := to.sockaddr() if err != nil { return err } return WSASendTo(s, bufs, bufcnt, sent, flags, (*RawSockaddrAny)(unsafe.Pointer(rsa)), l, overlapped, croutine) } func LoadGetAddrInfo() error { return procGetAddrInfoW.Find() } var connectExFunc struct { once sync.Once addr uintptr err error } func LoadConnectEx() error { connectExFunc.once.Do(func() { var s Handle s, connectExFunc.err = Socket(AF_INET, SOCK_STREAM, IPPROTO_TCP) if connectExFunc.err != nil { return } defer CloseHandle(s) var n uint32 connectExFunc.err = WSAIoctl(s, SIO_GET_EXTENSION_FUNCTION_POINTER, (*byte)(unsafe.Pointer(&WSAID_CONNECTEX)), uint32(unsafe.Sizeof(WSAID_CONNECTEX)), (*byte)(unsafe.Pointer(&connectExFunc.addr)), uint32(unsafe.Sizeof(connectExFunc.addr)), &n, nil, 0) }) return connectExFunc.err } func connectEx(s Handle, name unsafe.Pointer, namelen int32, sendBuf *byte, sendDataLen uint32, bytesSent *uint32, overlapped *Overlapped) (err error) { r1, _, e1 := Syscall9(connectExFunc.addr, 7, uintptr(s), uintptr(name), uintptr(namelen), uintptr(unsafe.Pointer(sendBuf)), uintptr(sendDataLen), uintptr(unsafe.Pointer(bytesSent)), uintptr(unsafe.Pointer(overlapped)), 0, 0) if r1 == 0 { if e1 != 0 { err = error(e1) } else { err = EINVAL } } return } func ConnectEx(fd Handle, sa Sockaddr, sendBuf *byte, sendDataLen uint32, bytesSent *uint32, overlapped *Overlapped) error { err := LoadConnectEx() if err != nil { return errorspkg.New("failed to find ConnectEx: " + err.Error()) } ptr, n, err := sa.sockaddr() if err != nil { return err } return connectEx(fd, ptr, n, sendBuf, sendDataLen, bytesSent, overlapped) } // Invented structures to support what package os expects. type Rusage struct { CreationTime Filetime ExitTime Filetime KernelTime Filetime UserTime Filetime } type WaitStatus struct { ExitCode uint32 } func (w WaitStatus) Exited() bool { return true } func (w WaitStatus) ExitStatus() int { return int(w.ExitCode) } func (w WaitStatus) Signal() Signal { return -1 } func (w WaitStatus) CoreDump() bool { return false } func (w WaitStatus) Stopped() bool { return false } func (w WaitStatus) Continued() bool { return false } func (w WaitStatus) StopSignal() Signal { return -1 } func (w WaitStatus) Signaled() bool { return false } func (w WaitStatus) TrapCause() int { return -1 } // Timespec is an invented structure on Windows, but here for // consistency with the syscall package for other operating systems. 
type Timespec struct { Sec int64 Nsec int64 } func TimespecToNsec(ts Timespec) int64 { return int64(ts.Sec)*1e9 + int64(ts.Nsec) } func NsecToTimespec(nsec int64) (ts Timespec) { ts.Sec = nsec / 1e9 ts.Nsec = nsec % 1e9 return } // TODO(brainman): fix all needed for net func Accept(fd Handle) (nfd Handle, sa Sockaddr, err error) { return 0, nil, EWINDOWS } func Recvfrom(fd Handle, p []byte, flags int) (n int, from Sockaddr, err error) { return 0, nil, EWINDOWS } func Sendto(fd Handle, p []byte, flags int, to Sockaddr) (err error) { return EWINDOWS } func SetsockoptTimeval(fd Handle, level, opt int, tv *Timeval) (err error) { return EWINDOWS } // The Linger struct is wrong but we only noticed after Go 1. // sysLinger is the real system call structure. // BUG(brainman): The definition of Linger is not appropriate for direct use // with Setsockopt and Getsockopt. // Use SetsockoptLinger instead. type Linger struct { Onoff int32 Linger int32 } type sysLinger struct { Onoff uint16 Linger uint16 } type IPMreq struct { Multiaddr [4]byte /* in_addr */ Interface [4]byte /* in_addr */ } type IPv6Mreq struct { Multiaddr [16]byte /* in6_addr */ Interface uint32 } func GetsockoptInt(fd Handle, level, opt int) (int, error) { return -1, EWINDOWS } func SetsockoptLinger(fd Handle, level, opt int, l *Linger) (err error) { sys := sysLinger{Onoff: uint16(l.Onoff), Linger: uint16(l.Linger)} return Setsockopt(fd, int32(level), int32(opt), (*byte)(unsafe.Pointer(&sys)), int32(unsafe.Sizeof(sys))) } func SetsockoptInet4Addr(fd Handle, level, opt int, value [4]byte) (err error) { return Setsockopt(fd, int32(level), int32(opt), (*byte)(unsafe.Pointer(&value[0])), 4) } func SetsockoptIPMreq(fd Handle, level, opt int, mreq *IPMreq) (err error) { return Setsockopt(fd, int32(level), int32(opt), (*byte)(unsafe.Pointer(mreq)), int32(unsafe.Sizeof(*mreq))) } func SetsockoptIPv6Mreq(fd Handle, level, opt int, mreq *IPv6Mreq) (err error) { return EWINDOWS } func Getpid() (pid int) { return int(getCurrentProcessId()) } func FindFirstFile(name *uint16, data *Win32finddata) (handle Handle, err error) { // NOTE(rsc): The Win32finddata struct is wrong for the system call: // the two paths are each one uint16 short. Use the correct struct, // a win32finddata1, and then copy the results out. // There is no loss of expressivity here, because the final // uint16, if it is used, is supposed to be a NUL, and Go doesn't need that. // For Go 1.1, we might avoid the allocation of win32finddata1 here // by adding a final Bug [2]uint16 field to the struct and then // adjusting the fields in the result directly. 
var data1 win32finddata1 handle, err = findFirstFile1(name, &data1) if err == nil { copyFindData(data, &data1) } return } func FindNextFile(handle Handle, data *Win32finddata) (err error) { var data1 win32finddata1 err = findNextFile1(handle, &data1) if err == nil { copyFindData(data, &data1) } return } func getProcessEntry(pid int) (*ProcessEntry32, error) { snapshot, err := CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0) if err != nil { return nil, err } defer CloseHandle(snapshot) var procEntry ProcessEntry32 procEntry.Size = uint32(unsafe.Sizeof(procEntry)) if err = Process32First(snapshot, &procEntry); err != nil { return nil, err } for { if procEntry.ProcessID == uint32(pid) { return &procEntry, nil } err = Process32Next(snapshot, &procEntry) if err != nil { return nil, err } } } func Getppid() (ppid int) { pe, err := getProcessEntry(Getpid()) if err != nil { return -1 } return int(pe.ParentProcessID) } // TODO(brainman): fix all needed for os func Fchdir(fd Handle) (err error) { return EWINDOWS } func Link(oldpath, newpath string) (err error) { return EWINDOWS } func Symlink(path, link string) (err error) { return EWINDOWS } func Fchmod(fd Handle, mode uint32) (err error) { return EWINDOWS } func Chown(path string, uid int, gid int) (err error) { return EWINDOWS } func Lchown(path string, uid int, gid int) (err error) { return EWINDOWS } func Fchown(fd Handle, uid int, gid int) (err error) { return EWINDOWS } func Getuid() (uid int) { return -1 } func Geteuid() (euid int) { return -1 } func Getgid() (gid int) { return -1 } func Getegid() (egid int) { return -1 } func Getgroups() (gids []int, err error) { return nil, EWINDOWS } type Signal int func (s Signal) Signal() {} func (s Signal) String() string { if 0 <= s && int(s) < len(signals) { str := signals[s] if str != "" { return str } } return "signal " + itoa(int(s)) } func LoadCreateSymbolicLink() error { return procCreateSymbolicLinkW.Find() } // Readlink returns the destination of the named symbolic link. func Readlink(path string, buf []byte) (n int, err error) { fd, err := CreateFile(StringToUTF16Ptr(path), GENERIC_READ, 0, nil, OPEN_EXISTING, FILE_FLAG_OPEN_REPARSE_POINT|FILE_FLAG_BACKUP_SEMANTICS, 0) if err != nil { return -1, err } defer CloseHandle(fd) rdbbuf := make([]byte, MAXIMUM_REPARSE_DATA_BUFFER_SIZE) var bytesReturned uint32 err = DeviceIoControl(fd, FSCTL_GET_REPARSE_POINT, nil, 0, &rdbbuf[0], uint32(len(rdbbuf)), &bytesReturned, nil) if err != nil { return -1, err } rdb := (*reparseDataBuffer)(unsafe.Pointer(&rdbbuf[0])) var s string switch rdb.ReparseTag { case IO_REPARSE_TAG_SYMLINK: data := (*symbolicLinkReparseBuffer)(unsafe.Pointer(&rdb.reparseBuffer)) p := (*[0xffff]uint16)(unsafe.Pointer(&data.PathBuffer[0])) s = UTF16ToString(p[data.PrintNameOffset/2 : (data.PrintNameLength-data.PrintNameOffset)/2]) case _IO_REPARSE_TAG_MOUNT_POINT: data := (*mountPointReparseBuffer)(unsafe.Pointer(&rdb.reparseBuffer)) p := (*[0xffff]uint16)(unsafe.Pointer(&data.PathBuffer[0])) s = UTF16ToString(p[data.PrintNameOffset/2 : (data.PrintNameLength-data.PrintNameOffset)/2]) default: // the path is not a symlink or junction but another type of reparse // point return -1, ENOENT } n = copy(buf, []byte(s)) return n, nil }
{ "pile_set_name": "Github" }
class Api::V1::AccountsController < Api::V1::ApiController before_action -> { doorkeeper_authorize! :account } def me render json: current_resource_owner.as_json end end
{ "pile_set_name": "Github" }
{ "_from": "ansi-escapes@^3.0.0", "_id": "[email protected]", "_inBundle": false, "_integrity": "sha512-UgAb8H9D41AQnu/PbWlCofQVcnV4Gs2bBJi9eZPxfU/hgglFh3SMDMENRIqdr7H6XFnXdoknctFByVsCOotTVw==", "_location": "/ansi-escapes", "_phantomChildren": {}, "_requested": { "type": "range", "registry": true, "raw": "ansi-escapes@^3.0.0", "name": "ansi-escapes", "escapedName": "ansi-escapes", "rawSpec": "^3.0.0", "saveSpec": null, "fetchSpec": "^3.0.0" }, "_requiredBy": [ "/inquirer" ], "_resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.1.0.tgz", "_shasum": "f73207bb81207d75fd6c83f125af26eea378ca30", "_spec": "ansi-escapes@^3.0.0", "_where": "/Users/sunxin/DOClever/node_modules/inquirer", "author": { "name": "Sindre Sorhus", "email": "[email protected]", "url": "sindresorhus.com" }, "bugs": { "url": "https://github.com/sindresorhus/ansi-escapes/issues" }, "bundleDependencies": false, "deprecated": false, "description": "ANSI escape codes for manipulating the terminal", "devDependencies": { "ava": "*", "xo": "*" }, "engines": { "node": ">=4" }, "files": [ "index.js" ], "homepage": "https://github.com/sindresorhus/ansi-escapes#readme", "keywords": [ "ansi", "terminal", "console", "cli", "string", "tty", "escape", "escapes", "formatting", "shell", "xterm", "log", "logging", "command-line", "text", "vt100", "sequence", "control", "code", "codes", "cursor", "iterm", "iterm2" ], "license": "MIT", "name": "ansi-escapes", "repository": { "type": "git", "url": "git+https://github.com/sindresorhus/ansi-escapes.git" }, "scripts": { "test": "xo && ava" }, "version": "3.1.0" }
{ "pile_set_name": "Github" }
ACROS Security 0patch (0PatchServicex64.exe) Unquoted Service Path Privilege Escalation Vendor: ACROS, d.o.o. Product web page: https://www.0patch.com Affected version: 2016.05.19.539 Summary: 0patch (pronounced 'zero patch') is a platform for instantly distributing, applying and removing microscopic binary patches to/from running processes without having to restart these processes (much less reboot the entire computer). Desc: The application suffers from an unquoted search path issue impacting the service '0patchservice' for Windows deployed as part of 0patch solution. This could potentially allow an authorized but non-privileged local user to execute arbitrary code with elevated privileges on the system. A successful attempt would require the local user to be able to insert their code in the system root path undetected by the OS or other security applications where it could potentially be executed during application startup or reboot. If successful, the local user’s code would execute with the elevated privileges of the application. Tested on: Microsoft Windows 7 Ultimate SP1 (EN) Microsoft Windows 7 Professional SP1 (EN) Vulnerability discovered by Gjoko 'LiquidWorm' Krstic @zeroscience Advisory ID: ZSL-2016-5331 Advisory URL: http://www.zeroscience.mk/en/vulnerabilities/ZSL-2016-5331.php Vendor: https://0patch.blogspot.com/2016/06/new-release-0patch-agent-20160614850.html 08.06.2016 -- C:\>sc qc 0patchservice [SC] QueryServiceConfig SUCCESS SERVICE_NAME: 0patchservice TYPE : 10 WIN32_OWN_PROCESS START_TYPE : 2 AUTO_START ERROR_CONTROL : 1 NORMAL BINARY_PATH_NAME : C:\Program Files (x86)\0patch\Agent\0PatchServicex64.exe LOAD_ORDER_GROUP : TAG : 0 DISPLAY_NAME : 0patch Service DEPENDENCIES : SERVICE_START_NAME : LocalSystem C:\>cacls "C:\Program Files (x86)\0patch\Agent\0PatchServicex64.exe" C:\Program Files (x86)\0patch\Agent\0patchServicex64.exe NT AUTHORITY\SYSTEM:(ID)F BUILTIN\Administrators:(ID)F BUILTIN\Users:(ID)R C:\>
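To make the search-path ambiguity concrete, the sketch below is not part of the original advisory: it only models the order in which CreateProcess is documented to probe candidate executables for a space-containing, unquoted image path, using the BINARY_PATH_NAME from the sc qc output above. It prints paths and performs no privileged action.

# Illustrative sketch (Python 3); the probing order is a rough model of the
# behavior described in Microsoft's CreateProcess documentation.
def candidate_executables(unquoted_path):
    # Windows tries each space-delimited prefix as an executable before the full path.
    parts = unquoted_path.split(" ")
    candidates = []
    for i in range(1, len(parts) + 1):
        cand = " ".join(parts[:i])
        if not cand.lower().endswith(".exe"):
            cand += ".exe"
        candidates.append(cand)
    return candidates

if __name__ == "__main__":
    path = r"C:\Program Files (x86)\0patch\Agent\0PatchServicex64.exe"
    for c in candidate_executables(path):
        print(c)
    # Prints C:\Program.exe, then C:\Program Files.exe, then the intended binary.
    # A non-privileged user who can create either of the first two candidates
    # would have their code started as LocalSystem when the service launches.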
{ "pile_set_name": "Github" }
# Configure paths for libopusenc # Jean-Marc Valin <[email protected]> 11-12-2017 # Jack Moffitt <[email protected]> 10-21-2000 # Shamelessly stolen from Owen Taylor and Manish Singh dnl XIPH_PATH_LIBOPUSENC([ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND]]) dnl Test for libopusenc, and define LIBOPUSENC_CFLAGS and LIBOPUSENC_LIBS dnl AC_DEFUN([XIPH_PATH_LIBOPUSENC], [dnl dnl Get the cflags and libraries dnl AC_ARG_WITH(libopusenc,AC_HELP_STRING([--with-libopusenc=PFX],[Prefix where libopusenc is installed (optional)]), libopusenc_prefix="$withval", libopusenc_prefix="") AC_ARG_WITH(libopusenc-libraries,AC_HELP_STRING([--with-libopusenc-libraries=DIR],[Directory where libopusenc library is installed (optional)]), libopusenc_libraries="$withval", libopusenc_libraries="") AC_ARG_WITH(libopusenc-includes,AC_HELP_STRING([--with-libopusenc-includes=DIR],[Directory where libopusenc header files are installed (optional)]), libopusenc_includes="$withval", libopusenc_includes="") AC_ARG_ENABLE(libopusenctest,AC_HELP_STRING([--disable-libopusenctest],[Do not try to compile and run a test libopusenc program]),, enable_libopusenctest=yes) if test "x$libopusenc_libraries" != "x" ; then LIBOPUSENC_LIBS="-L$libopusenc_libraries" elif test "x$libopusenc_prefix" = "xno" || test "x$libopusenc_prefix" = "xyes" ; then LIBOPUSENC_LIBS="" elif test "x$libopusenc_prefix" != "x" ; then LIBOPUSENC_LIBS="-L$libopusenc_prefix/lib" elif test "x$prefix" != "xNONE" ; then LIBOPUSENC_LIBS="-L$prefix/lib" fi if test "x$libopusenc_prefix" != "xno" ; then LIBOPUSENC_LIBS="$LIBOPUSENC_LIBS -lopusenc" fi if test "x$libopusenc_includes" != "x" ; then LIBOPUSENC_CFLAGS="-I$libopusenc_includes" elif test "x$libopusenc_prefix" = "xno" || test "x$libopusenc_prefix" = "xyes" ; then LIBOPUSENC_CFLAGS="" elif test "x$libopusenc_prefix" != "x" ; then LIBOPUSENC_CFLAGS="-I$libopusenc_prefix/include/opus" elif test "x$prefix" != "xNONE"; then LIBOPUSENC_CFLAGS="-I$prefix/include/opus" fi AC_MSG_CHECKING(for libopusenc) if test "x$libopusenc_prefix" = "xno" ; then no_libopusenc="disabled" enable_libopusenctest="no" else no_libopusenc="" fi if test "x$enable_libopusenctest" = "xyes" ; then ac_save_CFLAGS="$CFLAGS" ac_save_LIBS="$LIBS" CFLAGS="$CFLAGS $LIBOPUSENC_CFLAGS $OPUS_CFLAGS" LIBS="$LIBS $LIBOPUSENC_LIBS $OPUS_LIBS" dnl dnl Now check if the installed libopusenc is sufficiently new. dnl rm -f conf.libopusenctest AC_TRY_RUN([ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <opusenc.h> int main () { system("touch conf.libopusenctest"); return 0; } ],, no_libopusenc=yes,[echo $ac_n "cross compiling; assumed OK... $ac_c"]) CFLAGS="$ac_save_CFLAGS" LIBS="$ac_save_LIBS" fi if test "x$no_libopusenc" = "xdisabled" ; then AC_MSG_RESULT(no) ifelse([$2], , :, [$2]) elif test "x$no_libopusenc" = "x" ; then AC_MSG_RESULT(yes) ifelse([$1], , :, [$1]) else AC_MSG_RESULT(no) if test -f conf.libopusenctest ; then : else echo "*** Could not run libopusenc test program, checking why..." CFLAGS="$CFLAGS $LIBOPUSENC_CFLAGS" LIBS="$LIBS $LIBOPUSENC_LIBS" AC_TRY_LINK([ #include <stdio.h> #include <opusenc.h> ], [ return 0; ], [ echo "*** The test program compiled, but did not run. This usually means" echo "*** that the run-time linker is not finding libopusenc or finding the wrong" echo "*** version of libopusenc. 
If it is not finding libopusenc, you'll need to set your" echo "*** LD_LIBRARY_PATH environment variable, or edit /etc/ld.so.conf to point" echo "*** to the installed location. Also, make sure you have run ldconfig if that" echo "*** is required on your system" echo "***" echo "*** If you have an old version installed, it is best to remove it, although" echo "*** you may also be able to get things to work by modifying LD_LIBRARY_PATH"], [ echo "*** The test program failed to compile or link. See the file config.log for the" echo "*** exact error that occurred. This usually means libopusenc was incorrectly installed" echo "*** or that you have moved libopusenc since it was installed." ]) CFLAGS="$ac_save_CFLAGS" LIBS="$ac_save_LIBS" fi LIBOPUSENC_CFLAGS="" LIBOPUSENC_LIBS="" ifelse([$2], , :, [$2]) fi AC_SUBST(LIBOPUSENC_CFLAGS) AC_SUBST(LIBOPUSENC_LIBS) rm -f conf.libopusenctest ])
{ "pile_set_name": "Github" }
define( [ "./core", "./var/document", "./var/rnotwhite", "./var/slice", "./data/var/dataPriv", "./core/init", "./selector" ], function( jQuery, document, rnotwhite, slice, dataPriv ) { var rkeyEvent = /^key/, rmouseEvent = /^(?:mouse|pointer|contextmenu|drag|drop)|click/, rtypenamespace = /^([^.]*)(?:\.(.+)|)/; function returnTrue() { return true; } function returnFalse() { return false; } // Support: IE9 // See #13393 for more info function safeActiveElement() { try { return document.activeElement; } catch ( err ) { } } function on( elem, types, selector, data, fn, one ) { var origFn, type; // Types can be a map of types/handlers if ( typeof types === "object" ) { // ( types-Object, selector, data ) if ( typeof selector !== "string" ) { // ( types-Object, data ) data = data || selector; selector = undefined; } for ( type in types ) { on( elem, type, selector, data, types[ type ], one ); } return elem; } if ( data == null && fn == null ) { // ( types, fn ) fn = selector; data = selector = undefined; } else if ( fn == null ) { if ( typeof selector === "string" ) { // ( types, selector, fn ) fn = data; data = undefined; } else { // ( types, data, fn ) fn = data; data = selector; selector = undefined; } } if ( fn === false ) { fn = returnFalse; } else if ( !fn ) { return elem; } if ( one === 1 ) { origFn = fn; fn = function( event ) { // Can use an empty set, since event contains the info jQuery().off( event ); return origFn.apply( this, arguments ); }; // Use same guid so caller can remove using origFn fn.guid = origFn.guid || ( origFn.guid = jQuery.guid++ ); } return elem.each( function() { jQuery.event.add( this, types, fn, data, selector ); } ); } /* * Helper functions for managing events -- not part of the public interface. * Props to Dean Edwards' addEvent library for many of the ideas. */ jQuery.event = { global: {}, add: function( elem, types, handler, data, selector ) { var handleObjIn, eventHandle, tmp, events, t, handleObj, special, handlers, type, namespaces, origType, elemData = dataPriv.get( elem ); // Don't attach events to noData or text/comment nodes (but allow plain objects) if ( !elemData ) { return; } // Caller can pass in an object of custom data in lieu of the handler if ( handler.handler ) { handleObjIn = handler; handler = handleObjIn.handler; selector = handleObjIn.selector; } // Make sure that the handler has a unique ID, used to find/remove it later if ( !handler.guid ) { handler.guid = jQuery.guid++; } // Init the element's event structure and main handler, if this is the first if ( !( events = elemData.events ) ) { events = elemData.events = {}; } if ( !( eventHandle = elemData.handle ) ) { eventHandle = elemData.handle = function( e ) { // Discard the second event of a jQuery.event.trigger() and // when an event is called after a page has unloaded return typeof jQuery !== "undefined" && jQuery.event.triggered !== e.type ? jQuery.event.dispatch.apply( elem, arguments ) : undefined; }; } // Handle multiple events separated by a space types = ( types || "" ).match( rnotwhite ) || [ "" ]; t = types.length; while ( t-- ) { tmp = rtypenamespace.exec( types[ t ] ) || []; type = origType = tmp[ 1 ]; namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); // There *must* be a type, no attaching namespace-only handlers if ( !type ) { continue; } // If event changes its type, use the special event handlers for the changed type special = jQuery.event.special[ type ] || {}; // If selector defined, determine special event api type, otherwise given type type = ( selector ? 
special.delegateType : special.bindType ) || type; // Update special based on newly reset type special = jQuery.event.special[ type ] || {}; // handleObj is passed to all event handlers handleObj = jQuery.extend( { type: type, origType: origType, data: data, handler: handler, guid: handler.guid, selector: selector, needsContext: selector && jQuery.expr.match.needsContext.test( selector ), namespace: namespaces.join( "." ) }, handleObjIn ); // Init the event handler queue if we're the first if ( !( handlers = events[ type ] ) ) { handlers = events[ type ] = []; handlers.delegateCount = 0; // Only use addEventListener if the special events handler returns false if ( !special.setup || special.setup.call( elem, data, namespaces, eventHandle ) === false ) { if ( elem.addEventListener ) { elem.addEventListener( type, eventHandle ); } } } if ( special.add ) { special.add.call( elem, handleObj ); if ( !handleObj.handler.guid ) { handleObj.handler.guid = handler.guid; } } // Add to the element's handler list, delegates in front if ( selector ) { handlers.splice( handlers.delegateCount++, 0, handleObj ); } else { handlers.push( handleObj ); } // Keep track of which events have ever been used, for event optimization jQuery.event.global[ type ] = true; } }, // Detach an event or set of events from an element remove: function( elem, types, handler, selector, mappedTypes ) { var j, origCount, tmp, events, t, handleObj, special, handlers, type, namespaces, origType, elemData = dataPriv.hasData( elem ) && dataPriv.get( elem ); if ( !elemData || !( events = elemData.events ) ) { return; } // Once for each type.namespace in types; type may be omitted types = ( types || "" ).match( rnotwhite ) || [ "" ]; t = types.length; while ( t-- ) { tmp = rtypenamespace.exec( types[ t ] ) || []; type = origType = tmp[ 1 ]; namespaces = ( tmp[ 2 ] || "" ).split( "." ).sort(); // Unbind all events (on this namespace, if provided) for the element if ( !type ) { for ( type in events ) { jQuery.event.remove( elem, type + types[ t ], handler, selector, true ); } continue; } special = jQuery.event.special[ type ] || {}; type = ( selector ? 
special.delegateType : special.bindType ) || type; handlers = events[ type ] || []; tmp = tmp[ 2 ] && new RegExp( "(^|\\.)" + namespaces.join( "\\.(?:.*\\.|)" ) + "(\\.|$)" ); // Remove matching events origCount = j = handlers.length; while ( j-- ) { handleObj = handlers[ j ]; if ( ( mappedTypes || origType === handleObj.origType ) && ( !handler || handler.guid === handleObj.guid ) && ( !tmp || tmp.test( handleObj.namespace ) ) && ( !selector || selector === handleObj.selector || selector === "**" && handleObj.selector ) ) { handlers.splice( j, 1 ); if ( handleObj.selector ) { handlers.delegateCount--; } if ( special.remove ) { special.remove.call( elem, handleObj ); } } } // Remove generic event handler if we removed something and no more handlers exist // (avoids potential for endless recursion during removal of special event handlers) if ( origCount && !handlers.length ) { if ( !special.teardown || special.teardown.call( elem, namespaces, elemData.handle ) === false ) { jQuery.removeEvent( elem, type, elemData.handle ); } delete events[ type ]; } } // Remove data and the expando if it's no longer used if ( jQuery.isEmptyObject( events ) ) { dataPriv.remove( elem, "handle events" ); } }, dispatch: function( event ) { // Make a writable jQuery.Event from the native event object event = jQuery.event.fix( event ); var i, j, ret, matched, handleObj, handlerQueue = [], args = slice.call( arguments ), handlers = ( dataPriv.get( this, "events" ) || {} )[ event.type ] || [], special = jQuery.event.special[ event.type ] || {}; // Use the fix-ed jQuery.Event rather than the (read-only) native event args[ 0 ] = event; event.delegateTarget = this; // Call the preDispatch hook for the mapped type, and let it bail if desired if ( special.preDispatch && special.preDispatch.call( this, event ) === false ) { return; } // Determine handlers handlerQueue = jQuery.event.handlers.call( this, event, handlers ); // Run delegates first; they may want to stop propagation beneath us i = 0; while ( ( matched = handlerQueue[ i++ ] ) && !event.isPropagationStopped() ) { event.currentTarget = matched.elem; j = 0; while ( ( handleObj = matched.handlers[ j++ ] ) && !event.isImmediatePropagationStopped() ) { // Triggered event must either 1) have no namespace, or 2) have namespace(s) // a subset or equal to those in the bound event (both can have no namespace). 
if ( !event.rnamespace || event.rnamespace.test( handleObj.namespace ) ) { event.handleObj = handleObj; event.data = handleObj.data; ret = ( ( jQuery.event.special[ handleObj.origType ] || {} ).handle || handleObj.handler ).apply( matched.elem, args ); if ( ret !== undefined ) { if ( ( event.result = ret ) === false ) { event.preventDefault(); event.stopPropagation(); } } } } } // Call the postDispatch hook for the mapped type if ( special.postDispatch ) { special.postDispatch.call( this, event ); } return event.result; }, handlers: function( event, handlers ) { var i, matches, sel, handleObj, handlerQueue = [], delegateCount = handlers.delegateCount, cur = event.target; // Support (at least): Chrome, IE9 // Find delegate handlers // Black-hole SVG <use> instance trees (#13180) // // Support: Firefox<=42+ // Avoid non-left-click in FF but don't block IE radio events (#3861, gh-2343) if ( delegateCount && cur.nodeType && ( event.type !== "click" || isNaN( event.button ) || event.button < 1 ) ) { for ( ; cur !== this; cur = cur.parentNode || this ) { // Don't check non-elements (#13208) // Don't process clicks on disabled elements (#6911, #8165, #11382, #11764) if ( cur.nodeType === 1 && ( cur.disabled !== true || event.type !== "click" ) ) { matches = []; for ( i = 0; i < delegateCount; i++ ) { handleObj = handlers[ i ]; // Don't conflict with Object.prototype properties (#13203) sel = handleObj.selector + " "; if ( matches[ sel ] === undefined ) { matches[ sel ] = handleObj.needsContext ? jQuery( sel, this ).index( cur ) > -1 : jQuery.find( sel, this, null, [ cur ] ).length; } if ( matches[ sel ] ) { matches.push( handleObj ); } } if ( matches.length ) { handlerQueue.push( { elem: cur, handlers: matches } ); } } } } // Add the remaining (directly-bound) handlers if ( delegateCount < handlers.length ) { handlerQueue.push( { elem: this, handlers: handlers.slice( delegateCount ) } ); } return handlerQueue; }, // Includes some event props shared by KeyEvent and MouseEvent props: ( "altKey bubbles cancelable ctrlKey currentTarget detail eventPhase " + "metaKey relatedTarget shiftKey target timeStamp view which" ).split( " " ), fixHooks: {}, keyHooks: { props: "char charCode key keyCode".split( " " ), filter: function( event, original ) { // Add which for key events if ( event.which == null ) { event.which = original.charCode != null ? original.charCode : original.keyCode; } return event; } }, mouseHooks: { props: ( "button buttons clientX clientY offsetX offsetY pageX pageY " + "screenX screenY toElement" ).split( " " ), filter: function( event, original ) { var eventDoc, doc, body, button = original.button; // Calculate pageX/Y if missing and clientX/Y available if ( event.pageX == null && original.clientX != null ) { eventDoc = event.target.ownerDocument || document; doc = eventDoc.documentElement; body = eventDoc.body; event.pageX = original.clientX + ( doc && doc.scrollLeft || body && body.scrollLeft || 0 ) - ( doc && doc.clientLeft || body && body.clientLeft || 0 ); event.pageY = original.clientY + ( doc && doc.scrollTop || body && body.scrollTop || 0 ) - ( doc && doc.clientTop || body && body.clientTop || 0 ); } // Add which for click: 1 === left; 2 === middle; 3 === right // Note: button is not normalized, so don't use it if ( !event.which && button !== undefined ) { event.which = ( button & 1 ? 1 : ( button & 2 ? 3 : ( button & 4 ? 
2 : 0 ) ) ); } return event; } }, fix: function( event ) { if ( event[ jQuery.expando ] ) { return event; } // Create a writable copy of the event object and normalize some properties var i, prop, copy, type = event.type, originalEvent = event, fixHook = this.fixHooks[ type ]; if ( !fixHook ) { this.fixHooks[ type ] = fixHook = rmouseEvent.test( type ) ? this.mouseHooks : rkeyEvent.test( type ) ? this.keyHooks : {}; } copy = fixHook.props ? this.props.concat( fixHook.props ) : this.props; event = new jQuery.Event( originalEvent ); i = copy.length; while ( i-- ) { prop = copy[ i ]; event[ prop ] = originalEvent[ prop ]; } // Support: Cordova 2.5 (WebKit) (#13255) // All events should have a target; Cordova deviceready doesn't if ( !event.target ) { event.target = document; } // Support: Safari 6.0+, Chrome<28 // Target should not be a text node (#504, #13143) if ( event.target.nodeType === 3 ) { event.target = event.target.parentNode; } return fixHook.filter ? fixHook.filter( event, originalEvent ) : event; }, special: { load: { // Prevent triggered image.load events from bubbling to window.load noBubble: true }, focus: { // Fire native event if possible so blur/focus sequence is correct trigger: function() { if ( this !== safeActiveElement() && this.focus ) { this.focus(); return false; } }, delegateType: "focusin" }, blur: { trigger: function() { if ( this === safeActiveElement() && this.blur ) { this.blur(); return false; } }, delegateType: "focusout" }, click: { // For checkbox, fire native event so checked state will be right trigger: function() { if ( this.type === "checkbox" && this.click && jQuery.nodeName( this, "input" ) ) { this.click(); return false; } }, // For cross-browser consistency, don't fire native .click() on links _default: function( event ) { return jQuery.nodeName( event.target, "a" ); } }, beforeunload: { postDispatch: function( event ) { // Support: Firefox 20+ // Firefox doesn't alert if the returnValue field is not set. if ( event.result !== undefined && event.originalEvent ) { event.originalEvent.returnValue = event.result; } } } } }; jQuery.removeEvent = function( elem, type, handle ) { // This "if" is needed for plain objects if ( elem.removeEventListener ) { elem.removeEventListener( type, handle ); } }; jQuery.Event = function( src, props ) { // Allow instantiation without the 'new' keyword if ( !( this instanceof jQuery.Event ) ) { return new jQuery.Event( src, props ); } // Event object if ( src && src.type ) { this.originalEvent = src; this.type = src.type; // Events bubbling up the document may have been marked as prevented // by a handler lower down the tree; reflect the correct value. this.isDefaultPrevented = src.defaultPrevented || src.defaultPrevented === undefined && // Support: Android<4.0 src.returnValue === false ? 
returnTrue : returnFalse; // Event type } else { this.type = src; } // Put explicitly provided properties onto the event object if ( props ) { jQuery.extend( this, props ); } // Create a timestamp if incoming event doesn't have one this.timeStamp = src && src.timeStamp || jQuery.now(); // Mark it as fixed this[ jQuery.expando ] = true; }; // jQuery.Event is based on DOM3 Events as specified by the ECMAScript Language Binding // http://www.w3.org/TR/2003/WD-DOM-Level-3-Events-20030331/ecma-script-binding.html jQuery.Event.prototype = { constructor: jQuery.Event, isDefaultPrevented: returnFalse, isPropagationStopped: returnFalse, isImmediatePropagationStopped: returnFalse, preventDefault: function() { var e = this.originalEvent; this.isDefaultPrevented = returnTrue; if ( e ) { e.preventDefault(); } }, stopPropagation: function() { var e = this.originalEvent; this.isPropagationStopped = returnTrue; if ( e ) { e.stopPropagation(); } }, stopImmediatePropagation: function() { var e = this.originalEvent; this.isImmediatePropagationStopped = returnTrue; if ( e ) { e.stopImmediatePropagation(); } this.stopPropagation(); } }; // Create mouseenter/leave events using mouseover/out and event-time checks // so that event delegation works in jQuery. // Do the same for pointerenter/pointerleave and pointerover/pointerout // // Support: Safari 7 only // Safari sends mouseenter too often; see: // https://code.google.com/p/chromium/issues/detail?id=470258 // for the description of the bug (it existed in older Chrome versions as well). jQuery.each( { mouseenter: "mouseover", mouseleave: "mouseout", pointerenter: "pointerover", pointerleave: "pointerout" }, function( orig, fix ) { jQuery.event.special[ orig ] = { delegateType: fix, bindType: fix, handle: function( event ) { var ret, target = this, related = event.relatedTarget, handleObj = event.handleObj; // For mouseenter/leave call the handler if related is outside the target. // NB: No relatedTarget if the mouse left/entered the browser window if ( !related || ( related !== target && !jQuery.contains( target, related ) ) ) { event.type = handleObj.origType; ret = handleObj.handler.apply( this, arguments ); event.type = fix; } return ret; } }; } ); jQuery.fn.extend( { on: function( types, selector, data, fn ) { return on( this, types, selector, data, fn ); }, one: function( types, selector, data, fn ) { return on( this, types, selector, data, fn, 1 ); }, off: function( types, selector, fn ) { var handleObj, type; if ( types && types.preventDefault && types.handleObj ) { // ( event ) dispatched jQuery.Event handleObj = types.handleObj; jQuery( types.delegateTarget ).off( handleObj.namespace ? handleObj.origType + "." + handleObj.namespace : handleObj.origType, handleObj.selector, handleObj.handler ); return this; } if ( typeof types === "object" ) { // ( types-object [, selector] ) for ( type in types ) { this.off( type, selector, types[ type ] ); } return this; } if ( selector === false || typeof selector === "function" ) { // ( types [, fn] ) fn = selector; selector = undefined; } if ( fn === false ) { fn = returnFalse; } return this.each( function() { jQuery.event.remove( this, types, fn, selector ); } ); } } ); return jQuery; } );
{ "pile_set_name": "Github" }
package structure; import java.util.AbstractSet; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeSet; /** A simple Map implementation, implemented in terms of a * pair of ArrayLists just to show what a Map has to do (it would * have been easier, but less informative, to subclass AbstractMap). * This Map implementation, like TreeSet, guarantees that the * Map's contents will be kept in ascending element order, * sorted according to the natural order of the elements; * see Comparable. This does not (yet) allow you to specify your own * Comparator. * <p> * It is a requirement that all objects inserted be able to * call compareTo on all other objects, i.e., they must all * be of the same or related classes. * <p> * Be warned that the entrySet() method is <b>not implemented</b> yet. */ public class MyMap<K,V> implements Map<K,V> { private ArrayList<K> keys; private ArrayList<V> values; public MyMap() { keys = new ArrayList<K>(); values = new ArrayList<V>(); } /** Return the number of mappings in this Map. */ public int size() { return keys.size(); } /** Return true if this map is empty. */ public boolean isEmpty() { return size() == 0; } /** Return true if o is contained as a Key in this Map. */ public boolean containsKey(Object o) { return keys.contains(o); } /** Return true if o is contained as a Value in this Map. */ public boolean containsValue(Object o) { return values.contains(o); } /** Get the object value corresponding to key k. */ public V get(Object k) { int i = keys.indexOf(k); if (i == -1) return null; return values.get(i); } /** Put the given pair (k, v) into this map, by maintaining "keys" * in sorted order. */ @SuppressWarnings("unchecked") public V put(Object k, Object v) { for (int i=0; i < keys.size(); i++) { /* Does the key already exist? */ if (((Comparable<K>)k).compareTo(keys.get(i)) == 0) { values.set(i, (V)v); return values.get(i); } /* Did we just go past where to put it? * i.e., keep keys in sorted order. */ if (((Comparable<K>)k).compareTo(keys.get(i)) == +1) { int where = i > 0 ? i -1 : 0; keys.add(where, (K)k); values.add(where, (V)v); return null; } } // Else it goes at the end. keys.add((K) k); values.add((V) v); return null; } /** Put all the pairs from oldMap into this map */ @Override public void putAll(@SuppressWarnings("rawtypes") Map oldMap) { @SuppressWarnings("unchecked") Iterator<K> keysIter = oldMap.keySet().iterator(); while (keysIter.hasNext()) { Object k = keysIter.next(); Object v = oldMap.get(k); put(k, v); } } public V remove(Object k) { int i = keys.indexOf(k); if (i == -1) return null; V old = values.get(i); keys.remove(i); values.remove(i); return old; } public void clear() { keys.clear(); values.clear(); } public java.util.Set<K> keySet() { return new TreeSet<K>(keys); } public java.util.Collection<V> values() { return values; } /** The Map.Entry objects contained in the Set returned by entrySet(). */ @SuppressWarnings("rawtypes") private class MyMapEntry implements Map.Entry<K,V>, Comparable { private K key; private V value; MyMapEntry(K k, V v) { key = k; value = v; } public K getKey() { return key; } public V getValue() { return value; } public V setValue(V nv) { throw new UnsupportedOperationException("setValue"); } @SuppressWarnings("unchecked") public int compareTo(Object o2) { // if (!(o2 instanceof MyMapEntry)) // throw new IllegalArgumentException( // "Huh? 
Not a MapEntry?"); Object otherKey = ((MyMapEntry)o2).getKey(); return ((Comparable)key).compareTo((Comparable)otherKey); } } /** The set of Map.Entry objects returned from entrySet(). */ private class MyMapSet<T> extends AbstractSet<T> { List<T> list; MyMapSet(List<T> al) { list = al; } public Iterator<T> iterator() { return list.iterator(); } public int size() { return list.size(); } } /** Returns a set view of the mappings contained in this Map. * Each element in the returned set is a Map.Entry. * NOT guaranteed fully to implement the contract of entrySet * declared in java.util.Map. */ @SuppressWarnings({ "unchecked", "rawtypes" }) public Set entrySet() { if (keys.size() != values.size()) throw new IllegalStateException( "InternalError: keys and values out of sync"); ArrayList<MyMapEntry> al = new ArrayList<MyMapEntry>(); for (int i=0; i<keys.size(); i++) { al.add(new MyMapEntry(keys.get(i), values.get(i))); } return new MyMapSet<MyMapEntry>(al); } }
{ "pile_set_name": "Github" }
# To learn more <!-- Reference books --> <!-- Bitcoin-related --> <!-- mastering bitcoin, blockchain -->
{ "pile_set_name": "Github" }
// // MediaObserver.swift // RCTAgora // // Created by LXH on 2020/4/10. // Copyright © 2020 Syan. All rights reserved. // import Foundation import AgoraRtcKit class MediaObserver: NSObject { private var emitter: (_ data: Dictionary<String, Any?>?) -> Void private var maxMetadataSize = 0 private var metadataList = [String]() init(_ emitter: @escaping (_ data: Dictionary<String, Any?>?) -> Void) { self.emitter = emitter } func addMetadata(_ metadata: String) { metadataList.append(metadata) } func setMaxMetadataSize(_ size: Int) { maxMetadataSize = size } } extension MediaObserver: AgoraMediaMetadataDataSource { func metadataMaxSize() -> Int { maxMetadataSize } func readyToSendMetadata(atTimestamp timestamp: TimeInterval) -> Data? { if metadataList.count > 0 { return metadataList.remove(at: 0).data(using: .utf8) } return nil } } extension MediaObserver: AgoraMediaMetadataDelegate { func receiveMetadata(_ data: Data, fromUser uid: Int, atTimestamp timestamp: TimeInterval) { emitter([ "buffer": String(data: data, encoding: .utf8), "uid": uid, "timeStampMs": timestamp ]) } }
{ "pile_set_name": "Github" }
--- title: JSON Protocol menu: influxdb_09: weight: 20 parent: write_protocols --- The JSON write protocol is deprecated as of InfluxDB 0.9.1. It is still present but it will be removed when InfluxDB 1.0 is released. The [line protocol](/influxdb/v0.9/write_protocols/line/) is the primary write protocol for InfluxDB 0.9.1+. For reasons behind the deprecation, please see the comments on the line protocol pull request, particularly the comments on JSON serialization [CPU costs](https://github.com/influxdb/influxdb/pull/2696#issuecomment-106968181) and on the [ease of use](https://github.com/influxdb/influxdb/pull/2696#issuecomment-107043910) concerns.
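For reference, the sketch below (not part of the original page) shows a minimal line protocol write against a local InfluxDB 0.9 instance; the endpoint, the database name `mydb`, and the `requests` dependency are assumptions for illustration only.

```python
import requests

# One point in line protocol: measurement, tags, a field, optional nanosecond timestamp.
point = "cpu_load_short,host=server01,region=us-west value=0.64"

# Hypothetical local instance and database; adjust to your deployment.
resp = requests.post(
    "http://localhost:8086/write",
    params={"db": "mydb"},
    data=point,
)
print(resp.status_code)  # InfluxDB returns 204 No Content on a successful write
```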
{ "pile_set_name": "Github" }
<!DOCTYPE RCC><RCC version="1.0"> <qresource> <file>images/copy.png</file> <file>images/cut.png</file> <file>images/new.png</file> <file>images/open.png</file> <file>images/paste.png</file> <file>images/save.png</file> </qresource> </RCC>
{ "pile_set_name": "Github" }
{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "Title: Hyperparameter Tuning Using Random Search \n", "Slug: hyperparameter_tuning_using_random_search \n", "Summary: How to conduct random search for hyperparameter tuning in scikit-learn for machine learning in Python. \n", "Date: 2017-09-18 12:00 \n", "Category: Machine Learning \n", "Tags: Model Selection\n", "Authors: Chris Albon" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Preliminaries" ] }, { "cell_type": "code", "execution_count": 9, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Load libraries\n", "from scipy.stats import uniform\n", "from sklearn import linear_model, datasets\n", "from sklearn.model_selection import RandomizedSearchCV" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Load Iris Dataset" ] }, { "cell_type": "code", "execution_count": 10, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Load data\n", "iris = datasets.load_iris()\n", "X = iris.data\n", "y = iris.target" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Create Logistic Regression" ] }, { "cell_type": "code", "execution_count": 11, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Create logistic regression\n", "logistic = linear_model.LogisticRegression()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Create Hyperparameter Search Space" ] }, { "cell_type": "code", "execution_count": 12, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Create regularization penalty space\n", "penalty = ['l1', 'l2']\n", "\n", "# Create regularization hyperparameter distribution using uniform distribution\n", "C = uniform(loc=0, scale=4)\n", "\n", "# Create hyperparameter options\n", "hyperparameters = dict(C=C, penalty=penalty)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Create Random Search" ] }, { "cell_type": "code", "execution_count": 13, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Create randomized search 5-fold cross validation and 100 iterations\n", "clf = RandomizedSearchCV(logistic, hyperparameters, random_state=1, n_iter=100, cv=5, verbose=0, n_jobs=-1)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Conduct Random Search" ] }, { "cell_type": "code", "execution_count": 14, "metadata": { "collapsed": true }, "outputs": [], "source": [ "# Fit randomized search\n", "best_model = clf.fit(X, y)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## View Hyperparameter Values Of Best Model" ] }, { "cell_type": "code", "execution_count": 15, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Best Penalty: l1\n", "Best C: 1.66808801881\n" ] } ], "source": [ "# View best hyperparameters\n", "print('Best Penalty:', best_model.best_estimator_.get_params()['penalty'])\n", "print('Best C:', best_model.best_estimator_.get_params()['C'])" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Predict Using Best Model" ] }, { "cell_type": "code", "execution_count": 16, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", " 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", " 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 1, 1, 1, 1, 1, 1, 1,\n", " 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,\n", " 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 1, 2, 2, 2, 2,\n", " 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])" ] }, "execution_count": 16, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Predict target vector\n", "best_model.predict(X)" ] } ], "metadata": { "anaconda-cloud": {}, "kernelspec": { "display_name": "Python [default]", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.5.3" } }, "nbformat": 4, "nbformat_minor": 1 }
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: d16c92d59af8a9a4a9a0db824132b067 timeCreated: 1467670142 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }