text
stringlengths 2
100k
| meta
dict |
---|---|
# markdown/serializers.py
#
# Add x/html serialization to ElementTree
# Taken from ElementTree 1.3 preview with slight modifications
#
# Copyright (c) 1999-2007 by Fredrik Lundh. All rights reserved.
#
# [email protected]
# http://www.pythonware.com
#
# --------------------------------------------------------------------
# The ElementTree toolkit is
#
# Copyright (c) 1999-2007 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import unicode_literals
from . import util
# Re-export the ElementTree implementation selected by util (cElementTree,
# lxml, plain ElementTree, ...) so the serializers below work with
# whichever backend is in use.
ElementTree = util.etree.ElementTree
QName = util.etree.QName
# Some instrumented/test builds expose the comment factory under
# `test_comment`; prefer it when present so comment nodes are recognized.
if hasattr(util.etree, 'test_comment'): # pragma: no cover
    Comment = util.etree.test_comment
else: # pragma: no cover
    Comment = util.etree.Comment
PI = util.etree.PI
ProcessingInstruction = util.etree.ProcessingInstruction
# Public API of this module.
__all__ = ['to_html_string', 'to_xhtml_string']
# HTML "void" elements: tags that can never hold content and are
# serialized without a closing tag (self-closing in XHTML output).
# BUG FIX: the original tuple read `"meta" "param"` (missing comma), which
# Python concatenates into the single bogus entry "metaparam", so neither
# <meta> nor <param> was treated as empty.
HTML_EMPTY = set((
    "area", "base", "basefont", "br", "col", "frame", "hr",
    "img", "input", "isindex", "link", "meta", "param",
))
# Map of "well-known" XML namespace URIs to their conventional prefixes.
# During serialization these prefixes are preferred over generated
# "ns%d" names (see _namespaces below).
_namespace_map = {
    # "well-known" namespace prefixes
    "http://www.w3.org/XML/1998/namespace": "xml",
    "http://www.w3.org/1999/xhtml": "html",
    "http://www.w3.org/1999/02/22-rdf-syntax-ns#": "rdf",
    "http://schemas.xmlsoap.org/wsdl/": "wsdl",
    # XML Schema
    "http://www.w3.org/2001/XMLSchema": "xs",
    "http://www.w3.org/2001/XMLSchema-instance": "xsi",
    # Dublin Core
    "http://purl.org/dc/elements/1.1/": "dc",
}
def _raise_serialization_error(text): # pragma: no cover
raise TypeError(
"cannot serialize %r (type %s)" % (text, type(text).__name__)
)
def _encode(text, encoding):
try:
return text.encode(encoding, "xmlcharrefreplace")
except (TypeError, AttributeError): # pragma: no cover
_raise_serialization_error(text)
def _escape_cdata(text):
# escape character data
try:
# it's worth avoiding do-nothing calls for strings that are
# shorter than 500 character, or so. assume that's, by far,
# the most common case in most applications.
if "&" in text:
text = text.replace("&", "&")
if "<" in text:
text = text.replace("<", "<")
if ">" in text:
text = text.replace(">", ">")
return text
except (TypeError, AttributeError): # pragma: no cover
_raise_serialization_error(text)
def _escape_attrib(text):
# escape attribute value
try:
if "&" in text:
text = text.replace("&", "&")
if "<" in text:
text = text.replace("<", "<")
if ">" in text:
text = text.replace(">", ">")
if "\"" in text:
text = text.replace("\"", """)
if "\n" in text:
text = text.replace("\n", " ")
return text
except (TypeError, AttributeError): # pragma: no cover
_raise_serialization_error(text)
def _escape_attrib_html(text):
# escape attribute value
try:
if "&" in text:
text = text.replace("&", "&")
if "<" in text:
text = text.replace("<", "<")
if ">" in text:
text = text.replace(">", ">")
if "\"" in text:
text = text.replace("\"", """)
return text
except (TypeError, AttributeError): # pragma: no cover
_raise_serialization_error(text)
def _serialize_html(write, elem, qnames, namespaces, format):
    """Recursively serialize *elem* to *write* as HTML or XHTML.

    :param write: callable accepting string fragments (e.g. ``list.append``).
    :param elem: element (or Comment/PI node) to serialize.
    :param qnames: map of raw tag/attribute names to serialized qnames;
        only the root call receives a non-None *namespaces* map.
    :param format: ``"html"`` or ``"xhtml"``; controls void-element syntax
        and boolean-attribute minimization.
    """
    tag = elem.tag
    text = elem.text
    if tag is Comment:
        write("<!--%s-->" % _escape_cdata(text))
    elif tag is ProcessingInstruction:
        write("<?%s?>" % _escape_cdata(text))
    else:
        tag = qnames[tag]
        if tag is None:
            # Tagless wrapper node: emit only its text and children.
            if text:
                write(_escape_cdata(text))
            for e in elem:
                _serialize_html(write, e, qnames, None, format)
        else:
            write("<" + tag)
            items = elem.items()
            if items or namespaces:
                items = sorted(items)  # lexical order
                for k, v in items:
                    if isinstance(k, QName):
                        k = k.text
                    if isinstance(v, QName):
                        v = qnames[v.text]
                    else:
                        v = _escape_attrib_html(v)
                    if qnames[k] == v and format == 'html':
                        # handle boolean attributes (e.g. ``checked``)
                        write(" %s" % v)
                    else:
                        write(" %s=\"%s\"" % (qnames[k], v))
                if namespaces:
                    # BUG FIX: dict views have no .sort() on Python 3;
                    # use sorted() (sort on prefix) instead.
                    for v, k in sorted(namespaces.items(), key=lambda x: x[1]):
                        if k:
                            k = ":" + k
                        write(" xmlns%s=\"%s\"" % (k, _escape_attrib(v)))
            if format == "xhtml" and tag.lower() in HTML_EMPTY:
                # XHTML void elements self-close and carry no content.
                write(" />")
            else:
                write(">")
                if text:
                    if tag.lower() in ["script", "style"]:
                        # script/style content must not be entity-escaped
                        write(text)
                    else:
                        write(_escape_cdata(text))
                for e in elem:
                    _serialize_html(write, e, qnames, None, format)
                if tag.lower() not in HTML_EMPTY:
                    write("</" + tag + ">")
    if elem.tail:
        write(_escape_cdata(elem.tail))
def _write_html(root,
                encoding=None,
                default_namespace=None,
                format="html"):
    """Serialize the tree rooted at *root* and return it as a string.

    :param encoding: if given, the result is returned as bytes in that
        encoding; otherwise a (unicode) string is returned.
    :param default_namespace: optional default namespace URI.
    :param format: ``"html"`` or ``"xhtml"``.
    """
    assert root is not None
    data = []
    write = data.append
    qnames, namespaces = _namespaces(root, default_namespace)
    _serialize_html(write, root, qnames, namespaces, format)
    if encoding is None:
        return "".join(data)
    else:
        # BUG FIX: _encode() takes (text, encoding); the encoding argument
        # was missing, so any encoded write raised TypeError.
        return _encode("".join(data), encoding)
# --------------------------------------------------------------------
# serialization support
def _namespaces(elem, default_namespace=None):
    """Collect the namespaces used in the tree rooted at *elem*.

    Returns ``(qnames, namespaces)`` where *qnames* maps every raw
    tag/attribute/text qname seen in the tree to its serialized
    ``prefix:local`` form, and *namespaces* maps namespace URIs to
    the prefixes chosen for them.
    """
    # identify namespaces used in this tree

    # maps qnames to *encoded* prefix:local names
    qnames = {None: None}

    # maps uri:s to prefixes
    namespaces = {}
    if default_namespace:
        # the default namespace gets the empty prefix
        namespaces[default_namespace] = ""

    def add_qname(qname):
        # calculate serialized qname representation
        try:
            if qname[:1] == "{":
                # Clark notation: "{uri}local" -> pick/assign a prefix.
                uri, tag = qname[1:].split("}", 1)
                prefix = namespaces.get(uri)
                if prefix is None:
                    # prefer a well-known prefix, else generate "ns%d"
                    prefix = _namespace_map.get(uri)
                    if prefix is None:
                        prefix = "ns%d" % len(namespaces)
                    if prefix != "xml":
                        namespaces[uri] = prefix
                if prefix:
                    qnames[qname] = "%s:%s" % (prefix, tag)
                else:
                    qnames[qname] = tag # default element
            else:
                # unqualified names are only legal without a default ns
                if default_namespace:
                    raise ValueError(
                        "cannot use non-qualified names with "
                        "default_namespace option"
                    )
                qnames[qname] = qname
        except TypeError: # pragma: no cover
            _raise_serialization_error(qname)

    # populate qname and namespaces table
    try:
        iterate = elem.iter
    except AttributeError:
        iterate = elem.getiterator # cET compatibility
    for elem in iterate():
        tag = elem.tag
        if isinstance(tag, QName) and tag.text not in qnames:
            add_qname(tag.text)
        elif isinstance(tag, util.string_type):
            if tag not in qnames:
                add_qname(tag)
        elif tag is not None and tag is not Comment and tag is not PI:
            # anything else (non-string, non-special node) is unserializable
            _raise_serialization_error(tag)
        # attribute keys and QName-valued attributes also need qnames
        for key, value in elem.items():
            if isinstance(key, QName):
                key = key.text
            if key not in qnames:
                add_qname(key)
            if isinstance(value, QName) and value.text not in qnames:
                add_qname(value.text)
        # QName used as element text (rare, but supported by ElementTree)
        text = elem.text
        if isinstance(text, QName) and text.text not in qnames:
            add_qname(text.text)
    return qnames, namespaces
def to_html_string(element):
    """Render *element* and its subtree as an HTML (non-XHTML) string."""
    root = ElementTree(element).getroot()
    return _write_html(root, format="html")
def to_xhtml_string(element):
    """Render *element* and its subtree as an XHTML string."""
    root = ElementTree(element).getroot()
    return _write_html(root, format="xhtml")
| {
"pile_set_name": "Github"
} |
/*
Visual Studio-like style based on original C# coloring by Jason Diamond <[email protected]>
*/
/* Base highlight.js container: scrollable block, black text on white
   (matches the Visual Studio light theme this file is based on). */
.hljs {
  display: block;
  overflow-x: auto;
  padding: 0.5em;
  background: white;
  color: black;
}

/* Comments, quotes and variables: VS green. */
.hljs-comment,
.hljs-quote,
.hljs-variable {
  color: #008000;
}

/* Keywords, tags and built-ins: VS blue. */
.hljs-keyword,
.hljs-selector-tag,
.hljs-built_in,
.hljs-name,
.hljs-tag {
  color: #00f;
}

/* Strings, titles, literals and type-like tokens: VS dark red. */
.hljs-string,
.hljs-title,
.hljs-section,
.hljs-attribute,
.hljs-literal,
.hljs-template-tag,
.hljs-template-variable,
.hljs-type,
.hljs-addition {
  color: #a31515;
}

/* Diff deletions, selector attrs/pseudos and meta lines: VS teal. */
.hljs-deletion,
.hljs-selector-attr,
.hljs-selector-pseudo,
.hljs-meta {
  color: #2b91af;
}

/* Doc tags (e.g. @param): gray. */
.hljs-doctag {
  color: #808080;
}

/* HTML/XML attribute names: red. */
.hljs-attr {
  color: #f00;
}

/* Symbols, list bullets and links: light blue. */
.hljs-symbol,
.hljs-bullet,
.hljs-link {
  color: #00b0e8;
}

/* Markdown emphasis/strong passthrough. */
.hljs-emphasis {
  font-style: italic;
}

.hljs-strong {
  font-weight: bold;
}
| {
"pile_set_name": "Github"
} |
/**
* This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at http://mozilla.org/MPL/2.0/.
* If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice.
* You may add additional accurate notices of copyright ownership.
*
* It is desirable to notify that Covered Software was "Powered by AlternativaPlatform" with link to http://www.alternativaplatform.com/
* */
package alternativa.engine3d.core.events {
import alternativa.engine3d.alternativa3d;
import alternativa.engine3d.core.*;
import alternativa.engine3d.objects.Surface;
import flash.events.Event;
use namespace alternativa3d;
/**
*
* Event <code>MouseEvent3D</code> dispatches by <code>Object3D</code>, in cases when <code>MouseEvent</code> dispatches by <code>DisplayObject</code>.
*/
/**
 * A <code>MouseEvent3D</code> is dispatched by an <code>Object3D</code> in the
 * cases where a <code>MouseEvent</code> would be dispatched by a
 * <code>DisplayObject</code>.
 */
public class MouseEvent3D extends Event3D {

    /**
     * Defines the value of the <code>type</code> property of a <code>click3D</code> event object.
     * @eventType click3D
     */
    public static const CLICK:String = "click3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>doubleClick3D</code> event object.
     * @eventType doubleClick3D
     */
    public static const DOUBLE_CLICK:String = "doubleClick3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>mouseDown3D</code> event object.
     * @eventType mouseDown3D
     */
    public static const MOUSE_DOWN:String = "mouseDown3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>mouseUp3D</code> event object.
     * @eventType mouseUp3D
     */
    public static const MOUSE_UP:String = "mouseUp3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>rightClick3D</code> event object.
     * @eventType rightClick3D
     */
    public static const RIGHT_CLICK:String = "rightClick3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>rightMouseDown3D</code> event object.
     * @eventType rightMouseDown3D
     */
    public static const RIGHT_MOUSE_DOWN:String = "rightMouseDown3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>rightMouseUp3D</code> event object.
     * @eventType rightMouseUp3D
     */
    public static const RIGHT_MOUSE_UP:String = "rightMouseUp3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>middleClick3D</code> event object.
     * @eventType middleClick3D
     */
    public static const MIDDLE_CLICK:String = "middleClick3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>middleMouseDown3D</code> event object.
     * @eventType middleMouseDown3D
     */
    public static const MIDDLE_MOUSE_DOWN:String = "middleMouseDown3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>middleMouseUp3D</code> event object.
     * @eventType middleMouseUp3D
     */
    public static const MIDDLE_MOUSE_UP:String = "middleMouseUp3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>mouseOver3D</code> event object.
     * @eventType mouseOver3D
     */
    public static const MOUSE_OVER:String = "mouseOver3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>mouseOut3D</code> event object.
     * @eventType mouseOut3D
     */
    public static const MOUSE_OUT:String = "mouseOut3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>rollOver3D</code> event object.
     * @eventType rollOver3D
     */
    public static const ROLL_OVER:String = "rollOver3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>rollOut3D</code> event object.
     * @eventType rollOut3D
     */
    public static const ROLL_OUT:String = "rollOut3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>mouseMove3D</code> event object.
     * @eventType mouseMove3D
     */
    public static const MOUSE_MOVE:String = "mouseMove3D";

    /**
     * Defines the value of the <code>type</code> property of a <code>mouseWheel3D</code> event object.
     * @eventType mouseWheel3D
     */
    public static const MOUSE_WHEEL:String = "mouseWheel3D";

    /**
     * On Windows or Linux, indicates whether the Ctrl key is active (<code>true</code>) or inactive (<code>false</code>). On Macintosh, indicates whether either the Control key or the Command key is activated.
     */
    public var ctrlKey:Boolean;

    /**
     * Indicates whether the Alt key is active (<code>true</code>) or inactive (<code>false</code>).
     */
    public var altKey:Boolean;

    /**
     * Indicates whether the Shift key is active (<code>true</code>) or inactive (<code>false</code>).
     */
    public var shiftKey:Boolean;

    /**
     * Indicates whether the main mouse button is active (<code>true</code>) or inactive (<code>false</code>).
     */
    public var buttonDown:Boolean;

    /**
     * Indicates how many lines should be scrolled for each unit the user rotates the mouse wheel.
     */
    public var delta:int;

    /**
     * A reference to an object that is related to the event. This property applies to the <code>mouseOut</code>, <code>mouseOver</code>, <code>rollOut</code> and <code>rollOver</code> events.
     * For example, when <code>mouseOut</code> occurs, <code>relatedObject</code> points to the object over which the mouse cursor is now placed.
     */
    public var relatedObject:Object3D;

    /**
     * X coordinate of the event in the target object's local space.
     */
    public var localX:Number;

    /**
     * Y coordinate of the event in the target object's local space.
     */
    public var localY:Number;

    /**
     * Z coordinate of the event in the target object's local space.
     */
    public var localZ:Number;

    /**
     * @private
     */
    alternativa3d var _surface:Surface;

    /**
     * Creates a MouseEvent3D object.
     * @param type Type.
     * @param bubbles Indicates whether an event is a bubbling event.
     * @param localX X coordinate of the event in the target object's local space.
     * @param localY Y coordinate of the event in the target object's local space.
     * @param localZ Z coordinate of the event in the target object's local space.
     * @param relatedObject <code>Object3D</code> related to the <code>MouseEvent3D</code>.
     * @param ctrlKey Indicates whether the Control key is active.
     * @param altKey Indicates whether the Alt key is active.
     * @param shiftKey Indicates whether the Shift key is active.
     * @param buttonDown Indicates whether the main mouse button is active.
     * @param delta Indicates how many lines should be scrolled for each unit the user rotates the mouse wheel.
     */
    public function MouseEvent3D(type:String, bubbles:Boolean = true, localX:Number = NaN, localY:Number = NaN, localZ:Number = NaN, relatedObject:Object3D = null, ctrlKey:Boolean = false, altKey:Boolean = false, shiftKey:Boolean = false, buttonDown:Boolean = false, delta:int = 0) {
        super(type, bubbles);
        this.localX = localX;
        this.localY = localY;
        this.localZ = localZ;
        this.relatedObject = relatedObject;
        this.ctrlKey = ctrlKey;
        this.altKey = altKey;
        this.shiftKey = shiftKey;
        this.buttonDown = buttonDown;
        this.delta = delta;
    }

    /**
     * The <code>Surface</code> on which the event was received. The object that owns the surface may differ from the event target.
     */
    public function get surface():Surface {
        return _surface;
    }

    /**
     * Duplicates an instance of an Event subclass.
     * Returns a new <code>MouseEvent3D</code> object that is a copy of the original instance of the Event object.
     * @return A new <code>MouseEvent3D</code> object that is identical to the original.
     */
    override public function clone():Event {
        return new MouseEvent3D(type, _bubbles, localX, localY, localZ, relatedObject, ctrlKey, altKey, shiftKey, buttonDown, delta);
    }

    /**
     * Returns a string containing all the properties of the <code>MouseEvent3D</code> object.
     * @return A string containing all the properties of the <code>MouseEvent3D</code> object
     */
    override public function toString():String {
        return formatToString("MouseEvent3D", "type", "bubbles", "eventPhase", "localX", "localY", "localZ", "relatedObject", "altKey", "ctrlKey", "shiftKey", "buttonDown", "delta");
    }
}
}
| {
"pile_set_name": "Github"
} |
// CodeMirror mode for unified-diff text: classifies each line by its
// first character (+ added, - removed, @ hunk range info).
CodeMirror.defineMode("diff", function() {
  return {
    token: function(stream) {
      var first = stream.next();
      // The style applies to the whole line, so consume the rest of it.
      stream.skipToEnd();
      switch (first) {
        case "+": return "plus";
        case "-": return "minus";
        case "@": return "rangeinfo";
      }
      // Any other line gets no styling (implicit undefined).
    }
  };
});
CodeMirror.defineMIME("text/x-diff", "diff");
| {
"pile_set_name": "Github"
} |
// Copyright 2015 XLGAMES Inc.
//
// Distributed under the MIT License (See
// accompanying file "LICENSE" or the website
// http://www.opensource.org/licenses/mit-license.php)
#include "Resource.h"
#include "DeviceContext.h"
#include "DX11Utils.h"
namespace RenderCore { namespace Metal_DX11
{
    // Copies the entire contents of `src` into `dst` on the GPU via the
    // immediate/deferred context. Both resources must be compatible per
    // ID3D11DeviceContext::CopyResource requirements (same type/size/format).
    void Copy(DeviceContext& context, ID3D::Resource* dst, ID3D::Resource* src)
    {
        context.GetUnderlying()->CopyResource(dst, src);
    }

    // Copies a sub-region of `src` into `dst` at the given destination
    // offset, wrapping ID3D11DeviceContext::CopySubresourceRegion.
    void CopyPartial(
        DeviceContext& context,
        const CopyPartial_Dest& dst, const CopyPartial_Src& src)
    {
        // ~0u in the source coordinates acts as an "unset" sentinel: when
        // neither corner is set, pass nullptr to copy the whole subresource.
        // NOTE(review): only the _x components are tested here — presumably
        // the corners are always set as a whole; confirm against callers.
        bool useSrcBox = false;
        D3D11_BOX srcBox;
        if (src._leftTopFront._x != ~0u || src._rightBottomBack._x != ~0u) {
            srcBox = D3D11_BOX {
                src._leftTopFront._x, src._leftTopFront._y, src._leftTopFront._z,
                src._rightBottomBack._x, src._rightBottomBack._y, src._rightBottomBack._z
            };
            useSrcBox = true;
        }

        context.GetUnderlying()->CopySubresourceRegion(
            dst._resource, dst._subResource,
            dst._leftTopFront._x, dst._leftTopFront._y, dst._leftTopFront._z,
            src._resource, src._subResource,
            useSrcBox ? &srcBox : nullptr);
    }

    // Creates a new resource with the same description and contents as
    // `inputResource` (delegates to DuplicateResource in DX11Utils).
    intrusive_ptr<ID3D::Resource> Duplicate(DeviceContext& context, ID3D::Resource* inputResource)
    {
        return DuplicateResource(context.GetUnderlying(), inputResource);
    }
}}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0"?>
<ZopeData>
<record id="1" aka="AAAAAAAAAAE=">
<pickle>
<global name="ProxyField" module="Products.ERP5Form.ProxyField"/>
</pickle>
<pickle>
<dictionary>
<item>
<key> <string>delegated_list</string> </key>
<value>
<list/>
</value>
</item>
<item>
<key> <string>id</string> </key>
<value> <string>my_bank_account_number</string> </value>
</item>
<item>
<key> <string>message_values</string> </key>
<value>
<dictionary>
<item>
<key> <string>external_validator_failed</string> </key>
<value> <string>The input failed the external validator.</string> </value>
</item>
</dictionary>
</value>
</item>
<item>
<key> <string>overrides</string> </key>
<value>
<dictionary>
<item>
<key> <string>field_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>form_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>target</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</value>
</item>
<item>
<key> <string>tales</string> </key>
<value>
<dictionary>
<item>
<key> <string>field_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>form_id</string> </key>
<value> <string></string> </value>
</item>
<item>
<key> <string>target</string> </key>
<value> <string></string> </value>
</item>
</dictionary>
</value>
</item>
<item>
<key> <string>values</string> </key>
<value>
<dictionary>
<item>
<key> <string>description</string> </key>
<value> <string>The number of the bank account.</string> </value>
</item>
<item>
<key> <string>field_id</string> </key>
<value> <string>my_bank_account_number</string> </value>
</item>
<item>
<key> <string>form_id</string> </key>
<value> <string>BankAccount_viewFieldLibrary</string> </value>
</item>
<item>
<key> <string>target</string> </key>
<value> <string>Click to edit the target</string> </value>
</item>
<item>
<key> <string>title</string> </key>
<value> <string>Bank Account Number</string> </value>
</item>
</dictionary>
</value>
</item>
</dictionary>
</pickle>
</record>
</ZopeData>
| {
"pile_set_name": "Github"
} |
/*! =========================================================
* bootstrap-slider.js
*
* Maintainers:
* Kyle Kemp
* - Twitter: @seiyria
* - Github: seiyria
* Rohit Kalkur
* - Twitter: @Rovolutionary
* - Github: rovolution
*
* =========================================================
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ========================================================= */
/**
* Bridget makes jQuery widgets
* v1.0.1
* MIT license
*/
( function( $ ) {
( function( $ ) {
    'use strict';

    // -------------------------- utils -------------------------- //

    var slice = Array.prototype.slice;

    // no-op used as a fallback when console is unavailable
    function noop() {}

    // -------------------------- definition -------------------------- //

    // Installs $.bridget, which turns a prototypal class into a jQuery
    // plugin ($.fn.<namespace>). Instances are stored via $.data under the
    // plugin namespace, so repeated calls reuse the same instance.
    function defineBridget( $ ) {

        // bail if no jQuery
        if ( !$ ) {
            return;
        }

        // -------------------------- addOptionMethod -------------------------- //

        /**
         * adds option method -> $().plugin('option', {...})
         * @param {Function} PluginClass - constructor class
         */
        function addOptionMethod( PluginClass ) {
            // don't overwrite original option method
            if ( PluginClass.prototype.option ) {
                return;
            }

            // option setter: deep-merges a plain object into this.options
            PluginClass.prototype.option = function( opts ) {
                // bail out if not an object
                if ( !$.isPlainObject( opts ) ){
                    return;
                }
                this.options = $.extend( true, this.options, opts );
            };
        }

        // -------------------------- plugin bridge -------------------------- //

        // helper function for logging errors
        // $.error breaks jQuery chaining
        var logError = typeof console === 'undefined' ? noop :
            function( message ) {
                console.error( message );
            };

        /**
         * jQuery plugin bridge, access methods like $elem.plugin('method')
         * @param {String} namespace - plugin name
         * @param {Function} PluginClass - constructor class
         */
        function bridge( namespace, PluginClass ) {
            // add to jQuery fn namespace
            $.fn[ namespace ] = function( options ) {
                if ( typeof options === 'string' ) {
                    // call plugin method when first argument is a string
                    // get arguments for method
                    var args = slice.call( arguments, 1 );

                    for ( var i=0, len = this.length; i < len; i++ ) {
                        var elem = this[i];
                        var instance = $.data( elem, namespace );
                        if ( !instance ) {
                            logError( "cannot call methods on " + namespace + " prior to initialization; " +
                                "attempted to call '" + options + "'" );
                            continue;
                        }
                        // underscore-prefixed methods are treated as private
                        if ( !$.isFunction( instance[options] ) || options.charAt(0) === '_' ) {
                            logError( "no such method '" + options + "' for " + namespace + " instance" );
                            continue;
                        }

                        // trigger method with arguments
                        var returnValue = instance[ options ].apply( instance, args);

                        // break look and return first value if provided
                        if ( returnValue !== undefined && returnValue !== instance) {
                            return returnValue;
                        }
                    }
                    // return this if no return value
                    return this;
                } else {
                    // object/empty call: initialize (or re-option) each element
                    var objects = this.map( function() {
                        var instance = $.data( this, namespace );
                        if ( instance ) {
                            // apply options & init
                            instance.option( options );
                            instance._init();
                        } else {
                            // initialize new instance
                            instance = new PluginClass( this, options );
                            $.data( this, namespace, instance );
                        }
                        return $(this);
                    });

                    // unwrap to a single element when only one was matched
                    if(!objects || objects.length > 1) {
                        return objects;
                    } else {
                        return objects[0];
                    }
                }
            };

        }

        // -------------------------- bridget -------------------------- //

        /**
         * converts a Prototypical class into a proper jQuery plugin
         *   the class must have a ._init method
         * @param {String} namespace - plugin name, used in $().pluginName
         * @param {Function} PluginClass - constructor class
         */
        $.bridget = function( namespace, PluginClass ) {
            addOptionMethod( PluginClass );
            bridge( namespace, PluginClass );
        };

        return $.bridget;

    }

    // get jquery from browser global
    defineBridget( $ );

})( $ );
/*************************************************
BOOTSTRAP-SLIDER SOURCE CODE
**************************************************/
(function( $ ) {
// Centralized error-message strings/builders used by the Slider plugin.
var ErrorMsgs = {
    // Builds the message shown when a slider method receives bad input.
    formatInvalidInputErrorMsg : function(input) {
        var quoted = "'" + input + "'";
        return "Invalid input value " + quoted + " passed in";
    },
    // Message shown when a plugin method is invoked on an element that was
    // never initialized as a slider.
    callingContextNotSliderInstance : "Calling context element does not have instance of Slider bound to it. Check your code to make sure the JQuery object returned from the call to the slider() initializer is calling the method"
};
/*************************************************
CONSTRUCTOR
**************************************************/
// Slider constructor: all construction logic lives in createNewSlider so
// the same code path can rebuild an existing slider's DOM on re-init.
var Slider = function(element, options) {
    createNewSlider.call(this, element, options);
    return this;
};
function createNewSlider(element, options) {
/*************************************************
Create Markup
**************************************************/
if(typeof element === "string") {
this.element = document.querySelector(element);
} else if(element instanceof HTMLElement) {
this.element = element;
}
var origWidth = this.element.style.width;
var updateSlider = false;
var parent = this.element.parentNode;
var sliderTrackSelection;
var sliderMinHandle;
var sliderMaxHandle;
if (this.sliderElem) {
updateSlider = true;
} else {
/* Create elements needed for slider */
this.sliderElem = document.createElement("div");
this.sliderElem.className = "slider";
/* Create slider track elements */
var sliderTrack = document.createElement("div");
sliderTrack.className = "slider-track";
sliderTrackSelection = document.createElement("div");
sliderTrackSelection.className = "slider-selection";
sliderMinHandle = document.createElement("div");
sliderMinHandle.className = "slider-handle min-slider-handle";
sliderMaxHandle = document.createElement("div");
sliderMaxHandle.className = "slider-handle max-slider-handle";
sliderTrack.appendChild(sliderTrackSelection);
sliderTrack.appendChild(sliderMinHandle);
sliderTrack.appendChild(sliderMaxHandle);
var createAndAppendTooltipSubElements = function(tooltipElem) {
var arrow = document.createElement("div");
arrow.className = "tooltip-arrow";
var inner = document.createElement("div");
inner.className = "tooltip-inner";
tooltipElem.appendChild(arrow);
tooltipElem.appendChild(inner);
};
/* Create tooltip elements */
var sliderTooltip = document.createElement("div");
sliderTooltip.className = "tooltip tooltip-main";
createAndAppendTooltipSubElements(sliderTooltip);
var sliderTooltipMin = document.createElement("div");
sliderTooltipMin.className = "tooltip tooltip-min";
createAndAppendTooltipSubElements(sliderTooltipMin);
var sliderTooltipMax = document.createElement("div");
sliderTooltipMax.className = "tooltip tooltip-max";
createAndAppendTooltipSubElements(sliderTooltipMax);
/* Append components to sliderElem */
this.sliderElem.appendChild(sliderTrack);
this.sliderElem.appendChild(sliderTooltip);
this.sliderElem.appendChild(sliderTooltipMin);
this.sliderElem.appendChild(sliderTooltipMax);
/* Append slider element to parent container, right before the original <input> element */
parent.insertBefore(this.sliderElem, this.element);
/* Hide original <input> element */
this.element.style.display = "none";
}
/* If JQuery exists, cache JQ references */
if($) {
this.$element = $(this.element);
this.$sliderElem = $(this.sliderElem);
}
/*************************************************
Process Options
**************************************************/
options = options ? options : {};
var optionTypes = Object.keys(this.defaultOptions);
for(var i = 0; i < optionTypes.length; i++) {
var optName = optionTypes[i];
// First check if an option was passed in via the constructor
var val = options[optName];
// If no data attrib, then check data atrributes
val = (typeof val !== 'undefined') ? val : getDataAttrib(this.element, optName);
// Finally, if nothing was specified, use the defaults
val = (val !== null) ? val : this.defaultOptions[optName];
// Set all options on the instance of the Slider
if(!this.options) {
this.options = {};
}
this.options[optName] = val;
}
function getDataAttrib(element, optName) {
var dataName = "data-slider-" + optName;
var dataValString = element.getAttribute(dataName);
try {
return JSON.parse(dataValString);
}
catch(err) {
return dataValString;
}
}
/*************************************************
Setup
**************************************************/
this.eventToCallbackMap = {};
this.sliderElem.id = this.options.id;
this.touchCapable = 'ontouchstart' in window || (window.DocumentTouch && document instanceof window.DocumentTouch);
this.tooltip = this.sliderElem.querySelector('.tooltip-main');
this.tooltipInner = this.tooltip.querySelector('.tooltip-inner');
this.tooltip_min = this.sliderElem.querySelector('.tooltip-min');
this.tooltipInner_min = this.tooltip_min.querySelector('.tooltip-inner');
this.tooltip_max = this.sliderElem.querySelector('.tooltip-max');
this.tooltipInner_max= this.tooltip_max.querySelector('.tooltip-inner');
if (updateSlider === true) {
// Reset classes
this._removeClass(this.sliderElem, 'slider-horizontal');
this._removeClass(this.sliderElem, 'slider-vertical');
this._removeClass(this.tooltip, 'hide');
this._removeClass(this.tooltip_min, 'hide');
this._removeClass(this.tooltip_max, 'hide');
// Undo existing inline styles for track
["left", "top", "width", "height"].forEach(function(prop) {
this._removeProperty(this.trackSelection, prop);
}, this);
// Undo inline styles on handles
[this.handle1, this.handle2].forEach(function(handle) {
this._removeProperty(handle, 'left');
this._removeProperty(handle, 'top');
}, this);
// Undo inline styles and classes on tooltips
[this.tooltip, this.tooltip_min, this.tooltip_max].forEach(function(tooltip) {
this._removeProperty(tooltip, 'left');
this._removeProperty(tooltip, 'top');
this._removeProperty(tooltip, 'margin-left');
this._removeProperty(tooltip, 'margin-top');
this._removeClass(tooltip, 'right');
this._removeClass(tooltip, 'top');
}, this);
}
if(this.options.orientation === 'vertical') {
this._addClass(this.sliderElem,'slider-vertical');
this.stylePos = 'top';
this.mousePos = 'pageY';
this.sizePos = 'offsetHeight';
this._addClass(this.tooltip, 'right');
this.tooltip.style.left = '100%';
this._addClass(this.tooltip_min, 'right');
this.tooltip_min.style.left = '100%';
this._addClass(this.tooltip_max, 'right');
this.tooltip_max.style.left = '100%';
} else {
this._addClass(this.sliderElem, 'slider-horizontal');
this.sliderElem.style.width = origWidth;
this.options.orientation = 'horizontal';
this.stylePos = 'left';
this.mousePos = 'pageX';
this.sizePos = 'offsetWidth';
this._addClass(this.tooltip, 'top');
this.tooltip.style.top = -this.tooltip.outerHeight - 14 + 'px';
this._addClass(this.tooltip_min, 'top');
this.tooltip_min.style.top = -this.tooltip_min.outerHeight - 14 + 'px';
this._addClass(this.tooltip_max, 'top');
this.tooltip_max.style.top = -this.tooltip_max.outerHeight - 14 + 'px';
}
if (this.options.value instanceof Array) {
this.options.range = true;
} else if (this.options.range) {
// User wants a range, but value is not an array
this.options.value = [this.options.value, this.options.max];
}
this.trackSelection = sliderTrackSelection || this.trackSelection;
if (this.options.selection === 'none') {
this._addClass(this.trackSelection, 'hide');
}
this.handle1 = sliderMinHandle || this.handle1;
this.handle2 = sliderMaxHandle || this.handle2;
if (updateSlider === true) {
// Reset classes
this._removeClass(this.handle1, 'round triangle');
this._removeClass(this.handle2, 'round triangle hide');
}
var availableHandleModifiers = ['round', 'triangle', 'custom'];
var isValidHandleType = availableHandleModifiers.indexOf(this.options.handle) !== -1;
if (isValidHandleType) {
this._addClass(this.handle1, this.options.handle);
this._addClass(this.handle2, this.options.handle);
}
this.offset = this._offset(this.sliderElem);
this.size = this.sliderElem[this.sizePos];
this.setValue(this.options.value);
/******************************************
Bind Event Listeners
******************************************/
// Bind keyboard handlers
this.handle1Keydown = this._keydown.bind(this, 0);
this.handle1.addEventListener("keydown", this.handle1Keydown, false);
// BUG FIX: handle2 was previously bound with index 0, so arrow keys on
// the second (max) handle moved the first handle's percentage instead
// of its own (_keydown's first argument is the handle index).
this.handle2Keydown = this._keydown.bind(this, 1);
this.handle2.addEventListener("keydown", this.handle2Keydown, false);
if (this.touchCapable) {
// Bind touch handlers
this.mousedown = this._mousedown.bind(this);
this.sliderElem.addEventListener("touchstart", this.mousedown, false);
} else {
// Bind mouse handlers
this.mousedown = this._mousedown.bind(this);
this.sliderElem.addEventListener("mousedown", this.mousedown, false);
}
// Bind tooltip-related handlers
if(this.options.tooltip === 'hide') {
this._addClass(this.tooltip, 'hide');
this._addClass(this.tooltip_min, 'hide');
this._addClass(this.tooltip_max, 'hide');
} else if(this.options.tooltip === 'always') {
this._showTooltip();
this._alwaysShowTooltip = true;
} else {
this.showTooltip = this._showTooltip.bind(this);
this.hideTooltip = this._hideTooltip.bind(this);
this.sliderElem.addEventListener("mouseenter", this.showTooltip, false);
this.sliderElem.addEventListener("mouseleave", this.hideTooltip, false);
this.handle1.addEventListener("focus", this.showTooltip, false);
this.handle1.addEventListener("blur", this.hideTooltip, false);
this.handle2.addEventListener("focus", this.showTooltip, false);
this.handle2.addEventListener("blur", this.hideTooltip, false);
}
if(this.options.enabled) {
this.enable();
} else {
this.disable();
}
}
/*************************************************
INSTANCE PROPERTIES/METHODS
- Any methods bound to the prototype are considered
part of the plugin's `public` interface
**************************************************/
Slider.prototype = {
// Bridget (jQuery bridge) calls _init on construction; a no-op here
// because all setup happens in createNewSlider.
_init: function() {}, // NOTE: Must exist to support bridget
constructor: Slider,
// Defaults merged with user options; see individual option docs in the
// project README. `value` may be a number or a [low, high] array.
defaultOptions: {
id: "",
min: 0,
max: 10,
step: 1,
// 0 means "derive precision from the number of decimals in `step`".
precision: 0,
orientation: 'horizontal',
value: 5,
range: false,
selection: 'before',
tooltip: 'show',
tooltip_split: false,
handle: 'round',
reversed: false,
enabled: true,
// Default tooltip text: "lo : hi" for ranges, the plain value otherwise.
formatter: function(val) {
if(val instanceof Array) {
return val[0] + " : " + val[1];
} else {
return val;
}
},
natural_arrow_keys: false
},
// True while the pointer is over the slider (used by tooltip logic).
over: false,
// True while a handle is being dragged.
inDrag: false,
getValue: function() {
if (this.options.range) {
return this.options.value;
}
return this.options.value[0];
},
// Sets the slider value. `val` may be a number or, for range sliders, a
// two-element array. Values are precision-adjusted and clamped to
// [options.min, options.max]; layout and the element's data attributes
// are refreshed. Fires 'slide' only when triggerSlideEvent === true.
setValue: function(val, triggerSlideEvent) {
// Falsy input (undefined/null) is normalized to 0.
if (!val) {
val = 0;
}
this.options.value = this._validateInputValue(val);
var applyPrecision = this._applyPrecision.bind(this);
if (this.options.range) {
// Round both endpoints to the configured precision, then clamp each
// into [min, max].
this.options.value[0] = applyPrecision(this.options.value[0]);
this.options.value[1] = applyPrecision(this.options.value[1]);
this.options.value[0] = Math.max(this.options.min, Math.min(this.options.max, this.options.value[0]));
this.options.value[1] = Math.max(this.options.min, Math.min(this.options.max, this.options.value[1]));
} else {
// Single slider: store as a 2-element array internally; the hidden
// second element tracks min or max depending on `selection` so the
// track-selection fill is drawn on the right side.
this.options.value = applyPrecision(this.options.value);
this.options.value = [ Math.max(this.options.min, Math.min(this.options.max, this.options.value))];
this._addClass(this.handle2, 'hide');
if (this.options.selection === 'after') {
this.options.value[1] = this.options.max;
} else {
this.options.value[1] = this.options.min;
}
}
// Recompute handle positions (and step size) as percentages of the range.
this.diff = this.options.max - this.options.min;
if (this.diff > 0) {
this.percentage = [
(this.options.value[0] - this.options.min) * 100 / this.diff,
(this.options.value[1] - this.options.min) * 100 / this.diff,
this.options.step * 100 / this.diff
];
} else {
// Degenerate range (max <= min): park everything at 0.
this.percentage = [0, 0, 100];
}
this._layout();
var sliderValue = this.options.range ? this.options.value : this.options.value[0];
this._setDataVal(sliderValue);
if(triggerSlideEvent === true) {
this._trigger('slide', sliderValue);
}
return this;
},
// Tears the widget down: unbinds all listeners, removes the generated
// slider DOM, restores the original <input>, and clears custom/jQuery
// event bindings so nothing leaks.
destroy: function(){
// Remove event handlers on slider elements
this._removeSliderEventHandlers();
// Remove the slider from the DOM
this.sliderElem.parentNode.removeChild(this.sliderElem);
/* Show original <input> element */
this.element.style.display = "";
// Clear out custom event bindings
this._cleanUpEventCallbacksMap();
// Remove data values
this.element.removeAttribute("data");
// Remove JQuery handlers/data
if($) {
this._unbindJQueryEventHandlers();
this.$element.removeData('slider');
}
},
disable: function() {
this.options.enabled = false;
this.handle1.removeAttribute("tabindex");
this.handle2.removeAttribute("tabindex");
this._addClass(this.sliderElem, 'slider-disabled');
this._trigger('slideDisabled');
return this;
},
enable: function() {
this.options.enabled = true;
this.handle1.setAttribute("tabindex", 0);
this.handle2.setAttribute("tabindex", 0);
this._removeClass(this.sliderElem, 'slider-disabled');
this._trigger('slideEnabled');
return this;
},
toggle: function() {
if(this.options.enabled) {
this.disable();
} else {
this.enable();
}
return this;
},
isEnabled: function() {
return this.options.enabled;
},
// Subscribes `callback` to the named slider event. With jQuery present
// the handler is bound on both the original element and the slider DOM;
// otherwise it goes into the plain-JS callback map. Chainable.
on: function(evt, callback) {
if($) {
this.$element.on(evt, callback);
this.$sliderElem.on(evt, callback);
} else {
this._bindNonQueryEventHandler(evt, callback);
}
return this;
},
// Returns one option by name, or the whole options object when no name
// is given.
getAttribute: function(attribute) {
if(attribute) {
return this.options[attribute];
} else {
return this.options;
}
},
// Sets one option by name. NOTE: takes effect only after refresh().
setAttribute: function(attribute, value) {
this.options[attribute] = value;
return this;
},
// Rebuilds the slider in place with the current options (used after
// setAttribute). Re-registers the instance with jQuery's data store.
refresh: function() {
this._removeSliderEventHandlers();
createNewSlider.call(this, this.element, this.options);
if($) {
// Bind new instance of slider to the element
$.data(this.element, 'slider', this);
}
return this;
},
/******************************+
HELPERS
- Any method that is not part of the public interface.
- Place it underneath this comment block and write its signature like so:
_fnName : function() {...}
********************************/
_removeSliderEventHandlers: function() {
// Remove event listeners from handle1
this.handle1.removeEventListener("keydown", this.handle1Keydown, false);
this.handle1.removeEventListener("focus", this.showTooltip, false);
this.handle1.removeEventListener("blur", this.hideTooltip, false);
// Remove event listeners from handle2
this.handle2.removeEventListener("keydown", this.handle2Keydown, false);
this.handle2.removeEventListener("focus", this.handle2Keydown, false);
this.handle2.removeEventListener("blur", this.handle2Keydown, false);
// Remove event listeners from sliderElem
this.sliderElem.removeEventListener("mouseenter", this.showTooltip, false);
this.sliderElem.removeEventListener("mouseleave", this.hideTooltip, false);
this.sliderElem.removeEventListener("touchstart", this.mousedown, false);
this.sliderElem.removeEventListener("mousedown", this.mousedown, false);
},
_bindNonQueryEventHandler: function(evt, callback) {
if(this.eventToCallbackMap[evt]===undefined) {
this.eventToCallbackMap[evt] = [];
}
this.eventToCallbackMap[evt].push(callback);
},
_cleanUpEventCallbacksMap: function() {
var eventNames = Object.keys(this.eventToCallbackMap);
for(var i = 0; i < eventNames.length; i++) {
var eventName = eventNames[i];
this.eventToCallbackMap[eventName] = null;
}
},
_showTooltip: function() {
if (this.options.tooltip_split === false ){
this._addClass(this.tooltip, 'in');
} else {
this._addClass(this.tooltip_min, 'in');
this._addClass(this.tooltip_max, 'in');
}
this.over = true;
},
_hideTooltip: function() {
if (this.inDrag === false && this.alwaysShowTooltip !== true) {
this._removeClass(this.tooltip, 'in');
this._removeClass(this.tooltip_min, 'in');
this._removeClass(this.tooltip_max, 'in');
}
this.over = false;
},
_layout: function() {
var positionPercentages;
if(this.options.reversed) {
positionPercentages = [ 100 - this.percentage[0], this.percentage[1] ];
} else {
positionPercentages = [ this.percentage[0], this.percentage[1] ];
}
this.handle1.style[this.stylePos] = positionPercentages[0]+'%';
this.handle2.style[this.stylePos] = positionPercentages[1]+'%';
if (this.options.orientation === 'vertical') {
this.trackSelection.style.top = Math.min(positionPercentages[0], positionPercentages[1]) +'%';
this.trackSelection.style.height = Math.abs(positionPercentages[0] - positionPercentages[1]) +'%';
} else {
this.trackSelection.style.left = Math.min(positionPercentages[0], positionPercentages[1]) +'%';
this.trackSelection.style.width = Math.abs(positionPercentages[0] - positionPercentages[1]) +'%';
var offset_min = this.tooltip_min.getBoundingClientRect();
var offset_max = this.tooltip_max.getBoundingClientRect();
if (offset_min.right > offset_max.left) {
this._removeClass(this.tooltip_max, 'top');
this._addClass(this.tooltip_max, 'bottom');
this.tooltip_max.style.top = 18 + 'px';
} else {
this._removeClass(this.tooltip_max, 'bottom');
this._addClass(this.tooltip_max, 'top');
this.tooltip_max.style.top = -30 + 'px';
}
}
var formattedTooltipVal;
if (this.options.range) {
formattedTooltipVal = this.options.formatter(this.options.value);
this._setText(this.tooltipInner, formattedTooltipVal);
this.tooltip.style[this.stylePos] = (positionPercentages[1] + positionPercentages[0])/2 + '%';
if (this.options.orientation === 'vertical') {
this._css(this.tooltip, 'margin-top', -this.tooltip.offsetHeight / 2 + 'px');
} else {
this._css(this.tooltip, 'margin-left', -this.tooltip.offsetWidth / 2 + 'px');
}
if (this.options.orientation === 'vertical') {
this._css(this.tooltip, 'margin-top', -this.tooltip.offsetHeight / 2 + 'px');
} else {
this._css(this.tooltip, 'margin-left', -this.tooltip.offsetWidth / 2 + 'px');
}
var innerTooltipMinText = this.options.formatter(this.options.value[0]);
this._setText(this.tooltipInner_min, innerTooltipMinText);
var innerTooltipMaxText = this.options.formatter(this.options.value[1]);
this._setText(this.tooltipInner_max, innerTooltipMaxText);
this.tooltip_min.style[this.stylePos] = positionPercentages[0] + '%';
if (this.options.orientation === 'vertical') {
this._css(this.tooltip_min, 'margin-top', -this.tooltip_min.offsetHeight / 2 + 'px');
} else {
this._css(this.tooltip_min, 'margin-left', -this.tooltip_min.offsetWidth / 2 + 'px');
}
this.tooltip_max.style[this.stylePos] = positionPercentages[1] + '%';
if (this.options.orientation === 'vertical') {
this._css(this.tooltip_max, 'margin-top', -this.tooltip_max.offsetHeight / 2 + 'px');
} else {
this._css(this.tooltip_max, 'margin-left', -this.tooltip_max.offsetWidth / 2 + 'px');
}
} else {
formattedTooltipVal = this.options.formatter(this.options.value[0]);
this._setText(this.tooltipInner, formattedTooltipVal);
this.tooltip.style[this.stylePos] = positionPercentages[0] + '%';
if (this.options.orientation === 'vertical') {
this._css(this.tooltip, 'margin-top', -this.tooltip.offsetHeight / 2 + 'px');
} else {
this._css(this.tooltip, 'margin-left', -this.tooltip.offsetWidth / 2 + 'px');
}
}
},
_removeProperty: function(element, prop) {
if (element.style.removeProperty) {
element.style.removeProperty(prop);
} else {
element.style.removeAttribute(prop);
}
},
_mousedown: function(ev) {
if(!this.options.enabled) {
return false;
}
this._triggerFocusOnHandle();
this.offset = this._offset(this.sliderElem);
this.size = this.sliderElem[this.sizePos];
var percentage = this._getPercentage(ev);
if (this.options.range) {
var diff1 = Math.abs(this.percentage[0] - percentage);
var diff2 = Math.abs(this.percentage[1] - percentage);
this.dragged = (diff1 < diff2) ? 0 : 1;
} else {
this.dragged = 0;
}
this.percentage[this.dragged] = this.options.reversed ? 100 - percentage : percentage;
this._layout();
this.mousemove = this._mousemove.bind(this);
this.mouseup = this._mouseup.bind(this);
if (this.touchCapable) {
// Touch: Bind touch events:
document.addEventListener("touchmove", this.mousemove, false);
document.addEventListener("touchend", this.mouseup, false);
} else {
// Bind mouse events:
document.addEventListener("mousemove", this.mousemove, false);
document.addEventListener("mouseup", this.mouseup, false);
}
this.inDrag = true;
var val = this._calculateValue();
this._trigger('slideStart', val);
this._setDataVal(val);
this.setValue(val);
this._pauseEvent(ev);
return true;
},
_triggerFocusOnHandle: function(handleIdx) {
if(handleIdx === 0) {
this.handle1.focus();
}
if(handleIdx === 1) {
this.handle2.focus();
}
},
// Arrow-key handler for a handle. Moves the handle one step in the key's
// direction, clamped to [0, 100] percent, and fires the full
// slideStart/slide/slideStop event sequence.
_keydown: function(handleIdx, ev) {
if(!this.options.enabled) {
return false;
}
// Map arrow keys to a direction: left/down decrease, right/up increase.
var dir;
switch (ev.keyCode) {
case 37: // left
case 40: // down
dir = -1;
break;
case 39: // right
case 38: // up
dir = 1;
break;
}
// Any other key: let the browser handle it.
if (!dir) {
return;
}
// use natural arrow keys instead of from min to max
// (vertical sliders grow downward and reversed horizontal sliders grow
// leftward, so the visual direction is the opposite of the value
// direction in those cases)
if (this.options.natural_arrow_keys) {
var ifVerticalAndNotReversed = (this.options.orientation === 'vertical' && !this.options.reversed);
var ifHorizontalAndReversed = (this.options.orientation === 'horizontal' && this.options.reversed);
if (ifVerticalAndNotReversed || ifHorizontalAndReversed) {
dir = dir * -1;
}
}
// percentage[2] is one step expressed as a percentage of the range.
var oneStepValuePercentageChange = dir * this.percentage[2];
var percentage = this.percentage[handleIdx] + oneStepValuePercentageChange;
if (percentage > 100) {
percentage = 100;
} else if (percentage < 0) {
percentage = 0;
}
// Treat the key press like a (one-step) drag of this handle.
this.dragged = handleIdx;
this._adjustPercentageForRangeSliders(percentage);
this.percentage[this.dragged] = percentage;
this._layout();
var val = this._calculateValue();
this._trigger('slideStart', val);
this._setDataVal(val);
this.setValue(val, true);
this._trigger('slideStop', val);
this._setDataVal(val);
this._pauseEvent(ev);
return false;
},
_pauseEvent: function(ev) {
if(ev.stopPropagation) {
ev.stopPropagation();
}
if(ev.preventDefault) {
ev.preventDefault();
}
ev.cancelBubble=true;
ev.returnValue=false;
},
// Drag-move handler: converts the pointer position to a percentage,
// updates the dragged handle, and applies the new value (firing 'slide').
_mousemove: function(ev) {
if(!this.options.enabled) {
return false;
}
var percentage = this._getPercentage(ev);
this._adjustPercentageForRangeSliders(percentage);
this.percentage[this.dragged] = this.options.reversed ? 100 - percentage : percentage;
this._layout();
var val = this._calculateValue();
this.setValue(val, true);
return false;
},
// Keeps range handles ordered: if the dragged handle would cross the
// other one, the handles swap roles (the crossed handle snaps to the
// other's position and `dragged` switches to the other index).
_adjustPercentageForRangeSliders: function(percentage) {
if (this.options.range) {
if (this.dragged === 0 && this.percentage[1] < percentage) {
this.percentage[0] = this.percentage[1];
this.dragged = 1;
} else if (this.dragged === 1 && this.percentage[0] > percentage) {
this.percentage[1] = this.percentage[0];
this.dragged = 0;
}
}
},
// Ends a drag: unbinds the document-level move/up listeners, hides the
// tooltip if the pointer has left the slider, and fires 'slideStop'.
_mouseup: function() {
if(!this.options.enabled) {
return false;
}
if (this.touchCapable) {
// Touch: Unbind touch event handlers:
document.removeEventListener("touchmove", this.mousemove, false);
document.removeEventListener("touchend", this.mouseup, false);
} else {
// Unbind mouse event handlers:
document.removeEventListener("mousemove", this.mousemove, false);
document.removeEventListener("mouseup", this.mouseup, false);
}
this.inDrag = false;
// `over` is false when the pointer already left the slider element.
if (this.over === false) {
this._hideTooltip();
}
var val = this._calculateValue();
this._layout();
this._setDataVal(val);
this._trigger('slideStop', val);
return false;
},
// Converts the current percentage position(s) back into slider value(s):
// snaps to the nearest step, clamps to [min, max], applies precision, and
// stores the result in options.value. Returns a number, or a 2-element
// array for range sliders.
_calculateValue: function() {
var val;
if (this.options.range) {
// Endpoints at exactly 0%/100% short-circuit to min/max so no
// rounding drift is introduced at the extremes.
val = [this.options.min,this.options.max];
if (this.percentage[0] !== 0){
val[0] = (Math.max(this.options.min, this.options.min + Math.round((this.diff * this.percentage[0]/100)/this.options.step)*this.options.step));
val[0] = this._applyPrecision(val[0]);
}
if (this.percentage[1] !== 100){
val[1] = (Math.min(this.options.max, this.options.min + Math.round((this.diff * this.percentage[1]/100)/this.options.step)*this.options.step));
val[1] = this._applyPrecision(val[1]);
}
this.options.value = val;
} else {
// Single value: snap to step, then clamp.
val = (this.options.min + Math.round((this.diff * this.percentage[0]/100)/this.options.step)*this.options.step);
if (val < this.options.min) {
val = this.options.min;
}
else if (val > this.options.max) {
val = this.options.max;
}
val = parseFloat(val);
val = this._applyPrecision(val);
// Keep the internal 2-element representation (value[1] tracks the
// selection anchor set by setValue).
this.options.value = [val, this.options.value[1]];
}
return val;
},
_applyPrecision: function(val) {
var precision = this.options.precision || this._getNumDigitsAfterDecimalPlace(this.step);
return this._applyToFixedAndParseFloat(val, precision);
},
_getNumDigitsAfterDecimalPlace: function(num) {
var match = (''+num).match(/(?:\.(\d+))?(?:[eE]([+-]?\d+))?$/);
if (!match) { return 0; }
return Math.max(0, (match[1] ? match[1].length : 0) - (match[2] ? +match[2] : 0));
},
_applyToFixedAndParseFloat: function(num, toFixedInput) {
var truncatedNum = num.toFixed(toFixedInput);
return parseFloat(truncatedNum);
},
/*
Credits to Mike Samuel for the decimal-counting approach used by
_getNumDigitsAfterDecimalPlace (above).
Source: http://stackoverflow.com/questions/10454518/javascript-how-to-retrieve-the-number-of-decimals-of-a-string-number
*/
// Converts a mouse/touch event into a percentage position along the
// slider track, snapped to the step percentage and clamped to [0, 100].
_getPercentage: function(ev) {
// Touch events carry their coordinates on the first touch point.
if (this.touchCapable && (ev.type === 'touchstart' || ev.type === 'touchmove')) {
ev = ev.touches[0];
}
var percentage = (ev[this.mousePos] - this.offset[this.stylePos])*100/this.size;
// Snap to the nearest step (percentage[2] is one step in percent).
percentage = Math.round(percentage/this.percentage[2])*this.percentage[2];
return Math.max(0, Math.min(100, percentage));
},
_validateInputValue: function(val) {
if(typeof val === 'number') {
return val;
} else if(val instanceof Array) {
this._validateArray(val);
return val;
} else {
throw new Error( ErrorMsgs.formatInvalidInputErrorMsg(val) );
}
},
_validateArray: function(val) {
for(var i = 0; i < val.length; i++) {
var input = val[i];
if (typeof input !== 'number') { throw new Error( ErrorMsgs.formatInvalidInputErrorMsg(input) ); }
}
},
_setDataVal: function(val) {
var value = "value: '" + val + "'";
this.element.setAttribute('data', value);
this.element.setAttribute('value', val);
},
_trigger: function(evt, val) {
val = val || undefined;
var callbackFnArray = this.eventToCallbackMap[evt];
if(callbackFnArray && callbackFnArray.length) {
for(var i = 0; i < callbackFnArray.length; i++) {
var callbackFn = callbackFnArray[i];
callbackFn(val);
}
}
/* If JQuery exists, trigger JQuery events */
if($) {
this._triggerJQueryEvent(evt, val);
}
},
_triggerJQueryEvent: function(evt, val) {
var eventData = {
type: evt,
value: val
};
this.$element.trigger(eventData);
this.$sliderElem.trigger(eventData);
},
_unbindJQueryEventHandlers: function() {
this.$element.off();
this.$sliderElem.off();
},
_setText: function(element, text) {
if(typeof element.innerText !== "undefined") {
element.innerText = text;
} else if(typeof element.textContent !== "undefined") {
element.textContent = text;
}
},
// Removes each space-separated class in `classString` from the element
// (classList is avoided for older-browser compatibility). Each class is
// matched as a whole word via a boundary regex.
_removeClass: function(element, classString) {
var classes = classString.split(" ");
var newClasses = element.className;
for(var i = 0; i < classes.length; i++) {
var classTag = classes[i];
// Match the class only when delimited by whitespace or string edges.
var regex = new RegExp("(?:\\s|^)" + classTag + "(?:\\s|$)");
newClasses = newClasses.replace(regex, " ");
}
element.className = newClasses.trim();
},
// Adds each space-separated class in `classString` to the element,
// skipping classes that are already present.
_addClass: function(element, classString) {
var classes = classString.split(" ");
var newClasses = element.className;
for(var i = 0; i < classes.length; i++) {
var classTag = classes[i];
var regex = new RegExp("(?:\\s|^)" + classTag + "(?:\\s|$)");
var ifClassExists = regex.test(newClasses);
if(!ifClassExists) {
newClasses += " " + classTag;
}
}
element.className = newClasses.trim();
},
_offset: function (obj) {
var ol = 0;
var ot = 0;
if (obj.offsetParent) {
do {
ol += obj.offsetLeft;
ot += obj.offsetTop;
} while (obj = obj.offsetParent);
}
return {
left: ol,
top: ot
};
},
_css: function(elementRef, styleName, value) {
elementRef.style[styleName] = value;
}
};
/*********************************
Attach to global namespace
*********************************/
if($) {
// Register through jQuery/bridget. If another plugin already claimed
// $.fn.slider, register under 'bootstrapSlider' to avoid clobbering it.
var namespace = $.fn.slider ? 'bootstrapSlider' : 'slider';
$.bridget(namespace, Slider);
} else {
// No jQuery: expose the constructor directly on window.
window.Slider = Slider;
}
})( $ );
})( window.jQuery ); | {
"pile_set_name": "Github"
} |
---
layout: post
title: "Jenkins Pipeline Stage Result Visualization Improvements"
tags:
- pipeline
- blueocean
author: dwnusbaum
---
Some changes have recently been released to give Pipeline authors some new tools to improve Pipeline visualizations in Blue Ocean, in particular to address the highly-voted issue link:https://issues.jenkins-ci.org/browse/JENKINS-39203[JENKINS-39203], which causes all non-failing stages to be visualized as though they were unstable if the overall build result of the Pipeline was unstable. This issue made it difficult to quickly identify why a build was unstable, and forced users to read through builds logs and the Jenkinsfile to figure out what actually happened.
In order to fix this issue, we introduced a new Pipeline API that can be used to attach additional result information to individual Pipeline steps. Visualization tools like Blue Ocean use this new API when deciding how a given stage should be displayed. Steps like `junit` that used to set only the overall build result now additionally use the new API to set step-level result information. We created the new `unstable` and `warnError` steps so that Pipeline authors with more complicated use cases can still take advantage of this new API.
The core fixes for the issue are present in the following plugins, all of which require Jenkins 2.138.4 or newer:
* Pipeline: API 2.34
* Pipeline: Basic Steps 2.18 (requires a simultaneous update to Pipeline: Groovy 2.70)
* Pipeline: Graph Analysis 1.10
* Pipeline: Declarative 1.3.9
* Blue Ocean 1.17.0
Here is a screenshot from Blue Ocean of a Pipeline using the `unstable` step where only the failing stage is marked as unstable:
image::/images/post-images/2019/07/2019-07-05-jenkins-pipeline-stage-result-visualization-improvements/unstable-stage-example.png[Visualization of a Pipeline in Blue Ocean with a single stage shown as unstable]
== Examples
Here are some examples of how to update your Pipelines to use the new improvements:
* **Use the new link:https://jenkins.io/doc/pipeline/steps/workflow-basic-steps/#warnerror-catch-error-and-set-build-and-stage-result-to-unstable[`warnError`] step to catch errors and mark the build and stage as unstable.** `warnError` requires a single `String` parameter, which is a message to log when an error is caught. When `warnError` catches an error, it logs the message and the error and sets the build and stage result to unstable. Using it looks like this:
+
[source,groovy]
----
warnError('Script failed!') {
sh('false')
}
----
* **Use the new link:https://jenkins.io/doc/pipeline/steps/workflow-basic-steps/#unstable-set-stage-result-to-unstable[`unstable`] step to set the build and stage result to unstable**. This step can be used as a direct replacement for `currentBuild.result = 'UNSTABLE'`, and may be useful in cases where `warnError` is not flexible enough. `unstable` requires a single `String` parameter, which is a message to log when the step runs. Using it might look like this:
+
[source,groovy]
----
try {
sh('false')
} catch (ex) {
unstable('Script failed!')
}
----
* **link:https://plugins.jenkins.io/junit[JUnit Plugin]**: Update to version 1.28 or newer to pick up fixes for the `junit` step so that it correctly marks the stage as unstable.
* **link:https://plugins.jenkins.io/warnings-ng[Warnings Next Generation Plugin]**: Update to version 5.2.0 or newer to pick up fixes for the `publishIssues` and `recordIssues` steps so that they correctly mark the stage as unstable.
* **Other Plugins**: If your Pipeline is marked as unstable by a step in another plugin, please link:https://issues.jenkins-ci.org[file a new issue] with the component set to that plugin (after checking for duplicates), clearly describing which step has the problem and under what circumstances it occurs, and link to the developer section of this post as a reference for how the maintainer might be able to address the problem.
== Limitations
* If you do not migrate to the `unstable` or `warnError` steps, or update plugins that set the build result to versions that integrate with the new API, then in cases where the build is unstable, Blue Ocean will not show any stages as unstable.
* Even after these changes, `currentBuild.result` continues to refer only to the overall build result. Unfortunately, it was not possible to adapt the `currentBuild` global variable to make it track step or stage-level results, since it is implemented as a global variable, which means it does not have any step-level context through which it could use the new API.
* link:https://plugins.jenkins.io/pipeline-stage-view[Pipeline Stage View Plugin] has not yet been updated to use the new API, so these changes do not affect the visualization it provides.
== History
Jenkins Pipeline steps can complete in one of two ways: successfully, by returning a (possibly null) result, or unsuccessfully, by throwing an exception. When a step fails by throwing an exception, that exception propagates throughout the Pipeline until another step or Groovy code catches it, or it reaches the top level of the Pipeline, which causes the Pipeline itself to fail. Depending on the type of exception thrown, the final result of the Pipeline may be something other than failure (for example in some cases it will be aborted). Because of the way the exception propagates, it is easy for tools like Blue Ocean to identify steps (and therefore stages) which failed due to an exception.
In order for Pipelines to be able to interact with established Jenkins APIs, it was also necessary for Pipeline builds to have an overall build result that can be modified during the build. Among other things, this allows Pipelines to use build steps and wrappers that were originally written for use in Freestyle projects.
In some cases, it is desirable for a Pipeline step to be able to complete successfully so that the rest of the Pipeline continues normal execution, but for it to be able to note that some kind of error occurred so that visualizations are able to identify that something went wrong with the step, even though it didn't fail completely. A good example of this is the `junit` step. This step looks at specified test results, and if there were any failures, marks the overall build result as unstable. This kind of behavior is problematic for visualization tools like Blue Ocean, because the step completed successfully, and there is no programmatic way to associate the overall build result with the step that ended up setting that result.
Looking at link:https://issues.jenkins-ci.org/browse/JENKINS-39203[JENKINS-39203] again, we see that there were essentially two options for the visualization. If the overall build result was unstable, either all steps that completed successfully could be shown as unstable, because they may have been the step that caused the build to become unstable, or they could be shown as successful, because we have no way to relate the setting of the build result to a specific step. In the end, the first option was chosen.
To work around this issue, some users tried to do things like throw exceptions and add try/catch blocks around stages that handle exceptions so that Blue Ocean would be able to use the exceptions to mark step and stage results as desired, and then by catching the exception the Pipeline would be able to continue normal execution. These kinds of workarounds were hard to understand, fragile, and did not work well (if at all) for Declarative Pipelines.
== Developers
If you are a developer of a plugin that integrates with Pipeline using a step, and want to take advantage of the new API so that your step can report a non-successful result without throwing an exception, please see link:https://groups.google.com/d/msg/jenkinsci-dev/5A7U1KmfX08/IP5Bg_OaAgAJ[this post] to the Jenkins Developers mailing list, and respond there if you have any questions.
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2016 The OpenYOLO Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bbq;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.valid4j.Assertive.require;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.text.TextUtils;
import android.util.Log;
import com.google.bbq.Protobufs.BroadcastQuery;
import com.google.bbq.Protobufs.BroadcastQueryResponse;
import com.google.bbq.internal.ClientVersionUtil;
import com.google.protobuf.ByteString;
import com.google.protobuf.MessageLite;
import java.io.IOException;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
/**
 * Dispatches broadcast queries to available data providers.
 */
public class BroadcastQueryClient {
/**
 * The default amount of time that this client will wait for responses from providers, before
 * ignoring them.
 */
public static final long DEFAULT_TIMEOUT_MS = TimeUnit.SECONDS.toMillis(2);
private static final String LOG_TAG = "BroadcastQueryClient";
// Process-wide singleton, lazily initialized by getInstance().
private static final AtomicReference<BroadcastQueryClient> INSTANCE =
new AtomicReference<>();
// Application context used to send query broadcasts and (un)register receivers.
@NonNull
private final Context mContext;
// Source of the randomly generated query IDs (see queryFor).
@NonNull
private final SecureRandom mSecureRandom;
// In-flight queries, keyed by their randomly generated query ID.
@NonNull
private final ConcurrentHashMap<Long, PendingQuery> mPendingQueries;
// Single-threaded scheduler; presumably drives query timeouts inside
// PendingQuery (its implementation is not fully visible here - confirm).
@NonNull
private final ScheduledExecutorService mExecutorService;
// Set once dispose() has run; the client must not be used after that.
@NonNull
private final AtomicBoolean mDisposed;
/**
 * Retrieves the global instance of the broadcast query client for the application
 * associated to the provided context.
 */
@NonNull
public static BroadcastQueryClient getInstance(Context context) {
    // Fast path: reuse the existing singleton. The previous code always
    // constructed a new client (allocating its ScheduledExecutorService)
    // and immediately disposed it whenever an instance already existed.
    BroadcastQueryClient client = INSTANCE.get();
    if (client != null) {
        return client;
    }
    Context applicationContext = context.getApplicationContext();
    client = new BroadcastQueryClient(applicationContext);
    if (!INSTANCE.compareAndSet(null, client)) {
        // Lost the race: another thread installed an instance first, so
        // discard ours and return the winner.
        client.dispose();
        client = INSTANCE.get();
    }
    return client;
}
// Package-private constructor (visible for testing); production code
// should obtain instances via getInstance().
BroadcastQueryClient(@NonNull Context context) {
mContext = context;
mSecureRandom = new SecureRandom();
mPendingQueries = new ConcurrentHashMap<>();
mExecutorService = Executors.newSingleThreadScheduledExecutor();
mDisposed = new AtomicBoolean(false);
}
/**
 * Dispatches a query for the specified data type, carrying the specified protocol buffer
 * message (if required). The response to this query will be provided to the specified callback.
 * A {@link #DEFAULT_TIMEOUT_MS default timeout} will be used.
 */
public void queryFor(
@NonNull String dataType,
@Nullable MessageLite queryMessage,
@NonNull QueryCallback callback) {
// Convenience overload: delegates with the default timeout.
queryFor(dataType,
queryMessage,
DEFAULT_TIMEOUT_MS,
callback);
}
/**
 * Dispatches a query for the specified data type, carrying the specified protocol buffer
 * message (if required). The response to this query will be provided to the specified callback.
 */
public void queryFor(
@NonNull String dataType,
@Nullable MessageLite queryMessage,
long timeoutInMs,
@NonNull QueryCallback callback) {
// Serializes the protobuf message (when present) and delegates to the
// byte[] overload.
queryFor(dataType,
queryMessage != null ? queryMessage.toByteArray() : null,
timeoutInMs,
callback);
}
/**
 * Dispatches a query for the specified data type, carrying the specified message (if required).
 * The response to this query will be provided to the specified callback.
 */
public void queryFor(
@NonNull String dataType,
@Nullable byte[] queryMessage,
long timeoutInMs,
@NonNull QueryCallback callback) {
// Validate arguments and client state before doing any work.
require(!TextUtils.isEmpty(dataType), "dataType must not be null or empty");
require(timeoutInMs > 0, "Timeout must be greater than zero");
require(callback, notNullValue());
require(!isDisposed(), "BroadcastQueryClient has been disposed");
PendingQuery pq = new PendingQuery(
dataType,
queryMessage,
timeoutInMs,
callback);
// Pick a random, currently unused query ID; putIfAbsent both reserves
// the ID and registers the query, retrying on the (unlikely) collision.
long queryId;
do {
queryId = mSecureRandom.nextLong();
} while (mPendingQueries.putIfAbsent(queryId, pq) != null);
pq.dispatch(queryId);
}
/**
 * Disposes all leakable resources associated with this client. Safe to call
 * multiple times; only the first call has any effect.
 */
private void dispose() {
    // Ensure the teardown logic runs at most once.
    if (!mDisposed.compareAndSet(false, true)) {
        return;
    }
    mExecutorService.shutdownNow();
    for (PendingQuery pq : mPendingQueries.values()) {
        // A query that was registered but never dispatched, or that had no
        // responders, has a null receiver; unregistering null would throw.
        if (pq.mResponseReceiver != null) {
            mContext.unregisterReceiver(pq.mResponseReceiver);
        }
    }
    // NOTE(review): pending callbacks are never invoked after disposal —
    // confirm callers do not rely on completion.
}
/**
 * Determines whether this client has been disposed, and therefore should no longer be used.
 */
private boolean isDisposed() {
    return mDisposed.get();
}
/**
 * Builds the broadcast intent carrying the serialized {@code BroadcastQuery}
 * proto for a single responder package.
 */
private Intent createQueryIntent(
        PendingQuery pendingQuery,
        String responderPackage,
        long responseId) {
    // Protobuf builder setters throw NullPointerException when passed null,
    // so setQueryMessage must only be invoked for a non-null payload.
    BroadcastQuery.Builder queryBuilder = BroadcastQuery.newBuilder()
            .setClientVersion(ClientVersionUtil.getClientVersion())
            .setRequestingApp(mContext.getPackageName())
            .setDataType(pendingQuery.mDataType)
            .setRequestId(pendingQuery.mQueryId)
            .setResponseId(responseId);
    if (pendingQuery.mQueryMessage != null) {
        queryBuilder.setQueryMessage(ByteString.copyFrom(pendingQuery.mQueryMessage));
    }
    Intent queryIntent = QueryUtil.createEmptyQueryIntent(pendingQuery.mDataType);
    // Target the specific responder so the broadcast is not delivered app-wide.
    queryIntent.setPackage(responderPackage);
    queryIntent.putExtra(QueryUtil.EXTRA_QUERY_MESSAGE,
            queryBuilder.build().toByteArray());
    return queryIntent;
}
/**
 * Tracks the state of a single dispatched query: the responders it was sent
 * to, the responses collected so far, and the timeout that forces completion.
 */
private final class PendingQuery {
    // Data type being queried.
    final String mDataType;
    // Optional serialized query payload; may be null.
    final byte[] mQueryMessage;
    // Responder package names keyed by their random response IDs.
    final Map<Long, String> mRespondersById;
    // Response IDs for which no response has arrived yet.
    final CopyOnWriteArraySet<Long> mPendingResponses;
    // Collected responses keyed by responder package name.
    final ConcurrentHashMap<String, QueryResponse> mResponses;
    final QueryCallback mQueryCallback;
    final long mTimeoutInMs;
    // Assigned in dispatch().
    long mQueryId;
    ScheduledFuture<Void> mTimeoutFuture;
    BroadcastReceiver mResponseReceiver;

    PendingQuery(
            String dataType,
            byte[] queryMessage,
            long timeoutInMs,
            QueryCallback queryCallback) {
        mDataType = dataType;
        mQueryMessage = queryMessage;
        mTimeoutInMs = timeoutInMs;
        mRespondersById = buildRespondersById();
        mPendingResponses = new CopyOnWriteArraySet<>();
        // Initially, every responder is awaited.
        for (long responderId : mRespondersById.keySet()) {
            mPendingResponses.add(responderId);
        }
        mResponses = new ConcurrentHashMap<>();
        mQueryCallback = queryCallback;
    }

    /**
     * Assigns a unique random response ID to every package able to answer
     * queries for this data type.
     */
    Map<Long, String> buildRespondersById() {
        Set<String> responders = QueryUtil.getRespondersForDataType(mContext, mDataType);
        HashMap<Long, String> tempRespondersById = new HashMap<>();
        for (String responderPackage : responders) {
            long responderId;
            do {
                responderId = mSecureRandom.nextLong();
            } while (tempRespondersById.containsKey(responderId));
            tempRespondersById.put(responderId, responderPackage);
        }
        return tempRespondersById;
    }

    /**
     * Broadcasts the query to every responder and schedules the timeout.
     * Completes immediately when there are no responders.
     */
    void dispatch(long queryId) {
        mQueryId = queryId;
        if (mRespondersById.isEmpty()) {
            complete();
            return;
        }
        mResponseReceiver = new ResponseHandler(this);
        mContext.registerReceiver(mResponseReceiver, getResponseFilter());
        for (Map.Entry<Long, String> responderEntry : mRespondersById.entrySet()) {
            long responseId = responderEntry.getKey();
            String responderPackage = responderEntry.getValue();
            mContext.sendBroadcast(createQueryIntent(this, responderPackage, responseId));
        }
        mTimeoutFuture = mExecutorService.schedule(
                new QueryTimeoutHandler(this),
                mTimeoutInMs,
                TimeUnit.MILLISECONDS);
    }

    /**
     * Delivers the collected responses to the callback exactly once,
     * cancelling the timeout and unregistering the receiver.
     */
    void complete() {
        // remove(key, value) succeeds for exactly one caller, making
        // completion idempotent between the timeout and the final response.
        if (!mPendingQueries.remove(mQueryId, this)) {
            // response already delivered
            return;
        }
        if (mTimeoutFuture != null) {
            mTimeoutFuture.cancel(false);
        }
        if (mResponseReceiver != null) {
            mContext.unregisterReceiver(mResponseReceiver);
        }
        mQueryCallback.onResponse(mQueryId, new ArrayList<>(mResponses.values()));
    }

    /**
     * Builds the filter matching response broadcasts for this query.
     */
    IntentFilter getResponseFilter() {
        IntentFilter filter = new IntentFilter();
        filter.addAction(QueryUtil.createResponseAction(mDataType, mQueryId));
        filter.addCategory(QueryUtil.BBQ_CATEGORY);
        return filter;
    }
}
/**
 * Forces a pending query to complete once its timeout elapses, delivering
 * whatever responses have been collected up to that point.
 */
private final class QueryTimeoutHandler implements Callable<Void> {

    final PendingQuery mPendingQuery;

    QueryTimeoutHandler(PendingQuery pendingQuery) {
        this.mPendingQuery = pendingQuery;
    }

    @Override
    public Void call() throws Exception {
        this.mPendingQuery.complete();
        return null;
    }
}
/**
 * Captures broadcast responses for queries, recording at most one response
 * per responder and completing the query once all responders have replied.
 */
private final class ResponseHandler extends BroadcastReceiver {
    // The query this receiver is collecting responses for.
    final PendingQuery mPendingQuery;

    ResponseHandler(PendingQuery pendingQuery) {
        mPendingQuery = pendingQuery;
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        byte[] responseBytes = intent.getByteArrayExtra(QueryUtil.EXTRA_RESPONSE_MESSAGE);
        if (responseBytes == null) {
            Log.w(LOG_TAG, "Received query response without a defined message");
            return;
        }
        BroadcastQueryResponse response;
        try {
            response = BroadcastQueryResponse.parseFrom(responseBytes);
        } catch (IOException e) {
            Log.w(LOG_TAG, "Unable to parse query response message");
            return;
        }
        String responder = mPendingQuery.mRespondersById.get(response.getResponseId());
        if (responder == null) {
            Log.w(LOG_TAG, "Received response from unknown responder");
            return;
        }
        // remove() fails for an ID already removed, filtering duplicates.
        if (!mPendingQuery.mPendingResponses.remove(response.getResponseId())) {
            Log.w(LOG_TAG, "Duplicate response received; ignoring");
            return;
        }
        // Protobuf getters never return null (an absent field yields a
        // default instance), so a null check can never fail; use the
        // explicit presence accessor instead.
        if (response.hasResponseMessage()) {
            QueryResponse queryResponse = new QueryResponse(
                    responder,
                    response.getResponseId(),
                    response.getResponseMessage().toByteArray());
            mPendingQuery.mResponses.put(responder, queryResponse);
        }
        if (mPendingQuery.mPendingResponses.isEmpty()) {
            mPendingQuery.complete();
        }
    }
}
}
| {
"pile_set_name": "Github"
} |
---
# Jenkins Job Builder definition: top-level folder grouping the
# apm-agent-dotnet CI jobs.
- job:
    name: apm-agent-dotnet
    project-type: folder
| {
"pile_set_name": "Github"
} |
//------------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation.
// All rights reserved.
//
// This code is licensed under the MIT License.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files(the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and / or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions :
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
//------------------------------------------------------------------------------
using System.IdentityModel.Tokens.Jwt;
using Microsoft.IdentityModel.JsonWebTokens;
using Microsoft.IdentityModel.Tokens;
using Microsoft.IdentityModel.Tokens.Saml;
using Microsoft.IdentityModel.Tokens.Saml2;
namespace RuntimeCommon
{
    /// <summary>
    /// Carries the token handlers, serialized tokens, and validation settings
    /// shared by a single token test run.
    /// </summary>
    public class TokenTestRunData : TestRunData
    {
        public JsonWebTokenHandler JsonWebTokenHandler { get; set; }
        public JwtSecurityTokenHandler JwtSecurityTokenHandler { get; set; }
        public string JwtToken { get; set; }
        public string Saml2Token { get; set; }
        public string SamlToken { get; set; }
        public Saml2SecurityTokenHandler Saml2SecurityTokenHandler { get; set; }
        // Fixed missing space before '{' to match the sibling properties.
        public SamlSecurityTokenHandler SamlSecurityTokenHandler { get; set; }
        public SecurityTokenDescriptor SecurityTokenDescriptor { get; set; }
        public TokenValidationParameters TokenValidationParameters { get; set; }
    }
}
| {
"pile_set_name": "Github"
} |
---
# SPDX-License-Identifier: GPL-3.0-or-later
# Smoke test: resolve nic.cz through the local resolver and fail
# unless the reply status is NOERROR.
- name: udp_test resolve nic.cz
  tags:
    - test
  shell: kdig @127.0.0.1 nic.cz
  register: res
  failed_when: '"status: NOERROR" not in res.stdout'
| {
"pile_set_name": "Github"
} |
# perf attr test: record kill(1) with frame-pointer based call graphs.
[config]
command = record
args    = --call-graph fp kill >/dev/null 2>&1

# Expected perf_event_attr fields for the base record event.
[event:base-record]
sample_type=295
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
/* cnode related routines for the coda kernel code
(C) 1996 Peter Braam
*/
#include <linux/types.h>
#include <linux/string.h>
#include <linux/time.h>
#include <linux/coda.h>
#include <linux/pagemap.h>
#include "coda_psdev.h"
#include "coda_linux.h"
/* Bytewise equality of two Coda file identifiers. */
static inline int coda_fideq(struct CodaFid *fid1, struct CodaFid *fid2)
{
	return memcmp(fid1, fid2, sizeof(*fid1)) == 0;
}
/* Inode operations installed on symlink inodes by coda_fill_inode(). */
static const struct inode_operations coda_symlink_inode_operations = {
	.get_link	= page_get_link,
	.setattr	= coda_setattr,
};
/* cnode.c */
/*
 * Apply Venus attributes to the inode and wire up the operation tables
 * matching its file type (regular, directory, symlink or special).
 */
static void coda_fill_inode(struct inode *inode, struct coda_vattr *attr)
{
	coda_vattr_to_iattr(inode, attr);

	if (S_ISREG(inode->i_mode)) {
		inode->i_op = &coda_file_inode_operations;
		inode->i_fop = &coda_file_operations;
	} else if (S_ISDIR(inode->i_mode)) {
		inode->i_op = &coda_dir_inode_operations;
		inode->i_fop = &coda_dir_operations;
	} else if (S_ISLNK(inode->i_mode)) {
		inode->i_op = &coda_symlink_inode_operations;
		inode_nohighmem(inode);
		inode->i_data.a_ops = &coda_symlink_aops;
		inode->i_mapping = &inode->i_data;
	} else
		init_special_inode(inode, inode->i_mode, huge_decode_dev(attr->va_rdev));
}
/* iget5_locked test callback: does this inode carry the given fid? */
static int coda_test_inode(struct inode *inode, void *data)
{
	struct CodaFid *fid = (struct CodaFid *)data;
	struct coda_inode_info *cii = ITOC(inode);
	return coda_fideq(&cii->c_fid, fid);
}
/* iget5_locked set callback: stamp a newly allocated inode with the fid. */
static int coda_set_inode(struct inode *inode, void *data)
{
	struct CodaFid *fid = (struct CodaFid *)data;
	struct coda_inode_info *cii = ITOC(inode);
	cii->c_fid = *fid;
	return 0;
}
/*
 * Look up (or allocate) the inode for a fid, initialize it when new, and
 * refresh its attributes in either case.
 */
struct inode * coda_iget(struct super_block * sb, struct CodaFid * fid,
			 struct coda_vattr * attr)
{
	struct inode *inode;
	struct coda_inode_info *cii;
	unsigned long hash = coda_f2i(fid);

	inode = iget5_locked(sb, hash, coda_test_inode, coda_set_inode, fid);
	if (!inode)
		return ERR_PTR(-ENOMEM);

	if (inode->i_state & I_NEW) {
		cii = ITOC(inode);
		/* we still need to set i_ino for things like stat(2) */
		inode->i_ino = hash;
		/* inode is locked and unique, no need to grab cii->c_lock */
		cii->c_mapcount = 0;
		unlock_new_inode(inode);
	}

	/* always replace the attributes, type might have changed */
	coda_fill_inode(inode, attr);
	return inode;
}
/* this is effectively coda_iget:
   - get attributes (might be cached)
   - get the inode for the fid using vfs iget
   - link the two up if this is needed
   - fill in the attributes
*/
struct inode *coda_cnode_make(struct CodaFid *fid, struct super_block *sb)
{
	struct coda_vattr attr;
	struct inode *inode;
	int error;

	/* We get inode numbers from Venus -- see venus source */
	error = venus_getattr(sb, fid, &attr);
	if (error)
		return ERR_PTR(error);

	inode = coda_iget(sb, fid, &attr);
	if (IS_ERR(inode))
		pr_warn("%s: coda_iget failed\n", __func__);
	return inode;
}
/* Although we treat Coda file identifiers as immutable, there is one
 * special case for files created during a disconnection where they may
 * not be globally unique. When an identifier collision is detected we
 * first try to flush the cached inode from the kernel and finally
 * resort to renaming/rehashing in-place. Userspace remembers both old
 * and new values of the identifier to handle any in-flight upcalls.
 * The real solution is to use globally unique UUIDs as identifiers, but
 * retrofitting the existing userspace code for this is non-trivial. */
void coda_replace_fid(struct inode *inode, struct CodaFid *oldfid,
		      struct CodaFid *newfid)
{
	struct coda_inode_info *cii = ITOC(inode);
	unsigned long hash = coda_f2i(newfid);

	/* The caller must pass the fid currently on the inode. */
	BUG_ON(!coda_fideq(&cii->c_fid, oldfid));

	/* replace fid and rehash inode */
	/* XXX we probably need to hold some lock here! */
	remove_inode_hash(inode);
	cii->c_fid = *newfid;
	inode->i_ino = hash;
	__insert_inode_hash(inode, hash);
}
/* convert a fid to an inode; returns NULL when the inode is not cached. */
struct inode *coda_fid_to_inode(struct CodaFid *fid, struct super_block *sb)
{
	struct inode *inode;
	unsigned long hash = coda_f2i(fid);

	inode = ilookup5(sb, hash, coda_test_inode, fid);
	if ( !inode )
		return NULL;

	/* we should never see newly created inodes because we intentionally
	 * fail in the initialization callback */
	BUG_ON(inode->i_state & I_NEW);

	return inode;
}
/* Recover the per-open coda state from file->private_data, sanity-checked
 * against the magic number. */
struct coda_file_info *coda_ftoc(struct file *file)
{
	struct coda_file_info *cfi = file->private_data;

	BUG_ON(!cfi || cfi->cfi_magic != CODA_MAGIC);

	return cfi;
}
/* the CONTROL inode is made without asking attributes from Venus */
struct inode *coda_cnode_makectl(struct super_block *sb)
{
	struct inode *inode = new_inode(sb);
	if (inode) {
		inode->i_ino = CTL_INO;
		inode->i_op = &coda_ioctl_inode_operations;
		inode->i_fop = &coda_ioctl_operations;
		/* read-only for everyone; control operations go via ioctl */
		inode->i_mode = 0444;
		return inode;
	}
	return ERR_PTR(-ENOMEM);
}
| {
"pile_set_name": "Github"
} |
[
{
"cmd": [
"vpython",
"-u",
"RECIPE_MODULE[recipe_engine::file]/resources/fileutil.py",
"--json-output",
"/path/to/tmp/json",
"copy",
"[START_DIR]/cache/work/skia/infra/bots/assets/clang_linux/VERSION",
"/path/to/tmp/"
],
"infra_step": true,
"name": "Get clang_linux VERSION",
"~followup_annotations": [
"@@@STEP_LOG_LINE@VERSION@42@@@",
"@@@STEP_LOG_END@VERSION@@@"
]
},
{
"cmd": [
"python",
"-u",
"[START_DIR]/cache/work/skia/bin/fetch-gn"
],
"cwd": "[START_DIR]/cache/work/skia",
"env": {
"CHROME_HEADLESS": "1",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"infra_step": true,
"name": "fetch-gn"
},
{
"cmd": [
"[START_DIR]/ccache_linux/bin/ccache",
"-s"
],
"cwd": "[START_DIR]/cache/work/skia",
"env": {
"CCACHE_COMPILERCHECK": "content",
"CCACHE_DIR": "[START_DIR]/cache/ccache",
"CCACHE_MAXFILES": "0",
"CCACHE_MAXSIZE": "75G",
"CHROME_HEADLESS": "1",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"name": "ccache stats-start"
},
{
"cmd": [
"[START_DIR]/cache/work/skia/bin/gn",
"gen",
"[START_DIR]/cache/work/skia/out/Build-Debian10-Clang-x86_64-Debug-Wuffs/Debug",
"--args=cc=\"[START_DIR]/clang_linux/bin/clang\" cc_wrapper=\"[START_DIR]/ccache_linux/bin/ccache\" cxx=\"[START_DIR]/clang_linux/bin/clang++\" extra_cflags=[\"-B[START_DIR]/clang_linux/bin\", \"-DDUMMY_clang_linux_version=42\", \"-O1\"] extra_ldflags=[\"-B[START_DIR]/clang_linux/bin\", \"-fuse-ld=lld\", \"-L[START_DIR]/clang_linux/lib\"] skia_use_wuffs=true target_cpu=\"x86_64\" werror=true"
],
"cwd": "[START_DIR]/cache/work/skia",
"env": {
"CCACHE_COMPILERCHECK": "content",
"CCACHE_DIR": "[START_DIR]/cache/ccache",
"CCACHE_MAXFILES": "0",
"CCACHE_MAXSIZE": "75G",
"CHROME_HEADLESS": "1",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"name": "gn gen"
},
{
"cmd": [
"ninja",
"-C",
"[START_DIR]/cache/work/skia/out/Build-Debian10-Clang-x86_64-Debug-Wuffs/Debug"
],
"cwd": "[START_DIR]/cache/work/skia",
"env": {
"CCACHE_COMPILERCHECK": "content",
"CCACHE_DIR": "[START_DIR]/cache/ccache",
"CCACHE_MAXFILES": "0",
"CCACHE_MAXSIZE": "75G",
"CHROME_HEADLESS": "1",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"name": "ninja"
},
{
"cmd": [
"[START_DIR]/ccache_linux/bin/ccache",
"-s"
],
"cwd": "[START_DIR]/cache/work/skia",
"env": {
"CCACHE_COMPILERCHECK": "content",
"CCACHE_DIR": "[START_DIR]/cache/ccache",
"CCACHE_MAXFILES": "0",
"CCACHE_MAXSIZE": "75G",
"CHROME_HEADLESS": "1",
"PATH": "<PATH>:RECIPE_REPO[depot_tools]"
},
"name": "ccache stats-end"
},
{
"cmd": [
"python",
"-u",
"import errno\nimport glob\nimport os\nimport shutil\nimport sys\n\nsrc = sys.argv[1]\ndst = sys.argv[2]\nbuild_products = ['dm', 'dm.exe', 'dm.app', 'fm', 'nanobench.app', 'get_images_from_skps', 'get_images_from_skps.exe', 'hello-opencl', 'hello-opencl.exe', 'nanobench', 'nanobench.exe', 'skpbench', 'skpbench.exe', '*.so', '*.dll', '*.dylib', 'skia_launcher', 'skottie_tool', 'lib/*.so', 'run_testlab', 'skqp-universal-debug.apk']\n\ntry:\n os.makedirs(dst)\nexcept OSError as e:\n if e.errno != errno.EEXIST:\n raise\n\nfor pattern in build_products:\n path = os.path.join(src, pattern)\n for f in glob.glob(path):\n dst_path = os.path.join(dst, os.path.relpath(f, src))\n if not os.path.isdir(os.path.dirname(dst_path)):\n os.makedirs(os.path.dirname(dst_path))\n print 'Copying build product %s to %s' % (f, dst_path)\n shutil.move(f, dst_path)\n",
"[START_DIR]/cache/work/skia/out/Build-Debian10-Clang-x86_64-Debug-Wuffs/Debug",
"[START_DIR]/[SWARM_OUT_DIR]/out/Debug"
],
"infra_step": true,
"name": "copy build products",
"~followup_annotations": [
"@@@[email protected]@import errno@@@",
"@@@[email protected]@import glob@@@",
"@@@[email protected]@import os@@@",
"@@@[email protected]@import shutil@@@",
"@@@[email protected]@import sys@@@",
"@@@[email protected]@@@@",
"@@@[email protected]@src = sys.argv[1]@@@",
"@@@[email protected]@dst = sys.argv[2]@@@",
"@@@[email protected]@build_products = ['dm', 'dm.exe', 'dm.app', 'fm', 'nanobench.app', 'get_images_from_skps', 'get_images_from_skps.exe', 'hello-opencl', 'hello-opencl.exe', 'nanobench', 'nanobench.exe', 'skpbench', 'skpbench.exe', '*.so', '*.dll', '*.dylib', 'skia_launcher', 'skottie_tool', 'lib/*.so', 'run_testlab', 'skqp-universal-debug.apk']@@@",
"@@@[email protected]@@@@",
"@@@[email protected]@try:@@@",
"@@@[email protected]@ os.makedirs(dst)@@@",
"@@@[email protected]@except OSError as e:@@@",
"@@@[email protected]@ if e.errno != errno.EEXIST:@@@",
"@@@[email protected]@ raise@@@",
"@@@[email protected]@@@@",
"@@@[email protected]@for pattern in build_products:@@@",
"@@@[email protected]@ path = os.path.join(src, pattern)@@@",
"@@@[email protected]@ for f in glob.glob(path):@@@",
"@@@[email protected]@ dst_path = os.path.join(dst, os.path.relpath(f, src))@@@",
"@@@[email protected]@ if not os.path.isdir(os.path.dirname(dst_path)):@@@",
"@@@[email protected]@ os.makedirs(os.path.dirname(dst_path))@@@",
"@@@[email protected]@ print 'Copying build product %s to %s' % (f, dst_path)@@@",
"@@@[email protected]@ shutil.move(f, dst_path)@@@",
"@@@[email protected]@@@"
]
},
{
"name": "$result"
}
] | {
"pile_set_name": "Github"
} |
#!/bin/sh
DIST_PROP="ez:distribution"
DIST_DIR_PROP="ez:distribution_include_all"
# Create $DEST/$1 (with any leading "./" stripped) unless it already exists.
function make_dir
{
    local DIR
    DIR=`echo "$1" | sed 's#^\./##'`
    if [ ! -d "$DEST/$DIR" ]; then
        mkdir "$DEST/$DIR"
    fi
}
# Copy file $1 (leading "./" stripped) to the same relative path under $DEST.
function copy_file
{
    local SRC_FILE DST_FILE
    SRC_FILE=`echo $1 | sed 's#^\./##'`
    DST_FILE="$SRC_FILE"
    cp -f "$SRC_FILE" "$DEST/$DST_FILE"
}
# Recursively mirror directory $1 into $DEST, skipping .svn directories
# and editor temporary files (names ending in ~ or #).
function scan_dir_normal
{
    local file
    local DIR
    DIR=$1
    # echo "Scanning dir $DIR normally"
    for file in $DIR/*; do
        if [ -e "$file" -a ! "$file" = "$DIR/.svn" -a ! "$file" = "$DIR/.." -a ! "$file" = "$DIR/." ]; then
            # if ! echo $file | grep "/\*" &>/dev/null; then
            if [ -d "$file" ]; then
                # Do not include .svn dirs
                if [ "$file" != ".svn" ]; then
                    make_dir "$file"
                    scan_dir_normal "$file"
                fi
            else
                # Do not include temporary files
                if ! echo "$file" | grep '[~#]$' &>/dev/null; then
                    copy_file "$file"
                fi
            fi
        fi
    done
}
# Recursively copy entries whose svn property "$DIST_PROP" matches
# $DIST_TYPE. A directory additionally tagged via "$DIST_DIR_PROP" for this
# dist type is copied wholesale (scan_dir_normal) instead of filtered.
function scan_dir_svn
{
    local file
    local DIR
    local DIST_PROP_TYPE
    local DIST_DIR
    DIR=$1
    for file in $DIR/* $DIR/.*; do
        # Skip '.svn', '.', '..'.
        [ ! -e "$file" -o "$file" = "$DIR/.svn" -o "$file" = "$DIR/.." -o "$file" = "$DIR/." ] && continue
        DIST_PROP_TYPE=`svn propget $DIST_PROP $file`
        if [ $? -eq 0 -a -n "$DIST_PROP_TYPE" ]; then
            if echo $DIST_PROP_TYPE | grep $DIST_TYPE &>/dev/null; then
                DIST_DIR=`svn propget $DIST_DIR_PROP $file 2>/dev/null`
                DIST_DIR_RECURSIVE=""
                if [ $? -eq 0 -a -n "$DIST_DIR" ]; then
                    if echo $DIST_DIR | grep $DIST_TYPE &>/dev/null; then
                        DIST_DIR_RECURSIVE=$DIST_TYPE
                    fi
                fi
                if [ -d "$file" ]; then
                    echo -n " "`$SETCOLOR_DIR`"$file"`$SETCOLOR_NORMAL`"/"
                    make_dir "$file"
                    if [ -z $DIST_DIR_RECURSIVE ]; then
                        scan_dir "$file"
                    else
                        # '*' marks an include-all directory in the listing.
                        echo -n "*"
                        scan_dir_normal "$file"
                    fi
                else
                    echo -n " "`$SETCOLOR_FILE`"$file"`$SETCOLOR_NORMAL`
                    copy_file "$file"
                fi
            fi
        fi
    done
}
# Copy every entry of directory $1 unconditionally (no svn property
# filtering); directories are copied recursively via scan_dir_normal.
function scan_dir_nosvn
{
    local file
    local DIR
    local DIST_PROP_TYPE
    local DIST_DIR
    DIR=$1
    for file in $DIR/* $DIR/.*; do
        # Skip '.svn', '.', '..'.
        [ ! -e "$file" -o "$file" = "$DIR/.svn" -o "$file" = "$DIR/.." -o "$file" = "$DIR/." ] && continue
        if [ -d "$file" ]; then
            # Copy directory recursively.
            echo -n " "`$SETCOLOR_DIR`"$file"`$SETCOLOR_NORMAL`"/"
            make_dir "$file"
            scan_dir_normal "$file"
        else
            # Copy file.
            echo -n " "`$SETCOLOR_FILE`"$file"`$SETCOLOR_NORMAL`
            copy_file "$file"
        fi
    done
}
# Dispatch directory scanning: when the second argument is non-empty, copy
# everything; otherwise honour the svn distribution properties.
function scan_dir
{
    local DIR
    local NOSVN
    DIR=$1
    NOSVN=$2
    if [ -n "$NOSVN" ]; then
        # Quote the path so names containing whitespace survive expansion.
        scan_dir_nosvn "$DIR"
    else
        scan_dir_svn "$DIR"
    fi
}
| {
"pile_set_name": "Github"
} |
// Boost enable_if library
// Copyright 2003 (c) The Trustees of Indiana University.
// Use, modification, and distribution is subject to the Boost Software
// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
// Authors: Jaakko Jarvi (jajarvi at osl.iu.edu)
// Jeremiah Willcock (jewillco at osl.iu.edu)
// Andrew Lumsdaine (lums at osl.iu.edu)
#ifndef BOOST_CORE_ENABLE_IF_HPP
#define BOOST_CORE_ENABLE_IF_HPP

#include "boost/config.hpp"

// Even the definition of enable_if causes problems on some compilers,
// so it's macroed out for all compilers that do not support SFINAE

#ifndef BOOST_NO_SFINAE

namespace boost
{
  // Yields ::type (an alias for R) only when T names a valid type; used to
  // enable overloads based on the well-formedness of a dependent type.
  template<typename T, typename R=void>
  struct enable_if_has_type
  {
    typedef R type;
  };

  // enable_if_c<true, T>::type is T; the false specialization has no ::type,
  // removing the overload from the candidate set (SFINAE).
  template <bool B, class T = void>
  struct enable_if_c {
    typedef T type;
  };

  template <class T>
  struct enable_if_c<false, T> {};

  // Condition supplied as a type exposing a static bool ::value.
  template <class Cond, class T = void>
  struct enable_if : public enable_if_c<Cond::value, T> {};

  // "Lazy" variants only instantiate T::type when the condition holds,
  // allowing T to be an invalid metafunction in the disabled case.
  template <bool B, class T>
  struct lazy_enable_if_c {
    typedef typename T::type type;
  };

  template <class T>
  struct lazy_enable_if_c<false, T> {};

  template <class Cond, class T>
  struct lazy_enable_if : public lazy_enable_if_c<Cond::value, T> {};

  // disable_if mirrors enable_if with the condition inverted.
  template <bool B, class T = void>
  struct disable_if_c {
    typedef T type;
  };

  template <class T>
  struct disable_if_c<true, T> {};

  template <class Cond, class T = void>
  struct disable_if : public disable_if_c<Cond::value, T> {};

  template <bool B, class T>
  struct lazy_disable_if_c {
    typedef typename T::type type;
  };

  template <class T>
  struct lazy_disable_if_c<true, T> {};

  template <class Cond, class T>
  struct lazy_disable_if : public lazy_disable_if_c<Cond::value, T> {};

} // namespace boost

#else

namespace boost {

  // Fallback for compilers without SFINAE: any use of these templates
  // triggers a hard error through the intentionally undefined base below.
  namespace detail { typedef void enable_if_default_T; }

  template <typename T>
  struct enable_if_does_not_work_on_this_compiler;

  template<typename T, typename R=void>
  struct enable_if_has_type : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <bool B, class T = detail::enable_if_default_T>
  struct enable_if_c : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <bool B, class T = detail::enable_if_default_T>
  struct disable_if_c : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <bool B, class T = detail::enable_if_default_T>
  struct lazy_enable_if_c : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <bool B, class T = detail::enable_if_default_T>
  struct lazy_disable_if_c : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <class Cond, class T = detail::enable_if_default_T>
  struct enable_if : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <class Cond, class T = detail::enable_if_default_T>
  struct disable_if : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <class Cond, class T = detail::enable_if_default_T>
  struct lazy_enable_if : enable_if_does_not_work_on_this_compiler<T>
  { };

  template <class Cond, class T = detail::enable_if_default_T>
  struct lazy_disable_if : enable_if_does_not_work_on_this_compiler<T>
  { };

} // namespace boost

#endif // BOOST_NO_SFINAE

#endif
| {
"pile_set_name": "Github"
} |
try:
import re2 as re
except ImportError:
import re
from django.template.defaultfilters import register
from collections import OrderedDict
@register.filter("mongo_id")
def mongo_id(value):
    """Extract the Mongo ``_id`` from a document dict and return it as text.

    Non-dict values are stringified as-is.
    @todo: it will be removed in future.
    """
    if isinstance(value, dict):
        # dict.has_key() is deprecated (removed in Python 3); use "in".
        if "_id" in value:
            value = value["_id"]
    # Return value coerced to text (Python 2 codebase).
    return unicode(value)
@register.filter("is_dict")
def is_dict(value):
    """Report whether *value* is a dictionary instance."""
    result = isinstance(value, dict)
    return result
@register.filter
def get_item(dictionary, key):
    """Fetch *key* from *dictionary*, defaulting to an empty string."""
    missing = ""
    return dictionary.get(key, missing)
@register.filter(name="dehex")
def dehex(value):
    """Strip literal ``\\xNN`` hex-escape sequences from *value*."""
    hex_escape = r"\\x[0-9a-f]{2}"
    return re.sub(hex_escape, "", value)
@register.filter(name="stats_total")
def stats_total(value):
    """Sum the ``"time"`` entries of an iterable of stat dicts.

    Starts from 0.0 so an empty iterable still yields a float, matching
    the original float() accumulator.
    """
    return sum((item["time"] for item in value), 0.0)
@register.filter(name="sort")
def sort(value):
    """Return a copy of a dict ordered by key; non-dict values pass through."""
    if isinstance(value, dict):
        # sorted() on items() orders by key, matching the manual
        # key-by-key rebuild of the original implementation.
        return OrderedDict(sorted(value.items()))
    return value
@register.filter(name="format_cli")
def format_cli(cli, length):
    """Strip the executable token from a command line and truncate for display.

    cli: full command line; the first token (possibly double-quoted) is
        assumed to be the program path and is removed.
    length: number of characters to keep before appending a truncation marker.
    """
    if cli.startswith("\""):
        # Quoted program path: skip past the closing quote, then collapse
        # runs of whitespace in the remaining arguments.
        ret = " ".join(cli[cli[1:].index("\"")+2:].split()).strip()
    else:
        # Unquoted: drop the first whitespace-separated token.
        ret = " ".join(cli.split()[1:]).strip()
    if len(ret) >= length + 15:
        ret = ret[:length] + " ...(truncated)"
    # Return blank string instead of 'None'
    if not ret:
        return ""
    return ret
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2004-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Core, stable abstractions for representing runtime executions of flow definitions.
*
* <p>The central concept defined by this package is the {@link org.springframework.webflow.execution.FlowExecution}
* interface, which represents a single instance of a top-level flow definition.
*
* <p>The following classes and interfaces are of particular interest:
* <ul>
* <li>{@link org.springframework.webflow.execution.FlowExecutionFactory} - An abstract factory for creating new flow
* executions.</li>
* <li>{@link org.springframework.webflow.execution.repository.FlowExecutionRepository} - A DAO for persisting and
* restoring existing flow executions.</li>
* <li>{@link org.springframework.webflow.execution.FlowExecutionListener} - An observer interface to be implemented
* by objects that are interested in flow execution lifecycle events.</li>
* </ul>
*
* <p>This package depends on the definition package.
*/
package org.springframework.webflow.execution;
| {
"pile_set_name": "Github"
} |
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////
package subtle
import "github.com/google/tink/go/subtle"
// ECIESHKDFRecipientKem represents a HKDF-based KEM (key encapsulation mechanism)
// for ECIES recipient.
type ECIESHKDFRecipientKem struct {
	// recipientPrivateKey recovers the shared secret from the sender's
	// ephemeral public point.
	recipientPrivateKey *ECPrivateKey
}
// decapsulate uses the KEM to generate a new HKDF-based key.
//
// kem is the encoded ephemeral public point from the sender; the ECDH shared
// secret derived from it and the recipient private key is concatenated with
// the encoded point and fed through HKDF to produce keySize bytes.
func (s *ECIESHKDFRecipientKem) decapsulate(kem []byte, hashAlg string, salt []byte, info []byte, keySize uint32, pointFormat string) ([]byte, error) {
	pubPoint, err := PointDecode(s.recipientPrivateKey.PublicKey.Curve, pointFormat, kem)
	if err != nil {
		return nil, err
	}
	secret, err := ComputeSharedSecret(pubPoint, s.recipientPrivateKey)
	if err != nil {
		return nil, err
	}
	// HKDF input keying material is kem || secret, per the ECIES-HKDF spec.
	i := append(kem, secret...)
	return subtle.ComputeHKDF(hashAlg, i, salt, info, keySize)
}
| {
"pile_set_name": "Github"
} |
#ifndef __MEMOIZE
#define __MEMOIZE

/* Opaque memoization state attached to a function during emission. */
typedef struct Memoize_ * Memoize;
/* Allocate memoization state for a function within the given emitter. */
Memoize memoize_ini(const Emitter, const Func);
/* Release memoization state back to the memory pool. */
void memoize_end(MemPool, Memoize);
/* VM instructions implementing memoized call entry and result storage. */
INSTR(MemoizeIni);
INSTR(MemoizeStore);
#endif
| {
"pile_set_name": "Github"
} |
# Copyright 2013 the V8 project authors. All rights reserved.
# Copyright (C) 2005, 2006, 2007, 2008, 2009 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Test behaviour of JSON reviver function.
On success, you will see a series of "PASS" messages, followed by "TEST COMPLETE".
Ensure the holder for our array is indeed an array
PASS Array.isArray(currentHolder) is true
PASS currentHolder.length is 5
Ensure that the holder already has all the properties present at the start of filtering
PASS currentHolder[0] is "a value"
PASS currentHolder[1] is "another value"
PASS currentHolder[2] is "and another value"
PASS currentHolder[3] is "to delete"
PASS currentHolder[4] is "extra value"
Ensure the holder for our array is indeed an array
PASS Array.isArray(currentHolder) is true
PASS currentHolder.length is 5
Ensure that we always get the same holder
PASS currentHolder is lastHolder
Ensure that returning undefined has removed the property 0 from the holder during filtering.
FAIL currentHolder.hasOwnProperty(0) should be false. Was true.
Ensure the holder for our array is indeed an array
PASS Array.isArray(currentHolder) is true
PASS currentHolder.length is 5
Ensure that we always get the same holder
PASS currentHolder is lastHolder
Ensure that changing the value of a property is reflected while filtering.
PASS currentHolder[2] is "a replaced value"
Ensure that the changed value is reflected in the arguments passed to the reviver
PASS value is currentHolder[2]
Ensure the holder for our array is indeed an array
PASS Array.isArray(currentHolder) is true
PASS currentHolder.length is 5
Ensure that we always get the same holder
PASS currentHolder is lastHolder
Ensure that we visited a value that we have deleted, and that deletion is reflected while filtering.
PASS currentHolder.hasOwnProperty(3) is false
Ensure that when visiting a deleted property value is undefined
PASS value is undefined.
Ensure the holder for our array is indeed an array
PASS Array.isArray(currentHolder) is true
FAIL currentHolder.length should be 3. Was 4.
Ensure that we always get the same holder
PASS currentHolder is lastHolder
FAIL Did not call reviver for deleted property
Ensure that we created the root holder as specified in ES5
PASS '' in lastHolder is true
PASS result is lastHolder['']
Ensure that a deleted value is revived if the reviver function returns a value
FAIL result.hasOwnProperty(3) should be true. Was false.
Test behaviour of revivor used in conjunction with an object
PASS currentHolder != globalObject is true
Ensure that the holder already has all the properties present at the start of filtering
PASS currentHolder['a property'] is "a value"
PASS currentHolder['another property'] is "another value"
PASS currentHolder['and another property'] is "and another value"
PASS currentHolder['to delete'] is "will be deleted"
PASS currentHolder != globalObject is true
Ensure that we get the same holder object for each property
PASS currentHolder is lastHolder
Ensure that returning undefined has correctly removed the property 'a property' from the holder object
PASS currentHolder.hasOwnProperty('a property') is false
PASS currentHolder != globalObject is true
Ensure that we get the same holder object for each property
PASS currentHolder is lastHolder
Ensure that changing the value of a property is reflected while filtering.
PASS currentHolder['and another property'] is "a replaced value"
Ensure that the changed value is reflected in the arguments passed to the reviver
PASS value is "a replaced value"
Ensure that we created the root holder as specified in ES5
PASS lastHolder.hasOwnProperty('') is true
PASS result.hasOwnProperty('a property') is false
FAIL result.hasOwnProperty('to delete') should be true. Was false.
PASS result is lastHolder['']
Test behaviour of revivor that introduces a cycle
PASS JSON.parse("[0,1]", reviveAddsCycle) threw exception RangeError: Maximum call stack size exceeded.
Test behaviour of revivor that introduces a new array classed object (the result of a regex)
PASS JSON.stringify(JSON.parse("[0,1]", reviveIntroducesNewArrayLikeObject)) is '[0,["a","a"]]'
PASS successfullyParsed is true
TEST COMPLETE
| {
"pile_set_name": "Github"
} |
// +build windows
package iso9660
// statt is the Windows stub (see the build tag above): POSIX ownership and
// link metadata are unavailable here, so uid, gid and nlink are all zero.
func statt(sys interface{}) (uint32, uint32, uint32) {
	return 0, 0, 0
}
| {
"pile_set_name": "Github"
} |
import pytest
pytest.importorskip('sqlalchemy')
from datashape import dshape
from functools import partial
from toolz import first
from sqlalchemy.exc import OperationalError
from odo import into, drop
from blaze import data as bz_data, join, symbol
from blaze import create_index
@pytest.fixture
def sql():
    """Provide a blaze data object over an in-memory SQLite table ``foo``.

    The table has schema ``{x: int, y: int}`` and is pre-populated with
    three rows; every test below receives a fresh copy.
    """
    data = [(1, 2), (10, 20), (100, 200)]
    sql = bz_data(
        'sqlite:///:memory:::foo',
        dshape='var * {x: int, y: int}',
    )
    into(sql, data)
    return sql
def test_column(sql):
    """Single-column selection, projection and count over the fixture data."""
    table = bz_data(sql)
    assert list(table['x']) == [1, 10, 100]
    assert list(table[['x']]) == [(1,), (10,), (100,)]
    assert int(table.count()) == 3
def test_drop(sql):
    """drop() removes the underlying table from the database."""
    table = sql.data
    assert table.exists(table.bind)
    drop(table)
    assert not table.exists(table.bind)
@pytest.mark.parametrize('cols', (
    'x', ['x'], ['y'], ['x', 'y'], ('x',), ('y',), ('x', 'y'),
))
def test_create_index(sql, cols):
    """Creating an index succeeds once; re-creating the same name fails."""
    make_index = partial(create_index, sql, cols, name='idx')
    make_index()
    with pytest.raises(OperationalError):
        make_index()
def test_create_index_fails(sql):
    """Unknown columns raise KeyError; omitting the index name raises ValueError."""
    with pytest.raises(KeyError):
        create_index(sql, 'z', name='zidx')
    # name is mandatory regardless of whether the column exists
    for column in ('x', 'z'):
        with pytest.raises(ValueError):
            create_index(sql, column)
def test_create_index_unique(sql):
    """unique=True is reflected on the created SQLAlchemy Index object."""
    create_index(sql, 'y', name='y_idx', unique=True)
    indexes = sql.data.indexes
    assert len(indexes) == 1
    idx = next(iter(indexes))
    assert idx.unique
    assert idx.columns.y == sql.data.c.y
def test_composite_index(sql):
    """A multi-column index behaves like a single-column one on recreation."""
    make_index = partial(create_index, sql, ['x', 'y'], name='idx_xy')
    make_index()
    with pytest.raises(OperationalError):
        make_index()
def test_composite_index_fails(sql):
    """Every column named in a composite index must exist on the table."""
    with pytest.raises(KeyError):
        create_index(sql, ['z', 'bizz'], name='idx_name')
def test_composite_index_fails_with_existing_columns(sql):
    """Mixing valid columns with unknown ones still raises KeyError."""
    with pytest.raises(KeyError):
        create_index(sql, ['x', 'z', 'bizz'], name='idx_name')
@pytest.mark.parametrize('cols', ('x', ['x', 'y']))
def test_ignore_existing(sql, cols):
    """ignore_existing=True silences the duplicate-index error; False raises."""
    def make_index(**kwargs):
        return create_index(sql, cols, name='idx_name', **kwargs)

    make_index()
    with pytest.raises(OperationalError):
        make_index(ignore_existing=False)
    # Recreating with ignore_existing=True must be a no-op, not an error.
    make_index(ignore_existing=True)
def test_join_foreign_key():
    """Joining across a map (foreign-key) column yields the flattened schema
    asserted below, keyed by ``pkid``."""
    a = symbol('a', "var * {timestamp: string, pkid: map[int32, {pkid: int32, label: ?string}]}")
    # NOTE(review): the second symbol is also named 'a' — looks like a typo
    # for 'b'; confirm whether the name matters for this join.
    b = symbol('a', "var * {pkid: int32, label: ?string}")
    assert join(a, b, 'pkid', 'pkid').dshape == dshape("var * {pkid: int32, timestamp: string, label: ?string}")
| {
"pile_set_name": "Github"
} |
// SPDX-License-Identifier: GPL-2.0
/*
* mtu3_gadget.c - MediaTek usb3 DRD peripheral support
*
* Copyright (C) 2016 MediaTek Inc.
*
* Author: Chunfeng Yun <[email protected]>
*/
#include "mtu3.h"
#include "mtu3_trace.h"
/*
 * Complete @req on @mep with @status: unlink it, hand it back to the
 * gadget driver, and drop mtu->lock around the callback (which may
 * re-queue requests), as the __releases/__acquires annotations state.
 * Caller must hold mtu->lock.
 */
void mtu3_req_complete(struct mtu3_ep *mep,
			struct usb_request *req, int status)
__releases(mep->mtu->lock)
__acquires(mep->mtu->lock)
{
	struct mtu3_request *mreq = to_mtu3_request(req);
	struct mtu3 *mtu = mreq->mtu;
	list_del(&mreq->list);
	/* don't overwrite a status that was already finalized */
	if (req->status == -EINPROGRESS)
		req->status = status;
	trace_mtu3_req_complete(mreq);
	spin_unlock(&mtu->lock);
	/* ep0 makes use of PIO, needn't unmap it */
	if (mep->epnum)
		usb_gadget_unmap_request(&mtu->g, req, mep->is_in);
	dev_dbg(mtu->dev, "%s complete req: %p, sts %d, %d/%d\n",
		mep->name, req, req->status, req->actual, req->length);
	usb_gadget_giveback_request(&mep->ep, req);
	spin_lock(&mtu->lock);
}
/*
 * Abort every request still queued on @mep, completing each with
 * @status.  The QMU hardware queue is flushed first (ep0 does not use
 * QMU, so it is skipped).
 */
static void nuke(struct mtu3_ep *mep, const int status)
{
	struct mtu3_request *mreq = NULL;
	if (list_empty(&mep->req_list))
		return;
	dev_dbg(mep->mtu->dev, "abort %s's req: sts %d\n", mep->name, status);
	/* exclude EP0 */
	if (mep->epnum)
		mtu3_qmu_flush(mep);
	/* mtu3_req_complete() unlinks each entry, so the list drains */
	while (!list_empty(&mep->req_list)) {
		mreq = list_first_entry(&mep->req_list,
			struct mtu3_request, list);
		mtu3_req_complete(mep, &mreq->request, status);
	}
}
/*
 * Program the controller for @mep from the descriptors cached by
 * mtu3_gadget_ep_enable(): derive maxpacket / interval / burst / mult
 * per connection speed, configure the endpoint, allocate its GPD ring
 * and start the QMU.  Returns 0 or a negative errno.
 */
static int mtu3_ep_enable(struct mtu3_ep *mep)
{
	const struct usb_endpoint_descriptor *desc;
	const struct usb_ss_ep_comp_descriptor *comp_desc;
	struct mtu3 *mtu = mep->mtu;
	u32 interval = 0;
	u32 mult = 0;
	u32 burst = 0;
	int max_packet;
	int ret;
	desc = mep->desc;
	comp_desc = mep->comp_desc;
	mep->type = usb_endpoint_type(desc);
	max_packet = usb_endpoint_maxp(desc);
	/* bits 10:0 of wMaxPacketSize carry the actual packet size */
	mep->maxp = max_packet & GENMASK(10, 0);
	switch (mtu->g.speed) {
	case USB_SPEED_SUPER:
	case USB_SPEED_SUPER_PLUS:
		if (usb_endpoint_xfer_int(desc) ||
				usb_endpoint_xfer_isoc(desc)) {
			/* hardware wants bInterval clamped to 1..16, zero-based */
			interval = desc->bInterval;
			interval = clamp_val(interval, 1, 16) - 1;
			if (usb_endpoint_xfer_isoc(desc) && comp_desc)
				mult = comp_desc->bmAttributes;
		}
		if (comp_desc)
			burst = comp_desc->bMaxBurst;
		break;
	case USB_SPEED_HIGH:
		if (usb_endpoint_xfer_isoc(desc) ||
				usb_endpoint_xfer_int(desc)) {
			interval = desc->bInterval;
			interval = clamp_val(interval, 1, 16) - 1;
			/* bits 12:11: additional transactions per microframe */
			burst = (max_packet & GENMASK(12, 11)) >> 11;
		}
		break;
	default:
		break; /*others are ignored */
	}
	dev_dbg(mtu->dev, "%s maxp:%d, interval:%d, burst:%d, mult:%d\n",
		__func__, mep->maxp, interval, burst, mult);
	mep->ep.maxpacket = mep->maxp;
	mep->ep.desc = desc;
	mep->ep.comp_desc = comp_desc;
	/* slot mainly affects bulk/isoc transfer, so ignore int */
	mep->slot = usb_endpoint_xfer_int(desc) ? 0 : mtu->slot;
	ret = mtu3_config_ep(mtu, mep, interval, burst, mult);
	if (ret < 0)
		return ret;
	ret = mtu3_gpd_ring_alloc(mep);
	if (ret < 0) {
		/* undo the hardware configuration on allocation failure */
		mtu3_deconfig_ep(mtu, mep);
		return ret;
	}
	mtu3_qmu_start(mep);
	return 0;
}
/*
 * Reverse of mtu3_ep_enable(): stop the QMU, fail all pending requests
 * with -ESHUTDOWN, deconfigure the hardware and release the GPD ring.
 */
static int mtu3_ep_disable(struct mtu3_ep *mep)
{
	struct mtu3 *mtu = mep->mtu;
	mtu3_qmu_stop(mep);
	/* abort all pending requests */
	nuke(mep, -ESHUTDOWN);
	mtu3_deconfig_ep(mtu, mep);
	mtu3_gpd_ring_free(mep);
	/* clear the cached descriptor state set at enable time */
	mep->desc = NULL;
	mep->ep.desc = NULL;
	mep->comp_desc = NULL;
	mep->type = 0;
	mep->flags = 0;
	return 0;
}
/*
 * usb_ep_ops.enable: validate @desc against the fixed hardware endpoint
 * (number and direction must match), cache the descriptors and bring
 * the endpoint up under mtu->lock.
 */
static int mtu3_gadget_ep_enable(struct usb_ep *ep,
		const struct usb_endpoint_descriptor *desc)
{
	struct mtu3_ep *mep;
	struct mtu3 *mtu;
	unsigned long flags;
	int ret = -EINVAL;
	if (!ep || !desc || desc->bDescriptorType != USB_DT_ENDPOINT) {
		pr_debug("%s invalid parameters\n", __func__);
		return -EINVAL;
	}
	if (!desc->wMaxPacketSize) {
		pr_debug("%s missing wMaxPacketSize\n", __func__);
		return -EINVAL;
	}
	mep = to_mtu3_ep(ep);
	mtu = mep->mtu;
	/* check ep number and direction against endpoint */
	if (usb_endpoint_num(desc) != mep->epnum)
		return -EINVAL;
	/* XOR: descriptor direction must agree with the hardware ep's */
	if (!!usb_endpoint_dir_in(desc) ^ !!mep->is_in)
		return -EINVAL;
	dev_dbg(mtu->dev, "%s %s\n", __func__, ep->name);
	if (mep->flags & MTU3_EP_ENABLED) {
		/* double enable is a caller bug but harmless; stay enabled */
		dev_WARN_ONCE(mtu->dev, true, "%s is already enabled\n",
			mep->name);
		return 0;
	}
	spin_lock_irqsave(&mtu->lock, flags);
	mep->desc = desc;
	mep->comp_desc = ep->comp_desc;
	ret = mtu3_ep_enable(mep);
	if (ret)
		goto error;
	mep->flags = MTU3_EP_ENABLED;
	mtu->active_ep++;
error:
	spin_unlock_irqrestore(&mtu->lock, flags);
	dev_dbg(mtu->dev, "%s active_ep=%d\n", __func__, mtu->active_ep);
	trace_mtu3_gadget_ep_enable(mep);
	return ret;
}
/*
 * usb_ep_ops.disable: tear the endpoint down under mtu->lock.
 * Disabling an already-disabled endpoint only warns and returns 0.
 */
static int mtu3_gadget_ep_disable(struct usb_ep *ep)
{
	struct mtu3_ep *mep = to_mtu3_ep(ep);
	struct mtu3 *mtu = mep->mtu;
	unsigned long flags;
	dev_dbg(mtu->dev, "%s %s\n", __func__, mep->name);
	trace_mtu3_gadget_ep_disable(mep);
	if (!(mep->flags & MTU3_EP_ENABLED)) {
		dev_warn(mtu->dev, "%s is already disabled\n", mep->name);
		return 0;
	}
	spin_lock_irqsave(&mtu->lock, flags);
	mtu3_ep_disable(mep);
	mep->flags = 0;
	mtu->active_ep--;
	spin_unlock_irqrestore(&(mtu->lock), flags);
	dev_dbg(mtu->dev, "%s active_ep=%d, mtu3 is_active=%d\n",
		__func__, mtu->active_ep, mtu->is_active);
	return 0;
}
/*
 * usb_ep_ops.alloc_request: allocate this driver's request wrapper and
 * return the embedded usb_request (NULL on allocation failure).
 */
struct usb_request *mtu3_alloc_request(struct usb_ep *ep, gfp_t gfp_flags)
{
	struct mtu3_ep *mep = to_mtu3_ep(ep);
	struct mtu3_request *mreq;
	mreq = kzalloc(sizeof(*mreq), gfp_flags);
	if (!mreq)
		return NULL;
	/* no DMA mapping yet; mapping happens at queue time */
	mreq->request.dma = DMA_ADDR_INVALID;
	mreq->epnum = mep->epnum;
	mreq->mep = mep;
	trace_mtu3_alloc_request(mreq);
	return &mreq->request;
}
/* usb_ep_ops.free_request: counterpart of mtu3_alloc_request(). */
void mtu3_free_request(struct usb_ep *ep, struct usb_request *req)
{
	struct mtu3_request *mreq = to_mtu3_request(req);
	trace_mtu3_free_request(mreq);
	kfree(mreq);
}
/*
 * usb_ep_ops.queue: validate @req, DMA-map it, link it onto the
 * endpoint's request list and hand it to the QMU under mtu->lock.
 */
static int mtu3_gadget_queue(struct usb_ep *ep,
		struct usb_request *req, gfp_t gfp_flags)
{
	struct mtu3_ep *mep = to_mtu3_ep(ep);
	struct mtu3_request *mreq = to_mtu3_request(req);
	struct mtu3 *mtu = mep->mtu;
	unsigned long flags;
	int ret = 0;
	if (!req->buf)
		return -ENODATA;
	/* request must have been allocated for this very endpoint */
	if (mreq->mep != mep)
		return -EINVAL;
	dev_dbg(mtu->dev, "%s %s EP%d(%s), req=%p, maxp=%d, len#%d\n",
		__func__, mep->is_in ? "TX" : "RX", mreq->epnum, ep->name,
		mreq, ep->maxpacket, mreq->request.length);
	/* a single GPD can only describe a bounded transfer length */
	if (req->length > GPD_BUF_SIZE ||
	    (mtu->gen2cp && req->length > GPD_BUF_SIZE_EL)) {
		dev_warn(mtu->dev,
			"req length > supported MAX:%d requested:%d\n",
			mtu->gen2cp ? GPD_BUF_SIZE_EL : GPD_BUF_SIZE,
			req->length);
		return -EOPNOTSUPP;
	}
	/* don't queue if the ep is down */
	if (!mep->desc) {
		dev_dbg(mtu->dev, "req=%p queued to %s while it's disabled\n",
			req, ep->name);
		return -ESHUTDOWN;
	}
	mreq->mtu = mtu;
	mreq->request.actual = 0;
	mreq->request.status = -EINPROGRESS;
	/* map before taking the lock: mapping may sleep/allocate */
	ret = usb_gadget_map_request(&mtu->g, req, mep->is_in);
	if (ret) {
		dev_err(mtu->dev, "dma mapping failed\n");
		return ret;
	}
	spin_lock_irqsave(&mtu->lock, flags);
	if (mtu3_prepare_transfer(mep)) {
		ret = -EAGAIN;
		goto error;
	}
	list_add_tail(&mreq->list, &mep->req_list);
	mtu3_insert_gpd(mep, mreq);
	mtu3_qmu_resume(mep);
error:
	spin_unlock_irqrestore(&mtu->lock, flags);
	trace_mtu3_gadget_queue(mreq);
	return ret;
}
/*
 * usb_ep_ops.dequeue: cancel a previously queued request.  The request
 * list is scanned to confirm @req is actually pending on @ep; the QMU
 * is flushed, the request completed with -ECONNRESET, and the QMU
 * restarted.
 */
static int mtu3_gadget_dequeue(struct usb_ep *ep, struct usb_request *req)
{
	struct mtu3_ep *mep = to_mtu3_ep(ep);
	struct mtu3_request *mreq = to_mtu3_request(req);
	struct mtu3_request *r;
	struct mtu3 *mtu = mep->mtu;
	unsigned long flags;
	int ret = 0;
	if (mreq->mep != mep)
		return -EINVAL;
	dev_dbg(mtu->dev, "%s : req=%p\n", __func__, req);
	trace_mtu3_gadget_dequeue(mreq);
	spin_lock_irqsave(&mtu->lock, flags);
	/* verify the request is still on this endpoint's pending list */
	list_for_each_entry(r, &mep->req_list, list) {
		if (r == mreq)
			break;
	}
	if (r != mreq) {
		dev_dbg(mtu->dev, "req=%p not queued to %s\n", req, ep->name);
		ret = -EINVAL;
		goto done;
	}
	mtu3_qmu_flush(mep);	/* REVISIT: set BPS ?? */
	mtu3_req_complete(mep, req, -ECONNRESET);
	mtu3_qmu_start(mep);
done:
	spin_unlock_irqrestore(&mtu->lock, flags);
	return ret;
}
/*
 * Set or clear the halt bit of an EP.
 * A halted EP won't TX/RX any data but will queue requests.
 */
static int mtu3_gadget_ep_set_halt(struct usb_ep *ep, int value)
{
	struct mtu3_ep *mep = to_mtu3_ep(ep);
	struct mtu3 *mtu = mep->mtu;
	struct mtu3_request *mreq;
	unsigned long flags;
	int ret = 0;
	dev_dbg(mtu->dev, "%s : %s...", __func__, ep->name);
	spin_lock_irqsave(&mtu->lock, flags);
	/* halting an isochronous endpoint is not meaningful */
	if (mep->type == USB_ENDPOINT_XFER_ISOC) {
		ret = -EINVAL;
		goto done;
	}
	mreq = next_request(mep);
	if (value) {
		/*
		 * If there is not request for TX-EP, QMU will not transfer
		 * data to TX-FIFO, so no need check whether TX-FIFO
		 * holds bytes or not here
		 */
		if (mreq) {
			dev_dbg(mtu->dev, "req in progress, cannot halt %s\n",
				ep->name);
			ret = -EAGAIN;
			goto done;
		}
	} else {
		/* clearing halt also clears any wedge condition */
		mep->flags &= ~MTU3_EP_WEDGE;
	}
	dev_dbg(mtu->dev, "%s %s stall\n", ep->name, value ? "set" : "clear");
	mtu3_ep_stall_set(mep, value);
done:
	spin_unlock_irqrestore(&mtu->lock, flags);
	trace_mtu3_gadget_ep_set_halt(mep);
	return ret;
}
/* Sets the halt feature with the clear requests ignored */
static int mtu3_gadget_ep_set_wedge(struct usb_ep *ep)
{
	struct mtu3_ep *mep = to_mtu3_ep(ep);
	/* MTU3_EP_WEDGE keeps the stall until set_halt(ep, 0) clears it */
	mep->flags |= MTU3_EP_WEDGE;
	return usb_ep_set_halt(ep);
}
/* Operations for all non-control endpoints (ep0 uses mtu3_ep0_ops). */
static const struct usb_ep_ops mtu3_ep_ops = {
	.enable = mtu3_gadget_ep_enable,
	.disable = mtu3_gadget_ep_disable,
	.alloc_request = mtu3_alloc_request,
	.free_request = mtu3_free_request,
	.queue = mtu3_gadget_queue,
	.dequeue = mtu3_gadget_dequeue,
	.set_halt = mtu3_gadget_ep_set_halt,
	.set_wedge = mtu3_gadget_ep_set_wedge,
};
/* usb_gadget_ops.get_frame: read the current frame number register. */
static int mtu3_gadget_get_frame(struct usb_gadget *gadget)
{
	struct mtu3 *mtu = gadget_to_mtu3(gadget);
	return (int)mtu3_readl(mtu->mac_base, U3D_USB20_FRAME_NUM);
}
/*
 * usb_gadget_ops.wakeup: signal remote wakeup to the host, if the host
 * enabled the feature.  Super-speed uses the link power control exit
 * bit; high/full speed drives RESUME for ~10ms (the lock is dropped
 * around the sleep).
 */
static int mtu3_gadget_wakeup(struct usb_gadget *gadget)
{
	struct mtu3 *mtu = gadget_to_mtu3(gadget);
	unsigned long flags;
	dev_dbg(mtu->dev, "%s\n", __func__);
	/* remote wakeup feature is not enabled by host */
	if (!mtu->may_wakeup)
		return -EOPNOTSUPP;
	spin_lock_irqsave(&mtu->lock, flags);
	if (mtu->g.speed >= USB_SPEED_SUPER) {
		mtu3_setbits(mtu->mac_base, U3D_LINK_POWER_CONTROL, UX_EXIT);
	} else {
		mtu3_setbits(mtu->mac_base, U3D_POWER_MANAGEMENT, RESUME);
		/* usleep_range() may sleep, so release the spinlock first */
		spin_unlock_irqrestore(&mtu->lock, flags);
		usleep_range(10000, 11000);
		spin_lock_irqsave(&mtu->lock, flags);
		mtu3_clrbits(mtu->mac_base, U3D_POWER_MANAGEMENT, RESUME);
	}
	spin_unlock_irqrestore(&mtu->lock, flags);
	return 0;
}
/* usb_gadget_ops.set_selfpowered: record the self-powered status bit. */
static int mtu3_gadget_set_self_powered(struct usb_gadget *gadget,
		int is_selfpowered)
{
	struct mtu3 *mtu = gadget_to_mtu3(gadget);
	mtu->is_self_powered = !!is_selfpowered;
	return 0;
}
/*
 * usb_gadget_ops.pullup: software-connect/disconnect the device.  When
 * the controller is not yet active the desired state is only recorded
 * (softconnect) and applied later by mtu3_start().
 */
static int mtu3_gadget_pullup(struct usb_gadget *gadget, int is_on)
{
	struct mtu3 *mtu = gadget_to_mtu3(gadget);
	unsigned long flags;
	dev_dbg(mtu->dev, "%s (%s) for %sactive device\n", __func__,
		is_on ? "on" : "off", mtu->is_active ? "" : "in");
	/* we'd rather not pullup unless the device is active. */
	spin_lock_irqsave(&mtu->lock, flags);
	is_on = !!is_on;
	if (!mtu->is_active) {
		/* save it for mtu3_start() to process the request */
		mtu->softconnect = is_on;
	} else if (is_on != mtu->softconnect) {
		mtu->softconnect = is_on;
		mtu3_dev_on_off(mtu, is_on);
	}
	spin_unlock_irqrestore(&mtu->lock, flags);
	return 0;
}
/*
 * usb_gadget_ops.udc_start: bind a gadget function driver.  Only one
 * driver may be bound at a time; in pure peripheral mode the controller
 * is started immediately.
 */
static int mtu3_gadget_start(struct usb_gadget *gadget,
		struct usb_gadget_driver *driver)
{
	struct mtu3 *mtu = gadget_to_mtu3(gadget);
	unsigned long flags;
	if (mtu->gadget_driver) {
		dev_err(mtu->dev, "%s is already bound to %s\n",
			mtu->g.name, mtu->gadget_driver->driver.name);
		return -EBUSY;
	}
	dev_dbg(mtu->dev, "bind driver %s\n", driver->function);
	spin_lock_irqsave(&mtu->lock, flags);
	mtu->softconnect = 0;
	mtu->gadget_driver = driver;
	if (mtu->ssusb->dr_mode == USB_DR_MODE_PERIPHERAL)
		mtu3_start(mtu);
	spin_unlock_irqrestore(&mtu->lock, flags);
	return 0;
}
/*
 * Quiesce the controller: turn the pullup off, abort every outstanding
 * request on every endpoint, then notify the function driver of the
 * disconnect (with mtu->lock dropped around the callback).  Caller
 * holds mtu->lock.
 */
static void stop_activity(struct mtu3 *mtu)
{
	struct usb_gadget_driver *driver = mtu->gadget_driver;
	int i;
	/* don't disconnect if it's not connected */
	if (mtu->g.speed == USB_SPEED_UNKNOWN)
		driver = NULL;
	else
		mtu->g.speed = USB_SPEED_UNKNOWN;
	/* deactivate the hardware */
	if (mtu->softconnect) {
		mtu->softconnect = 0;
		mtu3_dev_on_off(mtu, 0);
	}
	/*
	 * killing any outstanding requests will quiesce the driver;
	 * then report disconnect
	 */
	nuke(mtu->ep0, -ESHUTDOWN);
	for (i = 1; i < mtu->num_eps; i++) {
		nuke(mtu->in_eps + i, -ESHUTDOWN);
		nuke(mtu->out_eps + i, -ESHUTDOWN);
	}
	if (driver) {
		spin_unlock(&mtu->lock);
		driver->disconnect(&mtu->g);
		spin_lock(&mtu->lock);
	}
}
/*
 * usb_gadget_ops.udc_stop: unbind the function driver and, in pure
 * peripheral mode, stop the controller.
 */
static int mtu3_gadget_stop(struct usb_gadget *g)
{
	struct mtu3 *mtu = gadget_to_mtu3(g);
	unsigned long flags;
	dev_dbg(mtu->dev, "%s\n", __func__);
	spin_lock_irqsave(&mtu->lock, flags);
	stop_activity(mtu);
	mtu->gadget_driver = NULL;
	if (mtu->ssusb->dr_mode == USB_DR_MODE_PERIPHERAL)
		mtu3_stop(mtu);
	spin_unlock_irqrestore(&mtu->lock, flags);
	return 0;
}
/* usb_gadget_ops.udc_set_speed: cap the controller's maximum speed. */
static void
mtu3_gadget_set_speed(struct usb_gadget *g, enum usb_device_speed speed)
{
	struct mtu3 *mtu = gadget_to_mtu3(g);
	unsigned long flags;
	dev_dbg(mtu->dev, "%s %s\n", __func__, usb_speed_string(speed));
	spin_lock_irqsave(&mtu->lock, flags);
	mtu3_set_speed(mtu, speed);
	spin_unlock_irqrestore(&mtu->lock, flags);
}
/* Gadget-level operations registered with the UDC core. */
static const struct usb_gadget_ops mtu3_gadget_ops = {
	.get_frame = mtu3_gadget_get_frame,
	.wakeup = mtu3_gadget_wakeup,
	.set_selfpowered = mtu3_gadget_set_self_powered,
	.pullup = mtu3_gadget_pullup,
	.udc_start = mtu3_gadget_start,
	.udc_stop = mtu3_gadget_stop,
	.udc_set_speed = mtu3_gadget_set_speed,
};
/* Reset per-connection device state back to its post-enumeration defaults. */
static void mtu3_state_reset(struct mtu3 *mtu)
{
	mtu->address = 0;
	mtu->ep0_state = MU3D_EP0_STATE_SETUP;
	mtu->may_wakeup = 0;
	mtu->u1_enable = 0;
	mtu->u2_enable = 0;
	mtu->delayed_status = false;
	mtu->test_mode = false;
}
/*
 * Initialize one software endpoint structure: name it ("ep0",
 * "ep<N>in"/"ep<N>out"), hook up the right ops (ep0 vs. general),
 * advertise capabilities, and add non-control endpoints to the
 * gadget's ep_list.
 */
static void init_hw_ep(struct mtu3 *mtu, struct mtu3_ep *mep,
		u32 epnum, u32 is_in)
{
	mep->epnum = epnum;
	mep->mtu = mtu;
	mep->is_in = is_in;
	INIT_LIST_HEAD(&mep->req_list);
	/* NOTE(review): sprintf into mep->name assumes the buffer is large
	 * enough for "ep<N>out" — confirm against the struct definition */
	sprintf(mep->name, "ep%d%s", epnum,
		!epnum ? "" : (is_in ? "in" : "out"));
	mep->ep.name = mep->name;
	INIT_LIST_HEAD(&mep->ep.ep_list);
	/* initialize maxpacket as SS */
	if (!epnum) {
		usb_ep_set_maxpacket_limit(&mep->ep, 512);
		mep->ep.caps.type_control = true;
		mep->ep.ops = &mtu3_ep0_ops;
		mtu->g.ep0 = &mep->ep;
	} else {
		usb_ep_set_maxpacket_limit(&mep->ep, 1024);
		mep->ep.caps.type_iso = true;
		mep->ep.caps.type_bulk = true;
		mep->ep.caps.type_int = true;
		mep->ep.ops = &mtu3_ep_ops;
		list_add_tail(&mep->ep.ep_list, &mtu->g.ep_list);
	}
	dev_dbg(mtu->dev, "%s, name=%s, maxp=%d\n", __func__, mep->ep.name,
		mep->ep.maxpacket);
	/* ep0 is bidirectional; the others are fixed-direction */
	if (!epnum) {
		mep->ep.caps.dir_in = true;
		mep->ep.caps.dir_out = true;
	} else if (is_in) {
		mep->ep.caps.dir_in = true;
	} else {
		mep->ep.caps.dir_out = true;
	}
}
/* Initialize ep0 plus one IN and one OUT endpoint for each hardware pair. */
static void mtu3_gadget_init_eps(struct mtu3 *mtu)
{
	u8 epnum;
	/* initialize endpoint list just once */
	INIT_LIST_HEAD(&(mtu->g.ep_list));
	dev_dbg(mtu->dev, "%s num_eps(1 for a pair of tx&rx ep)=%d\n",
		__func__, mtu->num_eps);
	init_hw_ep(mtu, mtu->ep0, 0, 0);
	for (epnum = 1; epnum < mtu->num_eps; epnum++) {
		init_hw_ep(mtu, mtu->in_eps + epnum, epnum, 1);
		init_hw_ep(mtu, mtu->out_eps + epnum, epnum, 0);
	}
}
/*
 * Fill in the usb_gadget structure, initialize the endpoints and
 * register the UDC with the gadget core.  Returns usb_add_gadget_udc()'s
 * result.
 */
int mtu3_gadget_setup(struct mtu3 *mtu)
{
	mtu->g.ops = &mtu3_gadget_ops;
	mtu->g.max_speed = mtu->max_speed;
	mtu->g.speed = USB_SPEED_UNKNOWN;
	mtu->g.sg_supported = 0;
	mtu->g.name = MTU3_DRIVER_NAME;
	mtu->is_active = 0;
	mtu->delayed_status = false;
	mtu3_gadget_init_eps(mtu);
	return usb_add_gadget_udc(mtu->dev, &mtu->g);
}
/* Unregister the UDC; counterpart of mtu3_gadget_setup(). */
void mtu3_gadget_cleanup(struct mtu3 *mtu)
{
	usb_del_gadget_udc(&mtu->g);
}
/*
 * Forward a bus resume to the function driver.  Caller holds mtu->lock;
 * it is dropped around the callback.
 */
void mtu3_gadget_resume(struct mtu3 *mtu)
{
	dev_dbg(mtu->dev, "gadget RESUME\n");
	if (mtu->gadget_driver && mtu->gadget_driver->resume) {
		spin_unlock(&mtu->lock);
		mtu->gadget_driver->resume(&mtu->g);
		spin_lock(&mtu->lock);
	}
}
/* called when SOF packets stop for 3+ msec or enters U3 */
void mtu3_gadget_suspend(struct mtu3 *mtu)
{
	dev_dbg(mtu->dev, "gadget SUSPEND\n");
	/* caller holds mtu->lock; drop it around the driver callback */
	if (mtu->gadget_driver && mtu->gadget_driver->suspend) {
		spin_unlock(&mtu->lock);
		mtu->gadget_driver->suspend(&mtu->g);
		spin_lock(&mtu->lock);
	}
}
/* called when VBUS drops below session threshold, and in other cases */
void mtu3_gadget_disconnect(struct mtu3 *mtu)
{
	dev_dbg(mtu->dev, "gadget DISCONNECT\n");
	/* caller holds mtu->lock; drop it around the driver callback */
	if (mtu->gadget_driver && mtu->gadget_driver->disconnect) {
		spin_unlock(&mtu->lock);
		mtu->gadget_driver->disconnect(&mtu->g);
		spin_lock(&mtu->lock);
	}
	mtu3_state_reset(mtu);
	usb_gadget_set_state(&mtu->g, USB_STATE_NOTATTACHED);
}
/*
 * Handle a USB bus reset: if a connection was up, report it as a
 * disconnect (which also resets state); otherwise just reset state.
 */
void mtu3_gadget_reset(struct mtu3 *mtu)
{
	dev_dbg(mtu->dev, "gadget RESET\n");
	/* report disconnect, if we didn't flush EP state */
	if (mtu->g.speed != USB_SPEED_UNKNOWN)
		mtu3_gadget_disconnect(mtu);
	else
		mtu3_state_reset(mtu);
}
| {
"pile_set_name": "Github"
} |
/**
* WordPress dependencies
*/
import { PanelRow } from '@wordpress/components';
import {
PostSticky as PostStickyForm,
PostStickyCheck,
} from '@wordpress/editor';
/**
 * Sidebar row exposing the post "sticky" toggle.
 * PostStickyCheck gates rendering (per the editor package's conditions),
 * so the row only appears when the toggle is applicable.
 */
export function PostSticky() {
	return (
		<PostStickyCheck>
			<PanelRow>
				<PostStickyForm />
			</PanelRow>
		</PostStickyCheck>
	);
}
export default PostSticky;
| {
"pile_set_name": "Github"
} |
403
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2001-2016 Food and Agriculture Organization of the
* United Nations (FAO-UN), United Nations World Food Programme (WFP)
* and United Nations Environment Programme (UNEP)
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or (at
* your option) any later version.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
*
* Contact: Jeroen Ticheler - FAO - Viale delle Terme di Caracalla 2,
* Rome - Italy. email: [email protected]
*/
package org.fao.geonet.repository;
import org.springframework.data.jpa.domain.Specification;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.persistence.EntityManager;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaUpdate;
import javax.persistence.criteria.Path;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.List;
/**
* Defines what elements to update, what fields in each element to update and the new values.
*
* User: Jesse Date: 10/4/13 Time: 11:38 AM
*
* @param <T> They type of entity this query will update
* @see org.fao.geonet.repository.GeonetRepository#createBatchUpdateQuery(PathSpec,
* Object, org.springframework.data.jpa.domain.Specification)
*/
public class BatchUpdateQuery<T> {
    private final Class<T> _entityClass;
    private final EntityManager _entityManager;
    // Parallel lists: _paths.get(i) is the attribute updated to _values.get(i).
    private final List<PathSpec<T, ?>> _paths = new ArrayList<PathSpec<T, ?>>();
    private final List<Object> _values = new ArrayList<Object>();
    // Optional filter restricting which entities get updated; null means all.
    private Specification<T> _specification;
    /**
     * Package-private constructor seeding the first path/value pair;
     * instances are created via
     * {@link org.fao.geonet.repository.GeonetRepository#createBatchUpdateQuery}.
     */
    <V> BatchUpdateQuery(@Nonnull final Class<T> entityClass, @Nonnull final EntityManager entityManager,
                         @Nonnull final PathSpec<T, V> pathSpec, @Nullable final V newValue) {
        this._entityClass = entityClass;
        this._entityManager = entityManager;
        _paths.add(pathSpec);
        _values.add(newValue);
    }
    /**
     * Add a new attribute path and value to the update query.
     *
     * @param pathSpec the path of the attribute to update with the new value. More paths and
     *                 values can be added to the {@link BatchUpdateQuery} object after it is
     *                 created.
     * @param newValue the value to set on the attribute of all the affected entities
     * @param <V>      The type of the attribute
     * @return this query object, for chaining
     */
    public <V> BatchUpdateQuery<T> add(@Nonnull final PathSpec<T, V> pathSpec, @Nullable final V newValue) {
        _paths.add(pathSpec);
        _values.add(newValue);
        return this;
    }
    /**
     * Execute the query.
     *
     * @return the number of updated elements.
     */
    public int execute() {
        int updated = 0;
        // this is a hack because at the moment hibernate JPA 2.1 is in beta and has a
        // but where only one set can be called per execution.
        // later this should be changed so that only one query is executed.
        for (int i = 0; i < _paths.size(); i++) {
            final CriteriaBuilder cb = _entityManager.getCriteriaBuilder();
            final CriteriaUpdate<T> update = cb.createCriteriaUpdate(_entityClass);
            final Root<T> root = update.from(_entityClass);
            // unchecked cast: PathSpec is heterogeneous (<T, ?>), so the
            // element type is erased here
            Path<Object> path = (Path<Object>) _paths.get(i).getPath(root);
            Object value = _values.get(i);
            update.set(path, value);
            if (_specification != null) {
                // NOTE(review): passes null for the CriteriaQuery argument —
                // presumably the Specifications used here never touch it; verify.
                update.where(_specification.toPredicate(root, null, cb));
            }
            // when only 1 query is executed this hack is also not needed.
            updated = Math.max(updated, _entityManager.createQuery(update).executeUpdate());
        }
        _entityManager.flush();
        _entityManager.clear();
        return updated;
    }
    /**
     * Set the specification used to select the entities to update.
     *
     * @param specification a specification for controlling which entities will be affected by
     *                      update.
     * @return this query object
     */
    public BatchUpdateQuery<T> setSpecification(@Nonnull final Specification<T> specification) {
        this._specification = specification;
        return this;
    }
}
| {
"pile_set_name": "Github"
} |
WARNING: You _SHOULD NOT_ use `clojure.core/read-string` to read data
from untrusted sources. See `clojure.core/read` docs. The same
security issues exist for both `read` and `read-string`.
| {
"pile_set_name": "Github"
} |
[%# Listing of WMI rules. Each row links to the rule's detail view and,
    gated by the WMI_CREATE / WMI_DELETE permissions, offers Clone and
    Delete actions. data-list-uri lets client-side code refresh the list. %]
<table id="items" class="table items" data-list-uri="[% c.uri_for(c.controller.action_for('list')) %]">
<thead>
<tr>
<th>[% l('WMI Rules') %]</th>
<th></th>
</tr>
</thead>
<tbody>
[% FOREACH item IN items %]
<tr>
<td class="item"><a href="[% c.uri_for(c.controller.action_for('view'), [ item.id ]) %]">[% item.id | html %]</a></td>
<td>
<div class="text-right">
[%- IF can_access("WMI_CREATE") %]
<a class="btn btn-mini" href="[% c.uri_for(c.controller.action_for('clone'), [ item.id]) %]">[% l('Clone') %]</a>
[%- END %]
[%- IF can_access("WMI_DELETE") %]
<a class="btn btn-mini btn-danger" href="[% c.uri_for(c.controller.action_for('remove'), [ item.id ]) %]">[% l('Delete') %]</a>
[%- END %]
</div>
</td>
</tr>
[% END -%]
</tbody>
</table>
| {
"pile_set_name": "Github"
} |
/*
* SonarQube
* Copyright (C) 2009-2020 SonarSource SA
* mailto:info AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
/**
 * Canonical metric keys as exposed by the SonarQube web API.
 * Member names intentionally mirror their string values so that
 * `MetricKey[x] === x` for every member (relied on by isMetricKey below).
 */
export enum MetricKey {
  alert_status = 'alert_status',
  blocker_violations = 'blocker_violations',
  branch_coverage = 'branch_coverage',
  bugs = 'bugs',
  burned_budget = 'burned_budget',
  business_value = 'business_value',
  class_complexity = 'class_complexity',
  classes = 'classes',
  code_smells = 'code_smells',
  cognitive_complexity = 'cognitive_complexity',
  comment_lines = 'comment_lines',
  comment_lines_data = 'comment_lines_data',
  comment_lines_density = 'comment_lines_density',
  complexity = 'complexity',
  complexity_in_classes = 'complexity_in_classes',
  complexity_in_functions = 'complexity_in_functions',
  conditions_to_cover = 'conditions_to_cover',
  confirmed_issues = 'confirmed_issues',
  coverage = 'coverage',
  critical_violations = 'critical_violations',
  development_cost = 'development_cost',
  directories = 'directories',
  duplicated_blocks = 'duplicated_blocks',
  duplicated_files = 'duplicated_files',
  duplicated_lines = 'duplicated_lines',
  duplicated_lines_density = 'duplicated_lines_density',
  duplications_data = 'duplications_data',
  effort_to_reach_maintainability_rating_a = 'effort_to_reach_maintainability_rating_a',
  executable_lines_data = 'executable_lines_data',
  false_positive_issues = 'false_positive_issues',
  file_complexity = 'file_complexity',
  file_complexity_distribution = 'file_complexity_distribution',
  filename_size = 'filename_size',
  filename_size_rating = 'filename_size_rating',
  files = 'files',
  function_complexity = 'function_complexity',
  function_complexity_distribution = 'function_complexity_distribution',
  functions = 'functions',
  generated_lines = 'generated_lines',
  generated_ncloc = 'generated_ncloc',
  info_violations = 'info_violations',
  last_change_on_maintainability_rating = 'last_change_on_maintainability_rating',
  last_change_on_releasability_rating = 'last_change_on_releasability_rating',
  last_change_on_reliability_rating = 'last_change_on_reliability_rating',
  last_change_on_security_rating = 'last_change_on_security_rating',
  last_change_on_security_review_rating = 'last_change_on_security_review_rating',
  last_commit_date = 'last_commit_date',
  leak_projects = 'leak_projects',
  line_coverage = 'line_coverage',
  lines = 'lines',
  lines_to_cover = 'lines_to_cover',
  maintainability_rating_effort = 'maintainability_rating_effort',
  major_violations = 'major_violations',
  minor_violations = 'minor_violations',
  ncloc = 'ncloc',
  ncloc_data = 'ncloc_data',
  ncloc_language_distribution = 'ncloc_language_distribution',
  new_blocker_violations = 'new_blocker_violations',
  new_branch_coverage = 'new_branch_coverage',
  new_bugs = 'new_bugs',
  new_code_smells = 'new_code_smells',
  new_conditions_to_cover = 'new_conditions_to_cover',
  new_coverage = 'new_coverage',
  new_critical_violations = 'new_critical_violations',
  new_development_cost = 'new_development_cost',
  new_duplicated_blocks = 'new_duplicated_blocks',
  new_duplicated_lines = 'new_duplicated_lines',
  new_duplicated_lines_density = 'new_duplicated_lines_density',
  new_info_violations = 'new_info_violations',
  new_line_coverage = 'new_line_coverage',
  new_lines = 'new_lines',
  new_lines_to_cover = 'new_lines_to_cover',
  new_maintainability_rating = 'new_maintainability_rating',
  new_major_violations = 'new_major_violations',
  new_minor_violations = 'new_minor_violations',
  new_reliability_rating = 'new_reliability_rating',
  new_reliability_remediation_effort = 'new_reliability_remediation_effort',
  new_security_hotspots = 'new_security_hotspots',
  new_security_hotspots_reviewed = 'new_security_hotspots_reviewed',
  new_security_rating = 'new_security_rating',
  new_security_remediation_effort = 'new_security_remediation_effort',
  new_security_review_rating = 'new_security_review_rating',
  new_sqale_debt_ratio = 'new_sqale_debt_ratio',
  new_technical_debt = 'new_technical_debt',
  new_uncovered_conditions = 'new_uncovered_conditions',
  new_uncovered_lines = 'new_uncovered_lines',
  new_violations = 'new_violations',
  new_vulnerabilities = 'new_vulnerabilities',
  open_issues = 'open_issues',
  projects = 'projects',
  public_api = 'public_api',
  public_documented_api_density = 'public_documented_api_density',
  public_undocumented_api = 'public_undocumented_api',
  quality_gate_details = 'quality_gate_details',
  quality_profiles = 'quality_profiles',
  releasability_effort = 'releasability_effort',
  releasability_rating = 'releasability_rating',
  reliability_rating = 'reliability_rating',
  reliability_rating_effort = 'reliability_rating_effort',
  reliability_remediation_effort = 'reliability_remediation_effort',
  reopened_issues = 'reopened_issues',
  security_hotspots = 'security_hotspots',
  security_hotspots_reviewed = 'security_hotspots_reviewed',
  security_rating = 'security_rating',
  security_rating_effort = 'security_rating_effort',
  security_remediation_effort = 'security_remediation_effort',
  security_review_rating = 'security_review_rating',
  security_review_rating_effort = 'security_review_rating_effort',
  skipped_tests = 'skipped_tests',
  sonarjava_feedback = 'sonarjava_feedback',
  sqale_debt_ratio = 'sqale_debt_ratio',
  sqale_index = 'sqale_index',
  sqale_rating = 'sqale_rating',
  statements = 'statements',
  team_at_sonarsource = 'team_at_sonarsource',
  team_size = 'team_size',
  test_errors = 'test_errors',
  test_execution_time = 'test_execution_time',
  test_failures = 'test_failures',
  test_success_density = 'test_success_density',
  tests = 'tests',
  uncovered_conditions = 'uncovered_conditions',
  uncovered_lines = 'uncovered_lines',
  violations = 'violations',
  vulnerabilities = 'vulnerabilities',
  wont_fix_issues = 'wont_fix_issues'
}
/** Type guard narrowing an arbitrary string to a known MetricKey. */
export function isMetricKey(key: string): key is MetricKey {
  const knownKeys: string[] = Object.values(MetricKey);
  return knownKeys.includes(key);
}
| {
"pile_set_name": "Github"
} |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import mock
from dragonflow.controller.common import constants as const
from dragonflow.db.models import l2
from dragonflow.db.models import l3
from dragonflow.tests.unit import test_app_base
class TestLegacySNatApp(test_app_base.DFAppTestBase):
apps_list = ["legacy_snat"]
    def setUp(self):
        """Wire spies onto the app's router-port hooks.

        The mocks wrap the real implementations (``side_effect`` delegates
        to the original method) so behaviour is unchanged while calls can
        be asserted on; ``mod_flow`` is reset so tests only see the flows
        they trigger themselves.
        """
        super(TestLegacySNatApp, self).setUp()
        self.app = self.open_flow_app.dispatcher.apps['legacy_snat']
        mock.patch.object(self.app, '_add_router_port',
                          side_effect=self.app._add_router_port).start()
        mock.patch.object(self.app, '_delete_router_port',
                          side_effect=self.app._delete_router_port).start()
        self.app.mod_flow.reset_mock()
def test_create_router(self):
self.lswitch = l2.LogicalSwitch(unique_key=3,
name='test_lswitch_1',
is_external=False,
segmentation_id=41,
topic='fake_tenant1',
id='test_lswitch_1',
version=5)
self.subnet = l2.Subnet(dhcp_ip="10.1.0.2",
name="private-subnet",
enable_dhcp=True,
topic="fake_tenant1",
gateway_ip="10.1.0.1",
cidr="10.1.0.0/24",
id="test_subnet10_1",
lswitch='test_lswitch_1')
self.router_ports = [l3.LogicalRouterPort(network="10.1.0.1/24",
lswitch=self.lswitch,
topic="fake_tenant1",
mac="fa:16:3e:50:96:f5",
unique_key=4,
id="fake_router_1_port1")]
self.router = l3.LogicalRouter(name="fake_router_1",
topic="fake_tenant1",
version=10,
id="fake_router_1",
unique_key=5,
ports=self.router_ports)
self.controller.update(self.lswitch)
self.controller.update(self.subnet)
self.app.mod_flow.reset_mock()
self.controller.update(self.router)
self.app._add_router_port.assert_called_once_with(self.router_ports[0])
parser = self.app.parser
ofproto = self.app.ofproto
match = parser.OFPMatch(metadata=5, eth_dst="fa:16:3e:50:96:f5")
actions = [parser.OFPActionSetField(reg7=4)]
inst = [
parser.OFPInstructionActions(ofproto.OFPIT_APPLY_ACTIONS, actions),
parser.OFPInstructionGotoTable(const.EGRESS_TABLE),
]
self.app.mod_flow.assert_called_once_with(
inst=inst,
table_id=const.L3_LOOKUP_TABLE,
priority=const.PRIORITY_VERY_LOW,
match=match)
def test_delete_router(self):
self.test_create_router()
self.app.mod_flow.reset_mock()
self.controller.delete_by_id(l3.LogicalRouter, 'fake_router_1')
self.app._delete_router_port.assert_called_once_with(
self.router_ports[0])
ofproto = self.app.ofproto
parser = self.app.parser
match = parser.OFPMatch(metadata=5, eth_dst="fa:16:3e:50:96:f5")
self.app.mod_flow.assert_called_once_with(
command=ofproto.OFPFC_DELETE_STRICT,
table_id=const.L3_LOOKUP_TABLE,
priority=const.PRIORITY_VERY_LOW,
match=match)
def test_update_router(self):
self.test_create_router()
lswitch2 = l2.LogicalSwitch(unique_key=6,
name='test_lswitch_2',
is_external=False,
segmentation_id=42,
topic='fake_tenant1',
id='test_lswitch_2',
version=5)
subnet2 = l2.Subnet(dhcp_ip="10.2.0.2",
name="private-subnet",
enable_dhcp=True,
topic="fake_tenant1",
gateway_ip="10.2.0.1",
cidr="10.2.0.0/24",
id="test_subnet10_2",
lswitch='test_lswitch_2')
router_ports2 = [l3.LogicalRouterPort(network="10.2.0.1/24",
lswitch=lswitch2,
topic="fake_tenant1",
mac="fa:16:3e:50:96:f6",
unique_key=7,
id="fake_router_1_port2")]
self.controller.update(lswitch2)
self.controller.update(subnet2)
router = copy.copy(self.router)
router.ports = router_ports2
router.version += 1
self.app._add_router_port.reset_mock()
self.controller.update(router)
self.app._add_router_port.assert_called_once_with(router_ports2[0])
self.app._delete_router_port.assert_called_once_with(
self.router_ports[0])
| {
"pile_set_name": "Github"
} |
/// Build the request URL for movies currently showing in theaters.
export function queryMovies(city, start, count) {
  const base = "https://api.douban.com/v2/movie/in_theaters"
  return `${base}?city=${city}&start=${start}&count=${count}`
}
/// Build the request URL for movies that are about to be released.
export function comingMovies(city, start, count) {
  const base = "https://api.douban.com/v2/movie/coming_soon"
  return `${base}?city=${city}&start=${start}&count=${count}`
}
"pile_set_name": "Github"
} |
VERIFICATION
Verification is intended to assist the Chocolatey moderators and community
in verifying that this package's contents are trustworthy.
Package can be verified like this:
1. Go to
x32: https://autohotkey.com/download/2.0//AutoHotkey_2.0-a122-f595abc2.zip
x64: https://autohotkey.com/download/2.0//AutoHotkey_2.0-a122-f595abc2.zip
to download the installer.
2. You can use one of the following methods to obtain the SHA256 checksum:
- Use powershell function 'Get-FileHash'
- Use Chocolatey utility 'checksum.exe'
checksum32: 22BC447D3C6F038CB0636F5DF82CCAE56821DBCD65FE70CD09CF082245797C27
checksum64: 22BC447D3C6F038CB0636F5DF82CCAE56821DBCD65FE70CD09CF082245797C27
Using AU:
Get-RemoteChecksum https://autohotkey.com/download/2.0//AutoHotkey_2.0-a122-f595abc2.zip
File 'license.txt' is obtained from:
https://github.com/AutoHotkey/AutoHotkey/blob/26daaddf714135a6fcc730fe250bd1d41a9d3c53/license.txt
| {
"pile_set_name": "Github"
} |
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package gxui
// ButtonType selects the behaviour of a Button: a momentary push
// button or a sticky toggle button.
type ButtonType int

const (
	// PushButton is a momentary button with no persistent checked state.
	PushButton ButtonType = iota
	// ToggleButton flips between checked and unchecked on each activation.
	ToggleButton
)

// Button is a clickable control laid out as a LinearLayout.
// It exposes its label text, its push/toggle type and, for toggle
// buttons, its checked state.
type Button interface {
	LinearLayout
	// Text returns the button's current label.
	Text() string
	// SetText replaces the button's label.
	SetText(string)
	// Type reports whether this is a push or a toggle button.
	Type() ButtonType
	// SetType switches the button between push and toggle behaviour.
	SetType(ButtonType)
	// IsChecked reports the checked state (meaningful for ToggleButton).
	IsChecked() bool
	// SetChecked sets the checked state.
	SetChecked(bool)
}
| {
"pile_set_name": "Github"
} |
# Test helper: put the project's lib/ directory first on the load path,
# then pull in the library under test and the test framework.
lib_dir = File.expand_path('../../lib', __FILE__)
$LOAD_PATH.unshift(lib_dir)

%w[runt date time].each { |feature| require feature }
require 'rubygems' # Needed for minitest on 1.8.7
require 'minitest/autorun'
| {
"pile_set_name": "Github"
} |
/* ========================================================================
* Bootstrap: collapse.js v3.3.7
* http://getbootstrap.com/javascript/#collapse
* ========================================================================
* Copyright 2011-2016 Twitter, Inc.
* Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
* ======================================================================== */
/* jshint latedef: false */
// Leading `+` turns the function declaration into an expression so the
// IIFE can be invoked without a wrapping parenthesis.
+function ($) {
  'use strict';

  // COLLAPSE PUBLIC CLASS DEFINITION
  // ================================

  // element: the collapsible DOM node; options override Collapse.DEFAULTS.
  // Triggers are looked up by matching href/data-target against element.id.
  var Collapse = function (element, options) {
    this.$element = $(element)
    this.options = $.extend({}, Collapse.DEFAULTS, options)
    this.$trigger = $('[data-toggle="collapse"][href="#' + element.id + '"],' +
      '[data-toggle="collapse"][data-target="#' + element.id + '"]')
    this.transitioning = null
    if (this.options.parent) {
      // Accordion mode: siblings under `parent` close when one opens.
      this.$parent = this.getParent()
    } else {
      this.addAriaAndCollapsedClass(this.$element, this.$trigger)
    }
    if (this.options.toggle) this.toggle()
  }

  Collapse.VERSION = '3.3.7'

  // Milliseconds to wait before forcing the transition-end handler.
  Collapse.TRANSITION_DURATION = 350

  Collapse.DEFAULTS = {
    toggle: true
  }

  // Returns 'width' or 'height' depending on which axis the panel collapses on.
  Collapse.prototype.dimension = function () {
    var hasWidth = this.$element.hasClass('width')
    return hasWidth ? 'width' : 'height'
  }

  // Expand the element, firing show/shown events and closing any open
  // sibling panels when in accordion (parent) mode.
  Collapse.prototype.show = function () {
    if (this.transitioning || this.$element.hasClass('in')) return
    var activesData
    var actives = this.$parent && this.$parent.children('.panel').children('.in, .collapsing')
    if (actives && actives.length) {
      activesData = actives.data('bs.collapse')
      // Don't interrupt a sibling that is mid-transition.
      if (activesData && activesData.transitioning) return
    }
    var startEvent = $.Event('show.bs.collapse')
    this.$element.trigger(startEvent)
    if (startEvent.isDefaultPrevented()) return
    if (actives && actives.length) {
      Plugin.call(actives, 'hide')
      activesData || actives.data('bs.collapse', null)
    }
    var dimension = this.dimension()
    this.$element
      .removeClass('collapse')
      .addClass('collapsing')[dimension](0)
      .attr('aria-expanded', true)
    this.$trigger
      .removeClass('collapsed')
      .attr('aria-expanded', true)
    this.transitioning = 1
    var complete = function () {
      this.$element
        .removeClass('collapsing')
        .addClass('collapse in')[dimension]('')
      this.transitioning = 0
      this.$element
        .trigger('shown.bs.collapse')
    }
    // Without CSS transition support, finish synchronously.
    if (!$.support.transition) return complete.call(this)
    var scrollSize = $.camelCase(['scroll', dimension].join('-'))
    this.$element
      .one('bsTransitionEnd', $.proxy(complete, this))
      .emulateTransitionEnd(Collapse.TRANSITION_DURATION)[dimension](this.$element[0][scrollSize])
  }

  // Collapse the element, firing hide/hidden events.
  Collapse.prototype.hide = function () {
    if (this.transitioning || !this.$element.hasClass('in')) return
    var startEvent = $.Event('hide.bs.collapse')
    this.$element.trigger(startEvent)
    if (startEvent.isDefaultPrevented()) return
    var dimension = this.dimension()
    // Reading offsetHeight forces a reflow so the explicit size set on
    // the line below is picked up before the collapsing class is added.
    this.$element[dimension](this.$element[dimension]())[0].offsetHeight
    this.$element
      .addClass('collapsing')
      .removeClass('collapse in')
      .attr('aria-expanded', false)
    this.$trigger
      .addClass('collapsed')
      .attr('aria-expanded', false)
    this.transitioning = 1
    var complete = function () {
      this.transitioning = 0
      this.$element
        .removeClass('collapsing')
        .addClass('collapse')
        .trigger('hidden.bs.collapse')
    }
    if (!$.support.transition) return complete.call(this)
    this.$element
      [dimension](0)
      .one('bsTransitionEnd', $.proxy(complete, this))
      .emulateTransitionEnd(Collapse.TRANSITION_DURATION)
  }

  Collapse.prototype.toggle = function () {
    this[this.$element.hasClass('in') ? 'hide' : 'show']()
  }

  // Resolve the accordion container and sync ARIA/collapsed state on
  // every trigger that belongs to it.
  Collapse.prototype.getParent = function () {
    return $(this.options.parent)
      .find('[data-toggle="collapse"][data-parent="' + this.options.parent + '"]')
      .each($.proxy(function (i, element) {
        var $element = $(element)
        this.addAriaAndCollapsedClass(getTargetFromTrigger($element), $element)
      }, this))
      .end()
  }

  Collapse.prototype.addAriaAndCollapsedClass = function ($element, $trigger) {
    var isOpen = $element.hasClass('in')
    $element.attr('aria-expanded', isOpen)
    $trigger
      .toggleClass('collapsed', !isOpen)
      .attr('aria-expanded', isOpen)
  }

  // Map a trigger to its collapsible target via data-target or href.
  function getTargetFromTrigger($trigger) {
    var href
    var target = $trigger.attr('data-target')
      || (href = $trigger.attr('href')) && href.replace(/.*(?=#[^\s]+$)/, '') // strip for ie7
    return $(target)
  }

  // COLLAPSE PLUGIN DEFINITION
  // ==========================

  // jQuery plugin entry point: lazily instantiates Collapse per element
  // and dispatches string options ('show'/'hide'/'toggle') as method calls.
  function Plugin(option) {
    return this.each(function () {
      var $this = $(this)
      var data = $this.data('bs.collapse')
      var options = $.extend({}, Collapse.DEFAULTS, $this.data(), typeof option == 'object' && option)
      if (!data && options.toggle && /show|hide/.test(option)) options.toggle = false
      if (!data) $this.data('bs.collapse', (data = new Collapse(this, options)))
      if (typeof option == 'string') data[option]()
    })
  }

  var old = $.fn.collapse

  $.fn.collapse = Plugin
  $.fn.collapse.Constructor = Collapse

  // COLLAPSE NO CONFLICT
  // ====================

  // Restore whatever previously owned $.fn.collapse and return this plugin.
  $.fn.collapse.noConflict = function () {
    $.fn.collapse = old
    return this
  }

  // COLLAPSE DATA-API
  // =================

  // Delegated click handler wiring up declarative data-toggle markup.
  $(document).on('click.bs.collapse.data-api', '[data-toggle="collapse"]', function (e) {
    var $this = $(this)
    if (!$this.attr('data-target')) e.preventDefault()
    var $target = getTargetFromTrigger($this)
    var data = $target.data('bs.collapse')
    var option = data ? 'toggle' : $this.data()
    Plugin.call($target, option)
  })
}(jQuery);
| {
"pile_set_name": "Github"
} |
/*
Simple DirectMedia Layer
Copyright (C) 1997-2018 Sam Lantinga <[email protected]>
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
*/
/* This is an include file for windows.h with the SDL build settings */

#ifndef _INCLUDED_WINDOWS_H
#define _INCLUDED_WINDOWS_H

#if defined(__WIN32__)
/* Keep windows.h lean and strict, and force the wide-char (UNICODE) API. */
#define WIN32_LEAN_AND_MEAN
#define STRICT
#ifndef UNICODE
#define UNICODE 1
#endif
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x501 /* Need 0x410 for AlphaBlend() and 0x500 for EnumDisplayDevices(), 0x501 for raw input */
#endif

#include <windows.h>
#include <basetyps.h> /* for REFIID with broken mingw.org headers */

/* Routines to convert from UTF8 to native Windows text.
   In UNICODE builds the native text is UTF-16LE WCHAR strings; otherwise
   conversion goes through ASCII. Results come from SDL_iconv_string and
   must be freed by the caller with SDL_free(). */
#if UNICODE
#define WIN_StringToUTF8(S) SDL_iconv_string("UTF-8", "UTF-16LE", (char *)(S), (SDL_wcslen(S)+1)*sizeof(WCHAR))
#define WIN_UTF8ToString(S) (WCHAR *)SDL_iconv_string("UTF-16LE", "UTF-8", (char *)(S), SDL_strlen(S)+1)
#else
/* !!! FIXME: UTF8ToString() can just be a SDL_strdup() here. */
#define WIN_StringToUTF8(S) SDL_iconv_string("UTF-8", "ASCII", (char *)(S), (SDL_strlen(S)+1))
#define WIN_UTF8ToString(S) SDL_iconv_string("ASCII", "UTF-8", (char *)(S), SDL_strlen(S)+1)
#endif

/* Sets an error message based on a given HRESULT */
extern int WIN_SetErrorFromHRESULT(const char *prefix, HRESULT hr);

/* Sets an error message based on GetLastError(). Always return -1. */
extern int WIN_SetError(const char *prefix);

/* Wrap up the oddities of CoInitialize() into a common function. */
extern HRESULT WIN_CoInitialize(void);
extern void WIN_CoUninitialize(void);

/* Returns SDL_TRUE if we're running on Windows Vista and newer */
extern BOOL WIN_IsWindowsVistaOrGreater(void);

/* Returns SDL_TRUE if we're running on Windows 7 and newer */
extern BOOL WIN_IsWindows7OrGreater(void);

/* You need to SDL_free() the result of this call. */
extern char *WIN_LookupAudioDeviceName(const WCHAR *name, const GUID *guid);

/* Checks to see if two GUID are the same. */
extern BOOL WIN_IsEqualGUID(const GUID * a, const GUID * b);
extern BOOL WIN_IsEqualIID(REFIID a, REFIID b);

#endif /* _INCLUDED_WINDOWS_H */

/* vi: set ts=4 sw=4 expandtab: */
| {
"pile_set_name": "Github"
} |
swagger: "2.0"
info:
description: Swagger doc for /api/spb/bmc
title: Swagger doc for /api/spb/bmc
version: 1.0.0
paths:
/api/spb/bmc:
get:
produces: [application/json]
responses:
200:
description: ok
schema:
type: object
additionalProperties: false
properties:
Information:
type: object
additionalProperties: false
properties:
load-1:
type: string
Memory Usage:
type: string
u-boot version:
type: string
SPI0 Vendor:
type: string
TPM FW version:
type: string
open-fds:
type: string
At-Scale-Debug Running:
type: boolean
SPI1 Vendor:
type: string
TPM TCG version:
type: string
load-5:
type: string
Reset Reason:
type: string
Description:
type: string
kernel version:
type: string
load-15:
type: string
OpenBMC Version:
type: string
Uptime:
type: string
CPU Usage:
type: string
uptime:
type: string
vboot:
type: object
additionalProperties: false
properties:
tpm_status:
type: string
status_text:
type: string
recovery_boot:
type: string
software_enforce:
type: string
recovery_retried:
type: string
cert_time:
type: string
hardware_enforce:
type: string
uboot_fallback_time:
type: string
status_crc:
type: string
status:
type: string
cert_fallback_time:
type: string
uboot_time:
type: string
force_recovery:
type: string
MAC Addr:
type: string
Resources:
type: array
items:
type: string
Actions:
type: array
items:
type: string
| {
"pile_set_name": "Github"
} |
// Copyright Neil Groves 2009. Use, modification and
// distribution is subject to the Boost Software License, Version
// 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
//
// For more information, see http://www.boost.org/libs/range/
//
#ifndef BOOST_RANGE_ALGORITHM_EQUAL_RANGE_HPP_INCLUDED
#define BOOST_RANGE_ALGORITHM_EQUAL_RANGE_HPP_INCLUDED

#include <boost/concept_check.hpp>
#include <boost/range/begin.hpp>
#include <boost/range/end.hpp>
#include <boost/range/concepts.hpp>
#include <algorithm>

namespace boost
{
    namespace range
    {

/// \brief template function equal_range
///
/// range-based version of the equal_range std algorithm.
/// Returns the iterator sub-range of \c rng equivalent to \c val,
/// as a pair of iterators; \c rng must be sorted (or at least
/// partitioned) with respect to the ordering used.
///
/// \pre ForwardRange is a model of the ForwardRangeConcept
/// \pre SortPredicate is a model of the BinaryPredicateConcept
///      (applies to the overloads taking a predicate)
template<class ForwardRange, class Value>
inline std::pair<
        BOOST_DEDUCED_TYPENAME boost::range_iterator<ForwardRange>::type,
        BOOST_DEDUCED_TYPENAME boost::range_iterator<ForwardRange>::type
       >
equal_range(ForwardRange& rng, const Value& val)
{
    BOOST_RANGE_CONCEPT_ASSERT(( ForwardRangeConcept<ForwardRange> ));
    return std::equal_range(boost::begin(rng), boost::end(rng), val);
}

/// \overload
/// const-range variant; yields const iterators.
template<class ForwardRange, class Value>
inline std::pair<
        BOOST_DEDUCED_TYPENAME boost::range_iterator<const ForwardRange>::type,
        BOOST_DEDUCED_TYPENAME boost::range_iterator<const ForwardRange>::type
       >
equal_range(const ForwardRange& rng, const Value& val)
{
    BOOST_RANGE_CONCEPT_ASSERT(( ForwardRangeConcept<const ForwardRange> ));
    return std::equal_range(boost::begin(rng), boost::end(rng), val);
}

/// \overload
/// Variant taking an explicit ordering predicate; \c rng must be
/// sorted/partitioned by the same predicate.
template<class ForwardRange, class Value, class SortPredicate>
inline std::pair<
        BOOST_DEDUCED_TYPENAME boost::range_iterator<ForwardRange>::type,
        BOOST_DEDUCED_TYPENAME boost::range_iterator<ForwardRange>::type
       >
equal_range(ForwardRange& rng, const Value& val, SortPredicate pred)
{
    BOOST_RANGE_CONCEPT_ASSERT(( ForwardRangeConcept<ForwardRange> ));
    return std::equal_range(boost::begin(rng), boost::end(rng), val, pred);
}

/// \overload
/// const-range + predicate variant.
template<class ForwardRange, class Value, class SortPredicate>
inline std::pair<
        BOOST_DEDUCED_TYPENAME boost::range_iterator<const ForwardRange>::type,
        BOOST_DEDUCED_TYPENAME boost::range_iterator<const ForwardRange>::type
       >
equal_range(const ForwardRange& rng, const Value& val, SortPredicate pred)
{
    BOOST_RANGE_CONCEPT_ASSERT(( ForwardRangeConcept<const ForwardRange> ));
    return std::equal_range(boost::begin(rng), boost::end(rng), val, pred);
}

    } // namespace range
    using range::equal_range;
} // namespace boost

#endif // include guard
| {
"pile_set_name": "Github"
} |
---
guid: testgucore
env_type: osp-sandbox
cloud_provider: osp
#key_name: opentlc_admin_backdoor
#key_name: gucore
admin_user: gcore
student_name: gcore
heat_retries: 0
user_pub_key: |
ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC4LIAh+LeqBsODfR9YMqyk6E74hE9/FzyDsBf7pukER7alw99JLySZFeO7hL0COXrQlbweVDTwbU5GJrLSbUQZvgVOIbH/roAwMBjzuwdh7ibLyxZNdJs/6gLifWUXSmaj/JKXCr2Lg+527wRfePY+1mXJNMc+cbrezWoUpNlhw3D1NrcJnxldmRn6Rw0jnQdaz1PtAgMfX4Xftr9RV0GggtAwCGjViBK6WM+Fkfp4hT0EMVoCtwvhmEIzeaYaHofit3pW/KONp1DG/OisojUcDuQ6S7i5Mt35N1LmzR06ED4c8kOLMaqZ7UDupFOoILL/vqayVjL67trgcsTcJXI5
repo_method: file
common_install_basic_packages_retries: 0
output_dir: /tmp/output_dir
| {
"pile_set_name": "Github"
} |
### 江天勇遭全天候监控 与跟踪偷拍的国保发生激烈争执
------------------------
#### [首页](https://github.com/gfw-breaker/banned-news/blob/master/README.md) | [手把手翻墙教程](https://github.com/gfw-breaker/guides/wiki) | [禁闻聚合安卓版](https://github.com/gfw-breaker/bn-android) | [网门安卓版](https://github.com/oGate2/oGate) | [神州正道安卓版](https://github.com/SzzdOgate/update)
<div class="zhidingtu">
<div class="ar-wrap-3x2">
<img alt="江天勇律师。(网络图片)" class="ar-wrap-inside-fill" src="http://img.soundofhope.org/2019/07/20170601-china-jty-600x400-600x400.jpg"/>
</div>
<div class="caption">
江天勇律师。(网络图片)
</div>
</div>
<hr/>
#### [翻墙必看视频(文昭、江峰、法轮功、八九六四、香港反送中...)](https://github.com/gfw-breaker/banned-news/blob/master/pages/links.md)
<div class="content">
<p>
<span class="content-info-date">
【希望之声2019年11月7日】
</span>
<span class="content-info-type">
(本台记者田溪采访报导)
</span>
“709律师”江天勇自今年2月出狱以来,一直受到河南当局派人进行全天候严密监控。近日,江天勇律师凌晨开门外出,再次与跟踪偷拍的国保警察发生争执。
</p>
<div class="widget ad-300x250 ad-ecf">
<!-- ZW30 Post Embed 300x250 1 -->
<ins class="adsbygoogle" data-ad-client="ca-pub-1519518652909441" data-ad-slot="9768754376" style="display:inline-block;width:300px;height:250px">
</ins>
</div>
<p>
江天勇出狱后,一直被软禁在河南老家父母家。11月5日凌晨,他临睡前开门准备遛狗,发现一名国保在眼前拿着手电筒并以手机摄录他的行动。国保的举动引起江天勇和他父母不满,双方发生激烈争执。
</p>
<p style="text-align: center;">
<img alt="" class="alignnone size-medium wp-image-3318204" src="http://img.soundofhope.org/2019/11/m1106gf1a-600x338.jpg" srcset="http://img.soundofhope.org/2019/11/m1106gf1a-600x338.jpg 600w, http://img.soundofhope.org/2019/11/m1106gf1a-768x432.jpg 768w, http://img.soundofhope.org/2019/11/m1106gf1a-180x101.jpg 180w, http://img.soundofhope.org/2019/11/m1106gf1a-366x206.jpg 366w, http://img.soundofhope.org/2019/11/m1106gf1a.jpg 800w"/>
<br/>
11月5日,河南一名国保在凌晨时分近距离跟踪江天勇,双方发生激烈口角。(陈光诚推特图片)
</p>
<p>
江天勇向本台记者讲述了当时的情况。
</p>
<p>
江天勇:“一出去,他们那些人可能看到我在院子里,他在那黑咕隆咚往里面瞄,拿着手电照,并且拿着手机对我拍照。然后我也对他拍,我问他干什么?争执开了,那个胖高个对我进行谩骂。并且我爸妈听到争吵声,他们都睡下了,然后出来,他对我爸妈也在谩骂,很嚣张的。那个胖高个姓何,特别就是一个无赖,他已经多次,不知为啥,很变态的。别人也不跟他这样,他还问我爸:你想干什么?你半夜跑到人家院子来,还问人家干什么,很笑的。”
</p>
<p>
江天勇分析,警方故意挑衅骚扰,让你不得安宁。
</p>
<p>
江天勇:“他现在可能就是这种骚扰,让你不得安宁。可能也跟信阳那个小国保,张家文(音)在这有关,张家文昨天下午走到我们院子来,没有理他,他现在就挑衅嘛。那个人相当于信阳市公安局国保派到这来的,他是信阳市公安局的国保。那个人以前我在长沙市被抓的时候,他就在这边对我爸妈维稳的。这个人特别恶劣,他可能就是把这个当作维稳项目。他一群人每天在这待着没事干,就看着我,挣钱又轻松。现在维稳费,什么费都可以欠着,维稳费有的是,不差钱。你说什么平常的治安案件,警察理都不理你,一说,哪涉及到维稳的,那他们就慌了,立马就(行动),现在哪缺钱都不算缺钱,现在这个维稳那是不差钱。所以他们维稳费从上面骗维稳费是最好骗,因此他们现在就把这个当作一个好项目。我估计是这样,不然为什么这样变态、这么卖力呢?”
</p>
<p style="text-align: center;">
<img alt="" class="alignnone size-medium wp-image-3318207" src="http://img.soundofhope.org/2019/11/untitled-1-118-600x336.jpg" srcset="http://img.soundofhope.org/2019/11/untitled-1-118-600x336.jpg 600w, http://img.soundofhope.org/2019/11/untitled-1-118-768x430.jpg 768w, http://img.soundofhope.org/2019/11/untitled-1-118-180x101.jpg 180w, http://img.soundofhope.org/2019/11/untitled-1-118-366x205.jpg 366w, http://img.soundofhope.org/2019/11/untitled-1-118.jpg 804w"/>
<br/>
11月5日,河南一名国保在凌晨时分近距离跟踪江天勇,双方发生激烈口角。(陈光诚推特图片)
</p>
<p>
据知,以这名国保为首,目前共有几十人日夜24小时,轮流值班跟踪监视江天勇。江家周围共布置了十几个摄像头,进出江家的人员也受到特别检查。
</p>
<p>
“709律师”包龙军表示,他出狱时,就曾和一个警察态度恶劣的打起来了。
</p>
<p>
包龙军:“对我们没象他态度这么恶劣,我们都非常客气,就有一个不客气的,我还跟他打起来了。都这样,我们出来正好一年嘛。就是滥用权力,本身来说,人家已经都完事了。弄这个,本身没有任何法律依据。”
</p>
<div>
</div>
<p>
709律师谢阳认为,江天勇的遭遇和陈光诚一模一样,控制他对外发声,官方对709律师的迫害是惯用的伎俩。看管他的人很多都是社会上的流氓。
</p>
<p>
谢阳:“对异议人士,它采用的荒谬的手段,他这个时候,已经处于自由的状态了,可能看管他的人都属于社会的流氓组织,不一定是国保,但是它后面肯定是国保操盘的,他里边接触的人不一定是国保,国保雇佣的这种黑社会。县里面的国保,在我的心目中,他们配合的大概就是5到6个人,而负责看管他的有2、30个人,显然本县的国保的力量肯定是不够的,他这个常规化的统治的话,他也不可能动用他们信阳市的国保支队,所以他本地的国保、或者本地的辖区派出所,它采取这种流氓的手段,雇佣黑社会。他这样的情况是非常非常恶劣的。”
</p>
<p>
另外,江天勇的身体状况一直不是太好,狱中酷刑折磨,在出狱后一直得不到很好的治疗。
</p>
<p>
江天勇:“身体出来之后一直没有得到检查,腿脚水肿,那次到信阳检查,他们也没检查个所以然,所以我现在一直也没有检查。现在稍微活动一点就气喘、心跳加快。也没有真正的检查过,也不知道到底怎么样。”
</p>
<p>
江天勇曾参与高智晟案、胡佳案,并大面积代理法轮功学员案件。2015年“709”大抓捕事件后,他曾参与营救709律师并协助家属维权。2016年11月,他看望被羁押律师的家属后失踪。2017年6月被以“煽动颠覆”罪批捕,11月被判刑2年。今年2月获释。但中共仍然将他软禁在河南父母家中。
</p>
<div class="content-info-btm">
<p class="content-info-zerenbianji">
<span class="content-info-title">
责任编辑:
</span>
<span class="content-info-content">
元明清
</span>
</p>
<p class="content-info-refernote">
(希望之声版权所有,未经希望之声书面允许,不得转载,违者必究。)
</p>
</div>
</div>
<hr/>
手机上长按并复制下列链接或二维码分享本文章:<br/>
https://github.com/gfw-breaker/banned-news/blob/master/pages/soh_zgxw/n3318189.md <br/>
<a href='https://github.com/gfw-breaker/banned-news/blob/master/pages/soh_zgxw/n3318189.md'><img src='https://github.com/gfw-breaker/banned-news/blob/master/pages/soh_zgxw/n3318189.md.png'/></a> <br/>
原文地址(需翻墙访问):http://www.soundofhope.org/gb/2019/11/07/n3318189.html
------------------------
#### [首页](https://github.com/gfw-breaker/banned-news/blob/master/README.md) | [一键翻墙软件](https://github.com/gfw-breaker/nogfw/blob/master/README.md) | [《九评共产党》](https://github.com/gfw-breaker/9ping.md/blob/master/README.md#九评之一评共产党是什么) | [《解体党文化》](https://github.com/gfw-breaker/jtdwh.md/blob/master/README.md) | [《共产主义的终极目的》](https://github.com/gfw-breaker/gczydzjmd.md/blob/master/README.md)
<img src='http://gfw-breaker.win/banned-news/pages/soh_zgxw/n3318189.md' width='0px' height='0px'/> | {
"pile_set_name": "Github"
} |
/**
* Copyright (c) 2014, the Railo Company Ltd.
* Copyright (c) 2015, Lucee Assosication Switzerland
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library. If not, see <http://www.gnu.org/licenses/>.
*
*/
package lucee.transformer.library.function;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import org.osgi.framework.Version;
import org.xml.sax.Attributes;
import lucee.commons.lang.CFTypes;
import lucee.commons.lang.ClassException;
import lucee.commons.lang.ExceptionUtil;
import lucee.commons.lang.Md5;
import lucee.commons.lang.StringUtil;
import lucee.runtime.config.Identification;
import lucee.runtime.db.ClassDefinition;
import lucee.runtime.exp.PageRuntimeException;
import lucee.runtime.exp.TemplateException;
import lucee.runtime.ext.function.BIF;
import lucee.runtime.functions.BIFProxy;
import lucee.runtime.op.Caster;
import lucee.runtime.osgi.OSGiUtil;
import lucee.runtime.reflection.Reflector;
import lucee.runtime.type.util.ListUtil;
import lucee.transformer.cfml.evaluator.FunctionEvaluator;
import lucee.transformer.library.ClassDefinitionImpl;
import lucee.transformer.library.tag.TagLib;
/**
* Eine FunctionLibFunction repraesentiert eine einzelne Funktion innerhalb einer FLD.
*/
public final class FunctionLibFunction {
/**
* Dynamischer Argument Typ
*/
public static final int ARG_DYNAMIC = 0;
/**
* statischer Argument Typ
*/
public static final int ARG_FIX = 1;
private FunctionLib functionLib;
private String name;
private ArrayList<FunctionLibFunctionArg> argument = new ArrayList<FunctionLibFunctionArg>();
private int argMin = 0;
private int argMax = -1;
private int argType = ARG_FIX;
private String strReturnType;
private ClassDefinition clazz;
private String description;
private boolean hasDefaultValues;
private FunctionEvaluator eval;
private ClassDefinition tteCD;
private short status = TagLib.STATUS_IMPLEMENTED;
private String[] memberNames;
private int memberPosition = 1;
private short memberType = CFTypes.TYPE_UNKNOW;
private boolean memberChaining;
private BIF bif;
private String[] keywords;
private ClassDefinition functionCD;
private Version introduced;
private final boolean core;
/**
* Geschuetzer Konstruktor ohne Argumente.
*/
/*
* public FunctionLibFunction() { this.core=false; }
*/
public FunctionLibFunction(boolean core) {
this.core = core;
}
public FunctionLibFunction(FunctionLib functionLib, boolean core) {
this.functionLib = functionLib;
this.core = core;
}
/**
* Gibt den Namen der Funktion zurueck.
*
* @return name Name der Funktion.
*/
public String getName() {
return name;
}
/**
* Gibt alle Argumente einer Funktion als ArrayList zurueck.
*
* @return Argumente der Funktion.
*/
public ArrayList<FunctionLibFunctionArg> getArg() {
return argument;
}
/**
* Gibt zurueck wieviele Argumente eine Funktion minimal haben muss.
*
* @return Minimale Anzahl Argumente der Funktion.
*/
public int getArgMin() {
return argMin;
}
/**
* Gibt zurueck wieviele Argumente eine Funktion minimal haben muss.
*
* @return Maximale Anzahl Argumente der Funktion.
*/
public int getArgMax() {
return argMax;
}
/**
* @return the status
* (TagLib.,TagLib.STATUS_IMPLEMENTED,TagLib.STATUS_DEPRECATED,TagLib.STATUS_UNIMPLEMENTED)
*/
public short getStatus() {
return status;
}
/**
* @param status the status to set
* (TagLib.,TagLib.STATUS_IMPLEMENTED,TagLib.STATUS_DEPRECATED,TagLib.STATUS_UNIMPLEMENTED)
*/
public void setStatus(short status) {
this.status = status;
}
/**
* Gibt die argument art zurueck.
*
* @return argument art
*/
public int getArgType() {
return argType;
}
/**
* Gibt die argument art als String zurueck.
*
* @return argument art
*/
public String getArgTypeAsString() {
if (argType == ARG_DYNAMIC) return "dynamic";
return "fixed";
}
/**
* Gibt zurueck von welchem Typ der Rueckgabewert dieser Funktion sein muss (query, string, struct,
* number usw.).
*
* @return Typ des Rueckgabewert.
*/
public String getReturnTypeAsString() {
return strReturnType;
}
/**
* Gibt die Klasse zurueck, welche diese Funktion implementiert.
*
* @return Klasse der Function.
* @throws ClassException
*/
public ClassDefinition getFunctionClassDefinition() {
return functionCD;
}
/**
* Gibt die Beschreibung der Funktion zurueck.
*
* @return String
*/
public String getDescription() {
return description;
}
/**
* Gibt die FunctionLib zurueck, zu der die Funktion gehoert.
*
* @return Zugehoerige FunctionLib.
*/
public FunctionLib getFunctionLib() {
return functionLib;
}
/**
* Setzt den Namen der Funktion.
*
* @param name Name der Funktion.
*/
public void setName(String name) {
this.name = name.toLowerCase();
}
/**
* Fuegt der Funktion ein Argument hinzu.
*
* @param arg Argument zur Funktion.
*/
public void addArg(FunctionLibFunctionArg arg) {
arg.setFunction(this);
argument.add(arg);
if (arg.getDefaultValue() != null) hasDefaultValues = true;
}
/**
* Fuegt der Funktion ein Argument hinzu, alias fuer addArg.
*
* @param arg Argument zur Funktion.
*/
public void setArg(FunctionLibFunctionArg arg) {
addArg(arg);
}
/**
* Setzt wieviele Argumente eine Funktion minimal haben muss.
*
* @param argMin Minimale Anzahl Argumente der Funktion.
*/
public void setArgMin(int argMin) {
this.argMin = argMin;
}
/**
* Setzt wieviele Argumente eine Funktion minimal haben muss.
*
* @param argMax Maximale Anzahl Argumente der Funktion.
*/
public void setArgMax(int argMax) {
this.argMax = argMax;
}
/**
* Setzt den Rueckgabewert der Funktion (query,array,string usw.)
*
* @param value
*/
public void setReturn(String value) {
strReturnType = value;
}
/**
* Setzt die Klassendefinition als Zeichenkette, welche diese Funktion implementiert.
*
* @param value Klassendefinition als Zeichenkette.
*/
public void setFunctionClass(String value, Identification id, Attributes attrs) {
functionCD = ClassDefinitionImpl.toClassDefinition(value, id, attrs);
}
/**
* Setzt die Beschreibung der Funktion.
*
* @param description Beschreibung der Funktion.
*/
public void setDescription(String description) {
this.description = description;
}
/**
* Setzt die zugehoerige FunctionLib.
*
* @param functionLib Zugehoerige FunctionLib.
*/
public void setFunctionLib(FunctionLib functionLib) {
this.functionLib = functionLib;
}
/**
* sets the argument type of the function
*
* @param argType
*/
public void setArgType(int argType) {
this.argType = argType;
}
public String getHash() {
StringBuilder sb = new StringBuilder();
sb.append(this.getArgMax());
sb.append(this.getArgMin());
sb.append(this.getArgType());
sb.append(this.getArgTypeAsString());
sb.append(getFunctionClassDefinition().toString());
sb.append(tteCD);
sb.append(this.getName());
sb.append(this.getReturnTypeAsString());
Iterator<FunctionLibFunctionArg> it = this.getArg().iterator();
FunctionLibFunctionArg arg;
while (it.hasNext()) {
arg = it.next();
sb.append(arg.getHash());
}
try {
return Md5.getDigestAsString(sb.toString());
}
catch (IOException e) {
return "";
}
}
public boolean hasDefaultValues() {
return hasDefaultValues;
}
public boolean hasTteClass() {
return tteCD != null;
}
public FunctionEvaluator getEvaluator() throws TemplateException {
if (!hasTteClass()) return null;
if (eval != null) return eval;
try {
eval = (FunctionEvaluator) tteCD.getClazz().newInstance();
}
catch (Exception e) {
throw new TemplateException(e.getMessage());
}
return eval;
}
public void setTTEClass(String tteClass, Identification id, Attributes attrs) {
this.tteCD = ClassDefinitionImpl.toClassDefinition(tteClass, id, attrs);
}
public void setMemberName(String memberNames) {
if (StringUtil.isEmpty(memberNames, true)) return;
this.memberNames = ListUtil.trimItems(ListUtil.listToStringArray(memberNames, ','));
}
public String[] getMemberNames() {
return memberNames;
}
public void setKeywords(String keywords) {
this.keywords = ListUtil.trimItems(ListUtil.listToStringArray(keywords, ','));
}
public String[] getKeywords() {
return keywords;
}
public boolean isCore() {
return core;
}
public void setMemberPosition(int pos) {
this.memberPosition = pos;
}
public int getMemberPosition() {
return memberPosition;
}
public void setMemberChaining(boolean memberChaining) {
this.memberChaining = memberChaining;
}
public boolean getMemberChaining() {
return memberChaining;
}
public void setMemberType(String memberType) {
this.memberType = CFTypes.toShortStrict(memberType, CFTypes.TYPE_UNKNOW);
}
public short getMemberType() {
if (memberNames != null && memberType == CFTypes.TYPE_UNKNOW) {
ArrayList<FunctionLibFunctionArg> args = getArg();
if (args.size() >= 1) {
memberType = CFTypes.toShortStrict(args.get(getMemberPosition() - 1).getTypeAsString(), CFTypes.TYPE_UNKNOW);
}
}
return memberType;
}
public String getMemberTypeAsString() {
return CFTypes.toString(getMemberType(), "any");
}
/**
 * Returns the built-in-function implementation for this definition. The
 * instance is created on first use and then cached: classes that already
 * implement BIF are instantiated directly, any other class is wrapped in a
 * BIFProxy.
 *
 * @return the cached BIF instance
 * @throws PageRuntimeException when the implementation class cannot be loaded
 * @throws RuntimeException when the class cannot be instantiated
 */
public BIF getBIF() {
    if (bif != null) return bif;
    Class clazz = null;
    try {
        clazz = getFunctionClassDefinition().getClazz();
    }
    catch (Throwable t) {
        ExceptionUtil.rethrowIfNecessary(t);
        throw new PageRuntimeException(Caster.toPageException(t));
    }
    if (Reflector.isInstaneOf(clazz, BIF.class, false)) {
        try {
            bif = (BIF) clazz.newInstance();
        }
        catch (Throwable t) {
            ExceptionUtil.rethrowIfNecessary(t);
            throw new RuntimeException(t);
        }
    }
    else {
        // Previously a fresh BIFProxy was allocated on every call while the
        // direct-BIF branch cached its instance. The proxy only wraps the
        // class (no per-call state visible here), so cache it the same way.
        bif = new BIFProxy(clazz);
    }
    return bif;
}
/**
 * Records the version in which this function was introduced.
 * OSGiUtil.toVersion is called with a null fallback, so an unparsable
 * version string leaves the value null.
 */
public void setIntroduced(String introduced) {
    this.introduced = OSGiUtil.toVersion(introduced, null);
}
/** @return the version set via {@link #setIntroduced}, or null when unset/unparsable. */
public Version getIntroduced() {
    return introduced;
}
} | {
"pile_set_name": "Github"
} |
8
| {
"pile_set_name": "Github"
} |
/**
 * Utility that makes it possible to hide fields when a checkbox is unchecked,
 * and to reveal the hidden value of an input via a toggle button.
 */
(function ($) {
  /**
   * Bind a checkbox so that its target element is visible only while checked.
   * Expects `this` to be the checkbox element.
   */
  var setupHiding = function () {
    var $checkbox = $(this);
    // The element whose visibility is controlled by the checkbox:
    var $target = $($checkbox.data('h5p-visibility-subject-selector'));
    var applyVisibility = function () {
      $target.toggle($checkbox.is(':checked'));
    };
    $checkbox.change(applyVisibility);
    applyVisibility();
  };

  /**
   * Bind a reveal/hide button for an input that holds a hidden value.
   * Expects `this` to be the button element.
   */
  var setupRevealing = function () {
    var $btn = $(this);
    // The input whose hidden value can be revealed:
    var $field = $('#' + $btn.data('control'));

    // No hidden value available: the button has nothing to do, drop it.
    if (!$field.data('value')) {
      $btn.remove();
      return;
    }

    var isRevealed = false;
    var revealLabel = $btn.html();
    $btn.click(function () {
      if (isRevealed) {
        $field.val('');
        $btn.html(revealLabel);
        isRevealed = false;
      }
      else {
        $field.val($field.data('value'));
        $btn.html($btn.data('hide'));
        isRevealed = true;
      }
    });
  };

  $(document).ready(function () {
    // Checkboxes that control the visibility of other fields:
    $('.h5p-visibility-toggler').each(setupHiding);
    // Buttons that reveal inputs with hidden values:
    $('.h5p-reveal-value').each(setupRevealing);
  });
})(H5P.jQuery);
| {
"pile_set_name": "Github"
} |
local driver = require 'pallene.driver'
-- Compile the given Pallene source through the "uninitialized" compiler pass.
-- Returns the compiled module (falsy on failure) and all compiler errors
-- joined into one newline-separated string.
local function run_uninitialized(code)
    local module, errors = driver.compile_internal("__test__.pln", code, "uninitialized")
    local err_text = table.concat(errors, "\n")
    return module, err_text
end
-- Compile `code` and assert that compilation fails with a message that
-- contains `expected_err` (matched as a plain substring, not a pattern).
local function assert_error(code, expected_err)
    local module, err_text = run_uninitialized(code)
    assert.falsy(module)
    assert.match(expected_err, err_text, 1, true)
end
-- Diagnostic produced when a function with a non-empty return type has a
-- control-flow path that falls off the end without returning.
local missing_return =
"control reaches end of function with non-empty return type"
-- Each test feeds a Pallene program (as a long string, reproduced verbatim)
-- to assert_error and checks for the expected diagnostic.
describe("Uninitialized variable analysis: ", function()
-- A non-void function with an empty body can never return.
it("empty function", function()
assert_error([[
export function fn(): integer
end
]], missing_return)
end)
-- The `elseif a == 2` branch falls through without returning.
it("missing return in elseif", function()
assert_error([[
export function getval(a:integer): integer
if a == 1 then
return 10
elseif a == 2 then
else
return 30
end
end
]], missing_return)
end)
-- The fall-through path is buried inside nested if blocks.
it("missing return in deep elseif", function()
assert_error([[
export function getval(a:integer): integer
if a == 1 then
return 10
elseif a == 2 then
return 20
else
if a < 5 then
if a == 3 then
return 30
end
else
return 50
end
end
end
]], missing_return)
end)
-- Reading a declared-but-never-assigned local is rejected.
it("catches use of uninitialized variable", function()
assert_error([[
export function foo(): integer
local x:integer
return x
end
]], "variable 'x' is used before being initialized")
end)
-- Assignment inside a while body does not count as definite initialization,
-- because the loop may run zero times.
it("assumes that loops might not execute", function()
assert_error([[
export function foo(cond: boolean): integer
local x: integer
while cond do
x = 0
cond = x == 0
end
return x
end
]], "variable 'x' is used before being initialized")
end)
end)
| {
"pile_set_name": "Github"
} |
#ifndef BOOST_MPL_AUX_VALUE_WKND_HPP_INCLUDED
#define BOOST_MPL_AUX_VALUE_WKND_HPP_INCLUDED
// Copyright Aleksey Gurtovoy 2000-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.
// $Id$
// $Date$
// $Revision$
#include <boost/mpl/aux_/static_cast.hpp>
#include <boost/mpl/aux_/config/integral.hpp>
#include <boost/mpl/aux_/config/eti.hpp>
#include <boost/mpl/aux_/config/workaround.hpp>
// Workarounds for compilers with broken handling of integral constant
// expressions: Borland (BOOST_MPL_CFG_BCC_INTEGRAL_CONSTANTS) and the
// MSVC 6.0 "early template instantiation" bug (BOOST_MPL_CFG_MSVC_60_ETI_BUG).
#if defined(BOOST_MPL_CFG_BCC_INTEGRAL_CONSTANTS) \
|| defined(BOOST_MPL_CFG_MSVC_60_ETI_BUG)
# include <boost/mpl/int.hpp>
namespace boost { namespace mpl { namespace aux {
// Wraps an integral-constant type C_ so that its nested ::value is
// evaluated reliably on these compilers.
template< typename C_ > struct value_wknd
: C_
{
};
#if defined(BOOST_MPL_CFG_MSVC_60_ETI_BUG)
// Under MSVC 6.0 ETI a dependent type may collapse to plain 'int';
// map that case to int_<1> so a nested ::value still exists.
template<> struct value_wknd<int>
: int_<1>
{
using int_<1>::value;
};
#endif
}}}
#if !defined(BOOST_MPL_CFG_MSVC_60_ETI_BUG)
# define BOOST_MPL_AUX_VALUE_WKND(C) \
::BOOST_MPL_AUX_ADL_BARRIER_NAMESPACE::aux::value_wknd< C > \
/**/
# define BOOST_MPL_AUX_MSVC_VALUE_WKND(C) BOOST_MPL_AUX_VALUE_WKND(C)
#else
# define BOOST_MPL_AUX_VALUE_WKND(C) C
# define BOOST_MPL_AUX_MSVC_VALUE_WKND(C) \
::boost::mpl::aux::value_wknd< C > \
/**/
#endif
#else // BOOST_MPL_CFG_BCC_INTEGRAL_CONSTANTS
// Conforming compilers need no wrapper at all.
# define BOOST_MPL_AUX_VALUE_WKND(C) C
# define BOOST_MPL_AUX_MSVC_VALUE_WKND(C) C
#endif
// Old EDG front ends (<= 238) need an explicit cast to access C::value.
#if BOOST_WORKAROUND(__EDG_VERSION__, <= 238)
# define BOOST_MPL_AUX_NESTED_VALUE_WKND(T, C) \
BOOST_MPL_AUX_STATIC_CAST(T, C::value) \
/**/
#else
# define BOOST_MPL_AUX_NESTED_VALUE_WKND(T, C) \
BOOST_MPL_AUX_VALUE_WKND(C)::value \
/**/
#endif
namespace boost { namespace mpl { namespace aux {
// Extracts T::value_type; specialized below so that plain 'int' (an MSVC
// ETI artifact) does not break the metafunction.
template< typename T > struct value_type_wknd
{
typedef typename T::value_type type;
};
#if defined(BOOST_MPL_CFG_MSVC_ETI_BUG)
template<> struct value_type_wknd<int>
{
typedef int type;
};
#endif
}}}
#endif // BOOST_MPL_AUX_VALUE_WKND_HPP_INCLUDED
| {
"pile_set_name": "Github"
} |
<!--
    24x24dp vector drawable on a 24x24 viewport. Two black paths: the upper
    shape is drawn at 30% alpha (fillAlpha=".3") as a faded backplate, the
    lower bolt-like shape is fully opaque.
-->
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24.0"
android:viewportHeight="24.0">
<!-- Faded upper section (30% alpha) -->
<path
android:fillColor="#FF000000"
android:pathData="M15.67,4H14V2h-4v2H8.33C7.6,4 7,4.6 7,5.33V8h5.47L13,7v1h4V5.33C17,4.6 16.4,4 15.67,4z"
android:fillAlpha=".3"/>
<!-- Opaque bolt-shaped lower section -->
<path
android:fillColor="#FF000000"
android:pathData="M13,12.5h2L11,20v-5.5H9L12.47,8H7v12.67C7,21.4 7.6,22 8.33,22h7.33c0.74,0 1.34,-0.6 1.34,-1.33V8h-4v4.5z"/>
</vector>
| {
"pile_set_name": "Github"
} |
--- a/net/minecraft/entity/passive/EntitySheep.java
+++ b/net/minecraft/entity/passive/EntitySheep.java
@@ -72,7 +72,7 @@
p_203402_0_.put(EnumDyeColor.RED, Blocks.field_196570_aZ);
p_203402_0_.put(EnumDyeColor.BLACK, Blocks.field_196602_ba);
});
- private static final Map<EnumDyeColor, float[]> field_175514_bm = Maps.newEnumMap(Arrays.stream(EnumDyeColor.values()).collect(Collectors.toMap((p_200204_0_) -> {
+ private static final Map<EnumDyeColor, float[]> field_175514_bm = Maps.newEnumMap(Arrays.stream(EnumDyeColor.values()).collect(Collectors.toMap((EnumDyeColor p_200204_0_) -> {
return p_200204_0_;
}, EntitySheep::func_192020_c)));
private int field_70899_e;
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2007 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.ide.common.resources.configuration;
/**
 * Base class for resource qualifiers.
 * <p/>The resource qualifier classes are designed as immutable.
 */
public abstract class ResourceQualifier implements Comparable<ResourceQualifier> {
    /**
     * Returns the human readable name of the qualifier.
     */
    public abstract String getName();
    /**
     * Returns a shorter human readable name for the qualifier.
     * @see #getName()
     */
    public abstract String getShortName();
    /**
     * Returns when this qualifier was added to Android.
     */
    public abstract int since();
    /**
     * Whether this qualifier is deprecated.
     * <p/>Defaults to <code>false</code>; deprecated qualifiers override this.
     */
    public boolean deprecated() {
        return false;
    }
    /**
     * Returns whether the qualifier has a valid filter value.
     */
    public abstract boolean isValid();
    /**
     * Returns whether the qualifier has a fake value.
     * <p/>Fake values are used internally and should not be used as real qualifier value.
     */
    public abstract boolean hasFakeValue();
    /**
     * Check if the value is valid for this qualifier, and if so sets the value
     * into a Folder Configuration.
     * @param value The value to check and set. Must not be null.
     * @param config The folder configuration to receive the value. Must not be null.
     * @return true if the value was valid and was set.
     */
    public abstract boolean checkAndSet(String value, FolderConfiguration config);
    /**
     * Returns a string formatted to be used in a folder name.
     * <p/>This is declared as abstract to force children classes to implement it.
     */
    public abstract String getFolderSegment();
    /**
     * Returns whether the given qualifier is a match for the receiver.
     * <p/>The default implementation returns the result of {@link #equals(Object)}.
     * <p/>Children class that re-implements this must implement
     * {@link #isBetterMatchThan(ResourceQualifier, ResourceQualifier)} too.
     * @param qualifier the reference qualifier
     * @return true if the receiver is a match.
     */
    public boolean isMatchFor(ResourceQualifier qualifier) {
        return equals(qualifier);
    }
    /**
     * Returns true if the receiver is a better match for the given <var>reference</var> than
     * the given <var>compareTo</var> comparable.
     * @param compareTo The {@link ResourceQualifier} to compare to. Can be null, in which
     * case the method must return <code>true</code>.
     * @param reference The reference qualifier value for which the match is.
     * @return true if the receiver is a better match.
     */
    public boolean isBetterMatchThan(ResourceQualifier compareTo, ResourceQualifier reference) {
        // the default is to always return false. This gives less overhead than always returning
        // true, as it would only compare same values anyway.
        return compareTo == null;
    }
    /** String form is the folder segment, so qualifiers print as they appear in folder names. */
    @Override
    public String toString() {
        return getFolderSegment();
    }
    /**
     * Returns a string formatted for display purpose.
     */
    public abstract String getShortDisplayValue();
    /**
     * Returns a string formatted for display purpose.
     */
    public abstract String getLongDisplayValue();
    /**
     * Returns <code>true</code> if both objects are equal.
     * <p/>This is declared as abstract to force children classes to implement it.
     */
    @Override
    public abstract boolean equals(Object object);
    /**
     * Returns a hash code value for the object.
     * <p/>This is declared as abstract to force children classes to implement it.
     */
    @Override
    public abstract int hashCode();
    /** Orders qualifiers lexicographically by their folder-segment string form. */
    @Override
    public final int compareTo(ResourceQualifier o) {
        return toString().compareTo(o.toString());
    }
}
| {
"pile_set_name": "Github"
} |
// Scintilla source code edit control
/** @file StyleContext.h
** Lexer infrastructure.
**/
// Copyright 1998-2004 by Neil Hodgson <[email protected]>
// This file is in the public domain.
#ifndef STYLECONTEXT_H
#define STYLECONTEXT_H
#ifdef SCI_NAMESPACE
namespace Scintilla {
#endif
// All languages handled so far can treat all characters >= 0x80 as one class
// which just continues the current token or starts an identifier if in default.
// DBCS treated specially as the second character can be < 0x80 and hence
// syntactically significant. UTF-8 avoids this as all trail bytes are >= 0x80
/**
 * Cursor over a range of document text for lexers: tracks the previous,
 * current and next characters (chPrev/ch/chNext), the current style, and
 * line-start/line-end flags, while accumulating style runs into the
 * LexAccessor as SetState/ForwardSetState/Complete are called.
 */
class StyleContext {
	LexAccessor &styler;
	// Non-null only for multi-byte encodings; used to read whole characters
	// instead of single bytes.
	IDocumentWithLineEnd *multiByteAccess;
	// End of the range being styled (extended by one past the document end
	// in the constructor when the range reaches it).
	Sci_PositionU endPos;
	Sci_PositionU lengthDocument;
	// Used for optimizing GetRelativeCharacter
	Sci_PositionU posRelative;
	Sci_PositionU currentPosLastRelative;
	Sci_Position offsetRelative;
	// Not implemented: copying a StyleContext is forbidden.
	StyleContext &operator=(const StyleContext &);
	// Loads the character after the current one into chNext/widthNext and
	// recomputes atLineEnd for the current position.
	void GetNextChar() {
		if (multiByteAccess) {
			chNext = multiByteAccess->GetCharacterAndWidth(currentPos+width, &widthNext);
		} else {
			chNext = static_cast<unsigned char>(styler.SafeGetCharAt(currentPos+width, 0));
			widthNext = 1;
		}
		// End of line determined from line end position, allowing CR, LF,
		// CRLF and Unicode line ends as set by document.
		if (currentLine < lineDocEnd)
			atLineEnd = static_cast<Sci_Position>(currentPos) >= (lineStartNext-1);
		else // Last line
			atLineEnd = static_cast<Sci_Position>(currentPos) >= lineStartNext;
	}
public:
	Sci_PositionU currentPos;
	Sci_Position currentLine;
	Sci_Position lineDocEnd;
	Sci_Position lineStartNext;
	bool atLineStart;
	bool atLineEnd;
	int state;
	int chPrev;
	int ch;
	Sci_Position width;      // byte width of ch
	int chNext;
	Sci_Position widthNext;  // byte width of chNext
	StyleContext(Sci_PositionU startPos, Sci_PositionU length,
	        int initStyle, LexAccessor &styler_, char chMask='\377') :
		styler(styler_),
		multiByteAccess(0),
		endPos(startPos + length),
		posRelative(0),
		currentPosLastRelative(0x7FFFFFFF),
		offsetRelative(0),
		currentPos(startPos),
		currentLine(-1),
		lineStartNext(-1),
		atLineEnd(false),
		state(initStyle & chMask), // Mask off all bits which aren't in the chMask.
		chPrev(0),
		ch(0),
		width(0),
		chNext(0),
		widthNext(1) {
		if (styler.Encoding() != enc8bit) {
			multiByteAccess = styler.MultiByteAccess();
		}
		styler.StartAt(startPos /*, chMask*/);
		styler.StartSegment(startPos);
		currentLine = styler.GetLine(startPos);
		lineStartNext = styler.LineStart(currentLine+1);
		lengthDocument = static_cast<Sci_PositionU>(styler.Length());
		if (endPos == lengthDocument)
			endPos++;
		lineDocEnd = styler.GetLine(lengthDocument);
		atLineStart = static_cast<Sci_PositionU>(styler.LineStart(currentLine)) == startPos;
		// Variable width is now 0 so GetNextChar gets the char at currentPos into chNext/widthNext
		width = 0;
		GetNextChar();
		ch = chNext;
		width = widthNext;
		GetNextChar();
	}
	// Flush the final style run (ending one position before currentPos, or
	// two when past the artificial end-of-document position) to the styler.
	void Complete() {
		styler.ColourTo(currentPos - ((currentPos > lengthDocument) ? 2 : 1), state);
		styler.Flush();
	}
	// True while there is still text to style within the requested range.
	bool More() const {
		return currentPos < endPos;
	}
	// Advance one character, updating prev/current/next and line flags.
	// Past endPos this degrades to spaces and sets atLineEnd without moving.
	void Forward() {
		if (currentPos < endPos) {
			atLineStart = atLineEnd;
			if (atLineStart) {
				currentLine++;
				lineStartNext = styler.LineStart(currentLine+1);
			}
			chPrev = ch;
			currentPos += width;
			ch = chNext;
			width = widthNext;
			GetNextChar();
		} else {
			atLineStart = false;
			chPrev = ' ';
			ch = ' ';
			chNext = ' ';
			atLineEnd = true;
		}
	}
	// Advance nb characters.
	void Forward(Sci_Position nb) {
		for (Sci_Position i = 0; i < nb; i++) {
			Forward();
		}
	}
	// Advance nb *bytes* (not characters).
	// NOTE(review): if currentPos + nb went past endPos, Forward() would stop
	// moving currentPos and this loop would never terminate; callers appear to
	// be expected to stay within the styled range - confirm before relying on it.
	void ForwardBytes(Sci_Position nb) {
		Sci_PositionU forwardPos = currentPos + nb;
		while (forwardPos > currentPos) {
			Forward();
		}
	}
	// Change the pending style without ending the current run.
	void ChangeState(int state_) {
		state = state_;
	}
	// End the current style run at the previous position and start a new one.
	void SetState(int state_) {
		styler.ColourTo(currentPos - ((currentPos > lengthDocument) ? 2 : 1), state);
		state = state_;
	}
	// Include the current character in the run being ended, then switch style.
	void ForwardSetState(int state_) {
		Forward();
		styler.ColourTo(currentPos - ((currentPos > lengthDocument) ? 2 : 1), state);
		state = state_;
	}
	// Length in bytes of the style run accumulated so far.
	Sci_Position LengthCurrent() const {
		return currentPos - styler.GetStartSegment();
	}
	// Byte at offset n from the current position (0 when out of range).
	int GetRelative(Sci_Position n) {
		return static_cast<unsigned char>(styler.SafeGetCharAt(currentPos+n, 0));
	}
	// Character n characters away from the current position; caches the last
	// lookup (posRelative/offsetRelative) to speed up sequential scans.
	int GetRelativeCharacter(Sci_Position n) {
		if (n == 0)
			return ch;
		if (multiByteAccess) {
			if ((currentPosLastRelative != currentPos) ||
				((n > 0) && ((offsetRelative < 0) || (n < offsetRelative))) ||
				((n < 0) && ((offsetRelative > 0) || (n > offsetRelative)))) {
				posRelative = currentPos;
				offsetRelative = 0;
			}
			Sci_Position diffRelative = n - offsetRelative;
			Sci_Position posNew = multiByteAccess->GetRelativePosition(posRelative, diffRelative);
			int chReturn = multiByteAccess->GetCharacterAndWidth(posNew, 0);
			posRelative = posNew;
			currentPosLastRelative = currentPos;
			offsetRelative = n;
			return chReturn;
		} else {
			// fast version for single byte encodings
			return static_cast<unsigned char>(styler.SafeGetCharAt(currentPos + n, 0));
		}
	}
	// True when the current character equals ch0.
	bool Match(char ch0) const {
		return ch == static_cast<unsigned char>(ch0);
	}
	// True when the current and next characters equal ch0/ch1.
	bool Match(char ch0, char ch1) const {
		return (ch == static_cast<unsigned char>(ch0)) && (chNext == static_cast<unsigned char>(ch1));
	}
	// True when the text starting at the current position equals s (byte-wise).
	bool Match(const char *s) {
		if (ch != static_cast<unsigned char>(*s))
			return false;
		s++;
		if (!*s)
			return true;
		if (chNext != static_cast<unsigned char>(*s))
			return false;
		s++;
		for (int n=2; *s; n++) {
			if (*s != styler.SafeGetCharAt(currentPos+n, 0))
				return false;
			s++;
		}
		return true;
	}
	// Non-inline
	bool MatchIgnoreCase(const char *s);
	void GetCurrent(char *s, Sci_PositionU len);
	void GetCurrentLowered(char *s, Sci_PositionU len);
};
#ifdef SCI_NAMESPACE
}
#endif
#endif
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2014-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*
*/
#import "CKDataSourceModificationHelper.h"
#import <ComponentKit/CKBuildComponent.h>
#import <ComponentKit/CKComponentContext.h>
#import <ComponentKit/CKComponentController.h>
#import <ComponentKit/CKComponentProvider.h>
#import <ComponentKit/CKDataSourceConfigurationInternal.h>
#import <ComponentKit/CKDataSourceItemInternal.h>
#import <ComponentKit/CKExceptionInfoScopedValue.h>
#import <ComponentKit/CKMountable.h>
/**
 Builds a single data source item for `model`: tags the exception-info scope
 with the model/context class names, runs the component build (applying the
 pending state updates and the reflow trigger), computes the root layout for
 `sizeRange`, and wraps the results (with the bounds animation) into a
 CKDataSourceItem.
 */
CKDataSourceItem *CKBuildDataSourceItem(CK::NonNull<CKComponentScopeRoot *> previousRoot,
                                        const CKComponentStateUpdateMap &stateUpdates,
                                        const CKSizeRange &sizeRange,
                                        CKDataSourceConfiguration *configuration,
                                        id model,
                                        id context,
                                        CKReflowTrigger reflowTrigger)
{
  // Scoped values: crash/exception reports produced while building this item
  // carry the model and context class names.
  CKExceptionInfoScopedValue modelValue{@"ck_data_source_item_model", NSStringFromClass([model class]) ?: @"Nil"};
  CKExceptionInfoScopedValue contextValue{@"ck_data_source_item_context", NSStringFromClass([context class]) ?: @"Nil"};
  auto const componentProvider = [configuration componentProvider];
  const auto componentFactory = ^{
    return componentProvider(model, context);
  };
  // Any reflow trigger other than None counts as "the tree needs reflow".
  auto const treeNeedsReflow = reflowTrigger != CKBuildTriggerNone;
  auto const buildTrigger = CKBuildComponentTrigger(previousRoot, stateUpdates, treeNeedsReflow, NO);
  const CKBuildComponentResult result = CKBuildComponent(previousRoot,
                                                         stateUpdates,
                                                         componentFactory,
                                                         buildTrigger,
                                                         reflowTrigger);
  const auto rootLayout = CKComputeRootComponentLayout(result.component,
                                                       sizeRange,
                                                       [result.scopeRoot analyticsListener],
                                                       result.buildTrigger,
                                                       result.scopeRoot);
  return [[CKDataSourceItem alloc] initWithRootLayout:rootLayout
                                                model:model
                                            scopeRoot:result.scopeRoot
                                      boundsAnimation:result.boundsAnimation];
}
| {
"pile_set_name": "Github"
} |
//===----------------------------------------------------------------------===//
//
// The LLVM Compiler Infrastructure
//
// This file is dual licensed under the MIT and the University of Illinois Open
// Source Licenses. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// test:

// template <class charT, class traits, size_t N>
//   basic_istream<charT, traits>&
//   operator>>(basic_istream<charT, traits>& is, bitset<N>& x);
//
// NOTE(review): the original header described operator<< on a basic_ostream,
// but the code below exercises stream *extraction* (operator>>) into a bitset.

#include <bitset>
#include <sstream>
#include <cassert>

// Reads the bit string "01011010" into an 8-bit bitset and checks the value:
// binary 01011010 == 0x5A.
int main()
{
    std::istringstream in("01011010");
    std::bitset<8> b;
    in >> b;
    assert(b.to_ulong() == 0x5A);
}
| {
"pile_set_name": "Github"
} |
// Copyright (C) 2019-2020 Algorand, Inc.
// This file is part of go-algorand
//
// go-algorand is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// go-algorand is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with go-algorand. If not, see <https://www.gnu.org/licenses/>.
package main
import (
"encoding/base64"
"fmt"
"github.com/spf13/cobra"
"github.com/algorand/go-algorand/libgoal"
)
// Flag storage for the `asset` subcommands; every variable below is bound to
// a cobra flag in init().
var (
	assetID                 uint64 // --assetid: numeric asset identifier
	assetCreator            string // --creator: address of the asset creator
	assetTotal              uint64 // --total: total token supply at creation
	assetDecimals           uint32 // --decimals: display precision
	assetFrozen             bool   // --defaultfrozen / --freeze
	assetUnitName           string // --unitname / deprecated --asset
	assetMetadataHashBase64 string // --assetmetadatab64: base64 32-byte hash
	assetURL                string // --asseturl
	assetName               string // --name

	assetManager  string // --manager: issuer of destroy/config txns
	assetClawback string // --clawback
	assetFreezer  string // --freezer

	assetNewManager  string // --new-manager
	assetNewReserve  string // --new-reserve
	assetNewFreezer  string // --new-freezer
	assetNewClawback string // --new-clawback
)
// init wires the asset subcommands onto assetCmd and binds all of their
// flags to the package-level flag variables above.
func init() {
	assetCmd.AddCommand(createAssetCmd)
	assetCmd.AddCommand(destroyAssetCmd)
	assetCmd.AddCommand(configAssetCmd)
	assetCmd.AddCommand(sendAssetCmd)
	assetCmd.AddCommand(infoAssetCmd)
	assetCmd.AddCommand(freezeAssetCmd)

	assetCmd.PersistentFlags().StringVarP(&walletName, "wallet", "w", "", "Set the wallet to be used for the selected operation")

	// asset create
	createAssetCmd.Flags().StringVar(&assetCreator, "creator", "", "Account address for creating an asset")
	createAssetCmd.Flags().Uint64Var(&assetTotal, "total", 0, "Total amount of tokens for created asset")
	createAssetCmd.Flags().Uint32Var(&assetDecimals, "decimals", 0, "The number of digits to use after the decimal point when displaying this asset. If set to 0, the asset is not divisible beyond its base unit. If set to 1, the base asset unit is tenths. If 2, the base asset unit is hundredths, and so on.")
	createAssetCmd.Flags().BoolVar(&assetFrozen, "defaultfrozen", false, "Freeze or not freeze holdings by default")
	createAssetCmd.Flags().StringVar(&assetUnitName, "unitname", "", "Name for the unit of asset")
	createAssetCmd.Flags().StringVar(&assetName, "name", "", "Name for the entire asset")
	createAssetCmd.Flags().StringVar(&assetURL, "asseturl", "", "URL where user can access more information about the asset (max 32 bytes)")
	createAssetCmd.Flags().StringVar(&assetMetadataHashBase64, "assetmetadatab64", "", "base-64 encoded 32-byte commitment to asset metadata")
	createAssetCmd.MarkFlagRequired("total")
	createAssetCmd.MarkFlagRequired("creator")

	// asset destroy
	destroyAssetCmd.Flags().StringVar(&assetManager, "manager", "", "Manager account to issue the destroy transaction (defaults to creator)")
	destroyAssetCmd.Flags().StringVar(&assetCreator, "creator", "", "Account address for asset to destroy")
	destroyAssetCmd.Flags().Uint64Var(&assetID, "assetid", 0, "Asset ID to destroy")
	destroyAssetCmd.Flags().StringVar(&assetUnitName, "asset", "", "Unit name of asset to destroy")

	// asset config
	configAssetCmd.Flags().StringVar(&assetManager, "manager", "", "Manager account to issue the config transaction (defaults to creator)")
	configAssetCmd.Flags().StringVar(&assetCreator, "creator", "", "Account address for asset to configure")
	configAssetCmd.Flags().Uint64Var(&assetID, "assetid", 0, "Asset ID to configure")
	configAssetCmd.Flags().StringVar(&assetUnitName, "asset", "", "Unit name of asset to configure")
	configAssetCmd.Flags().StringVar(&assetNewManager, "new-manager", "", "New manager address")
	configAssetCmd.Flags().StringVar(&assetNewReserve, "new-reserve", "", "New reserve address")
	configAssetCmd.Flags().StringVar(&assetNewFreezer, "new-freezer", "", "New freeze address")
	configAssetCmd.Flags().StringVar(&assetNewClawback, "new-clawback", "", "New clawback address")
	configAssetCmd.MarkFlagRequired("manager")

	// asset send
	sendAssetCmd.Flags().StringVar(&assetClawback, "clawback", "", "Address to issue a clawback transaction from (defaults to no clawback)")
	sendAssetCmd.Flags().StringVar(&assetCreator, "creator", "", "Account address for asset creator")
	sendAssetCmd.Flags().Uint64Var(&assetID, "assetid", 0, "ID of the asset being transferred")
	sendAssetCmd.Flags().StringVar(&assetUnitName, "asset", "", "Unit name of the asset being transferred")
	sendAssetCmd.Flags().StringVarP(&account, "from", "f", "", "Account address to send the money from (if not specified, uses default account)")
	sendAssetCmd.Flags().StringVarP(&toAddress, "to", "t", "", "Address to send to money to (required)")
	sendAssetCmd.Flags().Uint64VarP(&amount, "amount", "a", 0, "The amount to be transferred (required), in base units of the asset.")
	sendAssetCmd.Flags().StringVarP(&closeToAddress, "close-to", "c", "", "Close asset account and send remainder to this address")
	sendAssetCmd.MarkFlagRequired("to")
	sendAssetCmd.MarkFlagRequired("amount")

	// asset freeze
	freezeAssetCmd.Flags().StringVar(&assetFreezer, "freezer", "", "Address to issue a freeze transaction from")
	freezeAssetCmd.Flags().StringVar(&assetCreator, "creator", "", "Account address for asset creator")
	freezeAssetCmd.Flags().Uint64Var(&assetID, "assetid", 0, "ID of the asset being frozen")
	freezeAssetCmd.Flags().StringVar(&assetUnitName, "asset", "", "Unit name of the asset being frozen")
	freezeAssetCmd.Flags().StringVar(&account, "account", "", "Account address to freeze/unfreeze")
	freezeAssetCmd.Flags().BoolVar(&assetFrozen, "freeze", false, "Freeze or unfreeze")
	freezeAssetCmd.MarkFlagRequired("freezer")
	freezeAssetCmd.MarkFlagRequired("account")
	freezeAssetCmd.MarkFlagRequired("freeze")

	// Add common transaction flags to all txn-generating asset commands
	addTxnFlags(createAssetCmd)
	addTxnFlags(destroyAssetCmd)
	addTxnFlags(configAssetCmd)
	addTxnFlags(sendAssetCmd)
	addTxnFlags(freezeAssetCmd)

	// asset info (read-only; no txn flags)
	infoAssetCmd.Flags().Uint64Var(&assetID, "assetid", 0, "ID of the asset to look up")
	infoAssetCmd.Flags().StringVar(&assetUnitName, "asset", "", "DEPRECATED! Unit name of the asset to look up")
	infoAssetCmd.Flags().StringVar(&assetUnitName, "unitname", "", "Unit name of the asset to look up")
	infoAssetCmd.Flags().StringVar(&assetCreator, "creator", "", "Account address of the asset creator")
}
// assetCmd is the parent `asset` command; invoked without a subcommand it
// just prints its help text.
var assetCmd = &cobra.Command{
	Use:   "asset",
	Short: "Manage assets",
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, args []string) {
		// If no arguments passed, we should fallback to help
		cmd.HelpFunc()(cmd, args)
	},
}
// lookupAssetID resolves the package-level assetID from the command's flags.
// Either --assetid is given directly, or the asset is located by unit name
// (--unitname, or the deprecated --asset) among the assets created by
// `creator`; exactly one addressing mode must be used. All failures are
// reported via reportErrorf (which, given the control flow below, is
// presumed not to return).
func lookupAssetID(cmd *cobra.Command, creator string, client libgoal.Client) {
	if cmd.Flags().Changed("asset") {
		reportWarnln("The [--asset] flag is deprecated and will be removed in a future release, use [--unitname] instead.")
	}

	if cmd.Flags().Changed("asset") && cmd.Flags().Changed("unitname") {
		reportErrorf("The [--asset] flag has been replaced by [--unitname], do not provide both flags.")
	}

	// Both spellings write into assetUnitName, so either counts as "by name".
	assetOrUnit := cmd.Flags().Changed("asset") || cmd.Flags().Changed("unitname")

	if cmd.Flags().Changed("assetid") && assetOrUnit {
		reportErrorf("Only one of [--assetid] or [--unitname and --creator] should be specified")
	}

	if cmd.Flags().Changed("assetid") {
		// assetID was bound directly by the flag; nothing to resolve.
		return
	}
	if !assetOrUnit {
		reportErrorf("Either [--assetid] or [--unitname and --creator] must be specified")
	}

	if !cmd.Flags().Changed("creator") {
		reportErrorf("Asset creator must be specified if finding asset by name. " +
			"Use the asset's integer identifier [--assetid] if the " +
			"creator account is unknown.")
	}

	// Scan the creator's created assets for a unique unit-name match.
	response, err := client.AccountInformation(creator)
	if err != nil {
		reportErrorf(errorRequestFail, err)
	}

	nmatch := 0
	for id, params := range response.AssetParams {
		if params.UnitName == assetUnitName {
			assetID = id
			nmatch++
		}
	}

	if nmatch == 0 {
		reportErrorf("No matches for asset unit name %s in creator %s", assetUnitName, creator)
	}

	if nmatch > 1 {
		reportErrorf("Multiple matches for asset unit name %s in creator %s", assetUnitName, creator)
	}
}
// createAssetCmd implements `asset create`: builds, signs/broadcasts (or
// writes to a file) an asset-creation transaction where the creator account
// is also used as manager, reserve, freeze and clawback address.
var createAssetCmd = &cobra.Command{
	Use:   "create",
	Short: "Create an asset",
	Long:  "Post a transaction declaring and issuing a new layer-one asset on the network.",
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, _ []string) {
		checkTxValidityPeriodCmdFlags(cmd)
		dataDir := ensureSingleDataDir()
		client := ensureFullClient(dataDir)
		accountList := makeAccountsList(dataDir)
		creator := accountList.getAddressByName(assetCreator)

		// Optional 32-byte metadata commitment, supplied base64 encoded.
		var err error
		var assetMetadataHash []byte
		if assetMetadataHashBase64 != "" {
			assetMetadataHash, err = base64.StdEncoding.DecodeString(assetMetadataHashBase64)
			if err != nil {
				reportErrorf(malformedMetadataHash, assetMetadataHashBase64, err)
			}
		}

		// The creator serves as manager/reserve/freeze/clawback on creation.
		tx, err := client.MakeUnsignedAssetCreateTx(assetTotal, assetFrozen, creator, creator, creator, creator, assetUnitName, assetName, assetURL, assetMetadataHash, assetDecimals)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		tx.Note = parseNoteField(cmd)
		tx.Lease = parseLease(cmd)

		fv, lv, err := client.ComputeValidityRounds(firstValid, lastValid, numValidRounds)
		if err != nil {
			reportErrorf("Cannot determine last valid round: %s", err)
		}
		tx, err = client.FillUnsignedTxTemplate(creator, fv, lv, fee, tx)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}

		if outFilename == "" {
			// Sign with the wallet and broadcast immediately.
			wh, pw := ensureWalletHandleMaybePassword(dataDir, walletName, true)
			signedTxn, err := client.SignTransactionWithWallet(wh, pw, tx)
			if err != nil {
				reportErrorf(errorSigningTX, err)
			}
			txid, err := client.BroadcastTransaction(signedTxn)
			if err != nil {
				reportErrorf(errorBroadcastingTX, err)
			}
			// Report tx details to user
			reportInfof("Issued transaction from account %s, txid %s (fee %d)", tx.Sender, txid, tx.Fee.Raw)
			if !noWaitAfterSend {
				err = waitForCommit(client, txid)
				if err != nil {
					reportErrorf(err.Error())
				}
				// Check if we know about the transaction yet
				txn, err := client.PendingTransactionInformation(txid)
				if err != nil {
					reportErrorf(err.Error())
				}
				if txn.TransactionResults != nil && txn.TransactionResults.CreatedAssetIndex != 0 {
					reportInfof("Created asset with asset index %d", txn.TransactionResults.CreatedAssetIndex)
				}
			}
		} else {
			// Write the (optionally signed) transaction to a file instead.
			err = writeTxnToFile(client, sign, dataDir, walletName, tx, outFilename)
			if err != nil {
				reportErrorf(err.Error())
			}
		}
	},
}
// destroyAssetCmd implements `asset destroy`: builds and sends (or writes to
// a file) an asset-destroy transaction issued by the asset manager
// (defaulting to the creator).
var destroyAssetCmd = &cobra.Command{
	Use:   "destroy",
	Short: "Destroy an asset",
	Long:  `Issue a transaction deleting an asset from the network. This transaction must be issued by the asset owner, who must hold all outstanding asset tokens.`,
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, _ []string) {
		checkTxValidityPeriodCmdFlags(cmd)
		dataDir := ensureSingleDataDir()
		client := ensureFullClient(dataDir)
		accountList := makeAccountsList(dataDir)

		if assetManager == "" {
			assetManager = assetCreator
		}

		creator := accountList.getAddressByName(assetCreator)
		manager := accountList.getAddressByName(assetManager)

		// Resolves the package-level assetID from flags (by id or unit name).
		lookupAssetID(cmd, creator, client)

		tx, err := client.MakeUnsignedAssetDestroyTx(assetID)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		tx.Note = parseNoteField(cmd)
		tx.Lease = parseLease(cmd)

		// NOTE(review): this overwrites the package-level firstValid/lastValid,
		// unlike createAssetCmd which uses local fv/lv - confirm intentional.
		firstValid, lastValid, err = client.ComputeValidityRounds(firstValid, lastValid, numValidRounds)
		if err != nil {
			reportErrorf("Cannot determine last valid round: %s", err)
		}
		tx, err = client.FillUnsignedTxTemplate(manager, firstValid, lastValid, fee, tx)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}

		if outFilename == "" {
			wh, pw := ensureWalletHandleMaybePassword(dataDir, walletName, true)
			signedTxn, err := client.SignTransactionWithWallet(wh, pw, tx)
			if err != nil {
				reportErrorf(errorSigningTX, err)
			}
			txid, err := client.BroadcastTransaction(signedTxn)
			if err != nil {
				reportErrorf(errorBroadcastingTX, err)
			}
			// Report tx details to user
			reportInfof("Issued transaction from account %s, txid %s (fee %d)", tx.Sender, txid, tx.Fee.Raw)
			if !noWaitAfterSend {
				err = waitForCommit(client, txid)
				if err != nil {
					reportErrorf(err.Error())
				}
			}
		} else {
			err = writeTxnToFile(client, sign, dataDir, walletName, tx, outFilename)
			if err != nil {
				reportErrorf(err.Error())
			}
		}
	},
}
// configAssetCmd implements `asset config`: builds and sends (or writes to a
// file) an asset-config transaction changing any subset of the manager,
// reserve, freeze and clawback addresses. Only addresses whose corresponding
// --new-* flag was passed are changed (nil pointers leave them untouched).
var configAssetCmd = &cobra.Command{
	Use:   "config",
	Short: "Configure an asset",
	Long:  `Change an asset configuration. This transaction must be issued by the asset manager. This allows any management address to be changed: manager, freezer, reserve, or clawback.`,
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, _ []string) {
		checkTxValidityPeriodCmdFlags(cmd)
		dataDir := ensureSingleDataDir()
		client := ensureFullClient(dataDir)
		accountList := makeAccountsList(dataDir)

		if assetManager == "" {
			assetManager = assetCreator
		}

		creator := accountList.getAddressByName(assetCreator)
		manager := accountList.getAddressByName(assetManager)

		// Resolves the package-level assetID from flags (by id or unit name).
		lookupAssetID(cmd, creator, client)

		// A nil pointer means "leave this address unchanged".
		var newManager, newReserve, newFreeze, newClawback *string
		if cmd.Flags().Changed("new-manager") {
			assetNewManager = accountList.getAddressByName(assetNewManager)
			newManager = &assetNewManager
		}

		if cmd.Flags().Changed("new-reserve") {
			assetNewReserve = accountList.getAddressByName(assetNewReserve)
			newReserve = &assetNewReserve
		}

		if cmd.Flags().Changed("new-freezer") {
			assetNewFreezer = accountList.getAddressByName(assetNewFreezer)
			newFreeze = &assetNewFreezer
		}

		if cmd.Flags().Changed("new-clawback") {
			assetNewClawback = accountList.getAddressByName(assetNewClawback)
			newClawback = &assetNewClawback
		}

		tx, err := client.MakeUnsignedAssetConfigTx(creator, assetID, newManager, newReserve, newFreeze, newClawback)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		tx.Note = parseNoteField(cmd)
		tx.Lease = parseLease(cmd)

		// NOTE(review): overwrites package-level firstValid/lastValid, unlike
		// createAssetCmd which uses local fv/lv - confirm intentional.
		firstValid, lastValid, err = client.ComputeValidityRounds(firstValid, lastValid, numValidRounds)
		if err != nil {
			reportErrorf("Cannot determine last valid round: %s", err)
		}
		tx, err = client.FillUnsignedTxTemplate(manager, firstValid, lastValid, fee, tx)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}

		if outFilename == "" {
			wh, pw := ensureWalletHandleMaybePassword(dataDir, walletName, true)
			signedTxn, err := client.SignTransactionWithWallet(wh, pw, tx)
			if err != nil {
				reportErrorf(errorSigningTX, err)
			}
			txid, err := client.BroadcastTransaction(signedTxn)
			if err != nil {
				reportErrorf(errorBroadcastingTX, err)
			}
			// Report tx details to user
			reportInfof("Issued transaction from account %s, txid %s (fee %d)", tx.Sender, txid, tx.Fee.Raw)
			if !noWaitAfterSend {
				err = waitForCommit(client, txid)
				if err != nil {
					reportErrorf(err.Error())
				}
			}
		} else {
			err = writeTxnToFile(client, sign, dataDir, walletName, tx, outFilename)
			if err != nil {
				reportErrorf(err.Error())
			}
		}
	},
}
// sendAssetCmd implements `goal asset send`: transfer asset holdings between
// accounts, including the zero-amount self-transfer used to opt in to an
// asset, optional close-out, and clawback transfers issued by the asset's
// clawback address. Flag-bound package variables: account, toAddress,
// assetCreator, assetClawback, closeToAddress, amount, etc.
var sendAssetCmd = &cobra.Command{
	Use:   "send",
	Short: "Transfer assets",
	Long:  "Transfer asset holdings. An account can begin accepting an asset by issuing a zero-amount asset transfer to itself.",
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, _ []string) {
		checkTxValidityPeriodCmdFlags(cmd)
		dataDir := ensureSingleDataDir()
		client := ensureFullClient(dataDir)
		accountList := makeAccountsList(dataDir)
		// Check if from was specified, else use default
		if account == "" {
			account = accountList.getDefaultAccount()
		}
		sender := accountList.getAddressByName(account)
		toAddressResolved := accountList.getAddressByName(toAddress)
		creatorResolved := accountList.getAddressByName(assetCreator)
		// Presumably resolves the package-level assetID from the flags on
		// cmd -- TODO confirm against lookupAssetID's definition.
		lookupAssetID(cmd, creatorResolved, client)
		// For a clawback transfer the clawback address signs and sends the
		// transaction, while the original holder becomes the asset sender
		// (passed through senderForClawback).
		var senderForClawback string
		if assetClawback != "" {
			senderForClawback = sender
			sender = accountList.getAddressByName(assetClawback)
		}
		// Optional close-out: remaining asset holdings are sent to this
		// address and the asset slot is freed.
		var closeToAddressResolved string
		if closeToAddress != "" {
			closeToAddressResolved = accountList.getAddressByName(closeToAddress)
		}
		tx, err := client.MakeUnsignedAssetSendTx(assetID, amount, toAddressResolved, closeToAddressResolved, senderForClawback)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		tx.Note = parseNoteField(cmd)
		tx.Lease = parseLease(cmd)
		// Fill in the validity window from the validity-period flags.
		firstValid, lastValid, err = client.ComputeValidityRounds(firstValid, lastValid, numValidRounds)
		if err != nil {
			reportErrorf("Cannot determine last valid round: %s", err)
		}
		tx, err = client.FillUnsignedTxTemplate(sender, firstValid, lastValid, fee, tx)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		if outFilename == "" {
			// No output file requested: sign with the wallet and broadcast.
			wh, pw := ensureWalletHandleMaybePassword(dataDir, walletName, true)
			signedTxn, err := client.SignTransactionWithWallet(wh, pw, tx)
			if err != nil {
				reportErrorf(errorSigningTX, err)
			}
			txid, err := client.BroadcastTransaction(signedTxn)
			if err != nil {
				reportErrorf(errorBroadcastingTX, err)
			}
			// Report tx details to user
			reportInfof("Issued transaction from account %s, txid %s (fee %d)", tx.Sender, txid, tx.Fee.Raw)
			if !noWaitAfterSend {
				err = waitForCommit(client, txid)
				if err != nil {
					reportErrorf(err.Error())
				}
			}
		} else {
			// Write the (optionally signed) transaction to a file instead.
			err = writeTxnToFile(client, sign, dataDir, walletName, tx, outFilename)
			if err != nil {
				reportErrorf(err.Error())
			}
		}
	},
}
// freezeAssetCmd implements `goal asset freeze`: freeze or unfreeze a target
// account's holdings of an asset. The transaction sender must be the asset's
// freeze address (flag-bound package variable assetFreezer); the desired
// frozen state comes from assetFrozen.
var freezeAssetCmd = &cobra.Command{
	Use:   "freeze",
	Short: "Freeze assets",
	Long:  `Freeze or unfreeze assets for a target account. The transaction must be issued by the freeze address for the asset in question.`,
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, _ []string) {
		checkTxValidityPeriodCmdFlags(cmd)
		dataDir := ensureSingleDataDir()
		client := ensureFullClient(dataDir)
		accountList := makeAccountsList(dataDir)
		// Resolve the freeze address, asset creator, and target account.
		freezer := accountList.getAddressByName(assetFreezer)
		creatorResolved := accountList.getAddressByName(assetCreator)
		accountResolved := accountList.getAddressByName(account)
		// Presumably resolves the package-level assetID from the flags on
		// cmd -- TODO confirm against lookupAssetID's definition.
		lookupAssetID(cmd, creatorResolved, client)
		tx, err := client.MakeUnsignedAssetFreezeTx(assetID, accountResolved, assetFrozen)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		tx.Note = parseNoteField(cmd)
		tx.Lease = parseLease(cmd)
		// Fill in the validity window from the validity-period flags.
		firstValid, lastValid, err = client.ComputeValidityRounds(firstValid, lastValid, numValidRounds)
		if err != nil {
			reportErrorf("Cannot determine last valid round: %s", err)
		}
		// The freeze address is the sender of an asset-freeze transaction.
		tx, err = client.FillUnsignedTxTemplate(freezer, firstValid, lastValid, fee, tx)
		if err != nil {
			reportErrorf("Cannot construct transaction: %s", err)
		}
		if outFilename == "" {
			// No output file requested: sign with the wallet and broadcast.
			wh, pw := ensureWalletHandleMaybePassword(dataDir, walletName, true)
			signedTxn, err := client.SignTransactionWithWallet(wh, pw, tx)
			if err != nil {
				reportErrorf(errorSigningTX, err)
			}
			txid, err := client.BroadcastTransaction(signedTxn)
			if err != nil {
				reportErrorf(errorBroadcastingTX, err)
			}
			// Report tx details to user
			reportInfof("Issued transaction from account %s, txid %s (fee %d)", tx.Sender, txid, tx.Fee.Raw)
			if !noWaitAfterSend {
				err = waitForCommit(client, txid)
				if err != nil {
					reportErrorf(err.Error())
				}
			}
		} else {
			// Write the (optionally signed) transaction to a file instead.
			err = writeTxnToFile(client, sign, dataDir, walletName, tx, outFilename)
			if err != nil {
				reportErrorf(err.Error())
			}
		}
	},
}
// assetDecimalsFmt renders a raw (base-unit) asset amount as a decimal
// string with exactly `decimals` fractional digits, e.g. (12345, 2) ->
// "123.45" and (5, 3) -> "0.005". With decimals == 0 the raw integer is
// returned unchanged.
func assetDecimalsFmt(amount uint64, decimals uint32) string {
	// No fractional part requested: print the raw integer.
	if decimals == 0 {
		return fmt.Sprintf("%d", amount)
	}
	// Compute 10^decimals as the divisor separating whole units from the
	// fractional remainder.
	divisor := uint64(1)
	for d := decimals; d > 0; d-- {
		divisor *= 10
	}
	whole, frac := amount/divisor, amount%divisor
	// %0*d zero-pads the remainder to exactly `decimals` digits.
	return fmt.Sprintf("%d.%0*d", whole, decimals, frac)
}
// infoAssetCmd implements `goal asset info`: query the network for an
// asset's parameters and print them, along with the reserve balance and the
// derived "issued" amount (total minus reserve holdings).
var infoAssetCmd = &cobra.Command{
	Use:   "info",
	Short: "Look up current parameters for an asset",
	Long:  `Look up asset information stored on the network, such as asset creator, management addresses, or asset name.`,
	Args:  validateNoPosArgsFn,
	Run: func(cmd *cobra.Command, _ []string) {
		dataDir := ensureSingleDataDir()
		client := ensureFullClient(dataDir)
		accountList := makeAccountsList(dataDir)
		creator := accountList.getAddressByName(assetCreator)
		// Presumably resolves the package-level assetID from the flags on
		// cmd -- TODO confirm against lookupAssetID's definition.
		lookupAssetID(cmd, creator, client)
		params, err := client.AssetInformation(assetID)
		if err != nil {
			reportErrorf(errorRequestFail, err)
		}
		// An empty reserve address means the creator acts as the reserve;
		// remember that so the output can say so explicitly.
		reserveEmpty := false
		if params.ReserveAddr == "" {
			reserveEmpty = true
			params.ReserveAddr = params.Creator
		}
		reserve, err := client.AccountInformation(params.ReserveAddr)
		if err != nil {
			reportErrorf(errorRequestFail, err)
		}
		res := reserve.Assets[assetID]
		fmt.Printf("Asset ID: %d\n", assetID)
		fmt.Printf("Creator: %s\n", params.Creator)
		fmt.Printf("Asset name: %s\n", params.AssetName)
		fmt.Printf("Unit name: %s\n", params.UnitName)
		fmt.Printf("Maximum issue: %s %s\n", assetDecimalsFmt(params.Total, params.Decimals), params.UnitName)
		fmt.Printf("Reserve amount: %s %s\n", assetDecimalsFmt(res.Amount, params.Decimals), params.UnitName)
		// "Issued" = total supply minus what the reserve still holds.
		fmt.Printf("Issued: %s %s\n", assetDecimalsFmt(params.Total-res.Amount, params.Decimals), params.UnitName)
		fmt.Printf("Decimals: %d\n", params.Decimals)
		fmt.Printf("Default frozen: %v\n", params.DefaultFrozen)
		fmt.Printf("Manager address: %s\n", params.ManagerAddr)
		if reserveEmpty {
			fmt.Printf("Reserve address: %s (Empty. Defaulting to creator)\n", params.ReserveAddr)
		} else {
			fmt.Printf("Reserve address: %s\n", params.ReserveAddr)
		}
		fmt.Printf("Freeze address: %s\n", params.FreezeAddr)
		fmt.Printf("Clawback address: %s\n", params.ClawbackAddr)
	},
}
| {
"pile_set_name": "Github"
} |
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
## [Unreleased]
## [1.0.9]
### Bugs
* Fix Bug [#41](https://github.com/shekohex/nest-router/issues/41)
## [1.0.8]
### Added
* Resolve Full Controller Path from anywhere ([#32](https://github.com/shekohex/nest-router/pull/32))
### Changed
* Update Dev Dependencies
## [1.0.7] - 2018-09-28
### Changed
* Project dependency refactor
* add node v10 as a test target
## [1.0.6] - 2018-06-20
### Changed
* Now Nest Router Module Using Nest V5+
> See examples folder, there is `nest-v5x`.
## [1.0.5] - 2018-02-27
### Deprecated
* `childrens`, use `children` instead.
see [why](https://github.com/shekohex/nest-router/issues/6)?
## [1.0.4] - 2018-02-12
### Added
* You can now omit the `module` keyword and just use an array
of `children` and one `path` property.
## [1.0.3] - 2018-02-10
### Added
* `children` array can be array with just modules.
this means you can omit the `path` keyword.
* Unreleased section to gather unreleased changes and encourage note
keeping prior to releases.
## [1.0.2] - 2018-02-08
### Added
* Routes now can be endless nested array.
## [1.0.1] - 2018-02-05
### Changed
* `children` is now an Array instead of an `object`
## [1.0.0] - 2018-02-05
* Published to NPM :rocket:
* add continuous integration "Travis CI"
## [0.0.0] - 2018-01-31
### Added
* Greenkeeper badge
* README
* Good examples and basic guidelines, in example folder and README.
* Build status badge
| {
"pile_set_name": "Github"
} |
/*********************************************************************************
* *
* The MIT License (MIT) *
* *
* Copyright (c) 2015-2020 aoju.org and other contributors. *
* *
* Permission is hereby granted, free of charge, to any person obtaining a copy *
* of this software and associated documentation files (the "Software"), to deal *
* in the Software without restriction, including without limitation the rights *
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell *
* copies of the Software, and to permit persons to whom the Software is *
* furnished to do so, subject to the following conditions: *
* *
* The above copyright notice and this permission notice shall be included in *
* all copies or substantial portions of the Software. *
* *
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR *
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE *
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER *
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, *
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN *
* THE SOFTWARE. *
********************************************************************************/
package org.aoju.bus.extra.effect.provider;
import org.aoju.bus.extra.effect.EffectProvider;
import org.xerial.snappy.Snappy;
import java.io.IOException;
/**
* 基于snappy的数据压缩
*
* @author Kimi Liu
* @version 6.1.0
* @since JDK 1.8+
*/
/**
 * Snappy-based {@link EffectProvider} implementation: a thin adapter that
 * delegates compression and decompression to the xerial snappy-java binding.
 */
public class SnappyProvider implements EffectProvider {

    /**
     * Compress raw bytes with Snappy.
     *
     * @param data uncompressed input bytes
     * @return the Snappy-compressed representation
     * @throws IOException if the underlying Snappy codec fails
     */
    @Override
    public byte[] compress(byte[] data) throws IOException {
        return Snappy.compress(data);
    }

    /**
     * Decompress Snappy-compressed bytes.
     *
     * @param data Snappy-compressed input bytes
     * @return the original uncompressed bytes
     * @throws IOException if the input is not valid Snappy data
     */
    @Override
    public byte[] uncompress(byte[] data) throws IOException {
        return Snappy.uncompress(data);
    }
} | {
"pile_set_name": "Github"
} |
#tb 0: 1/25
0, 0, 0, 1, 49152, 0x5017bc21
| {
"pile_set_name": "Github"
} |
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Crossbar.io Technologies GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import snappy
from autobahn.websocket.compress_base import PerMessageCompressOffer, \
PerMessageCompressOfferAccept, \
PerMessageCompressResponse, \
PerMessageCompressResponseAccept, \
PerMessageCompress
__all__ = (
'PerMessageSnappyMixin',
'PerMessageSnappyOffer',
'PerMessageSnappyOfferAccept',
'PerMessageSnappyResponse',
'PerMessageSnappyResponseAccept',
'PerMessageSnappy',
)
class PerMessageSnappyMixin(object):
    """
    Mixin class for this extension.
    """

    # Registered extension token used during WebSocket extension negotiation
    # (the value exchanged in the Sec-WebSocket-Extensions header).
    EXTENSION_NAME = "permessage-snappy"
    """
    Name of this WebSocket extension.
    """
class PerMessageSnappyOffer(PerMessageCompressOffer, PerMessageSnappyMixin):
    """
    Set of extension parameters for `permessage-snappy` WebSocket extension
    offered by a client to a server.
    """

    @classmethod
    def parse(cls, params):
        """
        Parses a WebSocket extension offer for `permessage-snappy` provided by a client to a server.

        :param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`.
        :type params: list

        :returns: A new instance of :class:`autobahn.compress.PerMessageSnappyOffer`.
        :rtype: obj

        :raises Exception: if a parameter occurs more than once, carries an
            illegal value, or is unknown for this extension.
        """
        # extension parameter defaults
        accept_no_context_takeover = False
        request_no_context_takeover = False

        # verify/parse client ("client-to-server direction") parameters of permessage-snappy offer
        for p in params:

            if len(params[p]) > 1:
                raise Exception("multiple occurrence of extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))

            val = params[p][0]

            if p == 'client_no_context_takeover':
                # both parameters are valueless flags: presence means True
                # noinspection PySimplifyBooleanCheck
                if val is not True:
                    raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
                else:
                    accept_no_context_takeover = True

            elif p == 'server_no_context_takeover':
                # noinspection PySimplifyBooleanCheck
                if val is not True:
                    raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (val, p, cls.EXTENSION_NAME))
                else:
                    request_no_context_takeover = True

            else:
                raise Exception("illegal extension parameter '%s' for extension '%s'" % (p, cls.EXTENSION_NAME))

        return cls(accept_no_context_takeover,
                   request_no_context_takeover)

    def __init__(self,
                 accept_no_context_takeover=True,
                 request_no_context_takeover=False):
        """
        :param accept_no_context_takeover: Iff true, client accepts "no context takeover" feature.
        :type accept_no_context_takeover: bool

        :param request_no_context_takeover: Iff true, client request "no context takeover" feature.
        :type request_no_context_takeover: bool
        """
        # isinstance() is the idiomatic type check; bool cannot be
        # subclassed, so this is equivalent to comparing type() == bool.
        if not isinstance(accept_no_context_takeover, bool):
            raise Exception("invalid type %s for accept_no_context_takeover" % type(accept_no_context_takeover))
        self.accept_no_context_takeover = accept_no_context_takeover

        if not isinstance(request_no_context_takeover, bool):
            raise Exception("invalid type %s for request_no_context_takeover" % type(request_no_context_takeover))
        self.request_no_context_takeover = request_no_context_takeover

    def get_extension_string(self):
        """
        Returns the WebSocket extension configuration string as sent to the server.

        :returns: PMCE configuration string.
        :rtype: str
        """
        pmce_string = self.EXTENSION_NAME
        if self.accept_no_context_takeover:
            pmce_string += "; client_no_context_takeover"
        if self.request_no_context_takeover:
            pmce_string += "; server_no_context_takeover"
        return pmce_string

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: JSON serializable representation.
        :rtype: dict
        """
        return {'extension': self.EXTENSION_NAME,
                'accept_no_context_takeover': self.accept_no_context_takeover,
                'request_no_context_takeover': self.request_no_context_takeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: Python string representation.
        :rtype: str
        """
        return "PerMessageSnappyOffer(accept_no_context_takeover = %s, request_no_context_takeover = %s)" % (self.accept_no_context_takeover, self.request_no_context_takeover)
class PerMessageSnappyOfferAccept(PerMessageCompressOfferAccept, PerMessageSnappyMixin):
    """
    Set of parameters with which to accept an `permessage-snappy` offer
    from a client by a server.
    """

    def __init__(self,
                 offer,
                 request_no_context_takeover=False,
                 no_context_takeover=None):
        """
        :param offer: The offer being accepted.
        :type offer: Instance of :class:`autobahn.compress.PerMessageSnappyOffer`.

        :param request_no_context_takeover: Iff true, server request "no context takeover" feature.
        :type request_no_context_takeover: bool

        :param no_context_takeover: Override server ("server-to-client direction") context takeover (this must be compatible with offer).
        :type no_context_takeover: bool
        """
        if not isinstance(offer, PerMessageSnappyOffer):
            raise Exception("invalid type %s for offer" % type(offer))
        self.offer = offer

        if not isinstance(request_no_context_takeover, bool):
            raise Exception("invalid type %s for request_no_context_takeover" % type(request_no_context_takeover))
        # The server may only request "no context takeover" if the client's
        # offer declared it would accept that feature.
        if request_no_context_takeover and not offer.accept_no_context_takeover:
            raise Exception("invalid value %s for request_no_context_takeover - feature unsupported by client" % request_no_context_takeover)
        self.request_no_context_takeover = request_no_context_takeover

        if no_context_takeover is not None:
            if not isinstance(no_context_takeover, bool):
                raise Exception("invalid type %s for no_context_takeover" % type(no_context_takeover))
            # The client requested the feature, so the server cannot opt out.
            if offer.request_no_context_takeover and not no_context_takeover:
                raise Exception("invalid value %s for no_context_takeover - client requested feature" % no_context_takeover)
        self.no_context_takeover = no_context_takeover

    def get_extension_string(self):
        """
        Returns the WebSocket extension configuration string as sent to the server.

        :returns: PMCE configuration string.
        :rtype: str
        """
        pmce_string = self.EXTENSION_NAME
        if self.offer.request_no_context_takeover:
            pmce_string += "; server_no_context_takeover"
        if self.request_no_context_takeover:
            pmce_string += "; client_no_context_takeover"
        return pmce_string

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: JSON serializable representation.
        :rtype: dict
        """
        return {'extension': self.EXTENSION_NAME,
                'offer': self.offer.__json__(),
                'request_no_context_takeover': self.request_no_context_takeover,
                'no_context_takeover': self.no_context_takeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: Python string representation.
        :rtype: str
        """
        # Fixed: the original emitted "PerMessageSnappyAccept", which does not
        # name this class and so could never be eval'ed back as documented.
        return "PerMessageSnappyOfferAccept(offer = %s, request_no_context_takeover = %s, no_context_takeover = %s)" % (self.offer.__repr__(), self.request_no_context_takeover, self.no_context_takeover)
class PerMessageSnappyResponse(PerMessageCompressResponse, PerMessageSnappyMixin):
    """
    Set of parameters for `permessage-snappy` responded by server.
    """

    @classmethod
    def parse(cls, params):
        """
        Parses a WebSocket extension response for `permessage-snappy` provided by a server to a client.

        :param params: Output from :func:`autobahn.websocket.WebSocketProtocol._parseExtensionsHeader`.
        :type params: list

        :returns: A new instance of :class:`autobahn.compress.PerMessageSnappyResponse`.
        :rtype: obj
        """
        client_no_context_takeover = False
        server_no_context_takeover = False

        for name in params:
            values = params[name]

            # each parameter may appear at most once
            if len(values) > 1:
                raise Exception("multiple occurrence of extension parameter '%s' for extension '%s'" % (name, cls.EXTENSION_NAME))

            value = values[0]

            # only the two takeover flags are legal parameters
            if name not in ('client_no_context_takeover', 'server_no_context_takeover'):
                raise Exception("illegal extension parameter '%s' for extension '%s'" % (name, cls.EXTENSION_NAME))

            # both are valueless flags: presence (parsed as True) sets them
            if value is not True:
                raise Exception("illegal extension parameter value '%s' for parameter '%s' of extension '%s'" % (value, name, cls.EXTENSION_NAME))

            if name == 'client_no_context_takeover':
                client_no_context_takeover = True
            else:
                server_no_context_takeover = True

        return cls(client_no_context_takeover,
                   server_no_context_takeover)

    def __init__(self,
                 client_no_context_takeover,
                 server_no_context_takeover):
        # negotiated takeover flags for each direction
        self.client_no_context_takeover = client_no_context_takeover
        self.server_no_context_takeover = server_no_context_takeover

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: JSON serializable representation.
        :rtype: dict
        """
        serialized = {'extension': self.EXTENSION_NAME}
        serialized['client_no_context_takeover'] = self.client_no_context_takeover
        serialized['server_no_context_takeover'] = self.server_no_context_takeover
        return serialized

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: Python string representation.
        :rtype: str
        """
        return "PerMessageSnappyResponse(client_no_context_takeover = %s, server_no_context_takeover = %s)" % (self.client_no_context_takeover, self.server_no_context_takeover)
class PerMessageSnappyResponseAccept(PerMessageCompressResponseAccept, PerMessageSnappyMixin):
    """
    Set of parameters with which to accept an `permessage-snappy` response
    from a server by a client.
    """

    def __init__(self,
                 response,
                 no_context_takeover=None):
        """
        :param response: The response being accepted.
        :type response: Instance of :class:`autobahn.compress.PerMessageSnappyResponse`.

        :param no_context_takeover: Override client ("client-to-server direction") context takeover (this must be compatible with response).
        :type no_context_takeover: bool
        """
        if not isinstance(response, PerMessageSnappyResponse):
            raise Exception("invalid type %s for response" % type(response))
        self.response = response

        if no_context_takeover is not None:
            # isinstance() is the idiomatic type check; equivalent to the
            # original type() comparison since bool cannot be subclassed.
            if not isinstance(no_context_takeover, bool):
                raise Exception("invalid type %s for no_context_takeover" % type(no_context_takeover))
            # The server demanded the feature, so the client cannot opt out.
            if response.client_no_context_takeover and not no_context_takeover:
                raise Exception("invalid value %s for no_context_takeover - server requested feature" % no_context_takeover)
        self.no_context_takeover = no_context_takeover

    def __json__(self):
        """
        Returns a JSON serializable object representation.

        :returns: JSON serializable representation.
        :rtype: dict
        """
        return {'extension': self.EXTENSION_NAME,
                'response': self.response.__json__(),
                'no_context_takeover': self.no_context_takeover}

    def __repr__(self):
        """
        Returns Python object representation that can be eval'ed to reconstruct the object.

        :returns: Python string representation.
        :rtype: str
        """
        return "PerMessageSnappyResponseAccept(response = %s, no_context_takeover = %s)" % (self.response.__repr__(), self.no_context_takeover)
class PerMessageSnappy(PerMessageCompress, PerMessageSnappyMixin):
    """
    `permessage-snappy` WebSocket extension processor.
    """

    @classmethod
    def create_from_response_accept(cls, is_server, accept):
        # Client side: the server direction comes verbatim from the response;
        # the client direction may be overridden by the accept.
        client_nct = accept.no_context_takeover
        if client_nct is None:
            client_nct = accept.response.client_no_context_takeover
        return cls(is_server,
                   accept.response.server_no_context_takeover,
                   client_nct)

    @classmethod
    def create_from_offer_accept(cls, is_server, accept):
        # Server side: the server direction may be overridden by the accept,
        # otherwise it follows what the client's offer requested.
        server_nct = accept.no_context_takeover
        if server_nct is None:
            server_nct = accept.offer.request_no_context_takeover
        return cls(is_server,
                   server_nct,
                   accept.request_no_context_takeover)

    def __init__(self,
                 is_server,
                 server_no_context_takeover,
                 client_no_context_takeover):
        self._is_server = is_server
        self.server_no_context_takeover = server_no_context_takeover
        self.client_no_context_takeover = client_no_context_takeover

        # lazily created snappy stream codecs; reset per message when
        # context takeover is disabled for the respective direction
        self._compressor = None
        self._decompressor = None

    def __json__(self):
        return {'extension': self.EXTENSION_NAME,
                'server_no_context_takeover': self.server_no_context_takeover,
                'client_no_context_takeover': self.client_no_context_takeover}

    def __repr__(self):
        return "PerMessageSnappy(is_server = %s, server_no_context_takeover = %s, client_no_context_takeover = %s)" % (self._is_server, self.server_no_context_takeover, self.client_no_context_takeover)

    def start_compress_message(self):
        # A fresh compressor is needed for the first message, and for every
        # message when context takeover is disabled for our send direction.
        if self._is_server:
            reset = self.server_no_context_takeover
        else:
            reset = self.client_no_context_takeover
        if self._compressor is None or reset:
            self._compressor = snappy.StreamCompressor()

    def compress_message_data(self, data):
        return self._compressor.add_chunk(data)

    def end_compress_message(self):
        # snappy streams need no explicit flush/tail bytes
        return b""

    def start_decompress_message(self):
        # Mirror of start_compress_message for the receive direction.
        if self._is_server:
            reset = self.client_no_context_takeover
        else:
            reset = self.server_no_context_takeover
        if self._decompressor is None or reset:
            self._decompressor = snappy.StreamDecompressor()

    def decompress_message_data(self, data):
        return self._decompressor.decompress(data)

    def end_decompress_message(self):
        pass
| {
"pile_set_name": "Github"
} |
/*
* Activiti Modeler component part of the Activiti project
* Copyright 2005-2014 Alfresco Software, Ltd. All rights reserved.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
* Input parameters for call activity
*/
/*
 * Entry controller for the call-activity input-parameter property: opens
 * the edit popup, sharing the current scope with the dialog.
 */
var KisBpmInParametersCtrl = [ '$scope', '$modal', '$timeout', '$translate', function($scope, $modal, $timeout, $translate) {

    // Open the dialog; a cache-busting version query keeps the template fresh.
    $modal({
        template: 'editor-app/configuration/properties/in-parameters-popup.html?version=' + Date.now(),
        scope: $scope
    });
}];
/*
 * Popup controller backing the input-parameters dialog: manages the
 * parameter list (add/remove/reorder) in an ng-grid and writes the result
 * back to the property model on save.
 */
var KisBpmInParametersPopupCtrl = ['$scope', '$q', '$translate', function($scope, $q, $translate) {

    // Put json representing form properties on scope
    if ($scope.property.value !== undefined && $scope.property.value !== null
        && $scope.property.value.inParameters !== undefined
        && $scope.property.value.inParameters !== null) {
        // Note that we clone the json object rather than setting it directly,
        // this to cope with the fact that the user can click the cancel button and no changes should have happened
        $scope.parameters = angular.copy($scope.property.value.inParameters);
    } else {
        $scope.parameters = [];
    }

    // Array to contain selected properties (yes - we only can select one, but ng-grid isn't smart enough)
    $scope.selectedParameters = [];
    $scope.translationsRetrieved = false;
    $scope.labels = {};

    // Column headers are translated asynchronously; the grid is only
    // configured once all three translations have resolved.
    var sourcePromise = $translate('PROPERTY.PARAMETER.SOURCE');
    var sourceExpressionPromise = $translate('PROPERTY.PARAMETER.SOURCEEXPRESSION');
    var targetPromise = $translate('PROPERTY.PARAMETER.TARGET');

    $q.all([sourcePromise, sourceExpressionPromise, targetPromise]).then(function(results) {
        $scope.labels.sourceLabel = results[0];
        $scope.labels.sourceExpressionLabel = results[1];
        $scope.labels.targetLabel = results[2];
        $scope.translationsRetrieved = true;

        // Config for grid
        $scope.gridOptions = {
            data: 'parameters',
            enableRowReordering: true,
            headerRowHeight: 28,
            multiSelect: false,
            keepLastSelected : false,
            selectedItems: $scope.selectedParameters,
            columnDefs: [{ field: 'source', displayName: $scope.labels.sourceLabel },
                { field: 'sourceExpression', displayName: $scope.labels.sourceExpressionLabel },
                { field: 'target', displayName: $scope.labels.targetLabel }]
        };
    });

    // Click handler for add button: appends an empty parameter row.
    $scope.addNewParameter = function() {
        $scope.parameters.push({ source : '',
            sourceExpression : '',
            target : ''});
    };

    // Click handler for remove button: deletes the selected row, then moves
    // the selection to a neighbouring row so the grid keeps a selection.
    $scope.removeParameter = function() {
        if ($scope.selectedParameters.length > 0) {
            var index = $scope.parameters.indexOf($scope.selectedParameters[0]);
            $scope.gridOptions.selectItem(index, false);
            $scope.parameters.splice(index, 1);

            $scope.selectedParameters.length = 0;
            if (index < $scope.parameters.length) {
                $scope.gridOptions.selectItem(index + 1, true);
            } else if ($scope.parameters.length > 0) {
                $scope.gridOptions.selectItem(index - 1, true);
            }
        }
    };

    // Click handler for up button
    $scope.moveParameterUp = function() {
        if ($scope.selectedParameters.length > 0) {
            var index = $scope.parameters.indexOf($scope.selectedParameters[0]);
            if (index != 0) { // If it's the first, no moving up of course
                // Reason for funny way of swapping, see https://github.com/angular-ui/ng-grid/issues/272
                // (remove now, re-insert after a 100ms tick so ng-grid notices)
                var temp = $scope.parameters[index];
                $scope.parameters.splice(index, 1);
                $timeout(function(){
                    $scope.parameters.splice(index + -1, 0, temp);
                }, 100);
            }
        }
    };

    // Click handler for down button
    $scope.moveParameterDown = function() {
        if ($scope.selectedParameters.length > 0) {
            var index = $scope.parameters.indexOf($scope.selectedParameters[0]);
            if (index != $scope.parameters.length - 1) { // If it's the last element, no moving down of course
                // Reason for funny way of swapping, see https://github.com/angular-ui/ng-grid/issues/272
                var temp = $scope.parameters[index];
                $scope.parameters.splice(index, 1);
                $timeout(function(){
                    $scope.parameters.splice(index + 1, 0, temp);
                }, 100);
            }
        }
    };

    // Click handler for save button: write the edited list back to the
    // property model (null when empty) and propagate the change.
    $scope.save = function() {
        if ($scope.parameters.length > 0) {
            $scope.property.value = {};
            $scope.property.value.inParameters = $scope.parameters;
        } else {
            $scope.property.value = null;
        }
        $scope.updatePropertyInModel($scope.property);
        $scope.close();
    };

    // Cancel discards edits (the popup worked on a clone, see above).
    $scope.cancel = function() {
        $scope.close();
    };

    // Close button handler
    $scope.close = function() {
        $scope.property.mode = 'read';
        $scope.$hide();
    };
}];
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2012 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.inputmethod.keyboard.internal;
import android.test.AndroidTestCase;
import android.test.suitebuilder.annotation.SmallTest;
@SmallTest
public class PointerTrackerQueueTests extends AndroidTestCase {
/**
 * Minimal {@link PointerTrackerQueue.Element} implementation used as a test
 * double; it records whether and in which order phantom-up events were
 * delivered to it.
 */
public static class Element implements PointerTrackerQueue.Element {
    // Total number of onPhantomUpEvent() calls across ALL instances;
    // tests reset this manually before exercising the queue.
    public static int sPhantomUpCount;
    // Sentinel for mPhantomUpEventTime: no phantom-up delivered yet.
    public static final long NOT_HAPPENED = -1;

    public final int mId;
    public boolean mIsModifier;
    public boolean mIsInDraggingFinger;
    public long mPhantomUpEventTime = NOT_HAPPENED;

    public Element(int id) {
        mId = id;
    }

    @Override
    public boolean isModifier() {
        return mIsModifier;
    }

    @Override
    public boolean isInDraggingFinger() {
        return mIsInDraggingFinger;
    }

    @Override
    public void onPhantomUpEvent(long eventTime) {
        sPhantomUpCount++;
        // Offset by the running count so every phantom-up gets a distinct,
        // strictly increasing timestamp the tests can assert ordering on.
        mPhantomUpEventTime = eventTime + sPhantomUpCount;
    }

    @Override
    public void cancelTrackingForAction() {}

    @Override
    public String toString() {
        return Integer.toString(mId);
    }
}
private final Element mElement1 = new Element(1);
private final Element mElement2 = new Element(2);
private final Element mElement3 = new Element(3);
private final Element mElement4 = new Element(4);
private final PointerTrackerQueue mQueue = new PointerTrackerQueue();
public void testEmpty() {
assertEquals(0, mQueue.size());
assertEquals("[]", mQueue.toString());
}
public void testAdd() {
mQueue.add(mElement1);
assertEquals(1, mQueue.size());
assertEquals("[1]", mQueue.toString());
mQueue.add(mElement2);
assertEquals(2, mQueue.size());
assertEquals("[1 2]", mQueue.toString());
mQueue.add(mElement3);
assertEquals(3, mQueue.size());
assertEquals("[1 2 3]", mQueue.toString());
mQueue.add(mElement4);
assertEquals(4, mQueue.size());
assertEquals("[1 2 3 4]", mQueue.toString());
}
public void testRemove() {
Element.sPhantomUpCount = 0;
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
mQueue.remove(mElement2);
assertEquals(3, mQueue.size());
assertEquals("[1 3 4]", mQueue.toString());
mQueue.remove(mElement4);
assertEquals(2, mQueue.size());
assertEquals("[1 3]", mQueue.toString());
mQueue.remove(mElement4);
assertEquals(2, mQueue.size());
assertEquals("[1 3]", mQueue.toString());
mQueue.remove(mElement1);
assertEquals(1, mQueue.size());
assertEquals("[3]", mQueue.toString());
mQueue.remove(mElement3);
assertEquals(0, mQueue.size());
assertEquals("[]", mQueue.toString());
mQueue.remove(mElement1);
assertEquals(0, mQueue.size());
assertEquals("[]", mQueue.toString());
assertEquals(0, Element.sPhantomUpCount);
assertEquals(Element.NOT_HAPPENED, mElement1.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
public void testAddAndRemove() {
Element.sPhantomUpCount = 0;
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
mQueue.remove(mElement2);
assertEquals(3, mQueue.size());
assertEquals("[1 3 4]", mQueue.toString());
mQueue.remove(mElement4);
assertEquals(2, mQueue.size());
assertEquals("[1 3]", mQueue.toString());
mQueue.add(mElement2);
assertEquals(3, mQueue.size());
assertEquals("[1 3 2]", mQueue.toString());
mQueue.remove(mElement4);
assertEquals(3, mQueue.size());
assertEquals("[1 3 2]", mQueue.toString());
mQueue.remove(mElement1);
assertEquals(2, mQueue.size());
assertEquals("[3 2]", mQueue.toString());
mQueue.add(mElement1);
assertEquals(3, mQueue.size());
assertEquals("[3 2 1]", mQueue.toString());
mQueue.remove(mElement3);
assertEquals(2, mQueue.size());
assertEquals("[2 1]", mQueue.toString());
mQueue.remove(mElement1);
assertEquals(1, mQueue.size());
assertEquals("[2]", mQueue.toString());
assertEquals(Element.NOT_HAPPENED, mElement1.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
public void testReleaseAllPointers() {
mElement2.mIsModifier = true;
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
final long eventTime = 123;
Element.sPhantomUpCount = 0;
mQueue.releaseAllPointers(eventTime);
assertEquals(4, Element.sPhantomUpCount);
assertEquals(0, mQueue.size());
assertEquals("[]", mQueue.toString());
assertEquals(eventTime + 1, mElement1.mPhantomUpEventTime);
assertEquals(eventTime + 2, mElement2.mPhantomUpEventTime);
assertEquals(eventTime + 3, mElement3.mPhantomUpEventTime);
assertEquals(eventTime + 4, mElement4.mPhantomUpEventTime);
}
public void testReleaseAllPointersOlderThanFirst() {
mElement2.mIsModifier = true;
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
final long eventTime = 123;
Element.sPhantomUpCount = 0;
mQueue.releaseAllPointersOlderThan(mElement1, eventTime);
assertEquals(0, Element.sPhantomUpCount);
assertEquals(3, mQueue.size());
assertEquals("[1 2 3]", mQueue.toString());
assertEquals(Element.NOT_HAPPENED, mElement1.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
}
public void testReleaseAllPointersOlderThanLast() {
mElement2.mIsModifier = true;
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
final long eventTime = 123;
Element.sPhantomUpCount = 0;
mQueue.releaseAllPointersOlderThan(mElement4, eventTime);
assertEquals(2, Element.sPhantomUpCount);
assertEquals(2, mQueue.size());
assertEquals("[2 4]", mQueue.toString());
assertEquals(eventTime + 1, mElement1.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement2.mPhantomUpEventTime);
assertEquals(eventTime + 2, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
public void testReleaseAllPointersOlderThanWithoutModifierMiddle() {
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
final long eventTime = 123;
Element.sPhantomUpCount = 0;
mQueue.releaseAllPointersOlderThan(mElement3, eventTime);
assertEquals(2, Element.sPhantomUpCount);
assertEquals(2, mQueue.size());
assertEquals("[3 4]", mQueue.toString());
assertEquals(eventTime + 1, mElement1.mPhantomUpEventTime);
assertEquals(eventTime + 2, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
public void testReleaseAllPointersOlderThanWithoutModifierLast() {
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
final long eventTime = 123;
Element.sPhantomUpCount = 0;
mQueue.releaseAllPointersOlderThan(mElement4, eventTime);
assertEquals(3, Element.sPhantomUpCount);
assertEquals(1, mQueue.size());
assertEquals("[4]", mQueue.toString());
assertEquals(eventTime + 1, mElement1.mPhantomUpEventTime);
assertEquals(eventTime + 2, mElement2.mPhantomUpEventTime);
assertEquals(eventTime + 3, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
public void testReleaseAllPointersExcept() {
mElement2.mIsModifier = true;
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
final long eventTime = 123;
Element.sPhantomUpCount = 0;
mQueue.releaseAllPointersExcept(mElement3, eventTime);
assertEquals(3, Element.sPhantomUpCount);
assertEquals(1, mQueue.size());
assertEquals("[3]", mQueue.toString());
assertEquals(eventTime + 1, mElement1.mPhantomUpEventTime);
assertEquals(eventTime + 2, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
assertEquals(eventTime + 3, mElement4.mPhantomUpEventTime);
}
public void testHasModifierKeyOlderThan() {
Element.sPhantomUpCount = 0;
assertFalse("hasModifierKeyOlderThan empty", mQueue.hasModifierKeyOlderThan(mElement1));
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
assertFalse(mQueue.hasModifierKeyOlderThan(mElement1));
assertFalse(mQueue.hasModifierKeyOlderThan(mElement2));
assertFalse(mQueue.hasModifierKeyOlderThan(mElement3));
assertFalse(mQueue.hasModifierKeyOlderThan(mElement4));
mElement2.mIsModifier = true;
assertFalse(mQueue.hasModifierKeyOlderThan(mElement1));
assertFalse(mQueue.hasModifierKeyOlderThan(mElement2));
assertTrue(mQueue.hasModifierKeyOlderThan(mElement3));
assertTrue(mQueue.hasModifierKeyOlderThan(mElement4));
assertEquals(0, Element.sPhantomUpCount);
assertEquals(4, mQueue.size());
assertEquals("[1 2 3 4]", mQueue.toString());
assertEquals(Element.NOT_HAPPENED, mElement1.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
public void testIsAnyInDraggingFinger() {
Element.sPhantomUpCount = 0;
assertFalse(mQueue.isAnyInDraggingFinger());
mQueue.add(mElement1);
mQueue.add(mElement2);
mQueue.add(mElement3);
mQueue.add(mElement4);
assertFalse(mQueue.isAnyInDraggingFinger());
mElement3.mIsInDraggingFinger = true;
assertTrue(mQueue.isAnyInDraggingFinger());
assertEquals(0, Element.sPhantomUpCount);
assertEquals(4, mQueue.size());
assertEquals("[1 2 3 4]", mQueue.toString());
assertEquals(Element.NOT_HAPPENED, mElement1.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement2.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement3.mPhantomUpEventTime);
assertEquals(Element.NOT_HAPPENED, mElement4.mPhantomUpEventTime);
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<entry xmlns="http://www.w3.org/2005/Atom">
<id>tag:bzr.mfd-consult.dk,2007:venus-expunge-test3/1</id>
<link href="http://example.com/3/1"/>
<title>Test 3/1</title>
<content>Entry for global setting 1</content>
<updated>2007-03-01T03:01:00Z</updated>
<source>
<id>tag:bzr.mfd-consult.dk,2007:venus-expunge-testfeed3</id>
<title>Test 3 source</title>
</source>
</entry> | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.github.lzyzsd.viewdraghelperdemo" >
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme">
<activity
android:name="com.github.lzyzsd.swipelayoutexample.MainActivity"
android:label="@string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
| {
"pile_set_name": "Github"
} |
// This file is generated automatically by `scripts/build/typings.js`. Please, don't change it.
import { isThisSecond } from 'date-fns'
export default isThisSecond
| {
"pile_set_name": "Github"
} |
// Copyright 2009, Squish Tech, LLC.
#include "html_document.h"
namespace libxmljs {
// Registers the HtmlDocument bindings on the given exports object.
// Currently a no-op placeholder: no methods or properties are attached yet.
void
HtmlDocument::Initialize(v8::Local<v8::Object> target) {
}
}  // namespace libxmljs
| {
"pile_set_name": "Github"
} |
//
// ______ ______ ______
// /\ __ \ /\ ___\ /\ ___\
// \ \ __< \ \ __\_ \ \ __\_
// \ \_____\ \ \_____\ \ \_____\
// \/_____/ \/_____/ \/_____/
//
//
// Copyright (c) 2014-2015, Geek Zoo Studio
// http://www.bee-framework.com
//
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
#if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)
#import "Bee_Precompile.h"
#import "Bee_Foundation.h"
#import "Bee_UISignal.h"
#import "Bee_UICell.h"
#pragma mark -
// Category exposing, for any UIView, the BeeUICell that contains it
// (implementation not visible here — presumably walks the superview chain).
@interface UIView(BeeUICell)
// The containing BeeUICell, or nil if this view is not inside one.
@property (nonatomic, readonly) BeeUICell * containerCell;
@end
#endif // #if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)
| {
"pile_set_name": "Github"
} |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
from pyflink.testing.test_case_utils import PyFlinkTestCase
class ShellExampleTests(PyFlinkTestCase):
    """
    If these tests fail, please fix the example code and copy it to shell.py.

    The two test bodies are intentionally kept as near-duplicates (batch vs.
    stream) because each one is meant to be copied verbatim into shell.py.
    """

    def test_batch_case(self) -> None:
        # Runs the batch-environment example: write a two-row table to a CSV
        # sink and verify the file contents.
        from pyflink.shell import b_env, bt_env, FileSystem, OldCsv, DataTypes, Schema
        # example begin
        import tempfile
        import os
        import shutil
        sink_path = tempfile.gettempdir() + '/batch.csv'
        # Remove any leftover sink (file or directory) from a previous run.
        if os.path.exists(sink_path):
            if os.path.isfile(sink_path):
                os.remove(sink_path)
            else:
                shutil.rmtree(sink_path)
        # Parallelism 1 so the sink is a single file with deterministic order.
        b_env.set_parallelism(1)
        t = bt_env.from_elements([(1, 'hi', 'hello'), (2, 'hi', 'hello')], ['a', 'b', 'c'])
        bt_env.connect(FileSystem().path(sink_path))\
            .with_format(OldCsv()
                         .field_delimiter(',')
                         .field("a", DataTypes.BIGINT())
                         .field("b", DataTypes.STRING())
                         .field("c", DataTypes.STRING()))\
            .with_schema(Schema()
                         .field("a", DataTypes.BIGINT())
                         .field("b", DataTypes.STRING())
                         .field("c", DataTypes.STRING()))\
            .create_temporary_table("batch_sink")
        t.select("a + 1, b, c").execute_insert("batch_sink").wait()
        # verify code, do not copy these code to shell.py
        with open(sink_path, 'r') as f:
            lines = f.read()
            self.assertEqual(lines, '2,hi,hello\n' + '3,hi,hello\n')

    def test_stream_case(self) -> None:
        # Same example as above, but using the streaming environment.
        from pyflink.shell import s_env, st_env, FileSystem, OldCsv, DataTypes, Schema
        # example begin
        import tempfile
        import os
        import shutil
        sink_path = tempfile.gettempdir() + '/streaming.csv'
        if os.path.exists(sink_path):
            if os.path.isfile(sink_path):
                os.remove(sink_path)
            else:
                shutil.rmtree(sink_path)
        s_env.set_parallelism(1)
        t = st_env.from_elements([(1, 'hi', 'hello'), (2, 'hi', 'hello')], ['a', 'b', 'c'])
        st_env.connect(FileSystem().path(sink_path))\
            .with_format(OldCsv()
                         .field_delimiter(',')
                         .field("a", DataTypes.BIGINT())
                         .field("b", DataTypes.STRING())
                         .field("c", DataTypes.STRING()))\
            .with_schema(Schema()
                         .field("a", DataTypes.BIGINT())
                         .field("b", DataTypes.STRING())
                         .field("c", DataTypes.STRING()))\
            .create_temporary_table("stream_sink")
        t.select("a + 1, b, c").execute_insert("stream_sink").wait()
        # verify code, do not copy these code to shell.py
        with open(sink_path, 'r') as f:
            lines = f.read()
            self.assertEqual(lines, '2,hi,hello\n' + '3,hi,hello\n')
| {
"pile_set_name": "Github"
} |
################################################################################
#
# libsoc
#
################################################################################
LIBSOC_VERSION = 0.8.2
LIBSOC_SITE = $(call github,jackmitch,libsoc,$(LIBSOC_VERSION))
LIBSOC_LICENSE = LGPL-2.1
LIBSOC_LICENSE_FILES = LICENCE
# GitHub tarballs ship no configure script, so autoreconf is required.
LIBSOC_AUTORECONF = YES
LIBSOC_INSTALL_STAGING = YES
LIBSOC_DEPENDENCIES = host-pkgconf
# Bindings target whichever Python interpreter is enabled in the Buildroot
# configuration; Python 2 takes precedence if both are somehow selected.
# Install Python 2 bindings
ifeq ($(BR2_PACKAGE_PYTHON),y)
LIBSOC_DEPENDENCIES += python
LIBSOC_CONF_OPTS += --enable-python=2
# Install Python 3 bindings
else ifeq ($(BR2_PACKAGE_PYTHON3),y)
LIBSOC_DEPENDENCIES += python3
LIBSOC_CONF_OPTS += --enable-python=3
else
LIBSOC_CONF_OPTS += --disable-python
endif
$(eval $(autotools-package))
| {
"pile_set_name": "Github"
} |
package dev.morphia.aggregation.experimental.expressions.impls;
import dev.morphia.mapping.Mapper;
import org.bson.BsonWriter;
import org.bson.codecs.Codec;
import org.bson.codecs.EncoderContext;
/**
 * An aggregation expression wrapping a plain literal value.
 * <p>
 * Encoding delegates to the codec registered for the value's runtime type;
 * a null value is written as BSON null.
 */
public class ValueExpression extends Expression {
    public ValueExpression(Object value) {
        super(null, value);
    }

    @Override
    public void encode(Mapper mapper, BsonWriter writer, EncoderContext encoderContext) {
        final Object value = getValue();
        if (value == null) {
            // No codec lookup possible (or needed) for null.
            writer.writeNull();
            return;
        }
        final Codec codec = mapper.getCodecRegistry().get(value.getClass());
        encoderContext.encodeWithChildContext(codec, writer, value);
    }
}
| {
"pile_set_name": "Github"
} |
// This is brl/bseg/sdet/sdet_harris_detector.cxx
#include <algorithm>
#include <cstdlib>
#include <iostream>
#include "sdet_harris_detector.h"
//:
// \file
#ifdef _MSC_VER
# include "vcl_msvc_warnings.h"
#endif
#include "vil1/vil1_memory_image_of.h"
#include "vil/vil_image_view.h"
#include "vil/vil_convert.h"
#include <vil/algo/vil_corners.h>
#include <brip/brip_vil1_float_ops.h>
#include <brip/brip_vil_float_ops.h>
#include <vsol/vsol_point_2d.h>
//: A container to support sorting of corners
// Will result in descending order according to strength
struct sdet_harris_point
{
  sdet_harris_point () = default;
  //: Set the corner location
  void set_point(vsol_point_2d_sptr const& p) {p_ = p;}
  //: Set the corner strength
  void set_strength(const float s) {strength_ = s;}
  vsol_point_2d_sptr point() {return p_;}
  double strength() {return strength_;}
 private:
  // Value-initialized so a default-constructed point has a defined strength
  // (previously this member was left indeterminate until set_strength()).
  float strength_{0.f};
  vsol_point_2d_sptr p_;
};
//The sort compare function
// Three-way comparator yielding descending order by strength. Returns 0 for
// equal strengths: the previous version returned -1 for both compare(a,b) and
// compare(b,a) on ties, which violates the qsort comparator contract.
static int compare(sdet_harris_point* pa,
                   sdet_harris_point* pb)
{
  if (pa->strength() < pb->strength())
    return +1;
  if (pa->strength() > pb->strength())
    return -1;
  return 0;
}
//---------------------------------------------------------------
// Constructors
//
//----------------------------------------------------------------
//: constructor from a parameter block (the only way)
//
sdet_harris_detector::sdet_harris_detector(sdet_harris_detector_params& rpp)
  : sdet_harris_detector_params(rpp)
{
  image_ = nullptr;
  vimage_ = nullptr;
  //don't really know but have to pick one
  // Default to the vil (image resource) code path until an image is supplied.
  use_vil_image_ = true;
}
//:Default Destructor
// Smart-pointer members release their resources automatically.
sdet_harris_detector::~sdet_harris_detector()
= default;
//-------------------------------------------------------------------------
//: Set the image to be processed
//
void sdet_harris_detector::set_image(vil1_image const& image)
{
  if (!image)
  {
    std::cout <<"In sdet_harris_detector::set_image(.) - null input\n";
    return;
  }
  // A new input invalidates previously extracted corners.
  points_valid_ = false;
  image_ = image;
  // Route extract_corners() to the vil1 implementation.
  use_vil_image_ = false;
}
//-------------------------------------------------------------------------
//: Set the image resource to be processed
//
//: Set the vil image resource to be processed.
// Also routes extract_corners() to the vil implementation; previously this
// flag was left unchanged, so calling set_image() and then
// set_image_resource() would still dispatch to the vil1 path and fail.
void sdet_harris_detector::set_image_resource(vil_image_resource_sptr const& image)
{
  if (!image)
  {
    // Message previously named set_image; corrected to this function.
    std::cout <<"In sdet_harris_detector::set_image_resource(.) - null input\n";
    return;
  }
  points_valid_ = false;
  vimage_ = image;
  use_vil_image_ = true;
}
//------------------------------------------------------------------------
// : extract corners using vil1 code
//
// Computes Harris corners with the vil1 pipeline: float conversion, Gaussian
// smoothing, NxN gradient matrix, Harris response, then non-maximum
// suppression above thresh_. Positions/strengths are appended to the output
// vectors. Returns false if no image has been set.
bool sdet_harris_detector::extract_corners_vil1(std::vector<float>& x_pos,
                                                std::vector<float>& y_pos,
                                                std::vector<float>& val)
{
  // Check the image
  if (!image_)
  {
    std::cout << "In sdet_harris_detector::extract_corners() - no image\n";
    return false;
  }
  int w = image_.width(), h = image_.height();
  std::cout << "sdet_harris_detector::extract_corners(): width = "
            << w << " height = " << h << std::endl;
  vil1_memory_image_of<float> inputf = brip_vil1_float_ops::convert_to_float(image_);
  vil1_memory_image_of<float> smooth = brip_vil1_float_ops::gaussian(inputf, sigma_);
  vil1_memory_image_of<float> IxIx, IxIy, IyIy, c;
  IxIx.resize(w,h); IxIy.resize(w,h); IyIy.resize(w,h);
  // Second-moment (gradient outer-product) matrix over an n_ x n_ window.
  brip_vil1_float_ops::grad_matrix_NxN(smooth, n_, IxIx, IxIy, IyIy);
  c = brip_vil1_float_ops::harris(IxIx, IxIy, IyIy, scale_factor_);
  brip_vil1_float_ops::non_maximum_suppression(c, n_, thresh_, x_pos, y_pos, val);
  return true;
}
//------------------------------------------------------------------------
// : extract corners using vil code
//
// vil counterpart of extract_corners_vil1. Requires a single-plane image.
// When use_vil_harris_ is set, the vil_corners() implementation computes the
// response; otherwise the brip gradient-matrix/Harris pipeline is used.
bool sdet_harris_detector::extract_corners_vil(std::vector<float>& x_pos,
                                               std::vector<float>& y_pos,
                                               std::vector<float>& val)
{
  // Check the image
  if (!vimage_||vimage_->nplanes()!=1)
  {
    std::cout << "In sdet_harris_detector::extract_corners() - "
              << "no image or not exactly one component\n";
    return false;
  }
  int w = vimage_->ni(), h = vimage_->nj();
  std::cout << "sdet_harris_detector::extract_corners(): width = "
            << w << " height = " << h << std::endl;
  vil_image_view<float> base_view = vimage_->get_view();
  vil_image_view<float> inputf;
  vil_convert_cast(base_view, inputf);
  vil_image_view<float> smooth = brip_vil_float_ops::gaussian(inputf, sigma_);
  vil_image_view<float> c(w,h);
  if (use_vil_harris_)
    vil_corners(smooth,c,scale_factor_);
  else
  {
    vil_image_view<float> IxIx(w,h), IxIy(w,h), IyIy(w,h);
    brip_vil_float_ops::grad_matrix_NxN(smooth, n_, IxIx, IxIy, IyIy);
    c = brip_vil_float_ops::harris(IxIx, IxIy, IyIy, scale_factor_);
  }
  brip_vil_float_ops::non_maximum_suppression(c, n_, thresh_, x_pos, y_pos, val);
  return true;
}
//--------------------------------------------------------------------------
//: extract a set of vsol_point_2d(s)
void sdet_harris_detector::extract_corners()
{
if (points_valid_)
return;
//Process the image to extract the Harris corners
points_.clear();
std::vector<float> x_pos, y_pos, val;
if (!use_vil_image_)
{ if (!extract_corners_vil1(x_pos, y_pos, val)) return; }
else
{ if (!extract_corners_vil(x_pos, y_pos, val)) return; }
int n_corners = x_pos.size();
std::cout << "Found " << n_corners << " above the threshold\n";
if (!n_corners)
{
std::cout << "sdet_harris_detector::extract_corners() - "
<< "no corners found\n";
return;
}
//Sort the corners according to strength
auto* point_array = new sdet_harris_point[n_corners];
for (int i = 0; i<n_corners; i++)
{
vsol_point_2d_sptr p = new vsol_point_2d(x_pos[i], y_pos[i]);
point_array[i].set_point(p);
point_array[i].set_strength(val[i]);
}
std::qsort(point_array, n_corners, sizeof(sdet_harris_point),
(int (*)(const void *, const void *))&compare);
//output the corners (limit by maximum number of corners)
int num = (int)(percent_corners_/100.0*n_corners);
if (num>n_corners)
num = n_corners;
for (int i=0; i<num; i++)
{
points_.push_back(point_array[i].point());
// std::cout <<"s[" << i << "]=" << point_array[i].strength() << '\n';
}
delete [] point_array;
points_valid_ = true;
}
//----------------------------------------------------------
//: Clear internal storage
//
void sdet_harris_detector::clear()
{
  points_.clear();
  // Force re-extraction on the next extract_corners() call.
  points_valid_ = false;
}
| {
"pile_set_name": "Github"
} |
var parse = require('../');
var test = require('tape');
// With stopEarly, option parsing halts at the first positional argument
// ('ccc'); it and everything after it — even flag-shaped strings like
// '--ddd' — are collected verbatim into argv._.
test('stops parsing on the first non-option when stopEarly is set', function (t) {
    var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], {
        stopEarly: true
    });
    t.deepEqual(argv, {
        aaa: 'bbb',
        _: ['ccc', '--ddd']
    });
    t.end();
});
| {
"pile_set_name": "Github"
} |
<template>
</template>
<script>
// Empty single-file-component scaffold: no state, lifecycle work or methods yet.
export default {
    data() {
        return {};
    },
    mounted() {
        //
    },
    methods: {
    }
}
</script>
| {
"pile_set_name": "Github"
} |
{-# OPTIONS_GHC -fwarn-incomplete-patterns -fwarn-overlapping-patterns #-}
{-# LANGUAGE GADTs, TypeFamilies #-}
module T3927a where
-- F collapses every type to the unit type.
type family F a
type instance F a = ()
-- GADT whose index records each constructor's payload type.
data Foo a where
  FooA :: Foo ()
  FooB :: Foo Int
-- Because F a reduces to () for every a, the second argument can only be
-- FooA, so this single equation should be accepted as exhaustive
-- (this module exercises GHC's pattern-coverage checker).
f :: a -> Foo (F a) -> () -- F a can only be () so only FooA is accepted
f _ FooA = ()
| {
"pile_set_name": "Github"
} |
darkskyline script DarkSkyline\DarkSkyline.eps
ds_install script DarkSkyline\Install.eps tdip
ds_uninstall script DarkSkyline\Uninstall.eps tdip
ds_load driverload -name tdip
ds_unload driverunload -name tdip
ds_verify script DarkSkyline\VerifyInstall.eps tdip
ds_running script DarkSkyline\VerifyRunning.eps tdip
ds_status script DarkSkyline\GetStatus.eps tdip
ds_getfilter packetscan -name tdip -get
ds_setfilter script DarkSkyline\SetFilter.eps tdip
ds_setmaxsize script DarkSkyline\SetMaxFileSize.eps tdip
ds_disable packetscan -name tdip -control disable
ds_enable packetscan -name tdip -control enable
ds_start packetscan -name tdip -control start
ds_stop packetscan -name tdip -control stop
ds_getcapture script DarkSkyline\HandleCapture.eps tdip get
ds_deletecapture script DarkSkyline\HandleCapture.eps tdip delete
ds_parsecapture script DarkSkyline\ParseCapture.eps | {
"pile_set_name": "Github"
} |
lf
lf
crlf
lf
lf
| {
"pile_set_name": "Github"
} |
[main]
test_datasets=[<val_data>,<val_data_no_target>]
[batching]
class=dataset.BatchingScheme
batch_size=10
[val_data]
class=dataset.load
; test wildcards
series=["source", "target"]
data=["tests/data/val10.part?.tc.en", "tests/data/val10.tc.de"]
outputs=[("target", "tests/outputs/tmpout-val10.tc.de")]
batching=<batching>
[val_data_no_target]
class=dataset.load
series=["source"]
data=["tests/data/val10.tc.en"]
outputs=[("target", "tests/outputs/tmpout-val10.tc.de")]
batching=<batching>
| {
"pile_set_name": "Github"
} |
"use strict";
const conversions = require("webidl-conversions");
const utils = require("./utils.js");
const HTMLElement = require("./HTMLElement.js");
const impl = utils.implSymbol;
// Wrapper-class constructor is never called directly; instances are created
// via iface.create below (matches the WebIDL "Illegal constructor" behavior).
function HTMLTitleElement() {
  throw new TypeError("Illegal constructor");
}
HTMLTitleElement.prototype = Object.create(HTMLElement.interface.prototype);
HTMLTitleElement.prototype.constructor = HTMLTitleElement;
HTMLTitleElement.prototype.toString = function () {
  if (this === HTMLTitleElement.prototype) {
    return "[object HTMLTitleElementPrototype]";
  }
  return HTMLElement.interface.prototype.toString.call(this);
};
// The `text` IDL attribute: delegates to the implementation object stored
// under the private `impl` symbol, coercing assignments to DOMString.
Object.defineProperty(HTMLTitleElement.prototype, "text", {
  get() {
    return this[impl].text;
  },
  set(V) {
    V = conversions["DOMString"](V);
    this[impl].text = V;
  },
  enumerable: true,
  configurable: true
});
// Interface descriptor consumed by jsdom's wrapper machinery: type tests,
// wrapper/impl factories, and the global-exposure table.
const iface = {
  mixedInto: [],
  // True if `obj` is a wrapper for this interface (or for one it is mixed into).
  is(obj) {
    if (obj) {
      if (obj[impl] instanceof Impl.implementation) {
        return true;
      }
      for (let i = 0; i < module.exports.mixedInto.length; ++i) {
        if (obj instanceof module.exports.mixedInto[i]) {
          return true;
        }
      }
    }
    return false;
  },
  // Same test, but for an implementation object rather than its wrapper.
  isImpl(obj) {
    if (obj) {
      if (obj instanceof Impl.implementation) {
        return true;
      }
      const wrapper = utils.wrapperForImpl(obj);
      for (let i = 0; i < module.exports.mixedInto.length; ++i) {
        if (wrapper instanceof module.exports.mixedInto[i]) {
          return true;
        }
      }
    }
    return false;
  },
  // Create a wrapper (bypassing the throwing constructor) and return it.
  create(constructorArgs, privateData) {
    let obj = Object.create(HTMLTitleElement.prototype);
    this.setup(obj, constructorArgs, privateData);
    return obj;
  },
  // Like create, but return the backing implementation object instead.
  createImpl(constructorArgs, privateData) {
    let obj = Object.create(HTMLTitleElement.prototype);
    this.setup(obj, constructorArgs, privateData);
    return utils.implForWrapper(obj);
  },
  _internalSetup(obj) {
    HTMLElement._internalSetup(obj);
  },
  // Link wrapper and implementation object in both directions.
  setup(obj, constructorArgs, privateData) {
    if (!privateData) privateData = {};
    privateData.wrapper = obj;
    this._internalSetup(obj);
    obj[impl] = new Impl.implementation(constructorArgs, privateData);
    obj[impl][utils.wrapperSymbol] = obj;
  },
  interface: HTMLTitleElement,
  expose: {
    Window: { HTMLTitleElement: HTMLTitleElement }
  }
};
module.exports = iface;
// Required after the export — presumably to break a circular dependency
// between this generated wrapper and its implementation module (confirm).
const Impl = require("../nodes/HTMLTitleElement-impl.js");
| {
"pile_set_name": "Github"
} |
/* ************************************************************************
qooxdoo - the new era of web development
http://qooxdoo.org
Copyright:
2004-2008 1&1 Internet AG, Germany, http://www.1und1.de
License:
MIT: https://opensource.org/licenses/MIT
See the LICENSE file in the project's top-level directory for details.
Authors:
* Sebastian Werner (wpbasti)
* Fabian Jakobs (fjakobs)
************************************************************************ */
/**
* A Spacer is a "virtual" widget, which can be placed into any layout and takes
* the space a normal widget of the same size would take.
*
* Spacers are invisible and very light weight because they don't require any
* DOM modifications.
*
* *Example*
*
* Here is a little example of how to use the widget.
*
* <pre class='javascript'>
* var container = new qx.ui.container.Composite(new qx.ui.layout.HBox());
* container.add(new qx.ui.core.Widget());
* container.add(new qx.ui.core.Spacer(50));
* container.add(new qx.ui.core.Widget());
* </pre>
*
* This example places two widgets and a spacer into a container with a
* horizontal box layout. In this scenario the spacer creates an empty area of
* 50 pixel width between the two widgets.
*
* *External Documentation*
*
* <a href='http://qooxdoo.org/docs/#desktop/widget/spacer.md' target='_blank'>
* Documentation of this widget in the qooxdoo manual.</a>
*/
qx.Class.define("qx.ui.core.Spacer",
{
  extend : qx.ui.core.LayoutItem,
  /*
  *****************************************************************************
     CONSTRUCTOR
  *****************************************************************************
  */
  /**
   * @param width {Integer?null} the initial width
   * @param height {Integer?null} the initial height
   */
  construct : function(width, height)
  {
    this.base(arguments);
    // Initialize dimensions (omitted arguments default to 0)
    this.setWidth(width != null ? width : 0);
    this.setHeight(height != null ? height : 0);
  },
  /*
  *****************************************************************************
     MEMBERS
  *****************************************************************************
  */
  members :
  {
    /**
     * Helper method called from the visibility queue to detect outstanding changes
     * to the appearance.
     *
     * @internal
     */
    checkAppearanceNeeds : function() {
      // placeholder to improve compatibility with Widget.
    },
    /**
     * Recursively adds all children to the given queue
     *
     * @param queue {Map} The queue to add widgets to
     */
    addChildrenToQueue : function(queue) {
      // placeholder to improve compatibility with Widget.
    },
    /**
     * Removes this widget from its parent and dispose it.
     *
     * Please note that the widget is not disposed synchronously. The
     * real dispose happens after the next queue flush.
     *
     */
    destroy : function()
    {
      // Already scheduled for disposal; nothing to do.
      if (this.$$disposed) {
        return;
      }
      // Detach from the parent first so layout no longer accounts for us.
      var parent = this.$$parent;
      if (parent) {
        parent._remove(this);
      }
      // Actual disposal is deferred to the next queue flush.
      qx.ui.core.queue.Dispose.add(this);
    }
  }
});
| {
"pile_set_name": "Github"
} |
{
"private": true,
"name": "demo",
"version": "1.0.0",
"license": "MIT",
"scripts": {
"build": "gatsby build",
"develop": "gatsby develop",
"clean": "gatsby clean"
},
"dependencies": {
"@vojtaholik/gatsby-theme-simplecast": "^1.0.8",
"gatsby": "^2.13.41",
"gatsby-plugin-theme-ui": "^0.2.18",
"gatsby-theme-ui": "^0.2.0",
"react": "^16.8.6",
"react-dom": "^16.8.6",
"theme-ui": "^0.2.21"
}
}
| {
"pile_set_name": "Github"
} |
/*
* <<
* Davinci
* ==
* Copyright (C) 2016 - 2019 EDP
* ==
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* >>
*
*/
package edp.davinci.service.impl;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONObject;
import edp.core.common.jdbc.JdbcDataSource;
import edp.core.enums.DataTypeEnum;
import edp.core.exception.NotFoundException;
import edp.core.exception.ServerException;
import edp.core.exception.SourceException;
import edp.core.exception.UnAuthorizedExecption;
import edp.core.model.DBTables;
import edp.core.model.JdbcSourceInfo;
import edp.core.model.JdbcSourceInfo.JdbcSourceInfoBuilder;
import edp.core.model.QueryColumn;
import edp.core.model.TableInfo;
import edp.core.utils.*;
import edp.davinci.core.common.Constants;
import edp.davinci.core.enums.*;
import edp.davinci.core.model.DataUploadEntity;
import edp.davinci.core.model.RedisMessageEntity;
import edp.davinci.core.utils.CsvUtils;
import edp.davinci.core.utils.ExcelUtils;
import edp.davinci.core.utils.SourcePasswordEncryptUtils;
import edp.davinci.dao.SourceMapper;
import edp.davinci.dao.ViewMapper;
import edp.davinci.dto.projectDto.ProjectDetail;
import edp.davinci.dto.projectDto.ProjectPermission;
import edp.davinci.dto.sourceDto.*;
import edp.davinci.model.Source;
import edp.davinci.model.User;
import edp.davinci.model.View;
import edp.davinci.runner.LoadSupportDataSourceRunner;
import edp.davinci.service.ProjectService;
import edp.davinci.service.SourceService;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeanUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.util.StringUtils;
import org.springframework.web.multipart.MultipartFile;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupFile;
import java.util.*;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import static edp.core.consts.Consts.JDBC_DATASOURCE_DEFAULT_VERSION;
import static edp.davinci.core.common.Constants.DAVINCI_TOPIC_CHANNEL;
/**
 * Service-layer implementation for managing Davinci data sources:
 * CRUD on {@code Source} entities, JDBC connectivity tests, CSV/Excel data
 * uploads into a MySQL-backed source, database/table metadata browsing and
 * connection-pool release (optionally broadcast to peers via Redis).
 */
@Slf4j
@Service("sourceService")
public class SourceServiceImpl extends BaseEntityService implements SourceService {
    // Dedicated audit logger for business operations (create/update/delete).
    private static final Logger optLogger = LoggerFactory.getLogger(LogNameEnum.BUSINESS_OPERATION.getName());
    @Autowired
    private SourceMapper sourceMapper;
    @Autowired
    private SqlUtils sqlUtils;
    @Autowired
    private ViewMapper viewMapper;
    @Autowired
    private ProjectService projectService;
    @Autowired
    private JdbcDataSource jdbcDataSource;
    @Autowired
    private RedisUtils redisUtils;
    // Entity kind passed to the shared permission/lock helpers inherited from BaseEntityService.
    private static final CheckEntityEnum entity = CheckEntityEnum.SOURCE;
    /**
     * Checks whether a source name is already taken inside a project.
     *
     * @param name      source name to test
     * @param id        id of the source being edited, or {@code null} on create;
     *                  a hit on this same id is not counted as a conflict
     * @param projectId project scope of the uniqueness check
     * @return {@code true} if another source with this name exists in the project
     */
    @Override
    public boolean isExist(String name, Long id, Long projectId) {
        Long sourceId = sourceMapper.getByNameWithProjectId(name, projectId);
        if (null != id && null != sourceId) {
            return !id.equals(sourceId);
        }
        return null != sourceId && sourceId.longValue() > 0L;
    }
    // Raises a "name taken" alert (via BaseEntityService) when the name conflicts.
    private void checkIsExist(String name, Long id, Long projectId) {
        if (isExist(name, id, projectId)) {
            alertNameTaken(entity, name);
        }
    }
    /**
     * Gets the list of sources of a project visible to the given user.
     *
     * @param projectId project to list sources for
     * @param user      requesting user
     * @return the project's sources; {@code null} when the user is unauthorized
     *         or the user's source permission is HIDDEN
     */
    @Override
    public List<Source> getSources(Long projectId, User user)
            throws NotFoundException, UnAuthorizedExecption, ServerException {
        ProjectDetail projectDetail = null;
        try {
            projectDetail = projectService.getProjectDetail(projectId, user, false);
        } catch (NotFoundException e) {
            throw e;
        } catch (UnAuthorizedExecption e) {
            // Unauthorized users see no sources rather than an error.
            return null;
        }
        List<Source> sources = sourceMapper.getByProject(projectId);
        if (!CollectionUtils.isEmpty(sources)) {
            ProjectPermission projectPermission = projectService.getProjectPermission(projectDetail, user);
            if (projectPermission.getSourcePermission() == UserPermissionEnum.HIDDEN.getPermission()) {
                sources = null;
            }
        }
        return sources;
    }
    /**
     * Gets a single source's detail, with the config password decrypted.
     * Users with only READ permission get the detail without the config.
     *
     * @param id   source id
     * @param user requesting user
     * @return populated {@link SourceDetail}
     * @throws UnAuthorizedExecption when the user's source permission is HIDDEN
     */
    @Override
    public SourceDetail getSourceDetail(Long id, User user)
            throws NotFoundException, UnAuthorizedExecption, ServerException {
        Source source = getSource(id);
        ProjectPermission projectPermission = getProjectPermission(source.getProjectId(), user);
        if (projectPermission.getSourcePermission() == UserPermissionEnum.HIDDEN.getPermission()) {
            throw new UnAuthorizedExecption();
        }
        SourceDetail sourceDetail = new SourceDetail();
        BeanUtils.copyProperties(source, sourceDetail);
        // Decrypt the password stored inside the JSON config before returning it.
        JSONObject jsonObject = JSONObject.parseObject(sourceDetail.getConfig());
        String decrypt = SourcePasswordEncryptUtils.decrypt((String) jsonObject.get("password"));
        jsonObject.put("password", decrypt);
        sourceDetail.setConfig(jsonObject.toString());
        if (projectPermission.getSourcePermission() == UserPermissionEnum.READ.getPermission()) {
            // READ-only users must not see connection credentials at all.
            sourceDetail.setConfig(null);
        }
        return sourceDetail;
    }
    /**
     * Creates a new source after validating permissions, name uniqueness,
     * source type and live connectivity. The config password is stored encrypted.
     *
     * @param sourceCreate creation payload (project, name, type, config)
     * @param user         creating user (must have write permission)
     * @return the persisted {@link Source}
     * @throws ServerException on invalid type, failed connection test or insert failure
     */
    @Override
    @Transactional
    public Source createSource(SourceCreate sourceCreate, User user)
            throws NotFoundException, UnAuthorizedExecption, ServerException {
        Long projectId = sourceCreate.getProjectId();
        checkWritePermission(entity, projectId, user, "create");
        String name = sourceCreate.getName();
        checkIsExist(name, null, projectId);
        if (null == SourceTypeEnum.typeOf(sourceCreate.getType())) {
            throw new ServerException("Invalid source type");
        }
        // Distributed lock guards against concurrent creates with the same name.
        BaseLock lock = getLock(entity, name, projectId);
        if (lock != null && !lock.getLock()) {
            alertNameTaken(entity, name);
        }
        try {
            SourceConfig config = sourceCreate.getConfig();
            // Test the connection before persisting anything.
            if (!testConnection(config)) {
                throw new ServerException("test source connection fail");
            }
            Source source = new Source().createdBy(user.getId());
            BeanUtils.copyProperties(sourceCreate, source);
            // Encrypt the password in config before it is stored.
            JSONObject jsonObject = JSONObject.parseObject(JSONObject.toJSONString(config));
            String encrypt = SourcePasswordEncryptUtils.encrypt((String) jsonObject.get("password"));
            jsonObject.put("password", encrypt);
            source.setConfig(jsonObject.toString());
            if (sourceMapper.insert(source) != 1) {
                log.info("create source fail:{}", source.toString());
                throw new ServerException("create source fail");
            }
            optLogger.info("source ({}) create by user (:{})", source.toString(), user.getId());
            return source;
        } finally {
            releaseLock(lock);
        }
    }
    /**
     * Loads a source by id or throws {@link NotFoundException} when absent.
     */
    private Source getSource(Long id) {
        Source source = sourceMapper.getById(id);
        if (null == source) {
            log.warn("source (:{}) is not found", id);
            throw new NotFoundException("this source is not found");
        }
        return source;
    }
    /**
     * Tests connectivity for an in-memory source config.
     * The password is encrypted first because SqlUtils.init expects the
     * stored (encrypted) form — TODO confirm against SqlUtils contract.
     */
    private boolean testConnection(SourceConfig config) {
        // The password is encrypted before being handed to SqlUtils.
        String encrypt = SourcePasswordEncryptUtils.encrypt(config.getPassword());
        return sqlUtils.init(
                config.getUrl(),
                config.getUsername(),
                encrypt,
                config.getVersion(),
                config.getProperties(),
                config.isExt()
        ).testConnection();
    }
    /**
     * Updates an existing source. Verifies write permission, name uniqueness
     * and connectivity; persists the change with an encrypted password, and
     * releases the pooled datasource of the old connection key if the
     * connection identity (url/user/password/version/ext) changed.
     *
     * @param sourceInfo update payload
     * @param user       updating user
     * @return the updated {@link Source}
     */
    @Override
    @Transactional
    public Source updateSource(SourceInfo sourceInfo, User user)
            throws NotFoundException, UnAuthorizedExecption, ServerException {
        Source source = getSource(sourceInfo.getId());
        checkWritePermission(entity, source.getProjectId(), user, "update");
        String name = sourceInfo.getName();
        Long projectId = source.getProjectId();
        checkIsExist(name, source.getId(), projectId);
        BaseLock lock = getLock(entity, name, projectId);
        // NOTE(review): unlike createSource, lock is dereferenced without a null
        // check here — confirm getLock never returns null in this path.
        if (!lock.getLock()) {
            alertNameTaken(entity, name);
        }
        try {
            SourceConfig config = sourceInfo.getConfig();
            // Test the connection with the new config before saving.
            if (!testConnection(config)) {
                throw new ServerException("test source connection fail");
            }
            // Keep a copy of the pre-update source so its (possibly stale)
            // pooled datasource can be released afterwards.
            Source sourceCopy = new Source();
            BeanUtils.copyProperties(source, sourceCopy);
            BeanUtils.copyProperties(sourceInfo, source);
            source.updatedBy(user.getId());
            // Encrypt the password in config before it is stored.
            JSONObject jsonObject = JSONObject.parseObject(JSONObject.toJSONString(sourceInfo.getConfig()));
            String encrypt = SourcePasswordEncryptUtils.encrypt((String) jsonObject.get("password"));
            jsonObject.put("password", encrypt);
            source.setConfig(jsonObject.toString());
            if (sourceMapper.update(source) != 1) {
                log.info("update source fail:{}", source.toString());
                throw new ServerException("update source fail:unspecified error");
            }
            // Release the stale datasource only when the connection key changed.
            String copyKey = SourceUtils.getKey(
                    sourceCopy.getJdbcUrl(),
                    sourceCopy.getUsername(),
                    sourceCopy.getPassword(),
                    sourceCopy.getDbVersion(),
                    sourceCopy.isExt());
            String newKey = SourceUtils.getKey(
                    config.getUrl(),
                    config.getUsername(),
                    config.getPassword(),
                    config.getVersion(),
                    config.isExt());
            if (!newKey.equals(copyKey)) {
                releaseSource(sourceCopy);
            }
            optLogger.info("source ({}) update by user (:{})", source.toString(), user.getId());
            return source;
        } finally {
            releaseLock(lock);
        }
    }
    /**
     * Deletes a source, refusing when any view still references it, and
     * releases its pooled datasource on success.
     * NOTE: method name misspelled ("Srouce"); kept as-is because it overrides
     * the SourceService interface — renaming would break the contract.
     *
     * @param id   source id
     * @param user deleting user (must have write permission)
     * @return {@code true} when exactly one row was deleted
     */
    @Override
    @Transactional
    public boolean deleteSrouce(Long id, User user) throws NotFoundException, UnAuthorizedExecption, ServerException {
        Source source = getSource(id);
        checkWritePermission(entity, source.getProjectId(), user, "delete");
        List<View> viewList = viewMapper.getBySourceId(id);
        if (!CollectionUtils.isEmpty(viewList)) {
            log.warn("There is at least one view using the source ({}), it is can not be deleted", id);
            throw new ServerException("There is at least one view using the source, it is can not be deleted");
        }
        if (sourceMapper.deleteById(id) == 1) {
            optLogger.info("source ({}) delete by user (:{})", source.toString(), user.getId());
            releaseSource(source);
            return true;
        }
        return false;
    }
    /**
     * Tests connectivity for an ad-hoc (not yet persisted) source definition.
     * Non-ext sources and sources on the default JDBC version have their
     * version/ext flags normalized before the test.
     *
     * @param sourceTest connection parameters to test
     * @return {@code true} on success
     * @throws ServerException when the connection test fails or errors
     */
    @Override
    public boolean testSource(SourceTest sourceTest) throws ServerException {
        boolean testConnection = false;
        try {
            if (!sourceTest.isExt()) {
                sourceTest.setVersion(null);
            }
            if (StringUtils.isEmpty(sourceTest.getVersion())
                    || JDBC_DATASOURCE_DEFAULT_VERSION.equals(sourceTest.getVersion())) {
                sourceTest.setVersion(null);
                sourceTest.setExt(false);
            }
            JdbcSourceInfo jdbcSourceInfo = JdbcSourceInfoBuilder
                    .aJdbcSourceInfo()
                    .withJdbcUrl(sourceTest.getUrl())
                    .withUsername(sourceTest.getUsername())
                    .withPassword(sourceTest.getPassword())
                    .withProperties(sourceTest.getProperties())
                    .withExt(sourceTest.isExt())
                    .withDbVersion(sourceTest.getVersion())
                    .build();
            testConnection = new SourceUtils(jdbcDataSource).testSource(jdbcSourceInfo);
        } catch (SourceException e) {
            log.error(e.toString(), e);
            throw new ServerException(e.getMessage());
        }
        if (!testConnection) {
            throw new ServerException("test source connection fail");
        }
        return true;
    }
    /**
     * Validates that the upload target table's existence matches the upload
     * mode before a CSV upload: NEW mode requires the table to be absent,
     * other modes require it to exist.
     *
     * @param sourceId   target source
     * @param uploadMeta table name and upload mode
     * @param user       uploading user (must have write permission)
     */
    @Override
    public void validCsvmeta(Long sourceId, UploadMeta uploadMeta, User user)
            throws NotFoundException, UnAuthorizedExecption, ServerException {
        Source source = getSource(sourceId);
        checkWritePermission(entity, source.getProjectId(), user, "upload csv file in");
        try {
            boolean tableIsExist = sqlUtils.init(source).tableIsExist(uploadMeta.getTableName());
            if (uploadMeta.getMode() == UploadModeEnum.NEW.getMode()) {
                if (tableIsExist) {
                    throw new ServerException("table " + uploadMeta.getTableName() + " is already exist");
                }
            } else {
                if (!tableIsExist) {
                    throw new ServerException("table " + uploadMeta.getTableName() + " is not exist");
                }
            }
        } catch (SourceException e) {
            log.error(e.getMessage());
            throw new ServerException(e.getMessage());
        }
    }
    /**
     * Uploads a csv/xlsx/xls file into the source's database: parses the file,
     * creates (or reuses) the target table, then batch-inserts the rows.
     * Only MySQL-backed sources are supported.
     *
     * @param sourceId         target source
     * @param sourceDataUpload table name, mode, keys and index definitions
     * @param file             uploaded file
     * @param user             uploading user (must have write permission)
     * @param type             file type, one of csv/xlsx/xls
     * @return {@code true} on success
     */
    @Override
    @Transactional
    public Boolean dataUpload(Long sourceId, SourceDataUpload sourceDataUpload, MultipartFile file, User user,
            String type) throws NotFoundException, UnAuthorizedExecption, ServerException {
        Source source = getSource(sourceId);
        checkWritePermission(entity, source.getProjectId(), user, "upload data in");
        if (!type.equals(FileTypeEnum.CSV.getType()) && !type.equals(FileTypeEnum.XLSX.getType())
                && !type.equals(FileTypeEnum.XLS.getType())) {
            throw new ServerException("Unsupported file format");
        }
        // Verify that the file content really matches the declared type.
        if (type.equals(FileTypeEnum.CSV.getType()) && !FileUtils.isCsv(file)) {
            throw new ServerException("Please upload csv file");
        }
        if (type.equals(FileTypeEnum.XLSX.getType()) && !FileUtils.isExcel(file)) {
            throw new ServerException("Please upload excel file");
        }
        DataTypeEnum dataTypeEnum = DataTypeEnum.urlOf(source.getJdbcUrl());
        if (dataTypeEnum != DataTypeEnum.MYSQL) {
            log.info("Unsupported data source, {}", source.getJdbcUrl());
            throw new ServerException("Unsupported data source: " + source.getJdbcUrl());
        }
        try {
            DataUploadEntity dataUploadEntity = null;
            if (type.equals(FileTypeEnum.CSV.getType())) {
                // Parse the csv file (first row as header).
                dataUploadEntity = CsvUtils.parseCsvWithFirstAsHeader(file, "UTF-8");
            } else {
                // Parse the excel file (first row as header).
                dataUploadEntity = ExcelUtils.parseExcelWithFirstAsHeader(file);
            }
            if (null != dataUploadEntity && !CollectionUtils.isEmpty(dataUploadEntity.getHeaders())) {
                // Create the target table according to the upload mode.
                createTable(dataUploadEntity.getHeaders(), sourceDataUpload, source);
                // Insert the parsed rows.
                insertData(dataUploadEntity.getHeaders(), dataUploadEntity.getValues(), sourceDataUpload, source);
            }
        } catch (Exception e) {
            throw new ServerException(e.getMessage());
        }
        return true;
    }
    /**
     * Returns {@code obj} unchanged unless the user's source permission is
     * HIDDEN, in which case logs and returns {@code null}.
     * Callers must use the return value for the hiding to take effect.
     */
    private <T> T handleHiddenPermission(T obj, ProjectDetail projectDetail, User user, Long sourceId,
            String operation) {
        ProjectPermission projectPermission = projectService.getProjectPermission(projectDetail, user);
        if (projectPermission.getSourcePermission() != UserPermissionEnum.HIDDEN.getPermission()) {
            return obj;
        }
        log.info("user (:{}) have not permission to get source (:{}) {}", user.getId(), sourceId, operation);
        return null;
    }
    /**
     * Gets the database names of a source.
     *
     * @param id   source id
     * @param user requesting user
     * @return database names; {@code null} when the user's permission is HIDDEN
     * @throws NotFoundException when the source does not exist
     * @throws ServerException   on a connection/metadata error
     */
    @Override
    public List<String> getSourceDbs(Long id, User user) throws NotFoundException, ServerException {
        Source source = getSource(id);
        ProjectDetail projectDetail = projectService.getProjectDetail(source.getProjectId(), user, false);
        List<String> dbList = null;
        try {
            dbList = sqlUtils.init(source).getDatabases();
        } catch (SourceException e) {
            throw new ServerException(e.getMessage());
        }
        if (null != dbList) {
            dbList = handleHiddenPermission(dbList, projectDetail, user, source.getId(), "databases");
        }
        return dbList;
    }
    /**
     * Gets the table list of one database of a source.
     *
     * @param id     source id
     * @param dbName database to list tables of
     * @param user   requesting user
     * @return a {@link DBTables} wrapper for the database
     */
    @Override
    public DBTables getSourceTables(Long id, String dbName, User user) throws NotFoundException {
        DBTables dbTable = new DBTables(dbName);
        Source source = getSource(id);
        ProjectDetail projectDetail = projectService.getProjectDetail(source.getProjectId(), user, false);
        List<QueryColumn> tableList = null;
        try {
            tableList = sqlUtils.init(source).getTableList(dbName);
        } catch (SourceException e) {
            throw new ServerException(e.getMessage());
        }
        if (null != tableList) {
            // NOTE(review): the return value is discarded, so a HIDDEN permission
            // does not actually null out tableList here (unlike getSourceDbs).
            // Looks unintentional — confirm before relying on it.
            handleHiddenPermission(tableList, projectDetail, user, source.getId(), "tables");
        }
        if (null != tableList) {
            dbTable.setTables(tableList);
        }
        return dbTable;
    }
    /**
     * Gets column/metadata information of a single table.
     *
     * @param id        source id
     * @param dbName    database name
     * @param tableName table name
     * @param user      requesting user
     * @return the table info, or {@code null} if the lookup returned nothing
     */
    @Override
    public TableInfo getTableInfo(Long id, String dbName, String tableName, User user) throws NotFoundException {
        Source source = getSource(id);
        ProjectDetail projectDetail = projectService.getProjectDetail(source.getProjectId(), user, false);
        TableInfo tableInfo = null;
        try {
            tableInfo = sqlUtils.init(source).getTableInfo(dbName, tableName);
        } catch (SourceException e) {
            e.printStackTrace();
            throw new ServerException(e.getMessage());
        }
        if (null != tableInfo) {
            // NOTE(review): return value discarded — same pattern as getSourceTables;
            // HIDDEN permission is logged but does not hide the result here.
            handleHiddenPermission(tableInfo, projectDetail, user, source.getId(), "table columns");
        }
        return tableInfo;
    }
    /**
     * Lists the datasource types supported by this deployment.
     */
    @Override
    public List<DatasourceType> getDatasources() {
        return LoadSupportDataSourceRunner.getSupportDatasourceList();
    }
    /**
     * Re-establishes the connection pool of a source after verifying the
     * caller-supplied db credentials against the stored ones.
     *
     * @param id         source id
     * @param dbBaseInfo db user/password supplied for confirmation
     * @param user       requesting user (must have write permission)
     * @return result of a fresh connection test
     */
    @Override
    public boolean reconnect(Long id, DbBaseInfo dbBaseInfo, User user)
            throws NotFoundException, UnAuthorizedExecption, ServerException {
        Source source = getSource(id);
        checkWritePermission(entity, source.getProjectId(), user, "reconnect");
        if (!(dbBaseInfo.getDbUser().equals(source.getUsername())
                && dbBaseInfo.getDbPassword().equals(SourcePasswordEncryptUtils.decrypt(source.getPassword())))) {
            log.warn("reconnect source (:{}) error, dbuser and dbpassword is wrong", id);
            throw new ServerException("user or password is wrong");
        }
        releaseSource(source);
        return sqlUtils.init(source).testConnection();
    }
    /**
     * Releases the pooled datasource of a (stale) source and, when Redis is
     * enabled, broadcasts a reconnect message so peer nodes do the same.
     *
     * @param source the source whose pool should be released
     */
    private void releaseSource(Source source) {
        SourceUtils sourceUtils = new SourceUtils(jdbcDataSource);
        JdbcSourceInfo jdbcSourceInfo = JdbcSourceInfoBuilder
                .aJdbcSourceInfo()
                .withJdbcUrl(source.getJdbcUrl())
                .withUsername(source.getUsername())
                .withPassword(source.getPassword())
                .withDbVersion(source.getDbVersion())
                .withExt(source.isExt())
                .build();
        sourceUtils.releaseDataSource(jdbcSourceInfo);
        if (redisUtils.isRedisEnable()) {
            Map<String, Object> map = new HashMap<>();
            map.put("url", source.getJdbcUrl());
            map.put("username", source.getUsername());
            map.put("password", source.getPassword());
            map.put("version", source.getDbVersion());
            map.put("ext", source.isExt());
            // Remember the id so the local node can ignore its own broadcast.
            // TODO confirm against SourceMessageHandler's handling.
            SourceUtils.getReleaseSourceSet().add(String.valueOf(source.getId()));
            publishReconnect(JSON.toJSONString(map), source.getId());
        }
    }
    /**
     * Publishes a reconnect message on the Davinci Redis topic channel.
     *
     * @param message JSON payload describing the connection to release
     * @param id      id of the source being reconnected
     */
    private void publishReconnect(String message, Long id) {
        redisUtils.convertAndSend(DAVINCI_TOPIC_CHANNEL, new RedisMessageEntity(SourceMessageHandler.class, message, String.valueOf(id)));
    }
    /**
     * Creates (or re-creates) the upload target table from the parsed headers,
     * using the "createTable" StringTemplate. COVER mode drops and re-creates;
     * NEW mode creates only if absent; other modes require the table to exist.
     *
     * @param fields           column definitions derived from the file header
     * @param sourceDataUpload table name, mode, primary keys and indexes
     * @param source           target source
     * @throws ServerException on empty fields, mode conflicts or SQL errors
     */
    private void createTable(Set<QueryColumn> fields, SourceDataUpload sourceDataUpload, Source source)
            throws ServerException {
        if (CollectionUtils.isEmpty(fields)) {
            throw new ServerException("there is have not any fields");
        }
        SqlUtils sqlUtils = this.sqlUtils.init(source);
        STGroup stg = new STGroupFile(Constants.SQL_TEMPLATE);
        String sql = null;
        if (sourceDataUpload.getMode() == UploadModeEnum.COVER.getMode()) {
            ST st = stg.getInstanceOf("createTable");
            st.add("tableName", sourceDataUpload.getTableName());
            st.add("fields", fields);
            st.add("primaryKeys", StringUtils.isEmpty(sourceDataUpload.getPrimaryKeys()) ? null
                    : sourceDataUpload.getPrimaryKeys().split(","));
            st.add("indexKeys", sourceDataUpload.getIndexList());
            sql = st.render();
            // COVER: drop any existing table before re-creating it.
            String dropSql = "DROP TABLE IF EXISTS `" + sourceDataUpload.getTableName() + "`";
            sqlUtils.jdbcTemplate().execute(dropSql);
            log.info("drop table sql : {}", dropSql);
        } else {
            boolean tableIsExist = sqlUtils.tableIsExist(sourceDataUpload.getTableName());
            if (sourceDataUpload.getMode() == UploadModeEnum.NEW.getMode()) {
                if (!tableIsExist) {
                    ST st = stg.getInstanceOf("createTable");
                    st.add("tableName", sourceDataUpload.getTableName());
                    st.add("fields", fields);
                    st.add("primaryKeys", sourceDataUpload.getPrimaryKeys());
                    st.add("indexKeys", sourceDataUpload.getIndexList());
                    sql = st.render();
                } else {
                    throw new ServerException("table " + sourceDataUpload.getTableName() + " is already exist");
                }
            } else {
                if (!tableIsExist) {
                    throw new ServerException("table " + sourceDataUpload.getTableName() + " is not exist");
                }
            }
        }
        log.info("create table sql : {}", sql);
        try {
            if (!StringUtils.isEmpty(sql)) {
                sqlUtils.jdbcTemplate().execute(sql);
            }
        } catch (Exception e) {
            throw new ServerException(e.getMessage());
        }
    }
    /**
     * Inserts parsed rows into the upload target table. COVER/REPLACE modes
     * truncate first; other modes append but require the table to exist.
     *
     * @param headers          column definitions
     * @param values           parsed data rows
     * @param sourceDataUpload upload parameters (table name, mode)
     * @param source           target source
     */
    private void insertData(Set<QueryColumn> headers, List<Map<String, Object>> values,
            SourceDataUpload sourceDataUpload, Source source) throws ServerException {
        if (CollectionUtils.isEmpty(values)) {
            return;
        }
        SqlUtils sqlUtils = this.sqlUtils.init(source);
        try {
            if (sourceDataUpload.getMode() == UploadModeEnum.COVER.getMode() || sourceDataUpload.getMode() == UploadModeEnum.REPLACE.getMode()) {
                // Empty the table, then reload all rows.
                sqlUtils.jdbcTemplate().execute("Truncate table `" + sourceDataUpload.getTableName() + "`");
                executeInsert(sourceDataUpload.getTableName(), headers, values, sqlUtils);
            } else {
                boolean tableIsExist = sqlUtils.tableIsExist(sourceDataUpload.getTableName());
                if (tableIsExist) {
                    executeInsert(sourceDataUpload.getTableName(), headers, values, sqlUtils);
                } else {
                    throw new ServerException("table " + sourceDataUpload.getTableName() + " is not exist");
                }
            }
        } catch (ServerException e) {
            e.printStackTrace();
            throw new ServerException(e.getMessage());
        }
    }
    /**
     * Performs the batch insert on a thread pool: rows are split into pages of
     * up to 1000 and each page is inserted by a pooled task (max 8 threads).
     * Blocks until all pages complete.
     *
     * @param tableName target table
     * @param headers   column definitions (used to bind parameters)
     * @param values    data rows
     * @param sqlUtils  initialized SqlUtils for the target source
     * @throws ServerException when any page fails or the wait is interrupted
     */
    private void executeInsert(String tableName, Set<QueryColumn> headers, List<Map<String, Object>> values,
            SqlUtils sqlUtils) throws ServerException {
        if (!CollectionUtils.isEmpty(values)) {
            int len = 1000;
            int totalSize = values.size();
            int pageSize = len;
            int totalPage = totalSize / pageSize;
            if (totalSize % pageSize != 0) {
                totalPage += 1;
                if (totalSize < pageSize) {
                    pageSize = values.size();
                }
            }
            ExecutorService executorService = Executors.newFixedThreadPool(Math.min(totalPage, 8));
            STGroup stg = new STGroupFile(Constants.SQL_TEMPLATE);
            ST st = stg.getInstanceOf("insertData");
            st.add("tableName", tableName);
            st.add("columns", headers);
            String sql = st.render();
            log.info("sql : {}", st.render());
            List<Future> futures = new ArrayList<>();
            // Paged batch insert: one task per page.
            long startTime = System.currentTimeMillis();
            log.info("execute insert start ---- {}", DateUtils.toyyyyMMddHHmmss(startTime));
            for (int pageNum = 1; pageNum < totalPage + 1; pageNum++) {
                int localPageNum = pageNum;
                int localPageSize = pageSize;
                futures.add(executorService.submit(() -> {
                    int starNum = (localPageNum - 1) * localPageSize;
                    int endNum = Math.min(localPageNum * localPageSize, totalSize);
                    log.info("executeInsert thread-{} : start:{}, end:{}", localPageNum, starNum, endNum);
                    sqlUtils.executeBatch(sql, headers, values.subList(starNum, endNum));
                }));
            }
            try {
                // Wait for every page; any task failure surfaces as ExecutionException.
                for (Future future : futures) {
                    future.get();
                }
                long endTime = System.currentTimeMillis();
                log.info("execute insert end ---- {}", DateUtils.toyyyyMMddHHmmss(endTime));
                log.info("execution time {} second", (endTime - startTime) / 1000);
            } catch (InterruptedException | ExecutionException e) {
                e.printStackTrace();
                throw new ServerException(e.getMessage());
            } finally {
                executorService.shutdown();
            }
        }
    }
}
| {
"pile_set_name": "Github"
} |
---
title: チュートリアル:Azure Active Directory と Amazon Web Services (AWS) を統合して複数のアカウントを接続する | Microsoft Docs
description: Azure AD と アマゾン ウェブ サービス (AWS) の間でシングル サインオンを構成する方法について説明します (従来のチュートリアル)。
services: active-directory
author: jeevansd
manager: CelesteDG
ms.reviewer: celested
ms.service: active-directory
ms.subservice: saas-app-tutorial
ms.workload: identity
ms.topic: article
ms.date: 08/07/2020
ms.author: jeedes
ms.openlocfilehash: 20674f5a793267c3a9e2fa078f95cbf96624df13
ms.sourcegitcommit: 023d10b4127f50f301995d44f2b4499cbcffb8fc
ms.translationtype: HT
ms.contentlocale: ja-JP
ms.lasthandoff: 08/18/2020
ms.locfileid: "88550170"
---
# <a name="tutorial-azure-active-directory-integration-with-amazon-web-services-aws-legacy-tutorial"></a>チュートリアル:Azure Active Directory と アマゾン ウェブ サービス (AWS) の統合 (従来のチュートリアル)
このチュートリアルでは、Azure Active Directory (Azure AD) と アマゾン ウェブ サービス (AWS) を統合する方法について説明します (従来のチュートリアル)。
Amazon Web Services (AWS) と Azure AD の統合には、次の利点があります。
- Amazon Web Services (AWS) にアクセスする Azure AD ユーザーを制御できます。
- ユーザーが自分の Azure AD アカウントで自動的に Amazon Web Services (AWS) にサインオンできるようにします (シングル サインオン)。
- 1 つの中央サイト (Azure Portal) でアカウントを管理できます。
SaaS アプリと Azure AD の統合の詳細については、「[Azure Active Directory のアプリケーション アクセスとシングル サインオンとは](../manage-apps/what-is-single-sign-on.md)」をご覧ください。

> [!NOTE]
> 1 つの AWS アプリをすべての AWS アカウントに接続するアプローチはお勧めしないことに注意してください。 代わりに、[この](https://docs.microsoft.com/azure/active-directory/saas-apps/amazon-web-service-tutorial)アプローチを使用して、Azure AD で AWS アプリの複数のインスタンスに AWS アカウントの複数のインスタンスを構成することをお勧めします。 このアプローチは、AWS アカウントとそれに含まれるロール数が少ない場合にのみ使用してください。このモデルは、AWS アカウントとこれらのアカウント内のロールの増加に合わせてスケーリングできません。 このアプローチでは、Azure AD ユーザー プロビジョニングを使用した AWS ロールのインポート機能が使用されないため、ロールを手動で追加、更新、削除する必要があります。 このアプローチに関するその他の制限事項については、以下の詳細を参照してください。
**次の理由からこのアプローチの使用をお勧めしていません。**
* アプリにすべてのロールを適用するには、Microsoft Graph エクスプローラーを使用する必要があります。 マニフェスト ファイル アプローチの使用はお勧めしません。
* 単一の AWS アプリに対し最高 1200 のアプリ ロールを追加した後にアプリ上で操作すると、サイズに関連したエラーがスローされ始めたと報告するお客様をこれまで見てきました。 アプリケーション オブジェクトにはサイズのハード制限があります。
* ロールがいずれかのアカウントに追加されたときにロールを手動で更新する必要がありますが、これは残念ながら置換アプローチであって追加ではありません。 また、アカウントが増加した場合、これはアカウントとロールの n x n の関係になります。
* すべての AWS アカウントは、同じフェデレーション メタデータ XML ファイルを使用しており、証明書のロールオーバー時には、この大量の作業を行って、同時にすべての AWS アカウントで証明書を更新する必要があります
## <a name="prerequisites"></a>前提条件
Amazon Web Services (AWS) と Azure AD の統合を構成するには、次のものが必要です。
* Azure AD サブスクリプション。 Azure AD の環境がない場合は、[こちら](https://azure.microsoft.com/pricing/free-trial/)から 1 か月の評価版を入手できます
* アマゾン ウェブ サービス (AWS) でのシングル サインオンが有効なサブスクリプション
> [!NOTE]
> このチュートリアルの手順をテストする場合、運用環境を使用しないことをお勧めします。
このチュートリアルの手順をテストするには、次の推奨事項に従ってください。
- 必要な場合を除き、運用環境は使用しないでください。
- Azure AD の評価環境がない場合は、[1 か月の評価版を入手できます](https://azure.microsoft.com/pricing/free-trial/)。
## <a name="scenario-description"></a>シナリオの説明
このチュートリアルでは、テスト環境で Azure AD のシングル サインオンを構成してテストします。
* アマゾン ウェブ サービス (AWS) では、**SP と IDP** によって開始される SSO がサポートされます
* アマゾン ウェブ サービス (AWS) を構成したら、組織の機密データを流出と侵入からリアルタイムで保護するセッション制御を適用することができます。 セッション制御は、条件付きアクセスを拡張したものです。 [Microsoft Cloud App Security でセッション制御を適用する方法](https://docs.microsoft.com/cloud-app-security/proxy-deployment-aad)をご覧ください。
## <a name="adding-amazon-web-services-aws-from-the-gallery"></a>ギャラリーからの Amazon Web Services (AWS) の追加
Azure AD への Amazon Web Services (AWS) の統合を構成するには、ギャラリーから管理対象 SaaS アプリの一覧に Amazon Web Services (AWS) を追加する必要があります。
1. 職場または学校アカウントか、個人の Microsoft アカウントを使用して、[Azure portal](https://portal.azure.com) にサインインします。
1. 左のナビゲーション ウィンドウで **[Azure Active Directory]** サービスを選択します。
1. **[エンタープライズ アプリケーション]** に移動し、 **[すべてのアプリケーション]** を選択します。
1. 新しいアプリケーションを追加するには、 **[新しいアプリケーション]** を選択します。
1. **[ギャラリーから追加する]** セクションで、検索ボックスに「**アマゾン ウェブ サービス (AWS)** 」と入力します。
1. 結果パネルから **[アマゾン ウェブ サービス (AWS)]** を選択してそのアプリを追加します。 お使いのテナントにアプリが追加されるのを数秒待機します。
1. アプリケーションが追加されたら、 **[プロパティ]** ページに移動して、**オブジェクト ID** をコピーします

## <a name="configure-and-test-azure-ad-sso"></a>Azure AD SSO の構成とテスト
このセクションでは、"Britta Simon" というテスト ユーザーに基づいて、Amazon Web Services (AWS) で Azure AD のシングル サインオンを構成し、テストします。
シングル サインオンを機能させるには、Azure AD ユーザーに対応する Amazon Web Services (AWS) ユーザーが Azure AD で認識される必要があります。 言い換えると、Azure AD ユーザーと Amazon Web Services (AWS) の関連ユーザーの間で、リンク関係が確立されている必要があります。
Amazon Web Services (AWS) で、Azure AD の **[ユーザー名]** の値を **[Username]** の値として割り当ててリンク関係を確立します。
Amazon Web Services (AWS) との Azure AD シングル サインオンを構成してテストするには、次の構成要素を完了する必要があります。
1. **[Azure AD シングル サインオンの構成](#configure-azure-ad-single-sign-on)** - ユーザーがこの機能を使用できるようにします。
2. **[アマゾン ウェブ サービス (AWS) シングル サインオンの構成](#configure-amazon-web-services-aws-single-sign-on)** - アプリケーション側でシングル サインオン設定を構成します。
3. **[シングル サインオンのテスト](#test-single-sign-on)** - 構成が機能するかどうかを確認します。
### <a name="configure-azure-ad-single-sign-on"></a>Azure AD シングル サインオンの構成
このセクションでは、Azure ポータルで Azure AD のシングル サインオンを有効にし、Amazon Web Services (AWS) アプリケーションへのシングル サインオンを構成します。
**Amazon Web Services (AWS) との Azure AD シングル サインオンを構成するには、次の手順を実行します。**
1. [Microsoft Azure portal](https://portal.azure.com/) の**アマゾン ウェブ サービス (AWS)** アプリケーション統合ページで、 **[シングル サインオン]** を選択します。

2. **[シングル サインオン方式の選択]** ダイアログで、 **[SAML/WS-Fed]** モードを選択して、シングル サインオンを有効にします。

3. **[SAML でシングル サインオンをセットアップします]** ページで、 **[編集]** アイコンをクリックして **[基本的な SAML 構成]** ダイアログを開きます。

4. アプリは Azure と事前に統合済みであるため、 **[基本的な SAML 構成]** セクションで実行が必要な手順はありません。 **[保存]** をクリックします。
5. アマゾン ウェブ サービス (AWS) アプリケーションは、特定の形式で構成された SAML アサーションを受け入れます。 このアプリケーションには、次の要求を構成します。 これらの属性の値は、アプリケーション統合ページの **[ユーザー属性と要求]** セクションで管理できます。 **[SAML でシングル サインオンをセットアップします]** ページで、 **[編集]** ボタンをクリックして **[ユーザー属性と要求]** ダイアログを開きます。

6. **[ユーザー属性]** ダイアログの **[ユーザーの要求]** セクションで、上の図のように SAML トークン属性を構成し、次の手順を実行します。
| 名前 | ソース属性 | 名前空間 |
| --------------- | --------------- | --------------- |
| RoleSessionName | user.userprincipalname | `https://aws.amazon.com/SAML/Attributes` |
| Role | user.assignedroles | `https://aws.amazon.com/SAML/Attributes`|
| SessionDuration | 「900 秒 (15 分) から43200 秒 (12 時間) の値を指定してください」 | `https://aws.amazon.com/SAML/Attributes` |
a. **[新しい要求の追加]** をクリックして **[ユーザー要求の管理]** ダイアログを開きます。


b. **[名前]** ボックスに、その行に対して表示される属性名を入力します。
c. **[名前空間]** ボックスに、その行に表示される名前空間の値を入力します。
d. [ソース] として **[属性]** を選択します。
e. **[ソース属性]** の一覧から、その行に表示される属性値を入力します。
f. **[OK]** をクリックします。
g. **[保存]** をクリックします。
7. **[SAML でシングル サインオンをセットアップします]** ページの **[SAML 署名証明書]** セクションで、 **[ダウンロード]** をクリックして**フェデレーション メタデータ XML** をダウンロードし、コンピューターに保存します。

### <a name="configure-amazon-web-services-aws-single-sign-on"></a>アマゾン ウェブ サービス (AWS) のシングル サインオンの構成
1. 別の Web ブラウザーのウィンドウで、管理者として Amazon Web Services (AWS) 企業サイトにサインオンします。
1. **[AWS Home]** をクリックします。
![シングル サインオン ホームの構成][11]
1. **[Identity and Access Management]** をクリックします。
![シングル サインオン ID の構成][12]
1. **[Identity Providers]** 、 **[Create Provider]** の順にクリックします。
![シングル サインオン プロバイダーの構成][13]
1. **[Configure Provider]** ダイアログ ページで、次の手順を実行します。
![シングル サインオンの構成ダイアログ][14]
a. **[Provider Type]** として **[SAML]** を選択します。
b. **[Provider Name]\(プロバイダー名\)** ボックスにプロバイダー名を入力します (例: *WAAD*)。
c. Azure Portal からダウンロードした**メタデータ ファイル**をアップロードするには、 **[ファイルの選択]** をクリックします。
    d. ページの下部にある **[Next Step]** をクリックします。
1. **[Verify Provider Information]** ダイアログ ボックスで、 **[Create]** をクリックします。
![シングル サインオンの検証の構成][15]
1. **[Roles]** をクリックしてから **[Create role]** をクリックします。
![シングル サインオン ロールの構成][16]
1. **[Create role]** ページで、以下の手順を実行します。
![シングル サインオンの信頼の構成][19]
a. **[Select type of trusted entity]** の **[SAML 2.0 federation]** を選択します。
b. **[Choose a SAML 2.0 Provider]\(SAML 2.0 プロバイダーの選択\)** セクションで、先ほど作成した **SAML プロバイダー**を選択します (例: *WAAD*)
c. **[Allow programmatic and AWS Management Console access]** を選択します。
d. **[次へ: Permissions]\(次へ: アクセス許可\)** をクリックします。
1. 検索バーで「**Administrator Access**」を検索し、**AdministratorAccess** チェックボックスをオンにして、**次へ:タグ**を選択します。

1. **[Add tags (optional)]\(タグの追加 (省略可能)\)** セクションで、次の手順を実行します。

a. **[Key]\(キー\)** テキストボックスに、キー名を入力します。例:Azureadtest。
b. **[Value (optional)]\(値 (省略可能)\)** テキストボックスに、`accountname-aws-admin` の形式を使用してキー値を入力します。 アカウント名はすべて小文字にする必要があります。
    c. **[次へ: 確認]** をクリックします。
1. **[Review]** ダイアログで、次の手順を実行します。
![シングル サインオンの構成の確認][34]
a. **[ロール名]** テキストボックスに、`accountname-aws-admin` のパターンで値を入力します。
b. **[ロールの説明]** テキストボックスに、ロール名に使用したものと同じ値を入力します。
c. **[Create Role]** をクリックします。
    d. 必要な数のロールを作成し、それらを ID プロバイダーにマップします。
> [!NOTE]
> 同様に、添付するポリシーが異なる accountname-finance-admin、accountname-read-only-user、accountname-devops-user、accountname-tpm-user などの残りのロールも作成します。 また、これらのロール ポリシーは、AWS アカウントごとの要件に応じて後から変更することも可能ですが、AWS アカウント全体で各ロールに対して同じポリシーを維持することをお勧めします。
1. 以下に強調表示されているように、その AWS アカウントのアカウント ID を EC2 プロパティまたは IAM ダッシュボードから書き留めておいてください。

1. ここで [Azure portal](https://portal.azure.com/) にサインインし、 **[グループ]** に移動します。
1. 前に作成した IAM ロールと同じ名前で新しいグループを作成し、これらの新しいグループの**オブジェクト ID** を書き留めます。

1. 現在の AWS アカウントからサインアウトし、Azure AD によるシングル サインオンを構成する他のアカウントでログインします。
1. アカウントですべてのロールが作成されると、それらのアカウントの **[Roles]** 一覧にそれらが表示されます。

1. すべてのアカウントのすべてのロールのロール ARN と信頼済みエンティティを取得する必要があります。それらは、Azure AD アプリケーションと手動でマッピングするために必要です。
1. ロールをクリックして、 **[Role ARN]** と **[Trusted Entities]** の値をコピーします。 Azure AD で作成する必要があるすべてのロールにこれらの値が必要です。

1. すべてのアカウントのすべてのロールに対して上記の手順を実行し、**ロール ARN,信頼済みエンティティ**の形式でメモ帳に記録します。
1. 別のウィンドウで [Microsoft Graph Explorer](https://developer.microsoft.com/graph/graph-explorer) を開きます。
a. テナントのグローバル管理者/共同管理者の資格情報を使用して、Microsoft Graph Explorer サイトにサインインします。
b. ロールを作成するための十分なアクセス許可が必要です。 **[アクセス許可の変更]** をクリックして、必要なアクセス許可を取得します。

c. 一覧から次のアクセス許可を選択して (まだない場合)、[アクセス許可の変更] をクリックします。

d. ここで、再度ログインし、同意を受け入れるよう求められます。 同意を受け入れると、Microsoft Graph Explorer に再度ログインされます。
e. バージョン ドロップダウンを**ベータ**に変更します。 テナントからすべてのサービス プリンシパルを取得するには、次のクエリを使用します。
`https://graph.microsoft.com/beta/servicePrincipals`
複数のディレクトリを使用している場合、次のパターンを使用できます。プライマリ ドメインは `https://graph.microsoft.com/beta/contoso.com/servicePrincipals` にあります。

f. 取得したサービス プリンシパルの一覧から、変更する必要があるものを取得します。 Ctrl キーを押しながら F キーを押して、一覧に示されたすべての ServicePrincipals からアプリケーションを検索することもできます。 Azure AD プロパティ ページからコピーした**サービス プリンシパル オブジェクト ID** を使用して、次のクエリを使用し、対応するサービス プリンシパルを取得することができます。
`https://graph.microsoft.com/beta/servicePrincipals/<objectID>`

g. サービス プリンシパル オブジェクトから appRoles プロパティを抽出します。

h. ここで、アプリケーションの新しいロールを生成する必要があります。
i. 以下の JSON は、appRoles オブジェクトの例です。 同様のオブジェクトを作成して、アプリケーションに必要なロールを追加します。
```
{
"appRoles": [
{
"allowedMemberTypes": [
"User"
],
"description": "msiam_access",
"displayName": "msiam_access",
"id": "7dfd756e-8c27-4472-b2b7-38c17fc5de5e",
"isEnabled": true,
"origin": "Application",
"value": null
},
{
"allowedMemberTypes": [
"User"
],
"description": "Admin,WAAD",
"displayName": "Admin,WAAD",
"id": "4aacf5a4-f38b-4861-b909-bae023e88dde",
"isEnabled": true,
"origin": "ServicePrincipal",
"value": "arn:aws:iam::12345:role/Admin,arn:aws:iam::12345:saml-provider/WAAD"
},
{
"allowedMemberTypes": [
"User"
],
"description": "Auditors,WAAD",
"displayName": "Auditors,WAAD",
"id": "bcad6926-67ec-445a-80f8-578032504c09",
"isEnabled": true,
"origin": "ServicePrincipal",
"value": "arn:aws:iam::12345:role/Auditors,arn:aws:iam::12345:saml-provider/WAAD"
} ]
}
```
> [!Note]
> パッチ操作では、**msiam_access** の後にのみ、新しいロールを追加できます。 また、組織のニーズごとに必要な数のロールを追加することができます。 Azure AD では、SAML 応答の要求値として、これらのロールの**値**を送信します。
j. Microsoft Graph Explorer に戻り、メソッドを **GET** から **PATCH** に変更します。 上の例に示されているように appRoles プロパティを更新して、必要なロールを持つようにサービス プリンシパル オブジェクトを修正します。 **[クエリの実行]** をクリックして、パッチ操作を実行します。 成功のメッセージで、Amazon Web Services アプリケーションのロールの作成を確認します。

1. より多くのロールでサービス プリンシパルを修正したら、対応するロールにユーザー/グループを割り当てることができます。 これは、ポータルに移動し、Amazon Web Services アプリケーションに移動することで実行できます。 上部の **[ユーザーとグループ]** タブをクリックします。
1. グループで特定のロールを割り当てることができるように、すべての AWS ロール用に新しいグループを作成することをお勧めします。 これは 1 つのグループが 1 つのロールに対応する 1 対 1 のマッピングであることに注意してください。 そのグループに属するメンバーを追加できます。
1. グループが作成されたら、グループを選択し、アプリケーションに割り当てます。

> [!Note]
> グループを割り当てるときに、入れ子になったグループはサポートされていません。
1. グループにロールを割り当てるには、ロールを選択し、ページの下部にある **[割り当て]** ボタンをクリックします。

> [!Note]
> 新しいロールを表示するには、Azure Portal でセッションを更新する必要があることに注意してください。
### <a name="test-single-sign-on"></a>シングル サインオンのテスト
このセクションでは、アクセス パネルを使用して Azure AD のシングル サインオン構成をテストします。
アクセス パネルで [Amazon Web Services (AWS)] タイルをクリックすると、Amazon Web Services (AWS) アプリケーション ページが表示され、ロールを選択するオプションが表示されます。

SAML 応答を確認し、クレームとして渡されるロールを参照することもできます。

アクセス パネルの詳細については、[アクセス パネルの概要](../active-directory-saas-access-panel-introduction.md)に関する記事を参照してください。
## <a name="additional-resources"></a>その他のリソース
* [MS Graph API を使用してプロビジョニングを構成する方法](https://docs.microsoft.com/azure/active-directory/manage-apps/application-provisioning-configure-api)
* [SaaS アプリと Azure Active Directory を統合する方法に関するチュートリアルの一覧](tutorial-list.md)
* [Azure Active Directory のアプリケーション アクセスとシングル サインオンとは](../manage-apps/what-is-single-sign-on.md)
* [Microsoft Cloud App Security におけるセッション制御とは](https://docs.microsoft.com/cloud-app-security/proxy-intro-aad)
* [高度な可視性と制御によって Amazon Web Services (AWS) を保護する方法](https://docs.microsoft.com/cloud-app-security/protect-aws)
<!--Image references-->
[11]: ./media/aws-multi-accounts-tutorial/ic795031.png
[12]: ./media/aws-multi-accounts-tutorial/ic795032.png
[13]: ./media/aws-multi-accounts-tutorial/ic795033.png
[14]: ./media/aws-multi-accounts-tutorial/ic795034.png
[15]: ./media/aws-multi-accounts-tutorial/ic795035.png
[16]: ./media/aws-multi-accounts-tutorial/ic795022.png
[17]: ./media/aws-multi-accounts-tutorial/ic795023.png
[18]: ./media/aws-multi-accounts-tutorial/ic795024.png
[19]: ./media/aws-multi-accounts-tutorial/ic795025.png
[32]: ./media/aws-multi-accounts-tutorial/ic7950251.png
[33]: ./media/aws-multi-accounts-tutorial/ic7950252.png
[35]: ./media/aws-multi-accounts-tutorial/tutorial-amazonwebservices-provisioning.png
[34]: ./media/aws-multi-accounts-tutorial/config3.png
[36]: ./media/aws-multi-accounts-tutorial/tutorial-amazonwebservices-securitycredentials.png
[37]: ./media/aws-multi-accounts-tutorial/tutorial-amazonwebservices-securitycredentials-continue.png
[38]: ./media/aws-multi-accounts-tutorial/tutorial-amazonwebservices-createnewaccesskey.png
[39]: ./media/aws-multi-accounts-tutorial/tutorial-amazonwebservices-provisioning-automatic.png
[40]: ./media/aws-multi-accounts-tutorial/tutorial-amazonwebservices-provisioning-testconnection.png
[41]: ./media/aws-multi-accounts-tutorial/ | {
"pile_set_name": "Github"
} |
T0 segment 0 9 Elizabeth
T1 segment 10 15 Price
T2 segment 16 21 Foley
T3 segment 22 24 is
T4 segment 25 27 an
T5 segment 28 36 American
T6 segment 37 42 legal
T7 segment 43 51 theorist
T8 segment 52 55 who
T9 segment 56 62 writes
T10 segment 63 66 and
T11 segment 67 75 comments
T12 segment 76 78 in
T13 segment 79 82 the
T14 segment 83 89 fields
T15 segment 90 92 of
T16 segment 93 107 constitutional
T17 segment 108 111 law
T18 segment 111 112 ,
T19 segment 113 122 bioethics
T20 segment 122 123 ,
T21 segment 124 127 and
T22 segment 128 134 health
T23 segment 135 139 care
T24 segment 140 143 law
T25 segment 144 145 .
R0 s_p Arg1:T2 Arg2:T3
R1 compound Arg1:T2 Arg2:T0
R2 compound Arg1:T2 Arg2:T1
R3 p_o Arg1:T3 Arg2:T7
R4 is-specialized-by Arg1:T7 Arg2:T5
R5 s_p Arg1:T8 Arg2:T9
R6 p_c Arg1:T9 Arg2:T12
R7 c_co Arg1:T12 Arg2:T14
R8 is-specialized-by Arg1:T14 Arg2:T15
R9 c_co Arg1:T15 Arg2:T17
R10 is-specialized-by Arg1:T17 Arg2:T16
R11 compound Arg1:T24 Arg2:T22
R12 compound Arg1:T24 Arg2:T23
| {
"pile_set_name": "Github"
} |
// Copyright (c) DotSpatial Team. All rights reserved.
// Licensed under the MIT license. See License.txt file in the project root for full license information.
using System.ComponentModel.Composition;
using System.Windows.Forms;
namespace DemoMap
{
/// <summary>
/// This is the main window of the DemoMap program.
/// </summary>
public partial class MainForm : Form
{
    // Exported so a MEF composition container can inject this form as the
    // application "Shell" into components that import that contract.
    [Export("Shell", typeof(ContainerControl))]
    private static ContainerControl shell;

    /// <summary>
    /// Initializes a new instance of the <see cref="MainForm"/> class.
    /// </summary>
    public MainForm()
    {
        InitializeComponent();

        // Do not load extensions while hosted in the WinForms designer.
        if (DesignMode) return;

        shell = this;

        // NOTE(review): appManager is declared in the designer-generated
        // partial class; presumably a DotSpatial AppManager -- verify there.
        appManager.LoadExtensions();
    }
}
} | {
"pile_set_name": "Github"
} |
<!--
/**
* Copyright © Magento, Inc. All rights reserved.
* See COPYING.txt for license details.
*/
-->
<!-- Display container used to specify URL options (for example: open in a new tab) -->
<div class="admin__field admin__field-option url-input-setting" visible="visible" click="checkboxClick">
<input type="checkbox"
class="admin__control-checkbox"
ko-checked="settingValue"
disable="disabled"
ko-value="settingValue"
attr="id: uid, name: inputName"/>
<label class="admin__field-label" text="settingLabel" attr="for: uid"/>
</div>
| {
"pile_set_name": "Github"
} |
e3a1d8d82f79a08d3e921099bb6b9e7f4bb530c2
| {
"pile_set_name": "Github"
} |
import time,glob,re,sys,logging,os,tempfile
import numpy as np
import tensorflow as tf
from scipy import spatial
from settings import AWS,INDEX_PATH,CONFIG_PATH,DATA_PATH
try:
from settings import DEMO
except ImportError:
DEMO = None
pass
from tensorflow.python.platform import gfile
from nearpy import Engine
from nearpy.hashes import RandomBinaryProjections
# Log to a file so long-running extraction jobs can be inspected after the fact.
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                    datefmt='%m-%d %H:%M',
                    filename='logs/worker.log',
                    filemode='a')

# Length of the feature vectors stored in the index.
DIMENSIONS = 2048
# Bits per random-projection hash; more bits -> smaller LSH buckets.
PROJECTIONBITS = 16

# Approximate nearest-neighbour engine (LSH). Three independent random
# projections with fixed seeds keep the hash layout reproducible across runs.
ENGINE = Engine(DIMENSIONS, lshashes=[RandomBinaryProjections('rbp', PROJECTIONBITS,rand_seed=2611),
                                      RandomBinaryProjections('rbp', PROJECTIONBITS,rand_seed=261),
                                      RandomBinaryProjections('rbp', PROJECTIONBITS,rand_seed=26)])
class NodeLookup(object):
    """Maps integer class ids produced by the Inception graph to
    human-readable label strings."""

    def __init__(self):
        # Both data files ship with the classic TF "classify_image" assets.
        label_lookup_path = CONFIG_PATH+'/data/imagenet_2012_challenge_label_map_proto.pbtxt'
        uid_lookup_path = CONFIG_PATH+'/data/imagenet_synset_to_human_label_map.txt'
        self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

    def load(self, label_lookup_path, uid_lookup_path):
        """Build the {node_id: human string} table.

        Two indirections: the synset file maps WordNet uids to names, and
        the label-map proto maps the network's integer ids to those uids.
        """
        proto_as_ascii_lines = gfile.GFile(uid_lookup_path).readlines()
        uid_to_human = {}
        # Captures the uid token and the remainder of the line (the name).
        p = re.compile(r'[n\d]*[ \S,]*')
        for line in proto_as_ascii_lines:
            parsed_items = p.findall(line)
            uid = parsed_items[0]
            human_string = parsed_items[2]
            uid_to_human[uid] = human_string
        node_id_to_uid = {}
        proto_as_ascii = gfile.GFile(label_lookup_path).readlines()
        for line in proto_as_ascii:
            if line.startswith(' target_class:'):
                target_class = int(line.split(': ')[1])
            if line.startswith(' target_class_string:'):
                target_class_string = line.split(': ')[1]
                # Strip the surrounding quotes and the trailing newline.
                node_id_to_uid[target_class] = target_class_string[1:-2]
        node_id_to_name = {}
        for key, val in node_id_to_uid.items():
            if val not in uid_to_human:
                # NOTE(review): fatal() only logs, it does not exit; a missing
                # uid would still raise KeyError just below -- confirm the two
                # data files are always consistent.
                tf.logging.fatal('Failed to locate: %s', val)
            name = uid_to_human[val]
            node_id_to_name[key] = name
        return node_id_to_name

    def id_to_string(self, node_id):
        """Return the human-readable label for node_id, or '' if unknown."""
        if node_id not in self.node_lookup:
            return ''
        return self.node_lookup[node_id]
def load_network(png=False):
    """Import the serialized Inception graph into the default TF graph.

    Args:
        png: when True, remap the graph's JPEG-decode op so raw PNG bytes
            can be fed instead.

    Returns:
        The PNG-bytes placeholder when png is True, otherwise None.
    """
    with gfile.FastGFile(CONFIG_PATH+'/data/network.pb', 'rb') as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        if png:
            png_data = tf.placeholder(tf.string, shape=[])
            decoded_png = tf.image.decode_png(png_data, channels=3)
            # Feed decoded PNG pixels where the graph expects decoded JPEGs.
            _ = tf.import_graph_def(graph_def, name='incept',input_map={'DecodeJpeg': decoded_png})
            return png_data
        else:
            _ = tf.import_graph_def(graph_def, name='incept')
def load_index():
    """Load every cached feature matrix from INDEX_PATH into memory.

    Side effect: each vector is also registered with the module-level LSH
    ENGINE under its global row index, enabling nearest_fast() afterwards.

    Returns:
        (index, files): one stacked numpy array of feature vectors, and a
        dict mapping each row's global index to its image file name.

    Raises:
        ValueError: from np.concatenate if no index file could be loaded.
    """
    index, files, findex = [], {}, 0
    # print() call form (not the Python-2 statement) keeps this runnable
    # under both interpreters.
    print("Using index path : {}".format(INDEX_PATH + "*.npy"))
    for fname in glob.glob(INDEX_PATH + "*.npy"):
        logging.info("Starting {}".format(fname))
        try:
            t = np.load(fname)
            if max(t.shape) > 0:
                index.append(t)
            else:
                raise ValueError
        except Exception:
            logging.error("Could not load {}".format(fname))
        else:
            # The ".files" sidecar lists one image path per feature row;
            # open() (py2/py3) replaces the removed file() builtin, and the
            # context manager guarantees the handle is closed.
            with open(fname.replace(".feats_pool3.npy", ".files")) as fh:
                for i, f in enumerate(fh.readlines()):
                    files[findex] = f.strip()
                    ENGINE.store_vector(index[-1][i, :], "{}".format(findex))
                    findex += 1
            logging.info("Loaded {}".format(fname))
    index = np.concatenate(index)
    return index, files
def nearest(query_vector, index, files, n=12):
    """Exact nearest-neighbour search by brute-force Euclidean distance.

    Args:
        query_vector: 1-D feature vector of length index.shape[1].
        index: 2-D array, one feature vector per row.
        files: dict mapping row number -> file name.
        n: number of neighbours to return.

    Returns:
        File names of the n rows closest to query_vector, nearest first.
    """
    query_vector = query_vector[np.newaxis, :]
    logging.info("started query")
    # Distances are computed in fixed-size chunks to bound peak memory on a
    # large index; slicing replaces the original per-element Python copy
    # loop (and the Python-2-only xrange) with vectorized work.
    chunk_size = 50000
    dist = []
    for start in range(0, index.shape[0], chunk_size):
        dist.append(spatial.distance.cdist(query_vector, index[start:start + chunk_size]))
    dist = np.hstack(dist)
    ranked = np.squeeze(dist.argsort())
    logging.info("query finished")
    return [files[k] for k in ranked[:n]]
def nearest_fast(query_vector, index, files, n=12):
    """Approximate nearest neighbours via the module-level LSH ENGINE.

    The index argument is unused; vectors must have been registered through
    ENGINE.store_vector() (see load_index). Each neighbour tuple unpacks as
    (vector, key, distance), where key is the global row index.
    """
    return [files[int(k)] for v,k,d in ENGINE.neighbours(query_vector)[:n]]
def get_batch(path, batch_size=1000):
    """Yield dicts of {file name: raw image bytes} read from a directory.

    Args:
        path: directory containing images.
        batch_size: maximum number of images per yielded dict.

    Yields:
        Dicts with up to batch_size entries; the final dict holds the
        remainder and may be empty.
    """
    path += "/*"
    image_data = {}
    logging.info("starting with path {}".format(path))
    for i, fname in enumerate(glob.glob(path)):
        try:
            image_data[fname] = gfile.FastGFile(fname, 'rb').read()
        except Exception:
            # Unreadable entries (subdirectories, permission errors) are
            # skipped; the narrowed handler no longer swallows
            # KeyboardInterrupt/SystemExit like the original bare except.
            logging.info("failed to load {}".format(fname))
        if (i + 1) % batch_size == 0:
            logging.info("Loaded {}, with {} images".format(i, len(image_data)))
            yield image_data
            image_data = {}
    yield image_data
def store_index(features, files, count, index_dir):
    """Persist one batch of features plus the matching file-name sidecar.

    Writes <count>.feats_pool3.npy (binary numpy array) and <count>.files
    (one image path per line) -- the pair consumed later by load_index().

    Args:
        features: sequence of feature vectors (stacked via np.array).
        files: sequence of file names, parallel to features.
        count: batch number used in both file names.
        index_dir: destination directory.
    """
    feat_fname = "{}/{}.feats_pool3.npy".format(index_dir, count)
    files_fname = "{}/{}.files".format(index_dir, count)
    logging.info("storing in {}".format(index_dir))
    # np.save emits binary data, so the handle must be opened in 'wb'
    # (text mode fails under Python 3 and corrupts output on Windows).
    with open(feat_fname, 'wb') as feats:
        np.save(feats, np.array(features))
    with open(files_fname, 'w') as filelist:
        filelist.write("\n".join(files))
def extract_features(image_data, sess):
    """Run the graph's 'pool_3' layer over a batch of images.

    Args:
        image_data: dict of {file name: raw JPEG bytes}.
        sess: TF session whose graph was populated by load_network().

    Returns:
        (features, files): parallel lists of squeezed feature vectors and
        the file names that produced them; images that fail to process are
        skipped with an error log entry.
    """
    pool3 = sess.graph.get_tensor_by_name('incept/pool_3:0')
    features = []
    files = []
    # .items() instead of the Python-2-only .iteritems() keeps the worker
    # runnable under both interpreters.
    for fname, data in image_data.items():
        try:
            pool3_features = sess.run(pool3, {'incept/DecodeJpeg/contents:0': data})
            features.append(np.squeeze(pool3_features))
            files.append(fname)
        except Exception:
            # Narrowed from a bare except so interrupts still propagate.
            logging.error("error while processing fname {}".format(fname))
    return features, files
def download(filename):
    """Fetch an example image into appcode/static/examples/.

    In DEMO mode the image is pulled from the requester-pays S3 bucket via
    the aws CLI; otherwise it is copied from the local DATA_PATH.

    SECURITY NOTE(review): filename is interpolated into a shell command
    executed by os.system; a name containing shell metacharacters would be
    executed. Only call this with trusted names, or switch to
    subprocess.run with a list argument.
    """
    if DEMO:
        command = 'aws s3api get-object --bucket aub3visualsearch --key "{}/{}" --request-payer requester appcode/static/examples/{}'.format(DEMO,filename,filename)
        logging.info(command)
        os.system(command)
    else:
        os.system("cp {}/{} appcode/static/examples/{}".format(DATA_PATH,filename.split("/")[-1],filename.split("/")[-1])) # this needlessly slows down the code, handle it elegantly by using the same directory as static dir in flask.
| {
"pile_set_name": "Github"
} |
/*
* This file is part of ACADO Toolkit.
*
* ACADO Toolkit -- A Toolkit for Automatic Control and Dynamic Optimization.
* Copyright (C) 2008-2014 by Boris Houska, Hans Joachim Ferreau,
* Milan Vukov, Rien Quirynen, KU Leuven.
* Developed within the Optimization in Engineering Center (OPTEC)
* under supervision of Moritz Diehl. All rights reserved.
*
* ACADO Toolkit is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* ACADO Toolkit is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with ACADO Toolkit; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
/**
* \file include/acado/variables_grid/matrix_variables_grid.ipp
* \author Hans Joachim Ferreau, Boris Houska
* \date 10.06.2008
*/
//
// mvukov:
// Disable stupid warning on line 417
//
#ifdef WIN32
#pragma warning( disable : 4390 )
#endif
//
// PUBLIC MEMBER FUNCTIONS:
//
BEGIN_NAMESPACE_ACADO
inline double& MatrixVariablesGrid::operator()( uint pointIdx, uint rowIdx, uint colIdx )
{
ASSERT( values != 0 );
ASSERT( pointIdx < getNumPoints( ) );
return values[pointIdx]->operator()( rowIdx,colIdx );
}
inline double MatrixVariablesGrid::operator()( uint pointIdx, uint rowIdx, uint colIdx ) const
{
ASSERT( values != 0 );
ASSERT( pointIdx < getNumPoints( ) );
return values[pointIdx]->operator()( rowIdx,colIdx );
}
inline MatrixVariablesGrid MatrixVariablesGrid::operator()( const uint rowIdx
) const
{
ASSERT( values != 0 );
if ( rowIdx >= getNumRows( ) )
{
ACADOERROR( RET_INVALID_ARGUMENTS );
return MatrixVariablesGrid();
}
Grid tmpGrid;
getGrid( tmpGrid );
MatrixVariablesGrid rowGrid( 1,1,tmpGrid,getType( ) );
for( uint run1 = 0; run1 < getNumPoints(); run1++ )
rowGrid( run1,0,0 ) = values[run1]->operator()( rowIdx,0 );
return rowGrid;
}
inline MatrixVariablesGrid MatrixVariablesGrid::operator[]( const uint pointIdx
) const
{
ASSERT( values != 0 );
if ( pointIdx >= getNumPoints( ) )
{
ACADOERROR( RET_INVALID_ARGUMENTS );
return MatrixVariablesGrid();
}
MatrixVariablesGrid pointGrid;
pointGrid.addMatrix( *(values[pointIdx]),getTime( pointIdx ) );
return pointGrid;
}
inline MatrixVariablesGrid MatrixVariablesGrid::operator+( const MatrixVariablesGrid& arg
) const
{
ASSERT( getNumPoints( ) == arg.getNumPoints( ) );
MatrixVariablesGrid tmp( *this );
for( uint i=0; i<getNumPoints( ); ++i )
*(tmp.values[i]) += *(arg.values[i]);
return tmp;
}
inline MatrixVariablesGrid& MatrixVariablesGrid::operator+=( const MatrixVariablesGrid& arg
)
{
ASSERT( getNumPoints( ) == arg.getNumPoints( ) );
for( uint i=0; i<getNumPoints( ); ++i )
*(values[i]) += *(arg.values[i]);
return *this;
}
inline MatrixVariablesGrid MatrixVariablesGrid::operator-( const MatrixVariablesGrid& arg
) const
{
ASSERT( getNumPoints( ) == arg.getNumPoints( ) );
MatrixVariablesGrid tmp( *this );
for( uint i=0; i<getNumPoints( ); ++i )
*(tmp.values[i]) -= *(arg.values[i]);
return tmp;
}
inline MatrixVariablesGrid& MatrixVariablesGrid::operator-=( const MatrixVariablesGrid& arg
)
{
ASSERT( getNumPoints( ) == arg.getNumPoints( ) );
for( uint i=0; i<getNumPoints( ); ++i )
*(values[i]) -= *(arg.values[i]);
return *this;
}
// Total number of scalar values stored in the grid, i.e. the sum of the
// dimensions of the matrices at all grid points.
inline uint MatrixVariablesGrid::getDim( ) const
{
	uint total = 0;
	const uint nP = getNumPoints( );

	for ( uint idx = 0; idx < nP; ++idx )
		total += values[idx]->getDim( );

	return total;
}
inline uint MatrixVariablesGrid::getNumRows( ) const
{
if ( values == 0 )
return 0;
return getNumRows( 0 );
}
inline uint MatrixVariablesGrid::getNumCols( ) const
{
if ( values == 0 )
return 0;
return getNumCols( 0 );
}
inline uint MatrixVariablesGrid::getNumValues( ) const
{
if ( values == 0 )
return 0;
return getNumValues( 0 );
}
inline uint MatrixVariablesGrid::getNumRows( uint pointIdx
) const
{
if( values == 0 )
return 0;
ASSERT( pointIdx < getNumPoints( ) );
return values[pointIdx]->getNumRows( );
}
inline uint MatrixVariablesGrid::getNumCols( uint pointIdx
) const
{
if( values == 0 )
return 0;
ASSERT( pointIdx < getNumPoints( ) );
return values[pointIdx]->getNumCols( );
}
inline uint MatrixVariablesGrid::getNumValues( uint pointIdx
) const
{
if( values == 0 )
return 0;
ASSERT( pointIdx < getNumPoints( ) );
return values[pointIdx]->getDim( );
}
inline VariableType MatrixVariablesGrid::getType( ) const
{
if ( getNumPoints() == 0 )
return VT_UNKNOWN;
return getType( 0 );
}
inline returnValue MatrixVariablesGrid::setType( VariableType _type
)
{
for( uint i=0; i<getNumPoints( ); ++i )
setType( i,_type );
return SUCCESSFUL_RETURN;
}
inline VariableType MatrixVariablesGrid::getType( uint pointIdx
) const
{
if ( pointIdx >= getNumPoints( ) )
return VT_UNKNOWN;
return values[pointIdx]->getType( );
}
inline returnValue MatrixVariablesGrid::setType( uint pointIdx,
VariableType _type
)
{
if ( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return values[pointIdx]->setType( _type );
}
inline returnValue MatrixVariablesGrid::getName( uint pointIdx,
uint idx,
char* const _name
) const
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return values[pointIdx]->getName( idx,_name );
}
inline returnValue MatrixVariablesGrid::setName( uint pointIdx,
uint idx,
const char* const _name
)
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return values[pointIdx]->setName( idx,_name );
}
inline returnValue MatrixVariablesGrid::getUnit( uint pointIdx,
uint idx,
char* const _unit
) const
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return values[pointIdx]->getUnit( idx,_unit );
}
inline returnValue MatrixVariablesGrid::setUnit( uint pointIdx,
uint idx,
const char* const _unit
)
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return values[pointIdx]->setUnit( idx,_unit );
}
inline DVector MatrixVariablesGrid::getScaling( uint pointIdx
) const
{
if( pointIdx >= getNumPoints( ) )
return emptyVector;
return values[pointIdx]->getScaling( );
}
inline returnValue MatrixVariablesGrid::setScaling( uint pointIdx,
const DVector& _scaling
)
{
if ( pointIdx >= getNumPoints( ) )
return ACADOERROR(RET_INDEX_OUT_OF_BOUNDS);
return values[pointIdx]->setScaling( _scaling );
}
inline double MatrixVariablesGrid::getScaling( uint pointIdx,
uint valueIdx
) const
{
if( pointIdx >= getNumPoints( ) )
return -1.0;
return values[pointIdx]->getScaling( valueIdx );
}
inline returnValue MatrixVariablesGrid::setScaling( uint pointIdx,
uint valueIdx,
double _scaling
)
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
if( valueIdx >= values[pointIdx]->getDim( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
values[pointIdx]->setScaling( valueIdx,_scaling );
return SUCCESSFUL_RETURN;
}
inline DVector MatrixVariablesGrid::getLowerBounds( uint pointIdx
) const
{
if( pointIdx >= getNumPoints( ) )
return emptyVector;
return values[pointIdx]->getLowerBounds( );
}
inline returnValue MatrixVariablesGrid::setLowerBounds( uint pointIdx,
const DVector& _lb
)
{
if( pointIdx >= nPoints )
return ACADOERROR(RET_INDEX_OUT_OF_BOUNDS);
return values[pointIdx]->setLowerBounds( _lb );
}
inline double MatrixVariablesGrid::getLowerBound( uint pointIdx,
uint valueIdx
) const
{
if( pointIdx >= getNumPoints( ) )
return -INFTY;
return values[pointIdx]->getLowerBound( valueIdx );
}
inline returnValue MatrixVariablesGrid::setLowerBound( uint pointIdx,
uint valueIdx,
double _lb
)
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
if( valueIdx >= values[pointIdx]->getDim( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
values[pointIdx]->setLowerBound( valueIdx,_lb );
return SUCCESSFUL_RETURN;
}
inline DVector MatrixVariablesGrid::getUpperBounds( uint pointIdx
) const
{
if( pointIdx >= getNumPoints( ) )
return emptyVector;
return values[pointIdx]->getUpperBounds( );
}
inline returnValue MatrixVariablesGrid::setUpperBounds( uint pointIdx,
const DVector& _ub
)
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR(RET_INDEX_OUT_OF_BOUNDS);
return values[pointIdx]->setUpperBounds( _ub );
}
inline double MatrixVariablesGrid::getUpperBound( uint pointIdx,
uint valueIdx
) const
{
if( pointIdx >= getNumPoints( ) )
return INFTY;
return values[pointIdx]->getUpperBound( valueIdx );
}
inline returnValue MatrixVariablesGrid::setUpperBound( uint pointIdx,
uint valueIdx,
double _ub
)
{
if( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
if( valueIdx >= values[pointIdx]->getDim( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
values[pointIdx]->setUpperBound( valueIdx,_ub );
return SUCCESSFUL_RETURN;
}
inline BooleanType MatrixVariablesGrid::getAutoInit( uint pointIdx
) const
{
if ( pointIdx >= getNumPoints( ) )
{
ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return defaultAutoInit;
}
return values[pointIdx]->getAutoInit( );
}
inline returnValue MatrixVariablesGrid::setAutoInit( uint pointIdx,
BooleanType _autoInit
)
{
if ( pointIdx >= getNumPoints( ) )
return ACADOERROR( RET_INDEX_OUT_OF_BOUNDS );
return values[pointIdx]->setAutoInit( _autoInit );
}
inline returnValue MatrixVariablesGrid::disableAutoInit( )
{
for( uint i=0; i<getNumPoints( ); ++i )
values[i]->setAutoInit( BT_FALSE );
return SUCCESSFUL_RETURN;
}
inline returnValue MatrixVariablesGrid::enableAutoInit( )
{
for( uint i=0; i<getNumPoints( ); ++i )
values[i]->setAutoInit( BT_TRUE );
return SUCCESSFUL_RETURN;
}
inline BooleanType MatrixVariablesGrid::hasNames( ) const
{
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->hasNames( ) == BT_TRUE )
return BT_TRUE;
}
return BT_FALSE;
}
inline BooleanType MatrixVariablesGrid::hasUnits( ) const
{
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->hasUnits( ) == BT_TRUE )
return BT_TRUE;
}
return BT_FALSE;
}
inline BooleanType MatrixVariablesGrid::hasScaling( ) const
{
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->hasScaling( ) == BT_TRUE )
return BT_TRUE;
}
return BT_FALSE;
}
inline BooleanType MatrixVariablesGrid::hasLowerBounds( ) const
{
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->hasLowerBounds( ) == BT_TRUE )
return BT_TRUE;
}
return BT_FALSE;
}
inline BooleanType MatrixVariablesGrid::hasUpperBounds( ) const
{
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->hasUpperBounds( ) == BT_TRUE )
return BT_TRUE;
}
return BT_FALSE;
}
inline double MatrixVariablesGrid::getMax( ) const
{
double maxValue = -INFTY;
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->getMax( ) > maxValue )
maxValue = values[i]->getMax( );
}
return maxValue;
}
inline double MatrixVariablesGrid::getMin( ) const
{
double minValue = INFTY;
for( uint i=0; i<getNumPoints( ); ++i )
{
if ( values[i]->getMin( ) < minValue )
minValue = values[i]->getMin( );
}
return minValue;
}
inline double MatrixVariablesGrid::getMean( ) const
{
double meanValue = 0.0;
if ( getNumPoints( ) == 0 )
return meanValue;
for( uint i=0; i<getNumPoints( ); ++i )
meanValue += values[i]->getMean( );
return ( meanValue / (double)getNumPoints( ) );
}
inline returnValue MatrixVariablesGrid::setZero( )
{
for( uint i=0; i<getNumPoints( ); ++i )
values[i]->setZero( );
return SUCCESSFUL_RETURN;
}
inline returnValue MatrixVariablesGrid::setAll( double _value
)
{
for( uint i = 0; i<getNumPoints( ); ++i )
values[i]->setAll( _value );
return SUCCESSFUL_RETURN;
}
inline returnValue MatrixVariablesGrid::getGrid( Grid& _grid
) const
{
return _grid.init( getNumPoints(),times );
}
inline Grid MatrixVariablesGrid::getTimePoints( ) const
{
Grid tmp;
getGrid( tmp );
return tmp;
}
CLOSE_NAMESPACE_ACADO
/*
* end of file
*/
| {
"pile_set_name": "Github"
} |
import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

import router from './router';
import axios from 'axios';

// Attach AJAX headers to every request, including the CSRF token that the
// server-rendered page exposes on `window` -- presumably injected by the
// backend template; verify against the page layout.
axios.defaults.headers.common = {
    'X-Requested-With': 'XMLHttpRequest',
    'X-CSRF-TOKEN': window.csrf_token
};

export default new Vuex.Store({
    state: {
        saved: [],              // ids of listings the user has saved
        listing_summaries: [],  // summary payloads for list views
        listings: [],           // fully loaded listing objects, cached by id
        auth: false             // whether a user is currently authenticated
    },
    mutations: {
        // Add the id to `saved` if absent, remove it if present.
        toggleSaved(state, id) {
            let index = state.saved.findIndex(saved => saved === id);
            if (index === -1) {
                state.saved.push(id);
            } else {
                state.saved.splice(index, 1);
            }
        },
        // Merge an API payload into the store; `route` decides whether the
        // payload carries one full listing or the list of summaries.
        addData(state, { route, data }) {
            if (data.auth) {
                state.auth = data.auth;
            }
            if (route === 'listing') {
                state.listings.push(data.listing);
            } else {
                state.listing_summaries = data.listings;
            }
        }
    },
    getters: {
        // Look up a cached listing by id (loose == so string route params
        // match numeric ids).
        getListing(state) {
            return id => state.listings.find(listing => id == listing.id);
        }
    },
    actions: {
        // Persist the toggle server-side before mutating local state;
        // unauthenticated users are sent to the login page instead.
        toggleSaved({ commit, state }, id) {
            if (state.auth) {
                axios.post('/api/user/toggle_saved', { id }).then(
                    () => commit('toggleSaved', id)
                );
            } else {
                router.push('/login');
            }
        }
    },
});
| {
"pile_set_name": "Github"
} |
{{- if eq .Values.serviceType "LoadBalancer" }}
1. Get Traefik's load balancer IP/hostname:
NOTE: It may take a few minutes for this to become available.
You can watch the status by running:
$ kubectl get svc {{ template "traefik.fullname" . }} --namespace {{ .Release.Namespace }} -w
Once 'EXTERNAL-IP' is no longer '<pending>':
$ kubectl describe svc {{ template "traefik.fullname" . }} --namespace {{ .Release.Namespace }} | grep Ingress | awk '{print $3}'
2. Configure DNS records corresponding to Kubernetes ingress resources to point to the load balancer IP/hostname found in step 1
{{- end }}
{{- if eq .Values.serviceType "NodePort" }}
{{- if (and (not (empty .Values.service.nodePorts.https)) (not (empty .Values.service.nodePorts.http)))}}
1. Traefik is listening on the following ports on the host machine:
http - {{ .Values.service.nodePorts.http }}
https - {{ .Values.service.nodePorts.https }}
{{- else }}
1. Traefik has been started. You can find out the port numbers being used by traefik by running:
$ kubectl describe svc {{ template "traefik.fullname" . }} --namespace {{ .Release.Namespace }}
{{- end }}
2. Configure DNS records corresponding to Kubernetes ingress resources to point to the NODE_IP/NODE_HOST
{{- end }}
| {
"pile_set_name": "Github"
} |
<?php
namespace Doctrine\Tests\ORM\Functional\Ticket;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Tests\Models\CMS\CmsArticle;
use Doctrine\Tests\Models\CMS\CmsUser;
require_once __DIR__ . '/../../../TestInit.php';
/**
 * Regression tests for DDC-1040: the same named or positional query
 * parameter may be reused several times within a single DQL statement.
 *
 * @group DDC-1040
 */
class DDC1040Test extends \Doctrine\Tests\OrmFunctionalTestCase
{
    public function setUp()
    {
        // The tests below persist CMS fixture entities (CmsUser, CmsArticle).
        $this->useModelSet('cms');
        parent::setUp();
    }

    /**
     * A named parameter (:author) bound once must satisfy every occurrence
     * of that placeholder in the WHERE clause.
     */
    public function testReuseNamedEntityParameter()
    {
        $user = new CmsUser();
        $user->name = "John Galt";
        $user->username = "jgalt";
        $user->status = "inactive";

        $article = new CmsArticle();
        $article->topic = "This is John Galt speaking!";
        $article->text = "Yadda Yadda!";
        $article->setAuthor($user);

        $this->_em->persist($user);
        $this->_em->persist($article);
        $this->_em->flush();

        // Single use of the named parameter.
        $dql = "SELECT a FROM Doctrine\Tests\Models\CMS\CmsArticle a WHERE a.user = :author";
        $this->_em->createQuery($dql)
                  ->setParameter('author', $user)
                  ->getResult();

        // The same named parameter used twice in one statement.
        $dql = "SELECT a FROM Doctrine\Tests\Models\CMS\CmsArticle a WHERE a.user = :author AND a.user = :author";
        $this->_em->createQuery($dql)
                  ->setParameter('author', $user)
                  ->getResult();

        // Mixed with a second named parameter; the fetched article must be
        // the very instance persisted above (identity-map hit).
        $dql = "SELECT a FROM Doctrine\Tests\Models\CMS\CmsArticle a WHERE a.topic = :topic AND a.user = :author AND a.user = :author";
        $farticle = $this->_em->createQuery($dql)
                              ->setParameter('author', $user)
                              ->setParameter('topic', 'This is John Galt speaking!')
                              ->getSingleResult();

        $this->assertSame($article, $farticle);
    }

    /**
     * Positional parameters (?1, ?2, ?3) likewise bind independently even
     * when two of them carry the same entity value.
     */
    public function testUseMultiplePositionalParameters()
    {
        $user = new CmsUser();
        $user->name = "John Galt";
        $user->username = "jgalt";
        $user->status = "inactive";

        $article = new CmsArticle();
        $article->topic = "This is John Galt speaking!";
        $article->text = "Yadda Yadda!";
        $article->setAuthor($user);

        $this->_em->persist($user);
        $this->_em->persist($article);
        $this->_em->flush();

        $dql = "SELECT a FROM Doctrine\Tests\Models\CMS\CmsArticle a WHERE a.topic = ?1 AND a.user = ?2 AND a.user = ?3";
        $farticle = $this->_em->createQuery($dql)
                              ->setParameter(1, 'This is John Galt speaking!')
                              ->setParameter(2, $user)
                              ->setParameter(3, $user)
                              ->getSingleResult();

        $this->assertSame($article, $farticle);
    }
}
} | {
"pile_set_name": "Github"
} |
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
// a passthrough stream.
// basically just the most minimal sort of Transform stream.
// Every written chunk gets output as-is.

module.exports = PassThrough;

var Transform = require('./_stream_transform');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(PassThrough, Transform);

/**
 * Minimal Transform stream whose _transform emits every chunk unchanged.
 * Callable with or without `new`; options are forwarded to Transform.
 */
function PassThrough(options) {
  if (!(this instanceof PassThrough))
    return new PassThrough(options);

  Transform.call(this, options);
}

// Identity transform: hand the chunk straight back to the machinery.
PassThrough.prototype._transform = function(chunk, encoding, cb) {
  cb(null, chunk);
};
| {
"pile_set_name": "Github"
} |
(* This program is free software; you can redistribute it and/or *)
(* modify it under the terms of the GNU Lesser General Public License *)
(* as published by the Free Software Foundation; either version 2.1 *)
(* of the License, or (at your option) any later version. *)
(* *)
(* This program is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU General Public License for more details. *)
(* *)
(* You should have received a copy of the GNU Lesser General Public *)
(* License along with this program; if not, write to the Free *)
(* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA *)
(* 02110-1301 USA *)
(*****************************************************************************)
(* Projet Formel - Calculus of Inductive Constructions V5.10 *)
(*****************************************************************************)
(* *)
(* HomFoncteur Hom(a,-) is Complete *)
(* *)
(*****************************************************************************)
(* *)
(* A. SAIBI May 95 *)
(* *)
(*****************************************************************************)
Require Export HomFunctor.
Require Export Pres_Limits.
Set Implicit Arguments.
Unset Strict Implicit.
(* exemple : les Hom-Foncteurs preservent toutes les Limites *)
Section funset_pres.
Variables (J C : Category) (F : Functor J C) (l : Limit F) (c : C).
Definition FS_lim := c --> Lim l.
Definition FS_lcone := Limiting_cone l o_C FunSET c.
Section fs_diese.
(* soit X un setoid, soit tau : cone(X,FoH) (i.e. Delta(X) -> FoH)
pour tout x:|X|, nous construisons un
Tx : cone(c,F) (i.e. Delta(c) -> F)
telque Tx(i) = tau(i)(x) *)
Variables (X : SET) (tau : Cone X (F o_F FunSET c)).
Section fs_diese_mor_def.
Variable x : X.
Definition FS_cone_tau (i : J) := tau i x.
(* revient a` prouver: tau(j)(x) = tau(i)(x) o Fg *)
(* s'obtient apre`s simplification de Eq_cone sur tau *)
Lemma FS_cone_tau_cone_law : Cone_law FS_cone_tau.
Proof.
unfold Cone_law, FS_cone_tau in |- *.
intros i j g.
apply (EqC tau g x).
Qed.
Definition FS_cone := Build_Cone FS_cone_tau_cone_law.
(* soit X un setoid, soit tau : cone(X,FoH) (i.e. Delta(X) -> FoH)
construisons tau# : X -> C(c,(Lim F))
de'finissons tau#(x) par (Tx)# car l est une (Limit F) *)
Definition FS_diese_mor := Lim_diese l FS_cone.
End fs_diese_mor_def.
Lemma FS_diese_map_law : Map_law FS_diese_mor.
Proof.
unfold Map_law in |- *; intros x y H.
unfold FS_diese_mor in |- *.
apply (Ldiese_map l).
simpl in |- *; unfold Equal_NT in |- *; intro i; simpl in |- *.
unfold FS_cone_tau in |- *.
apply Pres1; assumption.
Qed.
Canonical Structure FS_diese := Build_Map FS_diese_map_law.
End fs_diese.
(* ve'rifions que le # qu'on vient de construire peut former
un (co)universal arrow *)
(* revient a` prouver:
qq X un setoid, qq tau : cone(X,FoH) (i.e. Delta(X) -> FoH)
qq i : J, qq x : |X|.
tau(i)(x) = tau#(x) o mu(i)
i.e. Tx(i) = (Tx)# o mu(i) qui n'est rien d'autre que eq_Ucone
pour le cone Tx *)
Lemma FS_limit1 : Limit_law1 FS_lcone FS_diese.
Proof.
unfold Limit_law1, Limit_eq in |- *; simpl in |- *.
intros X tau i.
unfold Ext in |- *; intro x.
unfold Comp_cone in |- *; simpl in |- *.
unfold FunSET_mor1, FS_diese_mor in |- *; simpl in |- *.
apply (Prf_limit1 l (FS_cone tau x) i).
Qed.
(* revient a` prouver que:
f(x) o mu(i) = (Tx)i =>
f(x) = (Tx)#
correspondant a` l'axiome d'universalite' de l *)
Lemma FS_limit2 : Limit_law2 FS_lcone FS_diese.
Proof.
unfold Limit_law2 in |- *; intros X tau f.
unfold Limit_eq, FS_diese in |- *; simpl in |- *.
unfold Ext in |- *; simpl in |- *.
unfold FunSET_mor1, FS_diese_mor in |- *.
intros H x.
unfold FunSET_ob in |- *.
apply (Prf_limit2 l).
unfold Limit_eq in |- *; simpl in |- *.
intro i; apply (H i x).
Qed.
Lemma FunSET_Preserves_l : Preserves_1limit (FunSET c) l.
Proof.
exact (Build_IsLimit FS_limit1 FS_limit2).
Defined.
End funset_pres.
(* on en d'eduit que C(c,-) Preserve toutes les Limites *)
Lemma FunSET_continuous :
forall (C : Category) (c : C), Continuous (FunSET c).
Proof.
unfold Continuous, Preserves_limits in |- *; intros C c J F l.
exact (FunSET_Preserves_l l c).
Defined.
| {
"pile_set_name": "Github"
} |
<html>
<link type="text/css" rel="stylesheet" media="all" href="import.css">
<link type="text/css" rel="stylesheet" media="all" href="basic.css">
<head>
</head>
<body>
<div id="import-test">id import-test</div>
<div id="import">id import-test</div>
<div class="imported inline">class imported inline</div>
<div id="mixin">class mixin</div>
<div class="a">class a</div>
<div class="b">class b</div>
<div class="b">class b<div class="c">class c</div></div>
<div class="a">class a<div class="d">class d</div></div>
<div class="extend">class extend<div class="c">class c</div></div>
</body>
</html> | {
"pile_set_name": "Github"
} |
package com.oklib.view_lib;
import com.oklib.R;
import com.oklib.base.BaseAppActivity;
/**
 * Date: 2017/8/27
 * Author: LanTian
 * Description: sample activity demonstrating the Mi-style (Xiaomi) clock view.
 */
public class MiClockViewActivity extends BaseAppActivity {

    /** Supplies the layout resource that hosts the clock view. */
    @Override
    protected int initLayoutId() {
        return R.layout.activity_miclock_view;
    }

    /** No extra view setup is needed; the layout is self-contained. */
    @Override
    protected void initView() {
    }
}
| {
"pile_set_name": "Github"
} |
// Copyright 2012-present Oliver Eilhard. All rights reserved.
// Use of this source code is governed by a MIT-license.
// See http://olivere.mit-license.org/license.txt for details.
package elastic
import (
"context"
"fmt"
"net/url"
"strings"
"github.com/olivere/elastic/uritemplates"
)
// IndicesGetSettingsService allows to retrieve settings of one
// or more indices.
//
// See https://www.elastic.co/guide/en/elasticsearch/reference/6.7/indices-get-settings.html
// for more details.
type IndicesGetSettingsService struct {
	client            *Client  // client used to perform the HTTP request
	pretty            bool     // ask the server for an indented JSON response
	index             []string // indices to query; empty is treated as "_all"
	name              []string // settings names to include; empty means all settings
	ignoreUnavailable *bool    // skip unavailable (missing or closed) indices when set
	allowNoIndices    *bool    // tolerate wildcards resolving to no indices when set
	expandWildcards   string   // index states wildcards expand to: open, closed, none, all
	flatSettings      *bool    // return settings in flat (dotted-key) format when set
	local             *bool    // read local node state instead of asking the master when set
}
// NewIndicesGetSettingsService creates a new IndicesGetSettingsService.
func NewIndicesGetSettingsService(client *Client) *IndicesGetSettingsService {
	svc := &IndicesGetSettingsService{client: client}
	svc.index = make([]string, 0)
	svc.name = make([]string, 0)
	return svc
}
// Index is a list of index names; use `_all` or empty string to perform
// the operation on all indices.
func (s *IndicesGetSettingsService) Index(indices ...string) *IndicesGetSettingsService {
	for _, idx := range indices {
		s.index = append(s.index, idx)
	}
	return s
}
// Name are the names of the settings that should be included.
func (s *IndicesGetSettingsService) Name(name ...string) *IndicesGetSettingsService {
	for _, n := range name {
		s.name = append(s.name, n)
	}
	return s
}
// IgnoreUnavailable indicates whether specified concrete indices should
// be ignored when unavailable (missing or closed).
func (s *IndicesGetSettingsService) IgnoreUnavailable(ignoreUnavailable bool) *IndicesGetSettingsService {
	value := ignoreUnavailable
	s.ignoreUnavailable = &value
	return s
}
// AllowNoIndices indicates whether to ignore if a wildcard indices
// expression resolves into no concrete indices.
// (This includes `_all` string or when no indices have been specified).
func (s *IndicesGetSettingsService) AllowNoIndices(allowNoIndices bool) *IndicesGetSettingsService {
	value := allowNoIndices
	s.allowNoIndices = &value
	return s
}
// ExpandWildcards indicates whether to expand wildcard expression
// to concrete indices that are open, closed or both.
// Options: open, closed, none, all. Default: open,closed.
func (s *IndicesGetSettingsService) ExpandWildcards(expandWildcards string) *IndicesGetSettingsService {
	s.expandWildcards = expandWildcards
	return s
}
// FlatSettings indicates whether to return settings in flat format (default: false).
func (s *IndicesGetSettingsService) FlatSettings(flatSettings bool) *IndicesGetSettingsService {
	value := flatSettings
	s.flatSettings = &value
	return s
}
// Local indicates whether to return local information, do not retrieve
// the state from master node (default: false).
func (s *IndicesGetSettingsService) Local(local bool) *IndicesGetSettingsService {
	value := local
	s.local = &value
	return s
}
// Pretty indicates that the JSON response be indented and human readable.
func (s *IndicesGetSettingsService) Pretty(pretty bool) *IndicesGetSettingsService {
	s.pretty = pretty
	return s
}
// buildURL builds the URL for the operation.
func (s *IndicesGetSettingsService) buildURL() (string, url.Values, error) {
	// Default to "_all" when no index was specified.
	index := s.index
	if len(index) == 0 {
		index = []string{"_all"}
	}

	var (
		path string
		err  error
	)
	if len(s.name) == 0 {
		path, err = uritemplates.Expand("/{index}/_settings", map[string]string{
			"index": strings.Join(index, ","),
		})
	} else {
		path, err = uritemplates.Expand("/{index}/_settings/{name}", map[string]string{
			"index": strings.Join(index, ","),
			"name":  strings.Join(s.name, ","),
		})
	}
	if err != nil {
		return "", url.Values{}, err
	}

	// Assemble the query string parameters; pointer fields are only
	// emitted when they were explicitly set by the caller.
	params := url.Values{}
	if s.pretty {
		params.Set("pretty", "true")
	}
	if v := s.ignoreUnavailable; v != nil {
		params.Set("ignore_unavailable", fmt.Sprint(*v))
	}
	if v := s.allowNoIndices; v != nil {
		params.Set("allow_no_indices", fmt.Sprint(*v))
	}
	if s.expandWildcards != "" {
		params.Set("expand_wildcards", s.expandWildcards)
	}
	if v := s.flatSettings; v != nil {
		params.Set("flat_settings", fmt.Sprint(*v))
	}
	if v := s.local; v != nil {
		params.Set("local", fmt.Sprint(*v))
	}
	return path, params, nil
}
// Validate checks if the operation is valid.
// The operation has no required parameters, so it always succeeds.
func (s *IndicesGetSettingsService) Validate() error {
	return nil
}
// Do executes the operation and returns the settings keyed by index name.
func (s *IndicesGetSettingsService) Do(ctx context.Context) (map[string]*IndicesGetSettingsResponse, error) {
	// Pre-condition check (currently always succeeds, kept for symmetry
	// with the other services).
	if err := s.Validate(); err != nil {
		return nil, err
	}

	// Build the request URL and query parameters.
	path, params, err := s.buildURL()
	if err != nil {
		return nil, err
	}

	// Perform the HTTP GET.
	res, err := s.client.PerformRequest(ctx, PerformRequestOptions{
		Method: "GET",
		Path:   path,
		Params: params,
	})
	if err != nil {
		return nil, err
	}

	// Decode the response body into a map of per-index settings.
	var ret map[string]*IndicesGetSettingsResponse
	if err := s.client.decoder.Decode(res.Body, &ret); err != nil {
		return nil, err
	}
	return ret, nil
}
// IndicesGetSettingsResponse is the response of IndicesGetSettingsService.Do.
type IndicesGetSettingsResponse struct {
	// Settings holds the raw settings document for one index.
	Settings map[string]interface{} `json:"settings"`
}
| {
"pile_set_name": "Github"
} |
{
"schema": true,
"ns": 5,
"namespaces": [
{
"prefix": "xml",
"uri": "http://www.w3.org/XML/1998/namespace"
},
{
"prefix": "xmlns",
"uri": "http://www.w3.org/2000/xmlns/"
},
{
"prefix": "html",
"uri": "http://www.w3.org/1999/xhtml/"
},
{
"prefix": "xs",
"uri": "http://www.w3.org/2001/XMLSchema"
},
{
"prefix": "dc",
"uri": "http://purl.org/dc/elements/1.1/"
},
{
"prefix": "",
"uri": "http://example.com"
}
],
"elements": [
{
"values": [
],
"type": null,
"ns": 5,
"name": "wrapper",
"element": true,
"attributes": [
],
"elements": [
{
"values": [
],
"type": "string",
"ns": 5,
"name": "name",
"element": true,
"attributes": [
],
"elements": [
]
},
{
"values": [
],
"type": "string",
"ns": 5,
"name": "uri",
"element": true,
"attributes": [
],
"elements": [
]
},
{
"values": [
],
"type": null,
"ns": 5,
"name": "dc",
"element": true,
"attributes": [
],
"elements": [
{
"values": [
],
"type": "mixed",
"ns": 4,
"element": true,
"attributes": [
{
"values": [
""
],
"type": "language",
"ns": 0,
"attribute": true,
"name": "lang"
}
],
"elements": [
],
"name": "title",
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"name": "creator",
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"name": "subject",
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"name": "description",
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"name": "publisher",
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"name": "contributor",
"any": false
}
],
"choices": [
{
"elements": [
"4:title",
"4:creator",
"4:subject",
"4:description",
"4:publisher",
"4:contributor"
],
"minOccurs": null,
"maxOccurs": 1
}
]
}
],
"choices": [
{
"elements": [
"5:name",
"5:uri",
"5:dc"
],
"minOccurs": null,
"maxOccurs": 1
}
]
},
{
"values": [
],
"type": null,
"ns": 5,
"name": "root",
"element": true,
"attributes": [
],
"elements": [
{
"values": [
],
"type": "string",
"ns": 5,
"name": "name",
"element": true,
"attributes": [
],
"elements": [
]
},
{
"values": [
],
"type": "string",
"ns": 5,
"name": "uri",
"element": true,
"attributes": [
],
"elements": [
]
},
{
"values": [
],
"type": null,
"ns": 5,
"name": "dc",
"element": true,
"attributes": [
],
"elements": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0]"
},
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][1]"
},
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][2]"
},
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][3]"
},
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][4]"
},
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][5]"
}
],
"choices": {
"$ref": "$[\"elements\"][0][\"elements\"][2][\"choices\"]"
}
}
],
"choices": [
{
"elements": [
"5:name",
"5:uri",
"5:dc"
],
"minOccurs": null,
"maxOccurs": 1
}
]
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "title",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "creator",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "subject",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "description",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "publisher",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "contributor",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "date",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "type",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "format",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "identifier",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "source",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "language",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "relation",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "coverage",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
},
{
"values": [
],
"type": "mixed",
"ns": 4,
"name": "rights",
"element": true,
"attributes": [
{
"$ref": "$[\"elements\"][0][\"elements\"][2][\"elements\"][0][\"attributes\"][0]"
}
],
"elements": [
],
"any": false
}
]
} | {
"pile_set_name": "Github"
} |
<?Lassoscript
// Last modified 8/31/09 by ECL, Landmann InterActive
/*
Tagdocs;
{Tagname= OutputSecondContent }
{Description= Outputs the already-built $SecondContent }
{Author= Eric Landmann }
{AuthorEmail= [email protected] }
{ModifiedBy= }
{ModifiedByEmail= }
{Date= 3/18/08 }
{Usage= OutputSecondContent (outputs without the container HTML)
OutputSecondContent, -container='Y' (outputs with the container HTML) }
{ExpectedResults= Outputs the HTML in $SecondContent }
{Dependencies= $SecondContent must be defined, otherwise there will be no output }
{DevelNotes= $SecondContent is created in detail.inc.
This tag is merely a convenience to make it less awkward for a designer }
{ChangeNotes= 8/31/09
Integrated into itPage codebase. }
/Tagdocs;
*/
If: !(Lasso_TagExists:'OutputSecondContent');
	Define_Tag:'OutputSecondContent',
		-Description='Outputs $SecondContent',
		-Optional='container',
		-Type='string';
		Local('Result') = null;
		// Check if $SecondContent is defined
		If: (Var_Defined:'SecondContent');
			#Result += '<!-- OutputSecondContent -->\n';
			// BUG FIX: the optional parameter declared above is 'container'
			// (see the Tagdocs usage: OutputSecondContent, -container='Y'),
			// but the original tested the never-declared local 'initialvalue',
			// so the wrapper div was never emitted.
			Local_Defined('container') ? #Result += '\t<div class="SecondContentPanel">\n';
			#Result += $SecondContent;
			Local_Defined('container') ? #Result += '\t</div>\n';
		Else;
			If: $svDebug == 'Y';
				#Result += '<strong>OutputSecondContent: </strong>$SecondContent is undefined<br>\n';
			/If;
		/If;
		Return: (Encode_Smart:(#Result));
	/Define_Tag;
	Log_Critical: 'Custom Tag Loaded - OutputSecondContent';
/If;
?> | {
"pile_set_name": "Github"
} |
/*global ODSA */
// Written by Jesse Terrazas and Bailey Spell
// Step-by-step simulation of a blockchain
$(document).ready(function() {
  "use strict";
  var av_name = "llistBlockchain";
  // interpret resolves the language-specific slide messages (keys sc1, sc2, ...).
  var interpret = ODSA.UTILS.loadConfig({av_name: av_name}).interpreter;
  var av = new JSAV(av_name);
  // Set up the list with the genesis block (block 0, all-zero hash).
  var l = av.ds.list({nodegap: 30, top: 35, left: 257});
  l.addFirst("<br/> Block Number: 0<hr/><br/><hr/><br/><hr/><br/><br/><hr/> Hash: <br/> 0000000000000000");
  l.layout();
  // Slide 1
  av.umsg(interpret("sc1"));
  av.displayInit();
  // Slide 2: append block 1 with nonce and hash still unknown.
  av.umsg(interpret("sc2"));
  l.addLast("<br/> Block Number: 1 <hr/> Nonce: ? <hr/> Data: First Block <hr/> Previous: <br/> 0000000000000000 <hr/> Hash: <br/> ?");
  //$(".jsavvaluelabel").html("<br>");
  l.layout();
  av.step();
  // Slide 3: a nonce has been mined for block 1.
  av.umsg(interpret("sc3"));
  l.get(1).value("<br/> Block Number: 1 <hr/> Nonce: 124950 <hr/> Data: First Block <hr/> Previous: <br/> 0000000000000000 <hr/> Hash: <br/> ?");
  l.layout();
  av.step();
  // Slide 4: block 1 now shows its (truncated) hash.
  av.umsg(interpret("sc4"));
  l.get(1).value("<br/> Block Number: 1 <hr/> Nonce: 124950 <hr/> Data: First Block <hr/> Previous: <br/> 0000000000000000 <hr/> Hash: <br/> 0000f2387d33d6fb");
  // Hash : 0000f2387d33d6fbcbff5bde1388a93f0af5e202fd6b8e2939440024567f84c0
  l.layout();
  av.step();
  // Slide 5: append block 2, chained to block 1's hash.
  av.umsg(interpret("sc5"));
  l.addLast("<br/> Block Number: 2 <hr/> Nonce: 3278 <hr/> Data: Second Block <hr/> Previous: <br/> 0000f2387d33d6fb <hr/> Hash: <br/> 0000cfce292fd014");
  // Hash: 0000cfce292fd014ce887bbd65663c309ed7f0cda186dd070c225fd5ef34654c
  l.layout();
  av.step();
  // Slide 6: append block 3. NOTE(review): this slide reuses the "sc5"
  // message — presumably intentional for a repeated "add a block" step;
  // confirm against the interpreter's message file.
  av.umsg(interpret("sc5"));
  l.addLast("<br/> Block Number: 3 <hr/> Nonce: 842 <hr/> Data: Third Block <hr/> Previous: <br/> 0000cfce292fd014 <hr/> Hash: <br/> 0000b33dc608b628");
  // Hash: 0000b33dc608b62838f4b882f40114fcf067aeae6a3f74b104920031b256fad8
  l.layout();
  av.step();
  // Slide 7: append block 4 (final slide; the original comment repeated
  // "Slide 6" here by mistake).
  av.umsg(interpret("sc6"));
  l.addLast("<br/> Block Number: 4 <hr/> Nonce: 182684 <hr/> Data: Fourth Block <hr/> Previous: <br/> 0000b33dc608b628 <hr/> Hash: <br/> 0000571ca76adedb");
  // Hash: 0000571ca76adedbb3e436a5122d033ca4b4627731a3be55ba913ff382aedebb
  l.layout();
  av.recorded();
});
| {
"pile_set_name": "Github"
} |
# ST Microelectronics STM32WB MCU series
# Copyright (c) 2019 Linaro Limited
# SPDX-License-Identifier: Apache-2.0
config SOC_SERIES_STM32WBX
bool "STM32WBx Series MCU"
select ARM
select CPU_CORTEX_M4
select CPU_CORTEX_M_HAS_DWT
select CPU_HAS_FPU
select SOC_FAMILY_STM32
select HAS_STM32CUBE
select CPU_HAS_ARM_MPU
select HAS_SWO
select HAS_SYS_POWER_STATE_SLEEP_1
select HAS_SYS_POWER_STATE_SLEEP_2
select HAS_SYS_POWER_STATE_SLEEP_3
help
Enable support for STM32WB MCU series
| {
"pile_set_name": "Github"
} |
{
"AD": "Andora",
"AE": "United Arab Emirates",
"AF": "Afuganistani",
"AG": "Antigua ne Barbuda",
"AI": "Anguila",
"AL": "Albania",
"AM": "Armenia",
"AO": "Angola",
"AQ": "Antarctica",
"AR": "Ajentina",
"AS": "Samoa ye Amerika",
"AT": "Austria",
"AU": "Australia",
"AW": "Arubha",
"AX": "Zvitsuwa zveAland",
"AZ": "Azabajani",
"BA": "Boznia ne Herzegovina",
"BB": "Barbados",
"BD": "Bangladeshi",
"BE": "Beljium",
"BF": "Bukinafaso",
"BG": "Bulgaria",
"BH": "Bahareni",
"BI": "Burundi",
"BJ": "Benini",
"BL": "Saint Barthélemy",
"BM": "Bermuda",
"BN": "Burunei",
"BO": "Bolivia",
"BQ": "Bonaire, Sint Eustatius and Saba",
"BR": "Brazil",
"BS": "Bahama",
"BT": "Bhutani",
"BV": "Zvitsuwa zveBouvet",
"BW": "Botswana",
"BY": "Belarusi",
"BZ": "Belize",
"CA": "Kanada",
"CC": "Zvitsuwa zveCocos (Keeling)",
"CD": "Democratic Republic of the Congo",
"CF": "Central African Republic",
"CG": "Kongo",
"CH": "Switzerland",
"CI": "Ivory Coast",
"CK": "Zvitsuwa zveCook",
"CL": "Chile",
"CM": "Kameruni",
"CN": "China",
"CO": "Kolombia",
"CR": "Kostarika",
"CU": "Cuba",
"CV": "Zvitsuwa zveCape Verde",
"CW": "Curacao",
"CX": "Zvitsuwa zveChristmas",
"CY": "Cyprus",
"CZ": "Czech Republic",
"DE": "Germany",
"DJ": "Djibouti",
"DK": "Denmark",
"DM": "Dominica",
"DO": "Dominican Republic",
"DZ": "Aljeria",
"EC": "Ecuador",
"EE": "Estonia",
"EG": "Egypt",
"EH": "Western Sahara",
"ER": "Eritrea",
"ES": "Spain",
"ET": "Etiopia",
"FI": "Finland",
"FJ": "Fiji",
"FK": "Zvitsuwa zveFalklands",
"FM": "Micronesia",
"FO": "Zvitsuwa zveDaroe",
"FR": "France",
"GA": "Gabon",
"GB": "United Kingdom",
"GD": "Grenada",
"GE": "Georgia",
"GF": "French Guiana",
"GG": "Guernsey",
"GH": "Ghana",
"GI": "Gibraltar",
"GL": "Greenland",
"GM": "Gambia",
"GN": "Guinea",
"GP": "Guadeloupe",
"GQ": "Equatorial Guinea",
"GR": "Greece",
"GS": "South Georgia and the South Sandwich Islands",
"GT": "Guatemala",
"GU": "Guam",
"GW": "Guinea-Bissau",
"GY": "Guyana",
"HK": "Hong Kong",
"HM": "Zvitsuwa zveHeard and McDonald",
"HN": "Honduras",
"HR": "Korasia",
"HT": "Haiti",
"HU": "Hungary",
"ID": "Indonesia",
"IE": "Ireland",
"IL": "Izuraeri",
"IM": "Isle of Man",
"IN": "India",
"IO": "British Indian Ocean Territory",
"IQ": "Iraq",
"IR": "Iran",
"IS": "Iceland",
"IT": "Italy",
"JE": "Jersey",
"JM": "Jamaica",
"JO": "Jordan",
"JP": "Japan",
"KE": "Kenya",
"KG": "Kyrgyzstan",
"KH": "Kambodia",
"KI": "Kiribati",
"KM": "Komoro",
"KN": "Saint Kitts and Nevis",
"KP": "Korea, North",
"KR": "Korea, South",
"KW": "Kuwait",
"KY": "Zvitsuwa zveCayman",
"KZ": "Kazakhstan",
"LA": "Laos",
"LB": "Lebanon",
"LC": "Saint Lucia",
"LI": "Liechtenstein",
"LK": "Sri Lanka",
"LR": "Liberia",
"LS": "Lesotho",
"LT": "Lithuania",
"LU": "Luxembourg",
"LV": "Latvia",
"LY": "Libya",
"MA": "Morocco",
"MC": "Monaco",
"MD": "Moldova",
"ME": "Montenegro",
"MF": "Saint Martin (French Part)",
"MG": "Madagascar",
"MH": "Zvitsuwa zveMarshall",
"MK": "Macedonia",
"ML": "Mali",
"MM": "Myanmar",
"MN": "Mongolia",
"MO": "Macao",
"MP": "Zvitsuwa zvekumaodzanyemba eMariana",
"MQ": "Martinique",
"MR": "Mauritania",
"MS": "Montserrat",
"MT": "Malta",
"MU": "Mauritius",
"MV": "Maldives",
"MW": "Malawi",
"MX": "Mexico",
"MY": "Malaysia",
"MZ": "Mozambique",
"NA": "Namibia",
"NC": "New Caledonia",
"NE": "Niger",
"NF": "Chitsuwa cheNorfolk",
"NG": "Nigeria",
"NI": "Nicaragua",
"NL": "Netherlands",
"NO": "Norway",
"NP": "Nepal",
"NR": "Nauru",
"NU": "Niue",
"NZ": "New Zealand",
"OM": "Oman",
"PA": "Panama",
"PE": "Peru",
"PF": "French Polynesia",
"PG": "Papua New Guinea",
"PH": "Philippines",
"PK": "Pakistan",
"PL": "Poland",
"PM": "Saint Pierre and Miquelon",
"PN": "Pitcairn",
"PR": "Puerto Rico",
"PS": "Palestinian, State of",
"PT": "Portugal",
"PW": "Palau",
"PY": "Paraguay",
"QA": "Qatar",
"RE": "Réunion",
"RO": "Romania",
"RS": "Serbia",
"RU": "Russia",
"RW": "Rwanda",
"SA": "Saudi Arabia",
"SB": "Zvitsuwa zvaSolomon",
"SC": "Seychelles",
"SD": "Sudan",
"SE": "Sweden",
"SG": "Singapore",
"SH": "Saint Helena",
"SI": "Slovenia",
"SJ": "Svalbard and Jan Mayen",
"SK": "Slovakia",
"SL": "Sierra Leone",
"SM": "San Marino",
"SN": "Senegal",
"SO": "Somalia",
"SR": "Suriname",
"SS": "South Sudan",
"ST": "São Tomé and Príncipe",
"SV": "El Salvador",
"SX": "Sint Maarten (Dutch Part)",
"SY": "Syria",
"SZ": "Swaziland",
"TC": "Zvitsuwa zveTurk neCaico",
"TD": "Chadi",
"TF": "French Southern Territories",
"TG": "Togo",
"TH": "Thailand",
"TJ": "Tajikistan",
"TK": "Tokelau",
"TL": "East Timor",
"TM": "Turkmenistan",
"TN": "Tunisia",
"TO": "Tonga",
"TR": "Turkey",
"TT": "Trinidad and Tobago",
"TV": "Tuvalu",
"TW": "Taiwan",
"TZ": "Tanzania",
"UA": "Ukraine",
"UG": "Uganda",
"UM": "United States Minor Outlying Islands",
"US": "Amerika",
"UY": "Uruguay",
"UZ": "Uzbekistan",
"VA": "Vatican State",
"VC": "Saint Vincent and the Grenadines",
"VE": "Venezuela",
"VG": "Zvitsuwa zveHingirandi",
"VI": "Zvitsuwa zveAmerika",
"VN": "Vietnam",
"VU": "Vanuatu",
"WF": "Wallis and Futuna",
"WS": "Samoa",
"YE": "Yemen",
"YT": "Mayotte",
"ZA": "South Africa",
"ZM": "Zambia",
"ZW": "Zimbabwe"
} | {
"pile_set_name": "Github"
} |
///////////////////////////////////////////////////////////////////////////////////////////////////
// OpenGL Image Copyright (c) 2008 - 2011 G-Truc Creation (www.g-truc.net)
///////////////////////////////////////////////////////////////////////////////////////////////////
// Created : 2010-09-08
// Updated : 2010-09-08
// Licence : This source is under MIT License
// File : gli/core/size.hpp
///////////////////////////////////////////////////////////////////////////////////////////////////
#ifndef GLI_CORE_SIZE_INCLUDED
#define GLI_CORE_SIZE_INCLUDED

#include "texture2d.hpp"

namespace gli
{
	// Query a size measure of a single 2D image; SizeType selects which
	// measure is returned (exact semantics are defined in size.inl — confirm there).
	//template <size_type sizeType>
	image2D::size_type size(
		image2D const & Image,
		image2D::size_type const & SizeType);

	// Same query for a whole 2D texture (all mip levels).
	//template <size_type sizeType>
	texture2D::size_type size(
		texture2D const & Texture,
		texture2D::size_type const & SizeType);

}//namespace gli

#include "size.inl"

#endif//GLI_CORE_SIZE_INCLUDED
| {
"pile_set_name": "Github"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.