prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>tree.go<|end_file_name|><|fim▁begin|>package excellent import ( "fmt" "strings" "github.com/nyaruka/goflow/envs" "github.com/nyaruka/goflow/excellent/functions" "github.com/nyaruka/goflow/excellent/operators" "github.com/nyaruka/goflow/excellent/types" ) // Expression is the base interface of all syntax elements type Expression interface { Evaluate(envs.Environment, *Scope) types.XValue String() string } // ContextReference is an identifier which is a function name or root variable in the context type ContextReference struct { name string } func (x *ContextReference) Evaluate(env envs.Environment, scope *Scope) types.XValue { value, exists := scope.Get(x.name) if !exists { return types.NewXErrorf("context has no property '%s'", x.name) } return value } func (x *ContextReference) String() string { return strings.ToLower(x.name) } type DotLookup struct { container Expression<|fim▁hole|> func (x *DotLookup) Evaluate(env envs.Environment, scope *Scope) types.XValue { containerVal := x.container.Evaluate(env, scope) if types.IsXError(containerVal) { return containerVal } return resolveLookup(env, containerVal, types.NewXText(x.lookup), lookupNotationDot) } func (x *DotLookup) String() string { return fmt.Sprintf("%s.%s", x.container.String(), x.lookup) } type ArrayLookup struct { container Expression lookup Expression } func (x *ArrayLookup) Evaluate(env envs.Environment, scope *Scope) types.XValue { containerVal := x.container.Evaluate(env, scope) if types.IsXError(containerVal) { return containerVal } lookupVal := x.lookup.Evaluate(env, scope) if types.IsXError(lookupVal) { return lookupVal } return resolveLookup(env, containerVal, lookupVal, lookupNotationArray) } func (x *ArrayLookup) String() string { return fmt.Sprintf("%s[%s]", x.container.String(), x.lookup.String()) } type FunctionCall struct { function Expression params []Expression } func (x *FunctionCall) Evaluate(env envs.Environment, scope *Scope) types.XValue { funcVal := 
x.function.Evaluate(env, scope) if types.IsXError(funcVal) { return funcVal } asFunction, isFunction := funcVal.(*types.XFunction) if !isFunction { return types.NewXErrorf("%s is not a function", x.function.String()) } params := make([]types.XValue, len(x.params)) for i := range x.params { params[i] = x.params[i].Evaluate(env, scope) } return asFunction.Call(env, params) } func (x *FunctionCall) String() string { params := make([]string, len(x.params)) for i := range x.params { params[i] = x.params[i].String() } return fmt.Sprintf("%s(%s)", x.function.String(), strings.Join(params, ", ")) } type AnonFunction struct { args []string body Expression } func (x *AnonFunction) Evaluate(env envs.Environment, scope *Scope) types.XValue { // create an XFunction which wraps our body expression fn := func(env envs.Environment, args ...types.XValue) types.XValue { // create new context that includes the args argsMap := make(map[string]types.XValue, len(x.args)) for i := range x.args { argsMap[x.args[i]] = args[i] } childScope := NewScope(types.NewXObject(argsMap), scope) return x.body.Evaluate(env, childScope) } return types.NewXFunction("", functions.NumArgsCheck(len(x.args), fn)) } func (x *AnonFunction) String() string { return fmt.Sprintf("(%s) => %s", strings.Join(x.args, ", "), x.body) } type Concatenation struct { exp1 Expression exp2 Expression } func (x *Concatenation) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Concatenate(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *Concatenation) String() string { return fmt.Sprintf("%s & %s", x.exp1.String(), x.exp2.String()) } type Addition struct { exp1 Expression exp2 Expression } func (x *Addition) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Add(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *Addition) String() string { return fmt.Sprintf("%s + %s", x.exp1.String(), x.exp2.String()) } type Subtraction struct 
{ exp1 Expression exp2 Expression } func (x *Subtraction) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Subtract(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *Subtraction) String() string { return fmt.Sprintf("%s - %s", x.exp1.String(), x.exp2.String()) } type Multiplication struct { exp1 Expression exp2 Expression } func (x *Multiplication) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Multiply(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *Multiplication) String() string { return fmt.Sprintf("%s * %s", x.exp1.String(), x.exp2.String()) } type Division struct { exp1 Expression exp2 Expression } func (x *Division) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Divide(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *Division) String() string { return fmt.Sprintf("%s / %s", x.exp1.String(), x.exp2.String()) } type Exponent struct { expression Expression exponent Expression } func (x *Exponent) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Exponent(env, x.expression.Evaluate(env, scope), x.exponent.Evaluate(env, scope)) } func (x *Exponent) String() string { return fmt.Sprintf("%s ^ %s", x.expression.String(), x.exponent.String()) } type Negation struct { exp Expression } func (x *Negation) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Negate(env, x.exp.Evaluate(env, scope)) } func (x *Negation) String() string { return fmt.Sprintf("-%s", x.exp.String()) } type Equality struct { exp1 Expression exp2 Expression } func (x *Equality) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.Equal(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *Equality) String() string { return fmt.Sprintf("%s = %s", x.exp1.String(), x.exp2.String()) } type InEquality struct { exp1 Expression exp2 Expression } 
func (x *InEquality) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.NotEqual(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *InEquality) String() string { return fmt.Sprintf("%s != %s", x.exp1.String(), x.exp2.String()) } type LessThan struct { exp1 Expression exp2 Expression } func (x *LessThan) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.LessThan(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *LessThan) String() string { return fmt.Sprintf("%s < %s", x.exp1.String(), x.exp2.String()) } type LessThanOrEqual struct { exp1 Expression exp2 Expression } func (x *LessThanOrEqual) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.LessThanOrEqual(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *LessThanOrEqual) String() string { return fmt.Sprintf("%s <= %s", x.exp1.String(), x.exp2.String()) } type GreaterThan struct { exp1 Expression exp2 Expression } func (x *GreaterThan) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.GreaterThan(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *GreaterThan) String() string { return fmt.Sprintf("%s > %s", x.exp1.String(), x.exp2.String()) } type GreaterThanOrEqual struct { exp1 Expression exp2 Expression } func (x *GreaterThanOrEqual) Evaluate(env envs.Environment, scope *Scope) types.XValue { return operators.GreaterThanOrEqual(env, x.exp1.Evaluate(env, scope), x.exp2.Evaluate(env, scope)) } func (x *GreaterThanOrEqual) String() string { return fmt.Sprintf("%s >= %s", x.exp1.String(), x.exp2.String()) } type Parentheses struct { exp Expression } func (x *Parentheses) Evaluate(env envs.Environment, scope *Scope) types.XValue { return x.exp.Evaluate(env, scope) } func (x *Parentheses) String() string { return fmt.Sprintf("(%s)", x.exp.String()) } type TextLiteral struct { val types.XText } func (x *TextLiteral) 
Evaluate(env envs.Environment, scope *Scope) types.XValue { return x.val } func (x *TextLiteral) String() string { return x.val.Describe() } // NumberLiteral is a literal number like 123 or 1.5 type NumberLiteral struct { val types.XNumber } func (x *NumberLiteral) Evaluate(env envs.Environment, scope *Scope) types.XValue { return x.val } func (x *NumberLiteral) String() string { return x.val.Describe() } // BooleanLiteral is a literal bool type BooleanLiteral struct { val types.XBoolean } func (x *BooleanLiteral) Evaluate(env envs.Environment, scope *Scope) types.XValue { return x.val } func (x *BooleanLiteral) String() string { return x.val.Describe() } type NullLiteral struct{} func (x *NullLiteral) Evaluate(env envs.Environment, scope *Scope) types.XValue { return nil } func (x *NullLiteral) String() string { return "null" }<|fim▁end|>
lookup string }
<|file_name|>for-loop-refutable-pattern-error-message.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. fn main() { for &1 in [1].iter() {} //~ ERROR refutable pattern in `for` loop binding<|fim▁hole|><|fim▁end|>
}
<|file_name|>var-8-namedtuple-1st.py<|end_file_name|><|fim▁begin|>import bench from ucollections import namedtuple <|fim▁hole|> i = 0 while i < t.num: i += 1 bench.run(test)<|fim▁end|>
T = namedtuple("Tup", ["num", "bar"]) def test(num): t = T(20000000, 0)
<|file_name|>UiSelectorBuilder.java<|end_file_name|><|fim▁begin|>package com.nagopy.android.disablemanager2; import android.os.Build; import com.android.uiautomator.core.UiSelector; @SuppressWarnings("unused") public class UiSelectorBuilder { private UiSelector uiSelector; public UiSelector build() { return uiSelector; } /** * @since API Level 16 */ public UiSelectorBuilder() { uiSelector = new UiSelector(); } /** * @since API Level 16 */ public UiSelectorBuilder text(String text) { uiSelector = uiSelector.text(text); return this; } /** * @since API Level 17 */ public UiSelectorBuilder textMatches(String regex) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { uiSelector = uiSelector.textMatches(regex); } return this; } /** * @since API Level 16 */ public UiSelectorBuilder textStartsWith(String text) { uiSelector = uiSelector.textStartsWith(text); return this; } /** * @since API Level 16 */ public UiSelectorBuilder textContains(String text) { uiSelector = uiSelector.textContains(text); return this; } /** * @since API Level 16 */ public UiSelectorBuilder className(String className) { uiSelector = uiSelector.className(className); return this; } /** * @since API Level 17 */ public UiSelectorBuilder classNameMatches(String regex) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { uiSelector = uiSelector.classNameMatches(regex); } return this; } /** * @since API Level 17 */ public UiSelectorBuilder className(Class<?> type) { uiSelector = uiSelector.className(type.getName()); return this; } /** * @since API Level 16 */ public UiSelectorBuilder description(String desc) { uiSelector = uiSelector.description(desc); return this; } /** * @since API Level 17 */ public UiSelectorBuilder descriptionMatches(String regex) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { uiSelector = uiSelector.descriptionMatches(regex); } return this; } /** * @since API Level 16 */ public UiSelectorBuilder descriptionStartsWith(String desc) 
{ uiSelector = uiSelector.descriptionStartsWith(desc); return this; } /** * @since API Level 16 */ public UiSelectorBuilder descriptionContains(String desc) { uiSelector = uiSelector.descriptionContains(desc); return this; } /** * @since API Level 18 */ public UiSelectorBuilder resourceId(String id) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { uiSelector = uiSelector.resourceId(id); } return this;<|fim▁hole|> * @since API Level 18 */ public UiSelectorBuilder resourceIdMatches(String regex) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2) { uiSelector = uiSelector.resourceIdMatches(regex); } return this; } /** * @since API Level 16 */ public UiSelectorBuilder index(final int index) { uiSelector = uiSelector.index(index); return this; } /** * @since API Level 16 */ public UiSelectorBuilder instance(final int instance) { uiSelector = uiSelector.instance(instance); return this; } /** * @since API Level 16 */ public UiSelectorBuilder enabled(boolean val) { uiSelector = uiSelector.enabled(val); return this; } /** * @since API Level 16 */ public UiSelectorBuilder focused(boolean val) { uiSelector = uiSelector.focused(val); return this; } /** * @since API Level 16 */ public UiSelectorBuilder focusable(boolean val) { uiSelector = uiSelector.focusable(val); return this; } /** * @since API Level 16 */ public UiSelectorBuilder scrollable(boolean val) { uiSelector = uiSelector.scrollable(val); return this; } /** * @since API Level 16 */ public UiSelectorBuilder selected(boolean val) { uiSelector = uiSelector.selected(val); return this; } /** * @since API Level 16 */ public UiSelectorBuilder checked(boolean val) { uiSelector = uiSelector.checked(val); return this; } /** * @since API Level 16 */ public UiSelectorBuilder clickable(boolean val) { uiSelector = uiSelector.clickable(val); return this; } /** * @since API Level 18 */ public UiSelectorBuilder checkable(boolean val) { if (Build.VERSION.SDK_INT >= 
Build.VERSION_CODES.JELLY_BEAN_MR2) { uiSelector = uiSelector.checkable(val); } return this; } /** * @since API Level 17 */ public UiSelectorBuilder longClickable(boolean val) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { uiSelector = uiSelector.longClickable(val); } return this; } /** * @since API Level 16 */ public UiSelectorBuilder childSelector(UiSelector selector) { uiSelector = uiSelector.childSelector(selector); return this; } /** * @since API Level 16 */ public UiSelectorBuilder fromParent(UiSelector selector) { uiSelector = uiSelector.fromParent(selector); return this; } /** * @since API Level 16 */ public UiSelectorBuilder packageName(String name) { uiSelector = uiSelector.packageName(name); return this; } /** * @since API Level 17 */ public UiSelectorBuilder packageNameMatches(String regex) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) { uiSelector = uiSelector.packageNameMatches(regex); } return this; } }<|fim▁end|>
} /**
<|file_name|>DefaultShaders.js<|end_file_name|><|fim▁begin|>/** * Author: thegoldenmule * Date: 3/17/13 */ (function (global) { "use strict"; var colorShaderVS = { name: "color-shader-vs", type: "x-shader/x-vertex", body: "precision highp float;" + "uniform mat4 uProjectionMatrix;" + "uniform mat4 uModelViewMatrix;" + "uniform vec4 uColor;" + "uniform float uDepth;" + "attribute vec2 aPosition;" + "attribute vec4 aColor;" + "varying vec4 vColor;" + "void main(void) {" + // vertex transform "gl_Position = uProjectionMatrix * uModelViewMatrix * vec4(aPosition, uDepth, 1.0);" + // calculate color "vColor = uColor * aColor;" + "}" }; var colorShaderFS = { name: "color-shader-fs", type: "x-shader/x-fragment", body: "precision highp float;" + "varying vec4 vColor;" + "void main(void) {" + "gl_FragColor = vColor;" + "}" }; var textureShaderVS = { name: "texture-shader-vs", type: "x-shader/x-vertex", body: "precision highp float;" + "uniform mat4 uProjectionMatrix;" + "uniform mat4 uModelViewMatrix;" + "uniform vec4 uColor;" + "uniform float uDepth;" + "attribute vec2 aPosition;" + "attribute vec2 aUV;" + "attribute vec4 aColor;" + "varying vec4 vColor;" + "varying vec2 vUV;" + "void main(void) {" + // vertex transform "gl_Position = uProjectionMatrix * uModelViewMatrix * vec4(aPosition, uDepth, 1.0);" + // pass color + uv through "vColor = aColor * uColor;" + "vUV = aUV;" + "}" }; var textureShaderFS = { name: "texture-shader-fs", type: "x-shader/x-fragment", body: "precision highp float;" + "varying vec4 vColor;" + "varying vec2 vUV;" + "uniform sampler2D uMainTextureSampler;" + "void main(void) {" + "gl_FragColor = texture2D(uMainTextureSampler, vUV) * vColor;" + "}" }; var spriteSheetShaderVS = {<|fim▁hole|> "precision highp float;" + "uniform mat4 uProjectionMatrix;" + "uniform mat4 uModelViewMatrix;" + "uniform vec4 uColor;" + "uniform float uDepth;" + "attribute vec2 aPosition;" + "attribute vec2 aUV;" + "attribute vec4 aColor;" + "varying vec4 vColor;" + "varying 
vec4 vVertexColor;" + "varying vec2 vUV;" + "void main(void) {" + // vertex transform "gl_Position = uProjectionMatrix * uModelViewMatrix * vec4(aPosition, uDepth, 1.0);" + // pass color + uv through "vColor = uColor;" + // note that in this shader, color.xy is the previous frame's uvs! "vUV = aUV;" + "vVertexColor = aColor;" + "}" }; var spriteSheetShaderFS = { name: "ss-shader-fs", type: "x-shader/x-fragment", body: "precision highp float;" + "varying vec4 vColor;" + "varying vec4 vVertexColor;" + "varying vec2 vUV;" + "uniform sampler2D uMainTextureSampler;" + "uniform float uFutureBlendScalar;" + "void main(void) {" + "vec4 currentFrame = texture2D(uMainTextureSampler, vUV);" + "vec4 futureFrame = texture2D(uMainTextureSampler, vec2(vVertexColor.xy));" + "gl_FragColor = futureFrame * uFutureBlendScalar + currentFrame * (1.0 - uFutureBlendScalar);" + "}" }; var boundingBoxShaderVS = { name: "bb-shader-vs", type: "x-shader/x-vertex", body: "precision highp float;" + "uniform mat4 uProjectionMatrix;" + "uniform mat4 uModelViewMatrix;" + "attribute vec2 aPosition;" + "attribute vec2 aUV;" + "varying vec2 vUV;" + "void main(void) {" + // vertex transform "gl_Position = uProjectionMatrix * uModelViewMatrix * vec4(aPosition, 0.0, 1.0);" + "vUV = aUV;" + "}" }; var boundingBoxShaderFS = { name: "bb-shader-fs", type: "x-shader/x-fragment", body: "varying vec2 vUV;" + "void main(void) {" + "gl_FragColor = vec4(1.0, 0.0, 0.0, 0.2);" + "}" }; var particleShaderVS = { name: "particle-shader-vs", type: "x-shader/x-vertex", body: "precision highp float;" + "uniform mat4 uProjectionMatrix;" + "uniform mat4 uModelViewMatrix;" + "uniform vec4 uColor;" + "uniform float uDepth;" + "attribute vec2 aPosition;" + "attribute vec2 aUV;" + "attribute vec4 aColor;" + "varying vec4 vColor;" + "varying vec2 vUV;" + "void main(void) {" + // vertex transform "gl_Position = uProjectionMatrix * uModelViewMatrix * vec4(aPosition, uDepth, 1.0);" + // pass color + uv through "vColor = aColor * 
uColor;" + "vUV = aUV;" + "}" }; var particleShaderFS = { name: "particle-shader-fs", type: "x-shader/x-fragment", body: "precision highp float;" + "varying vec4 vColor;" + "varying vec2 vUV;" + "uniform sampler2D uMainTextureSampler;" + "void main(void) {" + "gl_FragColor = texture2D(uMainTextureSampler, vUV) * vColor;" + "}" }; global.__DEFAULT_SHADERS = [ colorShaderVS, colorShaderFS, textureShaderVS, textureShaderFS, spriteSheetShaderFS, spriteSheetShaderVS, boundingBoxShaderVS, boundingBoxShaderFS, particleShaderVS, particleShaderFS ]; })(this);<|fim▁end|>
name: "ss-shader-vs", type: "x-shader/x-vertex", body:
<|file_name|>input.js<|end_file_name|><|fim▁begin|>class A { static get #x() {} get #x() {}<|fim▁hole|><|fim▁end|>
}
<|file_name|>pyedit_assign_params_to_attributes.py<|end_file_name|><|fim▁begin|>"""Assign Params to Attributes by Joel Hedlund <joel.hedlund at gmail.com>. PyDev script for generating python code that assigns method parameter values to attributes of self with the same name. Activates with 'a' by default. Edit global constants ACTIVATION_STRING and WAIT_FOR_ENTER if this does not suit your needs. See docs on the class AssignToAttribsOfSelf for more details. <|fim▁hole|>""" __version__ = "1.0.1" __copyright__ = """Available under the same conditions as PyDev. See PyDev license for details. http://pydev.sourceforge.net """ # Change this if the default does not suit your needs ACTIVATION_STRING = 'a' WAIT_FOR_ENTER = False # For earlier Python versions True, False = 1,0 # Set to True to force Jython script interpreter restart on save events. # Useful for Jython PyDev script development, not useful otherwise. DEBUG = False # This is a magic trick that tells the PyDev Extensions editor about the # namespace provided for pydev scripts: if False: from org.python.pydev.editor import PyEdit #@UnresolvedImport cmd = 'command string' editor = PyEdit assert cmd is not None assert editor is not None if DEBUG and cmd == 'onSave': from org.python.pydev.jython import JythonPlugin #@UnresolvedImport editor.pyEditScripting.interpreter = JythonPlugin.newPythonInterpreter() from org.eclipse.jface.action import Action #@UnresolvedImport #======================================================================================================================= # AssignToAttribsOfSelfAction #======================================================================================================================= class AssignToAttribsOfSelfAction(Action): def __init__(self, assign_to_attribs_helper): Action.__init__(self) self.assign_to_attribs_helper = assign_to_attribs_helper def run(self): self.assign_to_attribs_helper.run() 
#======================================================================================================================= # Actually bind the actions #======================================================================================================================= if cmd == 'onCreateActions' or (DEBUG and cmd == 'onSave'): from org.python.pydev.editor.correctionassist import PythonCorrectionProcessor #@UnresolvedImport import assign_params_to_attributes_action as helper import assign_params_to_attributes_assist #---------------------------------------------------------------------------------------------- Bind it to Ctrl+2, a sDescription = 'Assign method params to attribs of self' assign_to_attribs_helper = helper.AssignToAttribsOfSelf(editor) editor.addOfflineActionListener( ACTIVATION_STRING, AssignToAttribsOfSelfAction(assign_to_attribs_helper), sDescription, WAIT_FOR_ENTER) #------------------------------------------------------------------------------------------------- Bind it to Ctrl+1 ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST = 'ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST' if not PythonCorrectionProcessor.hasAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST): assist = assign_params_to_attributes_assist.AssistAssignParamsToAttributes() PythonCorrectionProcessor.addAdditionalAssist(ASSIGN_PARAMS_TO_ATTRIBUTES_ASSIST, assist)<|fim▁end|>
Contact the author for bug reports/feature requests. Changed:Fabio Zadrozny (binded to Ctrl+1 too)
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import logging import random import string import sys from oslo.config import cfg # Logging setup logger = logging.getLogger(__name__) stdout = logging.StreamHandler(sys.stdout) stdout.setLevel(logging.DEBUG) logger.addHandler(stdout) logger.setLevel(logging.DEBUG) default_opts = [ cfg.StrOpt('working_dir', default='/opt/docstack', help="The base path to use for docstack."), ] # Option Definitions infrastructure_opts = [ cfg.StrOpt('sql_backend', default='mysql', choices=['mysql', 'postgresql'], help="The sql backend to use."), cfg.StrOpt('sql_host', default='127.0.0.1', help="The host for the sql backend."), cfg.StrOpt('sql_user', default='mysql', help="The user for the sql backend."), cfg.StrOpt('sql_password', default='', help="Password for the sql backend."), cfg.StrOpt('queue_backend', default='rabbit', choices=['rabbit', 'qpid', 'zeromq'], help="The shared queue to use."), cfg.StrOpt('queue_host', default='127.0.0.1', help="The host for the queue backend."), cfg.StrOpt('queue_user', default='rabbit', help="The user for the queue backend."), cfg.StrOpt('queue_password', default='', help="Password for the sql backend."), ] def generate_password(length): chars = ''.join([string.lowercase, string.uppercase, "1234567890"]) choice = random.SystemRandom().choice return ''.join((choice(chars) for i in range(length)))<|fim▁hole|> conf = cfg.ConfigOpts() conf(project='docstack', prog='docstack') # Base options conf.register_opts(default_opts) # Infrastructure infrastructure_group = cfg.OptGroup(name="infrastructure", title="Infrastructure Services") conf.register_group(infrastructure_group) conf.register_opts(infrastructure_opts, infrastructure_group) conf.set_default('sql_password', generate_password(12), 'infrastructure') conf.set_default('queue_password', generate_password(12), 'infrastructure') conf.reload_config_files() # Log it all out conf.log_opt_values(logger, logging.INFO) return conf<|fim▁end|>
def parse():
<|file_name|>util.format.js<|end_file_name|><|fim▁begin|>// I used to use `util.format()` which was massive, then I switched to // format-util, although when using rollup I discovered that the index.js<|fim▁hole|>// just exported `require('util').format`, and then had the below contents // in another file. at any rate all I want is this function: function format(fmt) { fmt = String(fmt); // this is closer to util.format() behavior var re = /(%?)(%([jds]))/g , args = Array.prototype.slice.call(arguments, 1); if(args.length) { if(Array.isArray(args[0])) args = args[0]; fmt = fmt.replace(re, function(match, escaped, ptn, flag) { var arg = args.shift(); switch(flag) { case 's': arg = '' + arg; break; case 'd': arg = Number(arg); break; case 'j': arg = JSON.stringify(arg); break; } if(!escaped) { return arg; } args.unshift(arg); return match; }) } // arguments remain after formatting if(args.length) { fmt += ' ' + args.join(' '); } // update escaped %% values fmt = fmt.replace(/%{2,2}/g, '%'); return '' + fmt; } export default format;<|fim▁end|>
<|file_name|>svg.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ <%namespace name="helpers" file="/helpers.mako.rs" /> <%helpers:shorthand name="mask" products="gecko" extra_prefixes="webkit" flags="SHORTHAND_IN_GETCS" sub_properties="mask-mode mask-repeat mask-clip mask-origin mask-composite mask-position-x mask-position-y mask-size mask-image" spec="https://drafts.fxtf.org/css-masking/#propdef-mask"> use properties::longhands::{mask_mode, mask_repeat, mask_clip, mask_origin, mask_composite, mask_position_x, mask_position_y}; use properties::longhands::{mask_size, mask_image}; use values::specified::{Position, PositionComponent}; use parser::Parse; // FIXME(emilio): These two mask types should be the same! impl From<mask_origin::single_value::SpecifiedValue> for mask_clip::single_value::SpecifiedValue { fn from(origin: mask_origin::single_value::SpecifiedValue) -> mask_clip::single_value::SpecifiedValue { match origin { mask_origin::single_value::SpecifiedValue::ContentBox => mask_clip::single_value::SpecifiedValue::ContentBox, mask_origin::single_value::SpecifiedValue::PaddingBox => mask_clip::single_value::SpecifiedValue::PaddingBox , mask_origin::single_value::SpecifiedValue::BorderBox => mask_clip::single_value::SpecifiedValue::BorderBox, % if product == "gecko": mask_origin::single_value::SpecifiedValue::FillBox => mask_clip::single_value::SpecifiedValue::FillBox , mask_origin::single_value::SpecifiedValue::StrokeBox => mask_clip::single_value::SpecifiedValue::StrokeBox, mask_origin::single_value::SpecifiedValue::ViewBox=> mask_clip::single_value::SpecifiedValue::ViewBox, % endif } } } pub fn parse_value<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Longhands, ParseError<'i>> { % for name in "image mode position_x position_y size repeat origin 
clip composite".split(): // Vec grows from 0 to 4 by default on first push(). So allocate // with capacity 1, so in the common case of only one item we don't // way overallocate. Note that we always push at least one item if // parsing succeeds. let mut mask_${name} = mask_${name}::SpecifiedValue(Vec::with_capacity(1)); % endfor input.parse_comma_separated(|input| { % for name in "image mode position size repeat origin clip composite".split(): let mut ${name} = None; % endfor loop { if image.is_none() { if let Ok(value) = input.try(|input| mask_image::single_value ::parse(context, input)) { image = Some(value); continue<|fim▁hole|> if let Ok(value) = input.try(|input| Position::parse(context, input)) { position = Some(value); // Parse mask size, if applicable. size = input.try(|input| { input.expect_delim('/')?; mask_size::single_value::parse(context, input) }).ok(); continue } } % for name in "repeat origin clip composite mode".split(): if ${name}.is_none() { if let Ok(value) = input.try(|input| mask_${name}::single_value ::parse(context, input)) { ${name} = Some(value); continue } } % endfor break } if clip.is_none() { if let Some(origin) = origin { clip = Some(mask_clip::single_value::SpecifiedValue::from(origin)); } } let mut any = false; % for name in "image mode position size repeat origin clip composite".split(): any = any || ${name}.is_some(); % endfor if any { if let Some(position) = position { mask_position_x.0.push(position.horizontal); mask_position_y.0.push(position.vertical); } else { mask_position_x.0.push(PositionComponent::zero()); mask_position_y.0.push(PositionComponent::zero()); } % for name in "image mode size repeat origin clip composite".split(): if let Some(m_${name}) = ${name} { mask_${name}.0.push(m_${name}); } else { mask_${name}.0.push(mask_${name}::single_value ::get_initial_specified_value()); } % endfor Ok(()) } else { Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)) } })?; Ok(expanded! 
{ % for name in "image mode position_x position_y size repeat origin clip composite".split(): mask_${name}: mask_${name}, % endfor }) } impl<'a> ToCss for LonghandsToSerialize<'a> { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: fmt::Write { use properties::longhands::mask_origin::single_value::computed_value::T as Origin; use properties::longhands::mask_clip::single_value::computed_value::T as Clip; let len = self.mask_image.0.len(); if len == 0 { return Ok(()); } % for name in "mode position_x position_y size repeat origin clip composite".split(): if self.mask_${name}.0.len() != len { return Ok(()); } % endfor for i in 0..len { if i > 0 { dest.write_str(", ")?; } % for name in "image mode position_x position_y size repeat origin clip composite".split(): let ${name} = &self.mask_${name}.0[i]; % endfor image.to_css(dest)?; dest.write_str(" ")?; mode.to_css(dest)?; dest.write_str(" ")?; Position { horizontal: position_x.clone(), vertical: position_y.clone() }.to_css(dest)?; if *size != mask_size::single_value::get_initial_specified_value() { dest.write_str(" / ")?; size.to_css(dest)?; } dest.write_str(" ")?; repeat.to_css(dest)?; if *origin != Origin::BorderBox || *clip != Clip::BorderBox { dest.write_str(" ")?; origin.to_css(dest)?; if *clip != From::from(*origin) { dest.write_str(" ")?; clip.to_css(dest)?; } } dest.write_str(" ")?; composite.to_css(dest)?; } Ok(()) } } </%helpers:shorthand> <%helpers:shorthand name="mask-position" products="gecko" extra_prefixes="webkit" flags="SHORTHAND_IN_GETCS" sub_properties="mask-position-x mask-position-y" spec="https://drafts.csswg.org/css-masks-4/#the-mask-position"> use properties::longhands::{mask_position_x,mask_position_y}; use values::specified::position::Position; use parser::Parse; pub fn parse_value<'i, 't>( context: &ParserContext, input: &mut Parser<'i, 't>, ) -> Result<Longhands, ParseError<'i>> { // Vec grows from 0 to 4 by default on first push(). 
So allocate with // capacity 1, so in the common case of only one item we don't way // overallocate. Note that we always push at least one item if parsing // succeeds. let mut position_x = mask_position_x::SpecifiedValue(Vec::with_capacity(1)); let mut position_y = mask_position_y::SpecifiedValue(Vec::with_capacity(1)); let mut any = false; input.parse_comma_separated(|input| { let value = Position::parse(context, input)?; position_x.0.push(value.horizontal); position_y.0.push(value.vertical); any = true; Ok(()) })?; if !any { return Err(input.new_custom_error(StyleParseErrorKind::UnspecifiedError)); } Ok(expanded! { mask_position_x: position_x, mask_position_y: position_y, }) } impl<'a> ToCss for LonghandsToSerialize<'a> { fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result where W: fmt::Write { let len = self.mask_position_x.0.len(); if len == 0 || self.mask_position_y.0.len() != len { return Ok(()); } for i in 0..len { Position { horizontal: self.mask_position_x.0[i].clone(), vertical: self.mask_position_y.0[i].clone() }.to_css(dest)?; if i < len - 1 { dest.write_str(", ")?; } } Ok(()) } } </%helpers:shorthand><|fim▁end|>
} } if position.is_none() {
<|file_name|>StylesSidebarPane.js<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2007 Apple Inc. All rights reserved. * Copyright (C) 2009 Joseph Pecoraro * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of Apple Computer, Inc. ("Apple") nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY APPLE AND ITS CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL APPLE OR ITS CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

/**
 * Sidebar pane showing the CSS rules/styles that match the currently selected
 * DOM node. Sections are grouped into Elements.SectionBlock instances
 * (matched rules, pseudo-element blocks, @keyframes blocks, inherited-from
 * blocks) and rebuilt whenever the node or the CSS model changes.
 */
Elements.StylesSidebarPane = class extends Elements.ElementsSidebarPane {
  constructor() {
    super();
    this.setMinimumSize(96, 26);
    this.registerRequiredCSS('elements/stylesSidebarPane.css');
    this.element.tabIndex = -1;

    // Re-render when the user changes color format or indent preferences.
    Common.moduleSetting('colorFormat').addChangeListener(this.update.bind(this));
    Common.moduleSetting('textEditorIndent').addChangeListener(this.update.bind(this));

    // Toolbar-pane animation state: only one pane animates at a time; a pane
    // requested mid-animation is parked in _pendingWidget (see showToolbarPane).
    /** @type {?UI.Widget} */
    this._currentToolbarPane = null;
    /** @type {?UI.Widget} */
    this._animatedToolbarPane = null;
    /** @type {?UI.Widget} */
    this._pendingWidget = null;
    /** @type {?UI.ToolbarToggle} */
    this._pendingWidgetToggle = null;
    this._toolbarPaneElement = this._createStylesSidebarToolbar();

    this._noMatchesElement = this.contentElement.createChild('div', 'gray-info-message hidden');
    this._noMatchesElement.textContent = ls`No matching selector or style`;

    this._sectionsContainer = this.contentElement.createChild('div');
    UI.ARIAUtils.markAsTree(this._sectionsContainer);
    this._sectionsContainer.addEventListener('keydown', this._sectionsContainerKeyDown.bind(this), false);
    this._sectionsContainer.addEventListener('focusin', this._sectionsContainerFocusChanged.bind(this), false);
    this._sectionsContainer.addEventListener('focusout', this._sectionsContainerFocusChanged.bind(this), false);

    this._swatchPopoverHelper = new InlineEditor.SwatchPopoverHelper();
    this._linkifier = new Components.Linkifier(Elements.StylesSidebarPane._maxLinkLength, /* useLinkDecorator */ true);
    /** @type {?Elements.StylePropertyHighlighter} */
    this._decorator = null;
    this._userOperation = false;
    this._isEditingStyle = false;
    /** @type {?RegExp} */
    this._filterRegex = null;

    this.contentElement.classList.add('styles-pane');

    /** @type {!Array<!Elements.SectionBlock>} */
    this._sectionBlocks = [];
    Elements.StylesSidebarPane._instance = this;
    UI.context.addFlavorChangeListener(SDK.DOMNode, this.forceUpdate, this);
    this.contentElement.addEventListener('copy', this._clipboardCopy.bind(this));
    // Throttle expensive per-section width recomputation during resizes.
    this._resizeThrottler = new Common.Throttler(100);
  }

  /**
   * @return {!InlineEditor.SwatchPopoverHelper}
   */
  swatchPopoverHelper() {
    return this._swatchPopoverHelper;
  }

  /**
   * Marks that a user-initiated edit is in flight so CSS model change events
   * do not trigger a rebuild (see onCSSModelChanged).
   * @param {boolean} userOperation
   */
  setUserOperation(userOperation) {
    this._userOperation = userOperation;
  }

  /**
   * Builds the warning icon shown next to an invalid/unknown CSS property.
   * @param {!SDK.CSSProperty} property
   * @return {!Element}
   */
  static createExclamationMark(property) {
    const exclamationElement = createElement('label', 'dt-icon-label');
    exclamationElement.className = 'exclamation-mark';
    if (!Elements.StylesSidebarPane.ignoreErrorsForProperty(property))
      exclamationElement.type = 'smallicon-warning';
    exclamationElement.title = SDK.cssMetadata().isCSSPropertyName(property.name) ?
        Common.UIString('Invalid property value') :
        Common.UIString('Unknown property name');
    return exclamationElement;
  }

  /**
   * Returns true for properties that are intentionally non-standard (legacy
   * IE hacks, unknown vendor prefixes) so no warning icon is shown for them.
   * @param {!SDK.CSSProperty} property
   * @return {boolean}
   */
  static ignoreErrorsForProperty(property) {
    /**
     * @param {string} string
     */
    function hasUnknownVendorPrefix(string) {
      return !string.startsWith('-webkit-') && /^[-_][\w\d]+-\w/.test(string);
    }

    const name = property.name.toLowerCase();

    // IE hack.
    if (name.charAt(0) === '_')
      return true;

    // IE has a different format for this.
    if (name === 'filter')
      return true;

    // Common IE-specific property prefix.
    if (name.startsWith('scrollbar-'))
      return true;
    if (hasUnknownVendorPrefix(name))
      return true;

    const value = property.value.toLowerCase();

    // IE hack.
    if (value.endsWith('\\9'))
      return true;
    if (hasUnknownVendorPrefix(value))
      return true;

    return false;
  }

  /**
   * Creates the filter <input> used to narrow down visible properties.
   * The regex is case-insensitive and built from the escaped input text.
   * @param {string} placeholder
   * @param {!Element} container
   * @param {function(?RegExp)} filterCallback
   * @return {!Element}
   */
  static createPropertyFilterElement(placeholder, container, filterCallback) {
    const input = createElementWithClass('input');
    input.placeholder = placeholder;

    function searchHandler() {
      const regex = input.value ? new RegExp(input.value.escapeForRegExp(), 'i') : null;
      filterCallback(regex);
    }
    input.addEventListener('input', searchHandler, false);

    /**
     * Escape clears a non-empty filter instead of bubbling up.
     * @param {!Event} event
     */
    function keydownHandler(event) {
      if (event.key !== 'Escape' || !input.value)
        return;
      event.consume(true);
      input.value = '';
      searchHandler();
    }
    input.addEventListener('keydown', keydownHandler, false);

    input.setFilterValue = setFilterValue;

    /**
     * @param {string} value
     */
    function setFilterValue(value) {
      input.value = value;
      input.focus();
      searchHandler();
    }

    return input;
  }

  /**
   * Scrolls to and highlights the given property after the next update.
   * @param {!SDK.CSSProperty} cssProperty
   */
  revealProperty(cssProperty) {
    this._decorator = new Elements.StylePropertyHighlighter(this, cssProperty);
    this._decorator.perform();
    this.update();
  }

  forceUpdate() {
    this._swatchPopoverHelper.hide();
    this._resetCache();
    this.update();
  }

  /**
   * Keyboard navigation between sections (arrows/Home/End), wrapping around.
   * @param {!Event} event
   */
  _sectionsContainerKeyDown(event) {
    const activeElement = this._sectionsContainer.ownerDocument.deepActiveElement();
    if (!activeElement)
      return;
    const section = activeElement._section;
    if (!section)
      return;

    switch (event.key) {
      case 'ArrowUp':
      case 'ArrowLeft':
        const sectionToFocus = section.previousSibling() || section.lastSibling();
        sectionToFocus.element.focus();
        event.consume(true);
        break;
      case 'ArrowDown':
      case 'ArrowRight': {
        const sectionToFocus = section.nextSibling() || section.firstSibling();
        sectionToFocus.element.focus();
        event.consume(true);
        break;
      }
      case 'Home':
        section.firstSibling().element.focus();
        event.consume(true);
        break;
      case 'End':
        section.lastSibling().element.focus();
        event.consume(true);
        break;
    }
  }

  _sectionsContainerFocusChanged() {
    // When a styles section is focused, shift+tab should leave the section.
    // Leaving tabIndex = 0 on the first element would cause it to be focused instead.
    if (this._sectionBlocks[0] && this._sectionBlocks[0].sections[0])
      this._sectionBlocks[0].sections[0].element.tabIndex = this._sectionsContainer.hasFocus() ? -1 : 0;
  }

  /**
   * Long-press on the "add rule" button: shows a context menu listing the
   * page's editable stylesheets plus the inspector stylesheet.
   * @param {!Event} event
   */
  _onAddButtonLongClick(event) {
    const cssModel = this.cssModel();
    if (!cssModel)
      return;
    const headers = cssModel.styleSheetHeaders().filter(styleSheetResourceHeader);

    /** @type {!Array.<{text: string, handler: function()}>} */
    const contextMenuDescriptors = [];
    for (let i = 0; i < headers.length; ++i) {
      const header = headers[i];
      const handler = this._createNewRuleInStyleSheet.bind(this, header);
      contextMenuDescriptors.push({text: Bindings.displayNameForURL(header.resourceURL()), handler: handler});
    }

    contextMenuDescriptors.sort(compareDescriptors);

    const contextMenu = new UI.ContextMenu(event);
    for (let i = 0; i < contextMenuDescriptors.length; ++i) {
      const descriptor = contextMenuDescriptors[i];
      contextMenu.defaultSection().appendItem(descriptor.text, descriptor.handler);
    }
    contextMenu.footerSection().appendItem(
        'inspector-stylesheet', this._createNewRuleInViaInspectorStyleSheet.bind(this));
    contextMenu.show();

    /**
     * @param {!{text: string, handler: function()}} descriptor1
     * @param {!{text: string, handler: function()}} descriptor2
     * @return {number}
     */
    function compareDescriptors(descriptor1, descriptor2) {
      return String.naturalOrderComparator(descriptor1.text, descriptor2.text);
    }

    /**
     * Only resource-backed, non-inline, non-inspector sheets are offered.
     * @param {!SDK.CSSStyleSheetHeader} header
     * @return {boolean}
     */
    function styleSheetResourceHeader(header) {
      return !header.isViaInspector() && !header.isInline && !!header.resourceURL();
    }
  }

  /**
   * @param {?RegExp} regex
   */
  _onFilterChanged(regex) {
    this._filterRegex = regex;
    this._updateFilter();
  }

  /**
   * Light-weight refresh of existing sections after an in-place edit, without
   * a full rebuild of the section list.
   * @param {!Elements.StylePropertiesSection} editedSection
   * @param {!Elements.StylePropertyTreeElement=} editedTreeElement
   */
  _refreshUpdate(editedSection, editedTreeElement) {
    if (editedTreeElement) {
      for (const section of this.allSections()) {
        if (section.isBlank)
          continue;
        section._updateVarFunctions(editedTreeElement);
      }
    }

    if (this._isEditingStyle)
      return;
    const node = this.node();
    if (!node)
      return;

    for (const section of this.allSections()) {
      if (section.isBlank)
        continue;
      section.update(section === editedSection);
    }

    if (this._filterRegex)
      this._updateFilter();
    this._nodeStylesUpdatedForTest(node, false);
  }

  /**
   * @override
   * @return {!Promise.<?>}
   */
  doUpdate() {
    return this._fetchMatchedCascade().then(this._innerRebuildUpdate.bind(this));
  }

  /**
   * @override
   */
  onResize() {
    this._resizeThrottler.schedule(this._innerResize.bind(this));
  }

  /**
   * Pins every section's tree outline to the pane's current pixel width.
   * @return {!Promise}
   */
  _innerResize() {
    const width = this.contentElement.getBoundingClientRect().width + 'px';
    this.allSections().forEach(section => section.propertiesTreeOutline.element.style.width = width);
    return Promise.resolve();
  }

  _resetCache() {
    if (this.cssModel())
      this.cssModel().discardCachedMatchedCascade();
  }

  /**
   * Fetches the matched-styles cascade for the current node; resolves to null
   * if there is no node/model or the node changed while fetching.
   * @return {!Promise.<?SDK.CSSMatchedStyles>}
   */
  _fetchMatchedCascade() {
    const node = this.node();
    if (!node || !this.cssModel())
      return Promise.resolve(/** @type {?SDK.CSSMatchedStyles} */ (null));

    return this.cssModel().cachedMatchedCascadeForNode(node).then(validateStyles.bind(this));

    /**
     * Drops stale results if the selected node changed during the request.
     * @param {?SDK.CSSMatchedStyles} matchedStyles
     * @return {?SDK.CSSMatchedStyles}
     * @this {Elements.StylesSidebarPane}
     */
    function validateStyles(matchedStyles) {
      return matchedStyles && matchedStyles.node() === this.node() ? matchedStyles : null;
    }
  }

  /**
   * @param {boolean} editing
   */
  setEditingStyle(editing) {
    if (this._isEditingStyle === editing)
      return;
    this.contentElement.classList.toggle('is-editing-style', editing);
    this._isEditingStyle = editing;
  }

  /**
   * @override
   * @param {!Common.Event=} event
   */
  onCSSModelChanged(event) {
    const edit = event && event.data ? /** @type {?SDK.CSSModel.Edit} */ (event.data.edit) : null;
    if (edit) {
      // A precise edit: rebase existing sections instead of rebuilding.
      for (const section of this.allSections())
        section._styleSheetEdited(edit);
      return;
    }

    // Skip rebuilds triggered by our own edits or while the user is typing.
    if (this._userOperation || this._isEditingStyle)
      return;

    this._resetCache();
    this.update();
  }

  /**
   * Flat index (across all blocks) of the currently focused section, or -1.
   * @return {number}
   */
  _focusedSectionIndex() {
    let index = 0;
    for (const block of this._sectionBlocks) {
      for (const section of block.sections) {
        if (section.element.hasFocus())
          return index;
        index++;
      }
    }
    return -1;
  }

  /**
   * Full rebuild of all section blocks from a matched-styles cascade,
   * restoring focus to the previously focused section index when possible.
   * @param {?SDK.CSSMatchedStyles} matchedStyles
   * @return {!Promise}
   */
  async _innerRebuildUpdate(matchedStyles) {
    const focusedIndex = this._focusedSectionIndex();

    this._linkifier.reset();
    this._sectionsContainer.removeChildren();
    this._sectionBlocks = [];

    const node = this.node();
    if (!matchedStyles || !node) {
      this._noMatchesElement.classList.remove('hidden');
      return;
    }

    this._sectionBlocks =
        await this._rebuildSectionsForMatchedStyleRules(/** @type {!SDK.CSSMatchedStyles} */ (matchedStyles));
    // ::before is always listed first; remaining pseudo types are sorted.
    let pseudoTypes = [];
    const keys = matchedStyles.pseudoTypes();
    if (keys.delete(Protocol.DOM.PseudoType.Before))
      pseudoTypes.push(Protocol.DOM.PseudoType.Before);
    pseudoTypes = pseudoTypes.concat(keys.valuesArray().sort());
    for (const pseudoType of pseudoTypes) {
      const block = Elements.SectionBlock.createPseudoTypeBlock(pseudoType);
      for (const style of matchedStyles.pseudoStyles(pseudoType)) {
        const section = new Elements.StylePropertiesSection(this, matchedStyles, style);
        block.sections.push(section);
      }
      this._sectionBlocks.push(block);
    }

    for (const keyframesRule of matchedStyles.keyframes()) {
      const block = Elements.SectionBlock.createKeyframesBlock(keyframesRule.name().text);
      for (const keyframe of keyframesRule.keyframes())
        block.sections.push(new Elements.KeyframePropertiesSection(this, matchedStyles, keyframe.style));
      this._sectionBlocks.push(block);
    }
    let index = 0;
    for (const block of this._sectionBlocks) {
      const titleElement = block.titleElement();
      if (titleElement)
        this._sectionsContainer.appendChild(titleElement);
      for (const section of block.sections) {
        this._sectionsContainer.appendChild(section.element);
        if (index === focusedIndex)
          section.element.focus();
        index++;
      }
    }
    // If the previously focused section no longer exists, fall back to the first.
    if (focusedIndex >= index)
      this._sectionBlocks[0].sections[0].element.focus();

    this._sectionsContainerFocusChanged();

    if (this._filterRegex)
      this._updateFilter();
    else
      this._noMatchesElement.classList.toggle('hidden', this._sectionBlocks.length > 0);

    this._nodeStylesUpdatedForTest(/** @type {!SDK.DOMNode} */ (node), true);
    if (this._decorator) {
      this._decorator.perform();
      this._decorator = null;
    }
  }

  /**
   * @param {!SDK.DOMNode} node
   * @param {boolean} rebuild
   */
  _nodeStylesUpdatedForTest(node, rebuild) {
    // For sniffing in tests.
  }

  /**
   * Builds the main block plus one "Inherited from …" block per ancestor
   * contributing inherited styles, in cascade order.
   * @param {!SDK.CSSMatchedStyles} matchedStyles
   * @return {!Promise<!Array.<!Elements.SectionBlock>>}
   */
  async _rebuildSectionsForMatchedStyleRules(matchedStyles) {
    const blocks = [new Elements.SectionBlock(null)];
    let lastParentNode = null;
    for (const style of matchedStyles.nodeStyles()) {
      const parentNode = matchedStyles.isInherited(style) ? matchedStyles.nodeForStyle(style) : null;
      if (parentNode && parentNode !== lastParentNode) {
        lastParentNode = parentNode;
        const block = await Elements.SectionBlock._createInheritedNodeBlock(lastParentNode);
        blocks.push(block);
      }
      const section = new Elements.StylePropertiesSection(this, matchedStyles, style);
      blocks.peekLast().sections.push(section);
    }
    return blocks;
  }

  async _createNewRuleInViaInspectorStyleSheet() {
    const cssModel = this.cssModel();
    const node = this.node();
    if (!cssModel || !node)
      return;
    // Guard against a rebuild triggered by our own stylesheet creation.
    this.setUserOperation(true);

    const styleSheetHeader = await cssModel.requestViaInspectorStylesheet(/** @type {!SDK.DOMNode} */ (node));

    this.setUserOperation(false);
    await this._createNewRuleInStyleSheet(styleSheetHeader);
  }

  /**
   * Opens a blank rule editor at the very end of the given stylesheet.
   * @param {?SDK.CSSStyleSheetHeader} styleSheetHeader
   */
  async _createNewRuleInStyleSheet(styleSheetHeader) {
    if (!styleSheetHeader)
      return;
    const text = await styleSheetHeader.requestContent() || '';
    const lines = text.split('\n');
    const range = TextUtils.TextRange.createFromLocation(lines.length - 1, lines[lines.length - 1].length);
    this._addBlankSection(this._sectionBlocks[0].sections[0], styleSheetHeader.id, range);
  }

  /**
   * Inserts a blank, selector-editing section right after insertAfterSection.
   * @param {!Elements.StylePropertiesSection} insertAfterSection
   * @param {string} styleSheetId
   * @param {!TextUtils.TextRange} ruleLocation
   */
  _addBlankSection(insertAfterSection, styleSheetId, ruleLocation) {
    const node = this.node();
    const blankSection = new Elements.BlankStylePropertiesSection(
        this, insertAfterSection._matchedStyles, node ? node.simpleSelector() : '', styleSheetId, ruleLocation,
        insertAfterSection._style);

    this._sectionsContainer.insertBefore(blankSection.element, insertAfterSection.element.nextSibling);

    for (const block of this._sectionBlocks) {
      const index = block.sections.indexOf(insertAfterSection);
      if (index === -1)
        continue;
      block.sections.splice(index + 1, 0, blankSection);
      blankSection.startEditingSelector();
    }
  }

  /**
   * @param {!Elements.StylePropertiesSection} section
   */
  removeSection(section) {
    for (const block of this._sectionBlocks) {
      const index = block.sections.indexOf(section);
      if (index === -1)
        continue;
      block.sections.splice(index, 1);
      section.element.remove();
    }
  }

  /**
   * @return {?RegExp}
   */
  filterRegex() {
    return this._filterRegex;
  }

  _updateFilter() {
    let hasAnyVisibleBlock = false;
    for (const block of this._sectionBlocks)
      hasAnyVisibleBlock |= block.updateFilter();
    this._noMatchesElement.classList.toggle('hidden', hasAnyVisibleBlock);
  }

  /**
   * @override
   */
  willHide() {
    this._swatchPopoverHelper.hide();
    super.willHide();
  }

  /**
   * @return {!Array<!Elements.StylePropertiesSection>}
   */
  allSections() {
    let sections = [];
    for (const block of this._sectionBlocks)
      sections = sections.concat(block.sections);
    return sections;
  }

  /**
   * @param {!Event} event
   */
  _clipboardCopy(event) {
    Host.userMetrics.actionTaken(Host.UserMetrics.Action.StyleRuleCopied);
  }

  /**
   * Builds the pane's toolbar (filter box + location items) and returns the
   * element that hosts animated toolbar panes.
   * @return {!Element}
   */
  _createStylesSidebarToolbar() {
    const container = this.contentElement.createChild('div', 'styles-sidebar-pane-toolbar-container');
    const hbox = container.createChild('div', 'hbox styles-sidebar-pane-toolbar');
    const filterContainerElement = hbox.createChild('div', 'styles-sidebar-pane-filter-box');
    const filterInput =
        Elements.StylesSidebarPane.createPropertyFilterElement(ls`Filter`, hbox, this._onFilterChanged.bind(this));
    UI.ARIAUtils.setAccessibleName(filterInput, Common.UIString('Filter Styles'));
    filterContainerElement.appendChild(filterInput);
    const toolbar = new UI.Toolbar('styles-pane-toolbar', hbox);
    toolbar.makeToggledGray();
    toolbar.appendLocationItems('styles-sidebarpane-toolbar');
    const toolbarPaneContainer = container.createChild('div', 'styles-sidebar-toolbar-pane-container');
    const toolbarPaneContent = toolbarPaneContainer.createChild('div', 'styles-sidebar-toolbar-pane');

    return toolbarPaneContent;
  }

  /**
   * Shows (or hides, when widget is null) a toolbar pane. If an animation is
   * already running, the request is queued and applied when it finishes.
   * @param {?UI.Widget} widget
   * @param {?UI.ToolbarToggle} toggle
   */
  showToolbarPane(widget, toggle) {
    if (this._pendingWidgetToggle)
      this._pendingWidgetToggle.setToggled(false);
    this._pendingWidgetToggle = toggle;

    if (this._animatedToolbarPane)
      this._pendingWidget = widget;
    else
      this._startToolbarPaneAnimation(widget);

    if (widget && toggle)
      toggle.setToggled(true);
  }

  /**
   * Slides a toolbar pane in/out; swapping one visible pane for another is
   * done instantly (no animation).
   * @param {?UI.Widget} widget
   */
  _startToolbarPaneAnimation(widget) {
    if (widget === this._currentToolbarPane)
      return;

    if (widget && this._currentToolbarPane) {
      // Replace-in-place: no slide animation when swapping panes.
      this._currentToolbarPane.detach();
      widget.show(this._toolbarPaneElement);
      this._currentToolbarPane = widget;
      this._currentToolbarPane.focus();
      return;
    }

    this._animatedToolbarPane = widget;

    if (this._currentToolbarPane)
      this._toolbarPaneElement.style.animationName = 'styles-element-state-pane-slideout';
    else if (widget)
      this._toolbarPaneElement.style.animationName = 'styles-element-state-pane-slidein';

    if (widget)
      widget.show(this._toolbarPaneElement);

    const listener = onAnimationEnd.bind(this);
    this._toolbarPaneElement.addEventListener('animationend', listener, false);

    /**
     * Commits the animated pane and kicks off any queued pane request.
     * @this {!Elements.StylesSidebarPane}
     */
    function onAnimationEnd() {
      this._toolbarPaneElement.style.removeProperty('animation-name');
      this._toolbarPaneElement.removeEventListener('animationend', listener, false);

      if (this._currentToolbarPane)
        this._currentToolbarPane.detach();

      this._currentToolbarPane = this._animatedToolbarPane;
      if (this._currentToolbarPane)
        this._currentToolbarPane.focus();
      this._animatedToolbarPane = null;

      if (this._pendingWidget) {
        this._startToolbarPaneAnimation(this._pendingWidget);
        this._pendingWidget = null;
      }
    }
  }
};
// Maximum display length for linkified rule-origin links.
Elements.StylesSidebarPane._maxLinkLength = 30;

/**
 * A titled group of style sections: the main matched-rules block (null
 * title), a pseudo-element block, a @keyframes block, or an
 * "Inherited from …" block.
 */
Elements.SectionBlock = class {
  /**
   * @param {?Element} titleElement
   */
  constructor(titleElement) {
    this._titleElement = titleElement;
    this.sections = [];
  }

  /**
   * @param {!Protocol.DOM.PseudoType} pseudoType
   * @return {!Elements.SectionBlock}
   */
  static createPseudoTypeBlock(pseudoType) {
    const separatorElement = createElement('div');
    separatorElement.className = 'sidebar-separator';
    separatorElement.textContent = Common.UIString('Pseudo ::%s element', pseudoType);
    return new Elements.SectionBlock(separatorElement);
  }

  /**
   * @param {string} keyframesName
   * @return {!Elements.SectionBlock}
   */
  static createKeyframesBlock(keyframesName) {
    const separatorElement = createElement('div');
    separatorElement.className = 'sidebar-separator';
    separatorElement.textContent = Common.UIString('@keyframes ' + keyframesName);
    return new Elements.SectionBlock(separatorElement);
  }

  /**
   * Builds an "Inherited from <node link>" separator block.
   * @param {!SDK.DOMNode} node
   * @return {!Promise<!Elements.SectionBlock>}
   */
  static async _createInheritedNodeBlock(node) {
    const separatorElement = createElement('div');
    separatorElement.className = 'sidebar-separator';
    separatorElement.createTextChild(Common.UIString('Inherited from') + ' ');
    const link = await Common.Linkifier.linkify(node);
    separatorElement.appendChild(link);
    return new Elements.SectionBlock(separatorElement);
  }

  /**
   * Applies the pane's filter to every section; hides the block title when
   * no section in the block remains visible.
   * @return {boolean}
   */
  updateFilter() {
    let hasAnyVisibleSection = false;
    for (const section of this.sections)
      hasAnyVisibleSection |= section._updateFilter();
    if (this._titleElement)
      this._titleElement.classList.toggle('hidden', !hasAnyVisibleSection);
    return hasAnyVisibleSection;
  }

  /**
   * @return {?Element}
   */
  titleElement() {
    return this._titleElement;
  }
};

Elements.StylePropertiesSection = class {
  /**
   * @param {!Elements.StylesSidebarPane} parentPane
   * @param {!SDK.CSSMatchedStyles} matchedStyles
   * @param {!SDK.CSSStyleDeclaration} style
   */
  constructor(parentPane, matchedStyles, style) {
    this._parentPane = parentPane;
this._style = style; this._matchedStyles = matchedStyles; this.editable = !!(style.styleSheetId && style.range); /** @type {?number} */ this._hoverTimer = null; this._willCauseCancelEditing = false; this._forceShowAll = false; this._originalPropertiesCount = style.leadingProperties().length; const rule = style.parentRule; this.element = createElementWithClass('div', 'styles-section matched-styles monospace'); this.element.tabIndex = -1; UI.ARIAUtils.markAsTreeitem(this.element); this._editing = false; this.element.addEventListener('keydown', this._onKeyDown.bind(this), false); this.element._section = this; this._innerElement = this.element.createChild('div'); this._titleElement = this._innerElement.createChild('div', 'styles-section-title ' + (rule ? 'styles-selector' : '')); this.propertiesTreeOutline = new UI.TreeOutlineInShadow(); this.propertiesTreeOutline.setFocusable(false); this.propertiesTreeOutline.registerRequiredCSS('elements/stylesSectionTree.css'); this.propertiesTreeOutline.element.classList.add('style-properties', 'matched-styles', 'monospace'); this.propertiesTreeOutline.section = this; this._innerElement.appendChild(this.propertiesTreeOutline.element); this._showAllButton = UI.createTextButton('', this._showAllItems.bind(this), 'styles-show-all'); this._innerElement.appendChild(this._showAllButton); const selectorContainer = createElement('div'); this._selectorElement = createElementWithClass('span', 'selector'); this._selectorElement.textContent = this._headerText(); selectorContainer.appendChild(this._selectorElement); this._selectorElement.addEventListener('mouseenter', this._onMouseEnterSelector.bind(this), false); this._selectorElement.addEventListener('mouseleave', this._onMouseOutSelector.bind(this), false); const openBrace = createElement('span'); openBrace.textContent = ' {'; selectorContainer.appendChild(openBrace); selectorContainer.addEventListener('mousedown', this._handleEmptySpaceMouseDown.bind(this), false); 
selectorContainer.addEventListener('click', this._handleSelectorContainerClick.bind(this), false); const closeBrace = this._innerElement.createChild('div', 'sidebar-pane-closing-brace'); closeBrace.textContent = '}'; this._createHoverMenuToolbar(closeBrace); this._selectorElement.addEventListener('click', this._handleSelectorClick.bind(this), false); this.element.addEventListener('mousedown', this._handleEmptySpaceMouseDown.bind(this), false); this.element.addEventListener('click', this._handleEmptySpaceClick.bind(this), false); this.element.addEventListener('mousemove', this._onMouseMove.bind(this), false); this.element.addEventListener('mouseleave', this._setSectionHovered.bind(this, false), false); if (rule) { // Prevent editing the user agent and user rules. if (rule.isUserAgent() || rule.isInjected()) { this.editable = false; } else { // Check this is a real CSSRule, not a bogus object coming from Elements.BlankStylePropertiesSection. if (rule.styleSheetId) { const header = rule.cssModel().styleSheetHeaderForId(rule.styleSheetId); this.navigable = !header.isAnonymousInlineStyleSheet(); } } } this._mediaListElement = this._titleElement.createChild('div', 'media-list media-matches'); this._selectorRefElement = this._titleElement.createChild('div', 'styles-section-subtitle'); this._updateMediaList(); this._updateRuleOrigin(); this._titleElement.appendChild(selectorContainer); this._selectorContainer = selectorContainer; if (this.navigable) this.element.classList.add('navigable'); if (!this.editable) { this.element.classList.add('read-only'); this.propertiesTreeOutline.element.classList.add('read-only'); } const throttler = new Common.Throttler(100); this._scheduleHeightUpdate = () => throttler.schedule(this._manuallySetHeight.bind(this)); this._hoverableSelectorsMode = false; this._markSelectorMatches(); this.onpopulate(); } /** * @param {!SDK.CSSMatchedStyles} matchedStyles * @param {!Components.Linkifier} linkifier * @param {?SDK.CSSRule} rule * @return {!Node} 
*/ static createRuleOriginNode(matchedStyles, linkifier, rule) { if (!rule) return createTextNode(''); let ruleLocation; if (rule instanceof SDK.CSSStyleRule) ruleLocation = rule.style.range; else if (rule instanceof SDK.CSSKeyframeRule) ruleLocation = rule.key().range; const header = rule.styleSheetId ? matchedStyles.cssModel().styleSheetHeaderForId(rule.styleSheetId) : null; if (ruleLocation && rule.styleSheetId && header && !header.isAnonymousInlineStyleSheet()) { return Elements.StylePropertiesSection._linkifyRuleLocation( matchedStyles.cssModel(), linkifier, rule.styleSheetId, ruleLocation); } if (rule.isUserAgent()) return createTextNode(Common.UIString('user agent stylesheet')); if (rule.isInjected()) return createTextNode(Common.UIString('injected stylesheet')); if (rule.isViaInspector()) return createTextNode(Common.UIString('via inspector')); if (header && header.ownerNode) { const link = Elements.DOMLinkifier.linkifyDeferredNodeReference(header.ownerNode); link.textContent = '<style>…</style>'; return link; } return createTextNode(''); } /** * @param {!SDK.CSSModel} cssModel * @param {!Components.Linkifier} linkifier * @param {string} styleSheetId * @param {!TextUtils.TextRange} ruleLocation * @return {!Node} */ static _linkifyRuleLocation(cssModel, linkifier, styleSheetId, ruleLocation) { const styleSheetHeader = cssModel.styleSheetHeaderForId(styleSheetId); const lineNumber = styleSheetHeader.lineNumberInSource(ruleLocation.startLine); const columnNumber = styleSheetHeader.columnNumberInSource(ruleLocation.startLine, ruleLocation.startColumn); const matchingSelectorLocation = new SDK.CSSLocation(styleSheetHeader, lineNumber, columnNumber); return linkifier.linkifyCSSLocation(matchingSelectorLocation); } /** * @param {!Event} event */ _onKeyDown(event) { if (this._editing || !this.editable || event.altKey || event.ctrlKey || event.metaKey) return; switch (event.key) { case 'Enter': case ' ': this._startEditingAtFirstPosition(); event.consume(true); 
break; default: // Filter out non-printable key strokes. if (event.key.length === 1) this.addNewBlankProperty(0).startEditing(); break; } } /** * @param {boolean} isHovered */ _setSectionHovered(isHovered) { this.element.classList.toggle('styles-panel-hovered', isHovered); this.propertiesTreeOutline.element.classList.toggle('styles-panel-hovered', isHovered); if (this._hoverableSelectorsMode !== isHovered) { this._hoverableSelectorsMode = isHovered; this._markSelectorMatches(); } } /** * @param {!Event} event */ _onMouseMove(event) { const hasCtrlOrMeta = UI.KeyboardShortcut.eventHasCtrlOrMeta(/** @type {!MouseEvent} */ (event)); this._setSectionHovered(hasCtrlOrMeta); } /** * @param {!Element} container */ _createHoverMenuToolbar(container) { if (!this.editable) return; const items = []; const textShadowButton = new UI.ToolbarButton(Common.UIString('Add text-shadow'), 'largeicon-text-shadow'); textShadowButton.addEventListener( UI.ToolbarButton.Events.Click, this._onInsertShadowPropertyClick.bind(this, 'text-shadow')); textShadowButton.element.tabIndex = -1; items.push(textShadowButton); const boxShadowButton = new UI.ToolbarButton(Common.UIString('Add box-shadow'), 'largeicon-box-shadow'); boxShadowButton.addEventListener( UI.ToolbarButton.Events.Click, this._onInsertShadowPropertyClick.bind(this, 'box-shadow')); boxShadowButton.element.tabIndex = -1; items.push(boxShadowButton); const colorButton = new UI.ToolbarButton(Common.UIString('Add color'), 'largeicon-foreground-color'); colorButton.addEventListener(UI.ToolbarButton.Events.Click, this._onInsertColorPropertyClick, this); colorButton.element.tabIndex = -1; items.push(colorButton); const backgroundButton = new UI.ToolbarButton(Common.UIString('Add background-color'), 'largeicon-background-color'); backgroundButton.addEventListener(UI.ToolbarButton.Events.Click, this._onInsertBackgroundColorPropertyClick, this); backgroundButton.element.tabIndex = -1; items.push(backgroundButton); let newRuleButton = null; 
if (this._style.parentRule) { newRuleButton = new UI.ToolbarButton(Common.UIString('Insert Style Rule Below'), 'largeicon-add'); newRuleButton.addEventListener(UI.ToolbarButton.Events.Click, this._onNewRuleClick, this); newRuleButton.element.tabIndex = -1; items.push(newRuleButton); } const sectionToolbar = new UI.Toolbar('sidebar-pane-section-toolbar', container); for (let i = 0; i < items.length; ++i) sectionToolbar.appendToolbarItem(items[i]); const menuButton = new UI.ToolbarButton('', 'largeicon-menu'); menuButton.element.tabIndex = -1; sectionToolbar.appendToolbarItem(menuButton); setItemsVisibility.call(this, items, false); sectionToolbar.element.addEventListener('mouseenter', setItemsVisibility.bind(this, items, true)); sectionToolbar.element.addEventListener('mouseleave', setItemsVisibility.bind(this, items, false)); UI.ARIAUtils.markAsHidden(sectionToolbar.element); /** * @param {!Array<!UI.ToolbarButton>} items * @param {boolean} value * @this {Elements.StylePropertiesSection} */ function setItemsVisibility(items, value) { for (let i = 0; i < items.length; ++i) items[i].setVisible(value); menuButton.setVisible(!value); if (this._isSASSStyle()) newRuleButton.setVisible(false); } } /** * @return {boolean} */ _isSASSStyle() { const header = this._style.styleSheetId ? this._style.cssModel().styleSheetHeaderForId(this._style.styleSheetId) : null; if (!header) return false; const sourceMap = header.cssModel().sourceMapManager().sourceMapForClient(header); return sourceMap ? sourceMap.editable() : false; } /** * @return {!SDK.CSSStyleDeclaration} */ style() { return this._style; } /** * @return {string} */ _headerText() { const node = this._matchedStyles.nodeForStyle(this._style); if (this._style.type === SDK.CSSStyleDeclaration.Type.Inline) return this._matchedStyles.isInherited(this._style) ? 
Common.UIString('Style Attribute') : 'element.style'; if (this._style.type === SDK.CSSStyleDeclaration.Type.Attributes) return node.nodeNameInCorrectCase() + '[' + Common.UIString('Attributes Style') + ']'; return this._style.parentRule.selectorText(); } _onMouseOutSelector() { if (this._hoverTimer) clearTimeout(this._hoverTimer); SDK.OverlayModel.hideDOMNodeHighlight(); } _onMouseEnterSelector() { if (this._hoverTimer) clearTimeout(this._hoverTimer); this._hoverTimer = setTimeout(this._highlight.bind(this), 300); } _highlight() { SDK.OverlayModel.hideDOMNodeHighlight(); const node = this._parentPane.node(); if (!node) return; const selectors = this._style.parentRule ? this._style.parentRule.selectorText() : undefined; node.domModel().overlayModel().highlightDOMNodeWithConfig( node.id, {mode: 'all', showInfo: undefined, selectors: selectors}); } /** * @return {?Elements.StylePropertiesSection} */ firstSibling() { const parent = this.element.parentElement; if (!parent) return null; let childElement = parent.firstChild; while (childElement) { if (childElement._section) return childElement._section; childElement = childElement.nextSibling; } return null; } /** * @return {?Elements.StylePropertiesSection} */ lastSibling() { const parent = this.element.parentElement; if (!parent) return null; let childElement = parent.lastChild; while (childElement) { if (childElement._section) return childElement._section; childElement = childElement.previousSibling; } return null; } /** * @return {?Elements.StylePropertiesSection} */ nextSibling() { let curElement = this.element; do curElement = curElement.nextSibling; while (curElement && !curElement._section); return curElement ? curElement._section : null; } /** * @return {?Elements.StylePropertiesSection} */ previousSibling() { let curElement = this.element; do curElement = curElement.previousSibling; while (curElement && !curElement._section); return curElement ? 
curElement._section : null; } /** * @param {!Common.Event} event */ _onNewRuleClick(event) { event.data.consume(); const rule = this._style.parentRule; const range = TextUtils.TextRange.createFromLocation(rule.style.range.endLine, rule.style.range.endColumn + 1); this._parentPane._addBlankSection(this, /** @type {string} */ (rule.styleSheetId), range); } /** * @param {string} propertyName * @param {!Common.Event} event */ _onInsertShadowPropertyClick(propertyName, event) { event.data.consume(true); const treeElement = this.addNewBlankProperty(); treeElement.property.name = propertyName; treeElement.property.value = '0 0 black'; treeElement.updateTitle(); const shadowSwatchPopoverHelper = Elements.ShadowSwatchPopoverHelper.forTreeElement(treeElement); if (shadowSwatchPopoverHelper) shadowSwatchPopoverHelper.showPopover(); } /** * @param {!Common.Event} event */ _onInsertColorPropertyClick(event) { event.data.consume(true); const treeElement = this.addNewBlankProperty(); treeElement.property.name = 'color'; treeElement.property.value = 'black'; treeElement.updateTitle(); const colorSwatch = Elements.ColorSwatchPopoverIcon.forTreeElement(treeElement); if (colorSwatch) colorSwatch.showPopover(); } /** * @param {!Common.Event} event */ _onInsertBackgroundColorPropertyClick(event) { event.data.consume(true); const treeElement = this.addNewBlankProperty(); treeElement.property.name = 'background-color'; treeElement.property.value = 'white'; treeElement.updateTitle(); const colorSwatch = Elements.ColorSwatchPopoverIcon.forTreeElement(treeElement); if (colorSwatch) colorSwatch.showPopover(); } /** * @param {!SDK.CSSModel.Edit} edit */ _styleSheetEdited(edit) { const rule = this._style.parentRule; if (rule) rule.rebase(edit); else this._style.rebase(edit); this._updateMediaList(); this._updateRuleOrigin(); } /** * @param {!Array.<!SDK.CSSMedia>} mediaRules */ _createMediaList(mediaRules) { for (let i = mediaRules.length - 1; i >= 0; --i) { const media = mediaRules[i]; // 
Don't display trivial non-print media types. if (!media.text.includes('(') && media.text !== 'print') continue; const mediaDataElement = this._mediaListElement.createChild('div', 'media'); const mediaContainerElement = mediaDataElement.createChild('span'); const mediaTextElement = mediaContainerElement.createChild('span', 'media-text'); switch (media.source) { case SDK.CSSMedia.Source.LINKED_SHEET: case SDK.CSSMedia.Source.INLINE_SHEET: mediaTextElement.textContent = 'media="' + media.text + '"'; break; case SDK.CSSMedia.Source.MEDIA_RULE: const decoration = mediaContainerElement.createChild('span'); mediaContainerElement.insertBefore(decoration, mediaTextElement); decoration.textContent = '@media '; mediaTextElement.textContent = media.text; if (media.styleSheetId) { mediaDataElement.classList.add('editable-media'); mediaTextElement.addEventListener( 'click', this._handleMediaRuleClick.bind(this, media, mediaTextElement), false); } break; case SDK.CSSMedia.Source.IMPORT_RULE: mediaTextElement.textContent = '@import ' + media.text; break; } } } _updateMediaList() { this._mediaListElement.removeChildren(); if (this._style.parentRule && this._style.parentRule instanceof SDK.CSSStyleRule) this._createMediaList(this._style.parentRule.media); } /** * @param {string} propertyName * @return {boolean} */ isPropertyInherited(propertyName) { if (this._matchedStyles.isInherited(this._style)) { // While rendering inherited stylesheet, reverse meaning of this property. // Render truly inherited properties with black, i.e. return them as non-inherited. 
return !SDK.cssMetadata().isPropertyInherited(propertyName); } return false; } /** * @return {?Elements.StylePropertiesSection} */ nextEditableSibling() { let curSection = this; do curSection = curSection.nextSibling(); while (curSection && !curSection.editable); if (!curSection) { curSection = this.firstSibling(); while (curSection && !curSection.editable) curSection = curSection.nextSibling(); } return (curSection && curSection.editable) ? curSection : null; } /** * @return {?Elements.StylePropertiesSection} */ previousEditableSibling() { let curSection = this; do curSection = curSection.previousSibling(); while (curSection && !curSection.editable); if (!curSection) { curSection = this.lastSibling(); while (curSection && !curSection.editable) curSection = curSection.previousSibling(); } return (curSection && curSection.editable) ? curSection : null; } /** * @param {!Elements.StylePropertyTreeElement} editedTreeElement */ refreshUpdate(editedTreeElement) { this._parentPane._refreshUpdate(this, editedTreeElement); } /** * @param {!Elements.StylePropertyTreeElement} editedTreeElement */ _updateVarFunctions(editedTreeElement) { let child = this.propertiesTreeOutline.firstChild(); while (child) { if (child !== editedTreeElement) child.updateTitleIfComputedValueChanged(); child = child.traverseNextTreeElement(false /* skipUnrevealed */, null /* stayWithin */, true /* dontPopulate */); } } /** * @param {boolean} full */ update(full) { this._selectorElement.textContent = this._headerText(); this._markSelectorMatches(); if (full) { this.onpopulate(); } else { let child = this.propertiesTreeOutline.firstChild(); while (child) { child.setOverloaded(this._isPropertyOverloaded(child.property)); child = child.traverseNextTreeElement(false /* skipUnrevealed */, null /* stayWithin */, true /* dontPopulate */); } } } /** * @param {!Event=} event */ _showAllItems(event) { if (event) event.consume(); if (this._forceShowAll) return; this._forceShowAll = true; this.onpopulate(); } 
onpopulate() { this.propertiesTreeOutline.removeChildren(); const style = this._style; let count = 0; const properties = style.leadingProperties(); const maxProperties = Elements.StylePropertiesSection.MaxProperties + properties.length - this._originalPropertiesCount; for (const property of properties) { if (!this._forceShowAll && count >= maxProperties) break; count++; const isShorthand = !!style.longhandProperties(property.name).length; const inherited = this.isPropertyInherited(property.name); const overloaded = this._isPropertyOverloaded(property); const item = new Elements.StylePropertyTreeElement( this._parentPane, this._matchedStyles, property, isShorthand, inherited, overloaded, false); this.propertiesTreeOutline.appendChild(item); } if (count < properties.length) { this._showAllButton.classList.remove('hidden'); this._showAllButton.textContent = ls`Show All Properties (${properties.length - count} more)`; } else { this._showAllButton.classList.add('hidden'); } } /** * @param {!SDK.CSSProperty} property * @return {boolean} */ _isPropertyOverloaded(property) { return this._matchedStyles.propertyState(property) === SDK.CSSMatchedStyles.PropertyState.Overloaded; } /** * @return {boolean} */ _updateFilter() { let hasMatchingChild = false; this._showAllItems(); for (const child of this.propertiesTreeOutline.rootElement().children()) hasMatchingChild |= child._updateFilter(); const regex = this._parentPane.filterRegex(); const hideRule = !hasMatchingChild && !!regex && !regex.test(this.element.deepTextContent()); this.element.classList.toggle('hidden', hideRule); if (!hideRule && this._style.parentRule) this._markSelectorHighlights(); return !hideRule; } _markSelectorMatches() { const rule = this._style.parentRule; if (!rule) return; this._mediaListElement.classList.toggle('media-matches', this._matchedStyles.mediaMatches(this._style)); const selectorTexts = rule.selectors.map(selector => selector.text); const matchingSelectorIndexes = 
this._matchedStyles.matchingSelectors(/** @type {!SDK.CSSStyleRule} */ (rule)); const matchingSelectors = /** @type {!Array<boolean>} */ (new Array(selectorTexts.length).fill(false)); for (const matchingIndex of matchingSelectorIndexes) matchingSelectors[matchingIndex] = true; if (this._parentPane._isEditingStyle) return; const fragment = this._hoverableSelectorsMode ? this._renderHoverableSelectors(selectorTexts, matchingSelectors) : this._renderSimplifiedSelectors(selectorTexts, matchingSelectors); this._selectorElement.removeChildren(); this._selectorElement.appendChild(fragment); this._markSelectorHighlights(); } /** * @param {!Array<string>} selectors * @param {!Array<boolean>} matchingSelectors * @return {!DocumentFragment} */ _renderHoverableSelectors(selectors, matchingSelectors) { const fragment = createDocumentFragment(); for (let i = 0; i < selectors.length; ++i) { if (i) fragment.createTextChild(', '); fragment.appendChild(this._createSelectorElement(selectors[i], matchingSelectors[i], i)); } return fragment; } /** * @param {string} text * @param {boolean} isMatching * @param {number=} navigationIndex * @return {!Element} */ _createSelectorElement(text, isMatching, navigationIndex) { const element = createElementWithClass('span', 'simple-selector'); element.classList.toggle('selector-matches', isMatching); if (typeof navigationIndex === 'number') element._selectorIndex = navigationIndex; element.textContent = text; return element; } /** * @param {!Array<string>} selectors * @param {!Array<boolean>} matchingSelectors * @return {!DocumentFragment} */ _renderSimplifiedSelectors(selectors, matchingSelectors) { const fragment = createDocumentFragment(); let currentMatching = false; let text = ''; for (let i = 0; i < selectors.length; ++i) { if (currentMatching !== matchingSelectors[i] && text) { fragment.appendChild(this._createSelectorElement(text, currentMatching)); text = ''; } currentMatching = matchingSelectors[i]; text += selectors[i] + (i === 
selectors.length - 1 ? '' : ', '); } if (text) fragment.appendChild(this._createSelectorElement(text, currentMatching)); return fragment; } _markSelectorHighlights() { const selectors = this._selectorElement.getElementsByClassName('simple-selector'); const regex = this._parentPane.filterRegex(); for (let i = 0; i < selectors.length; ++i) { const selectorMatchesFilter = !!regex && regex.test(selectors[i].textContent); selectors[i].classList.toggle('filter-match', selectorMatchesFilter); } } /** * @return {boolean} */ _checkWillCancelEditing() { const willCauseCancelEditing = this._willCauseCancelEditing; this._willCauseCancelEditing = false; return willCauseCancelEditing; } /** * @param {!Event} event */ _handleSelectorContainerClick(event) { if (this._checkWillCancelEditing() || !this.editable) return; if (event.target === this._selectorContainer) { this.addNewBlankProperty(0).startEditing(); event.consume(true); } } /** * @param {number=} index * @return {!Elements.StylePropertyTreeElement} */ addNewBlankProperty(index = this.propertiesTreeOutline.rootElement().childCount()) { const property = this._style.newBlankProperty(index); const item = new Elements.StylePropertyTreeElement( this._parentPane, this._matchedStyles, property, false, false, false, true); this.propertiesTreeOutline.insertChild(item, property.index); return item; } _handleEmptySpaceMouseDown() { this._willCauseCancelEditing = this._parentPane._isEditingStyle; } /** * @param {!Event} event */ _handleEmptySpaceClick(event) { if (!this.editable || this.element.hasSelection() || this._checkWillCancelEditing()) return; if (event.target.classList.contains('header') || this.element.classList.contains('read-only') || event.target.enclosingNodeOrSelfWithClass('media')) { event.consume(); return; } const deepTarget = event.deepElementFromPoint(); if (deepTarget.treeElement) this.addNewBlankProperty(deepTarget.treeElement.property.index + 1).startEditing(); else this.addNewBlankProperty().startEditing(); 
event.consume(true); } /** * @param {!SDK.CSSMedia} media * @param {!Element} element * @param {!Event} event */ _handleMediaRuleClick(media, element, event) { if (UI.isBeingEdited(element)) return; if (UI.KeyboardShortcut.eventHasCtrlOrMeta(/** @type {!MouseEvent} */ (event)) && this.navigable) { const location = media.rawLocation(); if (!location) { event.consume(true); return; } const uiLocation = Bindings.cssWorkspaceBinding.rawLocationToUILocation(location); if (uiLocation) Common.Revealer.reveal(uiLocation); event.consume(true); return; } if (!this.editable || this._isSASSStyle()) return; const config = new UI.InplaceEditor.Config( this._editingMediaCommitted.bind(this, media), this._editingMediaCancelled.bind(this, element), undefined, this._editingMediaBlurHandler.bind(this)); UI.InplaceEditor.startEditing(element, config); this.startEditing(); element.getComponentSelection().selectAllChildren(element); this._parentPane.setEditingStyle(true); const parentMediaElement = element.enclosingNodeOrSelfWithClass('media'); parentMediaElement.classList.add('editing-media'); event.consume(true); } /** * @param {!Element} element */ _editingMediaFinished(element) { this._parentPane.setEditingStyle(false); const parentMediaElement = element.enclosingNodeOrSelfWithClass('media'); parentMediaElement.classList.remove('editing-media'); this.stopEditing(); } /** * @param {!Element} element */ _editingMediaCancelled(element) { this._editingMediaFinished(element); // Mark the selectors in group if necessary. // This is overridden by BlankStylePropertiesSection. 
this._markSelectorMatches(); element.getComponentSelection().collapse(element, 0); } /** * @param {!Element} editor * @param {!Event} blurEvent * @return {boolean} */ _editingMediaBlurHandler(editor, blurEvent) { return true; } /** * @param {!SDK.CSSMedia} media * @param {!Element} element * @param {string} newContent * @param {string} oldContent * @param {(!Elements.StylePropertyTreeElement.Context|undefined)} context * @param {string} moveDirection */ _editingMediaCommitted(media, element, newContent, oldContent, context, moveDirection) { this._parentPane.setEditingStyle(false); this._editingMediaFinished(element); if (newContent) newContent = newContent.trim(); /** * @param {boolean} success * @this {Elements.StylePropertiesSection} */ function userCallback(success) { if (success) { this._matchedStyles.resetActiveProperties(); this._parentPane._refreshUpdate(this); } this._parentPane.setUserOperation(false); this._editingMediaTextCommittedForTest(); } // This gets deleted in finishOperation(), which is called both on success and failure. 
this._parentPane.setUserOperation(true); this._parentPane.cssModel().setMediaText(media.styleSheetId, media.range, newContent).then(userCallback.bind(this)); } _editingMediaTextCommittedForTest() { } /** * @param {!Event} event */ _handleSelectorClick(event) { if (UI.KeyboardShortcut.eventHasCtrlOrMeta(/** @type {!MouseEvent} */ (event)) && this.navigable && event.target.classList.contains('simple-selector')) { this._navigateToSelectorSource(event.target._selectorIndex, true); event.consume(true); return; } this._startEditingAtFirstPosition(); event.consume(true); } /** * @param {number} index * @param {boolean} focus */ _navigateToSelectorSource(index, focus) { const cssModel = this._parentPane.cssModel(); const rule = this._style.parentRule; const header = cssModel.styleSheetHeaderForId(/** @type {string} */ (rule.styleSheetId)); if (!header) return; const rawLocation = new SDK.CSSLocation(header, rule.lineNumberInSource(index), rule.columnNumberInSource(index)); const uiLocation = Bindings.cssWorkspaceBinding.rawLocationToUILocation(rawLocation); if (uiLocation) Common.Revealer.reveal(uiLocation, !focus); } _startEditingAtFirstPosition() { if (!this.editable || this._isSASSStyle()) return; if (!this._style.parentRule) { this.moveEditorFromSelector('forward'); return; } this.startEditingSelector(); } startEditingSelector() { const element = this._selectorElement; if (UI.isBeingEdited(element)) return; element.scrollIntoViewIfNeeded(false); // Reset selector marks in group, and normalize whitespace. 
element.textContent = element.textContent.replace(/\s+/g, ' ').trim(); const config = new UI.InplaceEditor.Config(this.editingSelectorCommitted.bind(this), this.editingSelectorCancelled.bind(this)); UI.InplaceEditor.startEditing(this._selectorElement, config); this.startEditing(); element.getComponentSelection().selectAllChildren(element); this._parentPane.setEditingStyle(true); if (element.classList.contains('simple-selector')) this._navigateToSelectorSource(0, false); } /** * @param {string} moveDirection */ moveEditorFromSelector(moveDirection) { this._markSelectorMatches(); if (!moveDirection) return; if (moveDirection === 'forward') { let firstChild = this.propertiesTreeOutline.firstChild(); while (firstChild && firstChild.inherited()) firstChild = firstChild.nextSibling; if (!firstChild) this.addNewBlankProperty().startEditing(); else firstChild.startEditing(firstChild.nameElement); } else { const previousSection = this.previousEditableSibling(); if (!previousSection) return; previousSection.addNewBlankProperty().startEditing(); } } /** * @param {!Element} element * @param {string} newContent * @param {string} oldContent * @param {(!Elements.StylePropertyTreeElement.Context|undefined)} context * @param {string} moveDirection */ editingSelectorCommitted(element, newContent, oldContent, context, moveDirection) { this._editingSelectorEnded(); if (newContent) newContent = newContent.trim(); if (newContent === oldContent) { // Revert to a trimmed version of the selector if need be. this._selectorElement.textContent = newContent; this.moveEditorFromSelector(moveDirection); return; } const rule = this._style.parentRule; if (!rule) return; /** * @this {Elements.StylePropertiesSection} */ function headerTextCommitted() { this._parentPane.setUserOperation(false); this.moveEditorFromSelector(moveDirection); this._editingSelectorCommittedForTest(); } // This gets deleted in finishOperationAndMoveEditor(), which is called both on success and failure. 
this._parentPane.setUserOperation(true); this._setHeaderText(rule, newContent).then(headerTextCommitted.bind(this)); } /** * @param {!SDK.CSSRule} rule * @param {string} newContent * @return {!Promise} */ _setHeaderText(rule, newContent) { /** * @param {!SDK.CSSStyleRule} rule * @param {boolean} success * @return {!Promise} * @this {Elements.StylePropertiesSection} */ function onSelectorsUpdated(rule, success) { if (!success) return Promise.resolve(); return this._matchedStyles.recomputeMatchingSelectors(rule).then(updateSourceRanges.bind(this, rule)); } /** * @param {!SDK.CSSStyleRule} rule * @this {Elements.StylePropertiesSection} */ function updateSourceRanges(rule) { const doesAffectSelectedNode = this._matchedStyles.matchingSelectors(rule).length > 0; this.propertiesTreeOutline.element.classList.toggle('no-affect', !doesAffectSelectedNode);<|fim▁hole|> console.assert(rule instanceof SDK.CSSStyleRule); const oldSelectorRange = rule.selectorRange(); if (!oldSelectorRange) return Promise.resolve(); return rule.setSelectorText(newContent) .then(onSelectorsUpdated.bind(this, /** @type {!SDK.CSSStyleRule} */ (rule), oldSelectorRange)); } _editingSelectorCommittedForTest() { } _updateRuleOrigin() { this._selectorRefElement.removeChildren(); this._selectorRefElement.appendChild(Elements.StylePropertiesSection.createRuleOriginNode( this._matchedStyles, this._parentPane._linkifier, this._style.parentRule)); } _editingSelectorEnded() { this._parentPane.setEditingStyle(false); this.stopEditing(); } editingSelectorCancelled() { this._editingSelectorEnded(); // Mark the selectors in group if necessary. // This is overridden by BlankStylePropertiesSection. 
this._markSelectorMatches(); } startEditing() { this._manuallySetHeight(); this.element.addEventListener('input', this._scheduleHeightUpdate, true); this._editing = true; } /** * @return {!Promise} */ _manuallySetHeight() { this.element.style.height = (this._innerElement.clientHeight + 1) + 'px'; this.element.style.contain = 'strict'; return Promise.resolve(); } stopEditing() { this.element.style.removeProperty('height'); this.element.style.removeProperty('contain'); this.element.removeEventListener('input', this._scheduleHeightUpdate, true); this._editing = false; if (this._parentPane.element === this._parentPane.element.ownerDocument.deepActiveElement()) this.element.focus(); } }; Elements.BlankStylePropertiesSection = class extends Elements.StylePropertiesSection { /** * @param {!Elements.StylesSidebarPane} stylesPane * @param {!SDK.CSSMatchedStyles} matchedStyles * @param {string} defaultSelectorText * @param {string} styleSheetId * @param {!TextUtils.TextRange} ruleLocation * @param {!SDK.CSSStyleDeclaration} insertAfterStyle */ constructor(stylesPane, matchedStyles, defaultSelectorText, styleSheetId, ruleLocation, insertAfterStyle) { const cssModel = /** @type {!SDK.CSSModel} */ (stylesPane.cssModel()); const rule = SDK.CSSStyleRule.createDummyRule(cssModel, defaultSelectorText); super(stylesPane, matchedStyles, rule.style); this._normal = false; this._ruleLocation = ruleLocation; this._styleSheetId = styleSheetId; this._selectorRefElement.removeChildren(); this._selectorRefElement.appendChild(Elements.StylePropertiesSection._linkifyRuleLocation( cssModel, this._parentPane._linkifier, styleSheetId, this._actualRuleLocation())); if (insertAfterStyle && insertAfterStyle.parentRule) this._createMediaList(insertAfterStyle.parentRule.media); this.element.classList.add('blank-section'); } /** * @return {!TextUtils.TextRange} */ _actualRuleLocation() { const prefix = this._rulePrefix(); const lines = prefix.split('\n'); const editRange = new TextUtils.TextRange(0, 
0, lines.length - 1, lines.peekLast().length); return this._ruleLocation.rebaseAfterTextEdit(TextUtils.TextRange.createFromLocation(0, 0), editRange); } /** * @return {string} */ _rulePrefix() { return this._ruleLocation.startLine === 0 && this._ruleLocation.startColumn === 0 ? '' : '\n\n'; } /** * @return {boolean} */ get isBlank() { return !this._normal; } /** * @override * @param {!Element} element * @param {string} newContent * @param {string} oldContent * @param {!Elements.StylePropertyTreeElement.Context|undefined} context * @param {string} moveDirection */ editingSelectorCommitted(element, newContent, oldContent, context, moveDirection) { if (!this.isBlank) { super.editingSelectorCommitted(element, newContent, oldContent, context, moveDirection); return; } /** * @param {?SDK.CSSStyleRule} newRule * @return {!Promise} * @this {Elements.BlankStylePropertiesSection} */ function onRuleAdded(newRule) { if (!newRule) { this.editingSelectorCancelled(); this._editingSelectorCommittedForTest(); return Promise.resolve(); } return this._matchedStyles.addNewRule(newRule, this._matchedStyles.node()) .then(onAddedToCascade.bind(this, newRule)); } /** * @param {!SDK.CSSStyleRule} newRule * @this {Elements.BlankStylePropertiesSection} */ function onAddedToCascade(newRule) { const doesSelectorAffectSelectedNode = this._matchedStyles.matchingSelectors(newRule).length > 0; this._makeNormal(newRule); if (!doesSelectorAffectSelectedNode) this.propertiesTreeOutline.element.classList.add('no-affect'); this._updateRuleOrigin(); this._parentPane.setUserOperation(false); this._editingSelectorEnded(); if (this.element.parentElement) // Might have been detached already. 
this.moveEditorFromSelector(moveDirection); this._markSelectorMatches(); this._editingSelectorCommittedForTest(); } if (newContent) newContent = newContent.trim(); this._parentPane.setUserOperation(true); const cssModel = this._parentPane.cssModel(); const ruleText = this._rulePrefix() + newContent + ' {}'; cssModel.addRule(this._styleSheetId, ruleText, this._ruleLocation).then(onRuleAdded.bind(this)); } /** * @override */ editingSelectorCancelled() { this._parentPane.setUserOperation(false); if (!this.isBlank) { super.editingSelectorCancelled(); return; } this._editingSelectorEnded(); this._parentPane.removeSection(this); } /** * @param {!SDK.CSSRule} newRule */ _makeNormal(newRule) { this.element.classList.remove('blank-section'); this._style = newRule.style; // FIXME: replace this instance by a normal Elements.StylePropertiesSection. this._normal = true; } }; Elements.StylePropertiesSection.MaxProperties = 50; Elements.KeyframePropertiesSection = class extends Elements.StylePropertiesSection { /** * @param {!Elements.StylesSidebarPane} stylesPane * @param {!SDK.CSSMatchedStyles} matchedStyles * @param {!SDK.CSSStyleDeclaration} style */ constructor(stylesPane, matchedStyles, style) { super(stylesPane, matchedStyles, style); this._selectorElement.className = 'keyframe-key'; } /** * @override * @return {string} */ _headerText() { return this._style.parentRule.key().text; } /** * @override * @param {!SDK.CSSRule} rule * @param {string} newContent * @return {!Promise} */ _setHeaderText(rule, newContent) { /** * @param {boolean} success * @this {Elements.KeyframePropertiesSection} */ function updateSourceRanges(success) { if (!success) return; this._parentPane._refreshUpdate(this); } console.assert(rule instanceof SDK.CSSKeyframeRule); const oldRange = rule.key().range; if (!oldRange) return Promise.resolve(); return rule.setKeyText(newContent).then(updateSourceRanges.bind(this)); } /** * @override * @param {string} propertyName * @return {boolean} */ 
isPropertyInherited(propertyName) { return false; } /** * @override * @param {!SDK.CSSProperty} property * @return {boolean} */ _isPropertyOverloaded(property) { return false; } /** * @override */ _markSelectorHighlights() { } /** * @override */ _markSelectorMatches() { this._selectorElement.textContent = this._style.parentRule.key().text; } /** * @override */ _highlight() { } }; Elements.StylesSidebarPane.CSSPropertyPrompt = class extends UI.TextPrompt { /** * @param {!Elements.StylePropertyTreeElement} treeElement * @param {boolean} isEditingName */ constructor(treeElement, isEditingName) { // Use the same callback both for applyItemCallback and acceptItemCallback. super(); this.initialize(this._buildPropertyCompletions.bind(this), UI.StyleValueDelimiters); this._isColorAware = SDK.cssMetadata().isColorAwareProperty(treeElement.property.name); this._cssCompletions = []; if (isEditingName) { this._cssCompletions = SDK.cssMetadata().allProperties(); if (!treeElement.node().isSVGNode()) this._cssCompletions = this._cssCompletions.filter(property => !SDK.cssMetadata().isSVGProperty(property)); } else { this._cssCompletions = SDK.cssMetadata().propertyValues(treeElement.nameElement.textContent); } this._treeElement = treeElement; this._isEditingName = isEditingName; this._cssVariables = treeElement.matchedStyles().availableCSSVariables(treeElement.property.ownerStyle); if (this._cssVariables.length < 1000) this._cssVariables.sort(String.naturalOrderComparator); else this._cssVariables.sort(); if (!isEditingName) { this.disableDefaultSuggestionForEmptyInput(); // If a CSS value is being edited that has a numeric or hex substring, hint that precision modifier shortcuts are available. if (treeElement && treeElement.valueElement) { const cssValueText = treeElement.valueElement.textContent; if (cssValueText.match(/#[\da-f]{3,6}$/i)) { this.setTitle(Common.UIString( 'Increment/decrement with mousewheel or up/down keys. %s: R ±1, Shift: G ±1, Alt: B ±1', Host.isMac() ? 
'Cmd' : 'Ctrl')); } else if (cssValueText.match(/\d+/)) { this.setTitle(Common.UIString( 'Increment/decrement with mousewheel or up/down keys. %s: ±100, Shift: ±10, Alt: ±0.1', Host.isMac() ? 'Cmd' : 'Ctrl')); } } } } /** * @override * @param {!Event} event */ onKeyDown(event) { switch (event.key) { case 'ArrowUp': case 'ArrowDown': case 'PageUp': case 'PageDown': if (this._handleNameOrValueUpDown(event)) { event.preventDefault(); return; } break; case 'Enter': // Accept any available autocompletions and advance to the next field. this.tabKeyPressed(); event.preventDefault(); return; } super.onKeyDown(event); } /** * @override * @param {!Event} event */ onMouseWheel(event) { if (this._handleNameOrValueUpDown(event)) { event.consume(true); return; } super.onMouseWheel(event); } /** * @override * @return {boolean} */ tabKeyPressed() { this.acceptAutoComplete(); // Always tab to the next field. return false; } /** * @param {!Event} event * @return {boolean} */ _handleNameOrValueUpDown(event) { /** * @param {string} originalValue * @param {string} replacementString * @this {Elements.StylesSidebarPane.CSSPropertyPrompt} */ function finishHandler(originalValue, replacementString) { // Synthesize property text disregarding any comments, custom whitespace etc. this._treeElement.applyStyleText( this._treeElement.nameElement.textContent + ': ' + this._treeElement.valueElement.textContent, false); } /** * @param {string} prefix * @param {number} number * @param {string} suffix * @return {string} * @this {Elements.StylesSidebarPane.CSSPropertyPrompt} */ function customNumberHandler(prefix, number, suffix) { if (number !== 0 && !suffix.length && SDK.cssMetadata().isLengthProperty(this._treeElement.property.name)) suffix = 'px'; return prefix + number + suffix; } // Handle numeric value increment/decrement only at this point. 
if (!this._isEditingName && this._treeElement.valueElement && UI.handleElementValueModifications( event, this._treeElement.valueElement, finishHandler.bind(this), this._isValueSuggestion.bind(this), customNumberHandler.bind(this))) return true; return false; } /** * @param {string} word * @return {boolean} */ _isValueSuggestion(word) { if (!word) return false; word = word.toLowerCase(); return this._cssCompletions.indexOf(word) !== -1 || word.startsWith('--'); } /** * @param {string} expression * @param {string} query * @param {boolean=} force * @return {!Promise<!UI.SuggestBox.Suggestions>} */ _buildPropertyCompletions(expression, query, force) { const lowerQuery = query.toLowerCase(); const editingVariable = !this._isEditingName && expression.trim().endsWith('var('); if (!query && !force && !editingVariable && (this._isEditingName || expression)) return Promise.resolve([]); const prefixResults = []; const anywhereResults = []; if (!editingVariable) this._cssCompletions.forEach(completion => filterCompletions.call(this, completion, false /* variable */)); if (this._isEditingName || editingVariable) this._cssVariables.forEach(variable => filterCompletions.call(this, variable, true /* variable */)); const results = prefixResults.concat(anywhereResults); if (!this._isEditingName && !results.length && query.length > 1 && '!important'.startsWith(lowerQuery)) results.push({text: '!important'}); const userEnteredText = query.replace('-', ''); if (userEnteredText && (userEnteredText === userEnteredText.toUpperCase())) { for (let i = 0; i < results.length; ++i) { if (!results[i].text.startsWith('--')) results[i].text = results[i].text.toUpperCase(); } } if (editingVariable) { results.forEach(result => { result.title = result.text; result.text += ')'; }); } if (this._isColorAware && !this._isEditingName) { results.stableSort((a, b) => { if (!!a.subtitleRenderer === !!b.subtitleRenderer) return 0; return a.subtitleRenderer ? 
-1 : 1; }); } return Promise.resolve(results); /** * @param {string} completion * @param {boolean} variable * @this {Elements.StylesSidebarPane.CSSPropertyPrompt} */ function filterCompletions(completion, variable) { const index = completion.toLowerCase().indexOf(lowerQuery); const result = {text: completion}; if (variable) { const computedValue = this._treeElement.matchedStyles().computeCSSVariable(this._treeElement.property.ownerStyle, completion); if (computedValue) { const color = Common.Color.parse(computedValue); if (color) result.subtitleRenderer = swatchRenderer.bind(null, color); } } if (index === 0) { result.priority = this._isEditingName ? SDK.cssMetadata().propertyUsageWeight(completion) : 1; prefixResults.push(result); } else if (index > -1) { anywhereResults.push(result); } } /** * @param {!Common.Color} color * @return {!Element} */ function swatchRenderer(color) { const swatch = InlineEditor.ColorSwatch.create(); swatch.hideText(true); swatch.setColor(color); swatch.style.pointerEvents = 'none'; return swatch; } } }; Elements.StylesSidebarPropertyRenderer = class { /** * @param {?SDK.CSSRule} rule * @param {?SDK.DOMNode} node * @param {string} name * @param {string} value */ constructor(rule, node, name, value) { this._rule = rule; this._node = node; this._propertyName = name; this._propertyValue = value; /** @type {?function(string):!Node} */ this._colorHandler = null; /** @type {?function(string):!Node} */ this._bezierHandler = null; /** @type {?function(string, string):!Node} */ this._shadowHandler = null; /** @type {?function(string):!Node} */ this._varHandler = createTextNode; } /** * @param {function(string):!Node} handler */ setColorHandler(handler) { this._colorHandler = handler; } /** * @param {function(string):!Node} handler */ setBezierHandler(handler) { this._bezierHandler = handler; } /** * @param {function(string, string):!Node} handler */ setShadowHandler(handler) { this._shadowHandler = handler; } /** * @param 
{function(string):!Node} handler */ setVarHandler(handler) { this._varHandler = handler; } /** * @return {!Element} */ renderName() { const nameElement = createElement('span'); nameElement.className = 'webkit-css-property'; nameElement.textContent = this._propertyName; nameElement.normalize(); return nameElement; } /** * @return {!Element} */ renderValue() { const valueElement = createElement('span'); valueElement.className = 'value'; if (!this._propertyValue) return valueElement; if (this._shadowHandler && (this._propertyName === 'box-shadow' || this._propertyName === 'text-shadow' || this._propertyName === '-webkit-box-shadow') && !SDK.CSSMetadata.VariableRegex.test(this._propertyValue)) { valueElement.appendChild(this._shadowHandler(this._propertyValue, this._propertyName)); valueElement.normalize(); return valueElement; } const regexes = [SDK.CSSMetadata.VariableRegex, SDK.CSSMetadata.URLRegex]; const processors = [this._varHandler, this._processURL.bind(this)]; if (this._bezierHandler && SDK.cssMetadata().isBezierAwareProperty(this._propertyName)) { regexes.push(UI.Geometry.CubicBezier.Regex); processors.push(this._bezierHandler); } if (this._colorHandler && SDK.cssMetadata().isColorAwareProperty(this._propertyName)) { regexes.push(Common.Color.Regex); processors.push(this._colorHandler); } const results = TextUtils.TextUtils.splitStringByRegexes(this._propertyValue, regexes); for (let i = 0; i < results.length; i++) { const result = results[i]; const processor = result.regexIndex === -1 ? createTextNode : processors[result.regexIndex]; valueElement.appendChild(processor(result.value)); } valueElement.normalize(); return valueElement; } /** * @param {string} text * @return {!Node} */ _processURL(text) { // Strip "url(" and ")" along with whitespace. 
let url = text.substring(4, text.length - 1).trim(); const isQuoted = /^'.*'$/.test(url) || /^".*"$/.test(url); if (isQuoted) url = url.substring(1, url.length - 1); const container = createDocumentFragment(); container.createTextChild('url('); let hrefUrl = null; if (this._rule && this._rule.resourceURL()) hrefUrl = Common.ParsedURL.completeURL(this._rule.resourceURL(), url); else if (this._node) hrefUrl = this._node.resolveURL(url); container.appendChild(Components.Linkifier.linkifyURL(hrefUrl || url, {text: url, preventClick: true})); container.createTextChild(')'); return container; } }; /** * @implements {UI.ToolbarItem.Provider} */ Elements.StylesSidebarPane.ButtonProvider = class { constructor() { this._button = new UI.ToolbarButton(Common.UIString('New Style Rule'), 'largeicon-add'); this._button.addEventListener(UI.ToolbarButton.Events.Click, this._clicked, this); const longclickTriangle = UI.Icon.create('largeicon-longclick-triangle', 'long-click-glyph'); this._button.element.appendChild(longclickTriangle); new UI.LongClickController(this._button.element, this._longClicked.bind(this)); UI.context.addFlavorChangeListener(SDK.DOMNode, onNodeChanged.bind(this)); onNodeChanged.call(this); /** * @this {Elements.StylesSidebarPane.ButtonProvider} */ function onNodeChanged() { let node = UI.context.flavor(SDK.DOMNode); node = node ? node.enclosingElementOrSelf() : null; this._button.setEnabled(!!node); } } /** * @param {!Common.Event} event */ _clicked(event) { Elements.StylesSidebarPane._instance._createNewRuleInViaInspectorStyleSheet(); } /** * @param {!Event} e */ _longClicked(e) { Elements.StylesSidebarPane._instance._onAddButtonLongClick(e); } /** * @override * @return {!UI.ToolbarItem} */ item() { return this._button; } };<|fim▁end|>
this._matchedStyles.resetActiveProperties(); this._parentPane._refreshUpdate(this); }
<|file_name|>mac_dev.py<|end_file_name|><|fim▁begin|># coding=utf-8 __author__ = "Daniel Arroyo <[email protected]>" __license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html' import logging <|fim▁hole|> def __init__(self): self.name = "astrobox-dev" self.logger = logging.getLogger(__name__) super(MacDevNetworkManager, self).__init__() def getActiveConnections(self): return { 'wired': { 'id': 'localhost', 'signal': None, 'name': 'Localhost', 'ip': '127.0.0.1:5000', 'secured': True }, 'wireless': None, 'manual': None } def storedWifiNetworks(self): return [ {'id': '1', 'name': 'Test Connection 1', 'active': True}, {'id': '2', 'name': 'Test Connection 2', 'active': False}, {'id': '3', 'name': 'Test Connection 3', 'active': False} ] def deleteStoredWifiNetwork(self, networkId): return (networkId in [c['id'] for c in self.storedWifiNetworks()]) def hasWifi(self): return False def isOnline(self): return True def startHotspot(self): # return True when succesful return "Not supporded on Mac" def stopHotspot(self): # return True when succesful return "Not supporded on Mac" def getHostname(self): return self.name def setHostname(self, name): self.name = name self.logger.info('Host name is set to %s ' % name) return True<|fim▁end|>
from astroprint.network import NetworkManager as NetworkManagerBase class MacDevNetworkManager(NetworkManagerBase):
<|file_name|>TestValidator_float.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one * # or more contributor license agreements. See the NOTICE file * # distributed with this work for additional information * # regarding copyright ownership. The ASF licenses this file * # to you under the Apache License, Version 2.0 (the * # "License"); you may not use this file except in compliance * # with the License. You may obtain a copy of the License at * # * # http://www.apache.org/licenses/LICENSE-2.0 * # * # Unless required by applicable law or agreed to in writing, * # software distributed under the License is distributed on an * # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * # KIND, either express or implied. See the License for the * # specific language governing permissions and limitations * # under the License. import unittest from etch.python.Types import * from etch.binding.support.Validator_float import * from etch.binding.transport.fmt.TypeCode import * class Test_EtchSupportValidator_float(unittest.TestCase): def _test_validator(self): _test = self._test # TODO - resolve casting issues in validator tests _test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MAX_VALUE), "1") _test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MIN_VALUE), "2" ) _test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MAX_VALUE), "3" )<|fim▁hole|> _test(1, "float[1]", TypeCode.ARRAY, [Float], [], 1) _test(2, "float[2]", TypeCode.ARRAY, [[Float]], [[]], [] ) _test(3, "float[3]", TypeCode.ARRAY, [[[Float]]], [[[]]], [[]] ) _test(4, "float[4]", TypeCode.ARRAY, [[[[Float]]]], [[[[]]]], [[[]]] ) _test(5, "float[5]", TypeCode.ARRAY, [[[[[Float]]]]], [[[[[]]]]], [[[[]]]] ) def test_getNeg1(self): self.assertRaises(IllegalArgumentException, Validator_float.get, -1) def test_getNeg2(self): self.assertRaises(IllegalArgumentException, Validator_float.get(0).elementValidator) def test_getMaxPlusOne(self): 
self.assertRaises(IllegalArgumentException, Validator_float.get, Validator.MAX_NDIMS + 1) def _test(self, n, s, tc, clss, good, bad): v = Validator_float.get(n) self.assertEqual(n, v.getNDims()) self.assertEqual(clss, v.getExpectedClass()) self.assertEqual(s, repr(v)) self.assertEqual(True, v.validate(good)) self.assertEqual(False, v.validate(bad)) self.assertEqual(tc, v.checkValue(good)) self.assertEqual(None, v.checkValue(bad)) if n > 0: self.assertEqual(n-1, v.elementValidator().getNDims()) if __name__=='__main__': unittest.main()<|fim▁end|>
_test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MIN_VALUE), "4" ) _test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MAX_VALUE), "5" ) _test(0, "float[0]", TypeCode.FLOAT4, Float, Float(Float.MIN_VALUE), "abc" )
<|file_name|>rolebinding.go<|end_file_name|><|fim▁begin|>/* Copyright 2016 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package kubectl import ( "fmt" "strings" "k8s.io/apimachinery/pkg/runtime" "k8s.io/kubernetes/pkg/apis/rbac" ) // RoleBindingGeneratorV1 supports stable generation of a roleBinding. type RoleBindingGeneratorV1 struct { // Name of roleBinding (required) Name string // ClusterRole for the roleBinding ClusterRole string // Role for the roleBinding Role string // Users to derive the roleBinding from (optional) Users []string // Groups to derive the roleBinding from (optional) Groups []string // ServiceAccounts to derive the roleBinding from in namespace:name format(optional) ServiceAccounts []string } // Ensure it supports the generator pattern that uses parameter injection. var _ Generator = &RoleBindingGeneratorV1{} // Ensure it supports the generator pattern that uses parameters specified during construction. var _ StructuredGenerator = &RoleBindingGeneratorV1{} // Generate returns a roleBinding using the specified parameters. 
func (s RoleBindingGeneratorV1) Generate(genericParams map[string]interface{}) (runtime.Object, error) { err := ValidateParams(s.ParamNames(), genericParams) if err != nil { return nil, err } delegate := &RoleBindingGeneratorV1{} fromFileStrings, found := genericParams["user"] if found { fromFileArray, isArray := fromFileStrings.([]string) if !isArray { return nil, fmt.Errorf("expected []string, found :%v", fromFileStrings) } delegate.Users = fromFileArray delete(genericParams, "user") } fromLiteralStrings, found := genericParams["group"] if found { fromLiteralArray, isArray := fromLiteralStrings.([]string) if !isArray { return nil, fmt.Errorf("expected []string, found :%v", fromFileStrings) }<|fim▁hole|> if found { fromLiteralArray, isArray := fromSAStrings.([]string) if !isArray { return nil, fmt.Errorf("expected []string, found :%v", fromFileStrings) } delegate.ServiceAccounts = fromLiteralArray delete(genericParams, "serviceaccounts") } params := map[string]string{} for key, value := range genericParams { strVal, isString := value.(string) if !isString { return nil, fmt.Errorf("expected string, saw %v for '%s'", value, key) } params[key] = strVal } delegate.Name = params["name"] delegate.ClusterRole = params["clusterrole"] delegate.Role = params["role"] return delegate.StructuredGenerate() } // ParamNames returns the set of supported input parameters when using the parameter injection generator pattern. func (s RoleBindingGeneratorV1) ParamNames() []GeneratorParam { return []GeneratorParam{ {"name", true}, {"clusterrole", false}, {"role", false}, {"user", false}, {"group", false}, {"serviceaccount", false}, {"force", false}, } } // StructuredGenerate outputs a roleBinding object using the configured fields. 
func (s RoleBindingGeneratorV1) StructuredGenerate() (runtime.Object, error) { if err := s.validate(); err != nil { return nil, err } roleBinding := &rbac.RoleBinding{} roleBinding.Name = s.Name switch { case len(s.Role) > 0: roleBinding.RoleRef = rbac.RoleRef{ APIGroup: rbac.GroupName, Kind: "Role", Name: s.Role, } case len(s.ClusterRole) > 0: roleBinding.RoleRef = rbac.RoleRef{ APIGroup: rbac.GroupName, Kind: "ClusterRole", Name: s.ClusterRole, } } for _, user := range s.Users { roleBinding.Subjects = append(roleBinding.Subjects, rbac.Subject{ Kind: rbac.UserKind, APIVersion: "rbac.authorization.k8s.io/v1beta1", Name: user, }) } for _, group := range s.Groups { roleBinding.Subjects = append(roleBinding.Subjects, rbac.Subject{ Kind: rbac.GroupKind, APIVersion: "rbac.authorization.k8s.io/v1beta1", Name: group, }) } for _, sa := range s.ServiceAccounts { tokens := strings.Split(sa, ":") if len(tokens) != 2 { return nil, fmt.Errorf("serviceaccount must be <namespace>:<name>") } roleBinding.Subjects = append(roleBinding.Subjects, rbac.Subject{ Kind: rbac.ServiceAccountKind, Namespace: tokens[0], Name: tokens[1], }) } return roleBinding, nil } // validate validates required fields are set to support structured generation. func (s RoleBindingGeneratorV1) validate() error { if len(s.Name) == 0 { return fmt.Errorf("name must be specified") } if (len(s.ClusterRole) == 0) == (len(s.Role) == 0) { return fmt.Errorf("exactly one of clusterrole or role must be specified") } return nil }<|fim▁end|>
delegate.Groups = fromLiteralArray delete(genericParams, "group") } fromSAStrings, found := genericParams["serviceaccount"]
<|file_name|>dump_dependency_json.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from __future__ import print_function import os import gyp import gyp.common import gyp.msvs_emulation import json generator_supports_multiple_toolsets = True generator_wants_static_library_dependencies_adjusted = False generator_filelist_paths = { } generator_default_variables = { } for dirname in ['INTERMEDIATE_DIR', 'SHARED_INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']: # Some gyp steps fail if these are empty(!). generator_default_variables[dirname] = 'dir' for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME', 'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT', 'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX', 'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX', 'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX', 'CONFIGURATION_NAME']: generator_default_variables[unused] = '' def CalculateVariables(default_variables, params): generator_flags = params.get('generator_flags', {}) for key, val in generator_flags.items(): default_variables.setdefault(key, val) default_variables.setdefault('OS', gyp.common.GetFlavor(params)) flavor = gyp.common.GetFlavor(params) if flavor =='win': # # Copy additional generator configuration data from VS, which is shared by the Windows Ninja generator. 
# import gyp.generator.msvs as msvs_generator # generator_additional_non_configuration_keys = getattr(msvs_generator, 'generator_additional_non_configuration_keys', []) # generator_additional_path_sections = getattr(msvs_generator, 'generator_additional_path_sections', []) gyp.msvs_emulation.CalculateCommonVariables(default_variables, params) def CalculateGeneratorInputInfo(params): """Calculate the generator specific info that gets fed to input (called by gyp).""" generator_flags = params.get('generator_flags', {}) if generator_flags.get('adjust_static_libraries', False): global generator_wants_static_library_dependencies_adjusted generator_wants_static_library_dependencies_adjusted = True toplevel = params['options'].toplevel_dir generator_dir = os.path.relpath(params['options'].generator_output or '.') # output_dir: relative path from generator_dir to the build directory. output_dir = generator_flags.get('output_dir', 'out') qualified_out_dir = os.path.normpath(os.path.join( toplevel, generator_dir, output_dir, 'gypfiles')) global generator_filelist_paths generator_filelist_paths = { 'toplevel': toplevel, 'qualified_out_dir': qualified_out_dir, } def GenerateOutput(target_list, target_dicts, _, params): # Map of target -> list of targets it depends on. edges = {} # Queue of targets to visit. targets_to_visit = target_list[:] <|fim▁hole|> if target in edges: continue edges[target] = [] for dep in target_dicts[target].get('dependencies', []): edges[target].append(dep) targets_to_visit.append(dep) try: filepath = params['generator_flags']['output_dir'] except KeyError: filepath = '.' filename = os.path.join(filepath, 'dump.json') f = open(filename, 'w') json.dump(edges, f) f.close() print('Wrote json to %s.' % filename)<|fim▁end|>
while len(targets_to_visit) > 0: target = targets_to_visit.pop()
<|file_name|>TftpContexts.py<|end_file_name|><|fim▁begin|># vim: ts=4 sw=4 et ai: """This module implements all contexts for state handling during uploads and downloads, the main interface to which being the TftpContext base class. The concept is simple. Each context object represents a single upload or download, and the state object in the context object represents the current state of that transfer. The state object has a handle() method that expects the next packet in the transfer, and returns a state object until the transfer is complete, at which point it returns None. That is, unless there is a fatal error, in which case a TftpException is returned instead.""" import logging import os import socket import sys import time from .TftpPacketFactory import TftpPacketFactory from .TftpPacketTypes import * from .TftpShared import * from .TftpStates import * log = logging.getLogger("tftpy.TftpContext") ############################################################################### # Utility classes ############################################################################### class TftpMetrics: """A class representing metrics of the transfer.""" def __init__(self): # Bytes transferred self.bytes = 0 # Bytes re-sent self.resent_bytes = 0 # Duplicate packets received self.dups = {} self.dupcount = 0 # Times self.start_time = 0 self.end_time = 0 self.duration = 0 # Rates self.bps = 0 self.kbps = 0 # Generic errors self.errors = 0 def compute(self): # Compute transfer time self.duration = self.end_time - self.start_time if self.duration == 0: self.duration = 1 log.debug("TftpMetrics.compute: duration is %s", self.duration) self.bps = (self.bytes * 8.0) / self.duration self.kbps = self.bps / 1024.0 log.debug("TftpMetrics.compute: kbps is %s", self.kbps) for key in self.dups: self.dupcount += self.dups[key] def add_dup(self, pkt): """This method adds a dup for a packet to the metrics.""" log.debug("Recording a dup of %s", pkt) s = str(pkt) if s in self.dups: self.dups[s] 
+= 1 else: self.dups[s] = 1 tftpassert(self.dups[s] < MAX_DUPS, "Max duplicates reached") ############################################################################### # Context classes ############################################################################### class TftpContext: """The base class of the contexts.""" def __init__(self, host, port, timeout, retries=DEF_TIMEOUT_RETRIES, localip=""): """Constructor for the base context, setting shared instance variables.""" self.file_to_transfer = None self.fileobj = None self.options = None self.packethook = None self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if localip != "": self.sock.bind((localip, 0)) self.sock.settimeout(timeout) self.timeout = timeout self.retries = retries self.state = None self.next_block = 0 self.factory = TftpPacketFactory() # Note, setting the host will also set self.address, as it's a property. self.host = host self.port = port # The port associated with the TID self.tidport = None # Metrics self.metrics = TftpMetrics() # Fluag when the transfer is pending completion. self.pending_complete = False # Time when this context last received any traffic. # FIXME: does this belong in metrics? self.last_update = 0 # The last packet we sent, if applicable, to make resending easy. self.last_pkt = None # Count the number of retry attempts. self.retry_count = 0 def getBlocksize(self): """Fetch the current blocksize for this session.""" return int(self.options.get("blksize", 512)) def __del__(self): """Simple destructor to try to call housekeeping in the end method if not called explicitly. 
Leaking file descriptors is not a good thing.""" self.end() def checkTimeout(self, now): """Compare current time with last_update time, and raise an exception if we're over the timeout time.""" log.debug("checking for timeout on session %s", self) if now - self.last_update > self.timeout: raise TftpTimeout("Timeout waiting for traffic") def start(self): raise NotImplementedError("Abstract method") def end(self, close_fileobj=True): """Perform session cleanup, since the end method should always be called explicitly by the calling code, this works better than the destructor. Set close_fileobj to False so fileobj can be returned open.""" log.debug("in TftpContext.end - closing socket") self.sock.close() if close_fileobj and self.fileobj is not None and not self.fileobj.closed: log.debug("self.fileobj is open - closing") self.fileobj.close() def gethost(self): """ Simple getter method for use in a property. """ return self.__host def sethost(self, host): """ Setter method that also sets the address property as a result of the host that is set. """ self.__host = host self.address = socket.gethostbyname(host) host = property(gethost, sethost) def setNextBlock(self, block): if block >= 2 ** 16: log.debug("Block number rollover to 0 again") block = 0 self.__eblock = block def getNextBlock(self): return self.__eblock next_block = property(getNextBlock, setNextBlock) def cycle(self): """ Here we wait for a response from the server after sending it something, and dispatch appropriate action to that response. """ try: (buffer, (raddress, rport)) = self.sock.recvfrom(MAX_BLKSIZE) except socket.timeout: log.warning("Timeout waiting for traffic, retrying...") raise TftpTimeout("Timed-out waiting for traffic") # Ok, we've received a packet. Log it. log.debug("Received %d bytes from %s:%s", len(buffer), raddress, rport) # And update our last updated time. self.last_update = time.time() # Decode it. recvpkt = self.factory.parse(buffer) # Check for known "connection". 
if raddress != self.address: log.warning( "Received traffic from %s, expected host %s. Discarding" % (raddress, self.host) ) if self.tidport and self.tidport != rport: log.warning( "Received traffic from %s:%s but we're " "connected to %s:%s. Discarding." % (raddress, rport, self.host, self.tidport) ) # If there is a packethook defined, call it. We unconditionally # pass all packets, it's up to the client to screen out different # kinds of packets. This way, the client is privy to things like # negotiated options. if self.packethook: self.packethook(recvpkt) # And handle it, possibly changing state. self.state = self.state.handle(recvpkt, raddress, rport) # If we didn't throw any exceptions here, reset the retry_count to # zero. self.retry_count = 0 class TftpContextServer(TftpContext): """The context for the server.""" def __init__( self, host, port, timeout, root, dyn_file_func=None, upload_open=None, retries=DEF_TIMEOUT_RETRIES, ): TftpContext.__init__(self, host, port, timeout, retries) # At this point we have no idea if this is a download or an upload. We # need to let the start state determine that. self.state = TftpStateServerStart(self) self.root = root self.dyn_file_func = dyn_file_func self.upload_open = upload_open def __str__(self): return f"{self.host}:{self.port} {self.state}" def start(self, buffer): """ Start the state cycle. Note that the server context receives an initial packet in its start method. Also note that the server does not loop on cycle(), as it expects the TftpServer object to manage that. """ log.debug("In TftpContextServer.start") self.metrics.start_time = time.time() log.debug("Set metrics.start_time to %s", self.metrics.start_time) # And update our last updated time. self.last_update = time.time() pkt = self.factory.parse(buffer) log.debug("TftpContextServer.start() - factory returned a %s", pkt) # Call handle once with the initial packet. This should put us into # the download or the upload state. 
self.state = self.state.handle(pkt, self.host, self.port) def end(self): """Finish up the context.""" TftpContext.end(self) self.metrics.end_time = time.time() log.debug("Set metrics.end_time to %s", self.metrics.end_time) self.metrics.compute() class TftpContextClientUpload(TftpContext): """The upload context for the client during an upload. Note: If input is a hyphen, then we will use stdin.""" def __init__( self, host, port, filename, input, options, packethook, timeout, retries=DEF_TIMEOUT_RETRIES, localip="", ): TftpContext.__init__(self, host, port, timeout, retries, localip) self.file_to_transfer = filename self.options = options self.packethook = packethook # If the input object has a read() function, # assume it is file-like. if hasattr(input, "read"): self.fileobj = input elif input == "-": self.fileobj = sys.stdin.buffer else: self.fileobj = open(input, "rb") log.debug("TftpContextClientUpload.__init__()") log.debug( "file_to_transfer = %s, options = %s" % (self.file_to_transfer, self.options) ) def __str__(self): return f"{self.host}:{self.port} {self.state}" def start(self): log.info("Sending tftp upload request to %s" % self.host) log.info(" filename -> %s" % self.file_to_transfer) log.info(" options -> %s" % self.options) self.metrics.start_time = time.time() log.debug("Set metrics.start_time to %s" % self.metrics.start_time) # FIXME: put this in a sendWRQ method? pkt = TftpPacketWRQ() pkt.filename = self.file_to_transfer pkt.mode = "octet" # FIXME - shouldn't hardcode this pkt.options = self.options self.sock.sendto(pkt.encode().buffer, (self.host, self.port)) self.next_block = 1 self.last_pkt = pkt # FIXME: should we centralize sendto operations so we can refactor all # saving of the packet to the last_pkt field? 
self.state = TftpStateSentWRQ(self) while self.state: try: log.debug("State is %s" % self.state) self.cycle() except TftpTimeout as err: log.error(str(err)) self.retry_count += 1 if self.retry_count >= self.retries: log.debug("hit max retries, giving up") raise else: log.warning("resending last packet") self.state.resendLast() <|fim▁hole|> """Finish up the context.""" TftpContext.end(self) self.metrics.end_time = time.time() log.debug("Set metrics.end_time to %s" % self.metrics.end_time) self.metrics.compute() class TftpContextClientDownload(TftpContext): """The download context for the client during a download. Note: If output is a hyphen, then the output will be sent to stdout.""" def __init__( self, host, port, filename, output, options, packethook, timeout, retries=DEF_TIMEOUT_RETRIES, localip="", ): TftpContext.__init__(self, host, port, timeout, retries, localip) # FIXME: should we refactor setting of these params? self.file_to_transfer = filename self.options = options self.packethook = packethook self.filelike_fileobj = False # If the output object has a write() function, # assume it is file-like. if hasattr(output, "write"): self.fileobj = output self.filelike_fileobj = True # If the output filename is -, then use stdout elif output == "-": self.fileobj = sys.stdout self.filelike_fileobj = True else: self.fileobj = open(output, "wb") log.debug("TftpContextClientDownload.__init__()") log.debug( "file_to_transfer = %s, options = %s" % (self.file_to_transfer, self.options) ) def __str__(self): return f"{self.host}:{self.port} {self.state}" def start(self): """Initiate the download.""" log.info("Sending tftp download request to %s" % self.host) log.info(" filename -> %s" % self.file_to_transfer) log.info(" options -> %s" % self.options) self.metrics.start_time = time.time() log.debug("Set metrics.start_time to %s" % self.metrics.start_time) # FIXME: put this in a sendRRQ method? 
pkt = TftpPacketRRQ() pkt.filename = self.file_to_transfer pkt.mode = "octet" # FIXME - shouldn't hardcode this pkt.options = self.options self.sock.sendto(pkt.encode().buffer, (self.host, self.port)) self.next_block = 1 self.last_pkt = pkt self.state = TftpStateSentRRQ(self) while self.state: try: log.debug("State is %s" % self.state) self.cycle() except TftpTimeout as err: log.error(str(err)) self.retry_count += 1 if self.retry_count >= self.retries: log.debug("hit max retries, giving up") raise else: log.warning("resending last packet") self.state.resendLast() except TftpFileNotFoundError as err: # If we received file not found, then we should not save the open # output file or we'll be left with a size zero file. Delete it, # if it exists. log.error("Received File not found error") if self.fileobj is not None and not self.filelike_fileobj: if os.path.exists(self.fileobj.name): log.debug("unlinking output file of %s", self.fileobj.name) os.unlink(self.fileobj.name) raise def end(self): """Finish up the context.""" TftpContext.end(self, not self.filelike_fileobj) self.metrics.end_time = time.time() log.debug("Set metrics.end_time to %s" % self.metrics.end_time) self.metrics.compute()<|fim▁end|>
def end(self):
<|file_name|>NewOrExistingModelWizard.js<|end_file_name|><|fim▁begin|>/** * @license * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ CLASS({ package: 'foam.apps.builder.wizard', name: 'NewOrExistingModelWizard', extends: 'foam.apps.builder.wizard.NewOrExistingWizard', requires: [ 'foam.apps.builder.wizard.ModelWizard', 'foam.apps.builder.wizard.ModelPreviewWizard', ], imports: [ 'modelDAO', ], exports: [ 'editView', 'innerEditView', ], properties: [ { name: 'data', postSet: function(old,nu) { if ( nu.baseModelId ) this.baseModel = nu.baseModelId; } }, { type: 'Model', name: 'baseModel', help: 'The list is filtered to only include models that extend baseModel.', postSet: function() { if ( this.modelDAO ) { this.existingDAO = this.modelDAO.where(EQ(Model.EXTENDS, this.baseModel.id)); } } }, { name: 'modelDAO', postSet: function(old,nu) { if ( this.baseModel ) { this.existingDAO = this.modelDAO.where(EQ(Model.EXTENDS, this.baseModel.id)); }<|fim▁hole|> label: 'Create a new Data Model', defaultValue: { factory_: 'foam.apps.builder.wizard.ModelWizard' }, }, { name: 'existingViewFactory', label: 'Copy an existing Data Model', defaultValue: null, }, { name: 'nextViewFactory', lazyFactory: function() { return this.newViewFactory; }, }, { name: 'selection', }, { name: 'existingDAO', view: { factory_: 'foam.ui.md.DAOListView', rowView: 'foam.apps.builder.datamodels.ModelCitationView', } }, { model_: 'foam.apps.builder.wizard.WizardViewFactoryProperty', name: 'editView', defaultValue: { factory_: 'foam.apps.builder.wizard.ModelPreviewWizard' }, }, { model_: 'foam.apps.builder.wizard.WizardViewFactoryProperty', name: 'innerEditView', defaultValue: function() {}, }, ], methods: [ function onNext() { this.SUPER(); if ( this.selection && 
this.nextViewFactory === this.existingViewFactory ) { this.data.getDataConfig().model = this.selection; } } ], });<|fim▁end|>
}, }, { name: 'newViewFactory',
<|file_name|>vendor.rs<|end_file_name|><|fim▁begin|>use crate::command_prelude::*; use cargo::ops; use std::path::PathBuf; pub fn cli() -> App { subcommand("vendor") .about("Vendor all dependencies for a project locally") .arg(opt("quiet", "No output printed to stdout").short("q")) .arg_manifest_path() .arg(Arg::with_name("path").help("Where to vendor crates (`vendor` by default)")) .arg( Arg::with_name("no-delete") .long("no-delete") .help("Don't delete older crates in the vendor directory"), ) .arg( Arg::with_name("tomls") .short("s") .long("sync") .help("Additional `Cargo.toml` to sync and vendor") .value_name("TOML") .multiple(true), ) .arg( Arg::with_name("respect-source-config") .long("respect-source-config") .help("Respect `[source]` config in `.cargo/config`") .multiple(true), ) .arg( Arg::with_name("versioned-dirs") .long("versioned-dirs") .help("Always include version in subdir name"), ) .arg( Arg::with_name("no-merge-sources") .long("no-merge-sources") .hidden(true), )<|fim▁hole|> .hidden(true), ) .arg( Arg::with_name("only-git-deps") .long("only-git-deps") .hidden(true), ) .arg( Arg::with_name("disallow-duplicates") .long("disallow-duplicates") .hidden(true), ) .after_help("Run `cargo help vendor` for more detailed information.\n") } pub fn exec(config: &mut Config, args: &ArgMatches<'_>) -> CliResult { // We're doing the vendoring operation ourselves, so we don't actually want // to respect any of the `source` configuration in Cargo itself. That's // intended for other consumers of Cargo, but we want to go straight to the // source, e.g. crates.io, to fetch crates. if !args.is_present("respect-source-config") { config.values_mut()?.remove("source"); } // When we moved `cargo vendor` into Cargo itself we didn't stabilize a few // flags, so try to provide a helpful error message in that case to ensure // that users currently using the flag aren't tripped up. 
let crates_io_cargo_vendor_flag = if args.is_present("no-merge-sources") { Some("--no-merge-sources") } else if args.is_present("relative-path") { Some("--relative-path") } else if args.is_present("only-git-deps") { Some("--only-git-deps") } else if args.is_present("disallow-duplicates") { Some("--disallow-duplicates") } else { None }; if let Some(flag) = crates_io_cargo_vendor_flag { return Err(anyhow::format_err!( "\ the crates.io `cargo vendor` command has now been merged into Cargo itself and does not support the flag `{}` currently; to continue using the flag you can execute `cargo-vendor vendor ...`, and if you would like to see this flag supported in Cargo itself please feel free to file an issue at https://github.com/rust-lang/cargo/issues/new ", flag ) .into()); } let ws = args.workspace(config)?; let path = args .value_of_os("path") .map(|val| PathBuf::from(val.to_os_string())) .unwrap_or_else(|| PathBuf::from("vendor")); ops::vendor( &ws, &ops::VendorOptions { no_delete: args.is_present("no-delete"), destination: &path, versioned_dirs: args.is_present("versioned-dirs"), extra: args .values_of_os("tomls") .unwrap_or_default() .map(|s| PathBuf::from(s.to_os_string())) .collect(), }, )?; Ok(()) }<|fim▁end|>
.arg( Arg::with_name("relative-path") .long("relative-path")
<|file_name|>P06_RankIt.py<|end_file_name|><|fim▁begin|># You have been given an array A consisting of N integers. All the elements in this array A are unique. You have to # answer some queries based on the elements of this array. Each query will consist of a single integer x. You need to # print the rank based position of this element in this array considering that the array is 1 indexed. The rank # based position of an element in an array is its position in the array when the array has been sorted in ascending order. # # Note: It is guaranteed that all the elements in this array are unique and for each x belonging to a query, value ′x′ # shall exist in the array # # Input Format # # The first line consists of a single integer N denoting the size of array A. The next line contains N unique integers, # denoting the content of array A. The next line contains a single integer q denoting the number of queries. Each of # the next q lines contains a single integer x denoting the element whose rank based position needs to be printed. # # Output Format # # You need to print q integers denoting the answer to each query. # # Constraints # # 1≤N≤105 # 1≤A[i]≤109 # 1≤q≤105 # 1≤x≤109 # # SAMPLE INPUT # 5 # 1 2 3 4 5 # 5 # 1 # 2 # 3 # 4 # 5 # # SAMPLE OUTPUT # 1 # 2 # 3 # 4 # 5 n = int(input()) array = [int(i) for i in input().split()] array.insert(0, 0)<|fim▁hole|>def binarySearch(low, high, element): while(low <= high): mid = (low + high) // 2 if array[mid] == element: return mid elif array[mid] < element: low = mid + 1 else: high = mid - 1 for i in range(q): number = int(input()) print(binarySearch(0,len(array), number))<|fim▁end|>
array.sort() q = int(input())
<|file_name|>Movie2KTo.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from ..internal.DeadCrypter import DeadCrypter <|fim▁hole|> __type__ = "crypter" __version__ = "0.56" __status__ = "stable" __pattern__ = r'http://(?:www\.)?movie2k\.to/(.+)\.html' __config__ = [("activated", "bool", "Activated", True)] __description__ = """Movie2k.to decrypter plugin""" __license__ = "GPLv3" __authors__ = [("4Christopher", "[email protected]")]<|fim▁end|>
class Movie2KTo(DeadCrypter): __name__ = "Movie2KTo"
<|file_name|>scene.py<|end_file_name|><|fim▁begin|>"""Support for Lutron Caseta scenes.""" from typing import Any from homeassistant.components.scene import Scene<|fim▁hole|> async def async_setup_entry(hass, config_entry, async_add_entities): """Set up the Lutron Caseta scene platform. Adds scenes from the Caseta bridge associated with the config_entry as scene entities. """ entities = [] data = hass.data[CASETA_DOMAIN][config_entry.entry_id] bridge = data[BRIDGE_LEAP] scenes = bridge.get_scenes() for scene in scenes: entity = LutronCasetaScene(scenes[scene], bridge) entities.append(entity) async_add_entities(entities, True) class LutronCasetaScene(Scene): """Representation of a Lutron Caseta scene.""" def __init__(self, scene, bridge): """Initialize the Lutron Caseta scene.""" self._scene_name = scene["name"] self._scene_id = scene["scene_id"] self._bridge = bridge @property def name(self): """Return the name of the scene.""" return self._scene_name async def async_activate(self, **kwargs: Any) -> None: """Activate the scene.""" await self._bridge.activate_scene(self._scene_id)<|fim▁end|>
from .const import BRIDGE_LEAP, DOMAIN as CASETA_DOMAIN
<|file_name|>theme-customizer.js<|end_file_name|><|fim▁begin|>(function( $ ) { wp.customize( 'blogname', function( value ) {<|fim▁hole|> } ); } ); wp.customize( 'blogdescription', function( value ) { value.bind( function( to ) { $( '.site-description' ).text( to ); } ); } ); })( jQuery );<|fim▁end|>
value.bind( function( to ) { $( '.site-title a' ).text( to );
<|file_name|>netcdf.py<|end_file_name|><|fim▁begin|># coding: utf-8 # Copyright (c) Pymatgen Development Team. # Distributed under the terms of the MIT License. """Wrapper for netCDF readers.""" from __future__ import unicode_literals, division, print_function import os.path import warnings import numpy as np from collections import OrderedDict from monty.dev import requires from monty.collections import AttrDict from monty.functools import lazy_property from monty.string import marquee from pymatgen.core.units import ArrayWithUnit from pymatgen.core.xcfunc import XcFunc from pymatgen.core.structure import Structure import logging logger = logging.getLogger(__name__) __author__ = "Matteo Giantomassi" __copyright__ = "Copyright 2013, The Materials Project" __version__ = "0.1" __maintainer__ = "Matteo Giantomassi" __email__ = "gmatteo at gmail.com" __status__ = "Development" __date__ = "$Feb 21, 2013M$" __all__ = [ "as_ncreader", "as_etsfreader", "NetcdfReader", "NetcdfReaderError", "ETSF_Reader", "NO_DEFAULT", "structure_from_ncdata", ] try: import netCDF4 except ImportError as exc: netCDF4 = None warnings.warn("""\ `import netCDF4` failed with the following error: %s Please install netcdf4 with `conda install netcdf4` If the conda version does not work, uninstall it with `conda uninstall hdf4 hdf5 netcdf4` and use `pip install netcdf4`""" % str(exc)) def _asreader(file, cls): closeit = False if not isinstance(file, cls): file, closeit = cls(file), True return file, closeit def as_ncreader(file): """ Convert file into a NetcdfReader instance. Returns reader, closeit where closeit is set to True if we have to close the file before leaving the procedure. 
""" return _asreader(file, NetcdfReader) def as_etsfreader(file): return _asreader(file, ETSF_Reader) class NetcdfReaderError(Exception): """Base error class for NetcdfReader""" class NO_DEFAULT(object): """Signal that read_value should raise an Error""" class NetcdfReader(object): """ Wraps and extends netCDF4.Dataset. Read only mode. Supports with statements. Additional documentation available at: http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html """ Error = NetcdfReaderError @requires(netCDF4 is not None, "netCDF4 must be installed to use this class") def __init__(self, path): """Open the Netcdf file specified by path (read mode).""" self.path = os.path.abspath(path) try: self.rootgrp = netCDF4.Dataset(self.path, mode="r") except Exception as exc: raise self.Error("In file %s: %s" % (self.path, str(exc))) self.ngroups = len(list(self.walk_tree())) <|fim▁hole|> # self.path2group[child.path] = child.group def __enter__(self): """Activated when used in the with statement.""" return self def __exit__(self, type, value, traceback): """Activated at the end of the with statement. It automatically closes the file.""" self.rootgrp.close() def close(self): try: self.rootgrp.close() except Exception as exc: logger.warning("Exception %s while trying to close %s" % (exc, self.path)) def walk_tree(self, top=None): """ Navigate all the groups in the file starting from top. If top is None, the root group is used. """ if top is None: top = self.rootgrp values = top.groups.values() yield values for value in top.groups.values(): for children in self.walk_tree(value): yield children def print_tree(self): for children in self.walk_tree(): for child in children: print(child) def read_dimvalue(self, dimname, path="/", default=NO_DEFAULT): """ Returns the value of a dimension. Args: dimname: Name of the variable path: path to the group. default: return `default` if `dimname` is not present and `default` is not `NO_DEFAULT` else raise self.Error. 
""" try: dim = self._read_dimensions(dimname, path=path)[0] return len(dim) except self.Error: if default is NO_DEFAULT: raise return default def read_varnames(self, path="/"): """List of variable names stored in the group specified by path.""" if path == "/": return self.rootgrp.variables.keys() else: group = self.path2group[path] return group.variables.keys() def read_value(self, varname, path="/", cmode=None, default=NO_DEFAULT): """ Returns the values of variable with name varname in the group specified by path. Args: varname: Name of the variable path: path to the group. cmode: if cmode=="c", a complex ndarrays is constructed and returned (netcdf does not provide native support from complex datatype). default: returns default if varname is not present. self.Error is raised if default is default is NO_DEFAULT Returns: numpy array if varname represents an array, scalar otherwise. """ try: var = self.read_variable(varname, path=path) except self.Error: if default is NO_DEFAULT: raise return default if cmode is None: # scalar or array # getValue is not portable! 
try: return var.getValue()[0] if not var.shape else var[:] except IndexError: return var.getValue() if not var.shape else var[:] else: assert var.shape[-1] == 2 if cmode == "c": return var[...,0] + 1j*var[...,1] else: raise ValueError("Wrong value for cmode %s" % cmode) def read_variable(self, varname, path="/"): """Returns the variable with name varname in the group specified by path.""" return self._read_variables(varname, path=path)[0] def _read_dimensions(self, *dimnames, **kwargs): path = kwargs.get("path", "/") try: if path == "/": return [self.rootgrp.dimensions[dname] for dname in dimnames] else: group = self.path2group[path] return [group.dimensions[dname] for dname in dimnames] except KeyError: raise self.Error("In file %s:\nError while reading dimensions: `%s` with kwargs: `%s`" % (self.path, dimnames, kwargs)) def _read_variables(self, *varnames, **kwargs): path = kwargs.get("path", "/") try: if path == "/": return [self.rootgrp.variables[vname] for vname in varnames] else: group = self.path2group[path] return [group.variables[vname] for vname in varnames] except KeyError: raise self.Error("In file %s:\nError while reading variables: `%s` with kwargs `%s`." % (self.path, varnames, kwargs)) def read_keys(self, keys, dict_cls=AttrDict, path="/"): """ Read a list of variables/dimensions from file. If a key is not present the corresponding entry in the output dictionary is set to None. """ od = dict_cls() for k in keys: try: # Try to read a variable. od[k] = self.read_value(k, path=path) except self.Error: try: # Try to read a dimension. od[k] = self.read_dimvalue(k, path=path) except self.Error: od[k] = None return od class ETSF_Reader(NetcdfReader): """ This object reads data from a file written according to the ETSF-IO specifications. We assume that the netcdf file contains at least the crystallographic section. 
""" @lazy_property def chemical_symbols(self): """Chemical symbols char [number of atom species][symbol length].""" charr = self.read_value("chemical_symbols") symbols = [] for v in charr: symbols.append("".join(c.decode("utf-8") for c in v)) return symbols def typeidx_from_symbol(self, symbol): """Returns the type index from the chemical symbol. Note python convention.""" return self.chemical_symbols.index(symbol) def read_structure(self, cls=Structure): """Returns the crystalline structure.""" if self.ngroups != 1: raise NotImplementedError("In file %s: ngroups != 1" % self.path) return structure_from_ncdata(self, cls=cls) def read_abinit_xcfunc(self): """ Read ixc from an Abinit file. Return :class:`XcFunc` object. """ ixc = int(self.read_value("ixc")) return XcFunc.from_abinit_ixc(ixc) def read_abinit_hdr(self): """ Read the variables associated to the Abinit header. Return :class:`AbinitHeader` """ d = {} for hvar in _HDR_VARIABLES.values(): ncname = hvar.etsf_name if hvar.etsf_name is not None else hvar.name if ncname in self.rootgrp.variables: d[hvar.name] = self.read_value(ncname) elif ncname in self.rootgrp.dimensions: d[hvar.name] = self.read_dimvalue(ncname) else: raise ValueError("Cannot find `%s` in `%s`" % (ncname, self.path)) # Convert scalars to (well) scalars. if hasattr(d[hvar.name], "shape") and not d[hvar.name].shape: d[hvar.name] = np.asscalar(d[hvar.name]) if hvar.name in ("title", "md5_pseudos", "codvsn"): # Convert array of numpy bytes to list of strings if hvar.name == "codvsn": d[hvar.name] = "".join(bs.decode("utf-8").strip() for bs in d[hvar.name]) else: d[hvar.name] = ["".join(bs.decode("utf-8") for bs in astr).strip() for astr in d[hvar.name]] return AbinitHeader(d) def structure_from_ncdata(ncdata, site_properties=None, cls=Structure): """ Reads and returns a pymatgen structure from a NetCDF file containing crystallographic data in the ETSF-IO format. Args: ncdata: filename or NetcdfReader instance. 
site_properties: Dictionary with site properties. cls: The Structure class to instanciate. """ ncdata, closeit = as_ncreader(ncdata) # TODO check whether atomic units are used lattice = ArrayWithUnit(ncdata.read_value("primitive_vectors"), "bohr").to("ang") red_coords = ncdata.read_value("reduced_atom_positions") natom = len(red_coords) znucl_type = ncdata.read_value("atomic_numbers") # type_atom[0:natom] --> index Between 1 and number of atom species type_atom = ncdata.read_value("atom_species") # Fortran to C index and float --> int conversion. species = natom * [None] for atom in range(natom): type_idx = type_atom[atom] - 1 species[atom] = int(znucl_type[type_idx]) d = {} if site_properties is not None: for prop in site_properties: d[property] = ncdata.read_value(prop) structure = cls(lattice, species, red_coords, site_properties=d) # Quick and dirty hack. # I need an abipy structure since I need to_abivars and other methods. try: from abipy.core.structure import Structure as AbipyStructure structure.__class__ = AbipyStructure except ImportError: pass if closeit: ncdata.close() return structure class _H(object): __slots__ = ["name", "doc", "etsf_name"] def __init__(self, name, doc, etsf_name=None): self.name, self.doc, self.etsf_name = name, doc, etsf_name _HDR_VARIABLES = ( # Scalars _H("bantot", "total number of bands (sum of nband on all kpts and spins)"), _H("date", "starting date"), _H("headform", "format of the header"), _H("intxc", "input variable"), _H("ixc", "input variable"), _H("mband", "maxval(hdr%nband)", etsf_name="max_number_of_states"), _H("natom", "input variable", etsf_name="number_of_atoms"), _H("nkpt", "input variable", etsf_name="number_of_kpoints"), _H("npsp", "input variable"), _H("nspden", "input variable", etsf_name="number_of_components"), _H("nspinor", "input variable", etsf_name="number_of_spinor_components"), _H("nsppol", "input variable", etsf_name="number_of_spins"), _H("nsym", "input variable", 
etsf_name="number_of_symmetry_operations"), _H("ntypat", "input variable", etsf_name="number_of_atom_species"), _H("occopt", "input variable"), _H("pertcase", "the index of the perturbation, 0 if GS calculation"), _H("usepaw", "input variable (0=norm-conserving psps, 1=paw)"), _H("usewvl", "input variable (0=plane-waves, 1=wavelets)"), _H("kptopt", "input variable (defines symmetries used for k-point sampling)"), _H("pawcpxocc", "input variable"), _H("nshiftk_orig", "original number of shifts given in input (changed in inkpts, the actual value is nshiftk)"), _H("nshiftk", "number of shifts after inkpts."), _H("icoulomb", "input variable."), _H("ecut", "input variable", etsf_name="kinetic_energy_cutoff"), _H("ecutdg", "input variable (ecut for NC psps, pawecutdg for paw)"), _H("ecutsm", "input variable"), _H("ecut_eff", "ecut*dilatmx**2 (dilatmx is an input variable)"), _H("etot", "EVOLVING variable"), _H("fermie", "EVOLVING variable", etsf_name="fermi_energy"), _H("residm", "EVOLVING variable"), _H("stmbias", "input variable"), _H("tphysel", "input variable"), _H("tsmear", "input variable"), _H("nelect", "number of electrons (computed from pseudos and charge)"), _H("charge", "input variable"), # Arrays _H("qptn", "qptn(3) the wavevector, in case of a perturbation"), #_H("rprimd", "rprimd(3,3) EVOLVING variables", etsf_name="primitive_vectors"), #_H(ngfft, "ngfft(3) input variable", number_of_grid_points_vector1" #_H("nwvlarr", "nwvlarr(2) the number of wavelets for each resolution.", etsf_name="number_of_wavelets"), _H("kptrlatt_orig", "kptrlatt_orig(3,3) Original kptrlatt"), _H("kptrlatt", "kptrlatt(3,3) kptrlatt after inkpts."), _H("istwfk", "input variable istwfk(nkpt)"), _H("lmn_size", "lmn_size(npsp) from psps"), _H("nband", "input variable nband(nkpt*nsppol)", etsf_name="number_of_states"), _H("npwarr", "npwarr(nkpt) array holding npw for each k point", etsf_name="number_of_coefficients"), _H("pspcod", "pscod(npsp) from psps"), _H("pspdat", "psdat(npsp) from 
psps"), _H("pspso", "pspso(npsp) from psps"), _H("pspxc", "pspxc(npsp) from psps"), _H("so_psp", "input variable so_psp(npsp)"), _H("symafm", "input variable symafm(nsym)"), #_H(symrel="input variable symrel(3,3,nsym)", etsf_name="reduced_symmetry_matrices"), _H("typat", "input variable typat(natom)", etsf_name="atom_species"), _H("kptns", "input variable kptns(nkpt, 3)", etsf_name="reduced_coordinates_of_kpoints"), _H("occ", "EVOLVING variable occ(mband, nkpt, nsppol)", etsf_name="occupations"), _H("tnons", "input variable tnons(nsym, 3)", etsf_name="reduced_symmetry_translations"), _H("wtk", "weight of kpoints wtk(nkpt)", etsf_name="kpoint_weights"), _H("shiftk_orig", "original shifts given in input (changed in inkpts)."), _H("shiftk", "shiftk(3,nshiftk), shiftks after inkpts"), _H("amu", "amu(ntypat) ! EVOLVING variable"), #_H("xred", "EVOLVING variable xred(3,natom)", etsf_name="reduced_atom_positions"), _H("zionpsp", "zionpsp(npsp) from psps"), _H("znuclpsp", "znuclpsp(npsp) from psps. Note the difference between (znucl|znucltypat) and znuclpsp"), _H("znucltypat", "znucltypat(ntypat) from alchemy", etsf_name="atomic_numbers"), _H("codvsn", "version of the code"), _H("title", "title(npsp) from psps"), _H("md5_pseudos", "md5pseudos(npsp), md5 checksums associated to pseudos (read from file)"), #_H(type(pawrhoij_type), allocatable :: pawrhoij(:) ! EVOLVING variable, only for paw ) _HDR_VARIABLES = OrderedDict([(h.name, h) for h in _HDR_VARIABLES]) class AbinitHeader(AttrDict): """Stores the values reported in the Abinit header.""" #def __init__(self, *args, **kwargs): # super(AbinitHeader, self).__init__(*args, **kwargs) # for k, v in self.items(): # v.__doc__ = _HDR_VARIABLES[k].doc def __str__(self): return self.to_string() def to_string(self, verbose=0, title=None, **kwargs): """ String representation. kwargs are passed to `pprint.pformat`. Args: verbose: Verbosity level title: Title string. 
""" from pprint import pformat s = pformat(self, **kwargs) if title is not None: return "\n".join([marquee(title, mark="="), s]) return s<|fim▁end|>
#self.path2group = OrderedDict() #for children in self.walk_tree(): # for child in children: # #print(child.group, child.path)
<|file_name|>tflow.py<|end_file_name|><|fim▁begin|>import uuid from typing import Optional, Union from mitmproxy import connection from mitmproxy import flow from mitmproxy import http from mitmproxy import tcp from mitmproxy import websocket from mitmproxy.test.tutils import treq, tresp from wsproto.frame_protocol import Opcode def ttcpflow(client_conn=True, server_conn=True, messages=True, err=None) -> tcp.TCPFlow: if client_conn is True: client_conn = tclient_conn() if server_conn is True: server_conn = tserver_conn() if messages is True: messages = [ tcp.TCPMessage(True, b"hello", 946681204.2), tcp.TCPMessage(False, b"it's me", 946681204.5), ] if err is True: err = terr() f = tcp.TCPFlow(client_conn, server_conn) f.messages = messages f.error = err f.live = True return f def twebsocketflow(messages=True, err=None, close_code=None, close_reason='') -> http.HTTPFlow: flow = http.HTTPFlow(tclient_conn(), tserver_conn()) flow.request = http.Request( "example.com", 80, b"GET", b"http", b"example.com", b"/ws", b"HTTP/1.1", headers=http.Headers( connection="upgrade", upgrade="websocket", sec_websocket_version="13", sec_websocket_key="1234", ), content=b'', trailers=None, timestamp_start=946681200, timestamp_end=946681201, ) flow.response = http.Response( b"HTTP/1.1", 101, reason=b"Switching Protocols", headers=http.Headers( connection='upgrade', upgrade='websocket', sec_websocket_accept=b'', ), content=b'', trailers=None, timestamp_start=946681202, timestamp_end=946681203, ) flow.websocket = twebsocket() flow.websocket.close_reason = close_reason if close_code is not None: flow.websocket.close_code = close_code else: if err is True: # ABNORMAL_CLOSURE flow.websocket.close_code = 1006 else: # NORMAL_CLOSURE flow.websocket.close_code = 1000 flow.live = True return flow def tflow( *, client_conn: Optional[connection.Client] = None, server_conn: Optional[connection.Server] = None, req: Optional[http.Request] = None, resp: Union[bool, http.Response] = False, err: 
Union[bool, flow.Error] = False, ws: Union[bool, websocket.WebSocketData] = False, live: bool = True, ) -> http.HTTPFlow: """Create a flow for testing.""" if client_conn is None: client_conn = tclient_conn() if server_conn is None: server_conn = tserver_conn() if req is None: req = treq() if resp is True: resp = tresp() if err is True: err = terr() if ws is True: ws = twebsocket() assert resp is False or isinstance(resp, http.Response) assert err is False or isinstance(err, flow.Error) assert ws is False or isinstance(ws, websocket.WebSocketData) f = http.HTTPFlow(client_conn, server_conn) f.request = req f.response = resp or None f.error = err or None f.websocket = ws or None f.live = live return f class DummyFlow(flow.Flow): """A flow that is neither HTTP nor TCP.""" def __init__(self, client_conn, server_conn, live=None): super().__init__("dummy", client_conn, server_conn, live) def tdummyflow(client_conn=True, server_conn=True, err=None) -> DummyFlow: if client_conn is True: client_conn = tclient_conn() if server_conn is True: server_conn = tserver_conn() if err is True: err = terr() f = DummyFlow(client_conn, server_conn) f.error = err f.live = True return f def tclient_conn() -> connection.Client: c = connection.Client.from_state(dict( id=str(uuid.uuid4()), address=("127.0.0.1", 22), mitmcert=None, tls_established=True, timestamp_start=946681200, timestamp_tls_setup=946681201, timestamp_end=946681206, sni="address", cipher_name="cipher", alpn=b"http/1.1", tls_version="TLSv1.2", tls_extensions=[(0x00, bytes.fromhex("000e00000b6578616d"))], state=0, sockname=("", 0), error=None, tls=False, certificate_list=[], alpn_offers=[], cipher_list=[], )) return c def tserver_conn() -> connection.Server: c = connection.Server.from_state(dict( id=str(uuid.uuid4()), address=("address", 22),<|fim▁hole|> ip_address=("192.168.0.1", 22), timestamp_start=946681202, timestamp_tcp_setup=946681203, timestamp_tls_setup=946681204, timestamp_end=946681205, tls_established=True, 
sni="address", alpn=None, tls_version="TLSv1.2", via=None, state=0, error=None, tls=False, certificate_list=[], alpn_offers=[], cipher_name=None, cipher_list=[], via2=None, )) return c def terr(content: str = "error") -> flow.Error: err = flow.Error(content, 946681207) return err def twebsocket(messages: bool = True) -> websocket.WebSocketData: ws = websocket.WebSocketData() if messages: ws.messages = [ websocket.WebSocketMessage(Opcode.BINARY, True, b"hello binary", 946681203), websocket.WebSocketMessage(Opcode.TEXT, True, b"hello text", 946681204), websocket.WebSocketMessage(Opcode.TEXT, False, b"it's me", 946681205), ] ws.close_reason = "Close Reason" ws.close_code = 1000 ws.closed_by_client = False ws.timestamp_end = 946681205 return ws<|fim▁end|>
source_address=("address", 22),
<|file_name|>currency_getter.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # Copyright (c) 2009 CamptoCamp. All rights reserved. # @author Nicolas Bessi # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import logging _logger = logging.getLogger(__name__) class AbstractClassError(Exception): def __str__(self): return 'Abstract Class' def __repr__(self): return 'Abstract Class' class AbstractMethodError(Exception): def __str__(self): return 'Abstract Method' def __repr__(self): return 'Abstract Method' class UnknowClassError(Exception): def __str__(self): return 'Unknown Class' def __repr__(self): return 'Unknown Class' class UnsuportedCurrencyError(Exception): def __init__(self, value):<|fim▁hole|> self.curr = value def __str__(self): return 'Unsupported currency %s' % self.curr def __repr__(self): return 'Unsupported currency %s' % self.curr class Currency_getter_factory(): """Factory pattern class that will return a currency getter class base on the name passed to the register method """ def register(self, class_name): allowed = [ 'CH_ADMIN_getter', 'PL_NBP_getter', 'ECB_getter', 'GOOGLE_getter', 'YAHOO_getter', 'MX_BdM_getter', 'CA_BOC_getter', 'RO_BNR_getter', 'BG_CUSTOMS_getter', 
'BG_SIBANK_getter', 'BG_UNICRDT_getter', ] if class_name in allowed: exec "from .update_service_%s import %s" % (class_name.replace('_getter', ''), class_name) class_def = eval(class_name) _logger.info("from .update_service_%s import %s: class_def %s:" % (class_name.replace('_getter', ''), class_name, class_def)) return class_def() else: raise UnknowClassError<|fim▁end|>
<|file_name|>SQLQueryResult.java<|end_file_name|><|fim▁begin|>/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2021 DBeaver Corp and others * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.model.sql; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.model.exec.DBCExecutionResult; import java.util.ArrayList; import java.util.Collections; import java.util.List; /** * SQLQueryResult */ public class SQLQueryResult implements DBCExecutionResult { public static class ExecuteResult { private boolean resultSet; private Long rowCount; private Long updateCount; private String resultSetName; public ExecuteResult(boolean resultSet) { this.resultSet = resultSet; } public boolean isResultSet() { return resultSet; } @Nullable public Long getRowCount() { return rowCount; } public void setRowCount(Long rowCount) { this.rowCount = rowCount; } @Nullable public Long getUpdateCount() { return updateCount; } public void setUpdateCount(Long updateCount) { this.updateCount = updateCount; } @Nullable public String getResultSetName() { return resultSetName; } public void setResultSetName(String resultSetName) { this.resultSetName = resultSetName; } } private SQLQuery statement; private Long rowOffset; private boolean hasResultSet; private Throwable error; private long queryTime; private List<Throwable> warnings; private List<ExecuteResult> executeResults = new ArrayList<>(); public SQLQueryResult(@NotNull SQLQuery statement) { 
this.statement = statement; } @NotNull public SQLQuery getStatement() { return statement; } public Long getRowOffset() { return rowOffset; } <|fim▁hole|> this.rowOffset = rowOffset; } public boolean hasResultSet() { return hasResultSet; } public void setHasResultSet(boolean hasResultSet) { this.hasResultSet = hasResultSet; } public boolean hasError() { return error != null; } @Nullable public Throwable getError() { return error; } public void setError(Throwable error) { this.error = error; } public long getQueryTime() { return queryTime; } public void setQueryTime(long queryTime) { this.queryTime = queryTime; } public List<Throwable> getWarnings() { return warnings; } public void addWarnings(Throwable[] warnings) { if (warnings == null) { return; } if (this.warnings == null) { this.warnings = new ArrayList<>(); } Collections.addAll(this.warnings, warnings); } public ExecuteResult addExecuteResult(boolean resultSet) { ExecuteResult executeResult = new ExecuteResult(resultSet); executeResults.add(executeResult); return executeResult; } public List<ExecuteResult> getExecuteResults() { return executeResults; } public ExecuteResult getExecuteResults(int order, boolean resultSets) { int rsIndex = -1; for (int i = 0; i < executeResults.size(); i++) { if (resultSets && !executeResults.get(i).isResultSet()) { continue; } rsIndex++; if (rsIndex == order) { return executeResults.get(i); } } return null; } }<|fim▁end|>
public void setRowOffset(Long rowOffset) {
<|file_name|>mfa.js<|end_file_name|><|fim▁begin|>wesabe.provide("fi-scripts.com.citibank.mfa", { dispatch: function() { if (page.present(e.mfa.indicator)) { action.answerSecurityQuestions(); return false; } }, actions: { }, elements: { mfa: { indicator: [ '//*[has-class("jrspageHeader")][contains(string(.), "Authorization Required")]', '//form[@action="/US/JRS/mfa/cq/ValidateCQ.do"]', ], cin: [ // ATM/Debit Card # (CIN) '//input[@type="text"][@name="cin"]', ], pin: [ // PIN '//input[@type="password"][@name="pin"]', ], continueButton: [ '//input[@type="image"][contains(@src, "cont_btn")]', '//input[@type="submit" or @type="image"]', ], }, security: { questions: [ '//form[contains(@action, "mfa")]//*[has-class("jrsnoteText")]/b/text()', ], answers: [ '//form[contains(@action, "mfa")]//input[@type="text" or @type="password"]', ], continueButton: [<|fim▁hole|> '//input[@type="image"][contains(@src, "cont_btn")]', '//input[@type="submit" or @type="image"]', ], }, }, });<|fim▁end|>
<|file_name|>configuration.js<|end_file_name|><|fim▁begin|>sap.ui.define(["sap/ui/integration/Designtime"], function ( Designtime ) { "use strict"; return function () { return new Designtime({ "form": { "items": { "validationGroup": { "type": "group", "label": "Validation" }, "OrderID": { "manifestpath": "/sap.card/configuration/parameters/OrderID/value", "label": "Order Id", "type": "integer", "required": true }, "stringphone": { "manifestpath": "/sap.card/configuration/parameters/string/value", "label": "String with Pattern validation", "type": "string", "translatable": false, "required": true, "placeholder": "555-4555", "validation": { "type": "error", "maxLength": 20, "minLength": 1, "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$", "message": "The string does not match a telefone number" } }, "stringphonenomessage": { "manifestpath": "/sap.card/configuration/parameters/string/value", "label": "String with default validation message", "type": "string",<|fim▁hole|> "required": true, "placeholder": "555-4555", "validation": { "type": "warning", "maxLength": 20, "minLength": 1, "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$" } }, "stringmaxmin": { "manifestpath": "/sap.card/configuration/parameters/string/value", "label": "String with Length Constrained", "type": "string", "translatable": false, "required": true, "placeholder": "MinMaxlength", "validation": { "type": "warning", "maxLength": 20, "minLength": 3 }, "hint": "Please refer to the <a href='https://www.sap.com'>documentation</a> lets see how this will behave if the text is wrapping to the next line and has <a href='https://www.sap.com'>two links</a>. good?" 
}, "integerrequired": { "manifestpath": "/sap.card/configuration/parameters/integerrequired/value", "label": "Integer with Required", "type": "integer", "translatable": false, "required": true }, "integervalidation": { "manifestpath": "/sap.card/configuration/parameters/integer/value", "label": "Integer with Min Max value", "type": "integer", "visualization": { "type": "Slider", "settings": { "value": "{currentSettings>value}", "min": 0, "max": 16, "width": "100%", "showAdvancedTooltip": true, "showHandleTooltip": false, "inputsAsTooltips": true, "enabled": "{currentSettings>editable}" } }, "validations": [ { "type": "warning", "minimum": 5, "message": "The minimum is 5." }, { "type": "error", "exclusiveMaximum": 16, "message": "The maximum is 15." }, { "type": "error", "multipleOf": 5, "message": "Has to be multiple of 5" } ] }, "numberrequired": { "manifestpath": "/sap.card/configuration/parameters/number/value", "label": "Number with validation", "type": "number", "translatable": false, "required": true, "validation": { "type": "error", "minimum": 0, "maximum": 100, "exclusiveMaximum": true, "message": "The value should be equal or greater than 0 and be less than 100." } } } }, "preview": { "modes": "AbstractLive" } }); }; });<|fim▁end|>
"translatable": false,
<|file_name|>base64utils.py<|end_file_name|><|fim▁begin|># Copyright 2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Python provides the base64 module as a core module but this is mostly limited to encoding and decoding base64 and it's variants. It is often useful to be able to perform other operations on base64 text. This module is meant to be used in conjunction with the core base64 module. Standarized base64 is defined in RFC-4648 "The Base16, Base32, and Base64 Data Encodings". This module provides the following base64 utility functionality: * tests if text is valid base64 * filter formatting from base64 * convert base64 between different alphabets * Handle padding issues - test if base64 is padded - removes padding - restores padding * wraps base64 text into formatted blocks - via iterator - return formatted string """ import re import string import six from six.moves import urllib from keystone.i18n import _ class InvalidBase64Error(ValueError): pass base64_alphabet_re = re.compile(r'^[^A-Za-z0-9+/=]+$') base64url_alphabet_re = re.compile(r'^[^A-Za-z0-9---_=]+$') base64_non_alphabet_re = re.compile(r'[^A-Za-z0-9+/=]+') base64url_non_alphabet_re = re.compile(r'[^A-Za-z0-9---_=]+') _strip_formatting_re = re.compile(r'\s+') _base64_to_base64url_trans = string.maketrans('+/', '-_') _base64url_to_base64_trans = string.maketrans('-_', '+/') def is_valid_base64(text): """Test if input text can be base64 decoded. 
:param text: input base64 text :type text: string :returns: bool -- True if text can be decoded as base64, False otherwise """ text = filter_formatting(text) if base64_non_alphabet_re.search(text): return False try: return base64_is_padded(text) except InvalidBase64Error: return False def is_valid_base64url(text): """Test if input text can be base64url decoded. :param text: input base64 text :type text: string :returns: bool -- True if text can be decoded as base64url, False otherwise """ text = filter_formatting(text) if base64url_non_alphabet_re.search(text): return False try: return base64_is_padded(text) except InvalidBase64Error: return False def filter_formatting(text): """Return base64 text without any formatting, just the base64. Base64 text is often formatted with whitespace, line endings, etc. This function strips out any formatting, the result will contain only base64 characters. Note, this function does not filter out all non-base64 alphabet characters, it only removes characters used for formatting. :param text: input text to filter :type text: string :returns: string -- filtered text without formatting """ return _strip_formatting_re.sub('', text) def base64_to_base64url(text): """Convert base64 text to base64url text. base64url text is designed to be safe for use in filenames and URL's. It is defined in RFC-4648 Section 5. base64url differs from base64 in the last two alphabet characters at index 62 and 63, these are sometimes referred as the altchars. The '+' character at index 62 is replaced by '-' (hyphen) and the '/' character at index 63 is replaced by '_' (underscore). This function only translates the altchars, non-alphabet characters are not filtered out. WARNING:: base64url continues to use the '=' pad character which is NOT URL safe. RFC-4648 suggests two alternate methods to deal with this: percent-encode percent-encode the pad character (e.g. '=' becomes '%3D'). This makes the base64url text fully safe. 
But percent-encoding has the downside of requiring percent-decoding prior to feeding the base64url text into a base64url decoder since most base64url decoders do not recognize %3D as a pad character and most decoders require correct padding. no-padding padding is not strictly necessary to decode base64 or base64url text, the pad can be computed from the input text length. However many decoders demand padding and will consider non-padded text to be malformed. If one wants to omit the trailing pad character(s) for use in URL's it can be added back using the base64_assure_padding() function. This function makes no decisions about which padding methodology to use. One can either call base64_strip_padding() to remove any pad characters (restoring later with base64_assure_padding()) or call base64url_percent_encode() to percent-encode the pad characters. :param text: input base64 text :type text: string :returns: string -- base64url text """ return text.translate(_base64_to_base64url_trans) def base64url_to_base64(text): """Convert base64url text to base64 text. See base64_to_base64url() for a description of base64url text and it's issues. This function does NOT handle percent-encoded pad characters, they will be left intact. If the input base64url text is percent-encoded you should call :param text: text in base64url alphabet :type text: string :returns: string -- text in base64 alphabet """ return text.translate(_base64url_to_base64_trans) def base64_is_padded(text, pad='='): """Test if the text is base64 padded. The input text must be in a base64 alphabet. The pad must be a single character. If the text has been percent-encoded (e.g. pad is the string '%3D') you must convert the text back to a base64 alphabet (e.g. if percent-encoded use the function base64url_percent_decode()). 
:param text: text containing ONLY characters in a base64 alphabet :type text: string :param pad: pad character (must be single character) (default: '=') :type pad: string :returns: bool -- True if padded, False otherwise :raises: ValueError, InvalidBase64Error """ if len(pad) != 1: raise ValueError(_('pad must be single character')) text_len = len(text) if text_len > 0 and text_len % 4 == 0: pad_index = text.find(pad) if pad_index >= 0 and pad_index < text_len - 2: raise InvalidBase64Error(_('text is multiple of 4, ' 'but pad "%s" occurs before ' '2nd to last char') % pad) if pad_index == text_len - 2 and text[-1] != pad: raise InvalidBase64Error(_('text is multiple of 4, ' 'but pad "%s" occurs before ' 'non-pad last char') % pad) return True if text.find(pad) >= 0: raise InvalidBase64Error(_('text is not a multiple of 4, ' 'but contains pad "%s"') % pad) return False def base64url_percent_encode(text): """Percent-encode base64url padding. The input text should only contain base64url alphabet characters. Any non-base64url alphabet characters will also be subject to percent-encoding. :param text: text containing ONLY characters in the base64url alphabet :type text: string :returns: string -- percent-encoded base64url text :raises: InvalidBase64Error """ if len(text) % 4 != 0: raise InvalidBase64Error(_('padded base64url text must be ' 'multiple of 4 characters')) return urllib.parse.quote(text) def base64url_percent_decode(text): """Percent-decode base64url padding. The input text should only contain base64url alphabet characters and the percent-encoded pad character. Any other percent-encoded characters will be subject to percent-decoding. 
:param text: base64url alphabet text :type text: string :returns: string -- percent-decoded base64url text """ decoded_text = urllib.parse.unquote(text) if len(decoded_text) % 4 != 0: raise InvalidBase64Error(_('padded base64url text must be ' 'multiple of 4 characters')) return decoded_text def base64_strip_padding(text, pad='='): """Remove padding from input base64 text. :param text: text containing ONLY characters in a base64 alphabet :type text: string :param pad: pad character (must be single character) (default: '=') :type pad: string :returns: string -- base64 text without padding :raises: ValueError """ if len(pad) != 1: raise ValueError(_('pad must be single character')) # Can't be padded if text is less than 4 characters. if len(text) < 4: return text if text[-1] == pad: if text[-2] == pad: return text[0:-2] else: return text[0:-1] else: return text def base64_assure_padding(text, pad='='): """Assure the input text ends with padding. Base64 text is normally expected to be a multiple of 4 characters. Each 4 character base64 sequence produces 3 octets of binary data. If the binary data is not a multiple of 3 the base64 text is padded at the end with a pad character such that it is always a multiple of 4. Padding is ignored and does not alter the binary data nor it's length. In some circumstances it is desirable to omit the padding character due to transport encoding conflicts. Base64 text can still be correctly decoded if the length of the base64 text (consisting only of characters in the desired base64 alphabet) is known, padding is not absolutely necessary. Some base64 decoders demand correct padding or one may wish to format RFC compliant base64, this function performs this action. Input is assumed to consist only of members of a base64 alphabet (i.e no whitespace). Iteration yields a sequence of lines. The line does NOT terminate with a line ending. <|fim▁hole|> If the text ends with the pad it is assumed to already be padded. 
Otherwise the binary length is computed from the input text length and correct number of pad characters are appended. :param text: text containing ONLY characters in a base64 alphabet :type text: string :param pad: pad character (must be single character) (default: '=') :type pad: string :returns: string -- input base64 text with padding :raises: ValueError """ if len(pad) != 1: raise ValueError(_('pad must be single character')) if text.endswith(pad): return text n = len(text) % 4 if n == 0: return text n = 4 - n padding = pad * n return text + padding def base64_wrap_iter(text, width=64): """Fold text into lines of text with max line length. Input is assumed to consist only of members of a base64 alphabet (i.e no whitespace). Iteration yields a sequence of lines. The line does NOT terminate with a line ending. Use the filter_formatting() function to assure the input text contains only the members of the alphabet. :param text: text containing ONLY characters in a base64 alphabet :type text: string :param width: number of characters in each wrapped line (default: 64) :type width: int :returns: generator -- sequence of lines of base64 text. """ text = six.text_type(text) for x in six.moves.range(0, len(text), width): yield text[x:x + width] def base64_wrap(text, width=64): """Fold text into lines of text with max line length. Input is assumed to consist only of members of a base64 alphabet (i.e no whitespace). Fold the text into lines whose line length is width chars long, terminate each line with line ending (default is '\\n'). Return the wrapped text as a single string. Use the filter_formatting() function to assure the input text contains only the members of the alphabet. :param text: text containing ONLY characters in a base64 alphabet :type text: string :param width: number of characters in each wrapped line (default: 64) :type width: int :returns: string -- wrapped text. 
""" buf = six.StringIO() for line in base64_wrap_iter(text, width): buf.write(line) buf.write(u'\n') text = buf.getvalue() buf.close() return text<|fim▁end|>
Use the filter_formatting() function to assure the input text contains only the members of the alphabet.
<|file_name|>mainMenu.java<|end_file_name|><|fim▁begin|>package championpicker.console; import com.googlecode.lanterna.gui.*; <|fim▁hole|>import com.googlecode.lanterna.TerminalFacade; import com.googlecode.lanterna.terminal.Terminal; import com.googlecode.lanterna.terminal.TerminalSize; import com.googlecode.lanterna.terminal.swing.SwingTerminal; import com.googlecode.lanterna.gui.GUIScreen; import com.googlecode.lanterna.gui.dialog.DialogButtons; import com.googlecode.lanterna.gui.component.Button; import com.googlecode.lanterna.gui.component.Panel; import com.googlecode.lanterna.gui.component.Label; import com.googlecode.lanterna.gui.Window; import com.googlecode.lanterna.screen.Screen; import com.googlecode.lanterna.screen.Screen; import championpicker.Main; import championpicker.console.mainStartUp; import championpicker.console.queueWindow; import javax.swing.JFrame; public class mainMenu extends Window{ public mainMenu(String name){ super(name); queueWindow win = new queueWindow(); addComponent(new Button("Queue!", new Action(){ public void doAction(){ System.out.println("Success!"); mainStartUp.gui.showWindow(win, GUIScreen.Position.CENTER); }})); } }<|fim▁end|>
<|file_name|>sync.py<|end_file_name|><|fim▁begin|>import fnmatch import os import re import shutil import sys import uuid from base import Step, StepRunner from tree import Commit here = os.path.abspath(os.path.split(__file__)[0]) bsd_license = """W3C 3-clause BSD License Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of works must retain the original copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the original copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the W3C nor the names of its contributors may be used to endorse or promote products derived from this work without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ def copy_wpt_tree(tree, dest, excludes=None, includes=None): """Copy the working copy of a Tree to a destination directory. :param tree: The Tree to copy. 
:param dest: The destination directory""" if os.path.exists(dest): assert os.path.isdir(dest) shutil.rmtree(dest) os.mkdir(dest) if excludes is None: excludes = [] excludes = [re.compile(fnmatch.translate(item)) for item in excludes] if includes is None: includes = [] includes = [re.compile(fnmatch.translate(item)) for item in includes] for tree_path in tree.paths(): if (any(item.match(tree_path) for item in excludes) and not any(item.match(tree_path) for item in includes)): continue source_path = os.path.join(tree.root, tree_path) dest_path = os.path.join(dest, tree_path) dest_dir = os.path.split(dest_path)[0] if not os.path.isdir(source_path): if not os.path.exists(dest_dir): os.makedirs(dest_dir) shutil.copy2(source_path, dest_path) for source, destination in [("testharness_runner.html", ""), ("testdriver-vendor.js", "resources/")]: source_path = os.path.join(here, os.pardir, source) dest_path = os.path.join(dest, destination, os.path.split(source)[1]) shutil.copy2(source_path, dest_path) add_license(dest) def add_license(dest): """Write the bsd license string to a LICENSE file. 
:param dest: Directory in which to place the LICENSE file.""" with open(os.path.join(dest, "LICENSE"), "w") as f: f.write(bsd_license) class UpdateCheckout(Step): """Pull changes from upstream into the local sync tree.""" provides = ["local_branch"] def create(self, state): sync_tree = state.sync_tree state.local_branch = uuid.uuid4().hex sync_tree.update(state.sync["remote_url"], state.sync["branch"], state.local_branch) sync_path = os.path.abspath(sync_tree.root) if sync_path not in sys.path: from update import setup_paths setup_paths(sync_path) def restore(self, state): assert os.path.abspath(state.sync_tree.root) in sys.path Step.restore(self, state) class GetSyncTargetCommit(Step): """Find the commit that we will sync to.""" provides = ["sync_commit"] def create(self, state): if state.target_rev is None: #Use upstream branch HEAD as the base commit state.sync_commit = state.sync_tree.get_remote_sha1(state.sync["remote_url"], state.sync["branch"]) else: state.sync_commit = Commit(state.sync_tree, state.rev)<|fim▁hole|> class LoadManifest(Step): """Load the test manifest""" provides = ["manifest_path", "test_manifest"] def create(self, state): from manifest import manifest state.manifest_path = os.path.join(state.metadata_path, "MANIFEST.json") state.test_manifest = manifest.Manifest("/") class UpdateManifest(Step): """Update the manifest to match the tests in the sync tree checkout""" def create(self, state): from manifest import manifest, update update.update(state.sync["path"], state.test_manifest) manifest.write(state.test_manifest, state.manifest_path) class CopyWorkTree(Step): """Copy the sync tree over to the destination in the local tree""" def create(self, state): copy_wpt_tree(state.sync_tree, state.tests_path, excludes=state.path_excludes, includes=state.path_includes) class CreateSyncPatch(Step): """Add the updated test files to a commit/patch in the local tree.""" def create(self, state): if not state.patch: return local_tree = state.local_tree 
sync_tree = state.sync_tree local_tree.create_patch("web-platform-tests_update_%s" % sync_tree.rev, "Update %s to revision %s" % (state.suite_name, sync_tree.rev)) test_prefix = os.path.relpath(state.tests_path, local_tree.root) local_tree.add_new(test_prefix) local_tree.add_ignored(sync_tree, test_prefix) updated = local_tree.update_patch(include=[state.tests_path, state.metadata_path]) local_tree.commit_patch() if not updated: self.logger.info("Nothing to sync") class SyncFromUpstreamRunner(StepRunner): """(Sub)Runner for doing an upstream sync""" steps = [UpdateCheckout, GetSyncTargetCommit, LoadManifest, UpdateManifest, CopyWorkTree, CreateSyncPatch]<|fim▁end|>
state.sync_tree.checkout(state.sync_commit.sha1, state.local_branch, force=True) self.logger.debug("New base commit is %s" % state.sync_commit.sha1)
<|file_name|>SetAttributeTool.gpr.py<|end_file_name|><|fim▁begin|>#<|fim▁hole|># Copyright (C) 2009 Douglas S. Blank <[email protected]> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # $Id$ register(TOOL, id = 'SetAttribute', name = _("Set Attribute"), description = _("Set an attribute to a given value."), version = '0.0.32', gramps_target_version = "5.1", status = STABLE, # not yet tested with python 3 fname = 'SetAttributeTool.py', authors = ["Douglas S. Blank"], authors_email = ["[email protected]"], category = TOOL_DBPROC, toolclass = 'SetAttributeWindow', optionclass = 'SetAttributeOptions', tool_modes = [TOOL_MODE_GUI], )<|fim▁end|>
# Gramps - a GTK+/GNOME based genealogy program #
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Any, Callable, Tuple, Union from packed import pack, unpack import jj from jj import default_app, default_handler from jj.apps import BaseApp, create_app from jj.http.codes import BAD_REQUEST, OK from jj.http.methods import ANY, DELETE, GET, POST from jj.matchers import LogicalMatcher, RequestMatcher, ResolvableMatcher, exists from jj.requests import Request from jj.resolvers import Registry, Resolver from jj.responses import RelayResponse, Response, StreamResponse from ._history import HistoryRepository from ._remote_response import RemoteResponseType __all__ = ("Mock",) MatcherType = Union[RequestMatcher, LogicalMatcher] class Mock(jj.App): def __init__(self, app_factory: Callable[..., BaseApp] = create_app, resolver_factory: Callable[..., Resolver] = Resolver) -> None: self._resolver = resolver_factory(Registry(), default_app, default_handler) self._app = app_factory(resolver=self._resolver) self._repo = HistoryRepository() def _decode(self, payload: bytes) -> Tuple[str, MatcherType, RemoteResponseType]: def resolver(cls: Any, **kwargs: Any) -> Any: return cls.__unpacked__(**kwargs, resolver=self._resolver) decoded = unpack(payload, {ResolvableMatcher: resolver}) handler_id = decoded.get("id") assert isinstance(handler_id, str) matcher = decoded.get("request") assert isinstance(matcher, (RequestMatcher, LogicalMatcher)) response = decoded.get("response") assert isinstance(response, (Response, RelayResponse)) return handler_id, matcher, response @jj.match(POST, headers={"x-jj-remote-mock": exists}) async def register(self, request: Request) -> Response: payload = await request.read() try: handler_id, matcher, response = self._decode(payload) except Exception: return Response(status=BAD_REQUEST, json={"status": BAD_REQUEST}) async def handler(request: Request) -> RemoteResponseType: return response.copy() self._resolver.register_attribute("handler_id", handler_id, handler) 
setattr(self._app.__class__, handler_id, matcher(handler)) return Response(status=OK, json={"status": OK}) @jj.match(DELETE, headers={"x-jj-remote-mock": exists}) async def deregister(self, request: Request) -> Response: payload = await request.read() try: handler_id, *_ = self._decode(payload)<|fim▁hole|> try: delattr(self._app.__class__, handler_id) except AttributeError: pass await self._repo.delete_by_tag(handler_id) return Response(status=OK, json={"status": OK}) @jj.match(GET, headers={"x-jj-remote-mock": exists}) async def history(self, request: Request) -> Response: payload = await request.read() try: handler_id, *_ = self._decode(payload) except Exception: return Response(status=BAD_REQUEST, json={"status": BAD_REQUEST}) history = await self._repo.get_by_tag(handler_id) packed = pack(history) return Response(status=OK, body=packed) @jj.match(ANY) async def resolve(self, request: Request) -> StreamResponse: handler = await self._resolver.resolve(request, self._app) response = await handler(request) handler_id = self._resolver.get_attribute("handler_id", handler, default=None) if handler_id: await self._repo.add(request, response, tags=[handler_id]) return response<|fim▁end|>
except Exception: return Response(status=BAD_REQUEST, json={"status": BAD_REQUEST})
<|file_name|>issue-5243.rs<|end_file_name|><|fim▁begin|>// run-pass #![allow(dead_code)] // Check that merely having lifetime parameters is not // enough for codegen to consider this as non-monomorphic, // which led to various assertions and failures in turn. // pretty-expanded FIXME #23616 struct S<'a> { v: &'a isize }<|fim▁hole|>fn f<'lt>(_s: &'lt S<'lt>) {} pub fn main() { f(& S { v: &42 }); }<|fim▁end|>
<|file_name|>pipeline.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use bluetooth_traits::BluetoothRequest; use compositing::CompositionPipeline; use compositing::CompositorProxy; use compositing::compositor_thread::Msg as CompositorMsg; use constellation::ScriptChan; use devtools_traits::{DevtoolsControlMsg, ScriptToDevtoolsControlMsg}; use euclid::scale_factor::ScaleFactor; use euclid::size::TypedSize2D; use gfx::font_cache_thread::FontCacheThread; use gfx_traits::DevicePixel; use ipc_channel::ipc::{self, IpcReceiver, IpcSender}; use ipc_channel::router::ROUTER; use layout_traits::LayoutThreadFactory; use msg::constellation_msg::{FrameId, FrameType, PipelineId, PipelineNamespaceId}; use net_traits::{IpcSend, ResourceThreads}; use net_traits::image_cache_thread::ImageCacheThread; use profile_traits::mem as profile_mem; use profile_traits::time; use script_traits::{ConstellationControlMsg, InitialScriptState}; use script_traits::{LayoutControlMsg, LayoutMsg, LoadData, MozBrowserEvent}; use script_traits::{NewLayoutInfo, SWManagerMsg, SWManagerSenders, ScriptMsg}; use script_traits::{ScriptThreadFactory, TimerEventRequest, WindowSizeData}; use servo_config::opts::{self, Opts}; use servo_config::prefs::{PREFS, Pref}; use servo_url::ServoUrl; use std::collections::HashMap; use std::env; use std::ffi::OsStr; use std::io::Error as IOError; use std::process; use std::rc::Rc; use std::sync::mpsc::Sender; use style_traits::{PagePx, ViewportPx}; use webrender_traits; /// A uniquely-identifiable pipeline of script thread, layout thread, and paint thread. pub struct Pipeline { pub id: PipelineId, /// The ID of the frame that contains this Pipeline. 
pub frame_id: FrameId, pub parent_info: Option<(PipelineId, FrameType)>, pub script_chan: Rc<ScriptChan>, /// A channel to layout, for performing reflows and shutdown. pub layout_chan: IpcSender<LayoutControlMsg>, /// A channel to the compositor. pub compositor_proxy: Box<CompositorProxy + 'static + Send>, /// URL corresponding to the most recently-loaded page. pub url: ServoUrl, /// The title of the most recently-loaded page. pub title: Option<String>, pub size: Option<TypedSize2D<f32, PagePx>>, /// Whether this pipeline is currently running animations. Pipelines that are running /// animations cause composites to be continually scheduled. pub running_animations: bool, pub children: Vec<FrameId>, /// Whether this pipeline is considered distinct from public pipelines. pub is_private: bool, /// Whether this pipeline should be treated as visible for the purposes of scheduling and /// resource management. pub visible: bool, } /// Initial setup data needed to construct a pipeline. /// /// *DO NOT* add any Senders to this unless you absolutely know what you're doing, or pcwalton will /// have to rewrite your code. Use IPC senders instead. pub struct InitialPipelineState { /// The ID of the pipeline to create. pub id: PipelineId, /// The ID of the frame that contains this Pipeline. pub frame_id: FrameId, /// The ID of the top-level frame that contains this Pipeline. pub top_level_frame_id: FrameId, /// The ID of the parent pipeline and frame type, if any. /// If `None`, this is the root. pub parent_info: Option<(PipelineId, FrameType)>, /// A channel to the associated constellation. pub constellation_chan: IpcSender<ScriptMsg>, /// A channel for the layout thread to send messages to the constellation. pub layout_to_constellation_chan: IpcSender<LayoutMsg>, /// A channel to schedule timer events. pub scheduler_chan: IpcSender<TimerEventRequest>, /// A channel to the compositor. 
pub compositor_proxy: Box<CompositorProxy + 'static + Send>, /// A channel to the developer tools, if applicable. pub devtools_chan: Option<Sender<DevtoolsControlMsg>>, /// A channel to the bluetooth thread. pub bluetooth_thread: IpcSender<BluetoothRequest>, /// A channel to the service worker manager thread pub swmanager_thread: IpcSender<SWManagerMsg>, /// A channel to the image cache thread. pub image_cache_thread: ImageCacheThread, /// A channel to the font cache thread. pub font_cache_thread: FontCacheThread, /// Channels to the resource-related threads. pub resource_threads: ResourceThreads, /// A channel to the time profiler thread. pub time_profiler_chan: time::ProfilerChan, /// A channel to the memory profiler thread. pub mem_profiler_chan: profile_mem::ProfilerChan, /// Information about the initial window size. pub window_size: Option<TypedSize2D<f32, PagePx>>, /// Information about the device pixel ratio. pub device_pixel_ratio: ScaleFactor<f32, ViewportPx, DevicePixel>, /// A channel to the script thread, if applicable. /// If this is `None`, create a new script thread. /// If this is `Some`, then reuse an existing script thread. pub script_chan: Option<Rc<ScriptChan>>, /// Information about the page to load. pub load_data: LoadData, /// The ID of the pipeline namespace for this script thread. pub pipeline_namespace_id: PipelineNamespaceId, /// Pipeline visibility to be inherited pub prev_visibility: Option<bool>, /// Webrender api. pub webrender_api_sender: webrender_traits::RenderApiSender, /// Whether this pipeline is considered private. pub is_private: bool, } impl Pipeline { /// Starts a paint thread, layout thread, and possibly a script thread, in /// a new process if requested. 
pub fn spawn<Message, LTF, STF>(state: InitialPipelineState) -> Result<Pipeline, IOError> where LTF: LayoutThreadFactory<Message=Message>, STF: ScriptThreadFactory<Message=Message> { // Note: we allow channel creation to panic, since recovering from this // probably requires a general low-memory strategy. let (pipeline_chan, pipeline_port) = ipc::channel() .expect("Pipeline main chan"); let (layout_content_process_shutdown_chan, layout_content_process_shutdown_port) = ipc::channel().expect("Pipeline layout content shutdown chan"); let device_pixel_ratio = state.device_pixel_ratio; let window_size = state.window_size.map(|size| { WindowSizeData { visible_viewport: size, initial_viewport: size * ScaleFactor::new(1.0), device_pixel_ratio: device_pixel_ratio, } }); let (script_chan, content_ports) = match state.script_chan { Some(script_chan) => { let new_layout_info = NewLayoutInfo { parent_info: state.parent_info, new_pipeline_id: state.id, frame_id: state.frame_id, load_data: state.load_data.clone(), window_size: window_size, pipeline_port: pipeline_port, content_process_shutdown_chan: Some(layout_content_process_shutdown_chan.clone()), layout_threads: PREFS.get("layout.threads").as_u64().expect("count") as usize, }; if let Err(e) = script_chan.send(ConstellationControlMsg::AttachLayout(new_layout_info)) { warn!("Sending to script during pipeline creation failed ({})", e); } (script_chan, None) } None => { let (script_chan, script_port) = ipc::channel().expect("Pipeline script chan"); (ScriptChan::new(script_chan), Some((script_port, pipeline_port))) } }; if let Some((script_port, pipeline_port)) = content_ports { // Route messages coming from content to devtools as appropriate. 
let script_to_devtools_chan = state.devtools_chan.as_ref().map(|devtools_chan| { let (script_to_devtools_chan, script_to_devtools_port) = ipc::channel() .expect("Pipeline script to devtools chan"); let devtools_chan = (*devtools_chan).clone(); ROUTER.add_route(script_to_devtools_port.to_opaque(), box move |message| { match message.to::<ScriptToDevtoolsControlMsg>() { Err(e) => error!("Cast to ScriptToDevtoolsControlMsg failed ({}).", e), Ok(message) => if let Err(e) = devtools_chan.send(DevtoolsControlMsg::FromScript(message)) { warn!("Sending to devtools failed ({})", e) }, } }); script_to_devtools_chan }); let (script_content_process_shutdown_chan, script_content_process_shutdown_port) = ipc::channel().expect("Pipeline script content process shutdown chan"); let unprivileged_pipeline_content = UnprivilegedPipelineContent { id: state.id, frame_id: state.frame_id, top_level_frame_id: state.top_level_frame_id, parent_info: state.parent_info, constellation_chan: state.constellation_chan, scheduler_chan: state.scheduler_chan, devtools_chan: script_to_devtools_chan, bluetooth_thread: state.bluetooth_thread, swmanager_thread: state.swmanager_thread, image_cache_thread: state.image_cache_thread, font_cache_thread: state.font_cache_thread, resource_threads: state.resource_threads, time_profiler_chan: state.time_profiler_chan, mem_profiler_chan: state.mem_profiler_chan, window_size: window_size, layout_to_constellation_chan: state.layout_to_constellation_chan, script_chan: script_chan.sender(), load_data: state.load_data.clone(), script_port: script_port, opts: (*opts::get()).clone(), prefs: PREFS.cloned(), pipeline_port: pipeline_port, pipeline_namespace_id: state.pipeline_namespace_id, layout_content_process_shutdown_chan: layout_content_process_shutdown_chan, layout_content_process_shutdown_port: layout_content_process_shutdown_port, script_content_process_shutdown_chan: script_content_process_shutdown_chan, script_content_process_shutdown_port: 
script_content_process_shutdown_port, webrender_api_sender: state.webrender_api_sender, }; // Spawn the child process. // // Yes, that's all there is to it! if opts::multiprocess() { let _ = try!(unprivileged_pipeline_content.spawn_multiprocess()); } else { unprivileged_pipeline_content.start_all::<Message, LTF, STF>(false); } } Ok(Pipeline::new(state.id, state.frame_id, state.parent_info, script_chan, pipeline_chan, state.compositor_proxy, state.is_private, state.load_data.url, state.window_size, state.prev_visibility.unwrap_or(true))) } /// Creates a new `Pipeline`, after the script and layout threads have been /// spawned. pub fn new(id: PipelineId, frame_id: FrameId, parent_info: Option<(PipelineId, FrameType)>, script_chan: Rc<ScriptChan>, layout_chan: IpcSender<LayoutControlMsg>, compositor_proxy: Box<CompositorProxy + 'static + Send>, is_private: bool, url: ServoUrl, size: Option<TypedSize2D<f32, PagePx>>, visible: bool) -> Pipeline { let pipeline = Pipeline { id: id, frame_id: frame_id, parent_info: parent_info, script_chan: script_chan, layout_chan: layout_chan, compositor_proxy: compositor_proxy, url: url, title: None, children: vec!(), size: size, running_animations: false, visible: visible, is_private: is_private, }; pipeline.notify_visibility(); pipeline } pub fn exit(&self) { debug!("pipeline {:?} exiting", self.id); // The compositor wants to know when pipelines shut down too. // It may still have messages to process from these other threads // before they can be safely shut down. // It's OK for the constellation to block on the compositor, // since the compositor never blocks on the constellation. if let Ok((sender, receiver)) = ipc::channel() { self.compositor_proxy.send(CompositorMsg::PipelineExited(self.id, sender)); if let Err(e) = receiver.recv() { warn!("Sending exit message failed ({}).", e); } } // Script thread handles shutting down layout, and layout handles shutting down the painter. 
// For now, if the script thread has failed, we give up on clean shutdown. if let Err(e) = self.script_chan.send(ConstellationControlMsg::ExitPipeline(self.id)) { warn!("Sending script exit message failed ({}).", e); } } pub fn freeze(&self) { if let Err(e) = self.script_chan.send(ConstellationControlMsg::Freeze(self.id)) { warn!("Sending freeze message failed ({}).", e); } } pub fn thaw(&self) { if let Err(e) = self.script_chan.send(ConstellationControlMsg::Thaw(self.id)) { warn!("Sending freeze message failed ({}).", e); } } pub fn force_exit(&self) { if let Err(e) = self.script_chan.send(ConstellationControlMsg::ExitPipeline(self.id)) { warn!("Sending script exit message failed ({}).", e); } if let Err(e) = self.layout_chan.send(LayoutControlMsg::ExitNow) { warn!("Sending layout exit message failed ({}).", e); } } pub fn to_sendable(&self) -> CompositionPipeline { CompositionPipeline { id: self.id.clone(), script_chan: self.script_chan.sender(), layout_chan: self.layout_chan.clone(), } } pub fn add_child(&mut self, frame_id: FrameId) { self.children.push(frame_id); } pub fn remove_child(&mut self, frame_id: FrameId) { match self.children.iter().position(|id| *id == frame_id) { None => return warn!("Pipeline remove child already removed ({:?}).", frame_id), Some(index) => self.children.remove(index), }; } pub fn trigger_mozbrowser_event(&self, child_id: Option<FrameId>, event: MozBrowserEvent) { assert!(PREFS.is_mozbrowser_enabled()); let event = ConstellationControlMsg::MozBrowserEvent(self.id, child_id, event); if let Err(e) = self.script_chan.send(event) { warn!("Sending mozbrowser event to script failed ({}).", e); } } fn notify_visibility(&self) { self.script_chan.send(ConstellationControlMsg::ChangeFrameVisibilityStatus(self.id, self.visible)) .expect("Pipeline script chan"); self.compositor_proxy.send(CompositorMsg::PipelineVisibilityChanged(self.id, self.visible)); } pub fn change_visibility(&mut self, visible: bool) { if visible == self.visible { return; 
} self.visible = visible; self.notify_visibility(); } } #[derive(Deserialize, Serialize)] pub struct UnprivilegedPipelineContent { id: PipelineId, frame_id: FrameId, top_level_frame_id: FrameId, parent_info: Option<(PipelineId, FrameType)>, constellation_chan: IpcSender<ScriptMsg>, layout_to_constellation_chan: IpcSender<LayoutMsg>, scheduler_chan: IpcSender<TimerEventRequest>, devtools_chan: Option<IpcSender<ScriptToDevtoolsControlMsg>>, bluetooth_thread: IpcSender<BluetoothRequest>, swmanager_thread: IpcSender<SWManagerMsg>, image_cache_thread: ImageCacheThread, font_cache_thread: FontCacheThread, resource_threads: ResourceThreads, time_profiler_chan: time::ProfilerChan, mem_profiler_chan: profile_mem::ProfilerChan, window_size: Option<WindowSizeData>, script_chan: IpcSender<ConstellationControlMsg>, load_data: LoadData, script_port: IpcReceiver<ConstellationControlMsg>, opts: Opts, prefs: HashMap<String, Pref>, pipeline_port: IpcReceiver<LayoutControlMsg>, pipeline_namespace_id: PipelineNamespaceId, layout_content_process_shutdown_chan: IpcSender<()>, layout_content_process_shutdown_port: IpcReceiver<()>, script_content_process_shutdown_chan: IpcSender<()>, script_content_process_shutdown_port: IpcReceiver<()>, webrender_api_sender: webrender_traits::RenderApiSender, } impl UnprivilegedPipelineContent { pub fn start_all<Message, LTF, STF>(self, wait_for_completion: bool) where LTF: LayoutThreadFactory<Message=Message>, STF: ScriptThreadFactory<Message=Message> { let layout_pair = STF::create(InitialScriptState { id: self.id, frame_id: self.frame_id, top_level_frame_id: self.top_level_frame_id, parent_info: self.parent_info, control_chan: self.script_chan.clone(), control_port: self.script_port, constellation_chan: self.constellation_chan, layout_to_constellation_chan: self.layout_to_constellation_chan.clone(),<|fim▁hole|> image_cache_thread: self.image_cache_thread.clone(), time_profiler_chan: self.time_profiler_chan.clone(), mem_profiler_chan: 
self.mem_profiler_chan.clone(), devtools_chan: self.devtools_chan, window_size: self.window_size, pipeline_namespace_id: self.pipeline_namespace_id, content_process_shutdown_chan: self.script_content_process_shutdown_chan, }, self.load_data.clone()); LTF::create(self.id, Some(self.top_level_frame_id), self.load_data.url, self.parent_info.is_some(), layout_pair, self.pipeline_port, self.layout_to_constellation_chan, self.script_chan, self.image_cache_thread, self.font_cache_thread, self.time_profiler_chan, self.mem_profiler_chan, Some(self.layout_content_process_shutdown_chan), self.webrender_api_sender, self.prefs.get("layout.threads").expect("exists").value() .as_u64().expect("count") as usize); if wait_for_completion { let _ = self.script_content_process_shutdown_port.recv(); let _ = self.layout_content_process_shutdown_port.recv(); } } #[cfg(not(target_os = "windows"))] pub fn spawn_multiprocess(self) -> Result<(), IOError> { use gaol::sandbox::{self, Sandbox, SandboxMethods}; use ipc_channel::ipc::IpcOneShotServer; use sandboxing::content_process_sandbox_profile; impl CommandMethods for sandbox::Command { fn arg<T>(&mut self, arg: T) where T: AsRef<OsStr> { self.arg(arg); } fn env<T, U>(&mut self, key: T, val: U) where T: AsRef<OsStr>, U: AsRef<OsStr> { self.env(key, val); } } // Note that this function can panic, due to process creation, // avoiding this panic would require a mechanism for dealing // with low-resource scenarios. let (server, token) = IpcOneShotServer::<IpcSender<UnprivilegedPipelineContent>>::new() .expect("Failed to create IPC one-shot server."); // If there is a sandbox, use the `gaol` API to create the child process. 
if opts::get().sandbox { let mut command = sandbox::Command::me().expect("Failed to get current sandbox."); self.setup_common(&mut command, token); let profile = content_process_sandbox_profile(); let _ = Sandbox::new(profile) .start(&mut command) .expect("Failed to start sandboxed child process!"); } else { let path_to_self = env::current_exe() .expect("Failed to get current executor."); let mut child_process = process::Command::new(path_to_self); self.setup_common(&mut child_process, token); let _ = child_process.spawn().expect("Failed to start unsandboxed child process!"); } let (_receiver, sender) = server.accept().expect("Server failed to accept."); try!(sender.send(self)); Ok(()) } #[cfg(target_os = "windows")] pub fn spawn_multiprocess(self) -> Result<(), IOError> { error!("Multiprocess is not supported on Windows."); process::exit(1); } #[cfg(not(windows))] fn setup_common<C: CommandMethods>(&self, command: &mut C, token: String) { C::arg(command, "--content-process"); C::arg(command, token); if let Ok(value) = env::var("RUST_BACKTRACE") { C::env(command, "RUST_BACKTRACE", value); } if let Ok(value) = env::var("RUST_LOG") { C::env(command, "RUST_LOG", value); } } pub fn constellation_chan(&self) -> IpcSender<ScriptMsg> { self.constellation_chan.clone() } pub fn opts(&self) -> Opts { self.opts.clone() } pub fn prefs(&self) -> HashMap<String, Pref> { self.prefs.clone() } pub fn swmanager_senders(&self) -> SWManagerSenders { SWManagerSenders { swmanager_sender: self.swmanager_thread.clone(), resource_sender: self.resource_threads.sender() } } } trait CommandMethods { fn arg<T>(&mut self, arg: T) where T: AsRef<OsStr>; fn env<T, U>(&mut self, key: T, val: U) where T: AsRef<OsStr>, U: AsRef<OsStr>; } impl CommandMethods for process::Command { fn arg<T>(&mut self, arg: T) where T: AsRef<OsStr> { self.arg(arg); } fn env<T, U>(&mut self, key: T, val: U) where T: AsRef<OsStr>, U: AsRef<OsStr> { self.env(key, val); } }<|fim▁end|>
scheduler_chan: self.scheduler_chan, bluetooth_thread: self.bluetooth_thread, resource_threads: self.resource_threads,
<|file_name|>SignificanceHeuristicTests.java<|end_file_name|><|fim▁begin|>/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.search.aggregations.bucket.significant; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.ChiSquare; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.GND; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.JLHScore; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.MutualInformation; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.PercentageScore; import 
org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristic; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicBuilder; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParser; import org.elasticsearch.search.aggregations.bucket.significant.heuristics.SignificanceHeuristicParserMapper; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TestSearchContext; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Set; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; /** * */ public class SignificanceHeuristicTests extends ESTestCase { static class SignificantTermsTestSearchContext extends TestSearchContext { @Override public int numberOfShards() { return 1; } @Override public SearchShardTarget shardTarget() { return new SearchShardTarget("no node, this is a unit test", "no index, this is a unit test", 0); } } // test that stream output can actually be read - does not replace bwc test public void testStreamResponse() throws Exception { Version version = randomVersion(random()); InternalSignificantTerms[] sigTerms = getRandomSignificantTerms(getRandomSignificanceheuristic()); // write ByteArrayOutputStream outBuffer = new ByteArrayOutputStream(); OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer); out.setVersion(version); sigTerms[0].writeTo(out); // read ByteArrayInputStream inBuffer = new 
ByteArrayInputStream(outBuffer.toByteArray()); InputStreamStreamInput in = new InputStreamStreamInput(inBuffer); in.setVersion(version); sigTerms[1].readFrom(in); assertTrue(sigTerms[1].significanceHeuristic.equals(sigTerms[0].significanceHeuristic)); } InternalSignificantTerms[] getRandomSignificantTerms(SignificanceHeuristic heuristic) { InternalSignificantTerms[] sTerms = new InternalSignificantTerms[2]; ArrayList<InternalSignificantTerms.Bucket> buckets = new ArrayList<>(); if (randomBoolean()) { BytesRef term = new BytesRef("123.0"); buckets.add(new SignificantLongTerms.Bucket(1, 2, 3, 4, 123, InternalAggregations.EMPTY, null)); sTerms[0] = new SignificantLongTerms(10, 20, "some_name", null, 1, 1, heuristic, buckets, Collections.EMPTY_LIST, null); sTerms[1] = new SignificantLongTerms(); } else { BytesRef term = new BytesRef("someterm"); buckets.add(new SignificantStringTerms.Bucket(term, 1, 2, 3, 4, InternalAggregations.EMPTY)); sTerms[0] = new SignificantStringTerms(10, 20, "some_name", 1, 1, heuristic, buckets, Collections.EMPTY_LIST, null); sTerms[1] = new SignificantStringTerms(); } return sTerms; } SignificanceHeuristic getRandomSignificanceheuristic() { List<SignificanceHeuristic> heuristics = new ArrayList<>(); heuristics.add(JLHScore.INSTANCE); heuristics.add(new MutualInformation(randomBoolean(), randomBoolean())); heuristics.add(new GND(randomBoolean())); heuristics.add(new ChiSquare(randomBoolean(), randomBoolean())); return heuristics.get(randomInt(3)); } // test that // 1. The output of the builders can actually be parsed // 2. 
The parser does not swallow parameters after a significance heuristic was defined public void testBuilderAndParser() throws Exception { Set<SignificanceHeuristicParser> parsers = new HashSet<>(); SignificanceHeuristicParserMapper heuristicParserMapper = new SignificanceHeuristicParserMapper(parsers, null); SearchContext searchContext = new SignificantTermsTestSearchContext(); // test jlh with string assertTrue(parseFromString(heuristicParserMapper, searchContext, "\"jlh\":{}") instanceof JLHScore); // test gnd with string assertTrue(parseFromString(heuristicParserMapper, searchContext, "\"gnd\":{}") instanceof GND); // test mutual information with string boolean includeNegatives = randomBoolean(); boolean backgroundIsSuperset = randomBoolean(); assertThat(parseFromString(heuristicParserMapper, searchContext, "\"mutual_information\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new MutualInformation(includeNegatives, backgroundIsSuperset)))); assertThat(parseFromString(heuristicParserMapper, searchContext, "\"chi_square\":{\"include_negatives\": " + includeNegatives + ", \"background_is_superset\":" + backgroundIsSuperset + "}"), equalTo((SignificanceHeuristic) (new ChiSquare(includeNegatives, backgroundIsSuperset)))); // test with builders assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new JLHScore.JLHScoreBuilder()) instanceof JLHScore); assertTrue(parseFromBuilder(heuristicParserMapper, searchContext, new GND.GNDBuilder(backgroundIsSuperset)) instanceof GND); assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new MutualInformation.MutualInformationBuilder(includeNegatives, backgroundIsSuperset)), equalTo((SignificanceHeuristic) new MutualInformation(includeNegatives, backgroundIsSuperset))); assertThat(parseFromBuilder(heuristicParserMapper, searchContext, new ChiSquare.ChiSquareBuilder(includeNegatives, backgroundIsSuperset)), 
equalTo((SignificanceHeuristic) new ChiSquare(includeNegatives, backgroundIsSuperset))); // test exceptions String faultyHeuristicdefinition = "\"mutual_information\":{\"include_negatives\": false, \"some_unknown_field\": false}"; String expectedError = "unknown field [some_unknown_field]"; checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError); faultyHeuristicdefinition = "\"chi_square\":{\"unknown_field\": true}"; expectedError = "unknown field [unknown_field]"; checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError); faultyHeuristicdefinition = "\"jlh\":{\"unknown_field\": true}"; expectedError = "expected an empty object, but found "; checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError); faultyHeuristicdefinition = "\"gnd\":{\"unknown_field\": true}"; expectedError = "unknown field [unknown_field]"; checkParseException(heuristicParserMapper, searchContext, faultyHeuristicdefinition, expectedError); } protected void checkParseException(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, String faultyHeuristicDefinition, String expectedError) throws IOException { try { XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); stParser.nextToken(); new SignificantTermsParser(heuristicParserMapper).parse("testagg", stParser, searchContext); fail(); } catch (ElasticsearchParseException e) { assertTrue(e.getMessage().contains(expectedError)); } } protected SignificanceHeuristic parseFromBuilder(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, SignificanceHeuristicBuilder significanceHeuristicBuilder) throws IOException { SignificantTermsBuilder stBuilder = new SignificantTermsBuilder("testagg"); 
stBuilder.significanceHeuristic(significanceHeuristicBuilder).field("text").minDocCount(200); XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder(); stBuilder.internalXContent(stXContentBuilder, null); XContentParser stParser = JsonXContent.jsonXContent.createParser(stXContentBuilder.string()); return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser); } private SignificanceHeuristic parseSignificanceHeuristic(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, XContentParser stParser) throws IOException { stParser.nextToken(); SignificantTermsAggregatorFactory aggregatorFactory = (SignificantTermsAggregatorFactory) new SignificantTermsParser(heuristicParserMapper).parse("testagg", stParser, searchContext); stParser.nextToken(); assertThat(aggregatorFactory.getBucketCountThresholds().getMinDocCount(), equalTo(200l)); assertThat(stParser.currentToken(), equalTo(null)); stParser.close(); return aggregatorFactory.getSignificanceHeuristic(); } protected SignificanceHeuristic parseFromString(SignificanceHeuristicParserMapper heuristicParserMapper, SearchContext searchContext, String heuristicString) throws IOException { XContentParser stParser = JsonXContent.jsonXContent.createParser("{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); return parseSignificanceHeuristic(heuristicParserMapper, searchContext, stParser); } void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) { try { heuristicIsSuperset.getScore(2, 3, 1, 4); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > supersetFreq")); } try { heuristicIsSuperset.getScore(1, 4, 2, 3); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); 
assertTrue(illegalArgumentException.getMessage().contains("subsetSize > supersetSize")); } try { heuristicIsSuperset.getScore(2, 1, 3, 4); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize")); } try { heuristicIsSuperset.getScore(1, 2, 4, 3); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize")); } try { heuristicIsSuperset.getScore(1, 3, 4, 4); fail(); } catch (IllegalArgumentException assertionError) { assertNotNull(assertionError.getMessage()); assertTrue(assertionError.getMessage().contains("supersetFreq - subsetFreq > supersetSize - subsetSize")); } try { int idx = randomInt(3); long[] values = {1, 2, 3, 4}; values[idx] *= -1; heuristicIsSuperset.getScore(values[0], values[1], values[2], values[3]); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive")); } try { heuristicNotSuperset.getScore(2, 1, 3, 4); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize")); } try { heuristicNotSuperset.getScore(1, 2, 4, 3); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize")); } try { int idx = randomInt(3); long[] values = {1, 2, 3, 4}; values[idx] *= -1; heuristicNotSuperset.getScore(values[0], values[1], values[2], values[3]); fail(); } catch (IllegalArgumentException 
illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive")); } } void testAssertions(SignificanceHeuristic heuristic) { try { int idx = randomInt(3); long[] values = {1, 2, 3, 4}; values[idx] *= -1; heuristic.getScore(values[0], values[1], values[2], values[3]); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("Frequencies of subset and superset must be positive")); } try { heuristic.getScore(1, 2, 4, 3); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("supersetFreq > supersetSize")); } try { heuristic.getScore(2, 1, 3, 4); fail(); } catch (IllegalArgumentException illegalArgumentException) { assertNotNull(illegalArgumentException.getMessage()); assertTrue(illegalArgumentException.getMessage().contains("subsetFreq > subsetSize")); } } public void testAssertions() throws Exception { testBackgroundAssertions(new MutualInformation(true, true), new MutualInformation(true, false)); testBackgroundAssertions(new ChiSquare(true, true), new ChiSquare(true, false)); testBackgroundAssertions(new GND(true), new GND(false)); testAssertions(PercentageScore.INSTANCE); testAssertions(JLHScore.INSTANCE); } public void testBasicScoreProperties() { basicScoreProperties(JLHScore.INSTANCE, true); basicScoreProperties(new GND(true), true); basicScoreProperties(PercentageScore.INSTANCE, true); basicScoreProperties(new MutualInformation(true, true), false); basicScoreProperties(new ChiSquare(true, true), false); } public void basicScoreProperties(SignificanceHeuristic heuristic, boolean test0) { assertThat(heuristic.getScore(1, 1, 1, 3), greaterThan(0.0)); assertThat(heuristic.getScore(1, 1, 
2, 3), lessThan(heuristic.getScore(1, 1, 1, 3))); assertThat(heuristic.getScore(1, 1, 3, 4), lessThan(heuristic.getScore(1, 1, 2, 4))); if (test0) { assertThat(heuristic.getScore(0, 1, 2, 3), equalTo(0.0)); } double score = 0.0; try { long a = randomLong(); long b = randomLong(); long c = randomLong(); long d = randomLong(); score = heuristic.getScore(a, b, c, d); } catch (IllegalArgumentException e) { } assertThat(score, greaterThanOrEqualTo(0.0)); } public void testScoreMutual() throws Exception { SignificanceHeuristic heuristic = new MutualInformation(true, true); assertThat(heuristic.getScore(1, 1, 1, 3), greaterThan(0.0)); assertThat(heuristic.getScore(1, 1, 2, 3), lessThan(heuristic.getScore(1, 1, 1, 3))); assertThat(heuristic.getScore(2, 2, 2, 4), equalTo(1.0)); assertThat(heuristic.getScore(0, 2, 2, 4), equalTo(1.0)); assertThat(heuristic.getScore(2, 2, 4, 4), equalTo(0.0)); assertThat(heuristic.getScore(1, 2, 2, 4), equalTo(0.0)); assertThat(heuristic.getScore(3, 6, 9, 18), equalTo(0.0)); double score = 0.0; try { long a = randomLong(); long b = randomLong();<|fim▁hole|> } catch (IllegalArgumentException e) { } assertThat(score, lessThanOrEqualTo(1.0)); assertThat(score, greaterThanOrEqualTo(0.0)); heuristic = new MutualInformation(false, true); assertThat(heuristic.getScore(0, 1, 2, 3), equalTo(Double.NEGATIVE_INFINITY)); heuristic = new MutualInformation(true, false); score = heuristic.getScore(2, 3, 1, 4); assertThat(score, greaterThanOrEqualTo(0.0)); assertThat(score, lessThanOrEqualTo(1.0)); score = heuristic.getScore(1, 4, 2, 3); assertThat(score, greaterThanOrEqualTo(0.0)); assertThat(score, lessThanOrEqualTo(1.0)); score = heuristic.getScore(1, 3, 4, 4); assertThat(score, greaterThanOrEqualTo(0.0)); assertThat(score, lessThanOrEqualTo(1.0)); } public void testGNDCornerCases() throws Exception { GND gnd = new GND(true); //term is only in the subset, not at all in the other set but that is because the other set is empty. 
// this should actually not happen because only terms that are in the subset are considered now, // however, in this case the score should be 0 because a term that does not exist cannot be relevant... assertThat(gnd.getScore(0, randomIntBetween(1, 2), 0, randomIntBetween(2,3)), equalTo(0.0)); // the terms do not co-occur at all - should be 0 assertThat(gnd.getScore(0, randomIntBetween(1, 2), randomIntBetween(2, 3), randomIntBetween(5,6)), equalTo(0.0)); // comparison between two terms that do not exist - probably not relevant assertThat(gnd.getScore(0, 0, 0, randomIntBetween(1,2)), equalTo(0.0)); // terms co-occur perfectly - should be 1 assertThat(gnd.getScore(1, 1, 1, 1), equalTo(1.0)); gnd = new GND(false); assertThat(gnd.getScore(0, 0, 0, 0), equalTo(0.0)); } }<|fim▁end|>
long c = randomLong(); long d = randomLong(); score = heuristic.getScore(a, b, c, d);
<|file_name|>universal-issue-48703.rs<|end_file_name|><|fim▁begin|>#![feature(universal_impl_trait)] use std::fmt::Debug; fn foo<T>(x: impl Debug) { } fn main() { foo::<String>('a'); //~ ERROR cannot provide explicit generic arguments<|fim▁hole|><|fim▁end|>
}
<|file_name|>util.rs<|end_file_name|><|fim▁begin|>/* Copyright (c) 2015, 2016 Saurav Sachidanand Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN<|fim▁hole|>//! Some programming utilities /// Returns a float rounded upto a certain number of decimal digits #[inline] pub fn round_upto_digits(float: f64, decimal_digits: u32) -> f64 { let mut d = 1.0; for _ in 1..(decimal_digits + 1) { d *= 10.0; } (float * d).round() / d } /** Evaluates a polynomial using Horner's algorithm # Arguments * `$x` : The value of the independent variable `f32 or f64` * `$c` : The constant term `f32 or f64` * `$($a),*`: Sequence of coefficient terms for `$x`, in ascending powers of `$x` **/ #[macro_export] macro_rules! Horner_eval { ($x:expr, $c:expr, $($a:expr),*) => { { let mut y = $c; let mut u = 1.0; $( u *= $x; y += u * $a; )* y } } }<|fim▁end|>
THE SOFTWARE. */
<|file_name|>training_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for training.py.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import glob import json import os import random import shutil import tempfile import time import numpy as np from tensorflow.python.data.ops import dataset_ops from tensorflow.python.estimator import estimator as estimator_lib from tensorflow.python.estimator import exporter as exporter_lib from tensorflow.python.estimator import model_fn as model_fn_lib from tensorflow.python.estimator import run_config as run_config_lib from tensorflow.python.estimator import training from tensorflow.python.estimator.canned import dnn from tensorflow.python.estimator.canned import prediction_keys from tensorflow.python.estimator.export import export as export_lib from tensorflow.python.feature_column import feature_column from tensorflow.python.framework import constant_op from tensorflow.python.framework import ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import metrics as metrics_lib from tensorflow.python.ops import state_ops from tensorflow.python.platform import gfile from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging as logging 
from tensorflow.python.summary import summary_iterator from tensorflow.python.summary.writer import writer_cache from tensorflow.python.training import basic_session_run_hooks from tensorflow.python.training import monitored_session from tensorflow.python.training import server_lib from tensorflow.python.training import session_run_hook from tensorflow.python.training import training_util from tensorflow.python.util import compat _DEFAULT_EVAL_STEPS = 100 _DEFAULT_EVAL_DELAY_SECS = 120 _DEFAULT_EVAL_THROTTLE_SECS = 600 _DELAY_SECS_PER_WORKER = 5 _GLOBAL_STEP_KEY = ops.GraphKeys.GLOBAL_STEP _INVALID_INPUT_FN_MSG = '`input_fn` must be callable' _INVALID_HOOK_MSG = 'All hooks must be `SessionRunHook` instances' _INVALID_MAX_STEPS_MSG = 'Must specify max_steps > 0' _INVALID_STEPS_MSG = 'Must specify steps > 0' _INVALID_NAME_MSG = '`name` must be string' _INVALID_EVAL_DELAY_SECS_MSG = 'Must specify start_delay_secs >= 0' _INVALID_EVAL_THROTTLE_SECS_MSG = 'Must specify throttle_secs >= 0' _INVALID_ESTIMATOR_MSG = '`estimator` must have type `tf.estimator.Estimator`' _STALE_CHECKPOINT_MSG = 'There was no new checkpoint after the training.' _INVALID_EXPORTER_MSG = '`exporters` must be an Exporter' _INVALID_EXPORTER_NAME_TYPE_MSG = 'An Exporter must have a string name' _DUPLICATE_EXPORTER_NAMES_MSG = '`exporters` must have unique names.' _NONE_EXPORTER_NAME_MSG = ( 'An Exporter cannot have a name that is `None` or empty.') _INVALID_TRAIN_SPEC_MSG = '`train_spec` must have type `tf.estimator.TrainSpec`' _INVALID_EVAL_SPEC_MSG = '`eval_spec` must have type `tf.estimator.EvalSpec`' _EVAL_SPEC_OR_NONE_MSG = ( '`eval_spec` must be either `None` or have type `tf.estimator.EvalSpec`') _INVALID_EVAL_LISTENER_MSG = 'must have type `_ContinuousEvalListener`' _INVALID_CONFIG_FOR_STD_SERVER_MSG = 'Could not start server; .*TF_CONFIG' _INVALID_LOCAL_TASK_WITH_CLUSTER = '`task.type` in TF_CONFIG cannot be `local`' _INVALID_TASK_TYPE = '`estimator.config` must have task_type set.' 
_INPROPER_THROTTL_SECS = ( 'EvalSpec.throttle_secs is set as 0.*Please consider to increase') # The message should NOT have 'local' word as part of it. As (?!word) is looking # ahead, so, the $ (ending) check is required; otherwise, it will match # partially and return successuful. _INVALID_TASK_TO_RUN = ( 'Task type .* is not supported. Supported task types are ((?!local).)*$') _INVALID_EMPTY_EVAL_RESULT_ERR = ( 'Internal error: `Estimator.evaluate` should never return empty metrics') _INVALID_EVAL_RESULT_TYPE_ERR = '`Estimator.evaluate` should return dict.' _MISSING_GLOBAL_STEP_IN_EVAL_RESULT_ERR = ( 'Internal error: `Estimator.evaluate` result should have `global_step`') _INVALID_EVAL_TASK_ID_ERR = ( 'there can only be one `evaluator` task .*with task id 0') _TF_CONFIG_FOR_CHIEF = { 'cluster': { run_config_lib.TaskType.CHIEF: ['host0:0'], run_config_lib.TaskType.PS: ['host1:1', 'host2:2'], run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4'] }, 'task': { 'type': run_config_lib.TaskType.CHIEF, 'index': 0 } } _TF_CONFIG_FOR_MASTER = { 'cluster': { run_config_lib.TaskType.MASTER: ['host0:0'], run_config_lib.TaskType.PS: ['host1:1', 'host2:2'], run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4'] }, 'task': { 'type': run_config_lib.TaskType.MASTER, 'index': 0 } } _TF_CONFIG_FOR_WORKER = { 'cluster': { run_config_lib.TaskType.CHIEF: ['host0:0'], run_config_lib.TaskType.PS: ['host1:1', 'host2:2'], run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4'] }, 'task': { 'type': run_config_lib.TaskType.WORKER, 'index': 1 } } _TF_CONFIG_FOR_PS = { 'cluster': { run_config_lib.TaskType.CHIEF: ['host0:0'], run_config_lib.TaskType.PS: ['host1:1', 'host2:2'], run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4'] }, 'task': { 'type': run_config_lib.TaskType.PS, 'index': 1 } } _TF_CONFIG_FOR_EVALUATOR = { 'cluster': { run_config_lib.TaskType.CHIEF: ['host0:0'], run_config_lib.TaskType.PS: ['host1:1', 'host2:2'], run_config_lib.TaskType.WORKER: ['host3:3', 'host4:4'] }, 'task': 
{ 'type': run_config_lib.TaskType.EVALUATOR, 'index': 0 } } _TF_CONFIG_FOR_GOOGLE = {'environment': 'google'} class _FakeHook(session_run_hook.SessionRunHook): """Fake implementation of `SessionRunHook`.""" class _InvalidHook(object): """Invalid hook (not a subclass of `SessionRunHook`).""" def _create_exporter(name): class FakeExporter(exporter_lib.Exporter): def __init__(self, name): self._name = name @property def name(self): return self._name def export(self, *args, **kwargs): del args, kwargs return FakeExporter(name=name) def _create_run_config_with_cluster_spec(tf_config): with test.mock.patch.dict('os.environ', {'TF_CONFIG': json.dumps(tf_config)}): return run_config_lib.RunConfig() class TrainSpecTest(test.TestCase): """Tests TrainSpec.""" def testRequiredArgumentsSet(self): """Tests that no errors are raised when all required arguments are set.""" spec = training.TrainSpec(input_fn=lambda: 1) self.assertEqual(1, spec.input_fn()) self.assertIsNone(spec.max_steps) self.assertEqual(0, len(spec.hooks)) def testAllArgumentsSet(self): """Tests that no errors are raised when all arguments are set.""" hooks = [_FakeHook()] spec = training.TrainSpec(input_fn=lambda: 1, max_steps=2, hooks=hooks) self.assertEqual(1, spec.input_fn()) self.assertEqual(2, spec.max_steps) self.assertEqual(tuple(hooks), spec.hooks) def testInvalidInputFn(self): with self.assertRaisesRegexp(TypeError, _INVALID_INPUT_FN_MSG): training.TrainSpec(input_fn='invalid') def testInvalidMaxStep(self): with self.assertRaisesRegexp(ValueError, _INVALID_MAX_STEPS_MSG): training.TrainSpec(input_fn=lambda: 1, max_steps=0) def testInvalidHook(self): with self.assertRaisesRegexp(TypeError, _INVALID_HOOK_MSG): training.TrainSpec(input_fn=lambda: 1, hooks=[_InvalidHook()]) class EvalSpecTest(test.TestCase): """Tests EvalSpec.""" def testRequiredArgumentsSet(self): """Tests that no errors are raised when all required arguments are set.""" spec = training.EvalSpec(input_fn=lambda: 1) self.assertEqual(1, 
spec.input_fn()) self.assertEqual(_DEFAULT_EVAL_STEPS, spec.steps) self.assertIsNone(spec.name) self.assertEqual(0, len(spec.hooks)) self.assertEqual(0, len(spec.exporters)) self.assertEqual(_DEFAULT_EVAL_DELAY_SECS, spec.start_delay_secs) self.assertEqual(_DEFAULT_EVAL_THROTTLE_SECS, spec.throttle_secs) def testAllArgumentsSet(self): """Tests that no errors are raised when all arguments are set.""" hooks = [_FakeHook()] exporter = _create_exporter('a') spec = training.EvalSpec( input_fn=lambda: 1, steps=2, name='name', hooks=hooks, exporters=exporter, start_delay_secs=3, throttle_secs=4) self.assertEqual(1, spec.input_fn()) self.assertEqual(2, spec.steps) self.assertEqual('name', spec.name) self.assertEqual(tuple(hooks), spec.hooks) self.assertEqual((exporter,), spec.exporters) self.assertEqual(3, spec.start_delay_secs) self.assertEqual(4, spec.throttle_secs) def testListOfExporters(self): """Tests that no errors are raised with multiple exporters.""" exporters = [_create_exporter('a'), _create_exporter('b')] spec = training.EvalSpec(input_fn=lambda: 1, exporters=exporters) self.assertEqual(1, spec.input_fn()) self.assertEqual(tuple(exporters), spec.exporters) def testInvalidInputFn(self): with self.assertRaisesRegexp(TypeError, _INVALID_INPUT_FN_MSG): training.EvalSpec(input_fn='invalid') def testInvalidMaxStep(self): with self.assertRaisesRegexp(ValueError, _INVALID_STEPS_MSG): training.EvalSpec(input_fn=lambda: 1, steps=0) def testInvalidName(self): with self.assertRaisesRegexp(TypeError, _INVALID_NAME_MSG): training.EvalSpec(input_fn=lambda: 1, name=123) def testInvalidHook(self): with self.assertRaisesRegexp(TypeError, _INVALID_HOOK_MSG): training.EvalSpec(input_fn=lambda: 1, hooks=[_InvalidHook()]) def testInvalidDelaySecs(self): with self.assertRaisesRegexp(ValueError, _INVALID_EVAL_DELAY_SECS_MSG): training.EvalSpec(input_fn=lambda: 1, start_delay_secs=-1) def testInvalidThrottleSecs(self): with self.assertRaisesRegexp(ValueError, 
_INVALID_EVAL_THROTTLE_SECS_MSG): training.EvalSpec(input_fn=lambda: 1, throttle_secs=-1) def testInvalidTypeOfListOfExporters(self): with self.assertRaisesRegexp(TypeError, _INVALID_EXPORTER_MSG): training.EvalSpec( input_fn=lambda: 1, exporters=[_create_exporter('a'), _FakeHook()]) def testInvalidTypeOfIndividualExporter(self): with self.assertRaisesRegexp(TypeError, _INVALID_EXPORTER_MSG): training.EvalSpec(input_fn=lambda: 1, exporters=_FakeHook()) def testInvalidTypeOfExporterName(self): with self.assertRaisesRegexp(ValueError, _INVALID_EXPORTER_NAME_TYPE_MSG): training.EvalSpec(input_fn=lambda: 1, exporters=_create_exporter(name=123)) def testMultipleExportersWithTheSameName(self):<|fim▁hole|> with self.assertRaisesRegexp(ValueError, _DUPLICATE_EXPORTER_NAMES_MSG): training.EvalSpec( input_fn=lambda: 1, exporters=[_create_exporter('a'), _create_exporter('a')]) def testMultipleExportersAndOneWithoutAName(self): with self.assertRaisesRegexp(ValueError, _NONE_EXPORTER_NAME_MSG): training.EvalSpec( input_fn=lambda: 1, exporters=[_create_exporter('a'), _create_exporter(None)]) def testSingleExporterWithoutAName(self): with self.assertRaisesRegexp(ValueError, _NONE_EXPORTER_NAME_MSG): training.EvalSpec(input_fn=lambda: 1, exporters=_create_exporter(None)) class TrainAndEvaluateTest(test.TestCase): def test_run_task(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) with test.mock.patch.object(training, '_TrainingExecutor') as mock_executor: mock_executor_instance = test.mock.Mock() mock_executor.return_value = mock_executor_instance training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec) mock_executor.assert_called_with(estimator=mock_est, train_spec=mock_train_spec, eval_spec=mock_eval_spec) self.assertTrue(mock_executor_instance.run.called) def test_error_out_if_evaluator_task_id_is_non_zero(self): tf_config = { 'cluster': { 
run_config_lib.TaskType.CHIEF: ['host0:0'], }, 'task': { 'type': run_config_lib.TaskType.EVALUATOR, 'index': 1 } } mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = _create_run_config_with_cluster_spec(tf_config) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) with self.assertRaisesRegexp(ValueError, _INVALID_EVAL_TASK_ID_ERR): training.train_and_evaluate(mock_est, mock_train_spec, mock_eval_spec) def test_invalid_estimator(self): invalid_estimator = object() mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) with self.assertRaisesRegexp(TypeError, _INVALID_ESTIMATOR_MSG): training.train_and_evaluate(invalid_estimator, mock_train_spec, mock_eval_spec) def test_fail_fast_if_invalid_eval_spec(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) invalid_eval_spec = object() with test.mock.patch.object(training, '_TrainingExecutor') as mock_executor: with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_SPEC_MSG): training.train_and_evaluate(mock_est, mock_train_spec, invalid_eval_spec) mock_executor.assert_not_called() class TrainingExecutorConstructorTest(test.TestCase): """Tests constructor of _TrainingExecutor.""" def test_required_arguments_set(self): estimator = estimator_lib.Estimator(model_fn=lambda features: features) train_spec = training.TrainSpec(input_fn=lambda: 1) eval_spec = training.EvalSpec(input_fn=lambda: 1) executor = training._TrainingExecutor(estimator, train_spec, eval_spec) self.assertEqual(estimator, executor.estimator) def test_invalid_estimator(self): invalid_estimator = object() train_spec = training.TrainSpec(input_fn=lambda: 1) eval_spec = training.EvalSpec(input_fn=lambda: 1) with self.assertRaisesRegexp(TypeError, _INVALID_ESTIMATOR_MSG): training._TrainingExecutor(invalid_estimator, train_spec, eval_spec) def 
test_invalid_train_spec(self): estimator = estimator_lib.Estimator(model_fn=lambda features: features) invalid_train_spec = object() eval_spec = training.EvalSpec(input_fn=lambda: 1) with self.assertRaisesRegexp(TypeError, _INVALID_TRAIN_SPEC_MSG): training._TrainingExecutor(estimator, invalid_train_spec, eval_spec) def test_invalid_eval_spec(self): estimator = estimator_lib.Estimator(model_fn=lambda features: features) train_spec = training.TrainSpec(input_fn=lambda: 1) invalid_eval_spec = object() with self.assertRaisesRegexp(TypeError, _EVAL_SPEC_OR_NONE_MSG): training._TrainingExecutor(estimator, train_spec, invalid_eval_spec) def test_eval_spec_none(self): estimator = estimator_lib.Estimator(model_fn=lambda features: features) train_spec = training.TrainSpec(input_fn=lambda: 1) eval_spec = None # Tests that no error is raised. training._TrainingExecutor(estimator, train_spec, eval_spec) def test_invalid_train_hooks(self): estimator = estimator_lib.Estimator(model_fn=lambda features: features) train_spec = training.TrainSpec(input_fn=lambda: 1) eval_spec = training.EvalSpec(input_fn=lambda: 1) invalid_train_hooks = [object()] with self.assertRaisesRegexp(TypeError, _INVALID_HOOK_MSG): training._TrainingExecutor( estimator, train_spec, eval_spec, train_hooks=invalid_train_hooks) def test_invalid_continuous_eval_listener(self): estimator = estimator_lib.Estimator(model_fn=lambda features: features) train_spec = training.TrainSpec(input_fn=lambda: 1) eval_spec = training.EvalSpec(input_fn=lambda: 1) invalid_continuous_eval_listener = object() with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_LISTENER_MSG): training._TrainingExecutor( estimator, train_spec, eval_spec, continuous_eval_listener=invalid_continuous_eval_listener) class _TrainingExecutorTrainingTest(object): """Tests training of _TrainingExecutor.""" def __init__(self, run_config): self._run_config = run_config def _run_task(self, executor): # We should not call executor.run as the test here is 
intended to test # run_foo explicitly (foo is the task type). return getattr(executor, 'run_' + self._run_config.task_type)() @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_train_with_train_spec(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = self._run_config train_spec = training.TrainSpec( input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_server_instance = mock_server.return_value executor = training._TrainingExecutor(mock_est, train_spec, mock_eval_spec) self._run_task(executor) mock_server.assert_called_with( mock_est.config.cluster_spec, job_name=mock_est.config.task_type, task_index=mock_est.config.task_id, config=test.mock.ANY, protocol=None, start=False) self.assertTrue(mock_server_instance.start.called) mock_est.train.assert_called_with( input_fn=train_spec.input_fn, max_steps=train_spec.max_steps, hooks=list(train_spec.hooks), saving_listeners=test.mock.ANY) mock_est.evaluate.assert_not_called() mock_est.export_savedmodel.assert_not_called() @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_train_with_no_eval_spec(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = self._run_config train_spec = training.TrainSpec( input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()]) eval_spec = None mock_server_instance = mock_server.return_value executor = training._TrainingExecutor(mock_est, train_spec, eval_spec) self._run_task(executor) mock_server.assert_called_with( mock_est.config.cluster_spec, job_name=mock_est.config.task_type, task_index=mock_est.config.task_id, config=test.mock.ANY, protocol=None, start=False) self.assertTrue(mock_server_instance.start.called) mock_est.train.assert_called_with( input_fn=train_spec.input_fn, max_steps=train_spec.max_steps, 
hooks=list(train_spec.hooks), saving_listeners=test.mock.ANY) mock_est.evaluate.assert_not_called() mock_est.export_savedmodel.assert_not_called() @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_train_with_train_hooks(self, unused_mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = self._run_config train_spec = training.TrainSpec( input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) extra_hooks = [_FakeHook()] executor = training._TrainingExecutor( mock_est, train_spec, mock_eval_spec, train_hooks=extra_hooks) self._run_task(executor) mock_est.train.assert_called_with( input_fn=train_spec.input_fn, max_steps=train_spec.max_steps, hooks=list(train_spec.hooks) + extra_hooks, saving_listeners=test.mock.ANY) @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_no_server_startup_in_google(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = self._run_config mock_train_spec = test.mock.Mock(spec=training.TrainSpec, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) executor = training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec) tf_config = {'TF_CONFIG': json.dumps(_TF_CONFIG_FOR_GOOGLE)} with test.mock.patch.dict('os.environ', tf_config): self._run_task(executor) mock_server.assert_not_called() def test_fail_with_empty_cluster_spec(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = None mock_est.config.master = 'grpc://...' 
mock_est.config.task_type = 'worker' mock_est.config.task_id = 2 with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): self._run_task(training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec)) def test_fail_with_empty_master(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec( {'worker': ['dummy', 'dummy1']}) mock_est.config.master = '' mock_est.config.task_type = 'worker' mock_est.config.task_id = 2 with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): self._run_task(training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec)) @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_single_worker_node_with_empty_tf_master( self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) # Single node cluster. 
mock_est.config.cluster_spec = server_lib.ClusterSpec({'worker': ['dummy']}) mock_est.config.master = '' mock_est.config.task_type = 'worker' mock_est.config.task_id = 2 self._run_task(training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec)) self.assertTrue(mock_est.train.called) mock_server.assert_not_called() def test_fail_with_empty_task_type(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec({'worker': ['dummy']}) mock_est.config.master = 'grpc://...' mock_est.config.task_type = '' mock_est.config.task_id = 2 with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): self._run_task(training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec)) def test_fail_with_none_task_id(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec({'worker': ['dummy']}) mock_est.config.master = 'grpc://...' 
mock_est.config.task_type = 'worker' mock_est.config.task_id = None with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): self._run_task(training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec)) class TrainingExecutorRunWorkerTest(_TrainingExecutorTrainingTest, test.TestCase): """Tests run_worker of _TrainingExecutor.""" def __init__(self, methodName='runTest'): # pylint: disable=invalid-name test.TestCase.__init__(self, methodName) _TrainingExecutorTrainingTest.__init__( self, run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_WORKER)) @test.mock.patch.object(server_lib, 'Server') def test_delay_for_worker(self, _): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = self._run_config mock_train_spec = test.mock.Mock(spec=training.TrainSpec, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) executor = training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec) expected_secs = (self._run_config.task_id + 1) * _DELAY_SECS_PER_WORKER with test.mock.patch.object(time, 'sleep') as mock_sleep: mock_sleep.side_effect = lambda s: self.assertEqual(expected_secs, s) self._run_task(executor) self.assertTrue(mock_sleep.called) class TrainingExecutorRunChiefTest(_TrainingExecutorTrainingTest, test.TestCase): """Tests run_chief of _TrainingExecutor.""" def __init__(self, methodName='runTest'): # pylint: disable=invalid-name test.TestCase.__init__(self, methodName) _TrainingExecutorTrainingTest.__init__( self, run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_CHIEF)) @test.mock.patch.object(server_lib, 'Server') def test_no_delay_for_chief(self, _): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.config = self._run_config mock_train_spec = test.mock.Mock(spec=training.TrainSpec, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) executor = training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec) with 
test.mock.patch.object(time, 'sleep') as mock_sleep: self._run_task(executor) mock_sleep.assert_not_called() class TrainingExecutorRunMasterTest(test.TestCase): """Tests run_chief of _TrainingExecutor.""" def setUp(self): self._run_config = _create_run_config_with_cluster_spec( _TF_CONFIG_FOR_MASTER) @test.mock.patch.object(server_lib, 'Server') def test_no_delay_for_master(self, _): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123} mock_est.config = self._run_config mock_train_spec = test.mock.Mock( spec=training.TrainSpec, max_steps=123, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[]) executor = training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec) with test.mock.patch.object(time, 'sleep') as mock_sleep: executor.run_master() mock_sleep.assert_not_called() @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_train_with_train_spec(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123} mock_est.config = self._run_config train_spec = training.TrainSpec( input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[]) mock_server_instance = mock_server.return_value executor = training._TrainingExecutor(mock_est, train_spec, mock_eval_spec) executor.run_master() mock_server.assert_called_with( mock_est.config.cluster_spec, job_name=mock_est.config.task_type, task_index=mock_est.config.task_id, config=test.mock.ANY, protocol=None, start=False) self.assertTrue(mock_server_instance.start.called) mock_est.train.assert_called_with( input_fn=train_spec.input_fn, max_steps=train_spec.max_steps, hooks=list(train_spec.hooks), saving_listeners=test.mock.ANY) mock_est.export_savedmodel.assert_not_called() @test.mock.patch.object(time, 'sleep') 
@test.mock.patch.object(server_lib, 'Server') def test_train_with_no_eval_spec_fails(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123} mock_est.config = self._run_config train_spec = training.TrainSpec( input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()]) eval_spec = None executor = training._TrainingExecutor(mock_est, train_spec, eval_spec) with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_SPEC_MSG): executor.run_master() @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_train_with_train_hooks(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123} mock_est.config = self._run_config train_spec = training.TrainSpec( input_fn=lambda: 1, max_steps=2, hooks=[_FakeHook()]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[]) extra_hooks = [_FakeHook()] executor = training._TrainingExecutor( mock_est, train_spec, mock_eval_spec, train_hooks=extra_hooks) executor.run_master() mock_est.train.assert_called_with( input_fn=train_spec.input_fn, max_steps=train_spec.max_steps, hooks=list(train_spec.hooks) + extra_hooks, saving_listeners=test.mock.ANY) @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_no_server_startup_in_google(self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123} mock_est.config = self._run_config mock_train_spec = test.mock.Mock( spec=training.TrainSpec, max_steps=123, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[]) executor = training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec) tf_config = {'TF_CONFIG': json.dumps(_TF_CONFIG_FOR_GOOGLE)} with 
test.mock.patch.dict('os.environ', tf_config): executor.run_master() mock_server.assert_not_called() def test_fail_with_empty_cluster_spec(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = None mock_est.config.master = 'grpc://...' mock_est.config.task_type = 'master' mock_est.config.task_id = 2 with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): training._TrainingExecutor( mock_est, mock_train_spec, mock_eval_spec).run_master() def test_fail_with_empty_master(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec( {'master': ['dummy'], 'worker': ['dummy1']}) mock_est.config.master = '' mock_est.config.task_type = 'master' mock_est.config.task_id = 0 with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): training._TrainingExecutor( mock_est, mock_train_spec, mock_eval_spec).run_master() @test.mock.patch.object(time, 'sleep') @test.mock.patch.object(server_lib, 'Server') def test_single_master_node_with_empty_tf_master( self, mock_server, unused_mock_sleep): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_est.evaluate = lambda *args, **kw: {ops.GraphKeys.GLOBAL_STEP: 123} mock_train_spec = test.mock.Mock( spec=training.TrainSpec, max_steps=123, hooks=[]) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec, exporters=[]) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec( {'master': ['dummy']}) mock_est.config.master = '' mock_est.config.task_type = 
'master' mock_est.config.task_id = 0 executor = training._TrainingExecutor( mock_est, mock_train_spec, mock_eval_spec) executor.run_master() mock_server.assert_not_called() self.assertTrue(mock_est.train.called) def test_fail_with_empty_task_type(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec({'master': ['dummy']}) mock_est.config.master = 'grpc://...' mock_est.config.task_type = '' mock_est.config.task_id = 2 with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): training._TrainingExecutor( mock_est, mock_train_spec, mock_eval_spec).run_master() def test_fail_with_none_task_id(self): mock_est = test.mock.Mock(spec=estimator_lib.Estimator) mock_train_spec = test.mock.Mock(spec=training.TrainSpec) mock_eval_spec = test.mock.Mock(spec=training.EvalSpec) mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig) mock_est.config.cluster_spec = server_lib.ClusterSpec({'master': ['dummy']}) mock_est.config.master = 'grpc://...' mock_est.config.task_type = 'master' mock_est.config.task_id = None with self.assertRaisesRegexp(RuntimeError, _INVALID_CONFIG_FOR_STD_SERVER_MSG): training._TrainingExecutor( mock_est, mock_train_spec, mock_eval_spec).run_master() @test.mock.patch.object(server_lib, 'Server') def test_run_master_triggers_evaluate_and_export(self, _): def estimator_train(saving_listeners, *args, **kwargs): # There shalt be a saving_listener. Estimator is going to call # `after_save`. 
      del args, kwargs
      saving_listeners[0].begin()
      saving_listeners[0].after_save(session=None, global_step_value=0)
      saving_listeners[0].after_save(session=None, global_step_value=10)

    mock_est = test.mock.Mock(
        spec=estimator_lib.Estimator, model_dir='path/', train=estimator_train)
    mock_est.latest_checkpoint.return_value = 'checkpoint_path/'
    mock_est.config = self._run_config

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_whether_export_is_called'

    train_spec = training.TrainSpec(input_fn=lambda: 1, max_steps=300)
    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, steps=2, exporters=exporter)
    eval_result = {_GLOBAL_STEP_KEY: train_spec.max_steps}
    mock_est.evaluate.return_value = eval_result

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    executor.run_master()

    mock_est.evaluate.assert_called_with(
        name=eval_spec.name,
        input_fn=eval_spec.input_fn,
        steps=eval_spec.steps,
        checkpoint_path='checkpoint_path/',
        hooks=eval_spec.hooks)
    self.assertEqual(1, exporter.export.call_count)
    exporter.export.assert_called_with(
        estimator=mock_est,
        export_path=os.path.join('path/', 'export', exporter.name),
        checkpoint_path='checkpoint_path/',
        eval_result=eval_result,
        is_the_final_export=True)

  @test.mock.patch.object(basic_session_run_hooks, 'SecondOrStepTimer')
  @test.mock.patch.object(server_lib, 'Server')
  def test_run_master_throttle_eval(self, _, mock_timer_class):
    # run_master should evaluate only for the checkpoints whose step passes
    # the throttle timer; here 3 of 4 saves trigger, but the middle two share
    # an evaluation window.
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')

    mock_timer = test.mock.Mock()
    mock_timer_class.return_value = mock_timer

    def estimator_train(saving_listeners, *args, **kwargs):
      del args, kwargs
      saving_listeners[0].begin()
      # Call four times.
      mock_timer.should_trigger_for_step.return_value = True
      saving_listeners[0].after_save(session=None, global_step_value=None)
      mock_timer.should_trigger_for_step.return_value = True
      saving_listeners[0].after_save(session=None, global_step_value=None)
      mock_timer.should_trigger_for_step.return_value = False
      saving_listeners[0].after_save(session=None, global_step_value=None)
      mock_timer.should_trigger_for_step.return_value = True
      saving_listeners[0].after_save(session=None, global_step_value=None)

    mock_est.train = estimator_train
    mock_est.latest_checkpoint.side_effect = ['ckpt1', 'ckpt2']
    mock_est.config = self._run_config

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_whether_export_is_called'

    train_spec = training.TrainSpec(input_fn=lambda: 1, max_steps=300)
    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, steps=2, exporters=exporter, throttle_secs=10)

    mock_est.evaluate.side_effect = [
        {_GLOBAL_STEP_KEY: train_spec.max_steps //2},
        {_GLOBAL_STEP_KEY: train_spec.max_steps}
    ]

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    executor.run_master()

    self.assertEqual(2, mock_est.evaluate.call_count)
    self.assertEqual(2, exporter.export.call_count)
    # Only the last export of the run is flagged as final.
    is_final_export_list = [call[1]['is_the_final_export']
                            for call in exporter.export.call_args_list]
    self.assertEqual([False, True], is_final_export_list)

  @test.mock.patch.object(basic_session_run_hooks, 'SecondOrStepTimer')
  @test.mock.patch.object(server_lib, 'Server')
  def test_run_master_throttle_eval_which_skips_final_ckpt(
      self, _, mock_timer_class):
    # Even when the throttle timer suppresses the evaluation of the last
    # checkpoint, the end-of-training hook must still run a final evaluation.
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')

    mock_timer = test.mock.Mock()
    mock_timer_class.return_value = mock_timer

    def estimator_train(saving_listeners, *args, **kwargs):
      del args, kwargs
      saving_listeners[0].begin()
      # Call three times (one for first saving).
      mock_timer.should_trigger_for_step.return_value = True
      saving_listeners[0].after_save(session=None, global_step_value=0)
      mock_timer.should_trigger_for_step.return_value = True
      saving_listeners[0].after_save(session=None, global_step_value=125)
      mock_timer.should_trigger_for_step.return_value = False
      saving_listeners[0].after_save(session=None, global_step_value=250)
      # At the end evaluate should be called even if throttle secs prevents it.
      mock_timer.should_trigger_for_step.return_value = False
      saving_listeners[0].end(session=None, global_step_value=300)

    mock_est.train = estimator_train
    mock_est.latest_checkpoint.side_effect = ['ckpt1', 'ckpt2']
    mock_est.config = self._run_config

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_whether_export_is_called'

    train_spec = training.TrainSpec(input_fn=lambda: 1, max_steps=300)
    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, steps=2, exporters=exporter, throttle_secs=10)

    mock_est.evaluate.side_effect = [
        {_GLOBAL_STEP_KEY: train_spec.max_steps //2},
        {_GLOBAL_STEP_KEY: train_spec.max_steps}
    ]

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    executor.run_master()

    self.assertEqual(2, mock_est.evaluate.call_count)
    self.assertEqual(2, exporter.export.call_count)
    is_final_export_list = [call[1]['is_the_final_export']
                            for call in exporter.export.call_args_list]
    self.assertEqual([False, True], is_final_export_list)


class TrainingExecutorRunEvaluatorTest(test.TestCase):
  """Tests run_evaluator of _TrainingExecutor."""

  def _set_up_mock_est_to_train_and_evaluate_once(self, mock_est,
                                                  mock_train_spec):
    """Sets global step in eval result to end the while True eval loop."""
    training_max_step = 200
    mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: training_max_step}
    mock_train_spec.max_steps = training_max_step

  def test_evaluate_with_evaluate_spec(self):
    # run_evaluator forwards all EvalSpec fields to Estimator.evaluate and
    # never calls train.
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.latest_checkpoint.return_value = 'latest_it_is'
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1,
        steps=2,
        hooks=[_FakeHook()],
        name='cont_eval',
        start_delay_secs=0,
        throttle_secs=0)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    executor.run_evaluator()

    mock_est.evaluate.assert_called_with(
        name='cont_eval',
        input_fn=eval_spec.input_fn,
        steps=eval_spec.steps,
        checkpoint_path='latest_it_is',
        hooks=eval_spec.hooks)
    self.assertFalse(mock_est.train.called)

  def test_evaluate_with_no_eval_spec_fails(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.latest_checkpoint.return_value = 'latest_it_is'
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)
    eval_spec = None

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_SPEC_MSG):
      executor.run_evaluator()

  def test_evaluate_with_train_hooks(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.latest_checkpoint.return_value = 'latest_it_is'
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1,
        steps=2,
        hooks=[_FakeHook()],
        name='cont_eval',
        start_delay_secs=0,
        throttle_secs=0)

    # The train_hooks will not be called during eval.
    mock_hook = test.mock.Mock(spec=session_run_hook.SessionRunHook)
    executor = training._TrainingExecutor(
        mock_est, mock_train_spec, eval_spec, train_hooks=[mock_hook])
    executor.run_evaluator()

    mock_hook.begin.assert_not_called()

  def test_evaluate_multiple_times(self):
    training_max_step = 200

    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
    mock_est.evaluate.side_effect = [
        {_GLOBAL_STEP_KEY: training_max_step // 2},
        {_GLOBAL_STEP_KEY: training_max_step}
    ]
    mock_est.latest_checkpoint.side_effect = ['path_1', 'path_2']

    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_how_many_times_export_is_called'
    mock_est.times_export_was_called = 0
    mock_est.times_final_export_was_called = 0

    def export(estimator, export_path, checkpoint_path, eval_result,
               is_the_final_export):
      del export_path, checkpoint_path, eval_result
      estimator.times_export_was_called += 1
      # The final export happens at the end.
      self.assertEqual(0, estimator.times_final_export_was_called)
      if is_the_final_export:
        estimator.times_final_export_was_called += 1

    exporter.export = export

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1,
        start_delay_secs=0,
        throttle_secs=0,
        exporters=exporter)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    executor.run_evaluator()

    self.assertEqual(2, mock_est.evaluate.call_count)
    self.assertEqual(2, mock_est.times_export_was_called)
    self.assertEqual(1, mock_est.times_final_export_was_called)

  def test_evaluate_listener_before_eval(self):
    training_max_step = 200
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
    # Without early stopping, this eval will be run twice.
    mock_est.evaluate.side_effect = [{
        _GLOBAL_STEP_KEY: training_max_step // 2
    }, {
        _GLOBAL_STEP_KEY: training_max_step
    }]
    mock_est.latest_checkpoint.side_effect = ['path_1', 'path_2']

    mock_train_spec = test.mock.Mock(spec=training.TrainSpec, hooks=[])
    mock_train_spec.max_steps = training_max_step

    class _Listener(training._ContinuousEvalListener):

      def __init__(self):
        self.call_count = 0

      def before_eval(self):
        self.call_count += 1
        return self.call_count == 1

    listener = _Listener()

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, start_delay_secs=0, throttle_secs=0)

    training._TrainingExecutor(
        mock_est, mock_train_spec, eval_spec,
        continuous_eval_listener=listener).run_evaluator()

    # before_eval returns False the second time, so evaluate will be
    # called once.
    self.assertEqual(1, mock_est.evaluate.call_count)
    self.assertEqual(2, listener.call_count)

  def test_evaluate_listener_after_eval(self):
    training_max_step = 200
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
    # Without early stopping, this eval will be run twice.
    expected_eval_metrics = [{
        _GLOBAL_STEP_KEY: training_max_step // 2
    }, {
        _GLOBAL_STEP_KEY: training_max_step
    }]
    mock_est.evaluate.side_effect = expected_eval_metrics
    mock_est.latest_checkpoint.side_effect = ['path_1', 'path_2']

    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step

    class _Listener(training._ContinuousEvalListener):

      def __init__(self):
        self.call_count = 0

      def after_eval(self, eval_result):
        self.call_count += 1
        self.eval_result = eval_result
        return False

    listener = _Listener()

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, start_delay_secs=0, throttle_secs=0)

    training._TrainingExecutor(
        mock_est, mock_train_spec, eval_spec,
        continuous_eval_listener=listener).run_evaluator()

    # after_eval returns False during the first time, so, evaluate will be
    # called once.
    self.assertEqual(1, mock_est.evaluate.call_count)
    self.assertEqual(1, listener.call_count)
    self.assertAllEqual(expected_eval_metrics[0], listener.eval_result.metrics)
    self.assertEqual('path_1', listener.eval_result.checkpoint_path)

  def test_final_export_is_true_in_the_end(self):
    training_max_step = 200

    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
    mock_est.evaluate.side_effect = [
        {_GLOBAL_STEP_KEY: training_max_step // 2},
        {_GLOBAL_STEP_KEY: training_max_step}
    ]
    mock_est.latest_checkpoint.side_effect = ['path_1', 'path_2']

    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step

    mock_est.times_export_fn_was_called = 0
    mock_est.times_the_final_export_was_true = 0

    def export(estimator, export_path, checkpoint_path, eval_result,
               is_the_final_export):
      del export_path, checkpoint_path, eval_result
      estimator.times_export_fn_was_called += 1
      if is_the_final_export:
        estimator.times_the_final_export_was_true += 1

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_how_many_times_export_is_called'
    exporter.export = export

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1,
        start_delay_secs=0,
        throttle_secs=0,
        exporters=exporter)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    executor.run_evaluator()

    self.assertEqual(2, mock_est.evaluate.call_count)
    self.assertEqual(2, mock_est.times_export_fn_was_called)
    self.assertEqual(1, mock_est.times_the_final_export_was_true)

  def test_skip_evaluation_due_to_ckpt(self):
    training_max_step = 200
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.evaluate.side_effect = [
        {_GLOBAL_STEP_KEY: training_max_step // 2},
        {_GLOBAL_STEP_KEY: training_max_step}
    ]
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    # First two items are invalid, next two items are same.
    mock_est.latest_checkpoint.side_effect = [
        None, '', 'same', 'same', 'path_2'
    ]

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, start_delay_secs=0, throttle_secs=2)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    with test.mock.patch.object(logging, 'warning') as mock_log:
      executor.run_evaluator()

    # Three checkpoint paths are invalid.
    self.assertEqual(5, mock_est.latest_checkpoint.call_count)

    self.assertEqual(2, mock_est.evaluate.call_count)

    # Two warning logs are expected (last warning time is reset after a
    # successful evaluation)
    self.assertEqual(2, mock_log.call_count)

  def test_warning_if_throttle_secs_is_zero(self):
    training_max_step = 200
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.evaluate.side_effect = [
        {_GLOBAL_STEP_KEY: training_max_step}
    ]
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    # We need to make the first one invalid, so it will check the
    # throttle_secs=0.
    mock_est.latest_checkpoint.side_effect = [None, 'path']

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, start_delay_secs=0, throttle_secs=0)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    with test.mock.patch.object(logging, 'warning') as mock_log:
      executor.run_evaluator()

    # First ckpt is invalid.
    self.assertEqual(2, mock_est.latest_checkpoint.call_count)
    self.assertEqual(1, mock_est.evaluate.call_count)
    self.assertRegexpMatches(str(mock_log.call_args), _INPROPER_THROTTL_SECS)

  def test_continuous_eval_listener_eval_result(self):
    # The listener must observe every _EvalResult, including the skipped ones
    # (missing checkpoint / no new checkpoint), in order.
    training_max_step = 200
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    expected_eval_metrics = [{
        _GLOBAL_STEP_KEY: training_max_step // 2
    }, {
        _GLOBAL_STEP_KEY: training_max_step
    }]
    mock_est.evaluate.side_effect = expected_eval_metrics
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step

    class _Listener(training._ContinuousEvalListener):

      def __init__(self):
        self.eval_results = []

      def after_eval(self, eval_result):
        self.eval_results.append(eval_result)
        return True

    continuous_eval_listener = _Listener()
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    # First two items are invalid, next two items are same.
    mock_est.latest_checkpoint.side_effect = [
        None, '', 'same', 'same', 'path_2'
    ]
    expected_eval_results = [
        training._EvalResult(training._EvalStatus.MISSING_CHECKPOINT),
        training._EvalResult(training._EvalStatus.MISSING_CHECKPOINT),
        training._EvalResult(
            training._EvalStatus.EVALUATED,
            metrics=expected_eval_metrics[0],
            checkpoint_path='same'),
        training._EvalResult(training._EvalStatus.NO_NEW_CHECKPOINT),
        training._EvalResult(
            training._EvalStatus.EVALUATED,
            metrics=expected_eval_metrics[1],
            checkpoint_path='path_2'),
    ]

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, start_delay_secs=0, throttle_secs=0)

    executor = training._TrainingExecutor(
        mock_est,
        mock_train_spec,
        eval_spec,
        continuous_eval_listener=continuous_eval_listener)
    executor.run_evaluator()

    # Three checkpoint paths are invalid.
    self.assertEqual(5, mock_est.latest_checkpoint.call_count)
    self.assertEqual(2, mock_est.evaluate.call_count)
    self.assertEqual(5, len(continuous_eval_listener.eval_results))

    for i, result in enumerate(continuous_eval_listener.eval_results):
      self.assertEqual(expected_eval_results[i].status, result.status)
      self.assertAllEqual(expected_eval_results[i].metrics, result.metrics)
      self.assertEqual(expected_eval_results[i].checkpoint_path,
                       result.checkpoint_path)

  def test_sleep_start_delay_secs(self):
    training_max_step = 200
    start_delay_secs = 123
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: training_max_step}
    mock_est.model_dir = compat.as_bytes(test.get_temp_dir())
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_train_spec.max_steps = training_max_step

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1,
        steps=2,
        hooks=[_FakeHook()],
        name='cont_eval',
        start_delay_secs=start_delay_secs,
        throttle_secs=0)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    with test.mock.patch.object(time, 'sleep') as mock_sleep:
      executor.run_evaluator()
      mock_sleep.assert_called_with(start_delay_secs)
      self.assertTrue(mock_est.evaluate.called)

  @test.mock.patch.object(time, 'time')
  @test.mock.patch.object(time, 'sleep')
  def test_throttle_secs(self, mock_sleep, mock_time):
    # If the eval iteration took operation_secs, the evaluator should sleep
    # only the remaining throttle_secs - operation_secs.
    throttle_secs = 123
    operation_secs = 12

    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1, start_delay_secs=0, throttle_secs=throttle_secs)

    mock_time.side_effect = [921, 921 + operation_secs]

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    # Disable logging as it calls time.time also.
    with test.mock.patch.object(logging, 'info'):
      executor.run_evaluator()
    mock_sleep.assert_called_with(throttle_secs - operation_secs)
    self.assertTrue(mock_est.evaluate.called)

  def test_that_export_is_called(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    self._set_up_mock_est_to_train_and_evaluate_once(mock_est, mock_train_spec)

    def export(estimator, *args, **kwargs):
      del args, kwargs
      estimator.export_was_called = True

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_whether_export_is_called'
    exporter.export = export

    eval_spec = training.EvalSpec(
        input_fn=lambda: 1,
        steps=2,
        start_delay_secs=0,
        throttle_secs=0,
        exporters=exporter)

    executor = training._TrainingExecutor(mock_est, mock_train_spec, eval_spec)
    executor.run_evaluator()

    # Verify that export was called on the right estimator.
    self.assertTrue(mock_est.export_was_called)

  def test_errors_out_if_evaluate_returns_empty_dict(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    train_spec = training.TrainSpec(input_fn=lambda: 1)
    eval_spec = training.EvalSpec(input_fn=(lambda: 1),
                                  start_delay_secs=0, throttle_secs=0)
    mock_est.evaluate.return_value = {}

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    with self.assertRaisesRegexp(ValueError, _INVALID_EMPTY_EVAL_RESULT_ERR):
      executor.run_evaluator()

  def test_errors_out_if_evaluate_returns_non_dict(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    train_spec = training.TrainSpec(input_fn=lambda: 1)
    eval_spec = training.EvalSpec(input_fn=(lambda: 1),
                                  start_delay_secs=0, throttle_secs=0)
    mock_est.evaluate.return_value = 123

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_RESULT_TYPE_ERR):
      executor.run_evaluator()

  def test_errors_out_if_evaluate_returns_dict_without_global_step(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    train_spec = training.TrainSpec(input_fn=lambda: 1)
    eval_spec = training.EvalSpec(input_fn=(lambda: 1),
                                  start_delay_secs=0, throttle_secs=0)
    mock_est.evaluate.return_value = {'loss': 123}

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    with self.assertRaisesRegexp(ValueError,
                                 _MISSING_GLOBAL_STEP_IN_EVAL_RESULT_ERR):
      executor.run_evaluator()


class TrainingExecutorRunPsTest(test.TestCase):
  """Tests run_ps of _TrainingExecutor."""

  @test.mock.patch.object(server_lib, 'Server')
  def test_std_server(self, mock_server):
    # run_ps should start a std server with the cluster config and block on
    # join().
    mock_server_instance = test.mock.Mock()
    mock_server.return_value = mock_server_instance

    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.config = _create_run_config_with_cluster_spec(_TF_CONFIG_FOR_PS)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    executor = training._TrainingExecutor(mock_est, mock_train_spec,
                                          mock_eval_spec)
    executor.run_ps()

    mock_server.assert_called_with(
        mock_est.config.cluster_spec,
        job_name=mock_est.config.task_type,
        task_index=mock_est.config.task_id,
        config=test.mock.ANY,
        protocol=None,
        start=False)

    self.assertTrue(mock_server_instance.start.called)
    self.assertTrue(mock_server_instance.join.called)

  def test_fail_with_empty_cluster_spec(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
    mock_est.config.cluster_spec = None
    mock_est.config.master = 'grpc://...'
    mock_est.config.task_type = 'ps'
    mock_est.config.task_id = 2

    with self.assertRaisesRegexp(RuntimeError,
                                 _INVALID_CONFIG_FOR_STD_SERVER_MSG):
      training._TrainingExecutor(mock_est, mock_train_spec,
                                 mock_eval_spec).run_ps()

  def test_fail_with_empty_master(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
    mock_est.config.cluster_spec = server_lib.ClusterSpec({'ps': ['dummy']})
    mock_est.config.master = ''
    mock_est.config.task_type = 'ps'
    mock_est.config.task_id = 2

    with self.assertRaisesRegexp(RuntimeError,
                                 _INVALID_CONFIG_FOR_STD_SERVER_MSG):
      training._TrainingExecutor(mock_est, mock_train_spec,
                                 mock_eval_spec).run_ps()

  def test_fail_with_empty_task_type(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
    mock_est.config.cluster_spec = server_lib.ClusterSpec({'ps': ['dummy']})
    mock_est.config.master = 'grpc://...'
    mock_est.config.task_type = ''
    mock_est.config.task_id = 2

    with self.assertRaisesRegexp(RuntimeError,
                                 _INVALID_CONFIG_FOR_STD_SERVER_MSG):
      training._TrainingExecutor(mock_est, mock_train_spec,
                                 mock_eval_spec).run_ps()

  def test_fail_with_none_task_id(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    mock_est.config = test.mock.PropertyMock(spec=run_config_lib.RunConfig)
    mock_est.config.cluster_spec = server_lib.ClusterSpec({'ps': ['dummy']})
    mock_est.config.master = 'grpc://...'
    mock_est.config.task_type = 'ps'
    mock_est.config.task_id = None

    with self.assertRaisesRegexp(RuntimeError,
                                 _INVALID_CONFIG_FOR_STD_SERVER_MSG):
      training._TrainingExecutor(mock_est, mock_train_spec,
                                 mock_eval_spec).run_ps()


class StopAtSecsHookTest(test.TestCase):
  """Tests StopAtSecsHook."""

  @test.mock.patch.object(time, 'time')
  def test_stops_after_time(self, mock_time):
    # The hook requests a stop once more than 1000 (mocked) seconds have
    # elapsed since session creation.
    mock_time.return_value = 1484695987.209386
    hook = training._StopAtSecsHook(1000)
    with ops.Graph().as_default():
      no_op = control_flow_ops.no_op()
      # some time passed before training starts
      mock_time.return_value += 250
      with monitored_session.MonitoredSession(hooks=[hook]) as sess:
        self.assertFalse(sess.should_stop())
        sess.run(no_op)
        self.assertFalse(sess.should_stop())
        mock_time.return_value += 500
        sess.run(no_op)
        self.assertFalse(sess.should_stop())
        mock_time.return_value += 400
        sess.run(no_op)
        self.assertFalse(sess.should_stop())
        mock_time.return_value += 200
        sess.run(no_op)
        self.assertTrue(sess.should_stop())


class TrainingExecutorRunLocalTest(test.TestCase):
  """Tests run_local of _TrainingExecutor."""

  def _model_fn(self, features, labels, mode):
    # Minimal model: increments the global step once per input element.
    del labels
    with ops.control_dependencies([features]):
      train_op = state_ops.assign_add(training_util.get_global_step(), 1)
    return model_fn_lib.EstimatorSpec(
        mode,
        loss=constant_op.constant(0.),
        train_op=train_op,
        predictions=constant_op.constant([[10.]]),
        eval_metric_ops={'mean_of_features': metrics_lib.mean(features)})

  def _input_fn(self, repeat=True):
    ds = dataset_ops.Dataset.from_tensors([1])
    if repeat:
      return ds.repeat()
    return ds

  def unique_checkpoint_every_time_fn(self):
    # Returns a fresh fake checkpoint path on every call.
    return 'checkpoint_path_%s/' % random.random()

  def test_runs_evaluate_with_every_new_checkpoint(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=10))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    mock_est.times_export_was_called = 0
    mock_est.times_final_export_was_called = 0

    def export(estimator, export_path, checkpoint_path, eval_result,
               is_the_final_export):
      del export_path, checkpoint_path, eval_result
      estimator.times_export_was_called += 1
      # The final export happens at the end.
      self.assertEqual(0, estimator.times_final_export_was_called)
      if is_the_final_export:
        estimator.times_final_export_was_called += 1

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_how_many_times_export_is_called'
    exporter.export = export

    # max_steps=22 with checkpoints every 10 steps yields 3 checkpoints and
    # hence 3 evaluations/exports.
    train_spec = training.TrainSpec(input_fn=self._input_fn, max_steps=22)
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False),
        throttle_secs=0,
        exporters=exporter)

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    executor.run_local()

    self.assertEqual(1, mock_est.train.call_count)
    self.assertEqual(3, mock_est.evaluate.call_count)
    self.assertEqual(3, mock_est.times_export_was_called)
    self.assertEqual(1, mock_est.times_final_export_was_called)

  def test_runs_with_eval_listener_before_eval(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=10))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    mock_est.latest_checkpoint = self.unique_checkpoint_every_time_fn

    train_spec = training.TrainSpec(input_fn=self._input_fn, max_steps=12)
    eval_spec = training.EvalSpec(input_fn=lambda: self._input_fn(repeat=False))
    mock_est.evaluate.side_effect = [{_GLOBAL_STEP_KEY: train_spec.max_steps}]

    class _Listener(training._ContinuousEvalListener):

      def __init__(self):
        self.call_count = 0

      def before_eval(self):
        self.call_count += 1
        return False  # Will stop the run_local before first eval.

    listener = _Listener()

    executor = training._TrainingExecutor(
        mock_est, train_spec, eval_spec, continuous_eval_listener=listener)
    executor.run_local()

    self.assertEqual(1, mock_est.train.call_count)
    self.assertEqual(0, mock_est.evaluate.call_count)

  def test_runs_with_eval_listener_after_eval(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=10))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)

    train_spec = training.TrainSpec(input_fn=self._input_fn, max_steps=3000)
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False), throttle_secs=0)

    class _Listener(training._ContinuousEvalListener):

      def __init__(self):
        self.call_count = 0

      def after_eval(self, eval_result):
        self.call_count += 1
        return False  # Will stop the run_local after first eval.

    listener = _Listener()

    executor = training._TrainingExecutor(
        mock_est, train_spec, eval_spec, continuous_eval_listener=listener)
    metrics, _ = executor.run_local()  # pylint: disable=assignment-from-no-return

    self.assertEqual(1, mock_est.train.call_count)
    self.assertEqual(1, mock_est.evaluate.call_count)
    self.assertEqual(1, listener.call_count)
    # Should be less than max_steps since listener did early stopping.
    self.assertLess(metrics[_GLOBAL_STEP_KEY], train_spec.max_steps)

  def test_handles_no_new_checkpoint_found(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        # disable saving checkpoint
        config=run_config_lib.RunConfig(
            save_checkpoints_steps=None, save_checkpoints_secs=None))
    train_spec = training.TrainSpec(
        input_fn=self._input_fn, max_steps=300, hooks=[_FakeHook()])
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False),
        hooks=[_FakeHook()],
        throttle_secs=100)

    executor = training._TrainingExecutor(est, train_spec, eval_spec)
    with self.assertRaisesRegexp(ValueError,
                                 'There should be a CheckpointSaverHook'):
      executor.run_local()

  def test_final_export_is_true_in_the_end(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=10))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    mock_est.times_export_fn_was_called = 0
    mock_est.times_the_final_export_was_true = 0

    def export(estimator, export_path, checkpoint_path, eval_result,
               is_the_final_export):
      del export_path, checkpoint_path, eval_result
      estimator.times_export_fn_was_called += 1
      if is_the_final_export:
        estimator.times_the_final_export_was_true += 1

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_how_many_times_export_is_called'
    exporter.export = export

    train_spec = training.TrainSpec(
        input_fn=self._input_fn, max_steps=12, hooks=[_FakeHook()])
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False),
        throttle_secs=0,
        exporters=exporter)

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    executor.run_local()

    self.assertEqual(1, mock_est.train.call_count)
    self.assertEqual(2, mock_est.evaluate.call_count)
    self.assertEqual(2, mock_est.times_export_fn_was_called)
    self.assertEqual(1, mock_est.times_the_final_export_was_true)

  def test_train_and_evaluate_args(self):
    est = estimator_lib.Estimator(model_fn=self._model_fn)
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    train_spec = training.TrainSpec(
        input_fn=self._input_fn, max_steps=300, hooks=[_FakeHook()])
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False),
        steps=2,
        hooks=[_FakeHook()],
        name='local_eval')

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    executor.run_local()

    mock_est.evaluate.assert_called_with(
        name=eval_spec.name,
        input_fn=eval_spec.input_fn,
        steps=eval_spec.steps,
        checkpoint_path=est.latest_checkpoint(),
        hooks=eval_spec.hooks)

    train_args = mock_est.train.call_args[1]
    self.assertEqual(list(train_spec.hooks), list(train_args['hooks']))
    self.assertEqual(train_spec.input_fn, train_args['input_fn'])
    self.assertEqual(train_spec.max_steps, train_args['max_steps'])

  def test_train_with_no_eval_spec_fails(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    train_spec = training.TrainSpec(
        input_fn=lambda: 1, max_steps=300, hooks=[_FakeHook()])
    eval_spec = None
    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)

    with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_SPEC_MSG):
      executor.run_local()

  def test_train_hooks(self):
    # Extra train_hooks passed to the executor are appended to the
    # TrainSpec hooks (ignoring the internally added _StopAtSecsHook).
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, model_dir='path/')
    mock_est.latest_checkpoint.return_value = 'checkpoint_path/'
    train_spec = training.TrainSpec(
        input_fn=lambda: 1, max_steps=300, hooks=[_FakeHook()])
    eval_spec = training.EvalSpec(input_fn=lambda: 1, steps=2)
    mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: train_spec.max_steps}
    extra_hooks = [_FakeHook()]

    executor = training._TrainingExecutor(
        mock_est, train_spec, eval_spec, train_hooks=extra_hooks)
    executor.run_local()

    train_args = mock_est.train.call_args[1]
    self.assertEqual(
        list(train_spec.hooks) + extra_hooks, [
            h for h in train_args['hooks']
            if not isinstance(h, training._StopAtSecsHook)
        ])

  def test_that_export_is_called_with_run_local(self):
    est = estimator_lib.Estimator(model_fn=self._model_fn)
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    train_spec = training.TrainSpec(input_fn=self._input_fn, max_steps=12)
    mock_est.evaluate.return_value = {_GLOBAL_STEP_KEY: train_spec.max_steps}

    def export(estimator, *args, **kwargs):
      del args, kwargs
      estimator.export_was_called = True
      return 'path_to_export'

    exporter = test.mock.PropertyMock(spec=exporter_lib.Exporter)
    exporter.name = 'see_whether_export_is_called'
    exporter.export = export

    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False),
        steps=2,
        start_delay_secs=0,
        throttle_secs=213,
        exporters=exporter)

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    # pylint: disable=assignment-from-no-return
    _, export_results = executor.run_local()
    # pylint: enable=assignment-from-no-return

    self.assertTrue(mock_est.export_was_called)
    self.assertEqual(export_results, ['path_to_export'])

  def test_errors_out_if_evaluate_returns_empty_dict(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=2))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    train_spec = training.TrainSpec(input_fn=self._input_fn)
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False), throttle_secs=0)
    mock_est.evaluate.return_value = {}

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    with self.assertRaisesRegexp(ValueError, _INVALID_EMPTY_EVAL_RESULT_ERR):
      executor.run_local()

  def test_errors_out_if_evaluate_returns_non_dict(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=2))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    train_spec = training.TrainSpec(input_fn=self._input_fn)
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False), throttle_secs=0)
    mock_est.evaluate.return_value = 123

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    with self.assertRaisesRegexp(TypeError, _INVALID_EVAL_RESULT_TYPE_ERR):
      executor.run_local()

  def test_errors_out_if_evaluate_returns_dict_without_global_step(self):
    est = estimator_lib.Estimator(
        model_fn=self._model_fn,
        config=run_config_lib.RunConfig(save_checkpoints_steps=2))
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    train_spec = training.TrainSpec(input_fn=self._input_fn)
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False), throttle_secs=0)
    mock_est.evaluate.return_value = {'loss': 123}

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    with self.assertRaisesRegexp(ValueError,
                                 _MISSING_GLOBAL_STEP_IN_EVAL_RESULT_ERR):
      executor.run_local()

  def test_train_and_evaluate_return_metrics(self):
    est = estimator_lib.Estimator(model_fn=self._model_fn)
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator, wraps=est)
    train_spec = training.TrainSpec(
        input_fn=self._input_fn, max_steps=12, hooks=[_FakeHook()])
    eval_spec = training.EvalSpec(
        input_fn=lambda: self._input_fn(repeat=False),
        steps=2,
        hooks=[_FakeHook()],
        name='local_eval')

    executor = training._TrainingExecutor(mock_est, train_spec, eval_spec)
    # pylint: disable=assignment-from-no-return
    metrics, _ = executor.run_local()
    # pylint: enable=assignment-from-no-return
    self.assertEqual(metrics['global_step'], 12)


class TrainAndEvaluateRunTest(test.TestCase):
  """Tests that _TrainingExecutor.run() dispatches to the right run_* task."""

  def _test_run_task_and_executor(self, run_config):
    # Builds an executor whose run_* methods just record, in call_task,
    # which task was invoked.
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.config = run_config
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    executor = training._TrainingExecutor(mock_est, mock_train_spec,
                                          mock_eval_spec)

    executor.call_task = {}

    def task_fn(name):

      def _fn():
        executor.call_task[name] = 1

      return _fn

    executor.run_chief = task_fn('chief')
    executor.run_master = task_fn('master')
    executor.run_ps = task_fn('ps')
    executor.run_evaluator = task_fn('evaluator')
    executor.run_worker = task_fn('worker')
    executor.run_local = task_fn('local')
    return executor

  def test_run_chief(self):
    executor = self._test_run_task_and_executor(
        run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_CHIEF))
    executor.run()
    self.assertEqual(1, executor.call_task['chief'])

  def test_run_worker(self):
    executor = self._test_run_task_and_executor(
        run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_WORKER))
    executor.run()
    self.assertEqual(1, executor.call_task['worker'])

  def test_run_ps(self):
    executor = self._test_run_task_and_executor(
        run_config=_create_run_config_with_cluster_spec(_TF_CONFIG_FOR_PS))
    executor.run()
    self.assertEqual(1, executor.call_task['ps'])

  def test_run_evaluator(self):
    executor = self._test_run_task_and_executor(
        run_config=_create_run_config_with_cluster_spec(
            _TF_CONFIG_FOR_EVALUATOR))
    executor.run()
    self.assertEqual(1, executor.call_task['evaluator'])

  def test_run_local(self):
    executor = self._test_run_task_and_executor(
        run_config=run_config_lib.RunConfig())
    executor.run()
    self.assertEqual(1, executor.call_task['local'])

  def test_invalid_local_task(self):
    tf_config = {
        'cluster': {
            run_config_lib.TaskType.CHIEF: ['host0:0'],
            'local': ['hos1:1'],
        },
        'task': {
            'type': 'local',  # invalid task type.
            'index': 0
        }
    }
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.config = _create_run_config_with_cluster_spec(tf_config)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    executor = training._TrainingExecutor(mock_est, mock_train_spec,
                                          mock_eval_spec)
    with self.assertRaisesRegexp(ValueError, _INVALID_LOCAL_TASK_WITH_CLUSTER):
      executor.run()

  def test_unsupported_task_due_to_missing_run_task(self):
    unsupported_task = 'alloc'
    tf_config = {
        'cluster': {
            run_config_lib.TaskType.CHIEF: ['host0:0'],
            unsupported_task: ['hos1:1'],
        },
        'task': {
            'type': unsupported_task,
            'index': 0
        }
    }
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.config = _create_run_config_with_cluster_spec(tf_config)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    executor = training._TrainingExecutor(mock_est, mock_train_spec,
                                          mock_eval_spec)
    with self.assertRaisesRegexp(ValueError, _INVALID_TASK_TO_RUN):
      executor.run()

  def test_unsupported_task_due_to_not_callable(self):
    unsupported_task = 'alloc'
    tf_config = {
        'cluster': {
            run_config_lib.TaskType.CHIEF: ['host0:0'],
            unsupported_task: ['hos1:1'],
        },
        'task': {
            'type': unsupported_task,
            'index': 0
        }
    }
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.config = _create_run_config_with_cluster_spec(tf_config)
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    executor = training._TrainingExecutor(mock_est, mock_train_spec,
                                          mock_eval_spec)
    executor.run_alloc = 123  # not callable
    with self.assertRaisesRegexp(ValueError, _INVALID_TASK_TO_RUN):
      executor.run()

  def test_invalid_task_type(self):
    mock_est = test.mock.Mock(spec=estimator_lib.Estimator)
    mock_est.config = test.mock.Mock()
    mock_train_spec = test.mock.Mock(spec=training.TrainSpec)
    mock_eval_spec = test.mock.Mock(spec=training.EvalSpec)

    mock_est.config =
test.mock.Mock() mock_est.config.cluster_spec = server_lib.ClusterSpec({'1': ['dummy']}) mock_est.config.task_type = '' executor = training._TrainingExecutor(mock_est, mock_train_spec, mock_eval_spec) with self.assertRaisesRegexp(ValueError, _INVALID_TASK_TYPE): executor.run() class TrainAndEvaluateIntegrationTest(test.TestCase): def setUp(self): self._model_dir = tempfile.mkdtemp() def tearDown(self): if self._model_dir: shutil.rmtree(self._model_dir) def _as_label(self, data_in_float): return np.rint(data_in_float).astype(np.int64) def _get_exporter(self, name, fc): feature_spec = feature_column.make_parse_example_spec(fc) serving_input_receiver_fn = ( export_lib.build_parsing_serving_input_receiver_fn(feature_spec)) return exporter_lib.LatestExporter( name, serving_input_receiver_fn=serving_input_receiver_fn) def _extract_loss_and_global_step(self, event_folder): """Returns the loss and global step in last event.""" event_paths = glob.glob(os.path.join(event_folder, 'events*')) loss = None global_step_count = None for e in summary_iterator.summary_iterator(event_paths[-1]): current_loss = None for v in e.summary.value: if v.tag == 'loss': current_loss = v.simple_value # If loss is not found, global step is meaningless. 
if current_loss is None: continue current_global_step = e.step if global_step_count is None or current_global_step > global_step_count: global_step_count = current_global_step loss = current_loss return (loss, global_step_count) def test_complete_flow_with_non_distributed_configuration(self): n_classes = 3 input_dimension = 2 batch_size = 10 eval_name = 'foo' exporter_name = 'saved_model_exporter' # max_steps should be larger than save_summary_steps max_steps = 10 save_summary_steps = 9 data = np.linspace( 0., n_classes - 1., batch_size * input_dimension, dtype=np.float32) x_data = data.reshape(batch_size, input_dimension) y_data = np.reshape(self._as_label(data[:batch_size]), (batch_size, 1)) # learn y = x def train_input_fn(): return dataset_ops.Dataset.from_tensor_slices(({ 'x': x_data }, y_data)).batch(batch_size).repeat().shuffle(1000) def eval_input_fn(): return dataset_ops.Dataset.from_tensor_slices(({ 'x': x_data }, y_data)).batch(batch_size) def predict_input_fn(): return dataset_ops.Dataset.from_tensor_slices({ 'x': x_data }).batch(batch_size) feature_columns = [ feature_column.numeric_column('x', shape=(input_dimension,))] est = dnn.DNNClassifier( hidden_units=(2, 2), feature_columns=feature_columns, n_classes=n_classes, config=run_config_lib.RunConfig(save_summary_steps=save_summary_steps), model_dir=self._model_dir) train_spec = training.TrainSpec(input_fn=train_input_fn, max_steps=max_steps) eval_spec = training.EvalSpec( name=eval_name, input_fn=eval_input_fn, steps=None, exporters=self._get_exporter(exporter_name, feature_columns), throttle_secs=0) training.train_and_evaluate(est, train_spec, eval_spec) # Make sure nothing is stuck in limbo. writer_cache.FileWriterCache.clear() # Examine the training events. Use a range to check global step to avoid # flakyness due to global step race condition. training_loss, _ = self._extract_loss_and_global_step(est.model_dir) self.assertIsNotNone(training_loss) # Examine the eval events. 
The global step should be accurate. eval_loss, eval_global_step = self._extract_loss_and_global_step( event_folder=est.eval_dir(eval_name)) self.assertIsNotNone(eval_loss) self.assertEqual(max_steps, eval_global_step) # Examine the export folder. export_dir = os.path.join(os.path.join(est.model_dir, 'export'), exporter_name) self.assertTrue(gfile.Exists(export_dir)) # Examine the ckpt for predict. predicted_proba = np.array([ x[prediction_keys.PredictionKeys.PROBABILITIES] for x in est.predict(predict_input_fn) ]) self.assertAllEqual((batch_size, n_classes), predicted_proba.shape) if __name__ == '__main__': test.main()<|fim▁end|>
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function (grunt) { grunt.loadNpmTasks('grunt-contrib-jshint'); grunt.initConfig({ jshint: { options: { jshintrc: true }, all: ['*.js', 'lib/*.js', 'test/*.js'] } }); grunt.registerTask('default', ['jshint']);<|fim▁hole|><|fim▁end|>
};
<|file_name|>Setup.py<|end_file_name|><|fim▁begin|>__author__ = 'Autio'<|fim▁hole|>from distutils.core import setup import py2exe setup(windows=['ShitCrimson.py'])<|fim▁end|>
<|file_name|>exec.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////// // Name: exec.cpp // Purpose: exec sample demonstrates wxExecute and related functions // Author: Vadim Zeitlin // Modified by: // Created: 15.01.00 // RCS-ID: $Id: exec.cpp 54352 2008-06-25 07:51:09Z JS $ // Copyright: (c) Vadim Zeitlin // Licence: wxWindows licence ///////////////////////////////////////////////////////////////////////////// // ============================================================================ // declarations // ============================================================================ // ---------------------------------------------------------------------------- // headers // ---------------------------------------------------------------------------- // For compilers that support precompilation, includes "wx/wx.h". #include "wx/wxprec.h" #ifdef __BORLANDC__ #pragma hdrstop #endif // for all others, include the necessary headers (this file is usually all you // need because it includes almost all "standard" wxWidgets headers #ifndef WX_PRECOMP #include "wx/app.h" #include "wx/log.h" #include "wx/frame.h" #include "wx/panel.h" #include "wx/timer.h" #include "wx/utils.h" #include "wx/menu.h" #include "wx/msgdlg.h" #include "wx/textdlg.h" #include "wx/filedlg.h" #include "wx/choicdlg.h" #include "wx/button.h" #include "wx/textctrl.h" #include "wx/listbox.h" #include "wx/sizer.h" #endif #include "wx/txtstrm.h" #include "wx/numdlg.h" #include "wx/textdlg.h" #include "wx/ffile.h" #include "wx/process.h" #include "wx/mimetype.h" #ifdef __WINDOWS__ #include "wx/dde.h" #endif // __WINDOWS__ // ---------------------------------------------------------------------------- // the usual application and main frame classes // ---------------------------------------------------------------------------- // Define a new application type, each program should derive a class from wxApp class MyApp : public wxApp { public: // 
override base class virtuals // ---------------------------- // this one is called on application startup and is a good place for the app // initialization (doing it here and not in the ctor allows to have an error // return: if OnInit() returns false, the application terminates) virtual bool OnInit(); }; // Define an array of process pointers used by MyFrame class MyPipedProcess; WX_DEFINE_ARRAY_PTR(MyPipedProcess *, MyProcessesArray); // Define a new frame type: this is going to be our main frame class MyFrame : public wxFrame { public: // ctor(s) MyFrame(const wxString& title, const wxPoint& pos, const wxSize& size); // event handlers (these functions should _not_ be virtual) void OnQuit(wxCommandEvent& event); void OnKill(wxCommandEvent& event); void OnClear(wxCommandEvent& event); void OnSyncExec(wxCommandEvent& event); void OnAsyncExec(wxCommandEvent& event); void OnShell(wxCommandEvent& event); void OnExecWithRedirect(wxCommandEvent& event); void OnExecWithPipe(wxCommandEvent& event); void OnPOpen(wxCommandEvent& event); void OnFileExec(wxCommandEvent& event); void OnOpenURL(wxCommandEvent& event); void OnAbout(wxCommandEvent& event); // polling output of async processes void OnTimer(wxTimerEvent& event); void OnIdle(wxIdleEvent& event); // for MyPipedProcess void OnProcessTerminated(MyPipedProcess *process); wxListBox *GetLogListBox() const { return m_lbox; } private: void ShowOutput(const wxString& cmd, const wxArrayString& output, const wxString& title); void DoAsyncExec(const wxString& cmd); void AddAsyncProcess(MyPipedProcess *process) { if ( m_running.IsEmpty() ) { // we want to start getting the timer events to ensure that a // steady stream of idle events comes in -- otherwise we // wouldn't be able to poll the child process input m_timerIdleWakeUp.Start(100); } //else: the timer is already running m_running.Add(process); } void RemoveAsyncProcess(MyPipedProcess *process) { m_running.Remove(process); if ( m_running.IsEmpty() ) { // we don't need to 
get idle events all the time any more m_timerIdleWakeUp.Stop(); } } // the PID of the last process we launched asynchronously long m_pidLast; // last command we executed wxString m_cmdLast; #ifdef __WINDOWS__ void OnDDEExec(wxCommandEvent& event); void OnDDERequest(wxCommandEvent& event); bool GetDDEServer(); // last params of a DDE transaction wxString m_server, m_topic, m_cmdDde; #endif // __WINDOWS__ wxListBox *m_lbox; MyProcessesArray m_running; // the idle event wake up timer wxTimer m_timerIdleWakeUp; // any class wishing to process wxWidgets events must use this macro DECLARE_EVENT_TABLE() }; // ---------------------------------------------------------------------------- // MyPipeFrame: allows the user to communicate with the child process // ---------------------------------------------------------------------------- class MyPipeFrame : public wxFrame { public: MyPipeFrame(wxFrame *parent, const wxString& cmd, wxProcess *process); protected: void OnTextEnter(wxCommandEvent& WXUNUSED(event)) { DoSend(); } void OnBtnSend(wxCommandEvent& WXUNUSED(event)) { DoSend(); } void OnBtnSendFile(wxCommandEvent& WXUNUSED(event)); void OnBtnGet(wxCommandEvent& WXUNUSED(event)) { DoGet(); } void OnBtnClose(wxCommandEvent& WXUNUSED(event)) { DoClose(); } void OnClose(wxCloseEvent& event); void OnProcessTerm(wxProcessEvent& event); void DoSend() { wxString s(m_textOut->GetValue()); s += _T('\n'); m_out.Write(s.c_str(), s.length()); m_textOut->Clear(); DoGet(); } void DoGet(); void DoClose(); private: void DoGetFromStream(wxTextCtrl *text, wxInputStream& in); void DisableInput(); void DisableOutput(); wxProcess *m_process; wxOutputStream &m_out; wxInputStream &m_in, &m_err; wxTextCtrl *m_textOut, *m_textIn, *m_textErr; DECLARE_EVENT_TABLE() }; // ---------------------------------------------------------------------------- // wxProcess-derived classes // ---------------------------------------------------------------------------- // This is the handler for process termination 
events class MyProcess : public wxProcess { public: MyProcess(MyFrame *parent, const wxString& cmd) : wxProcess(parent), m_cmd(cmd) { m_parent = parent; } // instead of overriding this virtual function we might as well process the // event from it in the frame class - this might be more convenient in some // cases virtual void OnTerminate(int pid, int status); protected: MyFrame *m_parent; wxString m_cmd; }; // A specialization of MyProcess for redirecting the output class MyPipedProcess : public MyProcess { public: MyPipedProcess(MyFrame *parent, const wxString& cmd) : MyProcess(parent, cmd) { Redirect(); } virtual void OnTerminate(int pid, int status); virtual bool HasInput(); }; // A version of MyPipedProcess which also sends input to the stdin of the // child process class MyPipedProcess2 : public MyPipedProcess { public: MyPipedProcess2(MyFrame *parent, const wxString& cmd, const wxString& input) : MyPipedProcess(parent, cmd), m_input(input) { } virtual bool HasInput(); private: wxString m_input; }; // ---------------------------------------------------------------------------- // constants // ---------------------------------------------------------------------------- // IDs for the controls and the menu commands enum { // menu items Exec_Quit = 100, Exec_Kill, Exec_ClearLog, Exec_SyncExec = 200, Exec_AsyncExec, Exec_Shell, Exec_POpen, Exec_OpenFile, Exec_OpenURL, Exec_DDEExec, Exec_DDERequest, Exec_Redirect, Exec_Pipe, Exec_About = 300, // control ids Exec_Btn_Send = 1000, Exec_Btn_SendFile, Exec_Btn_Get, Exec_Btn_Close }; static const wxChar *DIALOG_TITLE = _T("Exec sample"); // ---------------------------------------------------------------------------- // event tables and other macros for wxWidgets // ---------------------------------------------------------------------------- // the event tables connect the wxWidgets events with the functions (event // handlers) which process them. 
It can be also done at run-time, but for the // simple menu events like this the static method is much simpler. BEGIN_EVENT_TABLE(MyFrame, wxFrame) EVT_MENU(Exec_Quit, MyFrame::OnQuit) EVT_MENU(Exec_Kill, MyFrame::OnKill) EVT_MENU(Exec_ClearLog, MyFrame::OnClear) EVT_MENU(Exec_SyncExec, MyFrame::OnSyncExec) EVT_MENU(Exec_AsyncExec, MyFrame::OnAsyncExec) EVT_MENU(Exec_Shell, MyFrame::OnShell) EVT_MENU(Exec_Redirect, MyFrame::OnExecWithRedirect) EVT_MENU(Exec_Pipe, MyFrame::OnExecWithPipe) EVT_MENU(Exec_POpen, MyFrame::OnPOpen) EVT_MENU(Exec_OpenFile, MyFrame::OnFileExec) EVT_MENU(Exec_OpenURL, MyFrame::OnOpenURL) #ifdef __WINDOWS__ EVT_MENU(Exec_DDEExec, MyFrame::OnDDEExec) EVT_MENU(Exec_DDERequest, MyFrame::OnDDERequest)<|fim▁hole|> EVT_MENU(Exec_About, MyFrame::OnAbout) EVT_IDLE(MyFrame::OnIdle) EVT_TIMER(wxID_ANY, MyFrame::OnTimer) END_EVENT_TABLE() BEGIN_EVENT_TABLE(MyPipeFrame, wxFrame) EVT_BUTTON(Exec_Btn_Send, MyPipeFrame::OnBtnSend) EVT_BUTTON(Exec_Btn_SendFile, MyPipeFrame::OnBtnSendFile) EVT_BUTTON(Exec_Btn_Get, MyPipeFrame::OnBtnGet) EVT_BUTTON(Exec_Btn_Close, MyPipeFrame::OnBtnClose) EVT_TEXT_ENTER(wxID_ANY, MyPipeFrame::OnTextEnter) EVT_CLOSE(MyPipeFrame::OnClose) EVT_END_PROCESS(wxID_ANY, MyPipeFrame::OnProcessTerm) END_EVENT_TABLE() // Create a new application object: this macro will allow wxWidgets to create // the application object during program execution (it's better than using a // static object for many reasons) and also declares the accessor function // wxGetApp() which will return the reference of the right type (i.e. 
MyApp and // not wxApp) IMPLEMENT_APP(MyApp) // ============================================================================ // implementation // ============================================================================ // ---------------------------------------------------------------------------- // the application class // ---------------------------------------------------------------------------- // `Main program' equivalent: the program execution "starts" here bool MyApp::OnInit() { // Create the main application window MyFrame *frame = new MyFrame(_T("Exec wxWidgets sample"), wxDefaultPosition, wxSize(500, 140)); // Show it and tell the application that it's our main window frame->Show(true); SetTopWindow(frame); // success: wxApp::OnRun() will be called which will enter the main message // loop and the application will run. If we returned false here, the // application would exit immediately. return true; } // ---------------------------------------------------------------------------- // main frame // ---------------------------------------------------------------------------- #ifdef __VISUALC__ #pragma warning(disable: 4355) // this used in base member initializer list #endif // frame constructor MyFrame::MyFrame(const wxString& title, const wxPoint& pos, const wxSize& size) : wxFrame((wxFrame *)NULL, wxID_ANY, title, pos, size), m_timerIdleWakeUp(this) { m_pidLast = 0; #ifdef __WXMAC__ // we need this in order to allow the about menu relocation, since ABOUT is // not the default id of the about menu wxApp::s_macAboutMenuItemId = Exec_About; #endif // create a menu bar wxMenu *menuFile = new wxMenu(wxEmptyString, wxMENU_TEAROFF); menuFile->Append(Exec_Kill, _T("&Kill process...\tCtrl-K"), _T("Kill a process by PID")); menuFile->AppendSeparator(); menuFile->Append(Exec_ClearLog, _T("&Clear log\tCtrl-C"), _T("Clear the log window")); menuFile->AppendSeparator(); menuFile->Append(Exec_Quit, _T("E&xit\tAlt-X"), _T("Quit this program")); wxMenu *execMenu = 
new wxMenu; execMenu->Append(Exec_SyncExec, _T("Sync &execution...\tCtrl-E"), _T("Launch a program and return when it terminates")); execMenu->Append(Exec_AsyncExec, _T("&Async execution...\tCtrl-A"), _T("Launch a program and return immediately")); execMenu->Append(Exec_Shell, _T("Execute &shell command...\tCtrl-S"), _T("Launch a shell and execute a command in it")); execMenu->AppendSeparator(); execMenu->Append(Exec_Redirect, _T("Capture command &output...\tCtrl-O"), _T("Launch a program and capture its output")); execMenu->Append(Exec_Pipe, _T("&Pipe through command..."), _T("Pipe a string through a filter")); execMenu->Append(Exec_POpen, _T("&Open a pipe to a command...\tCtrl-P"), _T("Open a pipe to and from another program")); execMenu->AppendSeparator(); execMenu->Append(Exec_OpenFile, _T("Open &file...\tCtrl-F"), _T("Launch the command to open this kind of files")); execMenu->Append(Exec_OpenURL, _T("Open &URL...\tCtrl-U"), _T("Launch the default browser with the given URL")); #ifdef __WINDOWS__ execMenu->AppendSeparator(); execMenu->Append(Exec_DDEExec, _T("Execute command via &DDE...\tCtrl-D")); execMenu->Append(Exec_DDERequest, _T("Send DDE &request...\tCtrl-R")); #endif wxMenu *helpMenu = new wxMenu(wxEmptyString, wxMENU_TEAROFF); helpMenu->Append(Exec_About, _T("&About...\tF1"), _T("Show about dialog")); // now append the freshly created menu to the menu bar... wxMenuBar *menuBar = new wxMenuBar(); menuBar->Append(menuFile, _T("&File")); menuBar->Append(execMenu, _T("&Exec")); menuBar->Append(helpMenu, _T("&Help")); // ... 
and attach this menu bar to the frame SetMenuBar(menuBar); // create the listbox in which we will show misc messages as they come m_lbox = new wxListBox(this, wxID_ANY); wxFont font(12, wxFONTFAMILY_TELETYPE, wxFONTSTYLE_NORMAL, wxFONTWEIGHT_NORMAL); if ( font.Ok() ) m_lbox->SetFont(font); #if wxUSE_STATUSBAR // create a status bar just for fun (by default with 1 pane only) CreateStatusBar(); SetStatusText(_T("Welcome to wxWidgets exec sample!")); #endif // wxUSE_STATUSBAR } // ---------------------------------------------------------------------------- // event handlers: file and help menu // ---------------------------------------------------------------------------- void MyFrame::OnQuit(wxCommandEvent& WXUNUSED(event)) { // true is to force the frame to close Close(true); } void MyFrame::OnClear(wxCommandEvent& WXUNUSED(event)) { m_lbox->Clear(); } void MyFrame::OnAbout(wxCommandEvent& WXUNUSED(event)) { wxMessageBox(_T("Exec wxWidgets Sample\n(c) 2000-2002 Vadim Zeitlin"), _T("About Exec"), wxOK | wxICON_INFORMATION, this); } void MyFrame::OnKill(wxCommandEvent& WXUNUSED(event)) { long pid = wxGetNumberFromUser(_T("Please specify the process to kill"), _T("Enter PID:"), _T("Exec question"), m_pidLast, // we need the full unsigned int range -INT_MAX, INT_MAX, this); if ( pid == -1 ) { // cancelled return; } static const wxString signalNames[] = { _T("Just test (SIGNONE)"), _T("Hangup (SIGHUP)"), _T("Interrupt (SIGINT)"), _T("Quit (SIGQUIT)"), _T("Illegal instruction (SIGILL)"), _T("Trap (SIGTRAP)"), _T("Abort (SIGABRT)"), _T("Emulated trap (SIGEMT)"), _T("FP exception (SIGFPE)"), _T("Kill (SIGKILL)"), _T("Bus (SIGBUS)"), _T("Segment violation (SIGSEGV)"), _T("System (SIGSYS)"), _T("Broken pipe (SIGPIPE)"), _T("Alarm (SIGALRM)"), _T("Terminate (SIGTERM)"), }; int sig = wxGetSingleChoiceIndex(_T("How to kill the process?"), _T("Exec question"), WXSIZEOF(signalNames), signalNames, this); switch ( sig ) { default: wxFAIL_MSG( _T("unexpected return value") ); // fall 
through case -1: // cancelled return; case wxSIGNONE: case wxSIGHUP: case wxSIGINT: case wxSIGQUIT: case wxSIGILL: case wxSIGTRAP: case wxSIGABRT: case wxSIGEMT: case wxSIGFPE: case wxSIGKILL: case wxSIGBUS: case wxSIGSEGV: case wxSIGSYS: case wxSIGPIPE: case wxSIGALRM: case wxSIGTERM: break; } if ( sig == 0 ) { if ( wxProcess::Exists(pid) ) wxLogStatus(_T("Process %ld is running."), pid); else wxLogStatus(_T("No process with pid = %ld."), pid); } else // not SIGNONE { wxKillError rc = wxProcess::Kill(pid, (wxSignal)sig); if ( rc == wxKILL_OK ) { wxLogStatus(_T("Process %ld killed with signal %d."), pid, sig); } else { static const wxChar *errorText[] = { _T(""), // no error _T("signal not supported"), _T("permission denied"), _T("no such process"), _T("unspecified error"), }; wxLogStatus(_T("Failed to kill process %ld with signal %d: %s"), pid, sig, errorText[rc]); } } } // ---------------------------------------------------------------------------- // event handlers: exec menu // ---------------------------------------------------------------------------- void MyFrame::DoAsyncExec(const wxString& cmd) { wxProcess *process = new MyProcess(this, cmd); m_pidLast = wxExecute(cmd, wxEXEC_ASYNC, process); if ( !m_pidLast ) { wxLogError( _T("Execution of '%s' failed."), cmd.c_str() ); delete process; } else { wxLogStatus( _T("Process %ld (%s) launched."), m_pidLast, cmd.c_str() ); m_cmdLast = cmd; } } void MyFrame::OnSyncExec(wxCommandEvent& WXUNUSED(event)) { wxString cmd = wxGetTextFromUser(_T("Enter the command: "), DIALOG_TITLE, m_cmdLast); if ( !cmd ) return; wxLogStatus( _T("'%s' is running please wait..."), cmd.c_str() ); int code = wxExecute(cmd, wxEXEC_SYNC); wxLogStatus(_T("Process '%s' terminated with exit code %d."), cmd.c_str(), code); m_cmdLast = cmd; } void MyFrame::OnAsyncExec(wxCommandEvent& WXUNUSED(event)) { wxString cmd = wxGetTextFromUser(_T("Enter the command: "), DIALOG_TITLE, m_cmdLast); if ( !cmd ) return; DoAsyncExec(cmd); } void 
MyFrame::OnShell(wxCommandEvent& WXUNUSED(event)) { wxString cmd = wxGetTextFromUser(_T("Enter the command: "), DIALOG_TITLE, m_cmdLast); if ( !cmd ) return; int code = wxShell(cmd); wxLogStatus(_T("Shell command '%s' terminated with exit code %d."), cmd.c_str(), code); m_cmdLast = cmd; } void MyFrame::OnExecWithRedirect(wxCommandEvent& WXUNUSED(event)) { wxString cmd = wxGetTextFromUser(_T("Enter the command: "), DIALOG_TITLE, m_cmdLast); if ( !cmd ) return; bool sync; switch ( wxMessageBox(_T("Execute it synchronously?"), _T("Exec question"), wxYES_NO | wxCANCEL | wxICON_QUESTION, this) ) { case wxYES: sync = true; break; case wxNO: sync = false; break; default: return; } if ( sync ) { wxArrayString output, errors; int code = wxExecute(cmd, output, errors); wxLogStatus(_T("command '%s' terminated with exit code %d."), cmd.c_str(), code); if ( code != -1 ) { ShowOutput(cmd, output, _T("Output")); ShowOutput(cmd, errors, _T("Errors")); } } else // async exec { MyPipedProcess *process = new MyPipedProcess(this, cmd); if ( !wxExecute(cmd, wxEXEC_ASYNC, process) ) { wxLogError(_T("Execution of '%s' failed."), cmd.c_str()); delete process; } else { AddAsyncProcess(process); } } m_cmdLast = cmd; } void MyFrame::OnExecWithPipe(wxCommandEvent& WXUNUSED(event)) { if ( !m_cmdLast ) m_cmdLast = _T("tr [a-z] [A-Z]"); wxString cmd = wxGetTextFromUser(_T("Enter the command: "), DIALOG_TITLE, m_cmdLast); if ( !cmd ) return; wxString input = wxGetTextFromUser(_T("Enter the string to send to it: "), DIALOG_TITLE); if ( !input ) return; // always execute the filter asynchronously MyPipedProcess2 *process = new MyPipedProcess2(this, cmd, input); long pid = wxExecute(cmd, wxEXEC_ASYNC, process); if ( pid ) { wxLogStatus( _T("Process %ld (%s) launched."), pid, cmd.c_str() ); AddAsyncProcess(process); } else { wxLogError(_T("Execution of '%s' failed."), cmd.c_str()); delete process; } m_cmdLast = cmd; } void MyFrame::OnPOpen(wxCommandEvent& WXUNUSED(event)) { wxString cmd = 
wxGetTextFromUser(_T("Enter the command to launch: "), DIALOG_TITLE, m_cmdLast); if ( cmd.empty() ) return; wxProcess *process = wxProcess::Open(cmd); if ( !process ) { wxLogError(_T("Failed to launch the command.")); return; } wxLogVerbose(_T("PID of the new process: %ld"), process->GetPid()); wxOutputStream *out = process->GetOutputStream(); if ( !out ) { wxLogError(_T("Failed to connect to child stdin")); return; } wxInputStream *in = process->GetInputStream(); if ( !in ) { wxLogError(_T("Failed to connect to child stdout")); return; } new MyPipeFrame(this, cmd, process); } void MyFrame::OnFileExec(wxCommandEvent& WXUNUSED(event)) { static wxString s_filename; wxString filename; #if wxUSE_FILEDLG filename = wxLoadFileSelector(_T("any file"), NULL, s_filename, this); #else // !wxUSE_FILEDLG filename = wxGetTextFromUser(_T("Enter the file name"), _T("exec sample"), s_filename, this); #endif // wxUSE_FILEDLG/!wxUSE_FILEDLG if ( filename.empty() ) return; s_filename = filename; wxString ext = filename.AfterLast(_T('.')); wxFileType *ft = wxTheMimeTypesManager->GetFileTypeFromExtension(ext); if ( !ft ) { wxLogError(_T("Impossible to determine the file type for extension '%s'"), ext.c_str()); return; } wxString cmd; bool ok = ft->GetOpenCommand(&cmd, wxFileType::MessageParameters(filename)); delete ft; if ( !ok ) { wxLogError(_T("Impossible to find out how to open files of extension '%s'"), ext.c_str()); return; } DoAsyncExec(cmd); } void MyFrame::OnOpenURL(wxCommandEvent& WXUNUSED(event)) { static wxString s_filename; wxString filename = wxGetTextFromUser ( _T("Enter the URL"), _T("exec sample"), s_filename, this ); if ( filename.empty() ) return; s_filename = filename; if ( !wxLaunchDefaultBrowser(s_filename) ) wxLogError(_T("Failed to open URL \"%s\""), s_filename.c_str()); } // ---------------------------------------------------------------------------- // DDE stuff // ---------------------------------------------------------------------------- #ifdef __WINDOWS__ 
bool MyFrame::GetDDEServer() { wxString server = wxGetTextFromUser(_T("Server to connect to:"), DIALOG_TITLE, m_server); if ( !server ) return false; m_server = server; wxString topic = wxGetTextFromUser(_T("DDE topic:"), DIALOG_TITLE, m_topic); if ( !topic ) return false; m_topic = topic; wxString cmd = wxGetTextFromUser(_T("DDE command:"), DIALOG_TITLE, m_cmdDde); if ( !cmd ) return false; m_cmdDde = cmd; return true; } void MyFrame::OnDDEExec(wxCommandEvent& WXUNUSED(event)) { if ( !GetDDEServer() ) return; wxDDEClient client; wxConnectionBase *conn = client.MakeConnection(wxEmptyString, m_server, m_topic); if ( !conn ) { wxLogError(_T("Failed to connect to the DDE server '%s'."), m_server.c_str()); } else { if ( !conn->Execute(m_cmdDde) ) { wxLogError(_T("Failed to execute command '%s' via DDE."), m_cmdDde.c_str()); } else { wxLogStatus(_T("Successfully executed DDE command")); } } } void MyFrame::OnDDERequest(wxCommandEvent& WXUNUSED(event)) { if ( !GetDDEServer() ) return; wxDDEClient client; wxConnectionBase *conn = client.MakeConnection(wxEmptyString, m_server, m_topic); if ( !conn ) { wxLogError(_T("Failed to connect to the DDE server '%s'."), m_server.c_str()); } else { if ( !conn->Request(m_cmdDde) ) { wxLogError(_T("Failed to send request '%s' via DDE."), m_cmdDde.c_str()); } else { wxLogStatus(_T("Successfully sent DDE request.")); } } } #endif // __WINDOWS__ // ---------------------------------------------------------------------------- // various helpers // ---------------------------------------------------------------------------- // input polling void MyFrame::OnIdle(wxIdleEvent& event) { size_t count = m_running.GetCount(); for ( size_t n = 0; n < count; n++ ) { if ( m_running[n]->HasInput() ) { event.RequestMore(); } } } void MyFrame::OnTimer(wxTimerEvent& WXUNUSED(event)) { wxWakeUpIdle(); } void MyFrame::OnProcessTerminated(MyPipedProcess *process) { RemoveAsyncProcess(process); } void MyFrame::ShowOutput(const wxString& cmd, const 
wxArrayString& output, const wxString& title) { size_t count = output.GetCount(); if ( !count ) return; m_lbox->Append(wxString::Format(_T("--- %s of '%s' ---"), title.c_str(), cmd.c_str())); for ( size_t n = 0; n < count; n++ ) { m_lbox->Append(output[n]); } m_lbox->Append(wxString::Format(_T("--- End of %s ---"), title.Lower().c_str())); } // ---------------------------------------------------------------------------- // MyProcess // ---------------------------------------------------------------------------- void MyProcess::OnTerminate(int pid, int status) { wxLogStatus(m_parent, _T("Process %u ('%s') terminated with exit code %d."), pid, m_cmd.c_str(), status); // we're not needed any more delete this; } // ---------------------------------------------------------------------------- // MyPipedProcess // ---------------------------------------------------------------------------- bool MyPipedProcess::HasInput() { bool hasInput = false; if ( IsInputAvailable() ) { wxTextInputStream tis(*GetInputStream()); // this assumes that the output is always line buffered wxString msg; msg << m_cmd << _T(" (stdout): ") << tis.ReadLine(); m_parent->GetLogListBox()->Append(msg); hasInput = true; } if ( IsErrorAvailable() ) { wxTextInputStream tis(*GetErrorStream()); // this assumes that the output is always line buffered wxString msg; msg << m_cmd << _T(" (stderr): ") << tis.ReadLine(); m_parent->GetLogListBox()->Append(msg); hasInput = true; } return hasInput; } void MyPipedProcess::OnTerminate(int pid, int status) { // show the rest of the output while ( HasInput() ) ; m_parent->OnProcessTerminated(this); MyProcess::OnTerminate(pid, status); } // ---------------------------------------------------------------------------- // MyPipedProcess2 // ---------------------------------------------------------------------------- bool MyPipedProcess2::HasInput() { if ( !m_input.empty() ) { wxTextOutputStream os(*GetOutputStream()); os.WriteString(m_input); CloseOutput(); 
m_input.clear(); // call us once again - may be we'll have output return true; } return MyPipedProcess::HasInput(); } // ============================================================================ // MyPipeFrame implementation // ============================================================================ MyPipeFrame::MyPipeFrame(wxFrame *parent, const wxString& cmd, wxProcess *process) : wxFrame(parent, wxID_ANY, cmd), m_process(process), // in a real program we'd check that the streams are !NULL here m_out(*process->GetOutputStream()), m_in(*process->GetInputStream()), m_err(*process->GetErrorStream()) { m_process->SetNextHandler(this); wxPanel *panel = new wxPanel(this, wxID_ANY); m_textOut = new wxTextCtrl(panel, wxID_ANY, wxEmptyString, wxDefaultPosition, wxDefaultSize, wxTE_PROCESS_ENTER); m_textIn = new wxTextCtrl(panel, wxID_ANY, wxEmptyString, wxDefaultPosition, wxDefaultSize, wxTE_MULTILINE | wxTE_RICH); m_textIn->SetEditable(false); m_textErr = new wxTextCtrl(panel, wxID_ANY, wxEmptyString, wxDefaultPosition, wxDefaultSize, wxTE_MULTILINE | wxTE_RICH); m_textErr->SetEditable(false); wxSizer *sizerTop = new wxBoxSizer(wxVERTICAL); sizerTop->Add(m_textOut, 0, wxGROW | wxALL, 5); wxSizer *sizerBtns = new wxBoxSizer(wxHORIZONTAL); sizerBtns-> Add(new wxButton(panel, Exec_Btn_Send, _T("&Send")), 0, wxALL, 5); sizerBtns-> Add(new wxButton(panel, Exec_Btn_SendFile, _T("&File...")), 0, wxALL, 5); sizerBtns-> Add(new wxButton(panel, Exec_Btn_Get, _T("&Get")), 0, wxALL, 5); sizerBtns-> Add(new wxButton(panel, Exec_Btn_Close, _T("&Close")), 0, wxALL, 5); sizerTop->Add(sizerBtns, 0, wxCENTRE | wxALL, 5); sizerTop->Add(m_textIn, 1, wxGROW | wxALL, 5); sizerTop->Add(m_textErr, 1, wxGROW | wxALL, 5); panel->SetSizer(sizerTop); sizerTop->Fit(this); Show(); } void MyPipeFrame::OnBtnSendFile(wxCommandEvent& WXUNUSED(event)) { #if wxUSE_FILEDLG wxFileDialog filedlg(this, _T("Select file to send")); if ( filedlg.ShowModal() != wxID_OK ) return; wxFFile 
file(filedlg.GetFilename(), _T("r")); wxString data; if ( !file.IsOpened() || !file.ReadAll(&data) ) return; // can't write the entire string at once, this risk overflowing the pipe // and we would dead lock size_t len = data.length(); const wxChar *pc = data.c_str(); while ( len ) { const size_t CHUNK_SIZE = 4096; m_out.Write(pc, len > CHUNK_SIZE ? CHUNK_SIZE : len); // note that not all data could have been written as we don't block on // the write end of the pipe const size_t lenChunk = m_out.LastWrite(); pc += lenChunk; len -= lenChunk; DoGet(); } #endif // wxUSE_FILEDLG } void MyPipeFrame::DoGet() { // we don't have any way to be notified when any input appears on the // stream so we have to poll it :-( DoGetFromStream(m_textIn, m_in); DoGetFromStream(m_textErr, m_err); } void MyPipeFrame::DoGetFromStream(wxTextCtrl *text, wxInputStream& in) { while ( in.CanRead() ) { wxChar buffer[4096]; buffer[in.Read(buffer, WXSIZEOF(buffer) - 1).LastRead()] = _T('\0'); text->AppendText(buffer); } } void MyPipeFrame::DoClose() { m_process->CloseOutput(); DisableInput(); } void MyPipeFrame::DisableInput() { m_textOut->SetEditable(false); FindWindow(Exec_Btn_Send)->Disable(); FindWindow(Exec_Btn_SendFile)->Disable(); FindWindow(Exec_Btn_Close)->Disable(); } void MyPipeFrame::DisableOutput() { FindWindow(Exec_Btn_Get)->Disable(); } void MyPipeFrame::OnClose(wxCloseEvent& event) { if ( m_process ) { // we're not interested in getting the process termination notification // if we are closing it ourselves wxProcess *process = m_process; m_process = NULL; process->SetNextHandler(NULL); process->CloseOutput(); } event.Skip(); } void MyPipeFrame::OnProcessTerm(wxProcessEvent& WXUNUSED(event)) { DoGet(); delete m_process; m_process = NULL; wxLogWarning(_T("The other process has terminated, closing")); DisableInput(); DisableOutput(); }<|fim▁end|>
#endif // __WINDOWS__
<|file_name|>ConfigurationTest.java<|end_file_name|><|fim▁begin|>package configuration; import org.agle4j.framework.constant.ConfigConstant; import org.apache.commons.configuration.ConfigurationException; <|fim▁hole|>import org.junit.Test; /** * commons-configuration 包测试 * 一个java应用程序的配置管理类库 * * @author hanyx * @since */ public class ConfigurationTest { @Test public void testConfiguration() { try { PropertiesConfiguration config = new PropertiesConfiguration(ConfigConstant.CONFIG_FILE) ; config.setProperty("colors.background", "#000000"); config.save(); Integer num = config.getInt("app.upload_limit") ; System.out.println(num); } catch (ConfigurationException e) { e.printStackTrace(); } } }<|fim▁end|>
import org.apache.commons.configuration.PropertiesConfiguration;
<|file_name|>L01.ExeSeq01.py<|end_file_name|><|fim▁begin|><|fim▁hole|>print ("Hello World !!")<|fim▁end|>
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>pub const MEMWRITER_ERROR: &'static str = "MemWriter unexpectedly failed";<|fim▁hole|><|fim▁end|>
pub const SURFACE_ERROR: &'static str = "Surface could not be created";
<|file_name|>the-S3-amongos.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2015 The ChEMBL group. # Author: Nathan Dedman <[email protected]> # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Copyright 2009 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Implementation of an S3-like storage server, using Pymongo, MongoDB and Tornado. Useful to test features that will eventually run on S3, or if you want to run something locally that was once running on S3. We don't support all the features of S3, but it does work with the standard S3 client for the most basic semantics. 
To use the standard S3 client with this module: c = S3.AWSAuthConnection("", "", server="localhost", port=8888, is_secure=False) c.create_bucket("mybucket") c.put("mybucket", "mykey", "a value") print c.get("mybucket", "mykey").body Use s3cmd command line tool: s3cmd mb s3://wibble s3cmd put mytestfile.txt s3://wibble s3cmd rb s3://wibble --force --recursive """ import bisect import datetime import hashlib import os import os.path import urllib import logging import glob import getpass import re from tornado import escape from tornado import httpserver from tornado import ioloop from tornado import web from pymongo import MongoClient from pymongo import ASCENDING import bson from bson.binary import Binary from tornado.log import enable_pretty_logging <|fim▁hole|> http_server = httpserver.HTTPServer(application) # Utilize all CPUs if not debug: http_server.bind(port) http_server.start(0) else: enable_pretty_logging() http_server.listen(port) ioloop.IOLoop.current().start() class mongoS3(web.Application): """Implementation of an S3-like storage server based on MongoDB using PyMongo * Added compatibility with the s3cmd command line utility * File names of arbitrary length are supported (stored as meta data) * Multipart upload suported """ def __init__(self, debug=False): web.Application.__init__(self, [ (r"/", RootHandler), (r"/([^/]+)/(.+)", ObjectHandler), (r"/([^/]+)/", BucketHandler), (r"/ping",StatusHandler), (r'/(favicon.ico)', web.StaticFileHandler, {"path": ""}), # s3cmd ('http://s3.amazonaws.com/', s3cmdlHandler), (r"(http://.+.s3.amazonaws.com.*)", s3cmdlHandler), ],debug=debug) # Lazy connect the client self.client = MongoClient(connect=False) self.S3 = self.client.S3 self.metadata = self.client.metadata class StatusHandler(web.RequestHandler): SUPPORTED_METHODS = ("GET") # Send a simple 'PONG' to show we're alive! 
def get(self): self.set_header('Content-Type', 'application/json') self.finish({'response':'pong','UTC':datetime.datetime.now().isoformat()}) class BaseRequestHandler(web.RequestHandler): SUPPORTED_METHODS = ("PUT", "GET", "DELETE", "HEAD","POST","OPTIONS") def _get_bucket_names(self): return self.application.S3.collection_names(include_system_collections=False) def render_xml(self, value,**kwargs): assert isinstance(value, dict) and len(value) == 1 self.set_header("Content-Type", "application/xml; charset=UTF-8") name = value.keys()[0] parts = [] parts.append('<' + escape.utf8(name) +' xmlns="http://s3.amazonaws.com/doc/2006-03-01/">') parts.append('<Owner><ID>'+getpass.getuser()+'</ID><DisplayName>'+getpass.getuser()+'</DisplayName></Owner>') self._render_parts(value.values()[0], parts) parts.append('</' + escape.utf8(name) + '>') if 'code' in kwargs.keys(): self.set_status(kwargs['code']) self.finish('<?xml version="1.0" encoding="UTF-8"?>' + ''.join(parts)) def _render_parts(self, value, parts=[]): if isinstance(value, (unicode, bytes)): parts.append(escape.xhtml_escape(value)) elif isinstance(value, int) or isinstance(value, long): parts.append(str(value)) elif isinstance(value, datetime.datetime): parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z")) elif isinstance(value, dict): for name, subvalue in value.iteritems(): if not isinstance(subvalue, list): subvalue = [subvalue] for subsubvalue in subvalue: parts.append('<' + escape.utf8(name) + '>') self._render_parts(subsubvalue, parts) parts.append('</' + escape.utf8(name) + '>') else: raise Exception("Unknown S3 value type %r", value) def _error(self,**kwargs): bucket_name = object_name = None if hasattr(self,'bucket_name'): bucket_name = self.bucket_name if hasattr(self,'object_name'): object_name = self.object_name s3errorcodes_bucket = {'NSK':'NoSuchKey','NSB':'NoSuchBucket','BNE':'BucketNotEmpty',"BAE":"BucketAlreadyExists"} s3errorcodes_object = {'NSB':'NoSuchBucket','NSK':'NoSuchKey'} 
errormessage_object = {404:'The specified key does not exist.'} errormessage_bucket = {404:{'NSB':'The specified bucket does not exist.'},409:{'BNE':'The bucket you tried to delete is not empty.','BAE':'The requested bucket name is not available. Please select a different name and try again.'}} if self.__class__.__name__== 'BucketHandler': s3errorcodes = s3errorcodes_bucket errormessage = errormessage_bucket bucket_name = self.bucket_name object_name = None if self.__class__.__name__== 'ObjectHandler': s3errorcodes = s3errorcodes_object errormessage = errormessage_object if hasattr(self,'s3cmd'): returnDict = {'Error':{}} errorDict = returnDict['Error'] errorDict['Code'] = s3errorcodes[kwargs['s3code']] if self.__class__.__name__ == 'BucketHandler': errorDict['Message'] = errormessage[kwargs['code']][kwargs['s3code']] else: errorDict['Message'] = errormessage[kwargs['code']] errorDict['Resource'] = '/%s/%s' % (bucket_name,object_name) self.render_xml(returnDict,code=kwargs['code']) else: raise web.HTTPError(kwargs['code']) class s3cmdlHandler(web.RequestHandler): def prepare(self): # Handle s3 urls here self.s3cmd = True if self.application.settings['debug']: print "%s %s" % (self.__class__.__name__, self.request.method) s3match = re.match('(?:http://)(.+)(?:.s3.amazonaws.com\/)(.*)',self.request.uri) self.prefix = self.get_argument("prefix", u"") self.delimiter = self.get_argument("delimiter", u"") self.partNumber = self.get_argument("partNumber",u"") self.uploadId = self.get_argument("uploadId",u"") try: bucket_name = s3match.group(1) except: bucket_name = False try: if s3match.group(2).startswith('?'): object_name = prefix else: object_name = s3match.group(2) except: object_name = False if object_name: if '?uploads' in object_name: self.uploads = True if '?delete' in object_name: self.delete = True if object_name: object_name = object_name.split('?')[0] if self.request.uri == 'http://s3.amazonaws.com/': self.__class__ = RootHandler if bucket_name and not 
object_name: self.__class__ = BucketHandler self.bucket_name = bucket_name if bucket_name and object_name: self.__class__ = ObjectHandler self.bucket_name = bucket_name self.object_name = object_name class RootHandler(BaseRequestHandler): def get(self): buckets = [] bucket_names = self._get_bucket_names() for bucket_name in bucket_names: bucket_meta = self.application.metadata[bucket_name].find() buckets.append({ "Name": bucket_name, "CreationDate":bucket_meta.next()['created'], }) self.render_xml({"ListAllMyBucketsResult": { "Buckets": {"Bucket": buckets}, }}) class BucketHandler(BaseRequestHandler): def _get_bucket_cursor(self,bucket_name): return self.application.S3[bucket_name] def _remove_bucket(self,bucket_name): self.application.S3[bucket_name].drop() self.application.metadata[bucket_name].drop() def get(self, bucket_name): if hasattr(self,'bucket_name'): bucket_name = self.bucket_name prefix = self.get_argument("prefix", u"") marker = self.get_argument("marker", u"") max_keys = int(self.get_argument("max-keys", 50000)) terse = int(self.get_argument("terse", 0)) if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return objects = [] contents = [] for bucket_object in self._get_bucket_cursor(bucket_name).find({'partNumber': None}): objects.append(bucket_object) start_pos = 0 # To do: # Fix bisection by dict lookup if marker: start_pos = bisect.bisect_right(objects, marker, start_pos) if prefix: start_pos = bisect.bisect_left(objects, prefix, start_pos) truncated = False for _object in objects[start_pos:]: if not _object['object_name'].startswith(prefix): break if len(contents) >= max_keys: truncated = True break c = {"Key": _object['object_name'],"ETag":_object['md5']} if not terse: c.update({ "LastModified":_object['added'], "Size":_object['size'], }) contents.append(c) marker = _object['object_name'] self.render_xml({"ListBucketResult": { "Name": bucket_name, "Prefix": prefix, "Marker": marker, "MaxKeys": max_keys, 
"IsTruncated": truncated, "Contents": contents }}) def put(self, bucket_name): # Create bucket and metadata if hasattr(self,'bucket_name'): bucket_name = self.bucket_name if bucket_name in self._get_bucket_names(): self._error(code=409,s3code='BAE') return self.application.S3.create_collection(bucket_name) self.application.metadata[bucket_name].insert({"created":datetime.datetime.utcnow()}) self.application.S3[bucket_name].ensure_index([("partNumber",ASCENDING)]) self.finish() def delete(self, bucket_name): if hasattr(self,'bucket_name'): bucket_name = self.bucket_name if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return if self.application.S3[bucket_name].count() > 0: self._error(code=409,s3code='BNE') return self._remove_bucket(bucket_name) self.set_status(204) self.finish() def post(self, bucket_name): if hasattr(self,'bucket_name'): bucket_name = self.bucket_name if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return self._remove_bucket(bucket_name) self.set_status(204) self.finish() def head(self,bucket_name): if hasattr(self,'bucket_name'): bucket_name = self.bucket_name if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return self.set_header('Date', '"%s"' % datetime.datetime.utcnow()) self.finish() class ObjectHandler(BaseRequestHandler): def _object_md5(self,bucket_object): object_md5 = hashlib.md5() object_md5.update(bucket_object) return object_md5.hexdigest() def _get_bucket_object(self,**kwargs): if '_id' in kwargs.keys(): object_id = kwargs['_id'] object_field = '_id' if 'object_name' in kwargs.keys(): object_id = kwargs['object_name'] object_field = 'object_name' if 'bucket_name' in kwargs.keys(): bucket_name = kwargs['bucket_name'] return self.application.S3[bucket_name].find_one({object_field:object_id},{'partNumber': None}) def get(self,*args): if hasattr(self,'bucket_name') and hasattr(self,'object_name'): bucket_name = self.bucket_name 
object_name = self.object_name else: bucket_name,object_name = args prefix = self.get_argument("prefix", u"") marker = self.get_argument("marker", u"") acl = self.get_argument("acl", u"") object_name = urllib.unquote(object_name) if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return bucket_object = self._get_bucket_object(bucket_name=bucket_name,object_name=object_name) if bucket_object: self.set_header("Content-Type", "application/unknown") self.set_header('etag', '"%s"' % bucket_object['md5']) self.set_header("Last-Modified", bucket_object['added']) if 'multipart' in bucket_object.keys(): print "MULTIPART" self.set_header("Content-Length",bucket_object['size']) for parts in self.application.S3[bucket_name].find({'object_name':object_name},{'partNumber': {'$exists':'true'}}): print parts['partNumber'] self.write(parts['object']) self.flush() self.finish() else: self.finish(bucket_object['object']) else: self._error(code=404,s3code='NSK') return def put(self, *args): if self.bucket_name and self.object_name: bucket_name = self.bucket_name object_name = self.object_name else: bucket_name,object_name = args original_name = urllib.unquote(object_name) if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return # Insert object and then calculate computed md5 of stored object, size, then update and return # If the object already exists, delete contents and add updated timestamp and update existance = self.application.S3[bucket_name].find({"object_name":original_name}) if existance.count() > 0 and self.partNumber == None: existance_id = existance.next()['_id'] update_object = Binary(self.request.body) object_size = update_object.__len__() object_md5 = self._object_md5(update_object) self.application.S3[bucket_name].update({"_id":existance_id},{'$set': {'object':update_object,'md5':object_md5,'updated':datetime.datetime.utcnow(),'size':object_size}}) self.set_header('etag', '"%s"' % object_md5) 
self.finish() return if self.partNumber: tobeinserted = {'object_name':original_name,'object':Binary(self.request.body),'partNumber':self.partNumber} else: tobeinserted = {'object_name':original_name,'object':Binary(self.request.body)} inserted_object_id = self.application.S3[bucket_name].insert_one(tobeinserted).inserted_id inserted_object = self._get_bucket_object(bucket_name=bucket_name,_id=inserted_object_id) object_size = inserted_object['object'].__len__() object_md5 = self._object_md5(inserted_object['object']) self.application.S3[bucket_name].update({'_id':inserted_object_id},{'$set': {'md5':object_md5,'updated':datetime.datetime.utcnow(),'added':datetime.datetime.utcnow(),'size':object_size}}) self.set_header('etag', '"%s"' % object_md5) self.finish() def post(self, *args): # Add entry into bucket and flag as multipart upload if self.bucket_name and self.object_name: bucket_name = self.bucket_name object_name = self.object_name else: bucket_name,object_name = args if bucket_name not in self._get_bucket_names(): self._error(code=404,s3code='NSB') return original_name = urllib.unquote(object_name) bucket_object = Binary(self.request.body) object_size = bucket_object.__len__() object_md5 = self._object_md5(bucket_object) if self.uploadId: # We have a multipart upload, so iterate over the parts to generate the md5 hash and calculate size # This is the last call made after the mutlipart upload with the uploadId mupmd5 = hashlib.md5() mupsize = 0 for mup in self.application.S3[bucket_name].find({'object_name':object_name}): mupmd5.update(mup['object']) mupsize += mup['size'] self.application.S3[bucket_name].insert_one({'object_name':object_name,'object':bucket_object,'multipart':True,'md5':mupmd5.hexdigest(),'size':mupsize,'added':datetime.datetime.utcnow(),'updated':datetime.datetime.utcnow(),}) self.render_xml({"InitiateMultipartUploadResult": { "Bucket": bucket_name, "Prefix": self.prefix, "Key":object_name, "UploadId":object_name }}) def delete(self, *args): 
if self.bucket_name and self.object_name: bucket_name = self.bucket_name object_name = self.object_name else: bucket_name,object_name = args original_name = urllib.unquote(object_name) bucket_object = self._get_bucket_object(bucket_name=bucket_name,object_name=object_name) if bucket_object: self.set_status(204) self.application.S3[bucket_name].remove({"_id":bucket_object['_id']}) self.finish() else: self._error(code=404,s3code='NSK') return def head(self, *args): if hasattr(self,'bucket_name') and hasattr(self,'object_name'): bucket_name = self.bucket_name object_name = self.object_name else: bucket_name,object_name = args object_name = urllib.unquote(object_name) bucket_object = self._get_bucket_object(bucket_name=bucket_name,object_name=object_name) if bucket_object: self.set_header('etag', '"%s"' % bucket_object['md5']) self.set_header('last-modified', '"%s"' % bucket_object['updated']) self.finish() else: self._error(code=404,s3code='NSK') return if __name__ == "__main__": start(8080,debug=False)<|fim▁end|>
def start(port,debug=False): """Starts the pymongo S3 server""" application = mongoS3(debug)
<|file_name|>load_dsm.cpp<|end_file_name|><|fim▁begin|>/* * This program is free software; you can redistribute it and modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 2 of the license or (at your * option) any later version. * * Authors: Olivier Lapicque <[email protected]> */ ////////////////////////////////////////////// // DSIK Internal Format (DSM) module loader // ////////////////////////////////////////////// #include "stdafx.h" #include "sndfile.h" #pragma pack(1) #define DSMID_RIFF 0x46464952 // "RIFF" #define DSMID_DSMF 0x464d5344 // "DSMF" #define DSMID_SONG 0x474e4f53 // "SONG" #define DSMID_INST 0x54534e49 // "INST" #define DSMID_PATT 0x54544150 // "PATT" typedef struct DSMNOTE { BYTE note,ins,vol,cmd,inf; } Q_PACKED DSMNOTE; typedef struct DSMINST { DWORD id_INST; DWORD inst_len; CHAR filename[13]; BYTE flags; BYTE flags2; BYTE volume; DWORD length; DWORD loopstart; DWORD loopend; DWORD reserved1; WORD c2spd; WORD reserved2; CHAR samplename[28]; } Q_PACKED DSMINST; typedef struct DSMFILEHEADER { DWORD id_RIFF; // "RIFF" DWORD riff_len; DWORD id_DSMF; // "DSMF" DWORD id_SONG; // "SONG" DWORD song_len; } Q_PACKED DSMFILEHEADER; typedef struct DSMSONG { CHAR songname[28]; WORD reserved1; WORD flags; DWORD reserved2; WORD numord; WORD numsmp; WORD numpat; WORD numtrk; BYTE globalvol; BYTE mastervol; BYTE speed; BYTE bpm; BYTE panpos[16]; BYTE orders[128]; } Q_PACKED DSMSONG; typedef struct DSMPATT { DWORD id_PATT; DWORD patt_len; BYTE dummy1; BYTE dummy2; } Q_PACKED DSMPATT; #pragma pack() BOOL CSoundFile::ReadDSM(LPCBYTE lpStream, DWORD dwMemLength) //----------------------------------------------------------- { DSMFILEHEADER *pfh = (DSMFILEHEADER *)lpStream; DSMSONG *psong; DWORD dwMemPos; UINT nPat, nSmp; if ((!lpStream) || (dwMemLength < 1024) || (pfh->id_RIFF != DSMID_RIFF) || (pfh->riff_len + 8 > dwMemLength) || (pfh->riff_len < 1024) || (pfh->id_DSMF != DSMID_DSMF) || 
(pfh->id_SONG != DSMID_SONG) || (pfh->song_len > dwMemLength)) return FALSE; psong = (DSMSONG *)(lpStream + sizeof(DSMFILEHEADER)); dwMemPos = sizeof(DSMFILEHEADER) + pfh->song_len; m_nType = MOD_TYPE_DSM; m_nChannels = psong->numtrk; if (m_nChannels < 4) m_nChannels = 4; if (m_nChannels > 16) m_nChannels = 16; m_nSamples = psong->numsmp; if (m_nSamples > MAX_SAMPLES) m_nSamples = MAX_SAMPLES; m_nDefaultSpeed = psong->speed; m_nDefaultTempo = psong->bpm; m_nDefaultGlobalVolume = psong->globalvol << 2; if ((!m_nDefaultGlobalVolume) || (m_nDefaultGlobalVolume > 256)) m_nDefaultGlobalVolume = 256; m_nSongPreAmp = psong->mastervol & 0x7F; for (UINT iOrd=0; iOrd<MAX_ORDERS; iOrd++) { Order[iOrd] = (BYTE)((iOrd < psong->numord) ? psong->orders[iOrd] : 0xFF); } for (UINT iPan=0; iPan<16; iPan++) { ChnSettings[iPan].nPan = 0x80; if (psong->panpos[iPan] <= 0x80) { ChnSettings[iPan].nPan = psong->panpos[iPan] << 1; } } memcpy(m_szNames[0], psong->songname, 28); nPat = 0; nSmp = 1; while (dwMemPos < dwMemLength - 8) { DSMPATT *ppatt = (DSMPATT *)(lpStream + dwMemPos); DSMINST *pins = (DSMINST *)(lpStream+dwMemPos); // Reading Patterns if (ppatt->id_PATT == DSMID_PATT) { dwMemPos += 8; if (dwMemPos + ppatt->patt_len >= dwMemLength) break; DWORD dwPos = dwMemPos; dwMemPos += ppatt->patt_len; MODCOMMAND *m = AllocatePattern(64, m_nChannels); if (!m) break; PatternSize[nPat] = 64; Patterns[nPat] = m; UINT row = 0; while ((row < 64) && (dwPos + 2 <= dwMemPos)) { UINT flag = lpStream[dwPos++]; if (flag) { UINT ch = (flag & 0x0F) % m_nChannels; if (flag & 0x80) { UINT note = lpStream[dwPos++]; if (note) { if (note <= 12*9) note += 12; m[ch].note = (BYTE)note; } } if (flag & 0x40) { m[ch].instr = lpStream[dwPos++]; } if (flag & 0x20) { m[ch].volcmd = VOLCMD_VOLUME; m[ch].vol = lpStream[dwPos++]; } if (flag & 0x10) { UINT command = lpStream[dwPos++]; UINT param = lpStream[dwPos++]; switch(command) { // 4-bit Panning case 0x08: switch(param & 0xF0) { case 0x00: param <<= 4; break; case 
0x10: command = 0x0A; param = (param & 0x0F) << 4; break;<|fim▁hole|> } break; // Portamentos case 0x11: case 0x12: command &= 0x0F; break; // 3D Sound (?) case 0x13: command = 'X' - 55; param = 0x91; break; default: // Volume + Offset (?) command = ((command & 0xF0) == 0x20) ? 0x09 : 0; } m[ch].command = (BYTE)command; m[ch].param = (BYTE)param; if (command) ConvertModCommand(&m[ch]); } } else { m += m_nChannels; row++; } } nPat++; } else // Reading Samples if ((nSmp <= m_nSamples) && (pins->id_INST == DSMID_INST)) { if (dwMemPos + pins->inst_len >= dwMemLength - 8) break; DWORD dwPos = dwMemPos + sizeof(DSMINST); dwMemPos += 8 + pins->inst_len; memcpy(m_szNames[nSmp], pins->samplename, 28); MODINSTRUMENT *psmp = &Ins[nSmp]; memcpy(psmp->name, pins->filename, 13); psmp->nGlobalVol = 64; psmp->nC4Speed = pins->c2spd; psmp->uFlags = (WORD)((pins->flags & 1) ? CHN_LOOP : 0); psmp->nLength = pins->length; psmp->nLoopStart = pins->loopstart; psmp->nLoopEnd = pins->loopend; psmp->nVolume = (WORD)(pins->volume << 2); if (psmp->nVolume > 256) psmp->nVolume = 256; UINT smptype = (pins->flags & 2) ? RS_PCM8S : RS_PCM8U; ReadSample(psmp, smptype, (LPCSTR)(lpStream+dwPos), dwMemLength - dwPos); nSmp++; } else { break; } } return TRUE; }<|fim▁end|>
case 0x20: command = 0x0E; param = (param & 0x0F) | 0xA0; break; case 0x30: command = 0x0E; param = (param & 0x0F) | 0x10; break; case 0x40: command = 0x0E; param = (param & 0x0F) | 0x20; break; default: command = 0;
<|file_name|>codeFixChangeJSDocSyntax12.ts<|end_file_name|><|fim▁begin|>// @strict: true<|fim▁hole|>////} verify.codeFix({ description: "Change '*' to 'any'", errorCode: 8020, index: 0, newRangeContent: "any", });<|fim▁end|>
/// <reference path='fourslash.ts' /> ////class C { //// p: [|*|]
<|file_name|>FeatureAlgorithms.cpp<|end_file_name|><|fim▁begin|>/* FeatureAlgorithm.cpp */ /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ /* */ /* Copyright (C) 2007 Open Microscopy Environment */ /* Massachusetts Institue of Technology, */ /* National Institutes of Health, */ /* University of Dundee */ /* */ /* */ /* */ /* This library is free software; you can redistribute it and/or */ /* modify it under the terms of the GNU Lesser General Public */ /* License as published by the Free Software Foundation; either */ /* version 2.1 of the License, or (at your option) any later version. */ /* */ /* This library is distributed in the hope that it will be useful, */ /* but WITHOUT ANY WARRANTY; without even the implied warranty of */ /* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU */ /* Lesser General Public License for more details. */ /* */ /* You should have received a copy of the GNU Lesser General Public */ /* License along with this library; if not, write to the Free Software */ /* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ /* */ /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ /* */ /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ /* Written by: */ /* Christopher E. Coletta <colettace [at] mail [dot] nih [dot] gov> */ /* Ilya G. 
Goldberg <goldbergil [at] mail [dot] nih [dot] gov> */ /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~*/ #include "FeatureNames.h" #include "FeatureAlgorithms.h" #include "cmatrix.h" #include <iostream> #include <cstdlib> #include <cmath> //start #including the functions directly once you start pulling them out of cmatrix //#include "transforms/Chebyshev.h" /* global variable */ extern int verbosity; void FeatureAlgorithm::print_info() const { std::cout << typeLabel() << " '" << name << "' (" << n_features << " features) " << std::endl; } bool FeatureAlgorithm::register_task() const { return (FeatureNames::registerFeatureAlgorithm (this)); } //=========================================================================== ChebyshevFourierCoefficients::ChebyshevFourierCoefficients() : FeatureAlgorithm ("Chebyshev-Fourier Coefficients", 32) { // cout << "Instantiating new " << name << " object." << endl; } std::vector<double> ChebyshevFourierCoefficients::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.ChebyshevFourierTransform2D(coeffs.data()); return coeffs; } // Register a static instance of the class using a global bool static bool ChebyshevFourierCoefficientsReg = ComputationTaskInstances::add (new ChebyshevFourierCoefficients); //=========================================================================== ChebyshevCoefficients::ChebyshevCoefficients() : FeatureAlgorithm ("Chebyshev Coefficients", 32) { // cout << "Instantiating new " << name << " object." << endl; } /** * Chebyshev Coefficients are calculated by performing a Chebyshev transform, * and generating a histogram of pixel intensities. 
* */ std::vector<double> ChebyshevCoefficients::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.ChebyshevStatistics2D(coeffs.data(), 0, 32); return coeffs; } // Register a static instance of the class using a global bool static bool ChebyshevCoefficientsReg = ComputationTaskInstances::add (new ChebyshevCoefficients); //=========================================================================== ZernikeCoefficients::ZernikeCoefficients() : FeatureAlgorithm ("Zernike Coefficients", 72) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> ZernikeCoefficients::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); long output_size; // output size is normally 72 IN_matrix.zernike2D(coeffs.data(), &output_size); return coeffs; } // Register a static instance of the class using a global bool static bool ZernikeCoefficientsReg = ComputationTaskInstances::add (new ZernikeCoefficients); //=========================================================================== HaralickTextures::HaralickTextures() : FeatureAlgorithm ("Haralick Textures", 28) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> HaralickTextures::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.HaralickTexture2D(0,coeffs.data()); return coeffs; } // Register a static instance of the class using a global bool static bool HaralickTexturesReg = ComputationTaskInstances::add (new HaralickTextures); //=========================================================================== MultiscaleHistograms::MultiscaleHistograms() : FeatureAlgorithm ("Multiscale Histograms", 24) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> MultiscaleHistograms::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.MultiScaleHistogram(coeffs.data()); return coeffs; } // Register a static instance of the class using a global bool static bool MultiscaleHistogramsReg = ComputationTaskInstances::add (new MultiscaleHistograms); //=========================================================================== TamuraTextures::TamuraTextures() : FeatureAlgorithm ("Tamura Textures", 6) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> TamuraTextures::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.TamuraTexture2D(coeffs.data()); return coeffs; } // Register a static instance of the class using a global bool static bool TamuraTexturesReg = ComputationTaskInstances::add (new TamuraTextures); //=========================================================================== CombFirstFourMoments::CombFirstFourMoments() : FeatureAlgorithm ("Comb Moments", 48) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> CombFirstFourMoments::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.CombFirstFourMoments2D(coeffs.data()); return coeffs; } // Register a static instance of the class using a global bool static bool CombFirstFourMomentsReg = ComputationTaskInstances::add (new CombFirstFourMoments); //=========================================================================== RadonCoefficients::RadonCoefficients() : FeatureAlgorithm ("Radon Coefficients", 12) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> RadonCoefficients::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.RadonTransform2D(coeffs.data()); <|fim▁hole|>} // Register a static instance of the class using a global bool static bool RadonCoefficientsReg = ComputationTaskInstances::add (new RadonCoefficients); //=========================================================================== /* fractal brownian fractal analysis bins - the maximal order of the fractal output - array of the size k the code is based on: CM Wu, YC Chen and KS Hsieh, Texture features for classification of ultrasonic liver images, IEEE Trans Med Imag 11 (1992) (2), pp. 141Ð152. method of approaximation of CC Chen, JS Daponte and MD Fox, Fractal feature analysis and classification in medical imaging, IEEE Trans Med Imag 8 (1989) (2), pp. 133Ð142. */ FractalFeatures::FractalFeatures() : FeatureAlgorithm ("Fractal Features", 20) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> FractalFeatures::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); int bins = n_features; int width = IN_matrix.width; int height = IN_matrix.height; readOnlyPixels IN_matrix_pix_plane = IN_matrix.ReadablePixels(); int x, y, k, bin = 0; int K = ( ( width > height ) ? height : width) / 5; // MIN int step = (int) floor ( K / bins ); if( step < 1 ) step = 1; // avoid an infinite loop if the image is small for( k = 1; k < K; k = k + step ) { double sum = 0.0; for( x = 0; x < width; x++ ) for( y = 0; y < height - k; y++ ) sum += fabs( IN_matrix_pix_plane(y,x) - IN_matrix_pix_plane(y+k,x) ); for( x = 0; x < width - k; x++ ) for( y = 0; y < height; y++ ) sum += fabs( IN_matrix_pix_plane(y,x) - IN_matrix_pix_plane(y,x + k) ); if( bin < bins ) coeffs[ bin++ ] = sum / ( width * ( width - k ) + height * ( height - k ) ); } return coeffs; } // Register a static instance of the class using a global bool static bool FractalFeaturesReg = ComputationTaskInstances::add (new FractalFeatures); //=========================================================================== PixelIntensityStatistics::PixelIntensityStatistics() : FeatureAlgorithm ("Pixel Intensity Statistics", 5) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> PixelIntensityStatistics::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); Moments2 stats; IN_matrix.GetStats (stats); coeffs[0] = stats.mean(); coeffs[1] = IN_matrix.get_median(); coeffs[2] = stats.std(); coeffs[3] = stats.min(); coeffs[4] = stats.max(); return coeffs; } // Register a static instance of the class using a global bool static bool PixelIntensityStatisticsReg = ComputationTaskInstances::add (new PixelIntensityStatistics); //=========================================================================== EdgeFeatures::EdgeFeatures() : FeatureAlgorithm ("Edge Features", 28) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> EdgeFeatures::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); unsigned long EdgeArea = 0; double MagMean=0, MagMedian=0, MagVar=0, MagHist[8]={0,0,0,0,0,0,0,0}, DirecMean=0, DirecMedian=0, DirecVar=0, DirecHist[8]={0,0,0,0,0,0,0,0}, DirecHomogeneity=0, DiffDirecHist[4]={0,0,0,0}; IN_matrix.EdgeStatistics(&EdgeArea, &MagMean, &MagMedian, &MagVar, MagHist, &DirecMean, &DirecMedian, &DirecVar, DirecHist, &DirecHomogeneity, DiffDirecHist, 8); int j, here = 0; coeffs[here++] = double( EdgeArea ); for( j=0; j<4; j++ ){ coeffs[here++] = DiffDirecHist[j]; } for( j=0; j<8; j++ ){ coeffs[here++] = DirecHist[j]; } coeffs[here++] = DirecHomogeneity; coeffs[here++] = DirecMean; coeffs[here++] = DirecMedian; coeffs[here++] = DirecVar; for( j=0; j<8; j++ ){ coeffs[here++] = MagHist[j]; } coeffs[here++] = MagMean; coeffs[here++] = MagMedian; coeffs[here++] = MagVar; return coeffs; } // Register a static instance of the class using a global bool static bool EdgeFeaturesReg = ComputationTaskInstances::add (new EdgeFeatures); 
//=========================================================================== ObjectFeatures::ObjectFeatures() : FeatureAlgorithm ("Otsu Object Features", 34) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> ObjectFeatures::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); unsigned long feature_count=0, AreaMin=0, AreaMax=0; long Euler=0; unsigned int AreaMedian=0, area_histogram[10]={0,0,0,0,0,0,0,0,0,0}, dist_histogram[10]={0,0,0,0,0,0,0,0,0,0}; double centroid_x=0, centroid_y=0, AreaMean=0, AreaVar=0, DistMin=0, DistMax=0, DistMean=0, DistMedian=0, DistVar=0; IN_matrix.FeatureStatistics(&feature_count, &Euler, &centroid_x, &centroid_y, &AreaMin, &AreaMax, &AreaMean, &AreaMedian, &AreaVar, area_histogram, &DistMin, &DistMax, &DistMean, &DistMedian, &DistVar, dist_histogram, 10); int j, here = 0; for( j = 0; j < 10; j++ ){ coeffs[here++] = area_histogram[j]; } coeffs[here++] = AreaMax; coeffs[here++] = AreaMean; coeffs[here++] = AreaMedian; coeffs[here++] = AreaMin; coeffs[here++] = AreaVar; coeffs[here++] = centroid_x; coeffs[here++] = centroid_y; coeffs[here++] = feature_count; for( j = 0; j < 10; j++ ) { coeffs[here++] = dist_histogram[j]; } coeffs[here++] = DistMax; coeffs[here++] = DistMean; coeffs[here++] = DistMedian; coeffs[here++] = DistMin; coeffs[here++] = DistVar; coeffs[here++] = Euler; return coeffs; } // Register a static instance of the class using a global bool static bool ObjectFeaturesReg = ComputationTaskInstances::add (new ObjectFeatures); //=========================================================================== InverseObjectFeatures::InverseObjectFeatures() : FeatureAlgorithm ("Inverse-Otsu Object Features", 34) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> InverseObjectFeatures::execute (const ImageMatrix &IN_matrix) const { ImageMatrix InvMatrix; InvMatrix.copy (IN_matrix); InvMatrix.invert(); static ObjectFeatures ObjFeaturesInst; return (ObjFeaturesInst.execute (InvMatrix)); } // Register a static instance of the class using a global bool static bool InverseObjectFeaturesReg = ComputationTaskInstances::add (new InverseObjectFeatures); //=========================================================================== GaborTextures::GaborTextures() : FeatureAlgorithm ("Gabor Textures", 7) { //cout << "Instantiating new " << name << " object." << endl; } std::vector<double> GaborTextures::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize (n_features, 0); IN_matrix.GaborFilters2D(coeffs.data()); return coeffs; } // Register a static instance of the class using a global bool static bool GaborTexturesReg = ComputationTaskInstances::add (new GaborTextures); //=========================================================================== /* gini compute the gini coefficient paper reference: Roberto G. Abraham, Sidney van den Bergh, Preethi Nair, A NEW APPROACH TO GALAXY MORPHOLOGY. I. ANALYSIS OF THE SLOAN DIGITAL SKY SURVEY EARLY DATA RELEASE, The Astrophysical Journal, vol. 588, p. 218-229, 2003. */ GiniCoefficient::GiniCoefficient() : FeatureAlgorithm ("Gini Coefficient", 1) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> GiniCoefficient::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.resize(n_features, 0); long pixel_index, num_pixels; double *pixels, mean = 0.0, g = 0.0; long i, count = 0; double val; num_pixels = IN_matrix.height * IN_matrix.width; pixels = new double[ num_pixels ]; readOnlyPixels IN_matrix_pix_plane = IN_matrix.ReadablePixels(); for( pixel_index = 0; pixel_index < num_pixels; pixel_index++ ) { val = IN_matrix_pix_plane.array().coeff(pixel_index); if( val > 0 ) { pixels[ count ] = val; mean += val; count++; } } if( count > 0 ) mean = mean / count; qsort( pixels, count, sizeof(double), compare_doubles ); for( i = 1; i <= count; i++) g += (2. * i - count - 1.) * pixels[i-1]; delete [] pixels; if( count <= 1 || mean <= 0.0 ) coeffs[0] = 0.0; // avoid division by zero else coeffs[0] = g / ( mean * count * ( count-1 ) ); return coeffs; } // Register a static instance of the class using a global bool static bool GiniCoefficientReg = ComputationTaskInstances::add (new GiniCoefficient); //=========================================================================== /* Color Histogram compute the Color Histogram */ ColorHistogram::ColorHistogram() : FeatureAlgorithm ("Color Histogram", COLORS_NUM+1) { //cout << "Instantiating new " << name << " object." 
<< endl; } std::vector<double> ColorHistogram::execute (const ImageMatrix &IN_matrix) const { std::vector<double> coeffs; if (verbosity > 3) std::cout << "calculating " << name << std::endl; coeffs.assign(n_features, 0); unsigned int x,y, width = IN_matrix.width, height = IN_matrix.height; HSVcolor hsv_pixel; unsigned long color_index=0; double certainties[COLORS_NUM+1]; readOnlyColors clr_plane = IN_matrix.ReadableColors(); // find the colors for( y = 0; y < height; y++ ) { for( x = 0; x < width; x++ ) { hsv_pixel = clr_plane (y, x); color_index = FindColor( hsv_pixel.h, hsv_pixel.s, hsv_pixel.v, certainties ); coeffs[ color_index ]++; } } /* normalize the color histogram */ for (color_index = 0; color_index <= COLORS_NUM; color_index++) coeffs[color_index] /= (width*height); return coeffs; } // Register a static instance of the class using a global bool static bool ColorHistogramReg = ComputationTaskInstances::add (new ColorHistogram);<|fim▁end|>
return coeffs;
<|file_name|>steering_acceleration.rs<|end_file_name|><|fim▁begin|>use nalgebra::{distance_squared, Point3, Vector3}; use alga::general::Real; use alga::general::AbstractModule; use num_traits::identities::Zero; use IsEnabled; use std::cell::RefCell; use std::rc::Rc; /// Represents result of a steering behaviour computation. User can aggregate /// more than one behaviour result into single acceleration struct. #[derive(Debug, PartialEq)] pub struct SteeringAcceleration<T: Real> { /// linear acceleration component pub linear: Vector3<T>, /// angular acceleration component pub angular: T, } impl<T: Real> SteeringAcceleration<T> { pub fn default() -> SteeringAcceleration<T> { SteeringAcceleration { linear: Vector3::zero(), angular: T::zero(), } } /// Creates a steering acceleration struct using given linear and angular components pub fn new( linear_acceleration: Vector3<T>, angular_acceleration: T, ) -> SteeringAcceleration<T> { SteeringAcceleration { linear: linear_acceleration, angular: angular_acceleration, } } /// Tests whether both linear and angular acceleration compenents are zero pub fn is_zero(&self) -> bool { self.angular.is_zero() && self.linear.is_zero() } <|fim▁hole|> self } /// pub fn add(&mut self, other: SteeringAcceleration<T>) -> &mut Self { self.angular += other.angular; self.linear += other.linear; self } /// pub fn scl(&mut self, scale: T) -> &mut Self { self.angular *= scale; self.linear = self.linear.multiply_by(scale); self } /// pub fn mul_add(&mut self, other: SteeringAcceleration<T>, scale: T) -> &mut Self { self.angular += other.angular * scale; self.linear += other.linear.multiply_by(scale); self } /// pub fn calculate_square_magnitude(&self) -> T { distance_squared(&Point3::from_coordinates(self.linear), &Point3::origin()) + self.angular * self.angular } /// pub fn calculate_magnitude(&self) -> T { self.calculate_square_magnitude().sqrt() } } pub trait SteeringAccelerationCalculator<T: Real>: IsEnabled<T> { fn calculate_steering( &mut 
self, steering_acceleration: Rc<RefCell<SteeringAcceleration<T>>>, ) -> Rc<RefCell<SteeringAcceleration<T>>> { if self.is_enabled() { self.calculate_real_steering(steering_acceleration.clone()); steering_acceleration } else { steering_acceleration.borrow_mut().set_zero(); steering_acceleration } } fn calculate_real_steering( &self, steering_acceleration: Rc<RefCell<SteeringAcceleration<T>>>, ) -> Rc<RefCell<SteeringAcceleration<T>>>; // fn set_enabled(&mut self, is_enabled : bool); } #[cfg(test)] mod test { use super::SteeringAcceleration; use nalgebra::Vector3; #[test] fn is_zero_positive() { let mut acceleration = SteeringAcceleration::new(Vector3::new(1.0f32, 2.0, 3.0), 5.0f32); acceleration.set_zero(); assert!(acceleration.is_zero()); } #[test] fn is_zero_negative() { let acceleration = SteeringAcceleration::new(Vector3::new(1.0f32, 2.0, 3.0), 5.0f32); assert_eq!(acceleration.is_zero(), false); } #[test] fn add() { let mut acceleration = SteeringAcceleration::new(Vector3::new(1.0f32, 1.0, 1.0), 1.0f32); let acceleration2 = SteeringAcceleration::new(Vector3::new(1.0f32, 1.0, 1.0), 1.0f32); acceleration.add(acceleration2); assert_eq!( SteeringAcceleration::new(Vector3::new(2.0f32, 2.0, 2.0), 2.0f32), acceleration ); } #[test] fn scl() { let mut acceleration = SteeringAcceleration::new(Vector3::new(1.0f32, 1.0, 1.0), 1.0f32); acceleration.scl(2.0f32); assert_eq!( SteeringAcceleration::new(Vector3::new(2.0f32, 2.0, 2.0), 2.0), acceleration ); } #[test] fn calculate_square_magnitude() { let acceleration = SteeringAcceleration::new(Vector3::new(2.0f32, 2.0, 2.0), 2.0f32); assert_eq!(16f32, acceleration.calculate_square_magnitude()); } #[test] fn calculate_magnitude() { let acceleration = SteeringAcceleration::new(Vector3::new(2.0f32, 2.0, 2.0), 2.0f32); assert_eq!(4f32, acceleration.calculate_magnitude()); } #[test] fn mul_add() { let mut acceleration = SteeringAcceleration::new(Vector3::new(1.0f32, 1.0, 1.0), 1.0); let acceleration2 = 
SteeringAcceleration::new(Vector3::new(1.0f32, 1.0, 1.0), 1.0); acceleration.mul_add(acceleration2, 2.0); assert_eq!( SteeringAcceleration::new(Vector3::new(3.0f32, 3.0, 3.0), 3.0), acceleration ); } }<|fim▁end|>
/// Sets both compononents to zero pub fn set_zero(&mut self) -> &mut Self { self.angular = T::zero(); self.linear = Vector3::zero();
<|file_name|>OgreGLSLESProgram.cpp<|end_file_name|><|fim▁begin|>/* ----------------------------------------------------------------------------- This source file is part of OGRE (Object-oriented Graphics Rendering Engine) For the latest info, see http://www.ogre3d.org/ Copyright (c) 2000-2014 Torus Knot Software Ltd Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
----------------------------------------------------------------------------- */ #include "OgreGLES2Prerequisites.h" #include "OgreGpuProgram.h" #include "OgreHighLevelGpuProgramManager.h" #include "OgreLogManager.h" #include "OgreRoot.h" #include "OgreStringConverter.h" #include "OgreGLUtil.h" #include "OgreGLES2RenderSystem.h" #include "OgreGLES2Support.h" #include "OgreGLSLESProgram.h" #include "OgreGLSLESProgramManager.h" #include "OgreGLSLPreprocessor.h" namespace Ogre { //----------------------------------------------------------------------- #if !OGRE_NO_GLES2_GLSL_OPTIMISER GLSLESProgram::CmdOptimisation GLSLESProgram::msCmdOptimisation; #endif //----------------------------------------------------------------------- //----------------------------------------------------------------------- GLSLESProgram::GLSLESProgram(ResourceManager* creator, const String& name, ResourceHandle handle, const String& group, bool isManual, ManualResourceLoader* loader) : GLSLShaderCommon(creator, name, handle, group, isManual, loader) , mGLShaderHandle(0) , mGLProgramHandle(0) #if !OGRE_NO_GLES2_GLSL_OPTIMISER , mIsOptimised(false) , mOptimiserEnabled(false) #endif { if (createParamDictionary("GLSLESProgram")) { setupBaseParamDictionary(); ParamDictionary* dict = getParamDictionary(); dict->addParameter(ParameterDef("preprocessor_defines", "Preprocessor defines use to compile the program.", PT_STRING),&msCmdPreprocessorDefines); #if !OGRE_NO_GLES2_GLSL_OPTIMISER dict->addParameter(ParameterDef("use_optimiser", "Should the GLSL optimiser be used. 
Default is false.", PT_BOOL),&msCmdOptimisation); #endif } // Manually assign language now since we use it immediately mSyntaxCode = "glsles"; // There is nothing to load mLoadFromFile = false; } //--------------------------------------------------------------------------- GLSLESProgram::~GLSLESProgram() { // Have to call this here reather than in Resource destructor // since calling virtual methods in base destructors causes crash if (isLoaded()) { unload(); } else { unloadHighLevel(); } } //--------------------------------------------------------------------------- #if OGRE_PLATFORM == OGRE_PLATFORM_ANDROID || OGRE_PLATFORM == OGRE_PLATFORM_EMSCRIPTEN void GLSLESProgram::notifyOnContextLost() { unloadHighLevelImpl(); } #endif GLuint GLSLESProgram::createGLProgramHandle() { if(!Root::getSingleton().getRenderSystem()->getCapabilities()->hasCapability(RSC_SEPARATE_SHADER_OBJECTS)) return 0; if (mGLProgramHandle) return mGLProgramHandle; OGRE_CHECK_GL_ERROR(mGLProgramHandle = glCreateProgram()); if(Root::getSingleton().getRenderSystem()->getCapabilities()->hasCapability(RSC_DEBUG)) { glLabelObjectEXT(GL_PROGRAM_OBJECT_EXT, mGLProgramHandle, 0, mName.c_str()); } return mGLProgramHandle; } bool GLSLESProgram::compile(bool checkErrors) { if (mCompiled == 1) { return true; } // Only create a shader object if glsl es is supported if (isSupported()) { // Create shader object GLenum shaderType = 0x0000; if (mType == GPT_VERTEX_PROGRAM) { shaderType = GL_VERTEX_SHADER; } else if (mType == GPT_FRAGMENT_PROGRAM) { shaderType = GL_FRAGMENT_SHADER; } OGRE_CHECK_GL_ERROR(mGLShaderHandle = glCreateShader(shaderType)); if(Root::getSingleton().getRenderSystem()->getCapabilities()->hasCapability(RSC_DEBUG)) { glLabelObjectEXT(GL_SHADER_OBJECT_EXT, mGLShaderHandle, 0, mName.c_str()); } createGLProgramHandle(); } // Add preprocessor extras and main source if (!mSource.empty()) { const RenderSystemCapabilities* caps = Root::getSingleton().getRenderSystem()->getCapabilities(); // Fix up 
the source in case someone forgot to redeclare gl_Position if (caps->hasCapability(RSC_GLSL_SSO_REDECLARE) && mType == GPT_VERTEX_PROGRAM) { size_t versionPos = mSource.find("#version"); int shaderVersion = StringConverter::parseInt(mSource.substr(versionPos+9, 3)); size_t belowVersionPos = mSource.find('\n', versionPos) + 1; if(shaderVersion >= 300) { // Check that it's missing and that this shader has a main function, ie. not a child shader. if(mSource.find("out highp vec4 gl_Position") == String::npos) { mSource.insert(belowVersionPos, "out highp vec4 gl_Position;\nout highp float gl_PointSize;\n"); } if(mSource.find("#extension GL_EXT_separate_shader_objects : require") == String::npos) { mSource.insert(belowVersionPos, "#extension GL_EXT_separate_shader_objects : require\n"); } } } #if !OGRE_NO_GLES2_GLSL_OPTIMISER const char *source = (getOptimiserEnabled() && getIsOptimised()) ? mOptimisedSource.c_str() : mSource.c_str(); #else const char *source = mSource.c_str(); #endif OGRE_CHECK_GL_ERROR(glShaderSource(mGLShaderHandle, 1, &source, NULL)); } if (checkErrors) GLSLES::logObjectInfo("GLSL ES compiling: " + mName, mGLShaderHandle); OGRE_CHECK_GL_ERROR(glCompileShader(mGLShaderHandle)); // Check for compile errors OGRE_CHECK_GL_ERROR(glGetShaderiv(mGLShaderHandle, GL_COMPILE_STATUS, &mCompiled)); if(!mCompiled && checkErrors) { String message = GLSLES::logObjectInfo("GLSL ES compile log: " + mName, mGLShaderHandle); checkAndFixInvalidDefaultPrecisionError(message); } // Log a message that the shader compiled successfully. 
if (mCompiled && checkErrors) GLSLES::logObjectInfo("GLSL ES compiled: " + mName, mGLShaderHandle); return (mCompiled == 1);<|fim▁hole|> } #if !OGRE_NO_GLES2_GLSL_OPTIMISER //----------------------------------------------------------------------- void GLSLESProgram::setOptimiserEnabled(bool enabled) { if(mOptimiserEnabled != enabled && mOptimiserEnabled && mCompiled == 1) { OGRE_CHECK_GL_ERROR(glDeleteShader(mGLShaderHandle)); if(Root::getSingleton().getRenderSystem()->getCapabilities()->hasCapability(RSC_SEPARATE_SHADER_OBJECTS)) { OGRE_CHECK_GL_ERROR(glDeleteProgram(mGLProgramHandle)); } mGLShaderHandle = 0; mGLProgramHandle = 0; mCompiled = 0; } mOptimiserEnabled = enabled; } #endif //----------------------------------------------------------------------- void GLSLESProgram::createLowLevelImpl(void) { } //----------------------------------------------------------------------- void GLSLESProgram::unloadHighLevelImpl(void) { if (isSupported()) { // LogManager::getSingleton().logMessage("Deleting shader " + StringConverter::toString(mGLShaderHandle) + // " and program " + StringConverter::toString(mGLProgramHandle)); OGRE_CHECK_GL_ERROR(glDeleteShader(mGLShaderHandle)); if(Root::getSingleton().getRenderSystem()->getCapabilities()->hasCapability(RSC_SEPARATE_SHADER_OBJECTS)) { OGRE_CHECK_GL_ERROR(glDeleteProgram(mGLProgramHandle)); } // destroy all programs using this shader GLSLESProgramManager::getSingletonPtr()->destroyAllByShader(this); mGLShaderHandle = 0; mGLProgramHandle = 0; mCompiled = 0; mLinked = 0; } } //----------------------------------------------------------------------- void GLSLESProgram::buildConstantDefinitions() const { // We need an accurate list of all the uniforms in the shader, but we // can't get at them until we link all the shaders into a program object. 
// Therefore instead, parse the source code manually and extract the uniforms createParameterMappingStructures(true); GLSLESProgramManager::getSingleton().extractUniformsFromGLSL(mSource, *mConstantDefs, mName); } //----------------------------------------------------------------------- #if !OGRE_NO_GLES2_GLSL_OPTIMISER String GLSLESProgram::CmdOptimisation::doGet(const void *target) const { return StringConverter::toString(static_cast<const GLSLESProgram*>(target)->getOptimiserEnabled()); } void GLSLESProgram::CmdOptimisation::doSet(void *target, const String& val) { static_cast<GLSLESProgram*>(target)->setOptimiserEnabled(StringConverter::parseBool(val)); } #endif //----------------------------------------------------------------------- void GLSLESProgram::attachToProgramObject( const GLuint programObject ) { // LogManager::getSingleton().logMessage("Attaching shader " + StringConverter::toString(mGLShaderHandle) + // " to program " + StringConverter::toString(programObject)); OGRE_CHECK_GL_ERROR(glAttachShader(programObject, mGLShaderHandle)); } //----------------------------------------------------------------------- void GLSLESProgram::detachFromProgramObject( const GLuint programObject ) { // LogManager::getSingleton().logMessage("Detaching shader " + StringConverter::toString(mGLShaderHandle) + // " to program " + StringConverter::toString(programObject)); OGRE_CHECK_GL_ERROR(glDetachShader(programObject, mGLShaderHandle)); } //----------------------------------------------------------------------- const String& GLSLESProgram::getLanguage(void) const { static const String language = "glsles"; return language; } //----------------------------------------------------------------------- Ogre::GpuProgramParametersSharedPtr GLSLESProgram::createParameters( void ) { GpuProgramParametersSharedPtr params = HighLevelGpuProgram::createParameters(); params->setTransposeMatrices(true); return params; } 
//----------------------------------------------------------------------- void GLSLESProgram::checkAndFixInvalidDefaultPrecisionError( String &message ) { String precisionQualifierErrorString = ": 'Default Precision Qualifier' : invalid type Type for default precision qualifier can be only float or int"; vector< String >::type linesOfSource = StringUtil::split(mSource, "\n"); if( message.find(precisionQualifierErrorString) != String::npos ) { LogManager::getSingleton().logMessage("Fixing invalid type Type for default precision qualifier by deleting bad lines the re-compiling"); // remove relevant lines from source vector< String >::type errors = StringUtil::split(message, "\n"); // going from the end so when we delete a line the numbers of the lines before will not change for(int i = static_cast<int>(errors.size()) - 1 ; i != -1 ; i--) { String & curError = errors[i]; size_t foundPos = curError.find(precisionQualifierErrorString); if(foundPos != String::npos) { String lineNumber = curError.substr(0, foundPos); size_t posOfStartOfNumber = lineNumber.find_last_of(':'); if (posOfStartOfNumber != String::npos) { lineNumber = lineNumber.substr(posOfStartOfNumber + 1, lineNumber.size() - (posOfStartOfNumber + 1)); if (StringConverter::isNumber(lineNumber)) { int iLineNumber = StringConverter::parseInt(lineNumber); linesOfSource.erase(linesOfSource.begin() + iLineNumber - 1); } } } } // rebuild source StringStream newSource; for(size_t i = 0; i < linesOfSource.size() ; i++) { newSource << linesOfSource[i] << "\n"; } mSource = newSource.str(); const char *source = mSource.c_str(); OGRE_CHECK_GL_ERROR(glShaderSource(mGLShaderHandle, 1, &source, NULL)); if (compile()) { LogManager::getSingleton().logMessage("The removing of the lines fixed the invalid type Type for default precision qualifier error."); } else { LogManager::getSingleton().logMessage("The removing of the lines didn't help."); } } } //----------------------------------------------------------------------------- 
void GLSLESProgram::bindProgram(void) { // Tell the Link Program Manager what shader is to become active switch (mType) { case GPT_VERTEX_PROGRAM: GLSLESProgramManager::getSingleton().setActiveVertexShader( this ); break; case GPT_FRAGMENT_PROGRAM: GLSLESProgramManager::getSingleton().setActiveFragmentShader( this ); break; case GPT_GEOMETRY_PROGRAM: default: break; } } //----------------------------------------------------------------------------- void GLSLESProgram::unbindProgram(void) { // Tell the Link Program Manager what shader is to become inactive if (mType == GPT_VERTEX_PROGRAM) { GLSLESProgramManager::getSingleton().setActiveVertexShader( NULL ); } else if (mType == GPT_FRAGMENT_PROGRAM) { GLSLESProgramManager::getSingleton().setActiveFragmentShader( NULL ); } } //----------------------------------------------------------------------------- void GLSLESProgram::bindProgramParameters(GpuProgramParametersSharedPtr params, uint16 mask) { // Link can throw exceptions, ignore them at this point try { // Activate the link program object GLSLESProgramCommon* linkProgram = GLSLESProgramManager::getSingleton().getActiveProgram(); // Pass on parameters from params to program object uniforms linkProgram->updateUniforms(params, mask, mType); } catch (Exception& e) {} } //----------------------------------------------------------------------------- void GLSLESProgram::bindProgramSharedParameters(GpuProgramParametersSharedPtr params, uint16 mask) { // Link can throw exceptions, ignore them at this point try { // Activate the link program object GLSLESProgramCommon* linkProgram = GLSLESProgramManager::getSingleton().getActiveProgram(); // Pass on parameters from params to program object uniforms linkProgram->updateUniformBlocks(params, mask, mType); } catch (Exception& e) {} } //----------------------------------------------------------------------------- void GLSLESProgram::bindProgramPassIterationParameters(GpuProgramParametersSharedPtr params) { // Activate the link 
program object GLSLESProgramCommon* linkProgram = GLSLESProgramManager::getSingleton().getActiveProgram(); // Pass on parameters from params to program object uniforms linkProgram->updatePassIterationUniforms( params ); } //----------------------------------------------------------------------------- size_t GLSLESProgram::calculateSize(void) const { size_t memSize = 0; // Delegate Names memSize += sizeof(GLuint); memSize += sizeof(GLenum); memSize += GpuProgram::calculateSize(); return memSize; } }<|fim▁end|>
<|file_name|>LoginLog.java<|end_file_name|><|fim▁begin|>package com.jeesms.entity.system; import com.fasterxml.jackson.annotation.JsonFormat; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import com.jeesms.common.Constants; import com.jeesms.entity.base.IdEntity; import javax.persistence.*; import java.util.Date; /** * 登录日志Entity * * @author ASHNL * @time 2014-08-22 15:42:51 */ @Entity @Table(name = "T_S_LOGIN_LOG") @JsonIgnoreProperties(value = {"hibernateLazyInitializer", "handler", "fieldHandler"}) public class LoginLog extends IdEntity { /** * 登录用户 */ private User user; /** * 登录时间 */ private Date loginTime; /** * 登录IP */ private String loginIp; /** * 描述 */ private String remark; public LoginLog() { } public LoginLog(String id) { this(); this.id = id; } @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "user_id") @JsonIgnore public User getUser() { return this.user; } public void setUser(User user) { this.user = user; } @Column(name = "LOGIN_TIME") @JsonFormat(pattern = Constants.yyyyMMddHHmmss, locale = Constants.LOCALE_ZH, timezone = Constants.TIMEZONE) public Date getLoginTime() { return this.loginTime; } public void setLoginTime(Date loginTime) { this.loginTime = loginTime; } @Column(name = "LOGIN_IP", length = 255) public String getLoginIp() { return this.loginIp; } public void setLoginIp(String loginIp) { this.loginIp = loginIp; } @Column(name = "REMARK", length = 255) public String getRemark() {<|fim▁hole|> public void setRemark(String remark) { this.remark = remark; } @Transient public String getLoginName() { return user == null ? null : user.getName(); } }<|fim▁end|>
return remark; }
<|file_name|>sequence_feature_column_integration_test.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Integration test for sequence feature columns with SequenceExamples.""" import string import tempfile from google.protobuf import text_format from tensorflow.core.example import example_pb2 from tensorflow.python.feature_column import feature_column_v2 as fc from tensorflow.python.feature_column import sequence_feature_column as sfc from tensorflow.python.ops import parsing_ops from tensorflow.python.platform import test from tensorflow.python.util import compat class SequenceExampleParsingTest(test.TestCase): def test_seq_ex_in_sequence_categorical_column_with_identity(self): self._test_parsed_sequence_example( 'int_list', sfc.sequence_categorical_column_with_identity, 10, [3, 6], [2, 4, 6]) def test_seq_ex_in_sequence_categorical_column_with_hash_bucket(self): self._test_parsed_sequence_example( 'bytes_list', sfc.sequence_categorical_column_with_hash_bucket, 10, [3, 4], [compat.as_bytes(x) for x in 'acg']) def test_seq_ex_in_sequence_categorical_column_with_vocabulary_list(self): self._test_parsed_sequence_example( 'bytes_list', sfc.sequence_categorical_column_with_vocabulary_list, list(string.ascii_lowercase), [3, 4], [compat.as_bytes(x) for x in 'acg']) def 
test_seq_ex_in_sequence_categorical_column_with_vocabulary_file(self): _, fname = tempfile.mkstemp() with open(fname, 'w') as f: f.write(string.ascii_lowercase) self._test_parsed_sequence_example( 'bytes_list', sfc.sequence_categorical_column_with_vocabulary_file, fname, [3, 4], [compat.as_bytes(x) for x in 'acg']) def _test_parsed_sequence_example( self, col_name, col_fn, col_arg, shape, values): """Helper function to check that each FeatureColumn parses correctly. Args: col_name: string, name to give to the feature column. Should match the name that the column will parse out of the features dict. col_fn: function used to create the feature column. For example, sequence_numeric_column. col_arg: second arg that the target feature column is expecting. shape: the expected dense_shape of the feature after parsing into a SparseTensor. values: the expected values at index [0, 2, 6] of the feature after parsing into a SparseTensor. """ example = _make_sequence_example() columns = [ fc.categorical_column_with_identity('int_ctx', num_buckets=100), fc.numeric_column('float_ctx'), col_fn(col_name, col_arg) ] context, seq_features = parsing_ops.parse_single_sequence_example( example.SerializeToString(), context_features=fc.make_parse_example_spec_v2(columns[:2]), sequence_features=fc.make_parse_example_spec_v2(columns[2:])) with self.cached_session() as sess: ctx_result, seq_result = sess.run([context, seq_features]) self.assertEqual(list(seq_result[col_name].dense_shape), shape) self.assertEqual( list(seq_result[col_name].values[[0, 2, 6]]), values) self.assertEqual(list(ctx_result['int_ctx'].dense_shape), [1]) self.assertEqual(ctx_result['int_ctx'].values[0], 5) self.assertEqual(list(ctx_result['float_ctx'].shape), [1]) self.assertAlmostEqual(ctx_result['float_ctx'][0], 123.6, places=1) _SEQ_EX_PROTO = """ context { feature { key: "float_ctx" value { float_list { value: 123.6 } } } feature { key: "int_ctx" value { int64_list { value: 5 } } } } feature_lists { feature_list { 
key: "bytes_list" value { feature { bytes_list { value: "a" } } feature { bytes_list { value: "b" value: "c" } } feature { bytes_list { value: "d" value: "e" value: "f" value: "g" } } } } feature_list { key: "float_list" value { feature { float_list { value: 1.0 } } feature { float_list { value: 3.0 value: 3.0 value: 3.0 } } feature { float_list { value: 5.0 value: 5.0 value: 5.0 value: 5.0 value: 5.0 } } } } feature_list { key: "int_list" value { feature { int64_list { value: 2 value: 2 } } feature { int64_list { value: 4 value: 4 value: 4 value: 4 } } feature { int64_list { value: 6 value: 6 value: 6 value: 6 value: 6 value: 6 } } }<|fim▁hole|>} """ def _make_sequence_example(): example = example_pb2.SequenceExample() return text_format.Parse(_SEQ_EX_PROTO, example) if __name__ == '__main__': test.main()<|fim▁end|>
}
<|file_name|>persistent_list.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! A persistent, thread-safe singly-linked list. use std::mem; use std::sync::Arc; pub struct PersistentList<T> { head: PersistentListLink<T>, length: uint, } struct PersistentListEntry<T> { value: T, next: PersistentListLink<T>, } type PersistentListLink<T> = Option<Arc<PersistentListEntry<T>>>; impl<T> PersistentList<T> where T: Send + Sync { #[inline] pub fn new() -> PersistentList<T> { PersistentList { head: None, length: 0, } } #[inline] pub fn len(&self) -> uint { self.length } #[inline] pub fn front(&self) -> Option<&T> { self.head.as_ref().map(|head| &head.value) } #[inline] pub fn prepend_elem(&self, value: T) -> PersistentList<T> { PersistentList { head: Some(Arc::new(PersistentListEntry { value: value, next: self.head.clone(), })), length: self.length + 1, } } #[inline] pub fn iter<'a>(&'a self) -> PersistentListIterator<'a,T> { // This could clone (and would not need the lifetime if it did), but then it would incur // atomic operations on every call to `.next()`. Bad. PersistentListIterator { entry: self.head.as_ref().map(|head| &**head), } } } impl<T> Clone for PersistentList<T> where T: Send + Sync { fn clone(&self) -> PersistentList<T> { // This establishes the persistent nature of this list: we can clone a list by just cloning // its head. 
PersistentList { head: self.head.clone(), length: self.length, } } }<|fim▁hole|>pub struct PersistentListIterator<'a,T> where T: 'a + Send + Sync { entry: Option<&'a PersistentListEntry<T>>, } impl<'a,T> Iterator<&'a T> for PersistentListIterator<'a,T> where T: Send + Sync { #[inline] fn next(&mut self) -> Option<&'a T> { let entry = match self.entry { None => return None, Some(entry) => { // This `transmute` is necessary to ensure that the lifetimes of the next entry and // this entry match up; the compiler doesn't know this, but we do because of the // reference counting behavior of `Arc`. unsafe { mem::transmute::<&'a PersistentListEntry<T>, &'static PersistentListEntry<T>>(entry) } } }; let value = &entry.value; self.entry = match entry.next { None => None, Some(ref entry) => Some(&**entry), }; Some(value) } }<|fim▁end|>
<|file_name|>animation.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! CSS transitions and animations. <|fim▁hole|>use clock_ticks; use gfx::display_list::OpaqueNode; use layout_task::{LayoutTask, LayoutTaskData}; use msg::constellation_msg::{AnimationState, Msg, PipelineId}; use script::layout_interface::Animation; use script_traits::{ConstellationControlMsg, ScriptControlChan}; use std::mem; use std::sync::mpsc::Sender; use style::animation::{GetMod, PropertyAnimation}; use style::properties::ComputedValues; /// Inserts transitions into the queue of running animations as applicable for the given style /// difference. This is called from the layout worker threads. pub fn start_transitions_if_applicable(new_animations_sender: &Sender<Animation>, node: OpaqueNode, old_style: &ComputedValues, new_style: &mut ComputedValues) { for i in 0..new_style.get_animation().transition_property.0.len() { // Create any property animations, if applicable. let property_animations = PropertyAnimation::from_transition(i, old_style, new_style); for property_animation in property_animations.into_iter() { // Set the property to the initial value. property_animation.update(new_style, 0.0); // Kick off the animation. let now = clock_ticks::precise_time_s() as f32; let animation_style = new_style.get_animation(); let start_time = now + animation_style.transition_delay.0.get_mod(i).seconds(); new_animations_sender.send(Animation { node: node.id(), property_animation: property_animation, start_time: start_time, end_time: start_time + animation_style.transition_duration.0.get_mod(i).seconds(), }).unwrap() } } } /// Processes any new animations that were discovered after style recalculation. 
pub fn process_new_animations(rw_data: &mut LayoutTaskData, pipeline_id: PipelineId) { while let Ok(animation) = rw_data.new_animations_receiver.try_recv() { rw_data.running_animations.push(animation) } let animation_state; if rw_data.running_animations.is_empty() { animation_state = AnimationState::NoAnimationsPresent; } else { animation_state = AnimationState::AnimationsPresent; } rw_data.constellation_chan .0 .send(Msg::ChangeRunningAnimationsState(pipeline_id, animation_state)) .unwrap(); } /// Recalculates style for an animation. This does *not* run with the DOM lock held. pub fn recalc_style_for_animation(flow: &mut Flow, animation: &Animation) { #![allow(unsafe_code)] // #6376 let mut damage = RestyleDamage::empty(); flow.mutate_fragments(&mut |fragment| { if fragment.node.id() != animation.node { return } let now = clock_ticks::precise_time_s() as f32; let mut progress = (now - animation.start_time) / animation.duration(); if progress > 1.0 { progress = 1.0 } if progress <= 0.0 { return } let mut new_style = fragment.style.clone(); animation.property_animation.update(&mut *unsafe { new_style.make_unique() }, progress); damage.insert(incremental::compute_damage(&Some(fragment.style.clone()), &new_style)); fragment.style = new_style }); let base = flow::mut_base(flow); base.restyle_damage.insert(damage); for kid in base.children.iter_mut() { recalc_style_for_animation(kid, animation) } } /// Handles animation updates. pub fn tick_all_animations(layout_task: &LayoutTask, rw_data: &mut LayoutTaskData) { let running_animations = mem::replace(&mut rw_data.running_animations, Vec::new()); let now = clock_ticks::precise_time_s() as f32; for running_animation in running_animations.into_iter() { layout_task.tick_animation(&running_animation, rw_data); if now < running_animation.end_time { // Keep running the animation if it hasn't expired. 
rw_data.running_animations.push(running_animation) } } let ScriptControlChan(ref chan) = layout_task.script_chan; chan.send(ConstellationControlMsg::TickAllAnimations(layout_task.id)).unwrap(); }<|fim▁end|>
use flow::{self, Flow}; use incremental::{self, RestyleDamage};
<|file_name|>function_classification_evaluation.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # This research is supported by the European Union Seventh Framework Programme (FP7/2007-2013), project ASPIRE (Advanced Software Protection: Integration, Research, and Exploitation), under grant agreement no. 609734; on-line at https://aspire-fp7.eu/. */ # The development of portions of the code contained in this file was sponsored by Samsung Electronics UK. */ import math import sys est_file = sys.argv[1] ref_file = sys.argv[2] # returns map[ins]->id def read_mapping(f): m = {} for line in open(f): s = line.split(',') ins = int(s[0], base=16) id = int(s[1]) if id != -1: m[ins] = id return m # Gets a map[group_id] -> set(ins) def make_grouping(m): cs = {} for ins, id in m.iteritems(): if id in cs: cs[id].add(ins) else: cs[id] = set() cs[id].add(ins) return cs def make_grouping_ida(m): cs = {} for ins, id in m.iteritems(): if id == 1: continue if id in cs: cs[id].add(ins) else: cs[id] = set() cs[id].add(ins) return cs # Given a cluster (estimated: set(ins)), get its classes (reference, set(ins)) def classes_for_cluster(cluster, ref_map): classes = set() for ins in cluster: # TODO if ins not in ref_map if ins in ref_map: classes.add(ref_map[ins]) return classes # cluster: set(ins), return: purity(float) def purity_of_cluster(cluster, ref_map): classes = classes_for_cluster(cluster, ref_map) m = float(0) n_c = float(len(cluster)) for c in classes: c_count = float(0) for i in cluster: if i in ref_map and ref_map[i] == c: # TODO: not in ref_map? 
c_count+=1 m = max(m, c_count/n_c) return m def purity(clusters, ref_map): maxes = {} n = float(len(ref_map)) p = float(0) for c in clusters: n_c = float(len(clusters[c])) p += purity_of_cluster(clusters[c], ref_map) * n_c / n return p def entropy_of_cluster(cluster, ref_map): classes = classes_for_cluster(cluster, ref_map) e = float(0) n_c = len(cluster) for c in classes: c_count = float(0) for i in cluster: if i in ref_map and ref_map[i] == c: # TODO: not in ref_map? c_count+=1 #e += c_count / c_ e = e + c_count/n_c * math.log(c_count/n_c) return - e def entropy(clusters, ref_map): maxes = {} n = len(ref_map) e = float(0) for c in clusters: n_c = len(clusters[c]) e += entropy_of_cluster(clusters[c], ref_map) * n_c / n return e def FN(ida_clusters, ida_mapping, truth_clusters): seen = set() fn = float(0) tot = float(0) for fun in truth_clusters: fun_insts = truth_clusters[fun] fn_fun = 0 tot_fun = 0 for inst in fun_insts: if inst in seen: continue seen.add(inst) if inst in ida_mapping: id = ida_mapping[inst] if id in ida_clusters: ida_fun = ida_clusters[id] else: ida_fun = set() else: ida_fun = set() for inst_j in fun_insts: if inst_j in seen: continue tot_fun += 1 if inst_j not in ida_fun: fn_fun += 1 fn += float(fn_fun) / float(len(fun_insts)) tot += float(tot_fun) / float(len(fun_insts)) return (fn, float(fn)/float(tot)) def FP(ida_clusters, truth_clusters, truth_mapping): seen = set() fp = float(0) tot = float(0) #max_fp = 0 #start_fp = 0 for fun in ida_clusters:<|fim▁hole|> #start_fp = fp fp_fun = 0 tot_fun = 0 for inst in fun_insts: if inst in seen: continue seen.add(inst) if inst in truth_mapping: id = truth_mapping[inst] if id in truth_clusters: truth_fun = truth_clusters[id] else: truth_fun = set() else: truth_fun = set() for inst_j in fun_insts: if inst_j in seen: continue tot_fun += 1 if inst_j not in truth_fun: fp_fun += 1 fp += float(fp_fun) / float(len(fun_insts)) tot += float(tot_fun) / float(len(fun_insts)) #if fp - start_fp > max_fp: # print "New 
largest cluster @ %s, size %i" % (str(fun_insts), fp - max_fp) # max_fp = fp - start_fp #print "tot = %i" % tot return (fp, float(fp)/float(tot)) def metrics(ref_map, est_map, metric): #ref = make_grouping(ref_map) clusters = make_grouping(est_map) print "Number of classes: %i" % len(clusters) print "Number of instructions: %i" % len(est_map) p = metric(clusters, ref_map) print "The evaluation of the mapping: %f" % p #reference_mapping = read_mapping("E:\\tmp\\reference_mapping_%s" % f) #estimated_mapping = read_mapping("E:\\tmp\\estimated_mapping_%s" % f) reference_mapping = read_mapping(ref_file) estimated_mapping = read_mapping(est_file) reference_functions = make_grouping(reference_mapping) estimated_functions = make_grouping_ida(estimated_mapping) fn = FN(estimated_functions, estimated_mapping, reference_functions) print "FN,%i,%f" % (fn[0], fn[1]) #fp = FP(estimated_functions, reference_functions, reference_mapping) #print "FP,%i,%f" % (fp[0], fp[1]) #print "FP,%i,%f,FN,%i,%f" % (fp[0], fp[1], fn[0], fn[1]) #for m in [purity, entropy]: #print "BEGIN %s METRICS: " % str(m) #print "" #print "reference -> estimated" #metrics(reference_mapping, estimated_mapping, m) #print "" #print "estimated -> reference" #metrics(estimated_mapping, reference_mapping, m) #print "" #print "========="<|fim▁end|>
fun_insts = ida_clusters[fun]
<|file_name|>event_serializer.d.ts<|end_file_name|><|fim▁begin|>export declare function serializeGenericEvent(e: Event): { [key: string]: any; }; export declare function serializeEventWithTarget(e: Event): {<|fim▁hole|>}; export declare function serializeMouseEvent(e: MouseEvent): { [key: string]: any; }; export declare function serializeKeyboardEvent(e: KeyboardEvent): { [key: string]: any; }; export declare function serializeTransitionEvent(e: TransitionEvent): { [key: string]: any; };<|fim▁end|>
[key: string]: any;
<|file_name|>shape_base.py<|end_file_name|><|fim▁begin|>from . import numeric as _nx from .numeric import asanyarray, newaxis def atleast_1d(*arys): res = [] for ary in arys:<|fim▁hole|> else : result = ary res.append(result) if len(res) == 1: return res[0] else: return res def atleast_2d(*arys): res = [] for ary in arys: ary = asanyarray(ary) if len(ary.shape) == 0 : result = ary.reshape(1, 1) elif len(ary.shape) == 1 : result = ary[newaxis,:] else : result = ary res.append(result) if len(res) == 1: return res[0] else: return res def vstack(tup): return _nx.concatenate([atleast_2d(_m) for _m in tup], 0) def hstack(tup): arrs = [atleast_1d(_m) for _m in tup] # As a special case, dimension 0 of 1-dimensional arrays is "horizontal" if arrs[0].ndim == 1: return _nx.concatenate(arrs, 0) else: return _nx.concatenate(arrs, 1)<|fim▁end|>
ary = asanyarray(ary) if len(ary.shape) == 0 : result = ary.reshape(1)
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import codecs from ConfigParser import ConfigParser import os import subprocess import sys import six import twiggy from twiggy import log from twiggy.levels import name2level from xdg import BaseDirectory def asbool(some_value): """ Cast config values to boolean. """ return six.text_type(some_value).lower() in [ 'y', 'yes', 't', 'true', '1', 'on' ] def get_service_password(service, username, oracle=None, interactive=False): """ Retrieve the sensitive password for a service by: * retrieving password from a secure store (@oracle:use_keyring, default) * asking the password from the user (@oracle:ask_password, interactive) * executing a command and use the output as password (@oracle:eval:<command>) Note that the keyring may or may not be locked which requires that the user provides a password (interactive mode). <|fim▁hole|> .. seealso:: https://bitbucket.org/kang/python-keyring-lib """ import getpass import keyring password = None if not oracle or oracle == "@oracle:use_keyring": password = keyring.get_password(service, username) if interactive and password is None: # -- LEARNING MODE: Password is not stored in keyring yet. oracle = "@oracle:ask_password" password = get_service_password(service, username, oracle, interactive=True) if password: keyring.set_password(service, username, password) elif interactive and oracle == "@oracle:ask_password": prompt = "%s password: " % service password = getpass.getpass(prompt) elif oracle.startswith('@oracle:eval:'): command = oracle[13:] p = subprocess.Popen( command, shell=True, stdout=subprocess.PIPE, #stderr=subprocess.STDOUT ) password = p.stdout.read()[:-1] if password is None: die("MISSING PASSWORD: oracle='%s', interactive=%s for service=%s" % (oracle, interactive, service)) return password def load_example_rc(): fname = os.path.join( os.path.dirname(__file__), 'docs/configuration.rst' ) with open(fname, 'r') as f: readme = f.read() example = readme.split('.. 
example')[1][4:] return example error_template = """ ************************************************* * There was a problem with your bugwarriorrc * * {msg} * Here's an example template to help: * ************************************************* {example}""" def die(msg): log.options(suppress_newlines=False).critical( error_template, msg=msg, example=load_example_rc(), ) sys.exit(1) def validate_config(config, main_section): if not config.has_section(main_section): die("No [%s] section found." % main_section) twiggy.quickSetup( name2level(config.get(main_section, 'log.level')), config.get(main_section, 'log.file') ) if not config.has_option(main_section, 'targets'): die("No targets= item in [%s] found." % main_section) targets = config.get(main_section, 'targets') targets = filter(lambda t: len(t), [t.strip() for t in targets.split(",")]) if not targets: die("Empty targets= item in [%s]." % main_section) for target in targets: if target not in config.sections(): die("No [%s] section found." % target) # Validate each target one by one. for target in targets: service = config.get(target, 'service') if not service: die("No 'service' in [%s]" % target) if service not in SERVICES: die("'%s' in [%s] is not a valid service." 
% (service, target)) # Call the service-specific validator SERVICES[service].validate_config(config, target) def load_config(main_section): config = ConfigParser({'log.level': "DEBUG", 'log.file': None}) path = None first_path = BaseDirectory.load_first_config('bugwarrior') if first_path is not None: path = os.path.join(first_path, 'bugwarriorrc') old_path = os.path.expanduser("~/.bugwarriorrc") if path is None or not os.path.exists(path): if os.path.exists(old_path): path = old_path else: path = os.path.join(BaseDirectory.save_config_path('bugwarrior'), 'bugwarriorrc') config.readfp( codecs.open( path, "r", "utf-8", ) ) config.interactive = False # TODO: make this a command-line option validate_config(config, main_section) return config def get_taskrc_path(conf, main_section): path = '~/.taskrc' if conf.has_option(main_section, 'taskrc'): path = conf.get(main_section, 'taskrc') return os.path.normpath( os.path.expanduser(path) ) # This needs to be imported here and not above to avoid a circular-import. from bugwarrior.services import SERVICES<|fim▁end|>
:param service: Service name, may be key into secure store (as string). :param username: Username for the service (as string). :param oracle: Hint which password oracle strategy to use. :return: Retrieved password (as string)
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>use std::fs::File; use std::io::prelude::*; use std::path::Path; pub fn read_file<P: AsRef<Path>>(path: P) -> String { let mut contents = String::new(); File::open(path).unwrap().read_to_string(&mut contents).unwrap(); contents<|fim▁hole|><|fim▁end|>
}
<|file_name|>RobotsActivity.java<|end_file_name|><|fim▁begin|>package com.holdit.feedthemax; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.view.View; import android.widget.ImageButton; import android.widget.RelativeLayout; import android.widget.TextView; import static com.holdit.feedthemax.MainActivity.rbt1qu; import static com.holdit.feedthemax.R.id.rbt1price; import static com.holdit.feedthemax.R.id.rbt2price; <|fim▁hole|>/** * Created by Antek on 2017-05-10. * Activity started after robot button pressed */ public class RobotsActivity extends AppCompatActivity { @Override public void onCreate(Bundle savedInstanceState){ super.onCreate(savedInstanceState); setContentView(R.layout.robots_layout); //final MainActivity mact = new MainActivity(); RelativeLayout rbt1rl = (RelativeLayout) findViewById(R.id.rbt1rl); RelativeLayout rbt2rl = (RelativeLayout) findViewById(R.id.rbt2rl); RelativeLayout rbt3rl = (RelativeLayout) findViewById(R.id.rbt3rl); final TextView rbt1prc = (TextView) findViewById(rbt1price); rbt1prc.setText(MainActivity.rbt1price + "C"); final TextView rbt2prc = (TextView) findViewById(rbt2price); rbt2prc.setText(MainActivity.rbt2price + "C"); final TextView rbt3prc = (TextView) findViewById(rbt3price); rbt3prc.setText(MainActivity.rbt3price + "C"); rbt1rl.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { if (MainActivity.cookies >= MainActivity.rbt1price) { MainActivity.cookies -= MainActivity.rbt1price; MainActivity.cps ++; MainActivity.rbt1qu++; MainActivity.rbt1price = (int) (100 * Math.pow(1.15, MainActivity.rbt1qu)); rbt1prc.setText(MainActivity.rbt1price + "C"); } } }); rbt2rl.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { if (MainActivity.cookies >= MainActivity.rbt2price) { MainActivity.cookies -= MainActivity.rbt2price; MainActivity.cps += 8; MainActivity.rbt2qu++; MainActivity.rbt2price = (int) (1100 * Math.pow(1.15, MainActivity.rbt2qu)); 
rbt2prc.setText(MainActivity.rbt2price + "C"); } } }); rbt3rl.setOnClickListener(new View.OnClickListener() { public void onClick(View v) { if (MainActivity.cookies >= MainActivity.rbt3price) { MainActivity.cookies -= MainActivity.rbt3price; MainActivity.cps += 47; MainActivity.rbt3qu++; MainActivity.rbt3price = (int) (12000 * Math.pow(1.15, MainActivity.rbt3qu)); rbt3prc.setText(MainActivity.rbt3price + "C"); } } }); } }<|fim▁end|>
import static com.holdit.feedthemax.R.id.rbt3price;
<|file_name|>initialization.py<|end_file_name|><|fim▁begin|>from graphics_module.objects import *<|fim▁hole|> def make_pixels_array_config_based(config): if config.colorscheme == "b&w": c = Color() elif config.colorscheme == "light": c = Color(r=245,g=235,b=234,a=0.85) #"light" or whatever to be slightly colorized dots if config.aplha == True: lol = 4 #random influenced aplha #and so on def get_color(config): if not config:#has attribute "lower_limit": I don't know lower_limit = 230<|fim▁end|>
import numpy as np def make_pixels_array_basic(amount): return np.full(10,Pixel(), dtype=np.object)
<|file_name|>direct_client.py<|end_file_name|><|fim▁begin|># Copyright (c) 2010-2012 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied.<|fim▁hole|>Internal client library for making calls directly to the servers rather than through the proxy. """ import socket from httplib import HTTPException from time import time from urllib import quote as _quote from eventlet import sleep, Timeout from swift.common.bufferedhttp import http_connect from swiftclient import ClientException, json_loads from swift.common.utils import normalize_timestamp from swift.common.http import HTTP_NO_CONTENT, HTTP_INSUFFICIENT_STORAGE, \ is_success, is_server_error def quote(value, safe='/'): if isinstance(value, unicode): value = value.encode('utf8') return _quote(value, safe) def direct_get_account(node, part, account, marker=None, limit=None, prefix=None, delimiter=None, conn_timeout=5, response_timeout=15): """ Get listings directly from the account server. :param node: node dictionary from the ring :param part: partition the account is on :param account: account name :param marker: marker query :param limit: query limit :param prefix: prefix query :param delimeter: delimeter for the query :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :returns: a tuple of (response headers, a list of containers) The response headers will be a dict and all header names will be lowercase. 
""" path = '/' + account qs = 'format=json' if marker: qs += '&marker=%s' % quote(marker) if limit: qs += '&limit=%d' % limit if prefix: qs += '&prefix=%s' % quote(prefix) if delimiter: qs += '&delimiter=%s' % quote(delimiter) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'GET', path, query_string=qs) with Timeout(response_timeout): resp = conn.getresponse() if not is_success(resp.status): resp.read() raise ClientException( 'Account server %s:%s direct GET %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value if resp.status == HTTP_NO_CONTENT: resp.read() return resp_headers, [] return resp_headers, json_loads(resp.read()) def direct_head_container(node, part, account, container, conn_timeout=5, response_timeout=15): """ Request container information directly from the container server. 
:param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :returns: a dict containing the response's headers (all header names will be lowercase) """ path = '/%s/%s' % (account, container) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'HEAD', path) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if not is_success(resp.status): raise ClientException( 'Container server %s:%s direct HEAD %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value return resp_headers def direct_get_container(node, part, account, container, marker=None, limit=None, prefix=None, delimiter=None, conn_timeout=5, response_timeout=15): """ Get container listings directly from the container server. :param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param marker: marker query :param limit: query limit :param prefix: prefix query :param delimeter: delimeter for the query :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :returns: a tuple of (response headers, a list of objects) The response headers will be a dict and all header names will be lowercase. 
""" path = '/%s/%s' % (account, container) qs = 'format=json' if marker: qs += '&marker=%s' % quote(marker) if limit: qs += '&limit=%d' % limit if prefix: qs += '&prefix=%s' % quote(prefix) if delimiter: qs += '&delimiter=%s' % quote(delimiter) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'GET', path, query_string=qs) with Timeout(response_timeout): resp = conn.getresponse() if not is_success(resp.status): resp.read() raise ClientException( 'Container server %s:%s direct GET %s gave stats %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value if resp.status == HTTP_NO_CONTENT: resp.read() return resp_headers, [] return resp_headers, json_loads(resp.read()) def direct_delete_container(node, part, account, container, conn_timeout=5, response_timeout=15, headers={}): path = '/%s/%s' % (account, container) headers['X-Timestamp'] = normalize_timestamp(time()) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'DELETE', path, headers) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if not is_success(resp.status): raise ClientException( 'Container server %s:%s direct DELETE %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) def direct_head_object(node, part, account, container, obj, conn_timeout=5, response_timeout=15): """ Request object information directly from the object server. 
:param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param obj: object name :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :returns: a dict containing the response's headers (all header names will be lowercase) """ path = '/%s/%s/%s' % (account, container, obj) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'HEAD', path) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if not is_success(resp.status): raise ClientException( 'Object server %s:%s direct HEAD %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value return resp_headers def direct_get_object(node, part, account, container, obj, conn_timeout=5, response_timeout=15, resp_chunk_size=None, headers={}): """ Get object directly from the object server. :param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param obj: object name :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :param resp_chunk_size: if defined, chunk size of data to read. :param headers: dict to be passed into HTTPConnection headers :returns: a tuple of (response headers, the object's contents) The response headers will be a dict and all header names will be lowercase. 
""" path = '/%s/%s/%s' % (account, container, obj) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'GET', path, headers=headers) with Timeout(response_timeout): resp = conn.getresponse() if not is_success(resp.status): resp.read() raise ClientException( 'Object server %s:%s direct GET %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) if resp_chunk_size: def _object_body(): buf = resp.read(resp_chunk_size) while buf: yield buf buf = resp.read(resp_chunk_size) object_body = _object_body() else: object_body = resp.read() resp_headers = {} for header, value in resp.getheaders(): resp_headers[header.lower()] = value return resp_headers, object_body def direct_put_object(node, part, account, container, name, contents, content_length=None, etag=None, content_type=None, headers=None, conn_timeout=5, response_timeout=15, resp_chunk_size=None): """ Put object directly from the object server. :param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param name: object name :param contents: a string to read object data from :param content_length: value to send as content-length header :param etag: etag of contents :param content_type: value to send as content-type header :param headers: additional headers to include in the request :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :param chunk_size: if defined, chunk size of data to send. 
:returns: etag from the server response """ # TODO: Add chunked puts path = '/%s/%s/%s' % (account, container, name) if headers is None: headers = {} if etag: headers['ETag'] = etag.strip('"') if content_length is not None: headers['Content-Length'] = str(content_length) if content_type is not None: headers['Content-Type'] = content_type else: headers['Content-Type'] = 'application/octet-stream' if not contents: headers['Content-Length'] = '0' headers['X-Timestamp'] = normalize_timestamp(time()) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'PUT', path, headers=headers) conn.send(contents) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if not is_success(resp.status): raise ClientException( 'Object server %s:%s direct PUT %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) return resp.getheader('etag').strip('"') def direct_post_object(node, part, account, container, name, headers, conn_timeout=5, response_timeout=15): """ Direct update to object metadata on object server. 
:param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param name: object name :param headers: headers to store as metadata :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :raises ClientException: HTTP POST request failed """ path = '/%s/%s/%s' % (account, container, name) headers['X-Timestamp'] = normalize_timestamp(time()) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'POST', path, headers=headers) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if not is_success(resp.status): raise ClientException( 'Object server %s:%s direct POST %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) def direct_delete_object(node, part, account, container, obj, conn_timeout=5, response_timeout=15, headers={}): """ Delete object directly from the object server. 
:param node: node dictionary from the ring :param part: partition the container is on :param account: account name :param container: container name :param obj: object name :param conn_timeout: timeout in seconds for establishing the connection :param response_timeout: timeout in seconds for getting the response :returns: response from server """ path = '/%s/%s/%s' % (account, container, obj) headers['X-Timestamp'] = normalize_timestamp(time()) with Timeout(conn_timeout): conn = http_connect(node['ip'], node['port'], node['device'], part, 'DELETE', path, headers) with Timeout(response_timeout): resp = conn.getresponse() resp.read() if not is_success(resp.status): raise ClientException( 'Object server %s:%s direct DELETE %s gave status %s' % (node['ip'], node['port'], repr('/%s/%s%s' % (node['device'], part, path)), resp.status), http_host=node['ip'], http_port=node['port'], http_device=node['device'], http_status=resp.status, http_reason=resp.reason) def retry(func, *args, **kwargs): """ Helper function to retry a given function a number of times. 
:param func: callable to be called :param retries: number of retries :param error_log: logger for errors :param args: arguments to send to func :param kwargs: keyward arguments to send to func (if retries or error_log are sent, they will be deleted from kwargs before sending on to func) :returns: restult of func """ retries = 5 if 'retries' in kwargs: retries = kwargs['retries'] del kwargs['retries'] error_log = None if 'error_log' in kwargs: error_log = kwargs['error_log'] del kwargs['error_log'] attempts = 0 backoff = 1 while attempts <= retries: attempts += 1 try: return attempts, func(*args, **kwargs) except (socket.error, HTTPException, Timeout), err: if error_log: error_log(err) if attempts > retries: raise except ClientException, err: if error_log: error_log(err) if attempts > retries or not is_server_error(err.http_status) or \ err.http_status == HTTP_INSUFFICIENT_STORAGE: raise sleep(backoff) backoff *= 2 # Shouldn't actually get down here, but just in case. if args and 'ip' in args[0]: raise ClientException('Raise too many retries', http_host=args[0]['ip'], http_port=args[0]['port'], http_device=args[0]['device']) else: raise ClientException('Raise too many retries')<|fim▁end|>
# See the License for the specific language governing permissions and # limitations under the License. """
<|file_name|>AbstractMapLayerModel.js<|end_file_name|><|fim▁begin|>/** * @class Oskari.mapframework.domain.AbstractLayer * * Superclass for layer objects copy pasted from wmslayer. Need to check * if something should be moved back to wmslayer. Nothing else currently uses this. */ Oskari.clazz.define('Oskari.mapframework.domain.AbstractMapLayerModel', /** * @method create called automatically on construction * @static */ function (params, options) { /* Internal id for this map layer */ this._id = null; /* Name of this layer */ this._name = null; /* Description for layer */ this._description = null; /* either NORMAL_LAYER, GROUP_LAYER or BASE_LAYER */ this._type = null; /* either WMS, WMTS, WFS or VECTOR */ this._layerType = ''; /* optional params */ this._params = params || {}; /* optional options */ this._options = options || {}; /* modules can "tag" the layers with this for easier reference */ this._metaType = null; /* Max scale for layer */ this._maxScale = null; /* Min scale for layer */ this._minScale = null; /* is layer visible */ this._visible = null; /* opacity from 0 to 100 */ this._opacity = null; /* visible layer switch off enable/disable */ this._isSticky = null; this._inspireName = null; this._organizationName = null; this._dataUrl = null; this._orderNumber = null; /* * Array of sublayers. Notice that only type BASE_LAYER can * have sublayers. */ this._subLayers = []; /* Array of styles that this layer supports */ this._styles = []; /* Currently selected style */ this._currentStyle = null; /* Legend image location */ this._legendImage = null; /* is it possible to ask for feature info */ this._featureInfoEnabled = null; /* is this layer queryable (GetFeatureInfo) boolean */ this._queryable = null; this._queryFormat = null; // f.ex. 
permissions.publish this._permissions = {}; // if given, tells where the layer has content // array of Openlayers.Geometry[] objects if already processed from _geometryWKT this._geometry = []; // wellknown text for polygon geometry this._geometryWKT = null; // Tools array for layer specific functions this._tools = []; /* link to metadata service */ this._metadataIdentifier = null; this._backendStatus = null; }, { /** * @method setId * @param {String} id * unique identifier for map layer used to reference the layer internally * (e.g. MapLayerService) */ setId: function (id) { this._id = id;<|fim▁hole|> * @method getId * @return {String} * unique identifier for map layer used to reference the layer internally * (e.g. MapLayerService) */ getId: function () { return this._id; }, /** * @method setQueryFormat * @param {String} queryFormat * f.ex. 'text/html' */ setQueryFormat: function (queryFormat) { this._queryFormat = queryFormat; }, /** * @method getQueryFormat * f.ex. 'text/html' * @return {String} */ getQueryFormat: function () { return this._queryFormat; }, /** * @method setName * @param {String} name * name for the maplayer that is shown in UI */ setName: function (name) { this._name = name; }, /** * @method getName * @return {String} maplayer UI name */ getName: function () { return this._name; }, /** * @method setType * @param {String} type * layer type (e.g. NORMAL, BASE, GROUP) * * Not as type WMS or Vector but base or normal layer. 
* See #setAsBaseLayer(), #setAsGroupLayer() and #setAsNormalLayer() */ setType: function (type) { this._type = type; }, /** * @method getType * @return {String} maplayer type (BASE/NORMAL) */ getType: function () { return this._type; }, /** * @method setDataUrl * @param {String} param * URL string used to show more info about the layer */ setDataUrl: function (param) { this._dataUrl = param; }, /** * @method getDataUrl * @return {String} URL string used to show more info about the layer */ getDataUrl: function () { return this._dataUrl; }, /** * @method setOrganizationName * @param {String} param * organization name under which the layer is listed in UI */ setOrganizationName: function (param) { this._organizationName = param; }, /** * @method getOrganizationName * @return {String} organization name under which the layer is listed in UI */ getOrganizationName: function () { return this._organizationName; }, /** * @method setInspireName * @param {String} param * inspire theme name under which the layer is listed in UI */ setInspireName: function (param) { this._inspireName = param; }, /** * @method getInspireName * @return {String} inspire theme name under which the layer is listed in UI */ getInspireName: function () { return this._inspireName; }, /** * @method setFeatureInfoEnabled * @return {Boolean} featureInfoEnabled true to enable feature info functionality */ setFeatureInfoEnabled: function (featureInfoEnabled) { this._featureInfoEnabled = featureInfoEnabled; }, /** * @method isFeatureInfoEnabled * @return {Boolean} true if feature info functionality should be enabled */ isFeatureInfoEnabled: function () { if (this._featureInfoEnabled === true) { return true; } return false; }, /** * @method setDescription * @param {String} description * map layer description text */ setDescription: function (description) { this._description = description; }, /** * @method getDescription * @return {String} map layer description text */ getDescription: function () { return 
this._description; }, /** * @method addSubLayer * @param {Oskari.mapframework.domain.WmsLayer} map layer * actual sub map layer that is used for a given scale range (only for * base & group layers) * * If layer has sublayers, it is basically a "metalayer" for maplayer ui * purposes and actual map images to show are done with sublayers */ addSubLayer: function (layer) { this._subLayers.push(layer); }, /** * @method getSubLayers * @return {Oskari.mapframework.domain.WmsLayer[]} array of sub map layers * * If layer has sublayers, it is basically a "metalayer" for maplayer ui * purposes and actual map images to show are done with sublayers */ getSubLayers: function () { return this._subLayers; }, /** * @method setMaxScale * @param {Number} maxScale * largest scale when the layer is shown (otherwise not shown in map and * "greyed out"/disabled in ui) */ setMaxScale: function (maxScale) { this._maxScale = maxScale; }, /** * @method getMaxScale * @return {Number} * largest scale when the layer is shown (otherwise not shown in map and * "greyed out"/disabled in ui) */ getMaxScale: function () { return this._maxScale; }, /** * @method setMinScale * @param {Number} minScale * smallest scale when the layer is shown (otherwise not shown in map and * "greyed out"/disabled in ui) */ setMinScale: function (minScale) { this._minScale = minScale; }, /** * @method getMinScale * @return {Number} * smallest scale when the layer is shown (otherwise not shown in map and * "greyed out"/disabled in ui) */ getMinScale: function () { return this._minScale; }, /** * @method setOrderNumber * @param {Number} orderNumber */ setOrderNumber: function (orderNumber) { this._orderNumber = orderNumber; }, /** * @method getOrderNumber * @return {Number} orderNumber */ getOrderNumber: function () { return this._orderNumber; }, /** * @method isVisible * @return {Boolean} true if this is should be shown */ isVisible: function () { return this._visible === true; }, /** * @method setVisible * @param 
{Boolean} visible true if this is should be shown */ setVisible: function (visible) { this._visible = visible; }, /** * @method setOpacity * @param {Number} opacity * 0-100 in percents */ setOpacity: function (opacity) { this._opacity = opacity; }, /** * @method getOpacity * @return {Number} opacity * 0-100 in percents */ getOpacity: function () { return this._opacity; }, /** * @method setGeometryWKT * Set geometry as wellknown text * @param {String} value * WKT geometry */ setGeometryWKT: function (value) { this._geometryWKT = value; }, /** * @method getGeometryWKT * Get geometry as wellknown text * @return {String} WKT geometry */ getGeometryWKT: function () { return this._geometryWKT; }, /** * @method setGeometry * @param {OpenLayers.Geometry.Geometry[]} value * array of WKT geometries or actual OpenLayer geometries */ setGeometry: function (value) { this._geometry = value; }, /** * @method getGeometry * @return {OpenLayers.Geometry.Geometry[]} * array of WKT geometries or actual OpenLayer geometries */ getGeometry: function () { return this._geometry; }, /** * @method addPermission * @param {String} action * action key that we want to add permission setting for * @param {String} permission * actual permission setting for action */ addPermission: function (action, permission) { this._permissions[action] = permission; }, /** * @method removePermission * @param {String} action * action key from which permission setting should be removed */ removePermission: function (action) { this._permissions[action] = null; delete this._permissions[action]; }, /** * @method getPermission * @param {String} action * action key for which permission we want * @return {String} permission setting for given action */ getPermission: function (action) { return this._permissions[action]; }, /** * @method getMetadataIdentifier * Gets the identifier (uuid style) for getting layers metadata * @return {String} */ getMetadataIdentifier: function () { return this._metadataIdentifier; }, /** * 
@method setMetadataIdentifier * Sets the identifier (uuid style) for getting layers metadata * @param {String} metadataid */ setMetadataIdentifier: function (metadataid) { this._metadataIdentifier = metadataid; }, /** * @method getBackendStatus * Status text for layer operatibility (f.ex. 'DOWN') * @return {String} */ getBackendStatus: function () { return this._backendStatus; }, /** * @method setBackendStatus * Status text for layer operatibility (f.ex. 'DOWN') * @param {String} backendStatus */ setBackendStatus: function (backendStatus) { this._backendStatus = backendStatus; }, /** * @method setMetaType * @param {String} type used to group layers by f.ex. functionality. * Layers can be fetched based on metatype f.ex. 'myplaces' */ setMetaType: function (type) { this._metaType = type; }, /** * @method getMetaType * @return {String} type used to group layers by f.ex. functionality. * Layers can be fetched based on metatype f.ex. 'myplaces' */ getMetaType: function () { return this._metaType; }, /** * @method addStyle * @param {Oskari.mapframework.domain.Style} style * adds style to layer */ addStyle: function (style) { this._styles.push(style); }, /** * @method getStyles * @return {Oskari.mapframework.domain.Style[]} * Gets layer styles */ getStyles: function () { return this._styles; }, /** * @method selectStyle * @param {String} styleName * Selects a #Oskari.mapframework.domain.Style with given name as #getCurrentStyle. 
* If style is not found, assigns an empty #Oskari.mapframework.domain.Style to #getCurrentStyle */ selectStyle: function (styleName) { var style = null, i; // Layer have styles if (this._styles.length > 0) { // There is default style defined if (styleName !== '') { for (i = 0; i < this._styles.length; i += 1) { style = this._styles[i]; if (style.getName() === styleName) { this._currentStyle = style; if (style.getLegend() !== '') { this._legendImage = style.getLegend(); } return; } } } // There is not default style defined else { //var style = // Oskari.clazz.create('Oskari.mapframework.domain.Style'); // Layer have more than one style, set first // founded style to default // Because of layer style error this if clause // must compare at there is more than one style. if (this._styles.length > 1) { this._currentStyle = this._styles[0]; } // Layer have not styles, add empty style to // default else { style = Oskari.clazz.create('Oskari.mapframework.domain.Style'); style.setName(''); style.setTitle(''); style.setLegend(''); this._currentStyle = style; } return; } } // Layer have not styles else { style = Oskari.clazz.create('Oskari.mapframework.domain.Style'); style.setName(''); style.setTitle(''); style.setLegend(''); this._currentStyle = style; return; } }, /** * @method getCurrentStyle * @return {Oskari.mapframework.domain.Style} current style */ getCurrentStyle: function () { return this._currentStyle; }, /** * @method getTools * @return {Oskari.mapframework.domain.Tool[]} * Get layer tools */ getTools: function () { return this._tools; }, /** * @method setTools * @params {Oskari.mapframework.domain.Tool[]} * Set layer tools */ setTools: function (tools) { this._tools = tools; }, /** * @method addTool * @params {Oskari.mapframework.domain.Tool} * adds layer tool to tools */ addTool: function (tool) { this._tools.push(tool); }, /** * @method getTool * @return {Oskari.mapframework.domain.Tool} * adds layer tool to tools */ getTool: function (toolName) { var tool = 
null, i; // Layer have tools if (this._tools.length > 0 ) { // if (toolName !== '') { for (i = 0; i < this._tools.length; i += 1) { tool = this._tools[i]; if (tool.getName() === toolName) { return tool; } } } } return tool; }, /** * @method setLegendImage * @return {String} legendImage URL to a legend image */ setLegendImage: function (legendImage) { this._legendImage = legendImage; }, /** * @method getLegendImage * @return {String} URL to a legend image */ getLegendImage: function () { return this._legendImage; }, /** * @method getLegendImage * @return {Boolean} true if layer has a legendimage or its styles have legend images */ hasLegendImage: function () { var i; if (this._legendImage) { return true; } else { for (i = 0; i < this._styles.length; i += 1) { if (this._styles[i].getLegend()) { return true; } } } return false; }, /** * @method setSticky * True if layer switch off is disable * @param {Boolean} isSticky */ setSticky: function (isSticky) { this._isSticky = isSticky; }, /** * @method isSticky * True if layer switch off is disable */ isSticky: function () { return this._isSticky; }, /** * @method setQueryable * True if we should call GFI on the layer * @param {Boolean} queryable */ setQueryable: function (queryable) { this._queryable = queryable; }, /** * @method getQueryable * True if we should call GFI on the layer * @param {Boolean} queryable */ getQueryable: function () { return this._queryable; }, /** * @method setAsBaseLayer * sets layer type to BASE_LAYER */ setAsBaseLayer: function () { this._type = 'BASE_LAYER'; }, /** * @method setAsNormalLayer * sets layer type to NORMAL_LAYER */ setAsNormalLayer: function () { this._type = 'NORMAL_LAYER'; }, /** * @method setAsGroupLayer * Sets layer type to GROUP_LAYER */ setAsGroupLayer: function () { this._type = 'GROUP_LAYER'; }, /** * @method isGroupLayer * @return {Boolean} true if this is a group layer (=has sublayers) */ isGroupLayer: function () { return this._type === 'GROUP_LAYER'; }, /** * @method 
isBaseLayer * @return {Boolean} true if this is a base layer (=has sublayers) */ isBaseLayer: function () { return this._type === 'BASE_LAYER'; }, /** * @method isInScale * @param {Number} scale scale to compare to * @return {Boolean} true if given scale is between this layers min/max scales. Always return true for base-layers. */ isInScale: function (scale) { var _return = this.isBaseLayer(); if (!scale) { var sandbox = Oskari.$().sandbox; scale = sandbox.getMap().getScale(); } // Check layer scales only normal layers if (!this.isBaseLayer()) { if ((scale > this.getMaxScale() || !this.getMaxScale()) && (scale < this.getMinScale()) || !this.getMinScale()) { _return = true; } } return _return; }, /** * @method getLayerType * @return {String} layer type in lower case */ getLayerType: function () { return this._layerType.toLowerCase(); }, /** * @method isLayerOfType * @param {String} flavour layer type to check against. A bit misleading since setType is base/group/normal, this is used to check if the layer is a WMS layer. * @return {Boolean} true if flavour is the specified layer type */ isLayerOfType: function (flavour) { return flavour && flavour.toLowerCase() === this.getLayerType(); }, /** * @method getIconClassname * @return {String} layer icon classname used in the CSS style. */ getIconClassname: function () { if (this.isBaseLayer()) { return 'layer-base'; } else if (this.isGroupLayer()) { return 'layer-group'; } else { return 'layer-' + this.getLayerType(); } }, /** * @method getParams * @return {Object} optional layer parameters for OpenLayers, empty object if no parameters were passed in construction */ getParams: function () { return this._params; }, /** * @method getOptions * @return {Object} optional layer options for OpenLayers, empty object if no options were passed in construction */ getOptions: function () { return this._options; } });<|fim▁end|>
}, /**
<|file_name|>users_leaderboard.go<|end_file_name|><|fim▁begin|>/* These are more functions for querying the "users" table, but these functions are only used in "leaderboard.go" */ package models import ( "database/sql" "errors" "strconv" ) const ( SoloSeason1StartDatetime = "2017-10-17 23:00:00" SoloSeason1EndDatetime = "2018-03-17 00:00:00" SoloSeason2StartDatetime = "2018-03-18 23:00:00" SoloSeason2EndDatetime = "2018-10-26 00:00:00" // This is not actually when the Repentance DLC was released, // but rather when the Repentance version of Racing+ was released RepentanceReleasedDatetime = "2021-05-21 00:00:00" SoloSeason3StartDatetime = "2021-12-03 00:00:00" SoloSeason3EndDatetime = "2030-00-00 00:00:00" SoloSeasonStartDatetime = SoloSeason3StartDatetime SoloSeasonEndDatetime = SoloSeason3EndDatetime ) type StatsUnseeded struct { AdjustedAverage int RealAverage int NumRaces int NumForfeits int ForfeitPenalty int LowestTime int LastRace sql.NullTime } type StatsTrueSkill struct { TrueSkill float64 Mu float64 Sigma float64 Change float64 NumRaces int LastRace sql.NullTime } func (*Users) GetTrueSkill(userID int, format string) (StatsTrueSkill, error) { var stats StatsTrueSkill if err := db.QueryRow(` SELECT `+format+`_trueskill, `+format+`_trueskill_mu, `+format+`_trueskill_sigma, `+format+`_trueskill_change, `+format+`_num_races, `+format+`_last_race FROM users WHERE id = ? `, userID).Scan( &stats.TrueSkill, &stats.Mu, &stats.Sigma, &stats.Change, &stats.NumRaces, &stats.LastRace, ); err != nil { return stats, err } return stats, nil } func (*Users) SetTrueSkill(userID int, stats StatsTrueSkill, format string) error { var stmt *sql.Stmt if v, err := db.Prepare(` UPDATE users SET ` + format + `_trueskill = ?, ` + format + `_trueskill_mu = ?, ` + format + `_trueskill_sigma = ?, ` + format + `_trueskill_change = ?, ` + format + `_num_races = ?, ` + format + `_last_race = NOW() WHERE id = ? 
`); err != nil { return err } else { stmt = v } defer stmt.Close() if _, err := stmt.Exec( stats.TrueSkill, stats.Mu, stats.Sigma, stats.Change, stats.NumRaces, userID, ); err != nil { return err } return nil } // Only used in the "leaderboardRecalculate" functions func (*Users) SetLastRace(format string) error { var SQLString string if format == "ranked_solo" { SQLString = ` UPDATE users SET ` + format + `_last_race = ( SELECT races.datetime_finished FROM race_participants JOIN races ON race_participants.race_id = races.id WHERE user_id = users.id AND races.finished = 1 AND races.ranked = 1 AND races.solo = 1 AND races.datetime_finished > "` + SoloSeasonStartDatetime + `" AND races.datetime_finished < "` + SoloSeasonEndDatetime + `" ORDER BY races.datetime_finished DESC LIMIT 1 ) ` } else { SQLString = ` UPDATE users SET ` + format + `_last_race = ( SELECT races.datetime_finished FROM race_participants JOIN races ON race_participants.race_id = races.id WHERE user_id = users.id AND races.finished = 1 AND races.format = "` + format + `" AND races.solo = 0 AND races.datetime_finished > "` + RepentanceReleasedDatetime + `" ORDER BY races.datetime_finished DESC LIMIT 1 ) ` } var stmt *sql.Stmt if v, err := db.Prepare(SQLString); err != nil { return err } else { stmt = v } defer stmt.Close() if _, err := stmt.Exec(); err != nil { return err } return nil } func (*Users) ResetTrueSkill(format string) error { var stmt *sql.Stmt if v, err := db.Prepare(` UPDATE users SET ` + format + `_trueskill = 25, ` + format + `_trueskill_sigma = 8.333, ` + format + `_trueskill_change = 0, ` + format + `_num_races = 0, ` + format + `_last_race = NULL `); err != nil { return err } else { stmt = v } defer stmt.Close() if _, err := stmt.Exec(); err != nil { return err } return nil } func (*Users) SetStatsRankedSolo( userID int, realAverage int, numForfeits int, forfeitPenalty int, lowestTime int64, startingBuild int, ) error { adjustedAverage := realAverage + forfeitPenalty // 1800000 is 
30 minutes (1000 * 60 * 30) var stmt *sql.Stmt if v, err := db.Prepare(` UPDATE users SET ranked_solo_adjusted_average = ?, ranked_solo_real_average = ?, ranked_solo_num_races = ( SELECT COUNT(race_participants.id)<|fim▁hole|> JOIN races ON race_participants.race_id = races.id WHERE race_participants.user_id = ? AND races.finished = 1 AND races.ranked = 1 AND races.solo = 1 AND races.datetime_finished > "` + SoloSeasonStartDatetime + `" AND races.datetime_finished < "` + SoloSeasonEndDatetime + `" ), ranked_solo_num_forfeits = ?, ranked_solo_forfeit_penalty = ?, ranked_solo_lowest_time = ?, ranked_solo_last_race = NOW(), ranked_solo_metadata = ? WHERE id = ? `); err != nil { return err } else { stmt = v } defer stmt.Close() if _, err := stmt.Exec( adjustedAverage, realAverage, userID, numForfeits, forfeitPenalty, lowestTime, startingBuild, userID, ); err != nil { return err } return nil } func (*Users) ResetRankedSolo(userID int) error { var stmt *sql.Stmt if v, err := db.Prepare(` UPDATE users SET ranked_solo_adjusted_average = 0, ranked_solo_real_average = 0, ranked_solo_num_forfeits = 0, ranked_solo_forfeit_penalty = 0, ranked_solo_lowest_time = 0, ranked_solo_num_races = 0, ranked_solo_last_race = NULL, ranked_solo_metadata = NULL WHERE id = ? 
`); err != nil { return err } else { stmt = v } defer stmt.Close() if _, err := stmt.Exec(userID); err != nil { return err } return nil } func (*Users) ResetRankedSoloAll() error { var stmt *sql.Stmt if v, err := db.Prepare(` UPDATE users SET ranked_solo_adjusted_average = 0, ranked_solo_real_average = 0, ranked_solo_num_forfeits = 0, ranked_solo_forfeit_penalty = 0, ranked_solo_lowest_time = 0, ranked_solo_num_races = 0, ranked_solo_last_race = NULL, ranked_solo_metadata = NULL `); err != nil { return err } else { stmt = v } defer stmt.Close() if _, err := stmt.Exec(); err != nil { return err } return nil } func (*Users) GetRankedSoloMetadata(userID int) (sql.NullInt64, error) { var metadata sql.NullInt64 if err := db.QueryRow(` SELECT ranked_solo_metadata FROM users WHERE id = ? `, userID).Scan(&metadata); err == sql.ErrNoRows { return sql.NullInt64{}, errors.New("A user with an ID of \"" + strconv.Itoa(userID) + "\" does not exist.") } else if err != nil { return sql.NullInt64{}, err } return metadata, nil }<|fim▁end|>
FROM race_participants
<|file_name|>class-cast-to-trait.rs<|end_file_name|><|fim▁begin|>// run-pass #![allow(dead_code)] #![allow(unused_mut)] #![allow(non_camel_case_types)] // ignore-freebsd FIXME fails on BSD trait noisy { fn speak(&mut self); } struct cat { meows: usize, how_hungry: isize, name: String, } impl noisy for cat { fn speak(&mut self) { self.meow(); } } impl cat { pub fn eat(&mut self) -> bool { if self.how_hungry > 0 { println!("OM NOM NOM"); self.how_hungry -= 2; return true; } else { println!("Not hungry!");<|fim▁hole|>} impl cat { fn meow(&mut self) { println!("Meow"); self.meows += 1; if self.meows % 5 == 0 { self.how_hungry += 1; } } } fn cat(in_x : usize, in_y : isize, in_name: String) -> cat { cat { meows: in_x, how_hungry: in_y, name: in_name } } pub fn main() { let mut nyan = cat(0, 2, "nyan".to_string()); let mut nyan: &mut dyn noisy = &mut nyan; nyan.speak(); }<|fim▁end|>
return false; } }
<|file_name|>fluiddb.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ A very thin wrapper on top of the FluidDB RESTful API Copyright (c) 2009-2010 Seo Sanghyeon, Nicholas Tollervey and others See README, AUTHORS and LICENSE for more information """ import sys import httplib2 import urllib import types if sys.version_info < (2, 6): import simplejson as json else: import json # There are currently two instances of FluidDB. MAIN is the default standard # instance and SANDBOX is a scratch version for testing purposes. Data in # SANDBOX can (and will) be blown away. MAIN = 'https://fluiddb.fluidinfo.com' SANDBOX = 'https://sandbox.fluidinfo.com' instance = MAIN ITERABLE_TYPES = set((list, tuple)) SERIALIZABLE_TYPES = set((types.NoneType, bool, int, float, str, unicode, list, tuple)) global_headers = { 'Accept': '*/*', } def login(username, password): """ Creates the 'Authorization' token from the given username and password. """ userpass = username + ':' + password auth = 'Basic ' + userpass.encode('base64').strip() global_headers['Authorization'] = auth def logout(): """ Removes the 'Authorization' token from the headers passed into FluidDB """ if 'Authorization' in global_headers: del global_headers['Authorization'] def call(method, path, body=None, mime=None, tags=[], custom_headers={}, **kw): """ Makes a call to FluidDB method = HTTP verb. e.g. PUT, POST, GET, DELETE or HEAD path = Path appended to the instance to locate the resource in FluidDB this can be either a string OR a list of path elements. 
body = The request body (a dictionary will be translated to json, primitive types will also be jsonified) mime = The mime-type for the body of the request - will override the jsonification of primitive types tags = The list of tags to return if the request is to values headers = A dictionary containing additional headers to send in the request **kw = Query-string arguments to be appended to the URL """ http = httplib2.Http() # build the URL url = build_url(path) if kw: url = url + '?' + urllib.urlencode(kw) if tags and path.startswith('/values'): # /values based requests must have a tags list to append to the # url args (which are passed in as **kw), so append them so everything # gets urlencoded correctly below url = url + '&' + urllib.urlencode([('tag', tag) for tag in tags]) # set the headers headers = global_headers.copy() if custom_headers: headers.update(custom_headers) # make sure the path is a string for the following elif check for PUT # based requests if isinstance(path, list):<|fim▁hole|> # jsonify dicts headers['content-type'] = 'application/json' body = json.dumps(body) elif method.upper() == 'PUT' and ( path.startswith('/objects/') or path.startswith('/about')): # A PUT to an "/objects/" or "/about/" resource means that we're # handling tag-values. Make sure we handle primitive/opaque value types # properly. if mime: # opaque value (just set the mime type) headers['content-type'] = mime elif isprimitive(body): # primitive values need to be json-ified and have the correct # content-type set headers['content-type'] = 'application/vnd.fluiddb.value+json' body = json.dumps(body) else: # No way to work out what content-type to send to FluidDB so # bail out. 
raise TypeError("You must supply a mime-type") response, content = http.request(url, method, body, headers) if ((response['content-type'] == 'application/json' or response['content-type'] == 'application/vnd.fluiddb.value+json') and content): result = json.loads(content) else: result = content return response, result def isprimitive(body): """ Given the body of a request will return a boolean to indicate if the value is a primitive value type. See: http://doc.fluidinfo.com/fluidDB/api/tag-values.html & http://bit.ly/hmrMzT For an explanation of the difference between primitive and opaque values. """ bodyType = type(body) if bodyType in SERIALIZABLE_TYPES: if bodyType in ITERABLE_TYPES: if not all(isinstance(x, basestring) for x in body): return False return True else: return False def build_url(path): """ Given a path that is either a string or list of path elements, will return the correct URL """ url = instance if isinstance(path, list): url += '/' url += '/'.join([urllib.quote(element, safe='') for element in path]) else: url += urllib.quote(path) return url<|fim▁end|>
path = '/'+'/'.join(path) # Make sure the correct content-type header is sent if isinstance(body, dict):
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>mod ui; fn main() { let button = ui::widgets::button::Button::new();<|fim▁hole|>}<|fim▁end|>
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>import re from django import forms from django.conf import settings from django.template.defaultfilters import slugify from tower import ugettext_lazy as _lazy from kitsune.products.models import Product, Topic from kitsune.sumo.form_fields import MultiUsernameField, StrippedCharField from kitsune.wiki.config import SIGNIFICANCES, CATEGORIES from kitsune.wiki.models import ( Document, Revision, MAX_REVISION_COMMENT_LENGTH) from kitsune.wiki.tasks import add_short_links from kitsune.wiki.widgets import ( RadioFieldRendererWithHelpText, ProductTopicsAndSubtopicsWidget, RelatedDocumentsWidget) TITLE_REQUIRED = _lazy(u'Please provide a title.') TITLE_SHORT = _lazy(u'The title is too short (%(show_value)s characters). ' u'It must be at least %(limit_value)s characters.') TITLE_LONG = _lazy(u'Please keep the length of the title to %(limit_value)s ' u'characters or less. It is currently %(show_value)s ' u'characters.') SLUG_REQUIRED = _lazy(u'Please provide a slug.') SLUG_INVALID = _lazy(u'The slug provided is not valid.') SLUG_SHORT = _lazy(u'The slug is too short (%(show_value)s characters). ' u'It must be at least %(limit_value)s characters.') SLUG_LONG = _lazy(u'Please keep the length of the slug to %(limit_value)s ' u'characters or less. It is currently %(show_value)s ' u'characters.') SUMMARY_REQUIRED = _lazy(u'Please provide a summary.') SUMMARY_SHORT = _lazy(u'The summary is too short (%(show_value)s characters). ' u'It must be at least %(limit_value)s characters.') SUMMARY_LONG = _lazy(u'Please keep the length of the summary to ' u'%(limit_value)s characters or less. It is currently ' u'%(show_value)s characters.') CONTENT_REQUIRED = _lazy(u'Please provide content.') CONTENT_SHORT = _lazy(u'The content is too short (%(show_value)s characters). ' u'It must be at least %(limit_value)s characters.') CONTENT_LONG = _lazy(u'Please keep the length of the content to ' u'%(limit_value)s characters or less. 
It is currently ' u'%(show_value)s characters.') COMMENT_LONG = _lazy(u'Please keep the length of the comment to ' u'%(limit_value)s characters or less. It is currently ' u'%(show_value)s characters.') PRODUCT_REQUIRED = _lazy(u'Please select at least one product.') TOPIC_REQUIRED = _lazy(u'Please select at least one topic.') class DocumentForm(forms.ModelForm): """Form to create/edit a document.""" def __init__(self, *args, **kwargs): # Quasi-kwargs: can_archive = kwargs.pop('can_archive', False) can_edit_needs_change = kwargs.pop('can_edit_needs_change', False) initial_title = kwargs.pop('initial_title', '') super(DocumentForm, self).__init__(*args, **kwargs) title_field = self.fields['title'] title_field.initial = initial_title slug_field = self.fields['slug'] slug_field.initial = slugify(initial_title) topics_field = self.fields['topics'] topics_field.choices = Topic.objects.values_list('id', 'title') products_field = self.fields['products'] products_field.choices = Product.objects.values_list('id', 'title') related_documents_field = self.fields['related_documents'] related_documents_field.choices = Document.objects.values_list('id', 'title') # If user hasn't permission to frob is_archived, remove the field. This # causes save() to skip it as well. if not can_archive: del self.fields['is_archived'] # If user hasn't permission to mess with needs_change*, remove the # fields. This causes save() to skip it as well. if not can_edit_needs_change: del self.fields['needs_change'] del self.fields['needs_change_comment'] title = StrippedCharField( min_length=5, max_length=255, widget=forms.TextInput(), label=_lazy(u'Title:'), help_text=_lazy(u'Title of article'), error_messages={'required': TITLE_REQUIRED, 'min_length': TITLE_SHORT, 'max_length': TITLE_LONG}) # We don't use forms.SlugField because it is too strict in # what it allows (English/Roman alpha-numeric characters and dashes). # Instead, we do custom validation in `clean_slug` below. 
slug = StrippedCharField( min_length=3, max_length=255, widget=forms.TextInput(), label=_lazy(u'Slug:'), help_text=_lazy(u'Article URL'), error_messages={'required': SLUG_REQUIRED, 'min_length': SLUG_SHORT, 'max_length': SLUG_LONG}) products = forms.MultipleChoiceField( label=_lazy(u'Relevant to:'), required=False, widget=forms.CheckboxSelectMultiple()) is_localizable = forms.BooleanField( initial=True, label=_lazy(u'Allow translations:'), required=False) is_archived = forms.BooleanField( label=_lazy(u'Obsolete:'), required=False) <|fim▁hole|> initial=True, required=False) category = forms.ChoiceField( choices=CATEGORIES, # Required for non-translations, which is # enforced in Document.clean(). required=False, label=_lazy(u'Category:'), help_text=_lazy(u'Type of article')) topics = forms.MultipleChoiceField( label=_lazy(u'Topics:'), required=False, widget=ProductTopicsAndSubtopicsWidget()) related_documents = forms.MultipleChoiceField( label=_lazy(u'Related documents:'), required=False, widget=RelatedDocumentsWidget()) locale = forms.CharField(widget=forms.HiddenInput()) needs_change = forms.BooleanField( label=_lazy(u'Needs change:'), initial=False, required=False) needs_change_comment = forms.CharField( label=_lazy(u'Comment:'), widget=forms.Textarea(), required=False) def clean_slug(self): slug = self.cleaned_data['slug'] # Blacklist /, ?, % and +, if not re.compile(r'^[^/^\+^\?%]+$').match(slug): raise forms.ValidationError(SLUG_INVALID) return slug def clean(self): c = super(DocumentForm, self).clean() locale = c.get('locale') # Products are required for en-US products = c.get('products') if (locale == settings.WIKI_DEFAULT_LANGUAGE and (not products or len(products) < 1)): raise forms.ValidationError(PRODUCT_REQUIRED) # Topics are required for en-US topics = c.get('topics') if (locale == settings.WIKI_DEFAULT_LANGUAGE and (not topics or len(topics) < 1)): raise forms.ValidationError(TOPIC_REQUIRED) return c class Meta: model = Document fields = ('title', 
'slug', 'category', 'is_localizable', 'products', 'topics', 'locale', 'is_archived', 'allow_discussion', 'needs_change', 'needs_change_comment', 'related_documents') def save(self, parent_doc, **kwargs): """Persist the Document form, and return the saved Document.""" doc = super(DocumentForm, self).save(commit=False, **kwargs) doc.parent = parent_doc # If document doesn't need change, clear out the comment. if not doc.needs_change: doc.needs_change_comment = '' # Create the share link if it doesn't exist and is in # a category it should show for. doc.save() if (doc.category in settings.IA_DEFAULT_CATEGORIES and not doc.share_link): # This operates under the constraints of passing in a list. add_short_links.delay([doc.pk]) self.save_m2m() if parent_doc: # Products are not set on translations. doc.products.remove(*[p for p in doc.products.all()]) return doc class RevisionForm(forms.ModelForm): """Form to create new revisions.""" keywords = StrippedCharField(required=False, label=_lazy(u'Keywords:'), help_text=_lazy(u'Affects search results')) summary = StrippedCharField( min_length=5, max_length=1000, widget=forms.Textarea(), label=_lazy(u'Search result summary:'), help_text=_lazy(u'Only displayed on search results page'), error_messages={'required': SUMMARY_REQUIRED, 'min_length': SUMMARY_SHORT, 'max_length': SUMMARY_LONG}) content = StrippedCharField( min_length=5, max_length=100000, label=_lazy(u'Content:'), widget=forms.Textarea(), error_messages={'required': CONTENT_REQUIRED, 'min_length': CONTENT_SHORT, 'max_length': CONTENT_LONG}) expires = forms.DateField( label=_lazy(u'Expiry date:'), required=False) comment = StrippedCharField(required=False, label=_lazy(u'Comment:')) class Meta(object): model = Revision fields = ('keywords', 'summary', 'content', 'comment', 'based_on', 'expires') def __init__(self, *args, **kwargs): super(RevisionForm, self).__init__(*args, **kwargs) self.fields['based_on'].widget = forms.HiddenInput() self.fields['comment'].widget = 
forms.TextInput( attrs={'maxlength': MAX_REVISION_COMMENT_LENGTH}) def save(self, creator, document, based_on_id=None, base_rev=None, **kwargs): """Persist me, and return the saved Revision. Take several other necessary pieces of data that aren't from the form. """ # Throws a TypeError if somebody passes in a commit kwarg: new_rev = super(RevisionForm, self).save(commit=False, **kwargs) new_rev.document = document new_rev.creator = creator if based_on_id: new_rev.based_on_id = based_on_id # If the document doesn't allow the revision creator to edit the # keywords, keep the old value. if base_rev and not document.allows(creator, 'edit_keywords'): new_rev.keywords = base_rev.keywords new_rev.save() return new_rev class ReviewForm(forms.Form): comment = StrippedCharField(max_length=2000, widget=forms.Textarea(), required=False, label=_lazy(u'Comment:'), error_messages={'max_length': COMMENT_LONG}) _widget = forms.RadioSelect(renderer=RadioFieldRendererWithHelpText) significance = forms.TypedChoiceField( label=_lazy(u'Significance:'), choices=SIGNIFICANCES, initial=SIGNIFICANCES[1][0], required=False, widget=_widget, coerce=int, empty_value=SIGNIFICANCES[1][0]) is_ready_for_localization = forms.BooleanField( initial=False, label=_lazy(u'Ready for localization'), required=False) needs_change = forms.BooleanField( label=_lazy(u'Needs change'), initial=False, required=False) needs_change_comment = forms.CharField( label=_lazy(u'Comment:'), widget=forms.Textarea(), required=False) class AddContributorForm(forms.Form): """Form to add contributors to a document.""" users = MultiUsernameField( widget=forms.TextInput(attrs={'placeholder': _lazy(u'username'), 'class': 'user-autocomplete'})) languages = [('', 'Any')] + [(l[0], u'{1} ({0})'.format(*l)) for l in settings.LANGUAGE_CHOICES] class RevisionFilterForm(forms.Form): """Form to filter a list of revisions.""" locale = forms.ChoiceField(label=_lazy(u'Locale:'), choices=languages, required=False) users = 
MultiUsernameField(label=_lazy(u'Users:'), required=False) start = forms.DateField(label=_lazy(u'Start:'), required=False) end = forms.DateField(label=_lazy(u'End:'), required=False)<|fim▁end|>
allow_discussion = forms.BooleanField( label=_lazy(u'Allow discussion on this article?'),
<|file_name|>sunos.py<|end_file_name|><|fim▁begin|>"""engine.SCons.Platform.sunos Platform-specific initialization for Sun systems. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Platform.Platform() selection method. """ # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. #<|fim▁hole|># NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Platform/sunos.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog" import posix def generate(env): posix.generate(env) # Based on sunSparc 8:32bit # ARG_MAX=1048320 - 3000 for environment expansion env['MAXLINELENGTH'] = 1045320 env['PKGINFO'] = 'pkginfo' env['PKGCHK'] = '/usr/sbin/pkgchk' env['ENV']['PATH'] = env['ENV']['PATH'] + ':/opt/SUNWspro/bin:/usr/ccs/bin' # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
<|file_name|>UnsafeHttpsClient.java<|end_file_name|><|fim▁begin|>package com.android.potlach.cloud.client; import java.io.IOException; import java.net.Socket; import java.net.UnknownHostException; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.security.cert.CertificateException; import java.security.cert.X509Certificate; <|fim▁hole|>import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import org.apache.http.client.HttpClient; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.impl.client.DefaultHttpClient; /** * This is an example of an HTTP client that does not properly * validate SSL certificates that are used for HTTPS. You should * NEVER use a client like this in a production application. Self-signed * certificates are ususally only OK for testing purposes, such as * this use case. 
* * @author jules * */ public class UnsafeHttpsClient { private static class MySSLSocketFactory extends SSLSocketFactory { SSLContext sslContext = SSLContext.getInstance("TLS"); public MySSLSocketFactory(KeyStore truststore) throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException { super(truststore); TrustManager tm = new X509TrustManager() { public void checkClientTrusted(X509Certificate[] chain, String authType) {} public void checkServerTrusted(X509Certificate[] chain, String authType) {} public X509Certificate[] getAcceptedIssuers() { return null; } }; sslContext.init(null, new TrustManager[] { tm }, null); } public MySSLSocketFactory(SSLContext context) throws KeyManagementException, NoSuchAlgorithmException, KeyStoreException, UnrecoverableKeyException { super(null); sslContext = context; } @Override public Socket createSocket(Socket socket, String host, int port, boolean autoClose) throws IOException, UnknownHostException { return sslContext.getSocketFactory().createSocket(socket, host, port, autoClose); } @Override public Socket createSocket() throws IOException { return sslContext.getSocketFactory().createSocket(); } } public static HttpClient createUnsafeClient() { try { HttpClient client = new DefaultHttpClient(); client = sslClient(client); // Execute HTTP Post Request // HttpGet post = new HttpGet(new URI("https://google.com")); // HttpResponse result = client.execute(post); // Log.v("test", EntityUtils.toString(result.getEntity())); // KeyStore trusted = KeyStore.getInstance("BKS"); // SSLContextBuilder builder = new SSLContextBuilder(); // builder.loadTrustMaterial(null, new AllowAllHostnameVerifier()); // // SSLConnectionSocketFactory sslsf = new SSLSocketFactory( // builder.build()); // CloseableHttpClient httpclient = HttpClients.custom() // .setSSLSocketFactory(sslsf).build(); return client; } catch (Exception e) { throw new RuntimeException(e); } } private static HttpClient sslClient(HttpClient 
client) { try { X509TrustManager tm = new X509TrustManager() { public void checkClientTrusted(X509Certificate[] xcs, String string) throws CertificateException { } public void checkServerTrusted(X509Certificate[] xcs, String string) throws CertificateException { } public X509Certificate[] getAcceptedIssuers() { return null; } }; SSLContext ctx = SSLContext.getInstance("TLS"); ctx.init(null, new TrustManager[]{tm}, null); SSLSocketFactory ssf = new MySSLSocketFactory(ctx); ssf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); ClientConnectionManager ccm = client.getConnectionManager(); SchemeRegistry sr = ccm.getSchemeRegistry(); sr.register(new Scheme("https", ssf, 8443)); return new DefaultHttpClient(ccm, client.getParams()); } catch (Exception ex) { return null; } } }<|fim▁end|>
import javax.net.ssl.SSLContext;
<|file_name|>visualization.py<|end_file_name|><|fim▁begin|>"""Klamp't visualization routines. See Python/demos/vistemplate.py for an example of how to run this module. The visualization module lets you draw most Klamp't objects in a 3D world using a simple interface. It also lets you customize the GUI using Qt widgets, OpenGL drawing, and keyboard/mouse intercept routines. Main features include: - Simple interface to modify the visualization - Simple interface to animate and render trajectories - Simple interface to edit certain Klamp't objects (configurations, points, transforms) - Simple interface to drawing text and text labels, and drawing plots - Multi-window, multi-viewport support - Unified interface to PyQt and GLUT (with loss of resource editing functionality under GLUT) - Automatic camera setup The resource editing functionality in the klampt.io.resource module (based on klampt.vis.editors) use this module as well. Due to weird OpenGL and Qt behavior in multi-threaded programs, you should only run visualizations using the methods in this module. There are two primary ways of setting up a visualization: - The first is by adding items to the visualization world and customizing them using the vis.X routines that mirror the methods in VisualizationPlugin (like add, setColor, animate, etc). See Python/demos/vistemplate.py for more information. - The second is by creating a subclass of GLPluginInterface and doing all the necessary drawing / interaction yourself inside its hooks. In the latter case, you will call vis.setPlugin(plugin) to override the default visualization behavior before creating your window. See Python/demos/visplugin.py for more information. A third way of setting up a visualization is a hybrid of the two, where you can add functionality on top of default the visualization world. 
You can either use vis.pushPlugin(plugin) in which case your plugin adds additional functionality, or you can subclass the vis.VisualizationPlugin class, and selectively augment / override the default functionality. Instructions: - To add things to the default visualization: Call the VisualizationPlugin aliases (add, animate, setColor, etc) - To show the visualization and quit when the user closes the window: vis.run() - To show the visualization and return when the user closes the window: vis.dialog() ... do stuff afterwards ... vis.kill() - To show the visualization and be able to run a script alongside it until the user closes the window: vis.show() while vis.shown(): vis.lock() ... do stuff ... [to exit the loop call show(False)] vis.unlock() time.sleep(dt) ... do stuff afterwards ... vis.kill() - To run a window with a custom plugin (GLPluginInterface) and terminate on closure: vis.run(plugin) - To show a dialog or parallel window vis.setPlugin(plugin) ... then call vis.dialog() ... or vis.show() ... do stuff afterwards ... vis.kill() - To add a GLPluginInterface that just customizes a few things on top of the default visualization: vis.pushPlugin(plugin) vis.dialog() vis.popPlugin() - To run plugins side-by-side in the same window: vis.setPlugin(plugin1) vis.addPlugin(plugin2) #this creates a new split-screen vis.dialog() ... or vis.show() ... do stuff afterwards ... vis.kill() - To run a custom dialog in a QtWindow vis.setPlugin([desired plugin or None for visualization]) vis.setParent(qt_window) vis.dialog() ... or vis.show() ... do stuff afterwards ... vis.kill() - To launch a second window after the first is closed: just call whatever you want again. Note: if show was previously called with a plugin and you wish to revert to the default visualization, you should call setPlugin(None) first to restore the default. 
- To create a separate window with a given plugin: w1 = vis.createWindow() #w1=0 show() w2 = vis.createWindow() #w2=1 vis.setPlugin(plugin) vis.dialog() #to restore commands to the original window vis.setWindow(w1) while vis.shown(): ... vis.kill() Note: when changing the data shown by the window (e.g., modifying the configurations of robots in a WorldModel) you must call vis.lock() before accessing the data and then call vis.unlock() afterwards. The main interface is as follows: def createWindow(title=None): creates a new visualization window and returns an integer identifier. def setWindow(id): sets the active window for all subsequent calls. ID 0 is the default visualization window. def getWindow(): gets the active window ID. def setWindowTitle(title): sets the title of the visualization window. def getWindowTitle(): returns the title of the visualization window def setPlugin(plugin=None): sets the current plugin (a GLPluginInterface instance). This plugin will now capture input from the visualization and can override any of the default behavior of the visualizer. Set plugin=None if you want to return to the default visualization. def addPlugin(plugin): adds a second OpenGL viewport governed by the given plugin (a GLPluginInterface instance). def run([plugin]): pops up a dialog and then kills the program afterwards. def kill(): kills all previously launched visualizations. Afterwards, you may not be able to start new windows. Call this to cleanly quit. def dialog(): pops up a dialog box (does not return to calling thread until closed). def show(hidden=False): shows/hides a visualization window run in parallel with the calling script. def spin(duration): shows the visualization window for the desired amount of time before returning, or until the user closes the window. def shown(): returns true if the window is shown. def lock(): locks the visualization world for editing. The visualization will be paused until unlock() is called. 
def unlock(): unlocks the visualization world. Must only be called once after every lock(). def customUI(make_func): launches a user-defined UI window by calling make_func(gl_backend) in the visualization thread. This can be used to build custom editors and windows that are compatible with other visualization functionality. Here gl_backend is an instance of _GLBackend instantiated for the current plugin. def getViewport(): Returns the currently active viewport. The following VisualizationPlugin methods are also added to the klampt.vis namespace and operate on the default plugin. If you are calling these methods from an external loop (as opposed to inside a plugin) be sure to lock/unlock the visualization before/after calling these methods. def add(name,item,keepAppearance=False): adds an item to the visualization. name is a unique identifier. If an item with the same name already exists, it will no longer be shown. If keepAppearance=True, then the prior item's appearance will be kept, if a prior item exists. def clear(): clears the visualization world. def listItems(): prints out all names of visualization objects def listItems(name): prints out all names of visualization objects under the given name def dirty(item_name='all'): marks the given item as dirty and recreates the OpenGL display lists. You may need to call this if you modify an item's geometry, for example. def remove(name): removes an item from the visualization. def setItemConfig(name,vector): sets the configuration of a named item. def getItemConfig(name): returns the configuration of a named item. def hide(name,hidden=True): hides/unhides an item. The item is not removed, it just becomes invisible. def edit(name,doedit=True): turns on/off visual editing of some item. Only points, transforms, coordinate.Point's, coordinate.Transform's, coordinate.Frame's, robots, and objects are currently accepted. def hideLabel(name,hidden=True): hides/unhides an item's text label. 
def animationTime(newtime=None): Gets/sets the current animation time. If newtime == None (default), this gets the animation time. If newtime != None, this sets a new animation time.
def addPlot(name): creates a new empty plot. def addPlotItem(name,itemname): adds a visualization item to a plot. def logPlot(name,itemname,value): logs a custom visualization item to a plot def logPlotEvent(name,eventname,color=None): logs an event on the plot. def hidePlotItem(name,itemname,hidden=True): hides an item in the plot. To hide a particular channel of a given item pass a pair (itemname,channelindex). For example, to hide configurations 0-5 of 'robot', call hidePlotItem('plot',('robot',0)), ..., hidePlotItem('plot',('robot',5)). def setPlotDuration(name,time): sets the plot duration. def setPlotRange(name,vmin,vmax): sets the y range of a plot. def setPlotPosition(name,x,y): sets the upper left position of the plot on the screen. def setPlotSize(name,w,h): sets the width and height of the plot. def savePlot(name,fn): saves a plot to a CSV (extension .csv) or Trajectory (extension .traj) file. def autoFitCamera(scale=1.0): Automatically fits the camera to all objects in the visualization. A scale > 1 magnifies the camera zoom. Utility function: def autoFitViewport(viewport,objects): Automatically fits the viewport's camera to see all the given objects. NAMING CONVENTION: The world, if one exists, should be given the name 'world'. Configurations and paths are drawn with reference to the first robot in the world. All items that refer to a name (except add) can either be given a top level item name (a string) or a sub-item (a sequence of strings, given a path from the root to the leaf). For example, if you've added a RobotWorld under the name 'world' containing a robot called 'myRobot', then setColor(('world','myRobot'),0,1,0) will turn the robot green. If 'link5' is the robot's 5th link, then setColor(('world','myRobot','link5'),0,0,1) will turn the 5th link blue. 
""" from OpenGL.GL import * from threading import Thread,RLock from ..robotsim import * from ..math import vectorops,so3,se3 import gldraw from glinit import * from glinit import _GLBackend,_PyQtAvailable,_GLUTAvailable from glinterface import GLPluginInterface from glprogram import GLPluginProgram import glcommon import time import signal import weakref from ..model import types from ..model import config from ..model import coordinates from ..model.subrobot import SubRobotModel from ..model.trajectory import * from ..model.contact import ContactPoint,Hold class WindowInfo: """Mode can be hidden, shown, or dialog""" def __init__(self,name,frontend,vis,glwindow=None): self.name = name self.frontend = frontend self.vis = vis self.glwindow = glwindow self.mode = 'hidden' self.guidata = None self.custom_ui = None self.doRefresh = False self.doReload = False self.worlds = [] self.active_worlds = [] _globalLock = RLock() #the VisualizationPlugin instance of the currently active window _vis = None #the GLPluginProgram of the currently active window. Accepts _vis as plugin or other user-defined plugins as well _frontend = GLPluginProgram() #the window title for the next created window _window_title = "Klamp't visualizer" #a list of WorldModel's in the current window. A world cannot be used in multiple simultaneous #windows in GLUT. If a world is reused with a different window, its display lists will be refreshed. 
#Note: must be proxies to allow for deletion _current_worlds = [] #list of WindowInfo's _windows = [] #the index of the current window _current_window = None def createWindow(name): """Creates a new window (and sets it active).""" global _globalLock,_frontend,_vis,_window_title,_current_worlds,_windows,_current_window _globalLock.acquire() if len(_windows) == 0: #save the defaults in window 0 _windows.append(WindowInfo(_window_title,_frontend,_vis)) _windows[-1].worlds = _current_worlds _windows[-1].active_worlds = _current_worlds[:] #make a new window _window_title = name _frontend = GLPluginProgram() _vis = VisualizationPlugin() _frontend.setPlugin(_vis) _windows.append(WindowInfo(_window_title,_frontend,_vis)) _current_worlds = [] id = len(_windows)-1 _current_window = id _globalLock.release() return id def setWindow(id): """Sets currently active window.""" global _globalLock,_frontend,_vis,_window_title,_windows,_current_window,_current_worlds if id == _current_window: return _globalLock.acquire() if len(_windows) == 0: #save the defaults in window 0 _windows.append(WindowInfo(_window_title,_frontend,_vis)) _windows[-1].worlds = _current_worlds _windows[-1].active_worlds = _current_worlds[:] assert id >= 0 and id < len(_windows),"Invalid window id" _window_title,_frontend,_vis,_current_worlds = _windows[id].name,_windows[id].frontend,_windows[id].vis,_windows[id].worlds #print "vis.setWindow(",id,") the window has status",_windows[id].mode if not _PyQtAvailable: #PyQt interface allows sharing display lists but GLUT does not. #refresh all worlds' display lists that were once active. 
        for w in _current_worlds:
            if w in _windows[_current_window].active_worlds:
                print "klampt.vis.setWindow(): world",w().index,"becoming active in the new window",id
                #w is a weakref proxy; w() dereferences it
                _refreshDisplayLists(w())
                _windows[_current_window].active_worlds.remove(w)
    _windows[id].active_worlds = _current_worlds[:]
    _current_window = id
    _globalLock.release()

def getWindow():
    """Retrieves ID of currently active window (returns 0, the default
    window, if none has been explicitly set)."""
    global _current_window
    if _current_window == None: return 0
    return _current_window

def setPlugin(plugin):
    """Lets the user capture input via a glinterface.GLPluginInterface class.
    Set plugin to None to disable plugins and return to the standard visualization"""
    global _globalLock,_frontend,_windows,_current_window
    _globalLock.acquire()
    if not isinstance(_frontend,GLPluginProgram):
        #a multi-viewport program was installed by addPlugin; revert to a single-plugin program
        _frontend = GLPluginProgram()
        if _current_window != None:
            if _windows[_current_window].glwindow != None:
                _frontend.window = _windows[_current_window].glwindow
    if plugin == None:
        global _vis
        if _vis==None:
            raise RuntimeError("Visualization disabled")
        _frontend.setPlugin(_vis)
    else:
        _frontend.setPlugin(plugin)
        if hasattr(plugin,'world'):
            _checkWindowCurrent(plugin.world)
    _onFrontendChange()
    _globalLock.release()

def pushPlugin(plugin):
    """Adds a new glinterface.GLPluginInterface plugin on top of the old one."""
    global _globalLock,_frontend
    _globalLock.acquire()
    assert isinstance(_frontend,GLPluginProgram),"Can't push a plugin after addPlugin"
    if len(_frontend.plugins) == 0:
        #no plugin stack yet: seed it with the default visualization plugin
        global _vis
        if _vis==None:
            raise RuntimeError("Visualization disabled")
        _frontend.setPlugin(_vis)
    _frontend.pushPlugin(plugin)
    _onFrontendChange()
    _globalLock.release()

def popPlugin():
    """Reverses a prior pushPlugin() call"""
    global _frontend
    _globalLock.acquire()
    _frontend.popPlugin()
    _onFrontendChange()
    _globalLock.release()

def addPlugin(plugin):
    """Adds a second OpenGL viewport in the same window, governed by the
    given plugin (a glinterface.GLPluginInterface instance)."""
    global _frontend
_globalLock.acquire() #create a multi-view widget if isinstance(_frontend,glcommon.GLMultiViewportProgram): _frontend.addView(plugin) else: if len(_frontend.plugins) == 0: setPlugin(None) multiProgram = glcommon.GLMultiViewportProgram() multiProgram.window = None if _current_window != None: if _windows[_current_window].glwindow != None: multiProgram.window = _windows[_current_window].glwindow multiProgram.addView(_frontend) multiProgram.addView(plugin) multiProgram.name = _window_title _frontend = multiProgram _onFrontendChange() _globalLock.release() def run(plugin=None): """A blocking call to start a single window and then kill the visualization when closed. If plugin == None, the default visualization is used. Otherwise, plugin is a glinterface.GLPluginInterface object, and it is used.""" setPlugin(plugin) show() while shown(): time.sleep(0.1) setPlugin(None) kill() def dialog(): """A blocking call to start a single dialog window with the current plugin. It is closed by pressing OK or closing the window.""" _dialog() def setWindowTitle(title): global _window_title _window_title = title _onFrontendChange() def getWindowTitle(): global _window_title return _window_title def kill(): """This should be called at the end of the calling program to cleanly terminate the visualization thread""" global _vis,_globalLock if _vis==None: print "vis.kill() Visualization disabled" return _kill() def show(display=True): """Shows or hides the current window""" _globalLock.acquire() if display: _show() else: _hide() _globalLock.release() def spin(duration): """Spin-shows a window for a certain duration or until the window is closed.""" show() t = 0 while t < duration: if not shown(): break time.sleep(min(0.04,duration-t)) t += 0.04 show(False) return def lock(): """Begins a locked section. Needs to be called any time you modify a visualization item outside of the visualization thread. 
unlock() must be called to let the visualization thread proceed.""" global _globalLock _globalLock.acquire() def unlock(): """Ends a locked section acquired by lock().""" global _globalLock,_windows for w in _windows: if w.glwindow: w.doRefresh = True _globalLock.release() def shown(): """Returns true if a visualization window is currently shown.""" global _globalLock,_thread_running,_current_window _globalLock.acquire() res = (_thread_running and _current_window != None and _windows[_current_window].mode in ['shown','dialog'] or _windows[_current_window].guidata is not None) _globalLock.release() return res def customUI(func): """Tells the next created window/dialog to use a custom UI function. func is a 1-argument function that takes a QtWindow or GLUTWindow as its argument.""" global _globalLock _globalLock.acquire() _set_custom_ui(func) _globalLock.release() def getViewport(): """Returns the GLViewport of the current window (see klampt.vis.glprogram.GLViewport)""" return _frontend.get_view() def setViewport(viewport): """Sets the current window to use a given GLViewport (see klampt.vis.glprogram.GLViewport)""" _frontend.set_view(viewport) ######### CONVENIENCE ALIASES FOR VisualizationPlugin methods ########### def clear(): """Clears the visualization world.""" global _vis if _vis==None: return _vis.clear() def add(name,item,keepAppearance=False): """Adds an item to the visualization. name is a unique identifier. If an item with the same name already exists, it will no longer be shown. 
    If keepAppearance=True, then the prior item's appearance will be kept, if a prior item exists."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _globalLock.acquire()
    #make sure the item's world (if any) is bound to this window's GL context
    _checkWindowCurrent(item)
    _globalLock.release()
    _vis.add(name,item,keepAppearance)

def listItems(name=None,indent=0):
    """Prints out all names of visualization objects (under the given name, if provided)."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.listItems(name,indent)

def dirty(item_name='all'):
    """Marks the given item as dirty and recreates the OpenGL display lists. You may need
    to call this if you modify an item's geometry, for example. If things start disappearing
    from your world when you create a new window, you may need to call this too."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.dirty(item_name)

def animate(name,animation,speed=1.0,endBehavior='loop'):
    """Sends an animation to the named object.
    Works with points, so3 elements, se3 elements, rigid objects, or robots, and
    may work with other objects as well.

    Parameters:
    - animation: may be a Trajectory or a list of configurations.
    - speed: a modulator on the animation speed. If the animation is a list of
      milestones, it is by default run at 1 milestone per second.
    - endBehavior: either 'loop' (animation repeats forever) or 'halt' (plays once).
    """
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.animate(name,animation,speed,endBehavior)

def pauseAnimation(paused=True):
    """Turns on/off animation."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.pauseAnimation(paused)

def stepAnimation(amount):
    """Moves forward the animation time by the given amount, in seconds."""
    global _vis
    if _vis==None:
        print "Visualization disabled"
        return
    _vis.stepAnimation(amount)

def animationTime(newtime=None):
    """Gets/sets the current animation time
    If newtime == None (default), this gets the animation time.
    If newtime != None, this sets a new animation time.
""" global _vis if _vis==None: print "Visualization disabled" return 0 return _vis.animationTime(newtime) def remove(name): global _vis if _vis==None: return return _vis.remove(name) def getItemConfig(name): global _vis if _vis==None: return None return _vis.getItemConfig(name) def setItemConfig(name,value): global _vis if _vis==None: return return _vis.setItemConfig(name,value) def hideLabel(name,hidden=True): global _vis if _vis==None: return return _vis.hideLabel(name,hidden) def hide(name,hidden=True): global _vis if _vis==None: return _vis.hide(name,hidden) def edit(name,doedit=True): """Turns on/off visual editing of some item. Only points, transforms, coordinate.Point's, coordinate.Transform's, coordinate.Frame's, robots, and objects are currently accepted.""" global _vis if _vis==None: return _vis.edit(name,doedit) def setAppearance(name,appearance): global _vis if _vis==None: return _vis.setAppearance(name,appearance) def setAttribute(name,attr,value): global _vis if _vis==None: return _vis.setAttribute(name,attr,value) def revertAppearance(name): global _vis if _vis==None: return _vis.revertAppearance(name) def setColor(name,r,g,b,a=1.0): global _vis if _vis==None: return _vis.setColor(name,r,g,b,a) def setDrawFunc(name,func): global _vis if _vis==None: return _vis.setDrawFunc(name,func) def _getOffsets(object): if isinstance(object,WorldModel): res = [] for i in range(object.numRobots()): res += _getOffsets(object.robot(i)) for i in range(object.numRigidObjects()): res += _getOffsets(object.rigidObject(i)) return res elif isinstance(object,RobotModel): q = object.getConfig() object.setConfig([0.0]*len(q)) worig = [object.link(i).getTransform()[1] for i in range(object.numLinks())] object.setConfig(q) wnew = [object.link(i).getTransform()[1] for i in range(object.numLinks())] return [vectorops.sub(b,a) for a,b in zip(worig,wnew)] elif isinstance(object,RigidObjectModel): return [object.getTransform()[1]] elif isinstance(object,Geometry3D): return 
object.getCurrentTransform()[1] elif isinstance(object,VisAppearance): res = _getOffsets(object.item) if len(res) != 0: return res if len(object.subAppearances) == 0: bb = object.getBounds() if bb != None and not aabb_empty(bb): return [vectorops.mul(vectorops.add(bb[0],bb[1]),0.5)] else: res = [] for a in object.subAppearances.itervalues(): res += _getOffsets(a) return res return [] def _getBounds(object): if isinstance(object,WorldModel): res = [] for i in range(object.numRobots()): res += _getBounds(object.robots(i)) for i in range(object.numRigidObjects()): res += _getBounds(object.rigidObject(i)) return res elif isinstance(object,RobotModel): return sum([object.link(i).geometry().getBB() for i in range(object.numLinks())],[]) elif isinstance(object,RigidObjectModel): return object.geometry().getAABB() elif isinstance(object,Geometry3D): return object.getAABB() elif isinstance(object,VisAppearance): if len(object.subAppearances) == 0: if isinstance(object.item,TerrainModel): return [] bb = object.getBounds() if bb != None and not aabb_empty(bb): return list(bb) else: res = [] for a in object.subAppearances.itervalues(): res += _getBounds(a) return res return [] def _fitPlane(pts): import numpy as np if len(pts) < 3: raise ValueError("Point set is degenerate") centroid = vectorops.div(vectorops.add(*pts),len(pts)) A = np.array([vectorops.sub(pt,centroid) for pt in pts]) U,S,V = np.linalg.svd(A,full_matrices=False) imin = 0 smin = S[0] zeros = [] for i in xrange(len(S)): if abs(S[i]) < 1e-6: zeros.append(i) if abs(S[i]) < smin: smin = S[i] imin = i if len(zeros) > 1: raise ValueError("Point set is degenerate") assert V.shape == (3,3) #normal is the corresponding row of U normal = V[imin,:] return centroid,normal.tolist() def autoFitViewport(viewport,objects): ofs = sum([_getOffsets(o) for o in objects],[]) pts = sum([_getBounds(o) for o in objects],[]) #print "Bounding box",bb,"center",center #raw_input() #reset viewport.camera.rot = [0.,0.,0.] 
viewport.camera.tgt = [0.,0.,0.] viewport.camera.dist = 6.0 viewport.clippingplanes = (0.2,20) if len(ofs) == 0: return bb = aabb_create(*pts) center = vectorops.mul(vectorops.add(bb[0],bb[1]),0.5) viewport.camera.tgt = center radius = max(vectorops.distance(bb[0],center),0.1) viewport.camera.dist = 1.2*radius / math.tan(math.radians(viewport.fov*0.5)) #default: oblique view viewport.camera.rot = [0,math.radians(30),math.radians(45)] #fit a plane to these points try: centroid,normal = _fitPlane(ofs) except Exception as e: try: centroid,normal = _fitPlane(pts) except Exception as e: print "Exception occurred during fitting to points" print ofs print pts raise return if normal[2] > 0: normal = vectorops.mul(normal,-1) z,x,y = so3.matrix(so3.inv(so3.canonical(normal))) #print z,x,y<|fim▁hole|> radius = max([abs(vectorops.dot(x,vectorops.sub(center,pt))) for pt in pts] + [abs(vectorops.dot(y,vectorops.sub(center,pt)))*viewport.w/viewport.h for pt in pts]) zmin = min([vectorops.dot(z,vectorops.sub(center,pt)) for pt in pts]) zmax = max([vectorops.dot(z,vectorops.sub(center,pt)) for pt in pts]) #print "Viewing direction",normal,"at point",center,"with scene size",radius #orient camera to point along normal direction viewport.camera.tgt = center viewport.camera.dist = 1.2*radius / math.tan(math.radians(viewport.fov*0.5)) near,far = viewport.clippingplanes if viewport.camera.dist + zmin < near: near = max((viewport.camera.dist + zmin)*0.5, radius*0.1) if viewport.camera.dist + zmax > far: far = max((viewport.camera.dist + zmax)*1.5, radius*3) viewport.clippingplanes = (near,far) roll = 0 yaw = math.atan2(normal[0],normal[1]) pitch = math.atan2(-normal[2],vectorops.norm(normal[0:2])) #print "Roll pitch and yaw",roll,pitch,yaw #print "Distance",viewport.camera.dist viewport.camera.rot = [roll,pitch,yaw] def addText(name,text,pos=None): """Adds text to the visualizer. 
You must give an identifier to all pieces of text, which will be used to access the text as any other vis object. Parameters: - name: the text's unique identifier. - text: the string to be drawn - pos: the position of the string. If pos=None, this is added to the on-screen "console" display. If pos has length 2, it is the (x,y) position of the upper left corner of the text on the screen. Negative units anchor the text to the right or bottom of the window. If pos has length 3, the text is drawn in the world coordinates. To customize the text appearance, you can set the color, 'size' attribute, and 'position' attribute of the text using the identifier given in 'name'. """ global _vis _vis.add(name,text,True) if pos is not None: _vis.setAttribute(name,'position',pos) def clearText(): """Clears all text in the visualization.""" global _vis if _vis==None: return _vis.clearText() def addPlot(name): add(name,VisPlot()) def addPlotItem(name,itemname): global _vis if _vis==None: return _vis.addPlotItem(name,itemname) def logPlot(name,itemname,value): """Logs a custom visualization item to a plot""" global _vis if _vis==None: return _vis.logPlot(name,itemname,value) def logPlotEvent(name,eventname,color=None): """Logs an event on the plot.""" global _vis if _vis==None: return _vis.logPlotEvent(name,eventname,color) def hidePlotItem(name,itemname,hidden=True): global _vis if _vis==None: return _vis.hidePlotItem(name,itemname,hidden) def setPlotDuration(name,time): setAttribute(name,'duration',time) def setPlotRange(name,vmin,vmax): setAttribute(name,'range',(vmin,vmax)) def setPlotPosition(name,x,y): setAttribute(name,'position',(x,y)) def setPlotSize(name,w,h): setAttribute(name,'size',(w,h)) def savePlot(name,fn): global _vis if _vis==None: return _vis.savePlot(name,fn) def autoFitCamera(scale=1): global _vis if _vis==None: return print "klampt.vis: auto-fitting camera to scene." 
    _vis.autoFitCamera(scale)

def objectToVisType(item,world):
    """Resolves the drawable type of an item, disambiguating when the type
    module reports several candidates.  Returns a type string, or None if
    the item cannot be drawn unambiguously."""
    itypes = types.objectToTypes(item,world)
    if isinstance(itypes,(list,tuple)):
        #ambiguous, still need to figure out what to draw
        validtypes = []
        for t in itypes:
            if t == 'Config':
                if world != None and len(item) == world.robot(0).numLinks():
                    validtypes.append(t)
            elif t=='Vector3':
                validtypes.append(t)
            elif t=='RigidTransform':
                validtypes.append(t)
        if len(validtypes) > 1:
            print "Unable to draw item of ambiguous types",validtypes
            return
        if len(validtypes) == 0:
            print "Unable to draw any of types",itypes
            return
        return validtypes[0]
    return itypes

def aabb_create(*ptlist):
    """Returns the axis-aligned bounding box (bmin,bmax) of the given points.
    With no points, returns an 'empty' box (inf,-inf)."""
    if len(ptlist) == 0:
        return [float('inf')]*3,[float('-inf')]*3
    else:
        bmin,bmax = list(ptlist[0]),list(ptlist[0])
        for i in xrange(1,len(ptlist)):
            x = ptlist[i]
            bmin = [min(a,b) for (a,b) in zip(bmin,x)]
            bmax = [max(a,b) for (a,b) in zip(bmax,x)]
        return bmin,bmax

def aabb_expand(bb,bb2):
    """Returns the union of two axis-aligned bounding boxes."""
    bmin = [min(a,b) for a,b in zip(bb[0],bb2[0])]
    bmax = [max(a,b) for a,b in zip(bb[1],bb2[1])]
    return (bmin,bmax)

def aabb_empty(bb):
    """Returns True if the bounding box contains no points."""
    return any((a > b) for (a,b) in zip(bb[0],bb[1]))

#default slope-change threshold for compressing near-linear plot traces
_defaultCompressThreshold = 1e-2

class VisPlotItem:
    """One plotted item: a set of per-channel traces of (time,value) pairs."""
    def __init__(self,itemname,linkitem):
        self.name = itemname
        self.itemnames = []           #per-channel labels
        self.linkitem = linkitem      #VisAppearance whose config is sampled, or None for custom channels
        self.traces = []              #per-channel deques of (time,value)
        self.hidden = []              #per-channel visibility flags
        self.traceRanges = []         #per-channel (min,max) observed values
        self.luminosity = []          #per-channel brightness modulation
        self.compressThreshold = _defaultCompressThreshold
        if linkitem is not None:
            q = config.getConfig(linkitem.item)
            assert q is not None
            from collections import deque
            self.traces = [deque() for i in range(len(q))]
            self.itemnames = config.getConfigNames(linkitem.item)

    def customUpdate(self,item,t,v):
        """Logs value v at time t for the custom channel named item,
        creating the channel if it does not yet exist."""
        for i,itemname in enumerate(self.itemnames):
            if item == itemname:
                self.updateTrace(i,t,v)
                self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
                return
        else:
            #channel not found: append a new one (for/else runs when the loop didn't return)
            from collections import deque
            self.itemnames.append(item)
            self.traces.append(deque())
            i = len(self.itemnames)-1
            self.updateTrace(i,t,v)
            self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))
        #raise ValueError("Invalid item specified: "+str(item))

    def update(self,t):
        """Samples the linked item's configuration at time t into the traces."""
        if self.linkitem is None:
            return
        q = config.getConfig(self.linkitem.item)
        assert len(self.traces) == len(q)
        for i,v in enumerate(q):
            self.updateTrace(i,t,v)
            self.traceRanges[i] = (min(self.traceRanges[i][0],v),max(self.traceRanges[i][1],v))

    def discard(self,tstart):
        """Drops trace samples older than tstart (always keeps the last one)."""
        for t in self.traces:
            #NOTE(review): this return exits the whole method on the first short
            #trace; presumably 'continue' was intended -- confirm
            if len(t)<=1: return
            while len(t) >= 2:
                if t[1][0] < tstart:
                    t.popleft()
                else:
                    break

    def updateTrace(self,i,t,v):
        """Appends (t,v) to channel i, growing the per-channel bookkeeping lists
        on demand and compressing near-linear runs of samples."""
        import random
        assert i < len(self.traces)
        assert i <= len(self.hidden)
        assert i <= len(self.luminosity)
        while i >= len(self.hidden):
            self.hidden.append(False)
        while i >= len(self.traceRanges):
            self.traceRanges.append((v,v))
        if i >= len(self.luminosity):
            #first few channels get fixed brightnesses, the rest random ones
            initialLuminosity = [0.5,0.25,0.75,1.0]
            while i >= len(self.luminosity):
                if len(self.luminosity)<len(initialLuminosity):
                    self.luminosity.append(initialLuminosity[len(self.luminosity)])
                else:
                    self.luminosity.append(random.uniform(0,1))
        trace = self.traces[i]
        if len(trace) > 0 and trace[-1][0] == t:
            #same timestamp: overwrite rather than append
            trace[-1] = (t,v)
            return
        if self.compressThreshold is None:
            trace.append((t,v))
        else:
            if len(trace) < 2:
                trace.append((t,v))
            else:
                pprev = trace[-2]
                prev = trace[-1]
                assert prev > pprev,"Added two items with the same time?"
assert t > prev[0] slope_old = (prev[1]-pprev[1])/(prev[0]-pprev[0]) slope_new = (v-prev[1])/(t-prev[0]) if (slope_old > 0 != slope_new > 0) or abs(slope_old-slope_new) > self.compressThreshold: trace.append((t,v)) else: #near-linear, just extend along straight line trace[-1] = (t,v) class VisPlot: def __init__(self): self.items = [] self.colors = [] self.events = dict() self.eventColors = dict() self.outfile = None self.outformat = None def __del__(self): self.endSave() def update(self,t,duration,compressThreshold): for i in self.items: i.compressThreshold = compressThreshold i.update(t) if self.outfile: self.dumpCurrent() self.discard(t-duration) else: self.discard(t-60.0) def discard(self,tmin): for i in self.items: i.discard(tmin) delevents = [] for e,times in self.events.iteritems(): while len(times) > 0 and times[0] < tmin: times.popleft() if len(times)==0: delevents.append(e) for e in delevents: del self.events[e] def addEvent(self,name,t,color=None): if name in self.events: self.events[name].append(t) else: from collections import deque self.events[name] = deque([t]) if color == None: import random color = (random.uniform(0.01,1),random.uniform(0.01,1),random.uniform(0.01,1)) color = vectorops.mul(color,1.0/max(color)) if color != None: self.eventColors[name] = color if len(color)==3: self.eventColors[name] += [1.0] def autoRange(self): vmin = float('inf') vmax = -float('inf') for i in self.items: for j in xrange(len(i.traceRanges)): if not i.hidden[j]: vmin = min(vmin,i.traceRanges[j][0]) vmax = max(vmax,i.traceRanges[j][1]) if math.isinf(vmin): return (0.,1.) 
if vmax == vmin: vmax += 1.0 return (float(vmin),float(vmax)) def render(self,window,x,y,w,h,duration,vmin=None,vmax=None): if vmin == None: vmin,vmax = self.autoRange() import random while len(self.colors) < len(self.items): c = (random.uniform(0.01,1),random.uniform(0.01,1),random.uniform(0.01,1)) c = vectorops.mul(c,1.0/max(c)) self.colors.append(c) glColor3f(0,0,0) glBegin(GL_LINE_LOOP) glVertex2f(x,y) glVertex2f(x+w,y) glVertex2f(x+w,y+h) glVertex2f(x,y+h) glEnd() window.draw_text((x-18,y+4),'%.2f'%(vmax,),9) window.draw_text((x-18,y+h+4),'%.2f'%(vmin,),9) tmax = 0 for i in self.items: for trace in i.traces: if len(trace)==0: continue tmax = max(tmax,trace[-1][0]) for i,item in enumerate(self.items): for j,trace in enumerate(item.traces): if len(trace)==0: continue labelheight = trace[-1][1] if len(item.name)==0: label = item.itemnames[j] else: label = str(item.name) + '.' + item.itemnames[j] labelheight = (labelheight - vmin)/(vmax-vmin) labelheight = y + h - h*labelheight glColor3fv(vectorops.mul(self.colors[i],item.luminosity[j])) window.draw_text((x+w+3,labelheight+4),label,9) glBegin(GL_LINE_STRIP) for k in xrange(len(trace)-1): if trace[k+1][0] > tmax-duration: u,v = trace[k] if trace[k][0] < tmax-duration: #interpolate so x is at tmax-duration u2,v2 = trace[k+1] #u + s(u2-u) = tmax-duration s = (tmax-duration-u)/(u2-u) v = v + s*(v2-v) u = (tmax-duration) u = (u-(tmax-duration))/duration v = (v-vmin)/(vmax-vmin) glVertex2f(x+w*u,y+(1-v)*h) u,v = trace[-1] u = (u-(tmax-duration))/duration v = (v-vmin)/(vmax-vmin) glVertex2f(x+w*u,y+(1-v)*h) glEnd() if len(self.events) > 0: for e,times in self.events.iteritems(): for t in times: if t < tmax-duration: continue labelx = (t - (tmax-duration))/duration labelx = x + w*labelx c = self.eventColors[e] glColor4f(c[0]*0.5,c[1]*0.5,c[2]*0.5,c[3]) window.draw_text((labelx,y+h+12),e,9) glEnable(GL_BLEND) glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA) glBegin(GL_LINES) for e,times in self.events.iteritems(): for t in 
times: if t < tmax-duration: continue labelx = (t - (tmax-duration))/duration labelx = x + w*labelx glColor4f(c[0],c[1],c[2],c[3]*0.5) glVertex2f(labelx,y) glVertex2f(labelx,y+h) glEnd() glDisable(GL_BLEND) def beginSave(self,fn): import os ext = os.path.splitext(fn)[1] if ext == '.csv' or ext == '.traj': self.outformat = ext else: raise ValueError("Invalid extension for visualization plot, can only accept .csv or .traj") self.outfile = open(fn,'w') if self.outformat == '.csv': #output a header self.outfile.write("time") for i in self.items: self.outfile.write(",") fullitemnames = [] if len(i.name) != 0: name = None if isinstance(i.name,(list,tuple)): name = '.'.join(v for v in i.name) else: name = i.name fullitemnames = [name+'.'+itemname for itemname in i.itemnames] else: fullitemnames = i.itemnames self.outfile.write(",".join(fullitemnames)) self.outfile.write("\n") self.dumpAll() def endSave(self): if self.outfile is not None: self.outfile.close() def dumpAll(self): assert self.outfile is not None if len(self.items) == 0: return cols = [] mindt = float('inf') mint = float('inf') maxt = -float('inf') for i in self.items: if len(i.traces) == 0: continue for j,trace in enumerate(i.traces): times,vals = zip(*trace) if isinstance(vals[0],(int,float)): vals = [[v] for v in vals] traj = Trajectory(times,vals) cols.append(traj) mint = min(mint,traj.times[0]) maxt = max(maxt,traj.times[-1]) for k in xrange(len(traj.times)-1): mindt = min(mindt,traj.times[k+1] - traj.times[k]) assert mindt > 0, "For some reason, there is a duplicate time?" 
N = int((maxt - mint)/mindt) dt = (maxt - mint)/N times = [mint + i*(maxt-mint)/N for i in range(N+1)] for i in xrange(N+1): vals = [col.eval(times[i]) for col in cols] if self.outformat == '.csv': self.outfile.write(str(times[i])+',') self.outfile.write(','.join([str(v[0]) for v in vals])) self.outfile.write('\n') else: self.outfile.write(str(times[i])+'\t') self.outfile.write(str(len(vals))+' ') self.outfile.write(' '.join([str(v[0]) for v in vals])) self.outfile.write('\n') def dumpCurrent(self): if len(self.items) == 0: return assert len(self.items[0].trace) > 0, "Item has no channels?" assert len(self.items[0].trace[0]) > 0, "Item has no readings yet?" t = self.items[0].trace[0][-1] vals = [] for i in self.items: if len(i.trace) == 0: continue for j,trace in enumerate(i.trace): vals.append(trace[-1][1]) if self.outformat == '.csv': self.outfile.write(str(t)+',') self.outfile.write(','.join([str(v) for v in vals])) self.outfile.write('\n') else: self.outfile.write(str(t)+'\t') self.outfile.write(str(len(vals))+' ') self.outfile.write(' '.join([str(v) for v in vals])) self.outfile.write('\n') class VisAppearance: def __init__(self,item,name = None): self.name = name self.hidden = False self.useDefaultAppearance = True self.customAppearance = None self.customDrawFunc = None #For group items, this allows you to customize appearance of sub-items self.subAppearances = {} self.animation = None self.animationStartTime = 0 self.animationSpeed = 1.0 self.attributes = {} #used for Qt text rendering self.widget = None #used for visual editing of certain items self.editor = None #cached drawing self.displayCache = [glcommon.CachedGLObject()] self.displayCache[0].name = name #temporary configuration of the item self.drawConfig = None self.setItem(item) def setItem(self,item): self.item = item self.subAppearances = {} #Parse out sub-items which can have their own appearance changed if isinstance(item,WorldModel): for i in xrange(item.numRobots()): 
self.subAppearances[("Robot",i)] = VisAppearance(item.robot(i),item.robot(i).getName()) for i in xrange(item.numRigidObjects()): self.subAppearances[("RigidObject",i)] = VisAppearance(item.rigidObject(i),item.rigidObject(i).getName()) for i in xrange(item.numTerrains()): self.subAppearances[("Terrain",i)] = VisAppearance(item.terrain(i),item.terrain(i).getName()) elif isinstance(item,RobotModel): for i in xrange(item.numLinks()): self.subAppearances[("Link",i)] = VisAppearance(item.link(i),item.link(i).getName()) elif isinstance(item,coordinates.Group): for n,f in item.frames.iteritems(): self.subAppearances[("Frame",n)] = VisAppearance(f,n) for n,p in item.points.iteritems(): self.subAppearances[("Point",n)] = VisAppearance(p,n) for n,d in item.directions.iteritems(): self.subAppearances[("Direction",n)] = VisAppearance(d,n) for n,g in item.subgroups.iteritems(): self.subAppearances[("Subgroup",n)] = VisAppearance(g,n) elif isinstance(item,Hold): if item.ikConstraint is not None: self.subAppearances["ikConstraint"] = VisAppearance(item.ikConstraint,"ik") for n,c in enumerate(item.contacts): self.subAppearances[("contact",n)] = VisAppearance(c,n) for (k,a) in self.subAppearances.iteritems(): a.attributes = self.attributes def markChanged(self): for c in self.displayCache: c.markChanged() for (k,a) in self.subAppearances.iteritems(): a.markChanged() self.update_editor(True) self.doRefresh = True def destroy(self): for c in self.displayCache: c.destroy() for (k,a) in self.subAppearances.iteritems(): a.destroy() self.subAppearances = {} def drawText(self,text,point): """Draws the given text at the given point""" if self.attributes.get("text_hidden",False): return self.widget.addLabel(text,point[:],[0,0,0]) def updateAnimation(self,t): """Updates the configuration, if it's being animated""" if not self.animation: self.drawConfig = None else: u = self.animationSpeed*(t-self.animationStartTime) q = self.animation.eval(u,self.animationEndBehavior) self.drawConfig = q for 
n,app in self.subAppearances.iteritems(): app.updateAnimation(t) def updateTime(self,t): """Updates in real time""" if isinstance(self.item,VisPlot): compressThreshold = self.attributes.get('compress',_defaultCompressThreshold) duration = self.attributes.get('duration',5.) for items in self.item.items: if items.linkitem: items.linkitem.swapDrawConfig() self.item.update(t,duration,compressThreshold) for items in self.item.items: if items.linkitem: items.linkitem.swapDrawConfig() def swapDrawConfig(self): """Given self.drawConfig!=None, swaps out the item's curren configuration with self.drawConfig. Used for animations""" if self.drawConfig: try: newDrawConfig = config.getConfig(self.item) #self.item = config.setConfig(self.item,self.drawConfig) self.drawConfig = newDrawConfig except Exception as e: print "Warning, exception thrown during animation update. Probably have incorrect length of configuration" import traceback traceback.print_exc() pass for n,app in self.subAppearances.iteritems(): app.swapDrawConfig() def clearDisplayLists(self): if isinstance(self.item,WorldModel): for r in range(self.item.numRobots()): for link in range(self.item.robot(r).numLinks()): self.item.robot(r).link(link).appearance().refresh() for i in range(self.item.numRigidObjects()): self.item.rigidObject(i).appearance().refresh() for i in range(self.item.numTerrains()): self.item.terrain(i).appearance().refresh() elif hasattr(self.item,'appearance'): self.item.appearance().refresh() elif isinstance(self.item,RobotModel): for link in range(self.item.numLinks()): self.item.link(link).appearance().refresh() for n,o in self.subAppearances.iteritems(): o.clearDisplayLists() self.markChanged() def draw(self,world=None): """Draws the specified item in the specified world. 
If name is given and text_hidden != False, then the name of the item is shown.""" if self.hidden: return if self.customDrawFunc is not None: self.customDrawFunc(self.item) return item = self.item name = self.name #set appearance if not self.useDefaultAppearance and hasattr(item,'appearance'): if not hasattr(self,'oldAppearance'): self.oldAppearance = item.appearance().clone() if self.customAppearance != None: #print "Changing appearance of",name item.appearance().set(self.customAppearance) elif "color" in self.attributes: #print "Changing color of",name item.appearance().setColor(*self.attributes["color"]) if len(self.subAppearances)!=0: for n,app in self.subAppearances.iteritems(): app.widget = self.widget app.draw(world) elif hasattr(item,'drawGL'): item.drawGL() elif hasattr(item,'drawWorldGL'): item.drawWorldGL() elif isinstance(item,str): pos = self.attributes.get("position",None) if pos is not None and len(pos)==3: col = self.attributes.get("color",(0,0,0)) self.widget.addLabel(self.item,pos,col) elif isinstance(item,VisPlot): pass elif isinstance(item,Trajectory): doDraw = False centroid = None if isinstance(item,RobotTrajectory): ees = self.attributes.get("endeffectors",[-1]) if world: doDraw = (len(ees) > 0) robot = world.robot(0) for i,ee in enumerate(ees): if ee < 0: ees[i] = robot.numLinks()-1 if doDraw: robot.setConfig(item.milestones[0]) centroid = vectorops.div(vectorops.add(*[robot.link(ee).getTransform()[1] for ee in ees]),len(ees)) elif isinstance(item,SE3Trajectory): doDraw = True centroid = item.milestones[0][9:] else: if len(item.milestones[0]) == 3: #R3 trajectory doDraw = True centroid = item.milestones[0] elif len(item.milestones[0]) == 2: #R2 trajectory doDraw = True centroid = item.milestones[0]+[0.0] if doDraw: def drawRaw(): pointTrajectories = [] if isinstance(item,RobotTrajectory): robot = world.robot(0) ees = self.attributes.get("endeffectors",[-1]) for i,ee in enumerate(ees): if ee < 0: ees[i] = robot.numLinks()-1 if world: for ee in 
ees: pointTrajectories.append([]) for m in item.milestones: robot.setConfig(m) for ee,eetraj in zip(ees,pointTrajectories): eetraj.append(robot.link(ee).getTransform()[1]) elif isinstance(item,SE3Trajectory): pointTrajectories.append([]) for m in item.milestones: pointTrajectories[-1].append(m[9:]) else: if len(item.milestones[0]) == 3: #R3 trajectory pointTrajectories.append(item.milestones) elif len(item.milestones[0]) == 2: #R2 trajectory pointTrajectories.append([v + [0.0] for v in item.milestones]) glDisable(GL_LIGHTING) glLineWidth(self.attributes.get("width",3)) glColor4f(*self.attributes.get("color",[1,0.5,0,1])) for traj in pointTrajectories: if len(traj) == 1: glBegin(GL_POINTS) glVertex3f(*traj[0]) glEnd() if len(traj) >= 2: glBegin(GL_LINE_STRIP) for p in traj: glVertex3f(*p) glEnd() glLineWidth(1.0) self.displayCache[0].draw(drawRaw,se3.identity()) if name != None: self.drawText(name,centroid) elif isinstance(item,coordinates.Point): def drawRaw(): glDisable(GL_LIGHTING) glEnable(GL_POINT_SMOOTH) glPointSize(self.attributes.get("size",5.0)) glColor4f(*self.attributes.get("color",[0,0,0,1])) glBegin(GL_POINTS) glVertex3f(0,0,0) glEnd() #write name glDisable(GL_DEPTH_TEST) self.displayCache[0].draw(drawRaw,[so3.identity(),item.worldCoordinates()]) glEnable(GL_DEPTH_TEST) if name != None: self.drawText(name,item.worldCoordinates()) elif isinstance(item,coordinates.Direction): def drawRaw(): glDisable(GL_LIGHTING) glDisable(GL_DEPTH_TEST) L = self.attributes.get("length",0.15) source = [0,0,0] glColor4f(*self.attributes.get("color",[0,1,1,1])) glBegin(GL_LINES) glVertex3f(*source) glVertex3f(*vectorops.mul(item.localCoordinates(),L)) glEnd() glEnable(GL_DEPTH_TEST) #write name self.displayCache[0].draw(drawRaw,item.frame().worldCoordinates(),parameters = item.localCoordinates()) if name != None: self.drawText(name,vectorops.add(item.frame().worldCoordinates()[1],item.worldCoordinates())) elif isinstance(item,coordinates.Frame): t = item.worldCoordinates() 
if item.parent() != None: tp = item.parent().worldCoordinates() else: tp = se3.identity() tlocal = item.relativeCoordinates() def drawRaw(): glDisable(GL_DEPTH_TEST) glDisable(GL_LIGHTING) glLineWidth(2.0) gldraw.xform_widget(tlocal,self.attributes.get("length",0.1),self.attributes.get("width",0.01)) glLineWidth(1.0) #draw curve between frame and parent if item.parent() != None: d = vectorops.norm(tlocal[1]) vlen = d*0.5 v1 = so3.apply(tlocal[0],[-vlen]*3) v2 = [vlen]*3 #glEnable(GL_BLEND) #glBlendFunc(GL_SRC_ALPHA,GL_ONE_MINUS_SRC_ALPHA) #glColor4f(1,1,0,0.5) glColor3f(1,1,0) gldraw.hermite_curve(tlocal[1],v1,[0,0,0],v2,0.03*max(0.1,vectorops.norm(tlocal[1]))) #glDisable(GL_BLEND) glEnable(GL_DEPTH_TEST) #For some reason, cached drawing is causing OpenGL problems #when the frame is rapidly changing self.displayCache[0].draw(drawRaw,transform=tp, parameters = tlocal) #glPushMatrix() #glMultMatrixf(sum(zip(*se3.homogeneous(tp)),())) #drawRaw() #glPopMatrix() #write name if name != None: self.drawText(name,t[1]) elif isinstance(item,coordinates.Transform): #draw curve between frames t1 = item.source().worldCoordinates() if item.destination() != None: t2 = item.destination().worldCoordinates() else: t2 = se3.identity() d = vectorops.distance(t1[1],t2[1]) vlen = d*0.5 v1 = so3.apply(t1[0],[-vlen]*3) v2 = so3.apply(t2[0],[vlen]*3) def drawRaw(): glDisable(GL_DEPTH_TEST) glDisable(GL_LIGHTING) glColor3f(1,1,1) gldraw.hermite_curve(t1[1],v1,t2[1],v2,0.03) glEnable(GL_DEPTH_TEST) #write name at curve self.displayCache[0].draw(drawRaw,transform=None,parameters = (t1,t2)) if name != None: self.drawText(name,spline.hermite_eval(t1[1],v1,t2[1],v2,0.5)) elif isinstance(item,coordinates.Group): pass elif isinstance(item,ContactPoint): def drawRaw(): glDisable(GL_LIGHTING) glEnable(GL_POINT_SMOOTH) glPointSize(self.attributes.get("size",5.0)) l = self.attributes.get("length",0.05) glColor4f(*self.attributes.get("color",[1,0.5,0,1])) glBegin(GL_POINTS) glVertex3f(0,0,0) glEnd() 
glBegin(GL_LINES) glVertex3f(0,0,0) glVertex3f(l,0,0) glEnd() self.displayCache[0].draw(drawRaw,[so3.canonical(item.n),item.x]) elif isinstance(item,Hold): pass else: try: itypes = objectToVisType(item,world) except: print "visualization.py: Unsupported object type",item,"of type:",item.__class__.__name__ return if itypes == None: print "Unable to convert item",item,"to drawable" return elif itypes == 'Config': if world: robot = world.robot(0) if not self.useDefaultAppearance: oldAppearance = [robot.link(i).appearance().clone() for i in xrange(robot.numLinks())] for i in xrange(robot.numLinks()): if self.customAppearance is not None: robot.link(i).appearance().set(self.customAppearance) elif "color" in self.attributes: robot.link(i).appearance().setColor(*self.attributes["color"]) oldconfig = robot.getConfig() robot.setConfig(item) robot.drawGL() robot.setConfig(oldconfig) if not self.useDefaultAppearance: for (i,app) in enumerate(oldAppearance): robot.link(i).appearance().set(app) else: print "Unable to draw Config tiems without a world" elif itypes == 'Configs': if world: maxConfigs = self.attributes.get("maxConfigs",min(10,len(item))) robot = world.robot(0) if not self.useDefaultAppearance: oldAppearance = [robot.link(i).appearance().clone() for i in xrange(robot.numLinks())] for i in xrange(robot.numLinks()): if self.customAppearance is not None: robot.link(i).appearance().set(self.customAppearance) elif "color" in self.attributes: robot.link(i).appearance().setColor(*self.attributes["color"]) oldconfig = robot.getConfig() for i in xrange(maxConfigs): idx = int(i*len(item))/maxConfigs robot.setConfig(item[idx]) robot.drawGL() robot.setConfig(oldconfig) if not self.useDefaultAppearance: for (i,app) in enumerate(oldAppearance): robot.link(i).appearance().set(app) else: print "Unable to draw Configs items without a world" elif itypes == 'Vector3': def drawRaw(): glDisable(GL_LIGHTING) glEnable(GL_POINT_SMOOTH) glPointSize(self.attributes.get("size",5.0)) 
glColor4f(*self.attributes.get("color",[0,0,0,1])) glBegin(GL_POINTS) glVertex3f(0,0,0) glEnd() self.displayCache[0].draw(drawRaw,[so3.identity(),item]) if name != None: self.drawText(name,item) elif itypes == 'RigidTransform': def drawRaw(): fancy = self.attributes.get("fancy",False) if fancy: glEnable(GL_LIGHTING) else: glDisable(GL_LIGHTING) gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01),fancy=fancy) self.displayCache[0].draw(drawRaw,transform=item) if name != None: self.drawText(name,item[1]) elif itypes == 'IKGoal': if hasattr(item,'robot'): #need this to be built with a robot element. #Otherwise, can't determine the correct transforms robot = item.robot elif world: if world.numRobots() >= 1: robot = world.robot(0) else: robot = None else: robot = None if robot != None: link = robot.link(item.link()) dest = robot.link(item.destLink()) if item.destLink()>=0 else None while len(self.displayCache) < 3: self.displayCache.append(glcommon.CachedGLObject()) self.displayCache[1].name = self.name+" target position" self.displayCache[2].name = self.name+" curve" if item.numPosDims() != 0: lp,wp = item.getPosition() #set up parameters of connector p1 = se3.apply(link.getTransform(),lp) if dest != None: p2 = se3.apply(dest.getTransform(),wp) else: p2 = wp d = vectorops.distance(p1,p2) v1 = [0.0]*3 v2 = [0.0]*3 if item.numRotDims()==3: #full constraint R = item.getRotation() def drawRaw(): gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01)) t1 = se3.mul(link.getTransform(),(so3.identity(),lp)) t2 = (R,wp) if dest==None else se3.mul(dest.getTransform(),(R,wp)) self.displayCache[0].draw(drawRaw,transform=t1) self.displayCache[1].draw(drawRaw,transform=t2) vlen = d*0.1 v1 = so3.apply(t1[0],[-vlen]*3) v2 = so3.apply(t2[0],[vlen]*3) elif item.numRotDims()==0: #point constraint def drawRaw(): glDisable(GL_LIGHTING) glEnable(GL_POINT_SMOOTH) 
glPointSize(self.attributes.get("size",5.0)) glColor4f(*self.attributes.get("color",[0,0,0,1])) glBegin(GL_POINTS) glVertex3f(0,0,0) glEnd() self.displayCache[0].draw(drawRaw,transform=(so3.identity(),p1)) self.displayCache[1].draw(drawRaw,transform=(so3.identity(),p2)) #set up the connecting curve vlen = d*0.5 d = vectorops.sub(p2,p1) v1 = vectorops.mul(d,0.5) #curve in the destination v2 = vectorops.cross((0,0,0.5),d) else: #hinge constraint p = [0,0,0] d = [0,0,0] def drawRawLine(): glDisable(GL_LIGHTING) glEnable(GL_POINT_SMOOTH) glPointSize(self.attributes.get("size",5.0)) glColor4f(*self.attributes.get("color",[0,0,0,1])) glBegin(GL_POINTS) glVertex3f(*p) glEnd() glColor4f(*self.attributes.get("color",[0.5,0,0.5,1])) glLineWidth(self.attributes.get("width",3.0)) glBegin(GL_LINES) glVertex3f(*p) glVertex3f(*vectorops.madd(p,d,self.attributes.get("length",0.1))) glEnd() glLineWidth(1.0) ld,wd = item.getRotationAxis() p = lp d = ld self.displayCache[0].draw(drawRawLine,transform=link.getTransform(),parameters=(p,d)) p = wp d = wd self.displayCache[1].draw(drawRawLine,transform=dest.getTransform() if dest else se3.identity(),parameters=(p,d)) #set up the connecting curve d = vectorops.sub(p2,p1) v1 = vectorops.mul(d,0.5) #curve in the destination v2 = vectorops.cross((0,0,0.5),d) def drawConnection(): glDisable(GL_LIGHTING) glDisable(GL_DEPTH_TEST) glColor3f(1,0.5,0) gldraw.hermite_curve(p1,v1,p2,v2,0.03*max(0.1,vectorops.distance(p1,p2))) #glBegin(GL_LINES) #glVertex3f(*p1) #glVertex3f(*p2) #glEnd() glEnable(GL_DEPTH_TEST) #TEMP for some reason the cached version sometimes gives a GL error self.displayCache[2].draw(drawConnection,transform=None,parameters = (p1,v1,p2,v2)) #drawConnection() if name != None: self.drawText(name,wp) else: wp = link.getTransform()[1] if item.numRotDims()==3: #full constraint R = item.getRotation() def drawRaw(): gldraw.xform_widget(se3.identity(),self.attributes.get("length",0.1),self.attributes.get("width",0.01)) 
self.displayCache[0].draw(drawRaw,transform=link.getTransform()) self.displayCache[1].draw(drawRaw,transform=se3.mul(link.getTransform(),(R,[0,0,0]))) elif item.numRotDims() > 0: #axis constraint d = [0,0,0] def drawRawLine(): glDisable(GL_LIGHTING) glColor4f(*self.attributes.get("color",[0.5,0,0.5,1])) glLineWidth(self.attributes.get("width",3.0)) glBegin(GL_LINES) glVertex3f(0,0,0) glVertex3f(*vectorops.mul(d,self.attributes.get("length",0.1))) glEnd() glLineWidth(1.0) ld,wd = item.getRotationAxis() d = ld self.displayCache[0].draw(drawRawLine,transform=link.getTransform(),parameters=d) d = wd self.displayCache[1].draw(drawRawLine,transform=(dest.getTransform()[0] if dest else so3.identity(),wp),parameters=d) else: #no drawing pass if name != None: self.drawText(name,wp) else: print "Unable to draw item of type",itypes #revert appearance if not self.useDefaultAppearance and hasattr(item,'appearance'): item.appearance().set(self.oldAppearance) def getBounds(self): """Returns a bounding box (bmin,bmax) or None if it can't be found""" if len(self.subAppearances)!=0: bb = aabb_create() for n,app in self.subAppearances.iteritems(): bb = aabb_expand(bb,app.getBounds()) return bb item = self.item if isinstance(item,coordinates.Point): return [item.worldCoordinates(),item.worldCoordinates()] elif isinstance(item,coordinates.Direction): T = item.frame().worldCoordinates() d = item.localCoordinates() L = self.attributes.get("length",0.1) return aabb_create(T[1],se3.apply(T,vectorops.mul(d,L))) elif isinstance(item,coordinates.Frame): T = item.worldCoordinates() L = self.attributes.get("length",0.1) return aabb_create(T[1],se3.apply(T,(L,0,0)),se3.apply(T,(0,L,0)),se3.apply(T,(0,0,L))) elif isinstance(item,ContactPoint): L = self.attributes.get("length",0.05) return aabb_create(item.x,vectorops.madd(item.x,item.n,L)) elif isinstance(item,WorldModel): pass elif hasattr(item,'geometry'): return item.geometry().getBB() elif isinstance(item,(str,VisPlot)): pass else: try: vtype 
= objectToVisType(item,None) if 'Vector3' == vtype: #assumed to be a point return (item,item) elif 'RigidTransform' == vtype: #assumed to be a rigid transform return (item[1],item[1]) except Exception: pass print "Empty bound for object",self.name,"type",self.item.__class__.__name__ return aabb_create() def getSubItem(self,path): if len(path) == 0: return self for k,v in self.subAppearances.iteritems(): if v.name == path[0]: try: return v.getSubItem(path[1:]) except ValueError,e: raise ValueError("Invalid sub-path specified "+str(path)+" at "+str(e)) raise ValueError("Invalid sub-item specified "+path[0]) def make_editor(self): if self.editor != None: return item = self.item if isinstance(item,coordinates.Point): res = PointPoser() res.set(self.item.worldCoordinates()) res.setAxes(self.item.frame().worldCoordinates()[0]) elif isinstance(item,coordinates.Direction): res = PointPoser() res.set(self.item.worldCoordinates()) res.setAxes(self.item.frame().worldCoordinates()[0]) elif isinstance(item,coordinates.Frame): res = TransformPoser() res.set(*self.item.worldCoordinates()) elif isinstance(self.item,RobotModel): res = RobotPoser(self.item) self.hidden = True elif isinstance(self.item,SubRobotModel): res = RobotPoser(self.item._robot) res.setActiveDofs(self.item.links); self.hidden = True elif isinstance(self.item,RigidObjectModel): res = ObjectPoser(self.item) elif isinstance(self.item,(list,tuple)): #determine if it's a rotation, transform, or point itype = objectToVisType(self.item,None) if itype == 'Vector3': res = PointPoser() res.set(self.item) elif itype == 'Matrix3': res = TransformPoser() res.enableRotation(True) res.enableTranslation(False) res.set(self.item) elif itype == 'RigidTransform': res = TransformPoser() res.enableRotation(True) res.enableTranslation(True) res.set(*self.item) else: print "VisAppearance.make_editor(): Warning, editor for object of type",itype,"not defined" return else: print "VisAppearance.make_editor(): Warning, editor for object 
of type",self.item.__class__.__name__,"not defined" return self.editor = res def update_editor(self,item_to_editor=False): for (name,item) in self.subAppearances.iteritems(): item.update_editor(item_to_editor) if self.editor == None: return item = self.item if item_to_editor: if isinstance(item,coordinates.Point): self.editor.set(self.item.worldCoordinates()) elif isinstance(item,coordinates.Direction): self.editor.set(self.item.worldCoordinates()) elif isinstance(item,coordinates.Frame): self.editor.set(*self.item.worldCoordinates()) elif isinstance(self.item,RobotModel): self.editor.set(self.item.getConfig()) elif isinstance(self.item,SubRobotModel): self.editor.set(self.item.tofull(self.item.getConfig())) elif isinstance(self.item,RigidObjectModel): self.editor.set(*self.item.getTransform()) elif isinstance(self.item,(list,tuple)): itype = objectToVisType(self.item,None) if itype in ('Vector3','Matrix3'): self.editor.set(self.item) elif itype == 'RigidTransform': self.editor.set(*self.item) else: raise RuntimeError("Uh... unsupported type with an editor?") else: if not self.editor.hasFocus(): return if isinstance(item,coordinates.Point): self.item._localCoordinates = se3.apply(se3.inv(self.item._frame.worldCoordinates()),self.editor.get()) elif isinstance(item,coordinates.Direction): self.item._localCoordinates = se3.apply(se3.inv(self.item._frame.worldCoordinates()),self.editor.get()) elif isinstance(item,coordinates.Frame): self.item._worldCoordinates = self.editor.get() self.item._relativeCoordinates = se3.mul(se3.inv(self.item.parent().worldCoordinates()),self.editor.get()) #TODO: updating downstream frames? 
elif isinstance(self.item,RobotModel): self.item.setConfig(self.editor.getConditioned(self.item.getConfig())) elif isinstance(self.item,SubRobotModel): self.item.setConfig(self.item.fromfull(self.editor.get())) elif isinstance(self.item,RigidObjectModel): self.item.setTransform(*self.editor.get()) elif isinstance(self.item,(tuple,list)): def setList(a,b): if isinstance(a,(list,tuple)) and isinstance(b,(list,tuple)): if len(a) == len(b): for i in xrange(len(a)): if not setList(a[i],b[i]): if isinstance(a,list): a[i] = b[i] else: return False return True return False v = self.editor.get() if not setList(self.item,v): self.item = v elif isinstance(self.item,tuple): print "Edited a tuple... maybe a point or an xform? can't actually edit" self.item = self.editor.get() else: raise RuntimeError("Uh... unsupported type with an editor?") def remove_editor(self): self.editor = None self.hidden = False class VisualizationPlugin(glcommon.GLWidgetPlugin): def __init__(self): glcommon.GLWidgetPlugin.__init__(self) self.items = {} self.labels = [] self.t = time.time() self.startTime = self.t self.animating = True self.currentAnimationTime = 0 self.doRefresh = False def initialize(self): #keep or refresh display lists? 
#self._clearDisplayLists() return glcommon.GLWidgetPlugin.initialize(self) def addLabel(self,text,point,color): self.labels.append((text,point,color)) def display(self): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.display(self) self.labels = [] world = self.items.get('world',None) if world != None: world=world.item for (k,v) in self.items.iteritems(): v.widget = self v.swapDrawConfig() v.draw(world) v.swapDrawConfig() v.widget = None #allows garbage collector to delete these objects #cluster label points pointTolerance = self.view.camera.dist*0.03 pointHash = {} for (text,point,color) in self.labels: index = tuple([int(x/pointTolerance) for x in point]) try: pointHash[index][1].append((text,color)) except KeyError: pointHash[index] = [point,[(text,color)]] for (p,items) in pointHash.itervalues(): self._drawLabelRaw(p,*zip(*items)) _globalLock.release() def display_screen(self): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.display_screen(self) cx = 20 cy = 20 glDisable(GL_LIGHTING) glDisable(GL_DEPTH_TEST) for (k,v) in self.items.iteritems(): if isinstance(v.item,VisPlot): pos = v.attributes.get('position',None) duration = v.attributes.get('duration',5.) 
vrange = v.attributes.get('range',(None,None)) w,h = v.attributes.get('size',(200,150)) if pos is None: v.item.render(self.window,cx,cy,w,h,duration,vrange[0],vrange[1]) cy += h+18 else: x = pos[0] y = pos[1] if x < 0: x = self.view.w + x if y < 0: y = self.view.h + y v.item.render(self.window,x,y,w,h,duration,vrange[0],vrange[1]) for (k,v) in self.items.iteritems(): if isinstance(v.item,str): pos = v.attributes.get('position',None) col = v.attributes.get('color',(0,0,0)) size = v.attributes.get('size',12) if pos is None: #draw at console self.window.draw_text((cx,cy+size),v.item,size,col) cy += (size*15)/10 elif len(pos)==2: x = pos[0] y = pos[1] if x < 0: x = self.view.w + x if y < 0: y = self.view.h + y self.window.draw_text((x,y+size),v.item,size,col) glEnable(GL_DEPTH_TEST) _globalLock.release() def reshapefunc(self,w,h): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.reshapefunc(self,w,h) _globalLock.release() def keyboardfunc(self,c,x,y): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.keyboardfunc(self,c,x,y) _globalLock.release() def keyboardupfunc(self,c,x,y): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.keyboardupfunc(self,c,x,y) _globalLock.release() def mousefunc(self,button,state,x,y): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.mousefunc(self,button,state,x,y) _globalLock.release() def motionfunc(self,x,y,dx,dy): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.motionfunc(self,x,y,dx,dy) _globalLock.release() def eventfunc(self,type,args=""): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.eventfunc(self,type,args) _globalLock.release() def closefunc(self): global _globalLock _globalLock.acquire() glcommon.GLWidgetPlugin.closefunc(self) _globalLock.release() def _drawLabelRaw(self,point,textList,colorList): #assert not self.makingDisplayList,"drawText must be called outside of display list" assert self.window != None for i,(text,c) in 
enumerate(zip(textList,colorList)): if i+1 < len(textList): text = text+"," projpt = self.view.project(point,clip=False) if projpt[2] > self.view.clippingplanes[0]: d = float(12)/float(self.view.w)*projpt[2]*0.7 point = vectorops.add(point,so3.apply(so3.inv(self.view.camera.matrix()[0]),(0,-d,0))) glDisable(GL_LIGHTING) glDisable(GL_DEPTH_TEST) glColor3f(*c) self.draw_text(point,text,size=12) glEnable(GL_DEPTH_TEST) def _clearDisplayLists(self): for i in self.items.itervalues(): i.clearDisplayLists() def idle(self): global _globalLock _globalLock.acquire() oldt = self.t self.t = time.time() if self.animating: self.currentAnimationTime += (self.t - oldt) for (k,v) in self.items.iteritems(): #do animation updates v.updateAnimation(self.currentAnimationTime) for (k,v) in self.items.iteritems(): #do other updates v.updateTime(self.t-self.startTime) _globalLock.release() return False def getItem(self,item_name): """Returns an VisAppearance according to the given name or path""" if isinstance(item_name,(list,tuple)): components = item_name if len(components)==1: return self.getItem(components[0]) if components[0] not in self.items: raise ValueError("Invalid top-level item specified: "+item_name) return self.items[components[0]].getSubItem(components[1:]) if item_name in self.items: return self.items[item_name] def dirty(self,item_name='all'): """Marks an item or everything as dirty, forcing a deep redraw.""" global _globalLock _globalLock.acquire() if item_name == 'all': if (name,itemvis) in self.items.iteritems(): itemvis.markChanged() else: self.getItem(item_name).markChanged() _globalLock.release() def clear(self): """Clears the visualization world""" global _globalLock _globalLock.acquire() for (name,itemvis) in self.items.iteritems(): itemvis.destroy() self.items = {} _globalLock.release() def clearText(self): """Clears all text in the visualization.""" global _globalLock _globalLock.acquire() del_items = [] for (name,itemvis) in self.items.iteritems(): if 
isinstance(itemvis.item,str):
                itemvis.destroy()
                del_items.append(name)
        # deletion is deferred so we don't mutate the dict while iterating
        for n in del_items:
            del self.items[n]
        _globalLock.release()

    def listItems(self,root=None,indent=0):
        """Prints out all items in the visualization world."""
        if root == None:
            for name,value in self.items.iteritems():
                self.listItems(value,indent)
        else:
            if isinstance(root,str):
                root = self.getItem(root)
            if indent > 0:
                print " "*(indent-1),
            print root.name
            for n,v in root.subAppearances.iteritems():
                self.listItems(v,indent+2)

    def add(self,name,item,keepAppearance=False):
        """Adds a named item to the visualization world.  If the item already
        exists, the appearance information will be reinitialized if keepAppearance=False
        (default) or be kept if keepAppearance=True."""
        global _globalLock
        assert not isinstance(name,(list,tuple)),"Cannot add sub-path items"
        _globalLock.acquire()
        if keepAppearance and name in self.items:
            self.items[name].setItem(item)
        else:
            #need to erase prior item visualizer
            if name in self.items:
                self.items[name].destroy()
            app = VisAppearance(item,name)
            self.items[name] = app
        _globalLock.release()
        #self.refresh()

    def animate(self,name,animation,speed=1.0,endBehavior='loop'):
        # Attaches an animation (a Trajectory, a HermiteTrajectory, or a plain
        # list of milestones) to the named item, starting at the current
        # animation clock.
        global _globalLock
        _globalLock.acquire()
        if hasattr(animation,'__iter__'):
            #a list of milestones -- loop through them with 1s delay
            print "visualization.animate(): Making a Trajectory with unit durations between",len(animation),"milestones"
            animation = Trajectory(range(len(animation)),animation)
        if isinstance(animation,HermiteTrajectory):
            animation = animation.configTrajectory()
        item = self.getItem(name)
        item.animation = animation
        item.animationStartTime = self.currentAnimationTime
        item.animationSpeed = speed
        item.animationEndBehavior = endBehavior
        item.markChanged()
        _globalLock.release()

    def pauseAnimation(self,paused=True):
        # Pauses (paused=True) or resumes (paused=False) the animation clock.
        global _globalLock
        _globalLock.acquire()
        self.animating = not paused
        _globalLock.release()

    def stepAnimation(self,amount):
        # Manually advances the animation clock by `amount` seconds.
        global _globalLock
        _globalLock.acquire()
        self.currentAnimationTime += amount
        self.doRefresh = True
        _globalLock.release()

    def animationTime(self,newtime=None):
        # Gets (and optionally sets) the current animation clock time.
        global _globalLock
        if self==None:
            print "Visualization disabled"
            return 0
        if newtime != None:
            _globalLock.acquire()
            self.currentAnimationTime = newtime
            _globalLock.release()
        return self.currentAnimationTime

    def remove(self,name):
        # Removes a top-level item entirely; use hide() to keep it around.
        global _globalLock
        _globalLock.acquire()
        assert name in self.items,"Can only remove top level objects from visualization, try hide() instead"
        item = self.getItem(name)
        item.destroy()
        del self.items[name]
        self.doRefresh = True
        _globalLock.release()

    def getItemConfig(self,name):
        # Returns the flattened configuration of the named item.
        global _globalLock
        _globalLock.acquire()
        res = config.getConfig(self.getItem(name).item)
        _globalLock.release()
        return res

    def setItemConfig(self,name,value):
        # Sets the configuration of the named item; plain values are replaced
        # wholesale, structured items go through config.setConfig.
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        if isinstance(item.item,(list,tuple,str)):
            item.item = value
        else:
            config.setConfig(item.item,value)
        if item.editor:
            item.update_editor(item_to_editor = True)
        self.doRefresh = True
        _globalLock.release()

    def hideLabel(self,name,hidden=True):
        # Hides/shows only the text label of the item, not the item itself.
        global _globalLock
        _globalLock.acquire()
        item = self.getItem(name)
        item.attributes["text_hidden"] = hidden
        item.markChanged()
        self.doRefresh = True
        _globalLock.release()

    def edit(self,name,doedit=True):
        # Attaches (doedit=True) or detaches a visual editor widget to the item.
        global _globalLock
        _globalLock.acquire()
        obj = self.getItem(name)
        if obj == None:
            _globalLock.release()
            raise ValueError("Object "+name+" does not exist in visualization")
        if doedit:
            obj.make_editor()
            if obj.editor:
                self.klamptwidgetmaster.add(obj.editor)
        else:
            if obj.editor:
                self.klamptwidgetmaster.remove(obj.editor)
                obj.remove_editor()
        self.doRefresh = True
        _globalLock.release()

    def widgetchangefunc(self,edit):
        """Called by GLWidgetPlugin on any widget change"""
        for name,item in self.items.iteritems():
            item.update_editor()

    def hide(self,name,hidden=True):
        # Hides/shows the item (it stays registered; see remove() to delete).
        global _globalLock
        _globalLock.acquire()
        self.getItem(name).hidden = hidden
        self.doRefresh = True
        _globalLock.release()

    def addPlotItem(self,plotname,itemname):
        global
_globalLock _globalLock.acquire() plot = self.getItem(plotname) assert plot != None and isinstance(plot.item,VisPlot),(plotname+" is not a valid plot") plot = plot.item for i in plot.items: assert i.name != itemname,(str(itemname)+" is already in the plot "+plotname) item = self.getItem(itemname) assert item != None,(str(itemname)+" is not a valid item") plot.items.append(VisPlotItem(itemname,item)) _globalLock.release() def logPlot(self,plotname,itemname,value): global _globalLock _globalLock.acquire() customIndex = -1 plot = self.getItem(plotname) assert plot != None and isinstance(plot.item,VisPlot),(plotname+" is not a valid plot") compress = plot.attributes.get('compress',_defaultCompressThreshold) plot = plot.item for i,item in enumerate(plot.items): if len(item.name)==0: customIndex = i if customIndex < 0: customIndex = len(plot.items) plot.items.append(VisPlotItem('',None)) plot.items[customIndex].compressThreshold = compress plot.items[customIndex].customUpdate(itemname,self.t - self.startTime,value) _globalLock.release() def logPlotEvent(self,plotname,eventname,color): global _globalLock _globalLock.acquire() plot = self.getItem(plotname) assert plot != None and isinstance(plot.item,VisPlot),(plotname+" is not a valid plot") plot.item.addEvent(eventname,self.t-self.startTime,color) _globalLock.release() def hidePlotItem(self,plotname,itemname,hidden=True): global _globalLock _globalLock.acquire() plot = self.getItem(plotname) assert plot != None and isinstance(plot.item,VisPlot),plotname+" is not a valid plot" plot = plot.item identified = False if isinstance(itemname,(tuple,list)): for i in plot.items: if i.name == itemname[0]: assert itemname[1] < len(i.hidden),("Invalid component index of item "+str(itemname[0])) identified = True i.hidden[itemname] = hidden else: for i in plot.items: if i.name == itemname: for j in xrange(len(i.hidden)): i.hidden[j] = hidden assert identified,("Invalid item "+str(itemname)+" specified in plot "+plotname) 
self.doRefresh = True _globalLock.release() def savePlot(self,plotname,fn): global _globalLock _globalLock.acquire() plot = self.getItem(plotname) assert plot != None and isinstance(plot.item,VisPlot),plotname+" is not a valid plot" plot = plot.item if fn != None: plot.beginSave(fn) else: plot.endSave(fn) _globalLock.release() def setAppearance(self,name,appearance): global _globalLock _globalLock.acquire() item = self.getItem(name) item.useDefaultAppearance = False item.customAppearance = appearance item.markChanged() self.doRefresh = True _globalLock.release() def setAttribute(self,name,attr,value): global _globalLock _globalLock.acquire() item = self.getItem(name) item.attributes[attr] = value if value==None: del item.attributes[attr] item.markChanged() self.doRefresh = True _globalLock.release() def revertAppearance(self,name): global _globalLock _globalLock.acquire() item = self.getItem(name) item.useDefaultApperance = True item.markChanged() self.doRefresh = True _globalLock.release() def setColor(self,name,r,g,b,a=1.0): global _globalLock _globalLock.acquire() item = self.getItem(name) item.attributes["color"] = [r,g,b,a] item.useDefaultAppearance = False item.markChanged() self.doRefresh = True _globalLock.release() def setDrawFunc(self,name,func): global _globalLock _globalLock.acquire() item = self.getItem(name) item.customDrawFunc = func self.doRefresh = True _globalLock.release() def autoFitCamera(self,scale=1.0): vp = None if self.window == None: global _frontend vp = _frontend.get_view() else: vp = self.window.get_view() try: autoFitViewport(vp,self.items.values()) vp.camera.dist /= scale except Exception as e: print "Unable to auto-fit camera" print e _vis = VisualizationPlugin() _frontend.setPlugin(_vis) #signals to visualization thread _quit = False _thread_running = False if _PyQtAvailable: from PyQt4 import QtGui #Qt specific startup #need to set up a QDialog and an QApplication class _MyDialog(QDialog): def __init__(self,windowinfo): 
QDialog.__init__(self) self.windowinfo = windowinfo glwidget = windowinfo.glwindow glwidget.setMinimumSize(640,480) glwidget.setMaximumSize(4000,4000) glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum)) self.description = QLabel("Press OK to continue") self.description.setSizePolicy(QSizePolicy(QSizePolicy.Preferred,QSizePolicy.Fixed)) self.layout = QVBoxLayout(self) self.layout.addWidget(glwidget) self.layout.addWidget(self.description) self.buttons = QDialogButtonBox(QDialogButtonBox.Ok,Qt.Horizontal, self) self.buttons.accepted.connect(self.accept) self.layout.addWidget(self.buttons) self.setWindowTitle(windowinfo.name) glwidget.name = windowinfo.name def accept(self): global _globalLock _globalLock.acquire() self.windowinfo.glwindow.hide() _globalLock.release() print "#########################################" print "klampt.vis: Dialog accept" print "#########################################" return QDialog.accept(self) def reject(self): global _globalLock _globalLock.acquire() self.windowinfo.glwindow.hide() print "#########################################" print "klampt.vis: Dialog reject" print "#########################################" _globalLock.release() return QDialog.reject(self) class _MyWindow(QMainWindow): def __init__(self,windowinfo): QMainWindow.__init__(self) self.windowinfo = windowinfo self.glwidget = windowinfo.glwindow self.glwidget.setMinimumSize(self.glwidget.width,self.glwidget.height) self.glwidget.setMaximumSize(4000,4000) self.glwidget.setSizePolicy(QSizePolicy(QSizePolicy.Maximum,QSizePolicy.Maximum)) self.setCentralWidget(self.glwidget) self.setWindowTitle(windowinfo.name) self.glwidget.name = windowinfo.name self.saving_movie = False self.movie_timer = QTimer(self) self.movie_timer.timeout.connect(self.movie_update) self.movie_frame = 0 self.movie_time_last = 0 self.saving_html = False self.html_saver = None self.html_start_time = 0 self.html_timer = QTimer(self) 
self.html_timer.timeout.connect(self.html_update) #TODO: for action-free programs, don't add this... but this has to be detected after initializeGL()? mainMenu = self.menuBar() fileMenu = mainMenu.addMenu('&Actions') self.glwidget.actionMenu = fileMenu visMenu = mainMenu.addMenu('&Visualization') a = QtGui.QAction('Save world...', self) a.setStatusTip('Saves world to xml file') a.triggered.connect(self.save_world) visMenu.addAction(a) a = QtGui.QAction('Add to world...', self) a.setStatusTip('Adds an item to the world') a.triggered.connect(self.add_to_world) visMenu.addAction(a) a = QtGui.QAction('Save camera...', self) a.setStatusTip('Saves camera settings') a.triggered.connect(self.save_camera) visMenu.addAction(a) a = QtGui.QAction('Load camera...', self) a.setStatusTip('Loads camera settings') a.triggered.connect(self.load_camera) visMenu.addAction(a) a = QtGui.QAction('Start/stop movie output', self) a.setShortcut('Ctrl+M') a.setStatusTip('Starts / stops saving movie frames') a.triggered.connect(self.toggle_movie_mode) visMenu.addAction(a) a = QtGui.QAction('Start/stop html output', self) a.setShortcut('Ctrl+H') a.setStatusTip('Starts / stops saving animation to HTML file') a.triggered.connect(self.toggle_html_mode) visMenu.addAction(a) def getWorld(self): if not hasattr(self.glwidget.program,'plugins'): return None for p in self.glwidget.program.plugins: if hasattr(p,'world'): return p.world elif isinstance(p,VisualizationPlugin): world = p.items.get('world',None) if world != None: return world.item return None def getSimulator(self): if not hasattr(self.glwidget.program,'plugins'): return None for p in self.glwidget.program.plugins: if hasattr(p,'sim'): return p.sim return None def save_camera(self): if not hasattr(self.glwidget.program,'get_view'): print "Program does not appear to have a camera" return v = self.glwidget.program.get_view() fn = QFileDialog.getSaveFileName(caption="Viewport file (*.txt)",filter="Viewport file (*.txt);;All files (*.*)") if fn 
is None: return f = open(str(fn),'w') f.write("VIEWPORT\n") f.write("FRAME %d %d %d %d\n"%(v.x,v.y,v.w,v.h)) f.write("PERSPECTIVE 1\n") aspect = float(v.w)/float(v.h) rfov = v.fov*math.pi/180.0 scale = 1.0/(2.0*math.tan(rfov*0.5/aspect)*aspect) f.write("SCALE %f\n"%(scale,)) f.write("NEARPLANE %f\n"%(v.clippingplanes[0],)) f.write("FARPLANE %f\n"%(v.clippingplanes[0],)) f.write("CAMTRANSFORM ") mat = se3.homogeneous(v.camera.matrix()) f.write(' '.join(str(v) for v in sum(mat,[]))) f.write('\n') f.write("ORBITDIST %f\n"%(v.camera.dist,)) f.close() def load_camera(self): print "TODO" def save_world(self): w = self.getWorld() if w is None: print "Program does not appear to have a world" fn = QFileDialog.getSaveFileName(caption="World file (elements will be saved to folder)",filter="World file (*.xml);;All files (*.*)") if fn != None: w.saveFile(str(fn)) print "Saved to",fn,"and elements were saved to a directory of the same name." def add_to_world(self): w = self.getWorld() if w is None: print "Program does not appear to have a world" fn = QFileDialog.getOpenFileName(caption="World element",filter="Robot file (*.rob *.urdf);;Object file (*.obj);;Terrain file (*.env *.off *.obj *.stl *.wrl);;All files (*.*)") if fn != None: w.loadElement(str(fn)) for p in self.glwidget.program.plugins: if isinstance(p,VisualizationPlugin): p.getItem('world').setItem(w) def toggle_movie_mode(self): self.saving_movie = not self.saving_movie if self.saving_movie: self.movie_timer.start(33) sim = self.getSimulator() if sim != None: self.movie_time_last = sim.getTime() else: self.movie_timer.stop() dlg = QtGui.QInputDialog(self) dlg.setInputMode( QtGui.QInputDialog.TextInput) dlg.setLabelText("Command") dlg.setTextValue('ffmpeg -y -f image2 -i image%04d.png klampt_record.mp4') dlg.resize(500,100) ok = dlg.exec_() cmd = dlg.textValue() #(cmd,ok) = QtGui.QInputDialog.getText(self,"Process with ffmpeg?","Command", text='ffmpeg -y -f image2 -i image%04d.png klampt_record.mp4') if ok: import 
os,glob os.system(str(cmd)) print "Removing temporary files" for fn in glob.glob('image*.png'): os.remove(fn) def movie_update(self): sim = self.getSimulator() if sim != None: while sim.getTime() >= self.movie_time_last + 1.0/30.0: self.glwidget.program.save_screen('image%04d.png'%(self.movie_frame)) self.movie_frame += 1 self.movie_time_last += 1.0/30.0 else: self.glwidget.program.save_screen('image%04d.png'%(self.movie_frame)) self.movie_frame += 1 def toggle_html_mode(self): self.saving_html = not self.saving_html if self.saving_html: world = self.getSimulator() if world is None: world = self.getWorld() if world is None: print "There is no world in the current plugin, can't save" self.saving_html = False return fn = QFileDialog.getSaveFileName(caption="Save path HTML file to...",filter="HTML file (*.html);;All files (*.*)") if fn is None: self.saving_html = False return from ..io import html self.html_start_time = time.time() self.html_saver = html.HTMLSharePath(fn) self.html_saver.dt = 0.033; self.html_saver.start(world) self.html_timer.start(33) else: self.html_saver.end() self.html_timer.stop() def html_update(self): t = None if self.html_saver.sim == None: #t = time.time()-self.html_start_time t = self.html_saver.last_t + 0.034 self.html_saver.animate(t) def closeEvent(self,event): global _globalLock _globalLock.acquire() self.windowinfo.glwindow.hide() self.windowinfo.mode = 'hidden' self.windowinfo.glwindow.idlesleep() self.windowinfo.glwindow.setParent(None) if self.saving_movie: self.toggle_movie_mode() if self.saving_html: self.toggle_html_mode() print "#########################################" print "klampt.vis: Window close" print "#########################################" _globalLock.release() def _run_app_thread(): global _thread_running,_vis,_widget,_window,_quit,_showdialog,_showwindow,_globalLock _thread_running = True _GLBackend.initialize("Klamp't visualization") res = None while not _quit: _globalLock.acquire() for i,w in 
enumerate(_windows): if w.glwindow == None and w.mode != 'hidden': print "vis: creating GL window" w.glwindow = _GLBackend.createWindow(w.name) w.glwindow.setProgram(w.frontend) w.glwindow.setParent(None) w.glwindow.refresh() if w.doRefresh: if w.mode != 'hidden': w.glwindow.updateGL() w.doRefresh = False if w.doReload and w.glwindow != None: w.glwindow.setProgram(w.frontend) if w.guidata: w.guidata.setWindowTitle(w.name) w.guidata.glwidget = w.glwindow w.guidata.setCentralWidget(w.glwindow) w.doReload = False if w.mode == 'dialog': print "#########################################" print "klampt.vis: Dialog on window",i print "#########################################" if w.custom_ui == None: dlg = _MyDialog(w) else: dlg = w.custom_ui(w.glwindow) #need to cache the bastards to avoid deleting the GL object. Not sure why it's being kept around. #alldlgs.append(dlg) #here's the crash -- above line deleted the old dialog, which for some reason kills the widget if dlg != None: w.glwindow.show() w.glwindow.idlesleep(0) w.glwindow.refresh() w.glwindow.refresh() _globalLock.release() res = dlg.exec_() _globalLock.acquire() print "#########################################" print "klampt.vis: Dialog done on window",i print "#########################################" w.glwindow.hide() w.glwindow.setParent(None) w.glwindow.idlesleep() w.mode = 'hidden' if w.mode == 'shown' and w.guidata == None: print "#########################################" print "klampt.vis: Making window",i print "#########################################" if w.custom_ui == None: w.guidata = _MyWindow(w) else: w.guidata = w.custom_ui(w.glwindow) w.glwindow.show() w.glwindow.idlesleep(0) if w.mode == 'shown' and not w.guidata.isVisible(): print "#########################################" print "klampt.vis: Showing window",i print "#########################################" w.glwindow.show() w.glwindow.setParent(w.guidata) w.glwindow.idlesleep(0) w.guidata.show() if w.mode == 'hidden' and w.guidata != 
None: if w.guidata.isVisible(): print "#########################################" print "klampt.vis: Hiding window",i print "#########################################" w.glwindow.setParent(None) w.glwindow.idlesleep() w.glwindow.hide() w.guidata.hide() #prevent deleting the GL window w.glwindow.setParent(None) w.guidata = None _globalLock.release() _GLBackend.app.processEvents() time.sleep(0.001) print "Visualization thread closing..." for w in _windows: w.vis.clear() if w.glwindow: w.glwindow.close() _thread_running = False return res elif _GLUTAvailable: print "klampt.visualization: QT is not available, falling back to poorer" print "GLUT interface. Returning to another GLUT thread will not work" print "properly." print "" class GLUTHijacker(GLPluginProgram): def __init__(self,windowinfo): GLPluginProgram.__init__(self) self.windowinfo = windowinfo self.name = windowinfo.name self.view = windowinfo.frontend.view self.clearColor = windowinfo.frontend.clearColor self.actions = windowinfo.frontend.actions self.frontend = windowinfo.frontend self.inDialog = False self.hidden = False def initialize(self): self.frontend.window = self.window if not self.frontend.initialize(): return False GLPluginProgram.initialize(self) return True def display(self): global _globalLock _globalLock.acquire() self.frontend.display() _globalLock.release() return True def display_screen(self): global _globalLock _globalLock.acquire() self.frontend.display_screen() glColor3f(1,1,1) glRasterPos(20,50) gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"(Do not close this window except to quit)") if self.inDialog: glColor3f(1,1,0) glRasterPos(20,80) gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"In Dialog mode. Press 'Esc' to return to normal mode") else: glColor3f(1,1,0) glRasterPos(20,80) gldraw.glutBitmapString(GLUT_BITMAP_HELVETICA_18,"In Window mode. 
Press 'Esc' to hide window") _globalLock.release() def keyboardfunc(self,c,x,y): if ord(c)==27: if self.inDialog: print "Esc pressed, hiding dialog" self.inDialog = False else: print "Esc pressed, hiding window" global _globalLock _globalLock.acquire() self.windowinfo.mode = 'hidden' self.hidden = True glutHideWindow() _globalLock.release() return True else: return self.frontend.keyboardfunc(c,x,y) def keyboardupfunc(self,c,x,y): return self.frontend.keyboardupfunc(c,x,y) def motionfunc(self,x,y,dx,dy): return self.frontend.motionfunc(x,y,dx,dy) def mousefunc(self,button,state,x,y): return self.frontend.mousefunc(button,state,x,y) def idlefunc(self): global _quit,_showdialog global _globalLock _globalLock.acquire() if _quit: if bool(glutLeaveMainLoop): glutLeaveMainLoop() else: print "Not compiled with freeglut, can't exit main loop safely. Press Ctrl+C instead" raw_input() if self.hidden: print "hidden, waiting...",self.windowinfo.mode if self.windowinfo.mode == 'shown': print "Showing window" glutSetWindow(self.window.glutWindowID) glutShowWindow() self.hidden = False elif self.windowinfo.mode == 'dialog': print "Showing window in dialog mode" self.inDialog = True glutSetWindow(self.window.glutWindowID) glutShowWindow() self.hidden = False _globalLock.release() return self.frontend.idlefunc() def _run_app_thread(): global _thread_running,_vis,_old_glut_window,_quit,_windows import weakref _thread_running = True _GLBackend.initialize("Klamp't visualization") w = _GLBackend.createWindow("Klamp't visualization") hijacker = GLUTHijacker(_windows[0]) _windows[0].guidata = weakref.proxy(hijacker) w.setProgram(hijacker) _GLBackend.run() print "Visualization thread closing..." 
for w in _windows: w.vis.clear() _thread_running = False return def _kill(): global _quit _quit = True while _thread_running: time.sleep(0.01) _quit = False if _PyQtAvailable: from PyQt4 import QtCore class MyQThread(QtCore.QThread): def __init__(self,func,*args): self.func = func self.args = args QtCore.QThread.__init__(self) def run(self): self.func(*self.args) def _show(): global _windows,_current_window,_thread_running if len(_windows)==0: _windows.append(WindowInfo(_window_title,_frontend,_vis)) _current_window = 0 _windows[_current_window].mode = 'shown' _windows[_current_window].worlds = _current_worlds _windows[_current_window].active_worlds = _current_worlds[:] if not _thread_running: signal.signal(signal.SIGINT, signal.SIG_DFL) if _PyQtAvailable and False: #for some reason, QThread doesn't allow for mouse events to be posted? thread = MyQThread(_run_app_thread) thread.start() else: thread = Thread(target=_run_app_thread) thread.setDaemon(True) thread.start() time.sleep(0.1) def _hide(): global _windows,_current_window,_thread_running if _current_window == None: return _windows[_current_window].mode = 'hidden' def _dialog(): global __windows,_current_window,_thread_running if len(_windows)==0: _windows.append(WindowInfo(_window_title,_frontend,_vis,None)) _current_window = 0 if not _thread_running: signal.signal(signal.SIGINT, signal.SIG_DFL) thread = Thread(target=_run_app_thread) thread.setDaemon(True) thread.start() #time.sleep(0.1) _globalLock.acquire() assert _windows[_current_window].mode == 'hidden',"dialog() called inside dialog?" 
_windows[_current_window].mode = 'dialog' _windows[_current_window].worlds = _current_worlds _windows[_current_window].active_worlds = _current_worlds[:] _globalLock.release() while _windows[_current_window].mode == 'dialog': time.sleep(0.1) return def _set_custom_ui(func): global _windows,_current_window,_thread_running if len(_windows)==0: _windows.append(WindowInfo(_window_title,_frontend,_vis,None)) _current_window = 0 _windows[_current_window].custom_ui = func return def _onFrontendChange(): global _windows,_frontend,_window_title,_current_window,_thread_running if _current_window == None: return w = _windows[_current_window] w.doReload = True w.name = _window_title w.frontend = _frontend if w.glwindow: w.glwindow.reshape(_frontend.view.w,_frontend.view.h) if w.guidata and not _PyQtAvailable: w.guidata.frontend = _frontend _frontend.window = w.guidata.window def _refreshDisplayLists(item): if isinstance(item,WorldModel): for i in xrange(item.numRobots()): _refreshDisplayLists(item.robot(i)) for i in xrange(item.numRigidObjects()): _refreshDisplayLists(item.rigidObject(i)) for i in xrange(item.numTerrains()): _refreshDisplayLists(item.terrain(i)) elif isinstance(item,RobotModel): for i in xrange(item.numLinks()): _refreshDisplayLists(item.link(i)) elif hasattr(item,'appearance'): item.appearance().refresh(False) def _checkWindowCurrent(item): global _windows,_current_window,_world_to_window,_current_worlds if isinstance(item,int): if not all(w.index != item for w in _current_worlds): print "klampt.vis: item appears to be in a new world, but doesn't have a full WorldModel instance" if isinstance(item,WorldModel): #print "Worlds active in current window",_current_window,":",[w().index for w in _current_worlds] if all(item != w() for w in _current_worlds): #PyQt interface allows sharing display lists but GLUT does not. #refresh all worlds' display lists that will be shifted to the current window. 
for i,win in enumerate(_windows): #print "Window",i,"active worlds",[w().index for w in win.active_worlds] if any(item == w() for w in win.active_worlds): if not _PyQtAvailable: print "klampt.vis: world",item.index,"was shown in a different window, now refreshing display lists" _refreshDisplayLists(item) win.active_worlds.remove(weakref.ref(item)) _current_worlds.append(weakref.ref(item)) #print "klampt.vis: world added to the visualization's world (items:",[w().index for w in _current_worlds],")" #else: # print "klampt.vis: world",item,"is already in the current window's world" elif hasattr(item,'world'): _checkWindowCurrent(item.world)<|fim▁end|>
#raw_input()
<|file_name|>parser.go<|end_file_name|><|fim▁begin|>package parser // #include "query_types.h" // #include <stdlib.h> import "C" import ( "bytes" "fmt" "math" "reflect" "regexp" "strconv" "strings" "time" "unsafe" ) type From struct { TableName string } type Operation int type IntoClause struct { Target *Value } type BasicQuery struct { startTime time.Time endTime time.Time } type SelectDeleteCommonQuery struct { BasicQuery FromClause *FromClause Condition *WhereCondition } type SelectQuery struct { SelectDeleteCommonQuery ColumnNames []*Value groupByClause *GroupByClause IntoClause *IntoClause Limit int Ascending bool Explain bool } type ListType int const ( Series ListType = iota ContinuousQueries ) type ListQuery struct { Type ListType } type DropQuery struct { Id int } type DropSeriesQuery struct { tableName string } func (self *DropSeriesQuery) GetTableName() string { return self.tableName } type DeleteQuery struct { SelectDeleteCommonQuery } type Query struct { QueryString string SelectQuery *SelectQuery DeleteQuery *DeleteQuery ListQuery *ListQuery DropSeriesQuery *DropSeriesQuery DropQuery *DropQuery } func (self *IntoClause) GetString() string { return self.Target.GetString() } func (self *Query) GetQueryString() string { return self.commonGetQueryString(false) } func (self *Query) GetQueryStringWithTimeCondition() string { return self.commonGetQueryString(true) } func (self *Query) commonGetQueryString(withTime bool) string { if self.SelectQuery != nil { if withTime { return self.SelectQuery.GetQueryStringWithTimeCondition() } return self.SelectQuery.GetQueryString() } else if self.ListQuery != nil { return "list series" } else if self.DeleteQuery != nil { return self.DeleteQuery.GetQueryString(withTime) } return self.QueryString } func (self *Query) IsListQuery() bool { return self.ListQuery != nil } func (self *Query) IsExplainQuery() bool { return self.SelectQuery != nil && self.SelectQuery.Explain } func (self *Query) IsListSeriesQuery() bool { return 
self.ListQuery != nil && self.ListQuery.Type == Series } func (self *Query) IsListContinuousQueriesQuery() bool { return self.ListQuery != nil && self.ListQuery.Type == ContinuousQueries } func (self *DeleteQuery) GetQueryString(withTime bool) string { buffer := bytes.NewBufferString("delete ") fmt.Fprintf(buffer, "from %s", self.FromClause.GetString()) if withTime { fmt.Fprintf(buffer, " where %s", self.GetWhereConditionWithTime(self.startTime, self.endTime).GetString()) } else if condition := self.GetWhereCondition(); condition != nil { fmt.Fprintf(buffer, " where %s", condition.GetString()) } return buffer.String() } func (self *SelectQuery) GetColumnNames() []*Value { return self.ColumnNames } func (self *SelectQuery) IsExplainQuery() bool { return self.Explain } func (self *SelectQuery) GetQueryString() string { return self.commonGetQueryStringWithTimes(false, true, self.startTime, self.endTime) } func (self *SelectQuery) GetQueryStringWithTimeCondition() string { return self.commonGetQueryStringWithTimes(true, true, self.startTime, self.endTime) } func (self *SelectQuery) GetQueryStringWithTimes(startTime, endTime time.Time) string { return self.commonGetQueryStringWithTimes(true, true, startTime, endTime) } func (self *SelectQuery) GetQueryStringWithTimesAndNoIntoClause(startTime, endTime time.Time) string { return self.commonGetQueryStringWithTimes(true, false, startTime, endTime) } func (self *SelectQuery) commonGetQueryStringWithTimes(withTime, withIntoClause bool, startTime, endTime time.Time) string { buffer := bytes.NewBufferString("") fmt.Fprintf(buffer, "select ") buffer.WriteString(Values(self.ColumnNames).GetString()) <|fim▁hole|> } else if condition := self.GetWhereCondition(); condition != nil { fmt.Fprintf(buffer, " where %s", condition.GetString()) } if self.GetGroupByClause() != nil && len(self.GetGroupByClause().Elems) > 0 { fmt.Fprintf(buffer, " group by %s", self.GetGroupByClause().GetString()) } if self.Limit > 0 { fmt.Fprintf(buffer, " 
limit %d", self.Limit) } if self.Ascending { fmt.Fprintf(buffer, " order asc") } if clause := self.IntoClause; withIntoClause && clause != nil { fmt.Fprintf(buffer, " into %s", clause.GetString()) } return buffer.String() } func (self *SelectQuery) IsSinglePointQuery() bool { w := self.GetWhereCondition() if w == nil { return false } leftWhereCondition, ok := w.GetLeftWhereCondition() if !ok { return false } leftBoolExpression, ok := leftWhereCondition.GetBoolExpression() if !ok { return false } rightBoolExpression, ok := w.Right.GetBoolExpression() if !ok { return false } if leftBoolExpression.Name != "=" && rightBoolExpression.Name != "=" { return false } if leftBoolExpression.Elems[0].Name != "time" || rightBoolExpression.Elems[0].Name != "sequence_number" { return false } return true } func (self *SelectQuery) GetSinglePointQuerySequenceNumber() (int64, error) { w := self.GetWhereCondition() rightBoolExpression, _ := w.Right.GetBoolExpression() sequence := rightBoolExpression.Elems[1].Name sequence_number, err := strconv.ParseInt(sequence, 10, 64) if err != nil { return 0, fmt.Errorf("The column sequence_number can only be queried as an integer.") } return sequence_number, nil } func (self *SelectQuery) IsContinuousQuery() bool { return self.GetIntoClause() != nil } func (self *SelectQuery) IsValidContinuousQuery() bool { groupByClause := self.GetGroupByClause() if len(groupByClause.Elems) == 0 { return true } for _, elem := range groupByClause.Elems { if elem.Name == "time" { return true } } return false } func (self *SelectQuery) IsNonRecursiveContinuousQuery() bool { fromClause := self.GetFromClause() intoClause := self.GetIntoClause() for _, from := range fromClause.Names { regex, ok := from.Name.GetCompiledRegex() if !ok { continue } regexString := regex.String() intoTarget := intoClause.Target.Name if !strings.Contains(intoTarget, ":series_name") { continue } else { if strings.HasPrefix(regexString, "^") && !strings.HasPrefix(intoTarget, ":series_name") { 
continue } if strings.HasSuffix(regexString, "$") && !strings.HasSuffix(intoTarget, ":series_name") { continue } return false } } return true } func (self *SelectQuery) GetIntoClause() *IntoClause { return self.IntoClause } func (self *SelectDeleteCommonQuery) GetFromClause() *FromClause { return self.FromClause } func setupSlice(hdr *reflect.SliceHeader, ptr unsafe.Pointer, size C.size_t) { hdr.Cap = int(size) hdr.Len = int(size) hdr.Data = uintptr(ptr) } func GetGroupByClause(groupByClause *C.groupby_clause) (*GroupByClause, error) { if groupByClause == nil { return &GroupByClause{Elems: nil}, nil } values, err := GetValueArray(groupByClause.elems) if err != nil { return nil, err } fillWithZero := false var fillValue *Value if groupByClause.fill_function != nil { fun, err := GetValue(groupByClause.fill_function) if err != nil { return nil, err } if fun.Name != "fill" { return nil, fmt.Errorf("You can't use %s with group by", fun.Name) } if len(fun.Elems) != 1 { return nil, fmt.Errorf("`fill` accepts one argument only") } fillValue = fun.Elems[0] fillWithZero = true } return &GroupByClause{ Elems: values, FillWithZero: fillWithZero, FillValue: fillValue, }, nil } func GetValueArray(array *C.value_array) ([]*Value, error) { if array == nil { return nil, nil } var values []*C.value setupSlice((*reflect.SliceHeader)((unsafe.Pointer(&values))), unsafe.Pointer(array.elems), array.size) valuesSlice := make([]*Value, 0, array.size) for _, value := range values { value, err := GetValue(value) if err != nil { return nil, err } valuesSlice = append(valuesSlice, value) } return valuesSlice, nil } func GetStringArray(array *C.array) []string { if array == nil { return nil } var values []*C.char setupSlice((*reflect.SliceHeader)((unsafe.Pointer(&values))), unsafe.Pointer(array.elems), array.size) stringSlice := make([]string, 0, array.size) for _, value := range values { stringSlice = append(stringSlice, C.GoString(value)) } return stringSlice } func GetValue(value *C.value) 
(*Value, error) { v := &Value{} v.Name = C.GoString(value.name) var err error v.Elems, err = GetValueArray(value.args) if err != nil { return nil, err } v.Type = ValueType(value.value_type) isCaseInsensitive := value.is_case_insensitive != 0 if v.Type == ValueRegex { if isCaseInsensitive { v.compiledRegex, err = regexp.Compile("(?i)" + v.Name) } else { v.compiledRegex, err = regexp.Compile(v.Name) } v.IsInsensitive = isCaseInsensitive } if value.alias != nil { v.Alias = C.GoString(value.alias) } return v, err } func GetTableName(name *C.table_name) (*TableName, error) { value, err := GetValue(name.name) if err != nil { return nil, err } table := &TableName{Name: value} if name.alias != nil { table.Alias = C.GoString(name.alias) } return table, nil } func GetTableNameArray(array *C.table_name_array) ([]*TableName, error) { var names []*C.table_name setupSlice((*reflect.SliceHeader)((unsafe.Pointer(&names))), unsafe.Pointer(array.elems), array.size) tableNamesSlice := make([]*TableName, 0, array.size) for _, name := range names { tableName, err := GetTableName(name) if err != nil { return nil, err } tableNamesSlice = append(tableNamesSlice, tableName) } return tableNamesSlice, nil } func GetFromClause(fromClause *C.from_clause) (*FromClause, error) { arr, err := GetTableNameArray(fromClause.names) if err != nil { return nil, err } return &FromClause{FromClauseType(fromClause.from_clause_type), arr}, nil } func GetIntoClause(intoClause *C.into_clause) (*IntoClause, error) { if intoClause == nil { return nil, nil } target, err := GetValue(intoClause.target) if err != nil { return nil, err } return &IntoClause{target}, nil } func GetWhereCondition(condition *C.condition) (*WhereCondition, error) { if condition.is_bool_expression != 0 { expr, err := GetValue((*C.value)(condition.left)) if err != nil { return nil, err } return &WhereCondition{ isBooleanExpression: true, Left: expr, Operation: "", Right: nil, }, nil } c := &WhereCondition{} var err error c.Left, err = 
GetWhereCondition((*C.condition)(condition.left)) if err != nil { return nil, err } c.Operation = C.GoString(condition.op) c.Right, err = GetWhereCondition((*C.condition)(unsafe.Pointer(condition.right))) return c, err } func (self *SelectDeleteCommonQuery) GetWhereCondition() *WhereCondition { return self.Condition } func (self *SelectDeleteCommonQuery) GetWhereConditionWithTime(startTime, endTime time.Time) *WhereCondition { timeCondition := &WhereCondition{ isBooleanExpression: false, Operation: "AND", Left: &WhereCondition{ isBooleanExpression: true, Left: &Value{ Name: "<", Type: ValueExpression, Elems: []*Value{ &Value{Name: "time", Type: ValueSimpleName}, &Value{Name: strconv.FormatInt(endTime.UnixNano(), 10), Type: ValueInt}, }, }, }, Right: &WhereCondition{ isBooleanExpression: true, Left: &Value{ Name: ">", Type: ValueExpression, Elems: []*Value{ &Value{Name: "time", Type: ValueSimpleName}, &Value{Name: strconv.FormatInt(startTime.UnixNano(), 10), Type: ValueInt}, }, }, }, } if self.Condition == nil { return timeCondition } return &WhereCondition{ isBooleanExpression: false, Left: self.Condition, Right: timeCondition, Operation: "AND", } } func (self *SelectQuery) GetGroupByClause() *GroupByClause { return self.groupByClause } // This is just for backward compatability so we don't have // to change all the code. 
func ParseSelectQuery(query string) (*SelectQuery, error) { queries, err := ParseQuery(query) if err != nil { return nil, err } if len(queries) == 0 { return nil, fmt.Errorf("No queries found") } selectQuery := queries[0].SelectQuery if selectQuery == nil { return nil, fmt.Errorf("Query isn't a select query: '%s'", queries[0].GetQueryString()) } return selectQuery, nil } func ParseQuery(query string) ([]*Query, error) { queryString := C.CString(query) defer C.free(unsafe.Pointer(queryString)) q := C.parse_query(queryString) defer C.close_query(&q) if q.error != nil { str := C.GoString(q.error.err) return nil, &QueryError{ firstLine: int(q.error.first_line), firstColumn: int(q.error.first_column) - 1, lastLine: int(q.error.last_line), lastColumn: int(q.error.last_column) - 1, errorString: str, } } if q.list_series_query != 0 { return []*Query{&Query{QueryString: query, ListQuery: &ListQuery{Type: Series}}}, nil } if q.list_continuous_queries_query != 0 { return []*Query{&Query{QueryString: query, ListQuery: &ListQuery{Type: ContinuousQueries}}}, nil } if q.select_query != nil { selectQuery, err := parseSelectQuery(q.select_query) if err != nil { return nil, err } return []*Query{&Query{QueryString: query, SelectQuery: selectQuery}}, nil } else if q.delete_query != nil { deleteQuery, err := parseDeleteQuery(q.delete_query) if err != nil { return nil, err } return []*Query{&Query{QueryString: query, DeleteQuery: deleteQuery}}, nil } else if q.drop_series_query != nil { dropSeriesQuery, err := parseDropSeriesQuery(query, q.drop_series_query) if err != nil { return nil, err } return []*Query{&Query{QueryString: query, DropSeriesQuery: dropSeriesQuery}}, nil } else if q.drop_query != nil { return []*Query{&Query{QueryString: query, DropQuery: &DropQuery{Id: int(q.drop_query.id)}}}, nil } return nil, fmt.Errorf("Unknown query type encountered") } func parseDropSeriesQuery(queryStirng string, dropSeriesQuery *C.drop_series_query) (*DropSeriesQuery, error) { name, err := 
GetValue(dropSeriesQuery.name) if err != nil { return nil, err } return &DropSeriesQuery{ tableName: name.Name, }, nil } func parseSelectDeleteCommonQuery(fromClause *C.from_clause, whereCondition *C.condition) (SelectDeleteCommonQuery, error) { goQuery := SelectDeleteCommonQuery{ BasicQuery: BasicQuery{ startTime: time.Unix(math.MinInt64/1000000000, 0).UTC(), endTime: time.Now().UTC(), }, } var err error // get the from clause goQuery.FromClause, err = GetFromClause(fromClause) if err != nil { return goQuery, err } // get the where condition if whereCondition != nil { goQuery.Condition, err = GetWhereCondition(whereCondition) if err != nil { return goQuery, err } } var startTime, endTime *time.Time goQuery.Condition, endTime, err = getTime(goQuery.GetWhereCondition(), false) if err != nil { return goQuery, err } if endTime != nil { goQuery.endTime = *endTime } goQuery.Condition, startTime, err = getTime(goQuery.GetWhereCondition(), true) if err != nil { return goQuery, err } if startTime != nil { goQuery.startTime = *startTime } return goQuery, nil } func parseSelectQuery(q *C.select_query) (*SelectQuery, error) { limit := q.limit if limit == -1 { // no limit by default limit = 0 } basicQuery, err := parseSelectDeleteCommonQuery(q.from_clause, q.where_condition) if err != nil { return nil, err } goQuery := &SelectQuery{ SelectDeleteCommonQuery: basicQuery, Limit: int(limit), Ascending: q.ascending != 0, Explain: q.explain != 0, } // get the column names goQuery.ColumnNames, err = GetValueArray(q.c) if err != nil { return nil, err } // get the group by clause if q.group_by == nil { goQuery.groupByClause = &GroupByClause{} } else { goQuery.groupByClause, err = GetGroupByClause(q.group_by) if err != nil { return nil, err } } // get the into clause goQuery.IntoClause, err = GetIntoClause(q.into_clause) if err != nil { return goQuery, err } return goQuery, nil } func parseDeleteQuery(query *C.delete_query) (*DeleteQuery, error) { basicQuery, err := 
parseSelectDeleteCommonQuery(query.from_clause, query.where_condition) if err != nil { return nil, err } goQuery := &DeleteQuery{ SelectDeleteCommonQuery: basicQuery, } if basicQuery.GetWhereCondition() != nil { return nil, fmt.Errorf("Delete queries can't have where clause that don't reference time") } return goQuery, nil }<|fim▁end|>
fmt.Fprintf(buffer, " from %s", self.FromClause.GetString()) if withTime { fmt.Fprintf(buffer, " where %s", self.GetWhereConditionWithTime(startTime, endTime).GetString())
<|file_name|>intelli.js<|end_file_name|><|fim▁begin|>intelli = { /** * Name of the current page */ pageName: '', securityTokenKey: '__st', lang: {}, /** * Check if value exists in array * * @param {Array} val value to be checked * @param {String} arr array * * @return {Boolean} */ inArray: function (val, arr) { if (typeof arr === 'object' && arr) { for (var i = 0; i < arr.length; i++) { if (arr[i] == val) { return true; } } } return false; }, cookie: { /** * Returns the value of cookie * * @param {String} name cookie name * * @return {String} */ read: function (name) { var nameEQ = name + '='; var ca = document.cookie.split(';'); for (var i = 0; i < ca.length; i++) { var c = ca[i]; while (c.charAt(0) == ' ') c = c.substring(1, c.length); if (c.indexOf(nameEQ) == 0) return c.substring(nameEQ.length, c.length); } return null; }, /** * Creates new cookie * * @param {String} name cookie name * @param {String} value cookie value * @param {Integer} days number of days to keep cookie value for * @param {String} value path value */ write: function (name, value, days, path) { var expires = ''; if (days) { var date = new Date(); date.setTime(date.getTime() + (days * 24 * 60 * 60 * 1000)); expires = '; expires=' + date.toGMTString(); } path = path || '/'; document.cookie = name + '=' + value + expires + '; path=' + path; }, /** * Clear cookie value * * @param {String} name cookie name */ clear: function (name) { intelli.cookie.write(name, '', -1); } }, urlVal: function (name) { name = name.replace(/[\[]/, "\\\[").replace(/[\]]/, "\\\]"); var regex = new RegExp('[\\?&]' + name + '=([^&#]*)'); var results = regex.exec(window.location.href); return (null === results) ? null : decodeURIComponent(results[1]); }, notifBox: function (opt) { var msg = opt.msg; var type = opt.type || 'info'; var autohide = opt.autohide || (type == 'notification' || type == 'success' || type == 'error' ? 
true : false); var pause = opt.pause || 10; var html = ''; if ('notif' == type || type == 'notification') { type = 'success'; } var boxid = 'notification'; if (opt.boxid) { boxid = opt.boxid; } var obj = $('#' + boxid); if ($.isArray(msg)) { html += '<ul class="unstyled">'; for (var i = 0; i < msg.length; i++) { if ('' != msg[i]) { html += '<li>' + msg[i] + '</li>'; } } html += '</ul>'; } else { html += ['<div>', msg, '</div>'].join(''); } obj.attr('class', 'alert alert-' + type).html(html).show(); if (autohide) { obj.delay(pause * 1000).fadeOut('slow'); } $('html, body').animate({scrollTop: obj.offset().top}, 'slow'); return obj; }, notifFloatBox: function (options) { var msg = options.msg, type = options.type || 'info', pause = options.pause || 3000, autohide = options.autohide, html = ''; // building message box html += '<div id="notifFloatBox" class="notifFloatBox notifFloatBox--' + type + '"><a href="#" class="close">&times;</a>'; if ($.isArray(msg)) { html += '<ul>'; for (var i = 0; i < msg.length; i++) { if ('' != msg[i]) { html += '<li>' + msg[i] + '</li>'; } } html += '</ul>'; } else { html += '<ul><li>' + msg + '</li></ul>'; } html += '</div>'; // placing message box if (!$('#notifFloatBox').length > 0) { $(html).appendTo('body').css('display', 'block').addClass('animated bounceInDown'); if (autohide) { setTimeout(function () { $('#notifFloatBox').fadeOut(function () { $(this).remove(); }); }, pause); } $('.close', '#notifFloatBox').on('click', function (e) { e.preventDefault(); $('#notifFloatBox').fadeOut(function () { $(this).remove(); }); }); } }, is_email: function (email) { return (email.search(/^([a-zA-Z0-9_\.\-\+])+\@(([a-zA-Z0-9\-])+\.)+([a-zA-Z]{2,3})+$/) > -1); }, ckeditor: function (name, params) { if (CKEDITOR.instances[name]) { return false; } params = params || {}; params.baseHref = intelli.config.clear_url; CKEDITOR.replace(name, params); }, add_tab: function (name, text) { var $tab = $('<li>').append($('<a>').attr({'data-toggle': 'tab', 
href: '#' + name}).text(text)); var $content = $('<div>').attr('id', name).addClass('tab-pane'); if ($('.nav-tabs', '.tabbable').children().length == 0) { $tab.addClass('active'); $content.addClass('active'); } $('.nav-tabs', '.tabbable').append($tab); $('.tab-content', '.tabbable').append($content); }, sortable: function (elem, params) { /*! Sortable 1.0.1 - MIT | git://github.com/rubaxa/Sortable.git */ !function (a) { "use strict"; "function" == typeof define && define.amd ? define(a) : "undefined" != typeof module && "undefined" != typeof module.exports ? module.exports = a() : "undefined" != typeof Package ? Sortable = a() : window.Sortable = a() }(function () { "use strict"; function a(a, b) { this.el = a, this.options = b = b || {}; var d = { group: Math.random(), sort: !0, disabled: !1, store: null, handle: null, scroll: !0, scrollSensitivity: 30, scrollSpeed: 10, draggable: /[uo]l/i.test(a.nodeName) ? "li" : ">*", ghostClass: "sortable-ghost", ignore: "a, img", filter: null, animation: 0, setData: function (a, b) { a.setData("Text", b.textContent) }, dropBubble: !1, dragoverBubble: !1 }; for (var e in d)!(e in b) && (b[e] = d[e]); var g = b.group; g && "object" == typeof g || (g = b.group = {name: g}), ["pull", "put"].forEach(function (a) { a in g || (g[a] = !0) }), L.forEach(function (d) { b[d] = c(this, b[d] || M), f(a, d.substr(2).toLowerCase(), b[d]) }, this), a[E] = g.name + " " + (g.put.join ? g.put.join(" ") : ""); for (var h in this)"_" === h.charAt(0) && (this[h] = c(this, this[h])); f(a, "mousedown", this._onTapStart), f(a, "touchstart", this._onTapStart), I && f(a, "selectstart", this._onTapStart), f(a, "dragover", this._onDragOver), f(a, "dragenter", this._onDragOver), P.push(this._onDragOver), b.store && this.sort(b.store.get(this)) } function b(a) { s && s.state !== a && (i(s, "display", a ? "none" : ""), !a && s.state && t.insertBefore(s, q), s.state = a) } function c(a, b) { var c = O.call(arguments, 2); return b.bind ? 
b.bind.apply(b, [a].concat(c)) : function () { return b.apply(a, c.concat(O.call(arguments))) } } function d(a, b, c) { if (a) { c = c || G, b = b.split("."); var d = b.shift().toUpperCase(), e = new RegExp("\\s(" + b.join("|") + ")\\s", "g"); do if (">*" === d && a.parentNode === c || ("" === d || a.nodeName.toUpperCase() == d) && (!b.length || ((" " + a.className + " ").match(e) || []).length == b.length))return a; while (a !== c && (a = a.parentNode)) } return null } function e(a) { a.dataTransfer.dropEffect = "move", a.preventDefault() } function f(a, b, c) { a.addEventListener(b, c, !1) } function g(a, b, c) { a.removeEventListener(b, c, !1) } function h(a, b, c) { if (a)if (a.classList) a.classList[c ? "add" : "remove"](b); else { var d = (" " + a.className + " ").replace(/\s+/g, " ").replace(" " + b + " ", ""); a.className = d + (c ? " " + b : "") } } function i(a, b, c) { var d = a && a.style; if (d) { if (void 0 === c)return G.defaultView && G.defaultView.getComputedStyle ? c = G.defaultView.getComputedStyle(a, "") : a.currentStyle && (c = a.currentStyle), void 0 === b ? c : c[b]; b in d || (b = "-webkit-" + b), d[b] = c + ("string" == typeof c ? "" : "px") } } function j(a, b, c) { if (a) { var d = a.getElementsByTagName(b), e = 0, f = d.length; if (c)for (; f > e; e++)c(d[e], e); return d } return [] } function k(a) { a.draggable = !1 } function l() { J = !1 } function m(a, b) { var c = a.lastElementChild, d = c.getBoundingClientRect(); return b.clientY - (d.top + d.height) > 5 && c } function n(a) { for (var b = a.tagName + a.className + a.src + a.href + a.textContent, c = b.length, d = 0; c--;)d += b.charCodeAt(c); return d.toString(36) } function o(a) { for (var b = 0; a && (a = a.previousElementSibling) && "TEMPLATE" !== a.nodeName.toUpperCase();)b++; return b } function p(a, b) { var c, d; return function () { void 0 === c && (c = arguments, d = this, setTimeout(function () { 1 === c.length ? 
a.call(d, c[0]) : a.apply(d, c), c = void 0 }, b)) } } var q, r, s, t, u, v, w, x, y, z, A, B, C, D = {}, E = "Sortable" + (new Date).getTime(), F = window, G = F.document, H = F.parseInt, I = !!G.createElement("div").dragDrop, J = !1, K = function (a, b, c, d, e, f) { var g = G.createEvent("Event"); g.initEvent(b, !0, !0), g.item = c || a, g.from = d || a, g.clone = s, g.oldIndex = e, g.newIndex = f, a.dispatchEvent(g) }, L = "onAdd onUpdate onRemove onStart onEnd onFilter onSort".split(" "), M = function () { }, N = Math.abs, O = [].slice, P = []; return a.prototype = { constructor: a, _dragStarted: function () { h(q, this.options.ghostClass, !0), a.active = this, K(t, "start", q, t, y) }, _onTapStart: function (a) { var b = a.type, c = a.touches && a.touches[0], e = (c || a).target, g = e, h = this.options, i = this.el, l = h.filter; if (!("mousedown" === b && 0 !== a.button || h.disabled)) { if (h.handle && (e = d(e, h.handle, i)), e = d(e, h.draggable, i), y = o(e), "function" == typeof l) { if (l.call(this, a, e, this))return K(g, "filter", e, i, y), void a.preventDefault() } else if (l && (l = l.split(",").some(function (a) { return a = d(g, a.trim(), i), a ? (K(a, "filter", e, i, y), !0) : void 0 })))return void a.preventDefault(); if (e && !q && e.parentNode === i) { "selectstart" === b && e.dragDrop(), B = a, t = this.el, q = e, v = q.nextSibling, A = this.options.group, q.draggable = !0, h.ignore.split(",").forEach(function (a) { j(e, a.trim(), k) }), c && (B = { target: e, clientX: c.clientX, clientY: c.clientY }, this._onDragStart(B, !0), a.preventDefault()), f(G, "mouseup", this._onDrop), f(G, "touchend", this._onDrop), f(G, "touchcancel", this._onDrop), f(q, "dragend", this), f(t, "dragstart", this._onDragStart), f(G, "dragover", this); try { G.selection ? 
G.selection.empty() : window.getSelection().removeAllRanges() } catch (m) { } } } }, _emulateDragOver: function () { if (C) { i(r, "display", "none"); var a = G.elementFromPoint(C.clientX, C.clientY), b = a, c = this.options.group.name, d = P.length; if (b)do { if ((" " + b[E] + " ").indexOf(c) > -1) { for (; d--;)P[d]({clientX: C.clientX, clientY: C.clientY, target: a, rootEl: b}); break } a = b } while (b = b.parentNode); i(r, "display", "") } }, _onTouchMove: function (a) { if (B) { var b = a.touches[0], c = b.clientX - B.clientX, d = b.clientY - B.clientY, e = "translate3d(" + c + "px," + d + "px,0)"; C = b, i(r, "webkitTransform", e), i(r, "mozTransform", e), i(r, "msTransform", e), i(r, "transform", e), this._onDrag(b), a.preventDefault() } }, _onDragStart: function (a, b) { var c = a.dataTransfer, d = this.options; if (this._offUpEvents(), "clone" == A.pull && (s = q.cloneNode(!0), i(s, "display", "none"), t.insertBefore(s, q)), b) { var e, g = q.getBoundingClientRect(), h = i(q); r = q.cloneNode(!0), i(r, "top", g.top - H(h.marginTop, 10)), i(r, "left", g.left - H(h.marginLeft, 10)), i(r, "width", g.width), i(r, "height", g.height), i(r, "opacity", "0.8"), i(r, "position", "fixed"), i(r, "zIndex", "100000"), t.appendChild(r), e = r.getBoundingClientRect(), i(r, "width", 2 * g.width - e.width), i(r, "height", 2 * g.height - e.height), f(G, "touchmove", this._onTouchMove), f(G, "touchend", this._onDrop), f(G, "touchcancel", this._onDrop), this._loopId = setInterval(this._emulateDragOver, 150) } else c && (c.effectAllowed = "move", d.setData && d.setData.call(this, c, q)), f(G, "drop", this); if (u = d.scroll, u === !0) { u = t; do if (u.offsetWidth < u.scrollWidth || u.offsetHeight < u.scrollHeight)break; while (u = u.parentNode) } setTimeout(this._dragStarted, 0) }, _onDrag: p(function (a) { if (t && this.options.scroll) { var b, c, d = this.options, e = d.scrollSensitivity, f = d.scrollSpeed, g = a.clientX, h = a.clientY, i = window.innerWidth, j = 
window.innerHeight, k = (e >= i - g) - (e >= g), l = (e >= j - h) - (e >= h); k || l ? b = F : u && (b = u, c = u.getBoundingClientRect(), k = (N(c.right - g) <= e) - (N(c.left - g) <= e), l = (N(c.bottom - h) <= e) - (N(c.top - h) <= e)), (D.vx !== k || D.vy !== l || D.el !== b) && (D.el = b, D.vx = k, D.vy = l, clearInterval(D.pid), b && (D.pid = setInterval(function () { b === F ? F.scrollTo(F.scrollX + k * f, F.scrollY + l * f) : (l && (b.scrollTop += l * f), k && (b.scrollLeft += k * f)) }, 24))) } }, 30), _onDragOver: function (a) { var c, e, f, g = this.el, h = this.options, j = h.group, k = j.put, n = A === j, o = h.sort; if (void 0 !== a.preventDefault && (a.preventDefault(), !h.dragoverBubble && a.stopPropagation()), !J && A && (n ? o || (f = !t.contains(q)) : A.pull && k && (A.name === j.name || k.indexOf && ~k.indexOf(A.name))) && (void 0 === a.rootEl || a.rootEl === this.el)) { if (c = d(a.target, h.draggable, g), e = q.getBoundingClientRect(), f)return b(!0), void(s || v ? t.insertBefore(q, s || v) : o || t.appendChild(q)); if (0 === g.children.length || g.children[0] === r || g === a.target && (c = m(g, a))) { if (c) {<|fim▁hole|> } b(n), g.appendChild(q), this._animate(e, q), c && this._animate(u, c) } else if (c && !c.animated && c !== q && void 0 !== c.parentNode[E]) { w !== c && (w = c, x = i(c)); var p, u = c.getBoundingClientRect(), y = u.right - u.left, z = u.bottom - u.top, B = /left|right|inline/.test(x.cssFloat + x.display), C = c.offsetWidth > q.offsetWidth, D = c.offsetHeight > q.offsetHeight, F = (B ? (a.clientX - u.left) / y : (a.clientY - u.top) / z) > .5, G = c.nextElementSibling; J = !0, setTimeout(l, 30), b(n), p = B ? c.previousElementSibling === q && !C || F && C : G !== q && !D || F && D, p && !G ? g.appendChild(q) : c.parentNode.insertBefore(q, p ? 
G : c), this._animate(e, q), this._animate(u, c) } } }, _animate: function (a, b) { var c = this.options.animation; if (c) { var d = b.getBoundingClientRect(); i(b, "transition", "none"), i(b, "transform", "translate3d(" + (a.left - d.left) + "px," + (a.top - d.top) + "px,0)"), b.offsetWidth, i(b, "transition", "all " + c + "ms"), i(b, "transform", "translate3d(0,0,0)"), clearTimeout(b.animated), b.animated = setTimeout(function () { i(b, "transition", ""), b.animated = !1 }, c) } }, _offUpEvents: function () { g(G, "mouseup", this._onDrop), g(G, "touchmove", this._onTouchMove), g(G, "touchend", this._onDrop), g(G, "touchcancel", this._onDrop) }, _onDrop: function (b) { var c = this.el, d = this.options; clearInterval(this._loopId), clearInterval(D.pid), g(G, "drop", this), g(G, "dragover", this), g(c, "dragstart", this._onDragStart), this._offUpEvents(), b && (b.preventDefault(), !d.dropBubble && b.stopPropagation(), r && r.parentNode.removeChild(r), q && (g(q, "dragend", this), k(q), h(q, this.options.ghostClass, !1), t !== q.parentNode ? (z = o(q), K(q.parentNode, "sort", q, t, y, z), K(t, "sort", q, t, y, z), K(q, "add", q, t, y, z), K(t, "remove", q, t, y, z)) : (s && s.parentNode.removeChild(s), q.nextSibling !== v && (z = o(q), K(t, "update", q, t, y, z), K(t, "sort", q, t, y, z))), a.active && K(t, "end", q, t, y, z)), t = q = r = v = s = B = C = w = x = A = a.active = null, this.save()) }, handleEvent: function (a) { var b = a.type; "dragover" === b ? 
(this._onDrag(a), e(a)) : ("drop" === b || "dragend" === b) && this._onDrop(a) }, toArray: function () { for (var a, b = [], c = this.el.children, e = 0, f = c.length; f > e; e++)a = c[e], d(a, this.options.draggable, this.el) && b.push(a.getAttribute("data-id") || n(a)); return b }, sort: function (a) { var b = {}, c = this.el; this.toArray().forEach(function (a, e) { var f = c.children[e]; d(f, this.options.draggable, c) && (b[a] = f) }, this), a.forEach(function (a) { b[a] && (c.removeChild(b[a]), c.appendChild(b[a])) }) }, save: function () { var a = this.options.store; a && a.set(this) }, closest: function (a, b) { return d(a, b || this.options.draggable, this.el) }, option: function (a, b) { var c = this.options; return void 0 === b ? c[a] : void(c[a] = b) }, destroy: function () { var a = this.el, b = this.options; L.forEach(function (c) { g(a, c.substr(2).toLowerCase(), b[c]) }), g(a, "mousedown", this._onTapStart), g(a, "touchstart", this._onTapStart), g(a, "selectstart", this._onTapStart), g(a, "dragover", this._onDragOver), g(a, "dragenter", this._onDragOver), Array.prototype.forEach.call(a.querySelectorAll("[draggable]"), function (a) { a.removeAttribute("draggable") }), P.splice(P.indexOf(this._onDragOver), 1), this._onDrop(), this.el = null } }, a.utils = { on: f, off: g, css: i, find: j, bind: c, is: function (a, b) { return !!d(a, b, a) }, throttle: p, closest: d, toggleClass: h, dispatchEvent: K, index: o }, a.version = "1.0.1", a.create = function (b, c) { return new a(b, c) }, a }); var el = document.getElementById(elem); Sortable.create(el, params); }, confirm: function (text, options, callback) { bootbox.confirm(text, function (result) { if (result) { if (typeof options === 'object' && options) { if ('' != options.url) { window.location = options.url; } } } if (typeof callback === 'function') { callback(result); } }); }, includeSecurityToken: function(params) { if ('object' === typeof params) { params[this.securityTokenKey] = 
intelli.securityToken; } return params; }, post: function(url, data, success, dataType) { return $.post(url, this.includeSecurityToken(data), success, dataType); }, getLocale: function() { if ('function' === typeof moment) { var existLocales = moment.locales(); var locales = [ intelli.languages[intelli.config.lang].locale.replace('_', '-'), intelli.config.lang ]; var map = { zh: 'zh-cn' }; for (var i in locales) { var locale = locales[i]; if (typeof map[locale] !== 'undefined') { locale = map[locale]; } if (-1 !== $.inArray(locale, existLocales)) { return locale; } } } return 'en'; } }; function _t(key, def) { if (intelli.admin && intelli.admin.lang[key]) { return intelli.admin.lang[key]; } return _f(key, def); } function _f(key, def) { if (intelli.lang[key]) { return intelli.lang[key]; } return (def ? (def === true ? key : def) : '{' + key + '}'); }<|fim▁end|>
if (c.animated)return; u = c.getBoundingClientRect()
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2013-2015 Sandstorm Development Group, Inc. and contributors // Licensed under the MIT License: // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. #[macro_use] extern crate capnp_rpc; pub mod calculator_capnp { include!(concat!(env!("OUT_DIR"), "/calculator_capnp.rs")); } pub mod client; pub mod server; #[tokio::main(flavor = "current_thread")] async fn main() -> Result<(), Box<dyn std::error::Error>> { let args: Vec<String> = ::std::env::args().collect(); if args.len() >= 2 { match &args[1][..] { "client" => return client::main().await, "server" => return server::main().await,<|fim▁hole|> } println!("usage: {} [client | server] ADDRESS", args[0]); Ok(()) }<|fim▁end|>
_ => () }
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from captcha.fields import CaptchaField from django import forms from django.contrib.auth.models import User from django.http import HttpResponse try: from django.template import engines __is_18 = True except ImportError: from django.template import loader<|fim▁hole|> __is_18 = False TEST_TEMPLATE = r""" <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> <html> <head> <meta http-equiv="Content-type" content="text/html; charset=utf-8"> <title>captcha test</title> </head> <body> {% if passed %} <p style="color:green">Form validated</p> {% endif %} {% if form.errors %} {{form.errors}} {% endif %} <form action="{% url 'captcha-test' %}" method="post"> {{form.as_p}} <p><input type="submit" value="Continue &rarr;"></p> </form> </body> </html> """ def _get_template(template_string): if __is_18: return engines["django"].from_string(template_string) else: return loader.get_template_from_string(template_string) def _test(request, form_class): passed = False if request.POST: form = form_class(request.POST) if form.is_valid(): passed = True else: form = form_class() t = _get_template(TEST_TEMPLATE) return HttpResponse(t.render(context=dict(passed=passed, form=form), request=request)) def test(request): class CaptchaTestForm(forms.Form): subject = forms.CharField(max_length=100) sender = forms.EmailField() captcha = CaptchaField(help_text="asdasd") return _test(request, CaptchaTestForm) def test_model_form(request): class CaptchaTestModelForm(forms.ModelForm): subject = forms.CharField(max_length=100) sender = forms.EmailField() captcha = CaptchaField(help_text="asdasd") class Meta: model = User fields = ("subject", "sender", "captcha") return _test(request, CaptchaTestModelForm) def test_custom_generator(request): class CaptchaTestModelForm(forms.ModelForm): subject = forms.CharField(max_length=100) sender = forms.EmailField() captcha = CaptchaField(generator=lambda: 
("111111", "111111")) class Meta: model = User fields = ("subject", "sender", "captcha") return _test(request, CaptchaTestModelForm) def test_custom_error_message(request): class CaptchaTestErrorMessageForm(forms.Form): captcha = CaptchaField( help_text="asdasd", error_messages=dict(invalid="TEST CUSTOM ERROR MESSAGE") ) return _test(request, CaptchaTestErrorMessageForm) def test_per_form_format(request): class CaptchaTestFormatForm(forms.Form): captcha = CaptchaField( help_text="asdasd", error_messages=dict(invalid="TEST CUSTOM ERROR MESSAGE"), output_format=( "%(image)s testPerFieldCustomFormatString " "%(hidden_field)s %(text_field)s" ), ) return _test(request, CaptchaTestFormatForm) def test_non_required(request): class CaptchaTestForm(forms.Form): sender = forms.EmailField() subject = forms.CharField(max_length=100) captcha = CaptchaField(help_text="asdasd", required=False) return _test(request, CaptchaTestForm) def test_id_prefix(request): class CaptchaTestForm(forms.Form): sender = forms.EmailField() subject = forms.CharField(max_length=100) captcha1 = CaptchaField(id_prefix="form1") captcha2 = CaptchaField(id_prefix="form2") return _test(request, CaptchaTestForm)<|fim▁end|>
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os<|fim▁hole|> from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)<|fim▁end|>
import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "PhotoLoader.settings")
<|file_name|>vsbackup.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015-2017 winapi-rs developers // Licensed under the Apache License, Version 2.0 // <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your option. // All files in the project carrying such notice may not be copied, modified, or distributed // except according to those terms. //! Declaration of backup interfaces. use ctypes::c_void; use shared::guiddef::IID; use shared::minwindef::{BOOL, BYTE, DWORD, UINT}; use shared::wtypes::BSTR; use um::unknwnbase::{IUnknown, IUnknownVtbl}; use um::vss::{ IVssAsync, IVssEnumObject, VSS_BACKUP_TYPE, VSS_ID, VSS_OBJECT_TYPE, VSS_PWSZ, VSS_RESTORE_TYPE, VSS_ROLLFORWARD_TYPE, VSS_SNAPSHOT_PROP, VSS_WRITER_STATE }; use um::vswriter::{ IVssWMDependency, IVssWMFiledesc, IVssWriterComponentsVtbl, VSS_COMPONENT_TYPE, VSS_FILE_RESTORE_STATUS, VSS_RESTOREMETHOD_ENUM, VSS_SOURCE_TYPE, VSS_USAGE_TYPE, VSS_WRITERRESTORE_ENUM }; use um::winnt::{HRESULT, LONG, LPCWSTR}; DEFINE_GUID!(IID_IVssExamineWriterMetadata, 0x902fcf7f, 0xb7fd, 0x42f8, 0x81, 0xf1, 0xb2, 0xe4, 0x00, 0xb1, 0xe5, 0xbd); DEFINE_GUID!(IID_IVssExamineWriterMetadataEx, 0x0c0e5ec0, 0xca44, 0x472b, 0xb7, 0x02, 0xe6, 0x52, 0xdb, 0x1c, 0x04, 0x51); DEFINE_GUID!(IID_IVssBackupComponents, 0x665c1d5f, 0xc218, 0x414d, 0xa0, 0x5d, 0x7f, 0xef, 0x5f, 0x9d, 0x5c, 0x86); DEFINE_GUID!(IID_IVssBackupComponentsEx, 0x963f03ad, 0x9e4c, 0x4a34, 0xac, 0x15, 0xe4, 0xb6, 0x17, 0x4e, 0x50, 0x36); STRUCT!{struct VSS_COMPONENTINFO { type_: VSS_COMPONENT_TYPE, // type is a keyword in rust bstrLogicalPath: BSTR, bstrComponentName: BSTR, bstrCaption: BSTR, pbIcon: *mut BYTE, cbIcon: UINT, bRestoreMetadata: bool, bNotifyOnBackupComplete: bool, bSelectable: bool, bSelectableForRestore: bool, dwComponentFlags: DWORD, cFileCount: UINT, cDatabases: UINT, cLogFiles: UINT, cDependencies: UINT, }} pub type PVSSCOMPONENTINFO = *const VSS_COMPONENTINFO; RIDL!( 
#[uuid(0x00000000, 0x0000, 0x0000, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00)] interface IVssWMComponent(IVssWMComponentVtbl): IUnknown(IUnknownVtbl) { fn GetComponentInfo( ppInfo: *mut PVSSCOMPONENTINFO, ) -> HRESULT, fn FreeComponentInfo( pInfo: PVSSCOMPONENTINFO, ) -> HRESULT, fn GetFile( iFile: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT, fn GetDatabaseFile( iDBFile: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT, fn GetDatabaseLogFile( iDbLogFile: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT, fn GetDependency( iDependency: UINT, ppDependency: *mut *mut IVssWMDependency, ) -> HRESULT, } ); RIDL!( #[uuid(0x902fcf7f, 0xb7fd, 0x42f8, 0x81, 0xf1, 0xb2, 0xe4, 0x00, 0xb1, 0xe5, 0xbd)] interface IVssExamineWriterMetadata(IVssExamineWriterMetadataVtbl): IUnknown(IUnknownVtbl) { fn GetIdentity( pidInstance: *mut VSS_ID, pidWriter: *mut VSS_ID, pbstrWriterName: *mut BSTR, pUsage: *mut VSS_USAGE_TYPE, pSource: *mut VSS_SOURCE_TYPE, ) -> HRESULT, fn GetFileCounts( pcIncludeFiles: *mut UINT, pcExcludeFiles: *mut UINT, pcComponents: *mut UINT, ) -> HRESULT,<|fim▁hole|> fn GetExcludeFile( iFile: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT, fn GetComponent( iComponent: UINT, ppComponent: *mut *mut IVssWMComponent, ) -> HRESULT, fn GetRestoreMethod( pMethod: *mut VSS_RESTOREMETHOD_ENUM, pbstrService: *mut BSTR, pbstrUserProcedure: *mut BSTR, pwriterRestore: *mut VSS_WRITERRESTORE_ENUM, pbRebootRequired: *mut bool, pcMappings: *mut UINT, ) -> HRESULT, fn GetAlternateLocationMapping( iMapping: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT, fn GetBackupSchema( pdwSchemaMask: *mut DWORD, ) -> HRESULT, fn GetDocument( pDoc: *mut c_void, ) -> HRESULT, //TODO IXMLDOMDocument, fn SaveAsXML( pbstrXML: *mut BSTR, ) -> HRESULT, fn LoadFromXML( pbstrXML: *mut BSTR, ) -> HRESULT, } ); RIDL!( #[uuid(0x0c0e5ec0, 0xca44, 0x472b, 0xb7, 0x02, 0xe6, 0x52, 0xdb, 0x1c, 0x04, 0x51)] interface 
IVssExamineWriterMetadataEx(IVssExamineWriterMetadataExVtbl): IVssExamineWriterMetadata(IVssExamineWriterMetadataVtbl) { fn GetIdentityEx( pidInstance: *mut VSS_ID, pidWriter: *mut VSS_ID, pbstrWriterName: *mut BSTR, pbstrInstanceName: *mut BSTR, pUsage: *mut VSS_USAGE_TYPE, pSource: *mut VSS_SOURCE_TYPE, ) -> HRESULT, } ); RIDL!( #[uuid(0xce115780, 0xa611, 0x431b, 0xb5, 0x7f, 0xc3, 0x83, 0x03, 0xab, 0x6a, 0xee)] interface IVssExamineWriterMetadataEx2(IVssExamineWriterMetadataEx2Vtbl): IVssExamineWriterMetadataEx(IVssExamineWriterMetadataExVtbl) { fn GetVersion( pdwMajorVersion: *mut DWORD, pdwMinorVersion: *mut DWORD, ) -> HRESULT, fn GetExcludeFromSnapshotCount( pcExcludedFromSnapshot: *mut UINT, ) -> HRESULT, fn GetExcludeFromSnapshotFile( iFile: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT, } ); #[repr(C)] pub struct IVssWriterComponentsExt { pub lpVtbl: *const IVssWriterComponentsExtVtbl, } #[repr(C)] pub struct IVssWriterComponentsExtVtbl { pub parent1: IVssWriterComponentsVtbl, pub parent2: IUnknownVtbl, } RIDL!( #[uuid(0x665c1d5f, 0xc218, 0x414d, 0xa0, 0x5d, 0x7f, 0xef, 0x5f, 0x9d, 0x5c, 0x86)] interface IVssBackupComponents(IVssBackupComponentsVtbl): IUnknown(IUnknownVtbl) { fn GetWriterComponentsCount( pcComponents: *mut UINT, ) -> HRESULT, fn GetWriterComponents( iWriter: UINT, ppWriter: *mut *mut IVssWriterComponentsExt, ) -> HRESULT, fn InitializeForBackup( bstrXML: BSTR, ) -> HRESULT, fn SetBackupState( bSelectComponents: bool, bBackupBootableSystemState: bool, backupType: VSS_BACKUP_TYPE, bPartialFileSupport: bool, ) -> HRESULT, fn InitializeForRestore( bstrXML: BSTR, ) -> HRESULT, fn SetRestoreState( restoreType: VSS_RESTORE_TYPE, ) -> HRESULT, fn GatherWriterMetadata( pAsync: *mut *mut IVssAsync, ) -> HRESULT, fn GetWriterMetadataCount( pcWriters: *mut UINT, ) -> HRESULT, fn GetWriterMetadata( iWriter: UINT, pidInstance: *mut VSS_ID, ppMetadata: *mut *mut IVssExamineWriterMetadata, ) -> HRESULT, fn FreeWriterMetadata() -> HRESULT, fn 
AddComponent( instanceId: VSS_ID, writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, ) -> HRESULT, fn PrepareForBackup( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn AbortBackup() -> HRESULT, fn GatherWriterStatus( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn GetWriterStatusCount( pcWriters: *mut UINT, ) -> HRESULT, fn FreeWriterStatus() -> HRESULT, fn GetWriterStatus( iWriter: UINT, pidInstance: *mut VSS_ID, pidWriter: *mut VSS_ID, pbstrWriter: *mut BSTR, pnStatus: *mut VSS_WRITER_STATE, phResultFailure: *mut HRESULT, ) -> HRESULT, fn SetBackupSucceeded( instanceId: VSS_ID, writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, bSucceded: bool, ) -> HRESULT, fn SetBackupOptions( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, wszBackupOptions: LPCWSTR, ) -> HRESULT, fn SetSelectedForRestore( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, bSelectedForRestore: bool, ) -> HRESULT, fn SetRestoreOptions( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, wszRestoreOptions: LPCWSTR, ) -> HRESULT, fn SetAdditionalRestores( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, bAdditionalRestores: bool, ) -> HRESULT, fn SetPreviousBackupStamp( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, wszPreviousBackupStamp: LPCWSTR, ) -> HRESULT, fn SaveAsXML( pbstrXML: *mut BSTR, ) -> HRESULT, fn BackupComplete( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn AddAlternativeLocationMapping( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, wszPath: LPCWSTR, wszFilespec: LPCWSTR, bRecursive: bool, wszDestination: LPCWSTR, ) -> HRESULT, fn AddRestoreSubcomponent( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, 
wszComponentName: LPCWSTR, wszSubComponentLogicalPath: LPCWSTR, wszSubComponentName: LPCWSTR, bRepair: bool, ) -> HRESULT, fn SetFileRestoreStatus( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, status: VSS_FILE_RESTORE_STATUS, ) -> HRESULT, fn AddNewTarget( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, wszPath: LPCWSTR, wszFileName: LPCWSTR, bRecursive: bool, wszAlternatePath: LPCWSTR, ) -> HRESULT, fn SetRangesFilePath( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, iPartialFile: UINT, wszRangesFile: LPCWSTR, ) -> HRESULT, fn PreRestore( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn PostRestore( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn SetContext( lContext: LONG, ) -> HRESULT, fn StartSnapshotSet( pSnapshotSetId: *mut VSS_ID, ) -> HRESULT, fn AddToSnapshotSet( pwszVolumeName: VSS_PWSZ, ProviderId: VSS_ID, pidSnapshot: *mut VSS_ID, ) -> HRESULT, fn DoSnapshotSet( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn DeleteSnapshots( SourceObjectId: VSS_ID, eSourceObjectType: VSS_OBJECT_TYPE, bForceDelete: BOOL, plDeletedSnapshots: *mut LONG, pNondeletedSnapshotID: *mut VSS_ID, ) -> HRESULT, fn ImportSnapshots( ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn BreakSnapshotSet( SnapshotSetId: VSS_ID, ) -> HRESULT, fn GetSnapshotProperties( SnapshotId: VSS_ID, pProp: *mut VSS_SNAPSHOT_PROP, ) -> HRESULT, fn Query( QueriedObjectId: VSS_ID, eQueriedObjectType: VSS_OBJECT_TYPE, eReturnedObjectsType: VSS_OBJECT_TYPE, ppEnum: *mut *mut IVssEnumObject, ) -> HRESULT, fn IsVolumeSupported( ProviderId: VSS_ID, pwszVolumeName: VSS_PWSZ, pbSupportedByThisProvider: *mut BOOL, ) -> HRESULT, fn DisableWriterClasses( rgWriterClassId: *const VSS_ID, cClassId: UINT, ) -> HRESULT, fn EnableWriterClasses( rgWriterClassId: *const VSS_ID, cClassId: UINT, ) -> HRESULT, fn DisableWriterInstances( rgWriterInstanceId: *const VSS_ID, cInstanceId: UINT, ) 
-> HRESULT, fn ExposeSnapshot( SnapshotId: VSS_ID, wszPathFromRoot: VSS_PWSZ, lAttributes: LONG, wszExpose: VSS_PWSZ, pwszExposed: VSS_PWSZ, ) -> HRESULT, fn RevertToSnapshot( SnapshotId: VSS_ID, bForceDismount: BOOL, ) -> HRESULT, fn QueryRevertStatus( pwszVolume: VSS_PWSZ, ppAsync: *mut *mut IVssAsync, ) -> HRESULT, } ); RIDL!( #[uuid(0x963f03ad, 0x9e4c, 0x4a34, 0xac, 0x15, 0xe4, 0xb6, 0x17, 0x4e, 0x50, 0x36)] interface IVssBackupComponentsEx(IVssBackupComponentsExVtbl): IVssBackupComponents(IVssBackupComponentsVtbl) { fn GetWriterMetadataEx( iWriter: UINT, pidInstance: *mut VSS_ID, ppMetadata: *mut *mut IVssExamineWriterMetadataEx, ) -> HRESULT, fn SetSelectedForRestoreEx( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, bSelectedForRestore: bool, instanceId: VSS_ID, ) -> HRESULT, } ); RIDL!( #[uuid(0xacfe2b3a, 0x22c9, 0x4ef8, 0xbd, 0x03, 0x2f, 0x9c, 0xa2, 0x30, 0x08, 0x4e)] interface IVssBackupComponentsEx2(IVssBackupComponentsEx2Vtbl): IVssBackupComponentsEx(IVssBackupComponentsExVtbl) { fn UnexposeSnapshot( snapshotId: VSS_ID, ) -> HRESULT, fn SetAuthoritativeRestore( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, bAuth: bool, ) -> HRESULT, fn SetRollForward( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, rollType: VSS_ROLLFORWARD_TYPE, wszRollForwardPoint: LPCWSTR, ) -> HRESULT, fn SetRestoreName( writerId: VSS_ID, ct: VSS_COMPONENT_TYPE, wszLogicalPath: LPCWSTR, wszComponentName: LPCWSTR, wszRestoreName: LPCWSTR, ) -> HRESULT, fn BreakSnapshotSetEx( SnapshotSetID: VSS_ID, dwBreakFlags: DWORD, ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn PreFastRecovery( SnapshotSetID: VSS_ID, dwPreFastRecoveryFlags: DWORD, ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn FastRecovery( SnapshotSetID: VSS_ID, dwFastRecoveryFlags: DWORD, ppAsync: *mut *mut IVssAsync, ) -> HRESULT, } ); RIDL!( #[uuid(0xc191bfbc, 0xb602, 0x4675, 0x8b, 0xd1, 
0x67, 0xd6, 0x42, 0xf5, 0x29, 0xd5)] interface IVssBackupComponentsEx3(IVssBackupComponentsEx3Vtbl): IVssBackupComponentsEx2(IVssBackupComponentsEx2Vtbl) { fn GetWriterStatusEx( iWriter: UINT, pidInstance: *mut VSS_ID, pidWriter: *mut VSS_ID, pbstrWriter: *mut BSTR, pnStatus: *mut VSS_WRITER_STATE, phrFailureWriter: *mut HRESULT, phrApplication: *mut HRESULT, pbstrApplicationMessage: *mut BSTR, ) -> HRESULT, fn AddSnapshotToRecoverySet( snapshotId: VSS_ID, dwFlags: DWORD, pwszDestinationVolume: VSS_PWSZ, ) -> HRESULT, fn RecoverSet( dwFlags: DWORD, ppAsync: *mut *mut IVssAsync, ) -> HRESULT, fn GetSessionId( idSession: *mut VSS_ID, ) -> HRESULT, } ); RIDL!( #[uuid(0xf434c2fd, 0xb553, 0x4961, 0xa9, 0xf9, 0xa8, 0xe9, 0x0b, 0x67, 0x3e, 0x53)] interface IVssBackupComponentsEx4(IVssBackupComponentsEx4Vtbl): IVssBackupComponentsEx3(IVssBackupComponentsEx3Vtbl) { fn GetRootAndLogicalPrefixPaths( pwszFilePath: VSS_PWSZ, ppwszRootPath: *mut VSS_PWSZ, ppwszLogicalPrefix: *mut VSS_PWSZ, bNormalizeFQDNforRootPath: BOOL, ) -> HRESULT, } ); pub const VSS_SW_BOOTABLE_STATE: DWORD = 1; extern "system" { #[link_name="CreateVssBackupComponentsInternal"] pub fn CreateVssBackupComponents( ppBackup: *mut *mut IVssBackupComponents, ) -> HRESULT; #[link_name="CreateVssExamineWriterMetadataInternal"] pub fn CreateVssExamineWriterMetadata( bstrXML: BSTR, ppMetadata: *mut *mut IVssExamineWriterMetadata, ) -> HRESULT; #[link_name="IsVolumeSnapshottedInternal"] pub fn IsVolumeSnapshotted( pwszVolumeName: VSS_PWSZ, pbSnapshotsPresent: *mut BOOL, plSnapshotCapability: *mut LONG, ) -> HRESULT; #[link_name="VssFreeSnapshotPropertiesInternal"] pub fn VssFreeSnapshotProperties( pProp: *mut VSS_SNAPSHOT_PROP, ); #[link_name="GetProviderMgmtInterfaceInternal"] pub fn GetProviderMgmtInterface( ProviderId: VSS_ID, InterfaceId: IID, ppItf: *mut *mut IUnknown, ) -> HRESULT; #[link_name="ShouldBlockRevertInternal"] pub fn ShouldBlockRevert( wszVolumeName: LPCWSTR, pbBlock: *mut bool, ) -> HRESULT; 
}<|fim▁end|>
fn GetIncludeFile( iFile: UINT, ppFiledesc: *mut *mut IVssWMFiledesc, ) -> HRESULT,
<|file_name|>managed-pointer-within-unique.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-android: FIXME(#10381) // compile-flags:-g // === GDB TESTS =================================================================================== // gdb-command:set print pretty off // gdb-command:rbreak zzz // gdb-command:run // gdb-command:finish // gdb-command:print *ordinary_unique<|fim▁hole|>// gdb-check:$2 = -3 // gdb-command:print managed_within_unique->y->val // gdb-check:$3 = -4 // === LLDB TESTS ================================================================================== // lldb-command:run // lldb-command:print *ordinary_unique // lldb-check:[...]$0 = (-1, -2) // lldb-command:print managed_within_unique->x // lldb-check:[...]$1 = -3 // lldb-command:print managed_within_unique->y->val // lldb-check:[...]$2 = -4 #![allow(unused_variable)] use std::gc::{GC, Gc}; struct ContainsManaged { x: int, y: Gc<int>, } fn main() { let ordinary_unique = box() (-1i, -2i); let managed_within_unique = box ContainsManaged { x: -3, y: box(GC) -4i }; zzz(); // #break } fn zzz() {()}<|fim▁end|>
// gdb-check:$1 = {-1, -2} // gdb-command:print managed_within_unique->x
<|file_name|>throbber.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node "use strict";<|fim▁hole|>var setupThrobber = require("../../throbber") , throbber = setupThrobber(process.stdout.write.bind(process.stdout), 200); process.stdout.write("START"); throbber.start(); setTimeout(throbber.stop, 1100);<|fim▁end|>
<|file_name|>Mobitel.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h> template<typename T> T gcd(T a, T b) { if(!b) return a; return gcd(b, a % b); } template<typename T> T lcm(T a, T b) { return a * b / gcd(a, b); } template<typename T> void chmin(T& a, T b) { a = (a > b) ? b : a; } template<typename T> void chmax(T& a, T b) { a = (a < b) ? b : a; }<|fim▁hole|> typedef long long Int; typedef unsigned uint; int TL[10]; char S[110]; int map_key[10]; string key[10]; int main(void) { key[1] = ""; key[2] = "abc"; key[3] = "def"; key[4] = "ghi"; key[5] = "jkl"; key[6] = "mno"; key[7] = "pqrs"; key[8] = "tuv"; key[9] = "wxyz"; for (int i = 1; i <= 9; i++) { scanf("%d", &TL[i]); map_key[i] = TL[i]; } scanf("%s", S); int N = strlen(S); int last = -1; for (int i = 0; i < N; i++) { int id = -1, press = 0; bool sharp = false; for (int j = 1; j <= 9; j++) { if (key[map_key[j]].find(S[i]) != string::npos) { //cout << "\n" << j << " " << S[i] << " " << key[map_key[j]] << "\n"; id = j; for (int k = 0; k < key[map_key[j]].size(); k++) { if (key[map_key[j]][k] == S[i]) { press = k; break; } } if (i > 0) { if (id == last) { sharp = true; } } last = j; break; } } if (sharp) { putchar('#'); } for (int i = 0; i <= press; i++) { printf("%d", id); } } printf("\n"); return 0; }<|fim▁end|>
int in() { int x; scanf("%d", &x); return x; } using namespace std;
<|file_name|>addusertogroup.py<|end_file_name|><|fim▁begin|>from django.core.management.base import BaseCommand, CommandError from django.db import IntegrityError <|fim▁hole|>from olympia.users.models import UserProfile class Command(BaseCommand): help = 'Add a new user to a group.' log = olympia.core.logger.getLogger('z.users') def add_arguments(self, parser): parser.add_argument('user', type=unicode, help='User id or email') parser.add_argument('group_id', type=int, help='Group id') def handle(self, *args, **options): do_adduser(options['user'], options['group_id']) msg = 'Adding {user} to {group}\n'.format( user=options['user'], group=options['group_id']) self.log.info(msg) self.stdout.write(msg) def do_adduser(user, group): try: if '@' in user: user = UserProfile.objects.get(email=user) elif user.isdigit(): user = UserProfile.objects.get(pk=user) else: raise CommandError('Unknown input for user.') group = Group.objects.get(pk=group) GroupUser.objects.create(user=user, group=group) except IntegrityError, e: raise CommandError('User is already in that group? %s' % e) except UserProfile.DoesNotExist: raise CommandError('User ({user}) does not exist.'.format(user=user)) except Group.DoesNotExist: raise CommandError('Group ({group}) does not exist.' .format(group=group))<|fim▁end|>
import olympia.core.logger from olympia.access.models import Group, GroupUser
<|file_name|>config.py<|end_file_name|><|fim▁begin|># Database DB_NAME = 'censusreporter_ke' DB_USER = 'censusreporter_ke'<|fim▁hole|><|fim▁end|>
DB_PASSWORD = 'censusreporter_ke'
<|file_name|>transaction.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd. // This file is part of Parity. // Parity is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // Parity is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License // along with Parity. If not, see <http://www.gnu.org/licenses/>. //! Executed transaction. use hash::Address; use uint::Uint; use bytes::Bytes; /// Executed transaction. #[derive(Debug, PartialEq, Deserialize)] pub struct Transaction { /// Contract address. pub address: Address, /// Transaction sender. #[serde(rename="caller")] pub sender: Address, /// Contract code. pub code: Bytes, /// Input data. pub data: Bytes, /// Gas. pub gas: Uint, /// Gas price. #[serde(rename="gasPrice")] pub gas_price: Uint, /// Transaction origin. pub origin: Address, /// Sent value. pub value: Uint, } #[cfg(test)] mod tests { use serde_json; use vm::Transaction; #[test] fn transaction_deserialization() { let s = r#"{ "address" : "0f572e5295c57f15886f9b263e2f6d2d6c7b5ec6",<|fim▁hole|> "gasPrice" : "0x5af3107a4000", "origin" : "cd1722f2947def4cf144679da39c4c32bdc35681", "value" : "0x0de0b6b3a7640000" }"#; let _deserialized: Transaction = serde_json::from_str(s).unwrap(); } }<|fim▁end|>
"caller" : "cd1722f2947def4cf144679da39c4c32bdc35681", "code" : "0x7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff7fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff01600055", "data" : "0x", "gas" : "0x0186a0",
<|file_name|>hello.cc<|end_file_name|><|fim▁begin|>/* This file is part of MADNESS. Copyright (C) 2007,2010 Oak Ridge National Laboratory This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA For more information please contact: Robert J. Harrison Oak Ridge National Laboratory One Bethel Valley Road<|fim▁hole|> email: [email protected] tel: 865-241-3937 fax: 865-572-0680 $Id$ */ /// \file nick/hello.cc /// We are projecting a time evolved wave function onto some bound states #include <mra/mra.h> #include <complex> #include <string> #include <fstream> using std::ofstream; #include <nick/wavef.h> using namespace madness; const int nIOProcessors =1; const std::string prefix = "data"; typedef std::complex<double> complexd; typedef Function<complexd,NDIM> complex_functionT; const char* wave_function_filename(int step); bool wave_function_exists(World& world, int step); void wave_function_store(World& world, int step, const complex_functionT& psi); complex_functionT wave_function_load(World& world, int step); const char* wave_function_filename(int step) { static char fname[1024]; sprintf(fname, "%s-%5.5d", prefix.c_str(), step); return fname; } bool wave_function_exists(World& world, int step) { return archive::ParallelInputArchive::exists(world, wave_function_filename(step)); } void wave_function_store(World& world, int step, const complex_functionT& psi) { 
archive::ParallelOutputArchive ar(world, wave_function_filename(step), nIOProcessors); ar & psi; } complex_functionT wave_function_load(World& world, int step) { complex_functionT psi; archive::ParallelInputArchive ar(world, wave_function_filename(step)); ar & psi; return psi; } void doWork(World& world) { PRINTLINE("Creating three basis functions"); Function<complexd,NDIM> psi100 = FunctionFactory<complexd,NDIM>(world). functor(functorT( new BoundWF(1.0, 1,0,0))); Function<complexd,NDIM> psi200 = FunctionFactory<complexd,NDIM>(world). functor(functorT( new BoundWF(1.0, 2,0,0))); Function<complexd,NDIM> psi210 = FunctionFactory<complexd,NDIM>(world). functor(functorT( new BoundWF(1.0, 2,1,0))); int step = 0; PRINTLINE("Testing our capacity to load a wave function from disk"); if(wave_function_exists(world,step)) { PRINTLINE("wave_function_exists = true"); Function<complexd, NDIM> loadedFunc = wave_function_load(world, step); PRINT("<data|100> = ") << loadedFunc.inner(psi100) << endl; PRINT("<data|200> = ") << loadedFunc.inner(psi200) << endl; PRINT("<data|210> = ") << loadedFunc.inner(psi210) << endl; } else PRINTLINE("LoadedFunc doesn't exist"); } int main(int argc, char**argv) { // Initialize the parallel programming environment MPI::Init(argc, argv); World world(MPI::COMM_WORLD); // Load info for MADNESS numerical routines startup(world,argc,argv); // Setup defaults for numerical functions FunctionDefaults<NDIM>::set_k(8); // Wavelet order FunctionDefaults<NDIM>::set_thresh(1e-3); // Accuracy FunctionDefaults<NDIM>::set_cubic_cell(-20.0, 20.0); try { doWork(world); } catch (const MPI::Exception& e) { //print(e); error("caught an MPI exception"); } catch (const madness::MadnessException& e) { print(e); error("caught a MADNESS exception"); } catch (const madness::TensorException& e) { print(e); error("caught a Tensor exception"); } catch (const char* s) { print(s); error("caught a c-string exception"); } catch (char* s) { print(s); error("caught a c-string 
exception"); } catch (const std::string& s) { print(s); error("caught a string (class) exception"); } catch (const std::exception& e) { print(e.what()); error("caught an STL exception"); } catch (...) { error("caught unhandled exception"); } MPI::Finalize(); //FLAG return 0; }<|fim▁end|>
P.O. Box 2008, MS-6367
<|file_name|>issue-30438-b.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Modified regression test for Issue #30438 that exposed an // independent issue (see discussion on ticket). use std::ops::Index; struct Test<'a> { s: &'a String } impl <'a> Index<usize> for Test<'a> { type Output = Test<'a>; fn index(&self, _: usize) -> &Self::Output { &Test { s: &self.s} //~^ ERROR: borrowed value does not live long enough } } fn main() {<|fim▁hole|> let s = "Hello World".to_string(); let test = Test{s: &s}; let r = &test[0]; println!("{}", test.s); // OK since test is valid println!("{}", r.s); // Segfault since value pointed by r has already been dropped }<|fim▁end|>
<|file_name|>booking_shooting_service_book_test.go<|end_file_name|><|fim▁begin|>package book import ( "testing" "time" . "github.com/bborbe/assert" booking_booked_event "github.com/bborbe/booking/booked_event" booking_shooting "github.com/bborbe/booking/shooting" "github.com/bborbe/eventbus" ) func TestImplementsService(t *testing.T) { r := New(nil, nil, nil) var i *Service err := AssertThat(r, Implements(i)) if err != nil { t.Fatal(err) } } <|fim▁hole|> t.Fatal(err) } now, err := time.Parse(time.RFC3339, "2012-01-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } if err := AssertThat(timeInFuture(ti, now), Is(false)); err != nil { t.Fatal(err) } } func TestTimeInFutureYearGt(t *testing.T) { ti, err := time.Parse(time.RFC3339, "2013-01-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } now, err := time.Parse(time.RFC3339, "2012-01-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } if err := AssertThat(timeInFuture(ti, now), Is(true)); err != nil { t.Fatal(err) } } func TestTimeInFutureMonthGt(t *testing.T) { ti, err := time.Parse(time.RFC3339, "2012-02-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } now, err := time.Parse(time.RFC3339, "2012-01-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } if err := AssertThat(timeInFuture(ti, now), Is(true)); err != nil { t.Fatal(err) } } func TestTimeInFutureDayGt(t *testing.T) { ti, err := time.Parse(time.RFC3339, "2012-02-03T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } now, err := time.Parse(time.RFC3339, "2012-01-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } if err := AssertThat(timeInFuture(ti, now), Is(true)); err != nil { t.Fatal(err) } } func TestPostEvent(t *testing.T) { var err error e := eventbus.New() counter := 0 if err := e.RegisterHandler(func(booking_booked_event.BookedEvent) { 
counter++ }); err != nil { t.Fatal(err) } r := New(nil, e, nil) if err := AssertThat(counter, Is(0)); err != nil { t.Fatal(err) } err = r.postEvent(&booking_shooting.Shooting{}) if err := AssertThat(err, NilValue()); err != nil { t.Fatal(err) } if err := AssertThat(counter, Is(1)); err != nil { t.Fatal(err) } }<|fim▁end|>
func TestTimeInFutureEqual(t *testing.T) { ti, err := time.Parse(time.RFC3339, "2012-01-02T15:04:05Z") if err := AssertThat(err, NilValue()); err != nil {
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>mod stat_utils; mod text_utils; mod vector_utils; mod vision_utils; pub use direction_utils::*; pub use number_utils::*; pub use portal_utils::*; pub use stat_utils::*; pub use text_utils::*; pub use vector_utils::*; pub use vision_utils::*;<|fim▁end|>
mod direction_utils; mod number_utils; mod portal_utils;
<|file_name|>496A - Minimum Difficulty.cpp<|end_file_name|><|fim▁begin|>#include <iostream> using namespace std; int main() { int n, x, y; cin >> n; int a[n]; for (int i = 0; i < n; ++i) {<|fim▁hole|> for (int i = 2; i < n; ++i) { x = max(a[i] - a[i - 1], x); y = min(a[i] - a[i - 2], y); } cout << max(x, y); }<|fim▁end|>
cin >> a[i]; } x = a[1] - a[0]; y = a[2] - a[0];
<|file_name|>statistic.go<|end_file_name|><|fim▁begin|><|fim▁hole|>import ( "net/http" "strconv" "time" "gopkg.in/gin-gonic/gin.v1" "github.com/knopt/iot/backend/api/model" "github.com/knopt/iot/backend/error" ) // GetStatisticsByDeviceDataType by given requests parameters func (api *Api) GetStatisticsByDeviceDataType(context *gin.Context) { deviceID := context.Param("id") dateFrom := context.Param("from") dateTo := context.Param("to") dataType := context.Param("type") responseStatistics, err := api.Service.GetStatistics(deviceID, dateFrom, dateTo, dataType) if err != nil { error.Handler(&error.Error{Code: http.StatusBadRequest, Err: err}, context) return } context.IndentedJSON(http.StatusOK, responseStatistics) } //InsertStatistic from api statistic form func (api *Api) InsertStatistic(context *gin.Context) { var statisticForm model.StatisticForm if err := context.BindJSON(&statisticForm); err != nil { context.AbortWithError(http.StatusBadRequest, err) return } err := api.Service.InsertStatistic(&statisticForm) if err != nil { error.Handler(&error.Error{Code: http.StatusBadRequest, Err: err}, context) return } context.String(http.StatusOK, "Success") } //InsertStatisticInUrl from api statistic form func (api *Api) InsertStatisticInUrl(context *gin.Context) { var statisticForm model.StatisticForm value := context.Param("value") deviceID := context.Param("id") statType := context.Param("type") valueFloat, err := strconv.ParseFloat(value, 64) if err != nil { context.AbortWithError(http.StatusBadRequest, err) return } statisticForm.Value = valueFloat statisticForm.DeviceID = deviceID statisticForm.Type = statType statisticForm.Date = time.Now() err = api.Service.InsertStatistic(&statisticForm) if err != nil { error.Handler(&error.Error{Code: http.StatusBadRequest, Err: err}, context) return } context.String(http.StatusOK, "Success") } //GetStatisticsTypes by deviceID func (api *Api) GetStatisticsTypes(context *gin.Context) { deviceID := context.Param("id") 
responseTypes, err := api.Service.GetStatisticsTypes(deviceID) if err != nil { error.Handler(&error.Error{Code: http.StatusBadRequest, Err: err}, context) return } context.IndentedJSON(http.StatusOK, responseTypes) }<|fim▁end|>
package api
<|file_name|>job-progress.component.spec.ts<|end_file_name|><|fim▁begin|>import { async, ComponentFixture, TestBed } from '@angular/core/testing'; import { JobProgressComponent } from './job-progress.component'; describe('JobProgressComponent', () => { let component: JobProgressComponent; let fixture: ComponentFixture<JobProgressComponent>; beforeEach(async(() => { TestBed.configureTestingModule({ declarations: [ JobProgressComponent ] })<|fim▁hole|> })); beforeEach(() => { fixture = TestBed.createComponent(JobProgressComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it('should be created', () => { expect(component).toBeTruthy(); }); });<|fim▁end|>
.compileComponents();
<|file_name|>batch_decoupled_variational_strategy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 import torch from torch.distributions.kl import kl_divergence from ..distributions import Delta, MultivariateNormal from ..lazy import MatmulLazyTensor, SumLazyTensor from ..utils.errors import CachingError from ..utils.memoize import pop_from_cache_ignore_args from .delta_variational_distribution import DeltaVariationalDistribution from .variational_strategy import VariationalStrategy class BatchDecoupledVariationalStrategy(VariationalStrategy): r""" A VariationalStrategy that uses a different set of inducing points for the variational mean and variational covar. It follows the "decoupled" model proposed by `Jankowiak et al. (2020)`_ (which is roughly based on the strategies proposed by `Cheng et al. (2017)`_. Let :math:`\mathbf Z_\mu` and :math:`\mathbf Z_\sigma` be the mean/variance inducing points. The variational distribution for an input :math:`\mathbf<|fim▁hole|> \begin{align*} \mathbb E[ f(\mathbf x) ] &= \mathbf k_{\mathbf Z_\mu \mathbf x}^\top \mathbf K_{\mathbf Z_\mu \mathbf Z_\mu}^{-1} \mathbf m \\ \text{Var}[ f(\mathbf x) ] &= k_{\mathbf x \mathbf x} - \mathbf k_{\mathbf Z_\sigma \mathbf x}^\top \mathbf K_{\mathbf Z_\sigma \mathbf Z_\sigma}^{-1} \left( \mathbf K_{\mathbf Z_\sigma} - \mathbf S \right) \mathbf K_{\mathbf Z_\sigma \mathbf Z_\sigma}^{-1} \mathbf k_{\mathbf Z_\sigma \mathbf x} \end{align*} where :math:`\mathbf m` and :math:`\mathbf S` are the variational parameters. Unlike the original proposed implementation, :math:`\mathbf Z_\mu` and :math:`\mathbf Z_\sigma` have **the same number of inducing points**, which allows us to perform batched operations. Additionally, you can use a different set of kernel hyperparameters for the mean and the variance function. 
We recommend using this feature only with the :obj:`~gpytorch.mlls.PredictiveLogLikelihood` objective function as proposed in "Parametric Gaussian Process Regressors" (`Jankowiak et al. (2020)`_). Use the :attr:`mean_var_batch_dim` to indicate which batch dimension corresponds to the different mean/var kernels. .. note:: We recommend using the "right-most" batch dimension (i.e. :attr:`mean_var_batch_dim=-1`) for the dimension that corresponds to the different mean/variance kernel parameters. Assuming you want `b1` many independent GPs, the :obj:`~gpytorch.variational._VariationalDistribution` objects should have a batch shape of `b1`, and the mean/covar modules of the GP should have a batch shape of `b1 x 2`. (The 2 corresponds to the mean/variance hyperparameters.) .. seealso:: :obj:`~gpytorch.variational.OrthogonallyDecoupledVariationalStrategy` (a variant proposed by `Salimbeni et al. (2018)`_ that uses orthogonal projections.) :param ~gpytorch.models.ApproximateGP model: Model this strategy is applied to. Typically passed in when the VariationalStrategy is created in the __init__ method of the user defined model. :param torch.Tensor inducing_points: Tensor containing a set of inducing points to use for variational inference. :param ~gpytorch.variational.VariationalDistribution variational_distribution: A VariationalDistribution object that represents the form of the variational distribution :math:`q(\mathbf u)` :param learn_inducing_locations: (Default True): Whether or not the inducing point locations :math:`\mathbf Z` should be learned (i.e. are they parameters of the model). :type learn_inducing_locations: `bool`, optional :type mean_var_batch_dim: `int`, optional :param mean_var_batch_dim: (Default `None`): Set this parameter (ideally to `-1`) to indicate which dimension corresponds to different kernel hyperparameters for the mean/variance functions. .. _Cheng et al. (2017): https://arxiv.org/abs/1711.10127 .. _Salimbeni et al. 
(2018): https://arxiv.org/abs/1809.08820 .. _Jankowiak et al. (2020): https://arxiv.org/abs/1910.07123 Example (**different** hypers for mean/variance): >>> class MeanFieldDecoupledModel(gpytorch.models.ApproximateGP): >>> ''' >>> A batch of 3 independent MeanFieldDecoupled PPGPR models. >>> ''' >>> def __init__(self, inducing_points): >>> # The variational parameters have a batch_shape of [3] >>> variational_distribution = gpytorch.variational.MeanFieldVariationalDistribution( >>> inducing_points.size(-1), batch_shape=torch.Size([3]), >>> ) >>> variational_strategy = gpytorch.variational.BatchDecoupledVariationalStrategy( >>> self, inducing_points, variational_distribution, learn_inducing_locations=True, >>> mean_var_batch_dim=-1 >>> ) >>> >>> # The mean/covar modules have a batch_shape of [3, 2] >>> # where the last batch dim corresponds to the mean & variance hyperparameters >>> super().__init__(variational_strategy) >>> self.mean_module = gpytorch.means.ConstantMean(batch_shape=torch.Size([3, 2])) >>> self.covar_module = gpytorch.kernels.ScaleKernel( >>> gpytorch.kernels.RBFKernel(batch_shape=torch.Size([3, 2])), >>> batch_shape=torch.Size([3, 2]), >>> ) Example (**shared** hypers for mean/variance): >>> class MeanFieldDecoupledModel(gpytorch.models.ApproximateGP): >>> ''' >>> A batch of 3 independent MeanFieldDecoupled PPGPR models. 
>>> ''' >>> def __init__(self, inducing_points): >>> # The variational parameters have a batch_shape of [3] >>> variational_distribution = gpytorch.variational.MeanFieldVariationalDistribution( >>> inducing_points.size(-1), batch_shape=torch.Size([3]), >>> ) >>> variational_strategy = gpytorch.variational.BatchDecoupledVariationalStrategy( >>> self, inducing_points, variational_distribution, learn_inducing_locations=True, >>> ) >>> >>> # The mean/covar modules have a batch_shape of [3] >>> super().__init__(variational_strategy) >>> self.mean_module = gpytorch.means.ConstantMean(batch_shape=torch.Size([3])) >>> self.covar_module = gpytorch.kernels.ScaleKernel( >>> gpytorch.kernels.RBFKernel(batch_shape=torch.Size([3])), >>> batch_shape=torch.Size([3]), >>> ) """ def __init__( self, model, inducing_points, variational_distribution, learn_inducing_locations=True, mean_var_batch_dim=None ): if isinstance(variational_distribution, DeltaVariationalDistribution): raise NotImplementedError( "BatchDecoupledVariationalStrategy does not work with DeltaVariationalDistribution" ) if mean_var_batch_dim is not None and mean_var_batch_dim >= 0: raise ValueError(f"mean_var_batch_dim should be negative indexed, got {mean_var_batch_dim}") self.mean_var_batch_dim = mean_var_batch_dim # Maybe unsqueeze inducing points if inducing_points.dim() == 1: inducing_points = inducing_points.unsqueeze(-1) # We're going to create two set of inducing points # One set for computing the mean, one set for computing the variance if self.mean_var_batch_dim is not None: inducing_points = torch.stack([inducing_points, inducing_points], dim=(self.mean_var_batch_dim - 2)) else: inducing_points = torch.stack([inducing_points, inducing_points], dim=-3) super().__init__(model, inducing_points, variational_distribution, learn_inducing_locations) def _expand_inputs(self, x, inducing_points): # If we haven't explicitly marked a dimension as batch, add the corresponding batch dimension to the input if 
self.mean_var_batch_dim is None: x = x.unsqueeze(-3) else: x = x.unsqueeze(self.mean_var_batch_dim - 2) return super()._expand_inputs(x, inducing_points) def forward(self, x, inducing_points, inducing_values, variational_inducing_covar=None): # We'll compute the covariance, and cross-covariance terms for both the # pred-mean and pred-covar, using their different inducing points (and maybe kernel hypers) mean_var_batch_dim = self.mean_var_batch_dim or -1 # Compute full prior distribution full_inputs = torch.cat([inducing_points, x], dim=-2) full_output = self.model.forward(full_inputs) full_covar = full_output.lazy_covariance_matrix # Covariance terms num_induc = inducing_points.size(-2) test_mean = full_output.mean[..., num_induc:] induc_induc_covar = full_covar[..., :num_induc, :num_induc].add_jitter() induc_data_covar = full_covar[..., :num_induc, num_induc:].evaluate() data_data_covar = full_covar[..., num_induc:, num_induc:] # Compute interpolation terms # K_ZZ^{-1/2} K_ZX # K_ZZ^{-1/2} \mu_Z L = self._cholesky_factor(induc_induc_covar) if L.shape != induc_induc_covar.shape: # Aggressive caching can cause nasty shape incompatibilies when evaluating with different batch shapes # TODO: Use a hook to make this cleaner try: pop_from_cache_ignore_args(self, "cholesky_factor") except CachingError: pass L = self._cholesky_factor(induc_induc_covar) interp_term = L.inv_matmul(induc_data_covar.double()).to(full_inputs.dtype) mean_interp_term = interp_term.select(mean_var_batch_dim - 2, 0) var_interp_term = interp_term.select(mean_var_batch_dim - 2, 1) # Compute the mean of q(f) # k_XZ K_ZZ^{-1/2} m + \mu_X # Here we're using the terms that correspond to the mean's inducing points predictive_mean = torch.add( torch.matmul(mean_interp_term.transpose(-1, -2), inducing_values.unsqueeze(-1)).squeeze(-1), test_mean.select(mean_var_batch_dim - 1, 0), ) # Compute the covariance of q(f) # K_XX + k_XZ K_ZZ^{-1/2} (S - I) K_ZZ^{-1/2} k_ZX middle_term = 
self.prior_distribution.lazy_covariance_matrix.mul(-1) if variational_inducing_covar is not None: middle_term = SumLazyTensor(variational_inducing_covar, middle_term) predictive_covar = SumLazyTensor( data_data_covar.add_jitter(1e-4).evaluate().select(mean_var_batch_dim - 2, 1), MatmulLazyTensor(var_interp_term.transpose(-1, -2), middle_term @ var_interp_term), ) return MultivariateNormal(predictive_mean, predictive_covar) def kl_divergence(self): variational_dist = self.variational_distribution prior_dist = self.prior_distribution mean_dist = Delta(variational_dist.mean) covar_dist = MultivariateNormal( torch.zeros_like(variational_dist.mean), variational_dist.lazy_covariance_matrix ) return kl_divergence(mean_dist, prior_dist) + kl_divergence(covar_dist, prior_dist)<|fim▁end|>
x` is given by: .. math::
<|file_name|>queue.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import frappe import HTMLParser import smtplib, quopri from frappe import msgprint, throw, _ from frappe.email.smtp import SMTPServer, get_outgoing_email_account from frappe.email.email_body import get_email, get_formatted_html from frappe.utils.verified_command import get_signed_params, verify_request from html2text import html2text from frappe.utils import get_url, nowdate, encode, now_datetime, add_days, split_emails, cstr, cint from rq.timeouts import JobTimeoutException from frappe.utils.scheduler import log class EmailLimitCrossedError(frappe.ValidationError): pass def send(recipients=None, sender=None, subject=None, message=None, reference_doctype=None, reference_name=None, unsubscribe_method=None, unsubscribe_params=None, unsubscribe_message=None, attachments=None, reply_to=None, cc=[], message_id=None, in_reply_to=None, send_after=None, expose_recipients=None, send_priority=1, communication=None, now=False, read_receipt=None, queue_separately=False, is_notification=False, add_unsubscribe_link=1): """Add email to sending queue (Email Queue) :param recipients: List of recipients. :param sender: Email sender. :param subject: Email subject. :param message: Email message. :param reference_doctype: Reference DocType of caller document. :param reference_name: Reference name of caller document. :param send_priority: Priority for Email Queue, default 1. :param unsubscribe_method: URL method for unsubscribe. Default is `/api/method/frappe.email.queue.unsubscribe`. :param unsubscribe_params: additional params for unsubscribed links. default are name, doctype, email :param attachments: Attachments to be sent. :param reply_to: Reply to be captured here (default inbox) :param in_reply_to: Used to send the Message-Id of a received email back as In-Reply-To. 
:param send_after: Send this email after the given datetime. If value is in integer, then `send_after` will be the automatically set to no of days from current date. :param communication: Communication link to be set in Email Queue record :param now: Send immediately (don't send in the background) :param queue_separately: Queue each email separately :param is_notification: Marks email as notification so will not trigger notifications from system :param add_unsubscribe_link: Send unsubscribe link in the footer of the Email, default 1. """ if not unsubscribe_method: unsubscribe_method = "/api/method/frappe.email.queue.unsubscribe" if not recipients and not cc: return if isinstance(recipients, basestring): recipients = split_emails(recipients) if isinstance(cc, basestring): cc = split_emails(cc) if isinstance(send_after, int): send_after = add_days(nowdate(), send_after) email_account = get_outgoing_email_account(True, append_to=reference_doctype) if not sender or sender == "Administrator": sender = email_account.default_sender check_email_limit(recipients) formatted = get_formatted_html(subject, message, email_account=email_account) try: text_content = html2text(formatted) except HTMLParser.HTMLParseError: text_content = "See html attachment" if reference_doctype and reference_name: unsubscribed = [d.email for d in frappe.db.get_all("Email Unsubscribe", "email", {"reference_doctype": reference_doctype, "reference_name": reference_name})] unsubscribed += [d.email for d in frappe.db.get_all("Email Unsubscribe", "email", {"global_unsubscribe": 1})] else: unsubscribed = [] recipients = [r for r in list(set(recipients)) if r and r not in unsubscribed] email_content = formatted email_text_context = text_content if add_unsubscribe_link and reference_doctype and (unsubscribe_message or reference_doctype=="Newsletter") and add_unsubscribe_link==1: unsubscribe_link = get_unsubscribe_message(unsubscribe_message, expose_recipients) email_content = 
email_content.replace("<!--unsubscribe link here-->", unsubscribe_link.html) email_text_context += unsubscribe_link.text # add to queue add(recipients, sender, subject, formatted=email_content, text_content=email_text_context, reference_doctype=reference_doctype, reference_name=reference_name, attachments=attachments, reply_to=reply_to, cc=cc, message_id=message_id, in_reply_to=in_reply_to, send_after=send_after, send_priority=send_priority, email_account=email_account, communication=communication, add_unsubscribe_link=add_unsubscribe_link, unsubscribe_method=unsubscribe_method, unsubscribe_params=unsubscribe_params, expose_recipients=expose_recipients, read_receipt=read_receipt, queue_separately=queue_separately, is_notification = is_notification, now=now) def add(recipients, sender, subject, **kwargs): """Add to Email Queue""" if kwargs.get('queue_separately') or len(recipients) > 20: email_queue = None for r in recipients: if not email_queue: email_queue = get_email_queue([r], sender, subject, **kwargs) if kwargs.get('now'): email_queue(email_queue.name, now=True) else: duplicate = email_queue.get_duplicate([r]) duplicate.insert(ignore_permissions=True) if kwargs.get('now'): send_one(duplicate.name, now=True) frappe.db.commit() else: email_queue = get_email_queue(recipients, sender, subject, **kwargs) if kwargs.get('now'): send_one(email_queue.name, now=True) def get_email_queue(recipients, sender, subject, **kwargs): '''Make Email Queue object''' e = frappe.new_doc('Email Queue') e.priority = kwargs.get('send_priority') try: mail = get_email(recipients, sender=sender, subject=subject, formatted=kwargs.get('formatted'), text_content=kwargs.get('text_content'), attachments=kwargs.get('attachments'), reply_to=kwargs.get('reply_to'), cc=kwargs.get('cc'), email_account=kwargs.get('email_account'), expose_recipients=kwargs.get('expose_recipients')) mail.set_message_id(kwargs.get('message_id'),kwargs.get('is_notification')) if kwargs.get('read_receipt'): 
mail.msg_root["Disposition-Notification-To"] = sender if kwargs.get('in_reply_to'): mail.set_in_reply_to(kwargs.get('in_reply_to')) e.message_id = mail.msg_root["Message-Id"].strip(" <>") e.message = cstr(mail.as_string()) e.sender = mail.sender except frappe.InvalidEmailAddressError: # bad Email Address - don't add to queue frappe.log_error('Invalid Email ID Sender: {0}, Recipients: {1}'.format(mail.sender, ', '.join(mail.recipients)), 'Email Not Sent') e.set_recipients(recipients + kwargs.get('cc', [])) e.reference_doctype = kwargs.get('reference_doctype') e.reference_name = kwargs.get('reference_name') e.add_unsubscribe_link = kwargs.get("add_unsubscribe_link") e.unsubscribe_method = kwargs.get('unsubscribe_method') e.unsubscribe_params = kwargs.get('unsubscribe_params') e.expose_recipients = kwargs.get('expose_recipients') e.communication = kwargs.get('communication') e.send_after = kwargs.get('send_after') e.show_as_cc = ",".join(kwargs.get('cc', [])) e.insert(ignore_permissions=True) return e def check_email_limit(recipients): # if using settings from site_config.json, check email limit # No limit for own email settings smtp_server = SMTPServer() if (smtp_server.email_account and getattr(smtp_server.email_account, "from_site_config", False) or frappe.flags.in_test): monthly_email_limit = frappe.conf.get('limits', {}).get('emails') if frappe.flags.in_test: monthly_email_limit = 500 if not monthly_email_limit: return # get count of mails sent this month this_month = get_emails_sent_this_month() if (this_month + len(recipients)) > monthly_email_limit: throw(_("Cannot send this email. 
You have crossed the sending limit of {0} emails for this month.").format(monthly_email_limit), EmailLimitCrossedError) def get_emails_sent_this_month(): return frappe.db.sql("""select count(name) from `tabEmail Queue` where status='Sent' and MONTH(creation)=MONTH(CURDATE())""")[0][0] def get_unsubscribe_message(unsubscribe_message, expose_recipients): if not unsubscribe_message: unsubscribe_message = _("Unsubscribe from this list") html = """<div style="margin: 15px auto; padding: 0px 7px; text-align: center; color: #8d99a6;"> <!--cc message--> <p style="margin: 15px auto;"> <a href="<!--unsubscribe url-->" style="color: #8d99a6; text-decoration: underline; target="_blank">{unsubscribe_message} </a> </p> </div>""".format(unsubscribe_message=unsubscribe_message) if expose_recipients == "footer": text = "\n<!--cc message-->" else: text = "" text += "\n\n{unsubscribe_message}: <!--unsubscribe url-->\n".format(unsubscribe_message=unsubscribe_message) return frappe._dict({ "html": html, "text": text }) def get_unsubcribed_url(reference_doctype, reference_name, email, unsubscribe_method, unsubscribe_params): params = {"email": email.encode("utf-8"), "doctype": reference_doctype.encode("utf-8"), "name": reference_name.encode("utf-8")} if unsubscribe_params: params.update(unsubscribe_params) query_string = get_signed_params(params) # for test frappe.local.flags.signed_query_string = query_string return get_url(unsubscribe_method + "?" 
+ get_signed_params(params)) @frappe.whitelist(allow_guest=True) def unsubscribe(doctype, name, email): # unsubsribe from comments and communications if not verify_request(): return try: frappe.get_doc({ "doctype": "Email Unsubscribe", "email": email, "reference_doctype": doctype, "reference_name": name }).insert(ignore_permissions=True) except frappe.DuplicateEntryError: frappe.db.rollback() else: frappe.db.commit() return_unsubscribed_page(email, doctype, name) def return_unsubscribed_page(email, doctype, name): frappe.respond_as_web_page(_("Unsubscribed"), _("{0} has left the conversation in {1} {2}").format(email, _(doctype), name), indicator_color='green') def flush(from_test=False): """flush email queue, every time: called from scheduler""" # additional check cache = frappe.cache() check_email_limit([]) auto_commit = not from_test if frappe.are_emails_muted(): msgprint(_("Emails are muted")) from_test = True smtpserver = SMTPServer() make_cache_queue() for i in xrange(cache.llen('cache_email_queue')): email = cache.lpop('cache_email_queue') if cint(frappe.defaults.get_defaults().get("hold_queue"))==1: break if email: send_one(email, smtpserver, auto_commit, from_test=from_test) # NOTE: removing commit here because we pass auto_commit # finally: # frappe.db.commit() def make_cache_queue(): '''cache values in queue before sendign''' cache = frappe.cache() emails = frappe.db.sql('''select name from `tabEmail Queue` where (status='Not Sent' or status='Partially Sent') and (send_after is null or send_after < %(now)s) order by priority desc, creation asc limit 500''', { 'now': now_datetime() }) # reset value cache.delete_value('cache_email_queue') for e in emails: cache.rpush('cache_email_queue', e[0]) def send_one(email, smtpserver=None, auto_commit=True, now=False, from_test=False): '''Send Email Queue with given smtpserver''' email = frappe.db.sql('''select name, status, communication, message, sender, reference_doctype, reference_name, unsubscribe_param, 
unsubscribe_method, expose_recipients, show_as_cc, add_unsubscribe_link from `tabEmail Queue` where name=%s for update''', email, as_dict=True)[0]<|fim▁hole|> recipients_list = frappe.db.sql('''select name, recipient, status from `tabEmail Queue Recipient` where parent=%s''',email.name,as_dict=1) if frappe.are_emails_muted(): frappe.msgprint(_("Emails are muted")) return if cint(frappe.defaults.get_defaults().get("hold_queue"))==1 : return if email.status not in ('Not Sent','Partially Sent') : # rollback to release lock and return frappe.db.rollback() return frappe.db.sql("""update `tabEmail Queue` set status='Sending', modified=%s where name=%s""", (now_datetime(), email.name), auto_commit=auto_commit) if email.communication: frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit) try: if not frappe.flags.in_test: if not smtpserver: smtpserver = SMTPServer() smtpserver.setup_email_account(email.reference_doctype) for recipient in recipients_list: if recipient.status != "Not Sent": continue message = prepare_message(email, recipient.recipient, recipients_list) if not frappe.flags.in_test: smtpserver.sess.sendmail(email.sender, recipient.recipient, encode(message)) recipient.status = "Sent" frappe.db.sql("""update `tabEmail Queue Recipient` set status='Sent', modified=%s where name=%s""", (now_datetime(), recipient.name), auto_commit=auto_commit) #if all are sent set status if any("Sent" == s.status for s in recipients_list): frappe.db.sql("""update `tabEmail Queue` set status='Sent', modified=%s where name=%s""", (now_datetime(), email.name), auto_commit=auto_commit) else: frappe.db.sql("""update `tabEmail Queue` set status='Error', error=%s where name=%s""", ("No recipients to send to", email.name), auto_commit=auto_commit) if frappe.flags.in_test: frappe.flags.sent_mail = message return if email.communication: frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit) except 
(smtplib.SMTPServerDisconnected, smtplib.SMTPConnectError, smtplib.SMTPHeloError, smtplib.SMTPAuthenticationError, JobTimeoutException): # bad connection/timeout, retry later if any("Sent" == s.status for s in recipients_list): frappe.db.sql("""update `tabEmail Queue` set status='Partially Sent', modified=%s where name=%s""", (now_datetime(), email.name), auto_commit=auto_commit) else: frappe.db.sql("""update `tabEmail Queue` set status='Not Sent', modified=%s where name=%s""", (now_datetime(), email.name), auto_commit=auto_commit) if email.communication: frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit) # no need to attempt further return except Exception, e: frappe.db.rollback() if any("Sent" == s.status for s in recipients_list): frappe.db.sql("""update `tabEmail Queue` set status='Partially Errored', error=%s where name=%s""", (unicode(e), email.name), auto_commit=auto_commit) else: frappe.db.sql("""update `tabEmail Queue` set status='Error', error=%s where name=%s""", (unicode(e), email.name), auto_commit=auto_commit) if email.communication: frappe.get_doc('Communication', email.communication).set_delivery_status(commit=auto_commit) if now: raise e else: # log to Error Log log('frappe.email.queue.flush', unicode(e)) def prepare_message(email, recipient, recipients_list): message = email.message if email.add_unsubscribe_link and email.reference_doctype: # is missing the check for unsubscribe message but will not add as there will be no unsubscribe url unsubscribe_url = get_unsubcribed_url(email.reference_doctype, email.reference_name, recipient, email.unsubscribe_method, email.unsubscribe_params) message = message.replace("<!--unsubscribe url-->", quopri.encodestring(unsubscribe_url)) if email.expose_recipients == "header": pass else: if email.expose_recipients == "footer": if isinstance(email.show_as_cc, basestring): email.show_as_cc = email.show_as_cc.split(",") email_sent_to = [r.recipient for r in recipients_list] 
email_sent_cc = ", ".join([e for e in email_sent_to if e in email.show_as_cc]) email_sent_to = ", ".join([e for e in email_sent_to if e not in email.show_as_cc]) if email_sent_cc: email_sent_message = _("This email was sent to {0} and copied to {1}").format(email_sent_to,email_sent_cc) else: email_sent_message = _("This email was sent to {0}").format(email_sent_to) message = message.replace("<!--cc message-->", quopri.encodestring(email_sent_message)) message = message.replace("<!--recipient-->", recipient) return message def clear_outbox(): """Remove low priority older than 31 days in Outbox and expire mails not sent for 7 days. Called daily via scheduler.""" frappe.db.sql("""delete q, r from `tabEmail Queue` as q, `tabEmail Queue Recipient` as r where q.name = r.parent and q.priority=0 and datediff(now(), q.modified) > 31""") frappe.db.sql("""update `tabEmail Queue` as q, `tabEmail Queue Recipient` as r set q.status='Expired', r.status='Expired' where q.name = r.parent and datediff(curdate(), q.modified) > 7 and q.status='Not Sent' and r.status='Not Sent'""")<|fim▁end|>
<|file_name|>ok.py<|end_file_name|><|fim▁begin|>""" OK resolveurl XBMC Addon Copyright (C) 2016 Seberoth Version 0.0.2 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import json, urllib from resolveurl import common from lib import helpers from resolveurl.resolver import ResolveUrl, ResolverError class OKResolver(ResolveUrl): name = "ok.ru" domains = ['ok.ru', 'odnoklassniki.ru'] pattern = '(?://|\.)(ok\.ru|odnoklassniki\.ru)/(?:videoembed|video)/(\d+)' header = {"User-Agent": common.OPERA_USER_AGENT} qual_map = {'ultra': '2160', 'quad': '1440', 'full': '1080', 'hd': '720', 'sd': '480', 'low': '360', 'lowest': '240', 'mobile': '144'} def __init__(self): self.net = common.Net() def get_media_url(self, host, media_id): vids = self.__get_Metadata(media_id) sources = [] for entry in vids['urls']: quality = self.__replaceQuality(entry['name']) sources.append((quality, entry['url'])) try: sources.sort(key=lambda x: int(x[0]), reverse=True) except: pass source = helpers.pick_source(sources) source = source.encode('utf-8') + helpers.append_headers(self.header) return source def __replaceQuality(self, qual): return self.qual_map.get(qual.lower(), '000') def __get_Metadata(self, media_id): url = "http://www.ok.ru/dk" data = {'cmd': 'videoPlayerMetadata', 'mid': media_id} data = urllib.urlencode(data) html = self.net.http_POST(url, data, headers=self.header).content json_data = json.loads(html) <|fim▁hole|> info = dict() 
info['urls'] = [] for entry in json_data['videos']: info['urls'].append(entry) return info def get_url(self, host, media_id): return self._default_get_url(host, media_id, 'http://{host}/videoembed/{media_id}')<|fim▁end|>
if 'error' in json_data: raise ResolverError('File Not Found or removed')
<|file_name|>testsuite.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function import numpy as np import paddle.fluid.core as core from paddle.fluid.op import Operator def create_op(scope, op_type, inputs, outputs, attrs): kwargs = dict() op_maker = core.op_proto_and_checker_maker op_role_attr_name = op_maker.kOpRoleAttrName() if op_role_attr_name not in attrs: attrs[op_role_attr_name] = int(op_maker.OpRole.Forward) def __create_var__(name, var_name): scope.var(var_name).get_tensor() kwargs[name].append(var_name) for in_name, in_dup in Operator.get_op_inputs(op_type): if in_name in inputs: kwargs[in_name] = [] if in_dup: sub_in = inputs[in_name] for item in sub_in: sub_in_name, _ = item[0], item[1] __create_var__(in_name, sub_in_name) else: __create_var__(in_name, in_name) for out_name, out_dup in Operator.get_op_outputs(op_type): if out_name in outputs: kwargs[out_name] = [] if out_dup: sub_out = outputs[out_name] for item in sub_out: sub_out_name, _ = item[0], item[1] __create_var__(out_name, sub_out_name) else: __create_var__(out_name, out_name) for attr_name in Operator.get_op_attr_names(op_type): if attr_name in attrs: kwargs[attr_name] = attrs[attr_name] return Operator(op_type, **kwargs) def set_input(scope, op, inputs, place): def np_value_to_fluid_value(input): if input.dtype == np.float16: input = input.view(np.uint16) return input def 
__set_input__(var_name, var): if isinstance(var, tuple) or isinstance(var, np.ndarray): tensor = scope.find_var(var_name).get_tensor() if isinstance(var, tuple): tensor.set_recursive_sequence_lengths(var[1]) var = var[0] tensor._set_dims(var.shape) tensor.set(np_value_to_fluid_value(var), place) elif isinstance(var, float): scope.find_var(var_name).set_float(var) elif isinstance(var, int): scope.find_var(var_name).set_int(var) for in_name, in_dup in Operator.get_op_inputs(op.type()): if in_name in inputs: if in_dup: sub_in = inputs[in_name] for item in sub_in: sub_in_name, sub_in_val = item[0], item[1] __set_input__(sub_in_name, sub_in_val) else: __set_input__(in_name, inputs[in_name]) def append_input_output(block, op_proto, np_list, is_input, dtype): '''Insert VarDesc and generate Python variable instance''' proto_list = op_proto.inputs if is_input else op_proto.outputs def create_var(block, name, np_list, var_proto): dtype = None shape = None lod_level = None if name not in np_list: assert var_proto.intermediate, "{} not found".format(name) else: # inferece the dtype from numpy value. np_value = np_list[name] if isinstance(np_value, tuple): dtype = np_value[0].dtype # output shape, lod should be infered from input. if is_input: shape = list(np_value[0].shape) lod_level = len(np_value[1]) else: dtype = np_value.dtype if is_input: shape = list(np_value.shape) lod_level = 0 # NOTE(dzhwinter): type hacking # numpy float16 is binded to paddle::platform::float16 # in tensor_py.h via the help of uint16 datatype. Because # the internal memory representation of float16 is # actually uint16_t in paddle. So we use np.uint16 in numpy for # raw memory, it can pass through the pybind. So in the testcase, # we feed data use data.view(uint16), but the dtype is float16 in fact. 
# The data.view(uint16) means do not cast the data type, but process data as the uint16 if dtype == np.uint16: dtype = np.float16 return block.create_var( dtype=dtype, shape=shape, lod_level=lod_level, name=name) var_dict = {} for var_proto in proto_list: var_name = str(var_proto.name) if is_input: if (var_name not in np_list) and var_proto.dispensable: continue assert (var_name in np_list) or (var_proto.dispensable), \ "Missing {} as input".format(var_name) if var_proto.duplicable: assert isinstance(np_list[var_name], list), \ "Duplicable {} should be set as list".format(var_name) var_list = [] for (name, np_value) in np_list[var_name]: var_list.append( create_var(block, name, {name: np_value}, var_proto)) var_dict[var_name] = var_list else: var_dict[var_name] = create_var(block, var_name, np_list, var_proto) return var_dict def append_loss_ops(block, output_names): mean_inputs = list(map(block.var, output_names)) if len(mean_inputs) == 1: loss = block.create_var(dtype=mean_inputs[0].dtype, shape=[1]) op = block.append_op( inputs={"X": mean_inputs}, outputs={"Out": loss}, type='mean') op.desc.infer_var_type(block.desc) op.desc.infer_shape(block.desc) else: avg_sum = [] for cur_loss in mean_inputs: cur_avg_loss = block.create_var(dtype=cur_loss.dtype, shape=[1]) op = block.append_op( inputs={"X": [cur_loss]}, outputs={"Out": [cur_avg_loss]}, type="mean") op.desc.infer_var_type(block.desc) op.desc.infer_shape(block.desc) avg_sum.append(cur_avg_loss) loss_sum = block.create_var(dtype=avg_sum[0].dtype, shape=[1]) op_sum = block.append_op( inputs={"X": avg_sum}, outputs={"Out": loss_sum}, type='sum') op_sum.desc.infer_var_type(block.desc) op_sum.desc.infer_shape(block.desc) loss = block.create_var(dtype=loss_sum.dtype, shape=[1]) op_loss = block.append_op( inputs={"X": loss_sum},<|fim▁hole|> attrs={'scale': 1.0 / float(len(avg_sum))}) op_loss.desc.infer_var_type(block.desc) op_loss.desc.infer_shape(block.desc) return loss<|fim▁end|>
outputs={"Out": loss}, type='scale',
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export { Svgo };<|fim▁end|>
import * as Svgo from './svgo';
<|file_name|>topic-trending.component.js<|end_file_name|><|fim▁begin|>"use strict"; var __extends = (this && this.__extends) || (function () { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; }; return function (d, b) { extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; })(); var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; Object.defineProperty(exports, "__esModule", { value: true }); var core_1 = require("@angular/core"); var paginated_list_component_1 = require("../../paginated-list/paginated-list.component"); var TopicTrendingComponent = (function (_super) { __extends(TopicTrendingComponent, _super); function TopicTrendingComponent(route, postService, topicService) { var _this = _super.call(this) || this; _this.route = route; _this.postService = postService; _this.topicService = topicService; return _this; } TopicTrendingComponent.prototype.ngOnInit = function () { var _this = this; this.route.params.subscribe(function (params) { _this.id = +params['id']; _this.topicService.getDetail(_this.id) .then(function (res) { return _this.topic = res; }); }); _super.prototype.ngOnInit.call(this); }; TopicTrendingComponent.prototype.loadNextPage = function (currentPage, perPage) { var _this = this; 
this.postService.getTrending(currentPage, perPage, this.id) .then(function (res) { return _this.addPage(res); }); }; return TopicTrendingComponent; }(paginated_list_component_1.PaginatedListComponent)); TopicTrendingComponent = __decorate([<|fim▁hole|> templateUrl: './topic-trending.component.html', styleUrls: ['./topic-trending.component.css'] }) ], TopicTrendingComponent); exports.TopicTrendingComponent = TopicTrendingComponent;<|fim▁end|>
core_1.Component({ selector: 'app-topic-trending',
<|file_name|>shared_queue.hpp<|end_file_name|><|fim▁begin|>/* * queue.hpp * * Created on: 14 ott 2015 * Author: Marco */ #ifndef SOURCE_UTILITIES_INCLUDE_SHARED_QUEUE_HPP_ #define SOURCE_UTILITIES_INCLUDE_SHARED_QUEUE_HPP_ #include <deque> #include <condition_variable> #include <mutex> #include <utilities/include/atend.hpp> #include <utilities/include/singleton.hpp> #include <utilities/include/debug.hpp> #include <utilities/include/strings.hpp> namespace utilities { /** * Classe generale che implementa una coda condivisa da piu' thread */ template<typename T> class shared_queue : public singleton<shared_queue<T>> { std::deque<T> data; std::mutex lk; std::condition_variable cv; friend class singleton<shared_queue<T>>; shared_queue(){}; public: /** * Accoda un elemento nella coda * @param Elemento da accodare nella coda */ void enqueue(const T obj) { LOGF; std::lock_guard<std::mutex> guard(lk); data.push_back(obj); LOGD("FileName: " << utf8_encode(std::wstring(obj->FileName, obj->FileNameLength / sizeof(wchar_t))) << " Action: "<< (int)obj->Action); cv.notify_all(); } /** * Elimina il primo elemento dalla coda e lo ritorna * @return Elemento estratto dalla coda */ T dequeue(void){ std::unique_lock<std::mutex> guard(lk); on_return<> ret([this](){ data.pop_front(); });//FIXME: viene davvero poi ottimizzato? <|fim▁hole|> } inline bool empty() { //FIXME: Serve sincronizzare? std::lock_guard<std::mutex> guard(lk); return data.empty(); } }; } #endif /* SOURCE_UTILITIES_INCLUDE_SHARED_QUEUE_HPP_ */<|fim▁end|>
cv.wait(guard, [this](){ return !data.empty(); }); return data.front();
<|file_name|>region.rs<|end_file_name|><|fim▁begin|>#![experimental] use libc; use std::ptr; use std::os; #[experimental] pub trait MemoryRegion { fn protect(&mut self) -> bool; fn copy(&mut self, data: &[u8]) -> bool; } #[experimental] impl MemoryRegion for os::MemoryMap { fn protect(&mut self) -> bool { unsafe { libc::mprotect(self.data() as *mut libc::c_void, self.len() as libc::size_t, libc::PROT_READ | libc::PROT_EXEC) == -1 } } <|fim▁hole|> if data.len() > self.len() { false } else { unsafe { ptr::copy_memory(self.data(), data.as_ptr(), data.len()); } true } } }<|fim▁end|>
fn copy(&mut self, data: &[u8]) -> bool {
<|file_name|>AntLabMain.java<|end_file_name|><|fim▁begin|>package edu.gatech.oad.antlab.pkg1; import edu.cs2335.antlab.pkg3.*; import edu.gatech.oad.antlab.person.*; import edu.gatech.oad.antlab.pkg2.*; /** * CS2335 Ant Lab * * Prints out a simple message gathered from all of the other classes * in the package structure */ public class AntLabMain { /**antlab11.java message class*/ private AntLab11 ant11; /**antlab12.java message class*/ private AntLab12 ant12; /**antlab21.java message class*/ private AntLab21 ant21; /**antlab22.java message class*/ private AntLab22 ant22; /**antlab31 java message class which is contained in a jar resource file*/ private AntLab31 ant31; /** * the constructor that intializes all the helper classes */ public AntLabMain () { ant11 = new AntLab11(); ant12 = new AntLab12(); ant21 = new AntLab21(); ant22 = new AntLab22(); ant31 = new AntLab31(); } /** * gathers a string from all the other classes and prints the message * out to the console * */ public void printOutMessage() { String toPrint = ant11.getMessage() + ant12.getMessage() + ant21.getMessage() + ant22.getMessage() + ant31.getMessage(); //Person1 replace P1 with your name //and gburdell1 with your gt id Person1 p1 = new Person1("Pranov"); toPrint += p1.toString("pduggasani3"); //Person2 replace P2 with your name //and gburdell with your gt id Person2 p2 = new Person2("Austin Dang"); toPrint += p2.toString("adang31"); //Person3 replace P3 with your name //and gburdell3 with your gt id Person3 p3 = new Person3("Jay Patel"); toPrint += p3.toString("jpatel345"); //Person4 replace P4 with your name //and gburdell4 with your gt id <|fim▁hole|> Person4 p4 = new Person4("Jin Chung"); toPrint += p4.toString("jchung89"); //Person5 replace P4 with your name //and gburdell5 with your gt id Person5 p5 = new Person5("Zachary Hussin"); toPrint += p5.toString("zhussin3"); System.out.println(toPrint); } /** * entry point for the program */ public static void main(String[] args) { new 
AntLabMain().printOutMessage(); } }<|fim▁end|>