prompt (large_string, length 70 to 991k) | completion (large_string, length 0 to 1.02k)
---|---|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright (c) 2013 the BabelFish authors. All rights reserved.
# Use of this source code is governed by the 3-clause BSD license
# that can be found in the LICENSE file.
#
import sys
if sys.version_info[0] >= 3:
basestr = str
else:
basestr = basestring
from .converters import (LanguageConverter, LanguageReverseConverter, LanguageEquivalenceConverter, CountryConverter,
CountryReverseConverter)<|fim▁hole|><|fim▁end|> | from .country import country_converters, COUNTRIES, COUNTRY_MATRIX, Country
from .exceptions import Error, LanguageConvertError, LanguageReverseError, CountryConvertError, CountryReverseError
from .language import language_converters, LANGUAGES, LANGUAGE_MATRIX, Language
from .script import SCRIPTS, SCRIPT_MATRIX, Script |
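The `basestr` alias above gives the library a single name for string types on both Python 2 (`basestring`) and Python 3 (`str`). A minimal usage sketch; the `ensure_text` helper is hypothetical and not part of BabelFish:
def ensure_text(value):
    # `basestr` is the alias defined in this __init__.py
    if not isinstance(value, basestr):
        raise TypeError('expected a string, got %r' % type(value))
    return value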
<|file_name|>Equation.java<|end_file_name|><|fim▁begin|>package org.adastraeducation.liquiz.equation;
import java.util.*;
import java.util.regex.*;
import org.adastraeducation.liquiz.*;
/**
 * Presents equations with random variables.
 * An equation token list can be parsed in either infix or RPN notation.
* @author Yingzhu Wang
*
*/
public class Equation implements Displayable {
private Expression func;
private double correctAnswer;
private HashMap<String,Var> variables;
public Equation(String equation, HashMap<String,Var> variables){
this.variables=variables;
ArrayList<String> equationSplit = this.parseQuestion(equation);
this.func = this.parseInfix(equationSplit);
correctAnswer = func.eval();
}
public Equation(Expression func, HashMap<String,Var> variables){
this.func = func;
this.variables = variables;
correctAnswer=func.eval();
}
public Equation(Expression func){<|fim▁hole|> this.variables = new HashMap<String,Var>();
correctAnswer=func.eval();
}
public void setExpression(Expression e){
this.func=e;
correctAnswer=func.eval();
}
public void setVariables(HashMap<String,Var> variables){
this.variables = variables;
}
public String getTagName() { return "Equation"; }
public Expression parseInfix(ArrayList<String> s){
Tree t = new Tree(s);
ArrayList<String> rpn = t.traverse();
return parseRPN(rpn);
}
// Precompile all regular expressions used in parsing
private static final Pattern parseDigits =
Pattern.compile("^[0-9]+$");
private static final Pattern wordPattern =
Pattern.compile("[\\W]|([\\w]*)");
/*TODO: We can do much better than a switch statement,
* but it would require a hash map and lots of little objects
*/
//NOTE: binary operands are popped in reverse order (op2 first), so "a b -" evaluates to a - b.
public Expression parseRPN(ArrayList<String> s) {
Stack<Expression> stack = new Stack<Expression>();
for(int i = 0; i<s.size(); i++){
String temp = s.get(i);
if (Functions.MATHFUNCTIONS.contains(temp)) {
Expression op1 ;
Expression op2 ;
switch(temp){
case "+":
op2=stack.pop();
op1=stack.pop();
stack.push(new Plus(op1,op2));
break;
case "-":
op2=stack.pop();
op1=stack.pop();
stack.push( new Minus(op1,op2));
break;
case "*":
op2=stack.pop();
op1=stack.pop();
stack.push( new Multi(op1,op2));break;
case "/":
op2=stack.pop();
op1=stack.pop();
stack.push( new Div(op1,op2));break;
case "sin":
op1=stack.pop();
stack.push(new Sin(op1));break;
case "cos":
op1=stack.pop();
stack.push(new Cos(op1));break;
case "tan":
op1=stack.pop();
stack.push(new Tan(op1));break;
case "abs":
op1=stack.pop();
stack.push(new Abs(op1));break;
case "Asin":
op1=stack.pop();
stack.push(new Asin(op1));break;
case "Atan":
op1=stack.pop();
stack.push(new Atan(op1));break;
case "neg":
op1=stack.pop();
stack.push(new Neg(op1));break;
case "sqrt":
op1=stack.pop();
stack.push(new Sqrt(op1));break;
default:break;
}
}
// skip empty tokens produced by whitespace in the input
else if(temp.equals(""))
;
else{
Matcher m = parseDigits.matcher(temp);
if (m.matches()){
double x = Double.parseDouble(temp);
stack.push(new Constant(x));
}
else{
stack.push(variables.get(temp));
}
}
}
return stack.pop();
}
public ArrayList<String> parseQuestion(String question){
ArrayList<String> s = new ArrayList<String>();
Matcher m = wordPattern.matcher(question);
while(m.find()){
s.add(m.group());
}
return s;
}
// public ResultSet readDatabase(String sql){
// return DatabaseMgr.select(sql);
// }
//
// public void writeDatabase(String sql){
// DatabaseMgr.update(sql);
// }
public Expression getExpression(){
return func;
}
public double getCorrectAnswer(){
return correctAnswer;
}
@Override
public void writeHTML(StringBuilder b) {
func.infixReplaceVar(b);
}
@Override
public void writeXML(StringBuilder b) {
b.append("<Equation question='");
func.infix(b);
b.append("'></Equation>");
}
@Override
public void writeJS(StringBuilder b) {
}
}<|fim▁end|> | this.func = func; |
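The `parseRPN` method above evaluates postfix tokens against an operand stack, popping `op2` before `op1` so binary operators see their operands in source order. A minimal Python sketch of the same stack discipline; the operator set and function name are illustrative only:
def eval_rpn(tokens):
    ops = {'+': lambda a, b: a + b, '-': lambda a, b: a - b,
           '*': lambda a, b: a * b, '/': lambda a, b: a / b}
    stack = []
    for tok in tokens:
        if tok in ops:
            op2 = stack.pop()  # popped first, exactly as in parseRPN
            op1 = stack.pop()
            stack.append(ops[tok](op1, op2))
        elif tok:  # skip empty tokens, as the Java code does
            stack.append(float(tok))
    return stack.pop()

assert eval_rpn(['3', '4', '-']) == -1.0  # "3 4 -" means 3 - 4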
<|file_name|>relacher_gouvernail.py<|end_file_name|><|fim▁begin|># -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,<|fim▁hole|># without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Module containing the RelacherGouvernail volition."""
import re
from secondaires.navigation.equipage.ordres.relacher_gouvernail import \
RelacherGouvernail as OrdreRelacherGouvernail
from secondaires.navigation.equipage.ordres.long_deplacer import LongDeplacer
from secondaires.navigation.equipage.volonte import Volonte
class RelacherGouvernail(Volonte):
"""Class representing a volition.
This volition simply asks the sailor holding the helm to release it.
As with most volitions, the sailor is then encouraged to return to
his assigned room afterwards.
"""
cle = "relacher_gouvernail"
ordre_court = re.compile(r"^rg$", re.I)
ordre_long = re.compile(r"^relacher\s+gouvernail?$", re.I)
def choisir_matelots(self, exception=None):
"""Return the sailor best suited to carry out this volition."""
navire = self.navire
equipage = navire.equipage
gouvernail = self.navire.gouvernail
if gouvernail is None or gouvernail.tenu is None:
return None
personnage = gouvernail.tenu
matelot = equipage.get_matelot_depuis_personnage(personnage)
return matelot
def executer(self, matelot):
"""Execute the volition."""
if matelot is None:
self.terminer()
return
navire = self.navire
ordres = []
matelot.invalider_ordres("virer")
relacher = OrdreRelacherGouvernail(matelot, navire)
ordres.append(relacher)
ordres.append(self.revenir_affectation(matelot))
self.ajouter_ordres(matelot, ordres)
def crier_ordres(self, personnage):
"""Have the character shout the order."""
msg = "{} s'écrie : relâchez la barre !".format(
personnage.distinction_audible)
self.navire.envoyer(msg)
@classmethod
def extraire_arguments(cls, navire):
"""Extract the volition's arguments."""
return ()<|fim▁end|> | # this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software |
<|file_name|>ChickenBurger.java<|end_file_name|><|fim▁begin|>public class ChickenBurger extends Burger {
@Override
public float price() {
return 50.5f;
}<|fim▁hole|> @Override
public String name() {
return "Chicken Burger";
}
}<|fim▁end|> | |
<|file_name|>action_noise.py<|end_file_name|><|fim▁begin|># Copyright 2018 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Wrapper control suite environments that adds Gaussian noise to actions."""
import dm_env
import numpy as np
_BOUNDS_MUST_BE_FINITE = (
'All bounds in `env.action_spec()` must be finite, got: {action_spec}')
class Wrapper(dm_env.Environment):
"""Wraps a control environment and adds Gaussian noise to actions."""
def __init__(self, env, scale=0.01):
"""Initializes a new action noise Wrapper.
Args:
env: The control suite environment to wrap.
scale: The standard deviation of the noise, expressed as a fraction
of the max-min range for each action dimension.
Raises:
ValueError: If any of the action dimensions of the wrapped environment are
unbounded.
"""
action_spec = env.action_spec()
if not (np.all(np.isfinite(action_spec.minimum)) and
np.all(np.isfinite(action_spec.maximum))):
raise ValueError(_BOUNDS_MUST_BE_FINITE.format(action_spec=action_spec))
self._minimum = action_spec.minimum
self._maximum = action_spec.maximum
self._noise_std = scale * (action_spec.maximum - action_spec.minimum)
self._env = env
def step(self, action):
noisy_action = action + self._env.task.random.normal(scale=self._noise_std)
# Clip the noisy actions in place so that they fall within the bounds<|fim▁hole|> # specified by the `action_spec`. Note that MuJoCo implicitly clips out-of-
# bounds control inputs, but we also clip here in case the actions do not
# correspond directly to MuJoCo actuators, or if there are other wrapper
# layers that expect the actions to be within bounds.
np.clip(noisy_action, self._minimum, self._maximum, out=noisy_action)
return self._env.step(noisy_action)
def reset(self):
return self._env.reset()
def observation_spec(self):
return self._env.observation_spec()
def action_spec(self):
return self._env.action_spec()
def __getattr__(self, name):
return getattr(self._env, name)<|fim▁end|> | |
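A hedged usage sketch for the wrapper above. `suite.load` is dm_control's standard entry point, but the chosen domain/task and the `scale` value are only examples:
from dm_control import suite

env = Wrapper(suite.load('cartpole', 'swingup'), scale=0.05)
timestep = env.reset()
spec = env.action_spec()
action = (spec.minimum + spec.maximum) / 2.0  # any in-bounds action
timestep = env.step(action)  # Gaussian noise is added, then clipped to bounds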
<|file_name|>record.go<|end_file_name|><|fim▁begin|>package daemon
import (
"log"
"time"
"github.com/Cloakaac/cloak/models"
)
type RecordDaemon struct{}
func (r *RecordDaemon) tick() {
total := models.GetOnlineCount()
err := models.AddOnlineRecord(total, time.Now().Unix())
if err != nil {
log.Fatal(err)<|fim▁hole|> }
}<|fim▁end|> | |
<|file_name|>Generic.py<|end_file_name|><|fim▁begin|># This file is part of pybliographer
#
# Copyright (C) 1998-2004 Frederic GOBRY
# Email : [email protected]
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
#
''' Generic XML bibliographic style handler '''
import string
from Pyblio.Style import Parser
from Pyblio import Autoload, recode
def author_desc (group, coding, initials = 0, reverse = 0):
""" Create a nice string describing a group of authors.
coding : name of the output coding (as requested for recode)
initials : if = 1, uses initials instead of complete first names
reverse :
-1 use First Last format
0 use Last, First, excepted for the first entry
1 use Last, First for all the authors, not only the first
"""
l = len (group)
fulltext = ""
for i in range (0, l):
(honorific, first, last, lineage) = group [i].format (coding)
if initials:
first = group [i].initials (coding)
text = ""
if reverse == 1 or (i == 0 and reverse == 0):
if last: text = text + last
if lineage: text = text + ", " + lineage
if first: text = text + ", " + first
else:
if first: text = first + " "
if last: text = text + last
if lineage: text = text + ", " + lineage
if text:
if i < l - 2:
text = text + ", "
elif i == l - 2:
text = text + " and "
fulltext = fulltext + text
# avoid a dot at the end of the author list
if fulltext and fulltext [-1] == '.':
fulltext = fulltext [0:-1]
return fulltext
def string_key (entry, fmt, table):
""" Generates an alphabetical key for an entry. fmt is the
output coding """
rc = recode.recode ("latin1.." + fmt)
if entry.has_key ('author'): aut = entry ['author']
elif entry.has_key ('editor'): aut = entry ['editor']
else: aut = ()
if len (aut) > 0:
if len (aut) > 1:
key = ''
for a in aut:
honorific, first, last, lineage = a.format (fmt)
key = key + string.join (map (lambda x:
x [0], string.split (last, ' ')), '')
if len (key) >= 3:
if len (aut) > 3:
key = key + '+'
break
else:
honorific, first, last, lineage = aut [0].format (fmt)
parts = string.split (last, ' ')
if len (parts) == 1:
key = parts [0][0:3]
else:
key = string.join (map (lambda x: x [0], parts), '')
else:
key = rc (entry.key.key [0:3])
if entry.has_key ('date'):
year = entry ['date'].format (fmt) [0]
if year:
key = key + year [2:]
if table.has_key (key) or table.has_key (key + 'a'):
if table.has_key (key):
# rename the old entry
new = key + 'a'
table [new] = table [key]
del table [key]
base = key
suff = ord ('b')
key = base + chr (suff)
while table.has_key (key):
suff = suff + 1
key = base + chr (suff)
return key
def numeric_key (entry, fmt, table):
count = 1
while table.has_key (str (count)):
count = count + 1
return str (count)
def create_string_key (database, keys, fmt):
table = {}
for key in keys:
s = string_key (database [key], fmt, table)
table [s] = key
skeys = table.keys ()
skeys.sort ()
<|fim▁hole|>
def create_numeric_key (database, keys, fmt):
table = {}
skeys = []
for key in keys:
s = numeric_key (database [key], fmt, table)
table [s] = key
skeys.append (s)
return table, skeys
def standard_date (entry, coding):
(text, month, day) = entry.format (coding)
if month: text = "%s/%s" % (month, text)
if day : text = "%s/%s" % (day, text)
return text
def last_first_full_authors (entry, coding):
return author_desc (entry, coding, 0, 1)
def first_last_full_authors (entry, coding):
return author_desc (entry, coding, 0, -1)
def full_authors (entry, coding):
return author_desc (entry, coding, 0, 0)
def initials_authors (entry, coding):
return author_desc (entry, coding, 1, 0)
def first_last_initials_authors (entry, coding):
return author_desc (entry, coding, 1, -1)
def last_first_initials_authors (entry, coding):
return author_desc (entry, coding, 1, 1)
Autoload.register ('style', 'Generic', {
'first_last_full_authors' : first_last_full_authors,
'last_first_full_authors' : last_first_full_authors,
'full_authors' : full_authors,
'first_last_initials_authors' : first_last_initials_authors,
'last_first_initials_authors' : last_first_initials_authors,
'initials_authors' : initials_authors,
'string_keys' : create_string_key,
'numeric_keys' : create_numeric_key,
'european_date' : standard_date,
})<|fim▁end|> | return table, skeys
|
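A small, self-contained sketch of the three `reverse` modes documented in `author_desc` above. `StubAuthor` is hypothetical and stands in for Pyblio's author objects (Python 2 syntax, matching the module):
class StubAuthor:
    def __init__(self, first, last):
        self.first, self.last = first, last
    def format(self, coding):
        # (honorific, first, last, lineage), as author_desc expects
        return (None, self.first, self.last, None)
    def initials(self, coding):
        return self.first[0] + '.'

authors = [StubAuthor('John', 'Smith'), StubAuthor('Ann', 'Lee')]
print author_desc(authors, 'latin1', reverse=-1)  # John Smith and Ann Lee
print author_desc(authors, 'latin1', reverse=0)   # Smith, John and Ann Lee
print author_desc(authors, 'latin1', reverse=1)   # Smith, John and Lee, Ann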
<|file_name|>NSCFBackgroundDownloadTask.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2015 Bartosz Janda
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from .. import helpers
from ..common import SummaryBase
import NSCFBackgroundSessionTask
class NSCFBackgroundDownloadTaskSyntheticProvider(NSCFBackgroundSessionTask.NSCFBackgroundSessionTaskSyntheticProvider):
"""
Class representing __NSCFBackgroundDownloadTask.
"""
def __init__(self, value_obj, internal_dict):
super(NSCFBackgroundDownloadTaskSyntheticProvider, self).__init__(value_obj, internal_dict)
self.type_name = "__NSCFBackgroundDownloadTask"
self.register_child_value("finished", ivar_name="_finished",
primitive_value_function=SummaryBase.get_bool_value,
summary_function=self.get_finished_summary)
@staticmethod
def get_finished_summary(value):<|fim▁hole|>
def summary_provider(value_obj, internal_dict):
return helpers.generic_summary_provider(value_obj, internal_dict, NSCFBackgroundDownloadTaskSyntheticProvider)<|fim▁end|> | if value:
return "finished"
return None |
<|file_name|>Component.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react';
import { connect } from 'react-redux';
import * as Actions from './Actions';
import { ITrackHistoryState } from './ITypes';
let Dropzone = require('react-dropzone');
interface IProps extends ITrackHistoryState {
dispatch: Function;
};
function selectState(state: ITrackHistoryState) {
return state;
};
class Component extends React.Component<IProps, any> {
private static styles = {
button: {
backgroundColor: '#518D21',
borderRadius: '3px',
border: '1px solid rgba(255, 255, 255, 0.2)',
fontSize: '20px',
width: '26px',
height: '22px',
lineHeight: '20px',
fontWeight: 'bold',
padding: '0px 1px',
boxSizing: 'content-box',
color: 'white',
textAlign: 'center',
cursor: 'pointer'
},
disabled: {
backgroundColor: 'rgb(102, 102, 102)',
cursor: 'default'
},
input: {
margin: '4px 0px',
outline: 'none',
border: 'none',
overflow: 'hidden',
width: '100%',
height: '16px',
},
blueBg: {
backgroundColor: 'rgb(33, 122, 141)',
},
clipboard: {
backgroundColor: 'transparent',
// color: 'rgb(33, 122, 141)',
color: '#6FB3D2',
borderColor: '#6FB3D2',
borderStyle: 'dashed',
},
container: {
width: '100%',
position: 'fixed',
bottom: 0,
boxShadow: '0px 0px 4px rgba(0, 0, 0, 0.3)',
backgroundColor: '#2A2F3A',
// maxWidth: '1000px',
color: 'white',
padding: '12px 12px 8px',
left: 0,
boxSizing: 'border-box',
zIndex: 1000
},
links: {
padding: '0px 6px'
},
actions: {
fontSize: 'smaller',
float: 'right',
marginTop: '6px'
},
};
private playback: { setTimeoutReference: number, isPlaying: boolean } = {
setTimeoutReference: null,
isPlaying: false,
};
private timeFromStart = (pos1: number) => {
return Math.floor((this.props.stateHistory.timestamps[pos1] - this.props.stateHistory.timestamps[1]) / 1000);
};
private currentTime = () => this.timeFromStart(this.props.stateHistory.current);
private totalTime = () => this.timeFromStart(this.props.stateHistory.timestamps.length - 1);
private playUntilEnd = () => {
if (this.props.stateHistory.current >= this.props.stateHistory.timestamps.length - 1) {
return this.stopPlayback();
} else {
const nextActionInMilliseconds =
this.props.stateHistory.timestamps[this.props.stateHistory.current + 1]
- this.props.stateHistory.timestamps[this.props.stateHistory.current];
// Dispatch the next action after the recorded gap, then recurse.
// Store the timer handle so stopPlayback() can actually cancel it.
this.playback.setTimeoutReference = window.setTimeout(() => {
this.props.dispatch(Actions.selectHistory(this.props.stateHistory.current + 1));
this.playUntilEnd();
}, nextActionInMilliseconds);
}
};
private stopPlayback = () => {
clearTimeout(this.playback.setTimeoutReference);
this.playback.isPlaying = false;
this.forceUpdate(); // refresh immediately to show the new play button status; needed because we are cheating and not modifying props
};
private startPlayback = () => {
this.playback.isPlaying = true;
this.playUntilEnd();
this.forceUpdate();
};
private selectHistory = (e) => {
this.props.dispatch(Actions.selectHistory(parseInt(e.target.value, 10)));
};
private changeHistory = (step: number) => {
let selected = this.props.stateHistory.current + step;
// Upper & lower bound
if (selected > this.props.stateHistory.history.length - 1) {
selected = this.props.stateHistory.history.length - 1;
} else if (selected < 1) {
selected = 1;
}
this.props.dispatch(Actions.selectHistory(selected));
};
/**
* Read uploaded json file and dispatch action
*/
private uploadHistory = (files: File[]) => {
const blob = files[0].slice(0);<|fim▁hole|> reader.onloadend = () => {
const json = JSON.parse(reader.result);
this.props.dispatch(Actions.uploadHistory(json));
};
reader.readAsText(blob);
};
private downloadHistory(e: any) {
e.target.href = `data:text/json;charset=utf-8, ${encodeURIComponent(JSON.stringify(this.props.stateHistory)) }`;
}
public render() {
return (
<div style={ Component.styles.container }>
<div>
<div style={{ float: 'right' }}>
<span style={ this.playback.isPlaying ? { marginRight: '10px', display: 'inline-block'} : {}}>
{ this.playback.isPlaying && `${this.currentTime()}s|${this.totalTime()}s` }
</span>
{ this.props.stateHistory.current } / { this.props.stateHistory.history.length - 1 }
<button onClick={ this.changeHistory.bind(this, -1)} title='Previous state' style={
this.playback.isPlaying || this.props.stateHistory.current <= 1 ?
Object.assign({}, Component.styles.button, Component.styles.disabled) : Component.styles.button
}>{'<'}</button>
<button onClick={ this.changeHistory.bind(this, 1) } title='Next state' style={
this.playback.isPlaying || this.props.stateHistory.current >= this.props.stateHistory.history.length - 1 ?
Object.assign({}, Component.styles.button, Component.styles.disabled) : Component.styles.button
}>{'>'}</button>
<button onClick={ this.playback.isPlaying ? this.stopPlayback : this.startPlayback } title='Realtime Playback' style={
this.props.stateHistory.current >= this.props.stateHistory.history.length - 1 ?
Object.assign({}, Component.styles.button, Component.styles.disabled) : Component.styles.button
}>{ this.playback.isPlaying ? '■' : '▸'}</button>
<a
href='#'
onClick={ this.downloadHistory.bind(this) }
download='state.json'
title='Download state'
style={ Object.assign({}, Component.styles.button, Component.styles.blueBg, Component.styles.links) }
>⇓</a>
<Dropzone
multiple={false}
title='Upload state'
onDrop={ this.uploadHistory }
style={ Object.assign({ float: 'right' }, Component.styles.button, Component.styles.clipboard) }>⇧</Dropzone>
</div>
<div style={{ width: 'auto', overflow: 'hidden', paddingRight: '19px' }}>
<input
type='range'
step={1}
min={1}
style={ Component.styles.input }
max={this.props.stateHistory.history.length - 1}
value={ `${this.props.stateHistory.current}` }
onChange={ this.selectHistory.bind(this) }
/>
</div>
<div style={ Component.styles.actions }>
{ this.props.stateHistory.actions[this.props.stateHistory.current] }
</div>
</div>
</div>
);
}
}
export default connect(selectState)(Component);<|fim▁end|> | const reader = new FileReader();
|
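The `playUntilEnd` chain above replays recorded actions while preserving the original gaps between their timestamps. The same idea as a small Python sketch; the function and callback names are illustrative:
import time

def replay(timestamps, dispatch, current=1):
    # Sleep the recorded gap, dispatch the next state index, repeat.
    for i in range(current, len(timestamps) - 1):
        time.sleep((timestamps[i + 1] - timestamps[i]) / 1000.0)
        dispatch(i + 1)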
<|file_name|>uploadedOn.pipe.ts<|end_file_name|><|fim▁begin|>import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
name: 'uploadedOn'
})
export class UploadedOnPipe implements PipeTransform {
transform(value: any, args?: any): any {<|fim▁hole|><|fim▁end|> | return 'Uploaded On: ' + value;
}
} |
<|file_name|>JwtTokenError.ts<|end_file_name|><|fim▁begin|>/* tslint:disable */
/* eslint-disable */
/**
* OpenCraft Instance Manager
* API for OpenCraft Instance Manager
*
* The version of the OpenAPI document: api
*
*
* NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
* https://openapi-generator.tech
* Do not edit the class manually.
*/
import { exists, mapValues } from '../runtime';
/**
*
* @export
* @interface JwtTokenError
*/
export interface JwtTokenError {
/**
*
* @type {string}
* @memberof JwtTokenError
*/
detail?: string;
/**
*
* @type {string}
* @memberof JwtTokenError
*/
code?: string;
}
export function JwtTokenErrorFromJSON(json: any): JwtTokenError {
return JwtTokenErrorFromJSONTyped(json, false);
}
export function JwtTokenErrorFromJSONTyped(json: any, ignoreDiscriminator: boolean): JwtTokenError {
if ((json === undefined) || (json === null)) {
return json;
}
return {
'detail': !exists(json, 'detail') ? undefined : json['detail'],
'code': !exists(json, 'code') ? undefined : json['code'],
};
}
<|fim▁hole|>export function JwtTokenErrorToJSON(value?: JwtTokenError | null): any {
if (value === undefined) {
return undefined;
}
if (value === null) {
return null;
}
return {
'detail': value.detail,
'code': value.code,
};
}<|fim▁end|> | |
<|file_name|>download.rs<|end_file_name|><|fim▁begin|>use std::fmt::{self, Display, Formatter};
use uuid::Uuid;
/// Details of a package for downloading.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct Package {
pub name: String,
pub version: String
}
impl Display for Package {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{} {}", self.name, self.version)
}
}
/// A request for the device to install a new update.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
#[allow(non_snake_case)]
pub struct UpdateRequest {
pub requestId: Uuid,
pub status: RequestStatus,
pub packageId: Package,
pub installPos: i32,
pub createdAt: String,
}
/// The current status of an `UpdateRequest`.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub enum RequestStatus {
Pending,
InFlight,
Canceled,<|fim▁hole|>}
/// A notification from RVI that a new update is available.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct UpdateAvailable {
pub update_id: String,
pub signature: String,
pub description: String,
pub request_confirmation: bool,
pub size: u64
}
/// A notification to an external package manager that the package was downloaded.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadComplete {
pub update_id: Uuid,
pub update_image: String,
pub signature: String
}
/// A notification to an external package manager that the package download failed.
#[derive(Deserialize, Serialize, PartialEq, Eq, Debug, Clone)]
pub struct DownloadFailed {
pub update_id: Uuid,
pub reason: String
}<|fim▁end|> | Failed,
Finished |
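A hedged sketch of the JSON an `UpdateRequest` above maps to: with serde's default externally-tagged encoding, unit variants such as `RequestStatus::Pending` serialize as plain strings, and field names follow the Rust struct verbatim. All values here are examples:
import json, uuid

update_request = {
    'requestId': str(uuid.uuid4()),
    'status': 'Pending',
    'packageId': {'name': 'example-app', 'version': '1.0.0'},
    'installPos': 0,
    'createdAt': '2017-01-01T00:00:00Z',
}
print(json.dumps(update_request))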
<|file_name|>submittion2_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from django.contrib.auth.models import User
from nose.plugins.attrib import attr
from nose.tools import assert_equal, assert_true, assert_not_equal
from hadoop import cluster, pseudo_hdfs4
from hadoop.conf import HDFS_CLUSTERS, MR_CLUSTERS, YARN_CLUSTERS
from desktop.lib.test_utils import clear_sys_caches
from desktop.lib.django_test_util import make_logged_in_client
from oozie.models2 import Node
from oozie.tests import OozieMockBase
from liboozie.conf import USE_LIBPATH_FOR_JARS
from liboozie.submission2 import Submission
LOG = logging.getLogger(__name__)
@attr('requires_hadoop')
def test_copy_files():
cluster = pseudo_hdfs4.shared_cluster()
try:
c = make_logged_in_client()
user = User.objects.get(username='test')
prefix = '/tmp/test_copy_files'
if cluster.fs.exists(prefix):
cluster.fs.rmtree(prefix)
# Jars in various locations
deployment_dir = '%s/workspace' % prefix
external_deployment_dir = '%s/deployment' % prefix
jar_1 = '%s/udf1.jar' % prefix
jar_2 = '%s/lib/udf2.jar' % prefix
jar_3 = '%s/udf3.jar' % deployment_dir
jar_4 = '%s/lib/udf4.jar' % deployment_dir # Doesn't move
jar_5 = 'udf5.jar'
jar_6 = 'lib/udf6.jar' # Doesn't move
cluster.fs.mkdir(prefix)
cluster.fs.create(jar_1)
cluster.fs.create(jar_2)
cluster.fs.create(jar_3)
cluster.fs.create(jar_4)
cluster.fs.create(deployment_dir + '/' + jar_5)
cluster.fs.create(deployment_dir + '/' + jar_6)
class MockJob():
XML_FILE_NAME = 'workflow.xml'
def __init__(self):
self.deployment_dir = deployment_dir
self.nodes = [
Node({'id': '1', 'type': 'mapreduce', 'properties': {'jar_path': jar_1}}),
Node({'id': '2', 'type': 'mapreduce', 'properties': {'jar_path': jar_2}}),
Node({'id': '3', 'type': 'java', 'properties': {'jar_path': jar_3}}),
Node({'id': '4', 'type': 'java', 'properties': {'jar_path': jar_4}}),
# Workspace relative paths
Node({'id': '5', 'type': 'java', 'properties': {'jar_path': jar_5}}),
Node({'id': '6', 'type': 'java', 'properties': {'jar_path': jar_6}})
]
submission = Submission(user, job=MockJob(), fs=cluster.fs, jt=cluster.jt)
submission._copy_files(deployment_dir, "<xml>My XML</xml>", {'prop1': 'val1'})
submission._copy_files(external_deployment_dir, "<xml>My XML</xml>", {'prop1': 'val1'})
assert_true(cluster.fs.exists(deployment_dir + '/workflow.xml'), deployment_dir)
assert_true(cluster.fs.exists(deployment_dir + '/job.properties'), deployment_dir)
# All sources still there
assert_true(cluster.fs.exists(jar_1))
assert_true(cluster.fs.exists(jar_2))
assert_true(cluster.fs.exists(jar_3))
assert_true(cluster.fs.exists(jar_4))
assert_true(cluster.fs.exists(deployment_dir + '/' + jar_5))
assert_true(cluster.fs.exists(deployment_dir + '/' + jar_6))
# Lib
deployment_dir = deployment_dir + '/lib'
external_deployment_dir = external_deployment_dir + '/lib'
if USE_LIBPATH_FOR_JARS.get():
assert_true(jar_1 in submission.properties['oozie.libpath'])
assert_true(jar_2 in submission.properties['oozie.libpath'])
assert_true(jar_3 in submission.properties['oozie.libpath'])
assert_true(jar_4 in submission.properties['oozie.libpath'])
print deployment_dir + '/' + jar_5
assert_true((deployment_dir + '/' + jar_5) in submission.properties['oozie.libpath'], submission.properties['oozie.libpath'])
assert_true((deployment_dir + '/' + jar_6) in submission.properties['oozie.libpath'], submission.properties['oozie.libpath'])
else:
list_dir_workspace = cluster.fs.listdir(deployment_dir)
list_dir_deployement = cluster.fs.listdir(external_deployment_dir)
# All destinations there
assert_true(cluster.fs.exists(deployment_dir + '/udf1.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf2.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf3.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf4.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf5.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(deployment_dir + '/udf6.jar'), list_dir_workspace)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf1.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf2.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf3.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf4.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf5.jar'), list_dir_deployement)
assert_true(cluster.fs.exists(external_deployment_dir + '/udf6.jar'), list_dir_deployement)
stats_udf1 = cluster.fs.stats(deployment_dir + '/udf1.jar')
stats_udf2 = cluster.fs.stats(deployment_dir + '/udf2.jar')
stats_udf3 = cluster.fs.stats(deployment_dir + '/udf3.jar')
stats_udf4 = cluster.fs.stats(deployment_dir + '/udf4.jar')
stats_udf5 = cluster.fs.stats(deployment_dir + '/udf5.jar')
stats_udf6 = cluster.fs.stats(deployment_dir + '/udf6.jar')
submission._copy_files('%s/workspace' % prefix, "<xml>My XML</xml>", {'prop1': 'val1'})
assert_not_equal(stats_udf1['fileId'], cluster.fs.stats(deployment_dir + '/udf1.jar')['fileId'])
assert_not_equal(stats_udf2['fileId'], cluster.fs.stats(deployment_dir + '/udf2.jar')['fileId'])
assert_not_equal(stats_udf3['fileId'], cluster.fs.stats(deployment_dir + '/udf3.jar')['fileId'])
assert_equal(stats_udf4['fileId'], cluster.fs.stats(deployment_dir + '/udf4.jar')['fileId'])
assert_not_equal(stats_udf5['fileId'], cluster.fs.stats(deployment_dir + '/udf5.jar')['fileId'])
assert_equal(stats_udf6['fileId'], cluster.fs.stats(deployment_dir + '/udf6.jar')['fileId'])
# Test _create_file()
submission._create_file(deployment_dir, 'test.txt', data='Test data')
assert_true(cluster.fs.exists(deployment_dir + '/test.txt'), list_dir_workspace)
finally:
try:
cluster.fs.rmtree(prefix)
except:
LOG.exception('failed to remove %s' % prefix)
class MockFs():
def __init__(self, logical_name=None):
self.fs_defaultfs = 'hdfs://curacao:8020'
self.logical_name = logical_name if logical_name else ''
class MockJt():
def __init__(self, logical_name=None):
self.logical_name = logical_name if logical_name else ''
class TestSubmission(OozieMockBase):
def test_get_properties(self):
submission = Submission(self.user, fs=MockFs())
assert_equal({'security_enabled': False}, submission.properties)
submission._update_properties('curacao:8032', '/deployment_dir')
assert_equal({
'jobTracker': 'curacao:8032',
'nameNode': 'hdfs://curacao:8020',
'security_enabled': False
}, submission.properties)
def test_get_logical_properties(self):
submission = Submission(self.user, fs=MockFs(logical_name='fsname'), jt=MockJt(logical_name='jtname'))
assert_equal({'security_enabled': False}, submission.properties)
submission._update_properties('curacao:8032', '/deployment_dir')
assert_equal({
'jobTracker': 'jtname',
'nameNode': 'fsname',
'security_enabled': False
}, submission.properties)
def test_update_properties(self):
finish = []
finish.append(MR_CLUSTERS.set_for_testing({'default': {}}))
finish.append(MR_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))
finish.append(YARN_CLUSTERS.set_for_testing({'default': {}}))
finish.append(YARN_CLUSTERS['default'].SUBMIT_TO.set_for_testing(True))
try:
properties = {
'user.name': 'hue',
'test.1': 'http://localhost/test?test1=test&test2=test',
'nameNode': 'hdfs://curacao:8020',
'jobTracker': 'jtaddress',
'security_enabled': False
}
final_properties = properties.copy()
submission = Submission(None, properties=properties, oozie_id='test', fs=MockFs())
assert_equal(properties, submission.properties)
submission._update_properties('jtaddress', 'deployment-directory')
assert_equal(final_properties, submission.properties)
clear_sys_caches()
fs = cluster.get_hdfs()
jt = cluster.get_next_ha_mrcluster()[1]
final_properties = properties.copy()
final_properties.update({
'jobTracker': 'jtaddress',
'nameNode': fs.fs_defaultfs
})
submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt)
assert_equal(properties, submission.properties)
submission._update_properties('jtaddress', 'deployment-directory')
assert_equal(final_properties, submission.properties)
finish.append(HDFS_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('namenode'))
finish.append(MR_CLUSTERS['default'].LOGICAL_NAME.set_for_testing('jobtracker'))
clear_sys_caches()
fs = cluster.get_hdfs()
jt = cluster.get_next_ha_mrcluster()[1]
final_properties = properties.copy()
final_properties.update({
'jobTracker': 'jobtracker',
'nameNode': 'namenode'
})
submission = Submission(None, properties=properties, oozie_id='test', fs=fs, jt=jt)
assert_equal(properties, submission.properties)
submission._update_properties('jtaddress', 'deployment-directory')
assert_equal(final_properties, submission.properties)
finally:
clear_sys_caches()
for reset in finish:
reset()
def test_get_external_parameters(self):
xml = """
<workflow-app name="Pig" xmlns="uri:oozie:workflow:0.4">
<start to="Pig"/>
<action name="Pig">
<pig>
<job-tracker>${jobTracker}</job-tracker>
<name-node>${nameNode}</name-node>
<prepare>
<delete path="${output}"/>
</prepare>
<script>aggregate.pig</script>
<argument>-param</argument>
<argument>INPUT=${input}</argument>
<argument>-param</argument>
<argument>OUTPUT=${output}</argument>
<configuration>
<property>
<name>mapred.input.format.class</name>
<value>org.apache.hadoop.examples.SleepJob$SleepInputFormat</value>
</property>
</configuration>
</pig>
<ok to="end"/>
<error to="kill"/>
</action>
<kill name="kill">
<message>Action failed, error message[${wf:errorMessage(wf:lastErrorNode())}]</message>
</kill>
<end name="end"/>
</workflow-app>
"""
properties = """
#
# Licensed to the Hue
#
nameNode=hdfs://localhost:8020
jobTracker=localhost:8021
queueName=default
examplesRoot=examples
oozie.use.system.libpath=true
oozie.wf.application.path=${nameNode}/user/${user.name}/${examplesRoot}/apps/pig
"""
parameters = Submission(self.user)._get_external_parameters(xml, properties)
assert_equal({'oozie.use.system.libpath': 'true',
'input': '',
'jobTracker': 'localhost:8021',
'oozie.wf.application.path': '${nameNode}/user/${user.name}/${examplesRoot}/apps/pig',
'examplesRoot': 'examples',
'output': '',
'nameNode': 'hdfs://localhost:8020',
'queueName': 'default'
},
parameters)<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS, |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod effects;
pub mod formats;
#[macro_use]
mod forward;
use core::geometry::Position;
use core::geometry::M44;
use core::resource::ResourceLoader;
use std::clone::Clone;
use cgmath;
use frontend::render::forward::PrimitiveIndex;
use frontend::render::forward::Vertex;
use frontend::render::forward::VertexIndex;
use std::convert;
use std::fmt;
use std::result;
use gfx;
use gfx::traits::FactoryExt;
use gfx::Factory;
#[derive(Clone, PartialEq)]
pub struct Appearance {
color: formats::Rgba,
effect: formats::Float4,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum Style {
Ball = 0,
Flat,
Stage,
Particle,
Wireframe,
Lit,
Lines,
DebugLines,
Count,
}
impl Appearance {
pub fn new(color: formats::Rgba, effect: formats::Float4) -> Self { Appearance { color, effect } }
pub fn rgba(color: formats::Rgba) -> Self { Appearance { color, effect: [1., 0., 0., 0.] } }
}
// pub type GFormat = Rgba;
pub const BACKGROUND: formats::Rgba = [0.01, 0.01, 0.01, 1.0];
#[allow(unused)]
const QUAD_VERTICES: [Vertex; 4] = [
new_vertex!([-1.0, -1.0, 0.0], [0.0, 0.0]),
new_vertex!([1.0, -1.0, 0.0], [1.0, 0.0]),
new_vertex!([1.0, 1.0, 0.0], [1.0, 1.0]),
new_vertex!([-1.0, 1.0, 0.0], [0.0, 1.0]),
];
const QUAD_INDICES: [VertexIndex; 6] = [0, 1, 2, 0, 2, 3];
const TRI_VERTICES: [Vertex; 3] = [
new_vertex!([0.0, 0.0, 0.0], [0.5, 0.5]),
new_vertex!([1.0, 0.0, 0.0], [1.0, 0.5]),
new_vertex!([0.0, 1.0, 0.0], [0.5, 1.0]),
];
const TRI_INDICES: [VertexIndex; 3] = [0, 1, 2];
pub struct Camera {
pub projection: M44,
pub view: M44,
}
impl Camera {
pub fn ortho(center: Position, scale: f32, ratio: f32) -> Camera {
Camera {
projection: {
let hw = 0.5 * scale;
let hh = hw / ratio;
let near = 10.0;
let far = -near;
cgmath::ortho(-hw, hw, -hh, hh, near, far)
},
view: cgmath::Matrix4::look_at(
cgmath::Point3::new(center.x, center.y, 1.0),
cgmath::Point3::new(center.x, center.y, 0.0),
cgmath::Vector3::unit_y(),
),
}
}
}
#[derive(Debug)]
pub enum RenderError {
Shader(String),
PrimitiveIndexOverflow,
}
pub type Result<T> = result::Result<T, RenderError>;
impl<T: fmt::Display> convert::From<T> for RenderError {
fn from(e: T) -> Self { RenderError::Shader(e.to_string()) }
}
trait RenderFactoryExt<R: gfx::Resources>: gfx::traits::FactoryExt<R> {
fn create_shader_set_with_geometry(
&mut self,
gs_code: &[u8],
vs_code: &[u8],
ps_code: &[u8],
) -> Result<gfx::ShaderSet<R>> {
let gs = self.create_shader_geometry(gs_code)?;
let vs = self.create_shader_vertex(vs_code)?;
let ps = self.create_shader_pixel(ps_code)?;
Ok(gfx::ShaderSet::Geometry(vs, gs, ps))
}
fn create_msaa_surfaces(
&mut self,
width: gfx::texture::Size,
height: gfx::texture::Size,
) -> Result<formats::RenderSurfaceWithDepth<R>> {
let (_, color_resource, color_target) = self.create_msaa_render_target(formats::MSAA_MODE, width, height)?;
let (_, _, depth_target) = self.create_msaa_depth(formats::MSAA_MODE, width, height)?;
Ok((color_resource, color_target, depth_target))
}
fn create_msaa_depth(
&mut self,
aa: gfx::texture::AaMode,
width: gfx::texture::Size,
height: gfx::texture::Size,
) -> Result<formats::DepthSurface<R>> {
let kind = gfx::texture::Kind::D2(width, height, aa);
let tex = self.create_texture(
kind,
1,
gfx::memory::Bind::SHADER_RESOURCE | gfx::memory::Bind::DEPTH_STENCIL,
gfx::memory::Usage::Data,
Some(gfx::format::ChannelType::Float),
)?;
let resource = self.view_texture_as_shader_resource::<formats::RenderDepthFormat>(
&tex,
(0, 0),
gfx::format::Swizzle::new(),
)?;
let target = self.view_texture_as_depth_stencil_trivial(&tex)?;
Ok((tex, resource, target))
}
fn create_msaa_render_target(
&mut self,
aa: gfx::texture::AaMode,
width: gfx::texture::Size,
height: gfx::texture::Size,
) -> Result<formats::RenderSurface<R>> {
let kind = gfx::texture::Kind::D2(width, height, aa);
let tex = self.create_texture(
kind,
1,
gfx::memory::Bind::SHADER_RESOURCE | gfx::memory::Bind::RENDER_TARGET,
gfx::memory::Usage::Data,
Some(gfx::format::ChannelType::Float),
)?;
let hdr_srv = self.view_texture_as_shader_resource::<formats::RenderColorFormat>(
&tex,
(0, 0),
gfx::format::Swizzle::new(),
)?;
let hdr_color_buffer = self.view_texture_as_render_target(&tex, 0, None)?;
Ok((tex, hdr_srv, hdr_color_buffer))
}
}
impl<R: gfx::Resources, E: gfx::traits::FactoryExt<R>> RenderFactoryExt<R> for E {}
#[derive(Clone)]
pub struct PrimitiveBatch {
style: Style,
vertices: Vec<Vertex>,
indices: Vec<VertexIndex>,
transforms: Vec<M44>,
appearances: Vec<Appearance>,
}
#[derive(Clone)]
pub struct PrimitiveBuffer {
max_batch_len: usize,
batches: Vec<Vec<PrimitiveBatch>>,
}
pub trait Draw {
fn draw_triangle(&mut self, style: Option<Style>, transform: M44, p: &[Position], appearance: Appearance);
fn draw_quad(&mut self, style: Option<Style>, transform: M44, ratio: f32, appearance: Appearance);
fn draw_star(&mut self, style: Option<Style>, transform: M44, vertices: &[Position], appearance: Appearance);
fn draw_lines(&mut self, style: Option<Style>, transform: M44, vertices: &[Position], appearance: Appearance);
fn draw_ball(&mut self, style: Option<Style>, transform: M44, appearance: Appearance);
}
pub trait DrawBatch {
fn draw_batch(&mut self, batch: PrimitiveBatch);
}
pub trait DrawBuffer {
fn draw_buffer(&mut self, buffer: PrimitiveBuffer);
}
pub trait PrimitiveSequence {
// Optimized batch
fn push_batch(&mut self, batch: PrimitiveBatch) -> Result<()>;
// Single entry.
// TODO: do I want to maintain both?
fn push_primitive(
&mut self,
shader: Style,
vertices: Vec<Vertex>,
indices: Vec<VertexIndex>,
transform: M44,
appearance: Appearance,
) -> Result<()>;
}
impl<T> Draw for T
where T: PrimitiveSequence
{
fn draw_triangle(&mut self, style: Option<Style>, transform: M44, p: &[Position], appearance: Appearance) {
if p.len() >= 3 {
let v = vec![
Vertex::new([p[0].x, p[0].y, 0.0], [0.5 + p[0].x * 0.5, 0.5 + p[0].y * 0.5]),
Vertex::new([p[1].x, p[1].y, 0.0], [0.5 + p[1].x * 0.5, 0.5 + p[1].y * 0.5]),
Vertex::new([p[2].x, p[2].y, 0.0], [0.5 + p[2].x * 0.5, 0.5 + p[2].y * 0.5]),
];
let i = vec![0, 1, 2];
self.push_primitive(style.unwrap_or(Style::Wireframe), v, i, transform, appearance)
.expect("Unable to draw triangle");
}<|fim▁hole|> Vertex::new([-ratio, -1.0, 0.0], [0.5 - ratio * 0.5, 0.0]),
Vertex::new([ratio, -1.0, 0.0], [0.5 + ratio * 0.5, 0.0]),
Vertex::new([ratio, 1.0, 0.0], [0.5 + ratio * 0.5, 1.0]),
Vertex::new([-ratio, 1.0, 0.0], [0.5 - ratio * 0.5, 1.0]),
];
self.push_primitive(style.unwrap_or(Style::Flat), v, QUAD_INDICES.to_vec(), transform, appearance)
.expect("Unable to draw quad");
}
fn draw_star(&mut self, style: Option<Style>, transform: M44, vertices: &[Position], appearance: Appearance) {
let mut v: Vec<_> =
vertices.iter().map(|v| Vertex::new([v.x, v.y, 0.0], [0.5 + v.x * 0.5, 0.5 + v.y * 0.5])).collect();
let n = v.len();
v.push(Vertex::default());
let mut i: Vec<VertexIndex> = Vec::new();
for k in 0..n {
i.push(n as VertexIndex);
i.push(((k + 1) % n) as VertexIndex);
i.push(k as VertexIndex);
}
self.push_primitive(style.unwrap_or(Style::Wireframe), v, i, transform, appearance)
.expect("Unable to draw star")
}
fn draw_lines(&mut self, style: Option<Style>, transform: M44, vertices: &[Position], appearance: Appearance) {
let n = vertices.len();
if n > 1 {
let dv = 1. / (n - 1) as f32;
let v: Vec<_> =
vertices.iter().enumerate().map(|(i, v)| Vertex::new([v.x, v.y, 0.0], [0.5, i as f32 * dv])).collect();
let mut i: Vec<VertexIndex> = Vec::new();
for k in 0..n - 1 {
i.push(k as VertexIndex);
i.push((k + 1) as VertexIndex);
}
self.push_primitive(style.unwrap_or(Style::Lines), v, i, transform, appearance)
.expect("Unable to draw lines");
}
}
fn draw_ball(&mut self, style: Option<Style>, transform: M44, appearance: Appearance) {
self.push_primitive(
style.unwrap_or(Style::Ball),
TRI_VERTICES.to_vec(),
TRI_INDICES.to_vec(),
transform,
appearance,
)
.expect("Unable to draw ball");
}
}
impl PrimitiveBatch {
#[allow(unused)]
pub fn new(style: Style) -> PrimitiveBatch {
PrimitiveBatch {
style,
vertices: Vec::new(),
indices: Vec::new(),
transforms: Vec::new(),
appearances: Vec::new(),
}
}
pub fn len(&self) -> usize { self.transforms.len() }
}
impl PrimitiveSequence for PrimitiveBatch {
fn push_batch(&mut self, mut batch: PrimitiveBatch) -> Result<()> {
self.push_primitive_buffers(batch.style, batch.vertices, batch.indices)?;
self.transforms.append(&mut batch.transforms);
self.appearances.append(&mut batch.appearances);
Ok(())
}
fn push_primitive(
&mut self,
shader: Style,
vertices: Vec<Vertex>,
indices: Vec<VertexIndex>,
transform: M44,
appearance: Appearance,
) -> Result<()> {
self.push_primitive_buffers(shader, vertices, indices)?;
self.transforms.push(transform);
self.appearances.push(appearance);
Ok(())
}
}
impl PrimitiveBatch {
fn push_primitive_buffers(
&mut self,
shader: Style,
mut vertices: Vec<Vertex>,
mut indices: Vec<VertexIndex>,
) -> Result<()> {
self.style = shader;
let primitive_offset = self.transforms.len();
if primitive_offset > PrimitiveIndex::max_value() as usize {
Err(RenderError::PrimitiveIndexOverflow)
} else {
let vertex_offset = self.vertices.len() as VertexIndex;
for v in &mut vertices {
v.primitive_index = primitive_offset as PrimitiveIndex;
}
for i in &mut indices {
*i += vertex_offset;
}
self.indices.append(&mut indices);
self.vertices.append(&mut vertices);
Ok(())
}
}
}
impl PrimitiveBuffer {
pub fn new() -> PrimitiveBuffer {
PrimitiveBuffer { max_batch_len: 256, batches: vec![Vec::new(); Style::Count as usize] }
}
}
impl PrimitiveSequence for PrimitiveBuffer {
fn push_batch(&mut self, batch: PrimitiveBatch) -> Result<()> {
let batch_list = &mut self.batches[batch.style as usize];
let is_empty = batch_list.is_empty();
let last_len = batch_list.last().map(PrimitiveBatch::len).unwrap_or(0);
if is_empty || last_len + batch.len() > self.max_batch_len {
batch_list.push(batch);
Ok(())
} else {
batch_list.last_mut().map(|l| l.push_batch(batch)).unwrap_or(Ok(()))
}
}
fn push_primitive(
&mut self,
style: Style,
vertices: Vec<Vertex>,
indices: Vec<VertexIndex>,
transform: M44,
appearance: Appearance,
) -> Result<()> {
self.push_batch(PrimitiveBatch {
style,
vertices,
indices,
transforms: vec![transform],
appearances: vec![appearance],
})
}
}
pub trait Overlay<R, F, C>
where
R: gfx::Resources,
C: gfx::CommandBuffer<R>,
F: Factory<R>, {
fn overlay<O>(&mut self, callback: O)
where O: FnMut(&mut F, &mut gfx::Encoder<R, C>);
}
pub enum Light {
PointLight { position: Position, color: formats::Rgba, attenuation: formats::Rgba },
}
pub trait Renderer<R: gfx::Resources, C: gfx::CommandBuffer<R>>: Draw {
fn setup_frame(&mut self, camera: &Camera, background_color: formats::Rgba, lights: &[Light]);
fn begin_frame(&mut self);
fn resolve_frame_buffer(&mut self);
fn end_frame<D: gfx::Device<Resources = R, CommandBuffer = C>>(&mut self, device: &mut D);
fn cleanup<D: gfx::Device<Resources = R, CommandBuffer = C>>(&mut self, device: &mut D);
}
pub struct ForwardRenderer<
'e,
'l,
R: gfx::Resources,
C: 'e + gfx::CommandBuffer<R>,
F: gfx::Factory<R>,
L: 'l + ResourceLoader<u8>,
> {
factory: F,
pub encoder: &'e mut gfx::Encoder<R, C>,
res: &'l L,
frame_buffer: gfx::handle::RenderTargetView<R, formats::ScreenColorFormat>,
depth: gfx::handle::DepthStencilView<R, formats::RenderDepthFormat>,
hdr_srv: gfx::handle::ShaderResourceView<R, formats::Rgba>,
hdr_color: gfx::handle::RenderTargetView<R, formats::RenderColorFormat>,
pass_forward_lighting: forward::ForwardLighting<R, C, forward::ShadedInit<'static>>,
pass_effects: effects::PostLighting<R, C>,
background_color: formats::Rgba,
}
impl<'e, 'l, R: gfx::Resources, C: gfx::CommandBuffer<R>, F: Factory<R> + Clone, L: ResourceLoader<u8>>
ForwardRenderer<'e, 'l, R, C, F, L>
{
pub fn new(
factory: &mut F,
encoder: &'e mut gfx::Encoder<R, C>,
res: &'l L,
frame_buffer: &gfx::handle::RenderTargetView<R, formats::ScreenColorFormat>,
) -> Result<ForwardRenderer<'e, 'l, R, C, F, L>> {
let my_factory = factory.clone();
let (w, h, _, _) = frame_buffer.get_dimensions();
let (hdr_srv, hdr_color_buffer, depth_buffer) = factory.create_msaa_surfaces(w, h)?;
let forward = forward::ForwardLighting::new(factory, res, forward::shaded::new())?;
let effects = effects::PostLighting::new(factory, res, w, h)?;
Ok(ForwardRenderer {
factory: my_factory,
res,
encoder,
hdr_srv,
hdr_color: hdr_color_buffer,
depth: depth_buffer,
frame_buffer: frame_buffer.clone(),
pass_forward_lighting: forward,
pass_effects: effects,
background_color: BACKGROUND,
})
}
pub fn rebuild(&mut self) -> Result<()> {
let factory = &mut self.factory;
let (w, h, _, _) = self.frame_buffer.get_dimensions();
let pass_forward_lighting = forward::ForwardLighting::new(factory, self.res, forward::shaded::new())?;
let pass_effects = effects::PostLighting::new(factory, self.res, w, h)?;
self.pass_forward_lighting = pass_forward_lighting;
self.pass_effects = pass_effects;
Ok(())
}
pub fn resize_to(
&mut self,
frame_buffer: &gfx::handle::RenderTargetView<R, formats::ScreenColorFormat>,
) -> Result<()> {
// TODO: this thing leaks?
let (w, h, _, _) = frame_buffer.get_dimensions();
let (hdr_srv, hdr_color_buffer, depth_buffer) = self.factory.create_msaa_surfaces(w, h)?;
self.hdr_srv = hdr_srv;
self.hdr_color = hdr_color_buffer;
self.depth = depth_buffer;
self.frame_buffer = frame_buffer.clone();
self.pass_effects = effects::PostLighting::new(&mut self.factory, self.res, w, h)?;
Ok(())
}
}
impl<'e, 'l, R: gfx::Resources, C: gfx::CommandBuffer<R>, F: Factory<R>, L: ResourceLoader<u8>> DrawBatch
for ForwardRenderer<'e, 'l, R, C, F, L>
{
fn draw_batch(&mut self, batch: PrimitiveBatch) { self.push_batch(batch).expect("Could not draw batch"); }
}
impl<'e, 'l, R: gfx::Resources, C: gfx::CommandBuffer<R>, F: Factory<R>, L: ResourceLoader<u8>> DrawBuffer
for ForwardRenderer<'e, 'l, R, C, F, L>
{
fn draw_buffer(&mut self, mut buffer: PrimitiveBuffer) {
for batch_list in buffer.batches.drain(..) {
for batch in batch_list {
self.push_batch(batch).expect("Could not draw batch");
}
}
}
}
impl<'e, 'l, R: gfx::Resources, C: gfx::CommandBuffer<R>, F: Factory<R>, L: ResourceLoader<u8>> PrimitiveSequence
for ForwardRenderer<'e, 'l, R, C, F, L>
{
fn push_batch(&mut self, batch: PrimitiveBatch) -> Result<()> {
let models: Vec<forward::ModelArgs> =
batch.transforms.iter().map(|transform| forward::ModelArgs { transform: (*transform).into() }).collect();
let materials: Vec<forward::MaterialArgs> = batch
.appearances
.iter()
.map(|appearance| forward::MaterialArgs { emissive: appearance.color, effect: appearance.effect })
.collect();
let (vertex_buffer, index_buffer) =
self.factory.create_vertex_buffer_with_slice(batch.vertices.as_slice(), batch.indices.as_slice());
self.pass_forward_lighting.draw_primitives(
batch.style,
&mut self.encoder,
vertex_buffer,
&index_buffer,
&models,
&materials,
&self.hdr_color,
&self.depth,
)?;
Ok(())
}
fn push_primitive(
&mut self,
shader: Style,
vertices: Vec<Vertex>,
indices: Vec<VertexIndex>,
transform: M44,
appearance: Appearance,
) -> Result<()> {
let models = vec![forward::ModelArgs { transform: transform.into() }];
let materials = vec![forward::MaterialArgs { emissive: appearance.color, effect: appearance.effect }];
let (vertex_buffer, index_buffer) =
self.factory.create_vertex_buffer_with_slice(vertices.as_slice(), indices.as_slice());
self.pass_forward_lighting.draw_primitives(
shader,
&mut self.encoder,
vertex_buffer,
&index_buffer,
&models,
&materials,
&self.hdr_color,
&self.depth,
)?;
Ok(())
}
}
impl<'e, 'l, R: gfx::Resources, C: 'e + gfx::CommandBuffer<R>, F: Factory<R>, L: ResourceLoader<u8>> Renderer<R, C>
for ForwardRenderer<'e, 'l, R, C, F, L>
{
fn setup_frame(&mut self, camera: &Camera, background_color: formats::Rgba, lights: &[Light]) {
self.background_color = background_color;
let mut forward_lights: Vec<forward::PointLight> = Vec::new();
for p in lights {
match p {
Light::PointLight { position, color, attenuation } => {
forward_lights.push(forward::PointLight {
propagation: *attenuation,
center: [position.x, position.y, 2.0, 1.0],
color: *color,
});
}
}
}
self.pass_forward_lighting
.setup(&mut self.encoder, camera.projection, camera.view, &forward_lights)
.expect("Unable to setup lighting");
}
fn begin_frame(&mut self) {
self.encoder.clear(&self.hdr_color, self.background_color);
self.encoder.clear_depth(&self.depth, 1.0f32);
self.encoder.clear(&self.frame_buffer, self.background_color);
}
fn resolve_frame_buffer(&mut self) {
self.pass_effects.apply_all(&mut self.encoder, &self.hdr_srv, &self.frame_buffer);
}
fn end_frame<D: gfx::Device<Resources = R, CommandBuffer = C>>(&mut self, device: &mut D) {
self.encoder.flush(device);
}
fn cleanup<D: gfx::Device<Resources = R, CommandBuffer = C>>(&mut self, device: &mut D) { device.cleanup(); }
}
impl<'e, 'l, R: gfx::Resources, C: 'e + gfx::CommandBuffer<R>, F: Factory<R>, L: ResourceLoader<u8>> Overlay<R, F, C>
for ForwardRenderer<'e, 'l, R, C, F, L>
{
fn overlay<O>(&mut self, mut callback: O)
where
O: FnMut(&mut F, &mut gfx::Encoder<R, C>),
F: Factory<R>, {
callback(&mut self.factory, &mut self.encoder)
}
}<|fim▁end|> | }
fn draw_quad(&mut self, style: Option<Style>, transform: M44, ratio: f32, appearance: Appearance) {
let v = vec![ |
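A minimal Python sketch of the merging rule in `PrimitiveBatch::push_primitive_buffers` above: each appended vertex is stamped with the next primitive slot, and incoming indices are shifted by the number of vertices already stored. Names are illustrative:
def push_primitive(batch, vertices, indices, transform):
    primitive_index = len(batch['transforms'])  # one slot per primitive
    vertex_offset = len(batch['vertices'])
    batch['vertices'] += [(v, primitive_index) for v in vertices]
    batch['indices'] += [i + vertex_offset for i in indices]
    batch['transforms'].append(transform)

batch = {'vertices': [], 'indices': [], 'transforms': []}
push_primitive(batch, ['a', 'b', 'c'], [0, 1, 2], 'm0')
push_primitive(batch, ['d', 'e', 'f'], [0, 1, 2], 'm1')
assert batch['indices'] == [0, 1, 2, 3, 4, 5]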
<|file_name|>list1.py<|end_file_name|><|fim▁begin|>資料 = [1, 2, 3, 4, 5]
'''
program: list1.py
'''
print(資料[:3])
print(資料[2:])
print(資料[1:2])
a = [3, 5, 7, 11, 13]
for x in a:
if x == 7:
print('list contains 7')<|fim▁hole|>
for 索引 in range(-5, 6, 2):
print(索引)
squares = [ x*x for x in range(0, 11) ]
print(squares)
a = [10, 'sage', 3.14159]
b = a[:]
# list.pop([i]) removes and returns the element at index i; it defaults to the last element
print(b.pop())
print(a)
數列 = [0]*10
print(數列)
'''
usage of the del statement
'''
a = [1, 2, 3, 4]
print("刪除之前:", a)
del a[:2]
print("刪除之後:", a)<|fim▁end|> | break
print(list(range(10))) |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|># coding: utf-8<|fim▁hole|>from django.test import TestCase
from django.db import IntegrityError
from datetime import datetime
from eventex.subscriptions.models import Subscription
class SubscriptionTest(TestCase):
def setUp(self):
self.obj = Subscription(
name='Henrique Bastos',
cpf='12345678901',
email='[email protected]',
phone='21-96186180'
)
def test_create(self):
"""
Subscription must have name, cpf, email, phone
"""
self.obj.save()
self.assertEqual(1, self.obj.pk)
def test_has_created_at(self):
"""
Subscription must have an automatic created_at.
"""
self.obj.save()
self.assertIsInstance(self.obj.created_at, datetime)
def test_unicode(self):
self.assertEqual(u'Henrique Bastos', unicode(self.obj))
def test_paid_default_value_is_False(self):
"""
By default paid must be False.
"""
self.assertEqual(False, self.obj.paid)
class SubscriptionUniqueTest(TestCase):
def setUp(self):
# Create a first entry to force collision.
Subscription.objects.create(name='Henrique Bastos', cpf='12345678901',
email='[email protected]', phone='21-96186180')
def test_cpf_unique(self):
"""
CPF must be unique
"""
s = Subscription(name='Henrique Bastos', cpf='12345678901',
email='[email protected]', phone='21-96186180')
self.assertRaises(IntegrityError, s.save)
def test_email_can_repeat(self):
"""
Email is not unique anymore.
"""
s = Subscription.objects.create(name='Henrique Bastos', cpf='00000000011',
email='[email protected]')
self.assertEqual(2, s.pk)<|fim▁end|> | |
<|file_name|>ex40.py<|end_file_name|><|fim▁begin|>class Song(object):
def __init__(self, lyrics):
self.lyrics = lyrics
def sing_me_a_song(self):
for line in self.lyrics:
print line
happy_bday = Song(["Happy birthday to you",
"I don't want to get sued",<|fim▁hole|>bulls_on_parade = Song(["They rally around tha family",
"With pockets full of shells"])
happy_bday.sing_me_a_song()
bulls_on_parade.sing_me_a_song()<|fim▁end|> | "So I'll stop right here"])
|
<|file_name|>component_fixture.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import {ChangeDetectorRef, ComponentRef, DebugElement, ElementRef, getDebugNode, NgZone, RendererFactory2} from '@angular/core';
/**
* Fixture for debugging and testing a component.
*
* @publicApi
*/
export class ComponentFixture<T> {
/**
* The DebugElement associated with the root element of this component.
*/
debugElement: DebugElement;
/**
* The instance of the root component class.
*/
componentInstance: T;
/**
* The native element at the root of the component.
*/
nativeElement: any;
/**
* The ElementRef for the element at the root of the component.
*/
elementRef: ElementRef;
/**
* The ChangeDetectorRef for the component
*/<|fim▁hole|> private _renderer: RendererFactory2|null|undefined;
private _isStable: boolean = true;
private _isDestroyed: boolean = false;
private _resolve: ((result: any) => void)|null = null;
private _promise: Promise<any>|null = null;
private _onUnstableSubscription: any /** TODO #9100 */ = null;
private _onStableSubscription: any /** TODO #9100 */ = null;
private _onMicrotaskEmptySubscription: any /** TODO #9100 */ = null;
private _onErrorSubscription: any /** TODO #9100 */ = null;
constructor(
public componentRef: ComponentRef<T>, public ngZone: NgZone|null,
private _autoDetect: boolean) {
this.changeDetectorRef = componentRef.changeDetectorRef;
this.elementRef = componentRef.location;
this.debugElement = <DebugElement>getDebugNode(this.elementRef.nativeElement);
this.componentInstance = componentRef.instance;
this.nativeElement = this.elementRef.nativeElement;
this.componentRef = componentRef;
this.ngZone = ngZone;
if (ngZone) {
// Create subscriptions outside the NgZone so that the callbacks run outside
// of NgZone.
ngZone.runOutsideAngular(() => {
this._onUnstableSubscription = ngZone.onUnstable.subscribe({
next: () => {
this._isStable = false;
}
});
this._onMicrotaskEmptySubscription = ngZone.onMicrotaskEmpty.subscribe({
next: () => {
if (this._autoDetect) {
// Do a change detection run with checkNoChanges set to true to check
// there are no changes on the second run.
this.detectChanges(true);
}
}
});
this._onStableSubscription = ngZone.onStable.subscribe({
next: () => {
this._isStable = true;
// Check whether there is a pending whenStable() completer to resolve.
if (this._promise !== null) {
// If so check whether there are no pending macrotasks before resolving.
// Do this check in the next tick so that ngZone gets a chance to update the state of
// pending macrotasks.
scheduleMicroTask(() => {
if (!ngZone.hasPendingMacrotasks) {
if (this._promise !== null) {
this._resolve!(true);
this._resolve = null;
this._promise = null;
}
}
});
}
}
});
this._onErrorSubscription = ngZone.onError.subscribe({
next: (error: any) => {
throw error;
}
});
});
}
}
private _tick(checkNoChanges: boolean) {
this.changeDetectorRef.detectChanges();
if (checkNoChanges) {
this.checkNoChanges();
}
}
/**
* Trigger a change detection cycle for the component.
*/
detectChanges(checkNoChanges: boolean = true): void {
if (this.ngZone != null) {
// Run the change detection inside the NgZone so that any async tasks as part of the change
// detection are captured by the zone and can be waited for in isStable.
this.ngZone.run(() => {
this._tick(checkNoChanges);
});
} else {
// Running without zone. Just do the change detection.
this._tick(checkNoChanges);
}
}
/**
* Do a change detection run to make sure there were no changes.
*/
checkNoChanges(): void {
this.changeDetectorRef.checkNoChanges();
}
/**
* Set whether the fixture should autodetect changes.
*
* Also runs detectChanges once so that any existing change is detected.
*/
autoDetectChanges(autoDetect: boolean = true) {
if (this.ngZone == null) {
throw new Error('Cannot call autoDetectChanges when ComponentFixtureNoNgZone is set');
}
this._autoDetect = autoDetect;
this.detectChanges();
}
/**
* Return whether the fixture is currently stable or has async tasks that have not been completed
* yet.
*/
isStable(): boolean {
return this._isStable && !this.ngZone!.hasPendingMacrotasks;
}
/**
* Get a promise that resolves when the fixture is stable.
*
* This can be used to resume testing after events have triggered asynchronous activity or
* asynchronous change detection.
*/
whenStable(): Promise<any> {
if (this.isStable()) {
return Promise.resolve(false);
} else if (this._promise !== null) {
return this._promise;
} else {
this._promise = new Promise(res => {
this._resolve = res;
});
return this._promise;
}
}
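// Usage sketch (illustrative test code, not part of this file; assumes a
// TestBed-created fixture):
//   fixture.componentInstance.startAsyncWork();
//   await fixture.whenStable();
//   fixture.detectChanges();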
private _getRenderer() {
if (this._renderer === undefined) {
this._renderer = this.componentRef.injector.get(RendererFactory2, null);
}
return this._renderer as RendererFactory2 | null;
}
/**
* Get a promise that resolves when the ui state is stable following animations.
*/
whenRenderingDone(): Promise<any> {
const renderer = this._getRenderer();
if (renderer && renderer.whenRenderingDone) {
return renderer.whenRenderingDone();
}
return this.whenStable();
}
/**
* Trigger component destruction.
*/
destroy(): void {
if (!this._isDestroyed) {
this.componentRef.destroy();
if (this._onUnstableSubscription != null) {
this._onUnstableSubscription.unsubscribe();
this._onUnstableSubscription = null;
}
if (this._onStableSubscription != null) {
this._onStableSubscription.unsubscribe();
this._onStableSubscription = null;
}
if (this._onMicrotaskEmptySubscription != null) {
this._onMicrotaskEmptySubscription.unsubscribe();
this._onMicrotaskEmptySubscription = null;
}
if (this._onErrorSubscription != null) {
this._onErrorSubscription.unsubscribe();
this._onErrorSubscription = null;
}
this._isDestroyed = true;
}
}
}
function scheduleMicroTask(fn: Function) {
Zone.current.scheduleMicroTask('scheduleMicrotask', fn);
}<|fim▁end|> | changeDetectorRef: ChangeDetectorRef;
|
<|file_name|>WriterUtils.cpp<|end_file_name|><|fim▁begin|>//===- WriterUtils.cpp ----------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "WriterUtils.h"
#include "lld/Common/ErrorHandler.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/EndianStream.h"
#include "llvm/Support/LEB128.h"
#define DEBUG_TYPE "lld"
using namespace llvm;
using namespace llvm::wasm;
namespace lld {
std::string toString(ValType type) {
switch (type) {
case ValType::I32:
return "i32";
case ValType::I64:
return "i64";
case ValType::F32:
return "f32";
case ValType::F64:
return "f64";
case ValType::V128:
return "v128";
case ValType::EXNREF:
return "exnref";
case ValType::EXTERNREF:
return "externref";
}
llvm_unreachable("Invalid wasm::ValType");
}
std::string toString(const WasmSignature &sig) {
SmallString<128> s("(");
for (ValType type : sig.Params) {
if (s.size() != 1)
s += ", ";
s += toString(type);
}
s += ") -> ";
if (sig.Returns.empty())
s += "void";
else
s += toString(sig.Returns[0]);
return std::string(s.str());
}
std::string toString(const WasmGlobalType &type) {
return (type.Mutable ? "var " : "const ") +
toString(static_cast<ValType>(type.Type));
}
std::string toString(const WasmEventType &type) {
if (type.Attribute == WASM_EVENT_ATTRIBUTE_EXCEPTION)
return "exception";
return "unknown";
}
namespace wasm {
void debugWrite(uint64_t offset, const Twine &msg) {
LLVM_DEBUG(dbgs() << format(" | %08lld: ", offset) << msg << "\n");
}
void writeUleb128(raw_ostream &os, uint64_t number, const Twine &msg) {
debugWrite(os.tell(), msg + "[" + utohexstr(number) + "]");
encodeULEB128(number, os);
}
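// For example (illustrative call): writeUleb128(os, 624485, "count") emits the
// LEB128 bytes 0xE5 0x8E 0x26.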
void writeSleb128(raw_ostream &os, int64_t number, const Twine &msg) {
debugWrite(os.tell(), msg + "[" + utohexstr(number) + "]");
encodeSLEB128(number, os);
}
void writeBytes(raw_ostream &os, const char *bytes, size_t count,
const Twine &msg) {
debugWrite(os.tell(), msg + " [data[" + Twine(count) + "]]");
os.write(bytes, count);
}
void writeStr(raw_ostream &os, StringRef string, const Twine &msg) {
debugWrite(os.tell(),
msg + " [str[" + Twine(string.size()) + "]: " + string + "]");
encodeULEB128(string.size(), os);
os.write(string.data(), string.size());
}
void writeU8(raw_ostream &os, uint8_t byte, const Twine &msg) {
debugWrite(os.tell(), msg + " [0x" + utohexstr(byte) + "]");
os << byte;
}
void writeU32(raw_ostream &os, uint32_t number, const Twine &msg) {
debugWrite(os.tell(), msg + "[0x" + utohexstr(number) + "]");
support::endian::write(os, number, support::little);
}
void writeU64(raw_ostream &os, uint64_t number, const Twine &msg) {
debugWrite(os.tell(), msg + "[0x" + utohexstr(number) + "]");
support::endian::write(os, number, support::little);
}
void writeValueType(raw_ostream &os, ValType type, const Twine &msg) {
writeU8(os, static_cast<uint8_t>(type),
msg + "[type: " + toString(type) + "]");
}
void writeSig(raw_ostream &os, const WasmSignature &sig) {
writeU8(os, WASM_TYPE_FUNC, "signature type");
writeUleb128(os, sig.Params.size(), "param Count");
for (ValType paramType : sig.Params) {
writeValueType(os, paramType, "param type");
}
writeUleb128(os, sig.Returns.size(), "result Count");
for (ValType returnType : sig.Returns) {
writeValueType(os, returnType, "result type");
}
}
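// For example (illustrative): a signature (i32, i32) -> i64 is emitted as the
// func type byte 0x60, a param count of 2, two i32 type bytes, a result count
// of 1, and one i64 type byte.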
void writeI32Const(raw_ostream &os, int32_t number, const Twine &msg) {
writeU8(os, WASM_OPCODE_I32_CONST, "i32.const");
writeSleb128(os, number, msg);
}
void writeI64Const(raw_ostream &os, int64_t number, const Twine &msg) {
writeU8(os, WASM_OPCODE_I64_CONST, "i64.const");
writeSleb128(os, number, msg);
}
void writeMemArg(raw_ostream &os, uint32_t alignment, uint64_t offset) {
writeUleb128(os, alignment, "alignment");<|fim▁hole|>}
void writeInitExpr(raw_ostream &os, const WasmInitExpr &initExpr) {
writeU8(os, initExpr.Opcode, "opcode");
switch (initExpr.Opcode) {
case WASM_OPCODE_I32_CONST:
writeSleb128(os, initExpr.Value.Int32, "literal (i32)");
break;
case WASM_OPCODE_I64_CONST:
writeSleb128(os, initExpr.Value.Int64, "literal (i64)");
break;
case WASM_OPCODE_F32_CONST:
writeU32(os, initExpr.Value.Float32, "literal (f32)");
break;
case WASM_OPCODE_F64_CONST:
writeU64(os, initExpr.Value.Float64, "literal (f64)");
break;
case WASM_OPCODE_GLOBAL_GET:
writeUleb128(os, initExpr.Value.Global, "literal (global index)");
break;
case WASM_OPCODE_REF_NULL:
writeValueType(os, ValType::EXTERNREF, "literal (externref type)");
break;
default:
fatal("unknown opcode in init expr: " + Twine(initExpr.Opcode));
}
writeU8(os, WASM_OPCODE_END, "opcode:end");
}
void writeLimits(raw_ostream &os, const WasmLimits &limits) {
writeU8(os, limits.Flags, "limits flags");
writeUleb128(os, limits.Initial, "limits initial");
if (limits.Flags & WASM_LIMITS_FLAG_HAS_MAX)
writeUleb128(os, limits.Maximum, "limits max");
}
void writeGlobalType(raw_ostream &os, const WasmGlobalType &type) {
// TODO: Update WasmGlobalType to use ValType and remove this cast.
writeValueType(os, ValType(type.Type), "global type");
writeU8(os, type.Mutable, "global mutable");
}
void writeGlobal(raw_ostream &os, const WasmGlobal &global) {
writeGlobalType(os, global.Type);
writeInitExpr(os, global.InitExpr);
}
void writeEventType(raw_ostream &os, const WasmEventType &type) {
writeUleb128(os, type.Attribute, "event attribute");
writeUleb128(os, type.SigIndex, "sig index");
}
void writeEvent(raw_ostream &os, const WasmEvent &event) {
writeEventType(os, event.Type);
}
void writeTableType(raw_ostream &os, const llvm::wasm::WasmTable &type) {
writeU8(os, WASM_TYPE_FUNCREF, "table type");
writeLimits(os, type.Limits);
}
void writeImport(raw_ostream &os, const WasmImport &import) {
writeStr(os, import.Module, "import module name");
writeStr(os, import.Field, "import field name");
writeU8(os, import.Kind, "import kind");
switch (import.Kind) {
case WASM_EXTERNAL_FUNCTION:
writeUleb128(os, import.SigIndex, "import sig index");
break;
case WASM_EXTERNAL_GLOBAL:
writeGlobalType(os, import.Global);
break;
case WASM_EXTERNAL_EVENT:
writeEventType(os, import.Event);
break;
case WASM_EXTERNAL_MEMORY:
writeLimits(os, import.Memory);
break;
case WASM_EXTERNAL_TABLE:
writeTableType(os, import.Table);
break;
default:
fatal("unsupported import type: " + Twine(import.Kind));
}
}
void writeExport(raw_ostream &os, const WasmExport &export_) {
writeStr(os, export_.Name, "export name");
writeU8(os, export_.Kind, "export kind");
switch (export_.Kind) {
case WASM_EXTERNAL_FUNCTION:
writeUleb128(os, export_.Index, "function index");
break;
case WASM_EXTERNAL_GLOBAL:
writeUleb128(os, export_.Index, "global index");
break;
case WASM_EXTERNAL_EVENT:
writeUleb128(os, export_.Index, "event index");
break;
case WASM_EXTERNAL_MEMORY:
writeUleb128(os, export_.Index, "memory index");
break;
case WASM_EXTERNAL_TABLE:
writeUleb128(os, export_.Index, "table index");
break;
default:
fatal("unsupported export type: " + Twine(export_.Kind));
}
}
} // namespace wasm
} // namespace lld<|fim▁end|> | writeUleb128(os, offset, "offset"); |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import { clamp, mean, isNaN, get, isArray } from 'lodash';
import update from 'immutability-helper';
import { helpers } from 'veritone-redux-common';
const { createReducer } = helpers;
export const PICK_START = 'PICK_START';
export const PICK_END = 'PICK_END';
export const RETRY_REQUEST = 'RETRY_REQUEST';
export const RETRY_DONE = 'RETRY_DONE';
export const ABORT_REQUEST = 'ABORT_REQUEST';
export const UPLOAD_REQUEST = 'UPLOAD_REQUEST';
export const UPLOAD_PROGRESS = 'UPLOAD_PROGRESS';
export const UPLOAD_COMPLETE = 'UPLOAD_COMPLETE';
export const CLEAR_FILEPICKER_DATA = 'CLEAR_FILEPICKER_DATA';
export const namespace = 'filePicker';
const defaultPickerState = {
open: false,
state: 'selecting', // selecting | uploading | complete
progressPercentByFileKey: {},
success: false,
error: false,
warning: false,
uploadResult: null
};
const defaultState = {
// populated like:
// [pickerId]: { ...defaultPickerState }
};
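// Illustrative shape once populated (hypothetical picker id and file key, not
// from the original source):
//   { myPicker: { open: true, state: 'uploading',
//                 progressPercentByFileKey: { 'file-0': { percent: 42 } },
//                 success: false, error: false, warning: false,
//                 uploadResult: null } }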
export default createReducer(defaultState, {
[PICK_START](
state,
{
meta: { id }
}
) {
return {
[id]: {
...defaultPickerState,
open: true,
state: 'selecting'
}
};
},
[PICK_END](
state,
{
meta: { id }
}
) {
return {
...state,
[id]: {
...state[id],
open: false
}
};
},
[CLEAR_FILEPICKER_DATA](
state,
{
meta: { id }
}
) {
return update(state, {
$unset: [id]
});
},
[ABORT_REQUEST](
state,
{
meta: { id, fileKey }
}
) {
let newProgressPercentByFileKey = get(state, [id, 'progressPercentByFileKey'], {});
if (fileKey) {
newProgressPercentByFileKey = update(newProgressPercentByFileKey, {
[fileKey]: {
aborted: { $set: 'aborted' }
}
});
} else {
Object.keys(get(state, [id, 'progressPercentByFileKey'], {})).forEach(fileKey => {
newProgressPercentByFileKey = update(newProgressPercentByFileKey, {
[fileKey]: {
aborted: { $set: 'aborted' }
}
});
});
}
return {
...state,
[id]: {
...state[id],
progressPercentByFileKey: newProgressPercentByFileKey
}
}
},
[RETRY_REQUEST](
state,
{
meta: { id }
}
) {
return {
...state,
[id]: {
...state[id],
state: 'uploading',
progressPercentByFileKey: {},
success: null,
error: null,
warning: null
}
}
},
[RETRY_DONE](
state,
{
meta: { id }
}
) {
return {
...state,
[id]: {
...state[id],
state: 'complete'
}
};
},
[UPLOAD_REQUEST](
state,
{
meta: { id }
}
) {
// todo: status message
return {
...state,
[id]: {<|fim▁hole|> success: null,
error: null,
warning: null,
uploadResult: null
}
};
},
[UPLOAD_PROGRESS](
state,
{
payload,
meta: { fileKey, id }
}
) {
// todo: status message
return {
...state,
[id]: {
...state[id],
progressPercentByFileKey: {
...state[id].progressPercentByFileKey,
[fileKey]: {
...state[id].progressPercentByFileKey[fileKey],
...payload
}
}
}
};
},
[UPLOAD_COMPLETE](
state,
{
payload,
meta: { warning, error, id }
}
) {
const errorMessage = get(error, 'message', error); // Error or string
// Extract failed files to be reuploaded
const failedFiles = isArray(payload)
? payload
.filter(result => result.error)
.map(result => result.file)
: [];
// Combine existing uploadResult if any
const prevUploadResult = (get(state, [id, 'uploadResult']) || [])
.filter(result => !result.error);
return {
...state,
[id]: {
...state[id],
success: !(warning || error) || null,
error: error ? errorMessage : null,
warning: warning || null,
state: 'complete',
uploadResult: prevUploadResult.concat(payload),
failedFiles
}
};
}
});
const local = state => state[namespace];
export const pick = id => ({
type: PICK_START,
meta: { id }
});
export const endPick = id => ({
type: PICK_END,
meta: { id }
});
export const retryRequest = (id, callback) => ({
type: RETRY_REQUEST,
payload: { callback },
meta: { id }
});
export const abortRequest = (id, fileKey) => ({
type: ABORT_REQUEST,
meta: { id, fileKey }
});
export const retryDone = (id, callback) => ({
type: RETRY_DONE,
payload: { callback },
meta: { id }
});
export const uploadRequest = (id, files, callback) => ({
type: UPLOAD_REQUEST,
payload: { files, callback },
meta: { id }
});
export const uploadProgress = (id, fileKey, data) => ({
type: UPLOAD_PROGRESS,
payload: {
...data,
percent: clamp(Math.round(data.percent), 100)
},
meta: { fileKey, id }
});
export const uploadComplete = (id, result, { warning, error }) => ({
type: UPLOAD_COMPLETE,
payload: result,
meta: { warning, error, id }
});
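// Usage sketch (hypothetical ids and store wiring, not part of this module):
//   dispatch(uploadRequest('myPicker', files, onDone));
//   dispatch(uploadProgress('myPicker', 'file-0', { percent: 42 }));
//   dispatch(uploadComplete('myPicker', results, { warning: null, error: null }));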
export const isOpen = (state, id) => get(local(state), [id, 'open']);
export const state = (state, id) =>
get(local(state), [id, 'state'], 'selecting');
// Kept in case we want to go back to using the mean of the per-file progress percentages
export const progressPercent = (state, id) => {
const currentProgress = get(local(state), [id, 'progressPercentByFileKey']);
if (!currentProgress) {
return 0;
}
const meanProgress = mean(Object.values(currentProgress));
const rounded = Math.round(meanProgress);
return isNaN(rounded) ? 0 : rounded;
};
export const percentByFiles = (state, id) => {
const currentFiles = get(local(state), [id, 'progressPercentByFileKey'], {});
return Object.keys(currentFiles).map(key => {
const value = currentFiles[key];
return {
key,
value
};
})
}
export const failedFiles = (state, id) => {
const failedFiles = get(local(state), [id, 'failedFiles'], []);
return failedFiles;
};
export const uploadResult = (state, id) => get(local(state), [id, 'uploadResult']);
export const didSucceed = (state, id) => !!get(local(state), [id, 'success']);
export const didError = (state, id) => !!get(local(state), [id, 'error']);
export const didWarn = (state, id) => !!get(local(state), [id, 'warning']);
// todo: status message for normal cases
export const statusMessage = (state, id) =>
get(local(state), [id, 'warning']) || get(local(state), [id, 'error']) || '';<|fim▁end|> | ...state[id],
state: 'uploading',
progressPercentByFileKey: {}, |
<|file_name|>typeck-default-trait-impl-cross-crate-coherence.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:tdticc_coherence_lib.rs
// Test that we do not consider associated types to be sendable without
// some applicable trait bound (and we don't ICE).
#![feature(optin_builtin_traits)]
extern crate tdticc_coherence_lib as lib;
use lib::DefaultedTrait;
struct A;
impl DefaultedTrait for (A,) { } //~ ERROR E0117<|fim▁hole|>impl !DefaultedTrait for (B,) { } //~ ERROR E0117
struct C;
struct D<T>(T);
impl DefaultedTrait for Box<C> { } //~ ERROR E0321
impl DefaultedTrait for lib::Something<C> { } //~ ERROR E0117
impl DefaultedTrait for D<C> { } // OK
fn main() { }<|fim▁end|> |
struct B; |
<|file_name|>mesos.rs<|end_file_name|><|fim▁begin|>use crate::{common::ui::UI,
error::Result};
use std::ffi::OsString;
// It would be more consistent naming to use "export mesos" instead of "mesoize", but for backwards
// compatibility we keep "mesoize"
const EXPORT_CMD_ENVVAR: &str = "HAB_PKG_MESOSIZE_BINARY";
const EXPORT_PKG_IDENT_ENVVAR: &str = "HAB_PKG_MESOSIZE_PKG_IDENT";
const EXPORT_CMD: &str = "hab-pkg-mesosize";
pub async fn start(ui: &mut UI, args: &[OsString]) -> Result<()> {
crate::command::pkg::export::export_common::start(ui,
args,
EXPORT_CMD_ENVVAR,
EXPORT_PKG_IDENT_ENVVAR,<|fim▁hole|><|fim▁end|> | EXPORT_CMD).await
} |
<|file_name|>frontend-results.py<|end_file_name|><|fim▁begin|>@app.route('/job/<name>')
def results(name):
job = saliweb.frontend.get_completed_job(name,
flask.request.args.get('passwd'))
# Determine whether the job completed successfully<|fim▁hole|> template = 'results_failed.html'
return saliweb.frontend.render_results_template(template, job=job)<|fim▁end|> | if os.path.exists(job.get_path('output.pdb')):
template = 'results_ok.html'
else: |
<|file_name|>console.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> * License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::ConsoleBinding;
use dom::bindings::codegen::Bindings::ConsoleBinding::ConsoleMethods;
use dom::bindings::global::{GlobalRef, GlobalField};
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use devtools_traits::{DevtoolsControlMsg, ConsoleMessage};
use util::str::DOMString;
#[dom_struct]
pub struct Console {
reflector_: Reflector,
global: GlobalField,
}
impl Console {
fn new_inherited(global: GlobalRef) -> Console {
Console {
reflector_: Reflector::new(),
global: GlobalField::from_rooted(&global),
}
}
pub fn new(global: GlobalRef) -> Temporary<Console> {
reflect_dom_object(box Console::new_inherited(global), global, ConsoleBinding::Wrap)
}
}
impl<'a> ConsoleMethods for JSRef<'a, Console> {
fn Log(self, message: DOMString) {
println!("{}", message);
propagate_console_msg(&self, ConsoleMessage::LogMessage(message));
}
fn Debug(self, message: DOMString) {
println!("{}", message);
}
fn Info(self, message: DOMString) {
println!("{}", message);
}
fn Warn(self, message: DOMString) {
println!("{}", message);
}
fn Error(self, message: DOMString) {
println!("{}", message);
}
fn Assert(self, condition: bool, message: Option<DOMString>) {
if !condition {
let message = match message {
Some(ref message) => message.as_slice(),
None => "no message",
};
println!("Assertion failed: {}", message);
}
}
}
fn propagate_console_msg(console: &JSRef<Console>, console_message: ConsoleMessage) {
let global = console.global.root();
match global.r() {
GlobalRef::Window(window_ref) => {
let pipelineId = window_ref.page().id;
console.global.root().r().as_window().page().devtools_chan.as_ref().map(|chan| {
chan.send(DevtoolsControlMsg::SendConsoleMessage(
pipelineId, console_message.clone())).unwrap();
});
},
GlobalRef::Worker(_) => {
// TODO: support worker console logs
}
}
}<|fim▁end|> | /* This Source Code Form is subject to the terms of the Mozilla Public |
<|file_name|>cell_renderer.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use ffi;
use cast::GTK_CELL_RENDERER;
pub trait CellRendererTrait: ::WidgetTrait {
fn stop_editing(&self, canceled: bool) {
unsafe {
ffi::gtk_cell_renderer_stop_editing(GTK_CELL_RENDERER(self.unwrap_widget()), match canceled {
true => 1,
false => 0
})
}
}
fn get_fixed_size(&self, width: &mut i32, height: &mut i32) {
unsafe { ffi::gtk_cell_renderer_get_fixed_size(GTK_CELL_RENDERER(self.unwrap_widget()), width, height) }
}
fn set_fixed_size(&self, width: i32, height: i32) {
unsafe { ffi::gtk_cell_renderer_set_fixed_size(GTK_CELL_RENDERER(self.unwrap_widget()), width, height) }
}
fn get_visible(&self) -> bool {
match unsafe { ffi::gtk_cell_renderer_get_visible(GTK_CELL_RENDERER(self.unwrap_widget())) } {
0 => false,
_ => true
}
}
fn set_visible(&self, visible: bool) {
unsafe {
ffi::gtk_cell_renderer_set_visible(GTK_CELL_RENDERER(self.unwrap_widget()), match visible {
true => 1,
false => 0
})
}
}
fn get_sensitive(&self) -> bool {
match unsafe { ffi::gtk_cell_renderer_get_sensitive(GTK_CELL_RENDERER(self.unwrap_widget())) } {
0 => false,
_ => true
}
}
fn set_sensitive(&self, sensitive: bool) {
unsafe {
ffi::gtk_cell_renderer_set_sensitive(GTK_CELL_RENDERER(self.unwrap_widget()), match sensitive {
true => 1,
false => 0
})
}
}
fn get_alignment(&self, xalign: &mut f32, yalign: &mut f32) {
unsafe { ffi::gtk_cell_renderer_get_alignment(GTK_CELL_RENDERER(self.unwrap_widget()), xalign, yalign) }
}
fn set_alignment(&self, xalign: f32, yalign: f32) {
unsafe { ffi::gtk_cell_renderer_set_alignment(GTK_CELL_RENDERER(self.unwrap_widget()), xalign, yalign) }
}
fn get_padding(&self, xpad: &mut i32, ypad: &mut i32) {
unsafe { ffi::gtk_cell_renderer_get_padding(GTK_CELL_RENDERER(self.unwrap_widget()), xpad, ypad) }
}
fn set_padding(&self, xpad: i32, ypad: i32) {
unsafe { ffi::gtk_cell_renderer_set_padding(GTK_CELL_RENDERER(self.unwrap_widget()), xpad, ypad) }
}
fn get_state<T: ::WidgetTrait>(&self, widget: &T, cell_state: ::CellRendererState) -> ::StateFlags {
unsafe { ffi::gtk_cell_renderer_get_state(GTK_CELL_RENDERER(self.unwrap_widget()), widget.unwrap_widget(), cell_state) }
}
fn is_activable(&self) -> bool {
match unsafe { ffi::gtk_cell_renderer_is_activatable(GTK_CELL_RENDERER(self.unwrap_widget())) } {
0 => false,
_ => true
}
}
fn get_preferred_height<T: ::WidgetTrait>(&self, widget: &T, minimum_size: &mut i32, natural_size: &mut i32) {
unsafe { ffi::gtk_cell_renderer_get_preferred_height(GTK_CELL_RENDERER(self.unwrap_widget()), widget.unwrap_widget(), minimum_size,
natural_size) }
}
fn get_preferred_height_for_width<T: ::WidgetTrait>(&self, widget: &T, width: i32, minimum_size: &mut i32, natural_size: &mut i32) {
unsafe { ffi::gtk_cell_renderer_get_preferred_height_for_width(GTK_CELL_RENDERER(self.unwrap_widget()), widget.unwrap_widget(), width,
minimum_size, natural_size) }
}
fn get_preferred_width<T: ::WidgetTrait>(&self, widget: &T, minimum_size: &mut i32, natural_size: &mut i32) {<|fim▁hole|> }
fn get_preferred_width_for_height<T: ::WidgetTrait>(&self, widget: &T, height: i32, minimum_size: &mut i32, natural_size: &mut i32) {
unsafe { ffi::gtk_cell_renderer_get_preferred_width_for_height(GTK_CELL_RENDERER(self.unwrap_widget()), widget.unwrap_widget(), height,
minimum_size, natural_size) }
}
fn get_request_mode(&self) -> ::SizeRequestMode {
unsafe { ffi::gtk_cell_renderer_get_request_mode(GTK_CELL_RENDERER(self.unwrap_widget())) }
}
}<|fim▁end|> | unsafe { ffi::gtk_cell_renderer_get_preferred_width(GTK_CELL_RENDERER(self.unwrap_widget()), widget.unwrap_widget(), minimum_size,
natural_size) } |
<|file_name|>canvasrenderingcontext2d.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use canvas_traits::canvas::{Canvas2dMsg, CanvasMsg};
use canvas_traits::canvas::{CompositionOrBlending, FillOrStrokeStyle, FillRule};
use canvas_traits::canvas::{LineCapStyle, LineJoinStyle, LinearGradientStyle};
use canvas_traits::canvas::{RadialGradientStyle, RepetitionStyle, byte_swap_and_premultiply};
use cssparser::{Parser, ParserInput, RGBA};
use cssparser::Color as CSSColor;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::CSSStyleDeclarationBinding::CSSStyleDeclarationMethods;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasFillRule;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasImageSource;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasLineCap;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasLineJoin;
use dom::bindings::codegen::Bindings::CanvasRenderingContext2DBinding::CanvasRenderingContext2DMethods;
use dom::bindings::codegen::Bindings::ImageDataBinding::ImageDataMethods;
use dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use dom::bindings::codegen::UnionTypes::StringOrCanvasGradientOrCanvasPattern;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::inheritance::Castable;
use dom::bindings::num::Finite;
use dom::bindings::reflector::{DomObject, Reflector, reflect_dom_object};
use dom::bindings::root::{Dom, DomRoot, LayoutDom};
use dom::bindings::str::DOMString;
use dom::canvasgradient::{CanvasGradient, CanvasGradientStyle, ToFillOrStrokeStyle};
use dom::canvaspattern::CanvasPattern;
use dom::globalscope::GlobalScope;
use dom::htmlcanvaselement::HTMLCanvasElement;
use dom::imagedata::ImageData;
use dom::node::{document_from_node, Node, NodeDamage, window_from_node};
use dom_struct::dom_struct;
use euclid::{Transform2D, Point2D, Vector2D, Rect, Size2D, vec2};
use ipc_channel::ipc::{self, IpcSender};
use net_traits::image::base::PixelFormat;
use net_traits::image_cache::CanRequestImages;
use net_traits::image_cache::ImageCache;
use net_traits::image_cache::ImageOrMetadataAvailable;
use net_traits::image_cache::ImageResponse;
use net_traits::image_cache::ImageState;
use net_traits::image_cache::UsePlaceholder;
use num_traits::ToPrimitive;
use script_traits::ScriptMsg;
use servo_url::ServoUrl;
use std::{cmp, fmt, mem};
use std::cell::Cell;
use std::str::FromStr;
use std::sync::Arc;
use unpremultiplytable::UNPREMULTIPLY_TABLE;
#[must_root]
#[derive(Clone, JSTraceable, MallocSizeOf)]
#[allow(dead_code)]
enum CanvasFillOrStrokeStyle {
Color(RGBA),
Gradient(Dom<CanvasGradient>),
Pattern(Dom<CanvasPattern>),
}
// https://html.spec.whatwg.org/multipage/#canvasrenderingcontext2d
#[dom_struct]
pub struct CanvasRenderingContext2D {
reflector_: Reflector,
#[ignore_malloc_size_of = "Defined in ipc-channel"]
ipc_renderer: IpcSender<CanvasMsg>,
/// For rendering contexts created by an HTML canvas element, this is Some,
/// for ones created by a paint worklet, this is None.
canvas: Option<Dom<HTMLCanvasElement>>,
#[ignore_malloc_size_of = "Arc"]
image_cache: Arc<ImageCache>,
/// Any missing image URLs.
missing_image_urls: DomRefCell<Vec<ServoUrl>>,
/// The base URL for resolving CSS image URL values.
/// Needed because of https://github.com/servo/servo/issues/17625
base_url: ServoUrl,
state: DomRefCell<CanvasContextState>,
saved_states: DomRefCell<Vec<CanvasContextState>>,
origin_clean: Cell<bool>,
}
#[must_root]
#[derive(Clone, JSTraceable, MallocSizeOf)]
struct CanvasContextState {
global_alpha: f64,
global_composition: CompositionOrBlending,
image_smoothing_enabled: bool,
fill_style: CanvasFillOrStrokeStyle,
stroke_style: CanvasFillOrStrokeStyle,
line_width: f64,
line_cap: LineCapStyle,
line_join: LineJoinStyle,
miter_limit: f64,
transform: Transform2D<f32>,
shadow_offset_x: f64,
shadow_offset_y: f64,
shadow_blur: f64,
shadow_color: RGBA,
}
impl CanvasContextState {
fn new() -> CanvasContextState {
let black = RGBA::new(0, 0, 0, 255);
CanvasContextState {
global_alpha: 1.0,
global_composition: CompositionOrBlending::default(),
image_smoothing_enabled: true,
fill_style: CanvasFillOrStrokeStyle::Color(black),
stroke_style: CanvasFillOrStrokeStyle::Color(black),
line_width: 1.0,
line_cap: LineCapStyle::Butt,
line_join: LineJoinStyle::Miter,
miter_limit: 10.0,
transform: Transform2D::identity(),
shadow_offset_x: 0.0,
shadow_offset_y: 0.0,
shadow_blur: 0.0,
shadow_color: RGBA::transparent(),
}
}
}
impl CanvasRenderingContext2D {
pub fn new_inherited(global: &GlobalScope,
canvas: Option<&HTMLCanvasElement>,
image_cache: Arc<ImageCache>,
base_url: ServoUrl,
size: Size2D<i32>)
-> CanvasRenderingContext2D {
debug!("Creating new canvas rendering context.");
let (sender, receiver) = ipc::channel().unwrap();
let script_to_constellation_chan = global.script_to_constellation_chan();
debug!("Asking constellation to create new canvas thread.");
script_to_constellation_chan.send(ScriptMsg::CreateCanvasPaintThread(size, sender)).unwrap();
let ipc_renderer = receiver.recv().unwrap();
debug!("Done.");
CanvasRenderingContext2D {
reflector_: Reflector::new(),
ipc_renderer: ipc_renderer,
canvas: canvas.map(Dom::from_ref),
image_cache: image_cache,
missing_image_urls: DomRefCell::new(Vec::new()),
base_url: base_url,
state: DomRefCell::new(CanvasContextState::new()),
saved_states: DomRefCell::new(Vec::new()),
origin_clean: Cell::new(true),
}
}
pub fn new(global: &GlobalScope,
canvas: &HTMLCanvasElement,
size: Size2D<i32>)
-> DomRoot<CanvasRenderingContext2D> {
let window = window_from_node(canvas);
let image_cache = window.image_cache();
let base_url = window.get_url();
let boxed = Box::new(CanvasRenderingContext2D::new_inherited(
global, Some(canvas), image_cache, base_url, size
));
reflect_dom_object(boxed, global, CanvasRenderingContext2DBinding::Wrap)
}
// https://html.spec.whatwg.org/multipage/#concept-canvas-set-bitmap-dimensions
pub fn set_bitmap_dimensions(&self, size: Size2D<i32>) {
self.reset_to_initial_state();
self.ipc_renderer
.send(CanvasMsg::Recreate(size))
.unwrap();
}
// https://html.spec.whatwg.org/multipage/#reset-the-rendering-context-to-its-default-state
fn reset_to_initial_state(&self) {
self.saved_states.borrow_mut().clear();
*self.state.borrow_mut() = CanvasContextState::new();
}
fn mark_as_dirty(&self) {
if let Some(ref canvas) = self.canvas {
canvas.upcast::<Node>().dirty(NodeDamage::OtherNodeDamage);
}
}
fn update_transform(&self) {
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetTransform(self.state.borrow().transform)))
.unwrap()
}
// It is used by DrawImage to calculate the size of the source and destination rectangles based
// on the drawImage call arguments
// source rectangle = area of the original image to be copied
// destination rectangle = area of the destination canvas where the source image is going to be drawn
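// Worked example (illustrative numbers only): for a 100x100 image, a source
// rect of (50, 50, 100, 100) is clipped to (50, 50, 50, 50); the width and
// height ratios are then both 0.5, so a requested 200x200 destination rect is
// shrunk to 100x100 in the same proportion.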
fn adjust_source_dest_rects(&self,
image_size: Size2D<f64>,
sx: f64,
sy: f64,
sw: f64,
sh: f64,
dx: f64,
dy: f64,
dw: f64,
dh: f64)
-> (Rect<f64>, Rect<f64>) {
let image_rect = Rect::new(Point2D::new(0f64, 0f64),
Size2D::new(image_size.width as f64, image_size.height as f64));
// The source rectangle is the rectangle whose corners are the four points (sx, sy),
// (sx+sw, sy), (sx+sw, sy+sh), (sx, sy+sh).
let source_rect = Rect::new(Point2D::new(sx.min(sx + sw), sy.min(sy + sh)),
Size2D::new(sw.abs(), sh.abs()));
// When the source rectangle is outside the source image,
// the source rectangle must be clipped to the source image
let source_rect_clipped = source_rect.intersection(&image_rect).unwrap_or(Rect::zero());
// Width and height ratios between the non clipped and clipped source rectangles
let width_ratio: f64 = source_rect_clipped.size.width / source_rect.size.width;
let height_ratio: f64 = source_rect_clipped.size.height / source_rect.size.height;
// When the source rectangle is outside the source image,
// the destination rectangle must be clipped in the same proportion.
let dest_rect_width_scaled: f64 = dw * width_ratio;
let dest_rect_height_scaled: f64 = dh * height_ratio;
// The destination rectangle is the rectangle whose corners are the four points (dx, dy),
// (dx+dw, dy), (dx+dw, dy+dh), (dx, dy+dh).
let dest_rect = Rect::new(Point2D::new(dx.min(dx + dest_rect_width_scaled),
dy.min(dy + dest_rect_height_scaled)),
Size2D::new(dest_rect_width_scaled.abs(),
dest_rect_height_scaled.abs()));
let source_rect = Rect::new(Point2D::new(source_rect_clipped.origin.x,
source_rect_clipped.origin.y),
Size2D::new(source_rect_clipped.size.width,
source_rect_clipped.size.height));
(source_rect, dest_rect)
}
// https://html.spec.whatwg.org/multipage/#the-image-argument-is-not-origin-clean
fn is_origin_clean(&self,
image: CanvasImageSource)
-> bool {
match image {
CanvasImageSource::HTMLCanvasElement(canvas) => {
canvas.origin_is_clean()
}
CanvasImageSource::CanvasRenderingContext2D(image) =>
image.origin_is_clean(),
CanvasImageSource::HTMLImageElement(image) => {
let canvas = match self.canvas {
Some(ref canvas) => canvas,
None => return false,
};
let image_origin = image.get_origin().expect("Image's origin is missing");
let document = document_from_node(&**canvas);
document.url().clone().origin() == image_origin
}
CanvasImageSource::CSSStyleValue(_) => true,
}
}
//
// drawImage coordinates explained
//
// Source Image Destination Canvas
// +-------------+ +-------------+
// | | | |
// |(sx,sy) | |(dx,dy) |
// | +----+ | | +----+ |
// | | | | | | | |
// | | |sh |---->| | |dh |
// | | | | | | | |
// | +----+ | | +----+ |
// | sw | | dw |
// | | | |
// +-------------+ +-------------+
//
//
// The rectangle (sx, sy, sw, sh) from the source image
// is copied on the rectangle (dx, dy, dw, dh) of the destination canvas
//
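// For example (illustrative numbers only): drawImage(image, 10, 10, 40, 40, 0, 0, 80, 80)
// copies the 40x40 region at (10, 10) of the source and paints it scaled up to
// 80x80 at the canvas origin.
//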
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn draw_image(&self,
image: CanvasImageSource,
sx: f64,
sy: f64,
sw: Option<f64>,
sh: Option<f64>,
dx: f64,
dy: f64,
dw: Option<f64>,
dh: Option<f64>)
-> ErrorResult {
let result = match image {
CanvasImageSource::HTMLCanvasElement(ref canvas) => {
self.draw_html_canvas_element(&canvas,
sx, sy, sw, sh,
dx, dy, dw, dh)
}
CanvasImageSource::CanvasRenderingContext2D(ref image) => {
self.draw_html_canvas_element(&image.Canvas(),
sx, sy, sw, sh,
dx, dy, dw, dh)
}
CanvasImageSource::HTMLImageElement(ref image) => {
// https://html.spec.whatwg.org/multipage/#img-error
// If the image argument is an HTMLImageElement object that is in the broken state,
// then throw an InvalidStateError exception
let url = image.get_url().ok_or(Error::InvalidState)?;
self.fetch_and_draw_image_data(url,
sx, sy, sw, sh,
dx, dy, dw, dh)
}
CanvasImageSource::CSSStyleValue(ref value) => {
let url = value.get_url(self.base_url.clone()).ok_or(Error::InvalidState)?;
self.fetch_and_draw_image_data(url,
sx, sy, sw, sh,
dx, dy, dw, dh)
}
};
if result.is_ok() && !self.is_origin_clean(image) {
self.set_origin_unclean()
}
result
}
fn draw_html_canvas_element(&self,
canvas: &HTMLCanvasElement,
sx: f64,
sy: f64,
sw: Option<f64>,
sh: Option<f64>,
dx: f64,
dy: f64,
dw: Option<f64>,
dh: Option<f64>)
-> ErrorResult {
// 1. Check the usability of the image argument
if !canvas.is_valid() {
return Err(Error::InvalidState);
}
let canvas_size = canvas.get_size();
let dw = dw.unwrap_or(canvas_size.width as f64);
let dh = dh.unwrap_or(canvas_size.height as f64);
let sw = sw.unwrap_or(canvas_size.width as f64);
let sh = sh.unwrap_or(canvas_size.height as f64);
let image_size = Size2D::new(canvas_size.width as f64, canvas_size.height as f64);
// 2. Establish the source and destination rectangles
let (source_rect, dest_rect) = self.adjust_source_dest_rects(image_size,
sx,
sy,
sw,
sh,
dx,
dy,
dw,
dh);
if !is_rect_valid(source_rect) || !is_rect_valid(dest_rect) {
return Ok(());
}
let smoothing_enabled = self.state.borrow().image_smoothing_enabled;
if self.canvas.as_ref().map_or(false, |c| &**c == canvas) {
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::DrawImageSelf(
image_size, dest_rect, source_rect, smoothing_enabled));
self.ipc_renderer.send(msg).unwrap();
} else {
let context = match canvas.get_or_init_2d_context() {
Some(context) => context,
None => return Err(Error::InvalidState),
};
let (sender, receiver) = ipc::channel().unwrap();
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::DrawImageInOther(
self.ipc_renderer.clone(),
image_size,
dest_rect,
source_rect,
smoothing_enabled,
sender));
let renderer = context.get_ipc_renderer();
renderer.send(msg).unwrap();
receiver.recv().unwrap();
};
self.mark_as_dirty();
Ok(())
}
fn fetch_and_draw_image_data(&self,
url: ServoUrl,
sx: f64,
sy: f64,
sw: Option<f64>,
sh: Option<f64>,
dx: f64,
dy: f64,
dw: Option<f64>,
dh: Option<f64>)
-> ErrorResult {
debug!("Fetching image {}.", url);
// https://html.spec.whatwg.org/multipage/#img-error
// If the image argument is an HTMLImageElement object that is in the broken state,
// then throw an InvalidStateError exception
let (image_data, image_size) = match self.fetch_image_data(url) {
Some((mut data, size)) => {
// Pixels come from cache in BGRA order and drawImage expects RGBA so we
// have to swap the color values
byte_swap_and_premultiply(&mut data);
let size = Size2D::new(size.width as f64, size.height as f64);
(data, size)
},
None => return Err(Error::InvalidState),
};
let dw = dw.unwrap_or(image_size.width);
let dh = dh.unwrap_or(image_size.height);
let sw = sw.unwrap_or(image_size.width);
let sh = sh.unwrap_or(image_size.height);
self.draw_image_data(image_data,
image_size,
sx, sy, sw, sh,
dx, dy, dw, dh)
}
fn draw_image_data(&self,
image_data: Vec<u8>,
image_size: Size2D<f64>,
sx: f64,
sy: f64,
sw: f64,
sh: f64,
dx: f64,
dy: f64,
dw: f64,
dh: f64)
-> ErrorResult {
// Establish the source and destination rectangles
let (source_rect, dest_rect) = self.adjust_source_dest_rects(image_size,
sx,
sy,
sw,
sh,
dx,
dy,
dw,
dh);
if !is_rect_valid(source_rect) || !is_rect_valid(dest_rect) {
return Ok(());
}
let smoothing_enabled = self.state.borrow().image_smoothing_enabled;
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::DrawImage(image_data,
image_size,
dest_rect,
source_rect,
smoothing_enabled)))
.unwrap();
self.mark_as_dirty();
Ok(())
}
fn fetch_image_data(&self, url: ServoUrl) -> Option<(Vec<u8>, Size2D<i32>)> {
let img = match self.request_image_from_cache(url) {
ImageResponse::Loaded(img, _) => img,
ImageResponse::PlaceholderLoaded(_, _) |
ImageResponse::None |
ImageResponse::MetadataLoaded(_) => {
return None;
}
};
let image_size = Size2D::new(img.width as i32, img.height as i32);
let image_data = match img.format {
PixelFormat::BGRA8 => img.bytes.to_vec(),
PixelFormat::K8 => panic!("K8 color type not supported"),
PixelFormat::RGB8 => panic!("RGB8 color type not supported"),
PixelFormat::KA8 => panic!("KA8 color type not supported"),
};
Some((image_data, image_size))
}
#[inline]
fn request_image_from_cache(&self, url: ServoUrl) -> ImageResponse {
let response = self.image_cache
.find_image_or_metadata(url.clone(),
UsePlaceholder::No,
CanRequestImages::No);
match response {
Ok(ImageOrMetadataAvailable::ImageAvailable(image, url)) =>
ImageResponse::Loaded(image, url),
Err(ImageState::Pending(_)) =>
ImageResponse::None,
_ => {
// Rather annoyingly, we get the same response back from
// A load which really failed and from a load which hasn't started yet.
self.missing_image_urls.borrow_mut().push(url);
ImageResponse::None
},
}
}
pub fn take_missing_image_urls(&self) -> Vec<ServoUrl> {
mem::replace(&mut self.missing_image_urls.borrow_mut(), vec![])
}
fn create_drawable_rect(&self, x: f64, y: f64, w: f64, h: f64) -> Option<Rect<f32>> {
if !([x, y, w, h].iter().all(|val| val.is_finite())) {
return None;
}
if w == 0.0 && h == 0.0 {
return None;
}
Some(Rect::new(Point2D::new(x as f32, y as f32),
Size2D::new(w as f32, h as f32)))
}
fn parse_color(&self, string: &str) -> Result<RGBA, ()> {
let mut input = ParserInput::new(string);
let mut parser = Parser::new(&mut input);
let color = CSSColor::parse(&mut parser);
if parser.is_exhausted() {
match color {
Ok(CSSColor::RGBA(rgba)) => Ok(rgba),
Ok(CSSColor::CurrentColor) => {
// TODO: https://github.com/whatwg/html/issues/1099
// Reconsider how to calculate currentColor in a display:none canvas
// TODO: will need to check that the context bitmap mode is fixed
// once we implement CanvasProxy
let canvas = match self.canvas {
// https://drafts.css-houdini.org/css-paint-api/#2d-rendering-context
// Whenever "currentColor" is used as a color in the PaintRenderingContext2D API,
// it is treated as opaque black.
None => return Ok(RGBA::new(0, 0, 0, 255)),
Some(ref canvas) => &**canvas,
};
let window = window_from_node(canvas);
let style = window.GetComputedStyle(canvas.upcast(), None);
let element_not_rendered =
!canvas.upcast::<Node>().is_in_doc() ||
style.GetPropertyValue(DOMString::from("display")) == "none";
if element_not_rendered {
Ok(RGBA::new(0, 0, 0, 255))
} else {
self.parse_color(&style.GetPropertyValue(DOMString::from("color")))
}
},
_ => Err(())
}
} else {
Err(())
}
}
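// For example (illustrative): parse_color("#00ff00") yields an opaque green
// RGBA, while input the parser cannot fully consume yields Err(()).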
pub fn get_ipc_renderer(&self) -> IpcSender<CanvasMsg> {
self.ipc_renderer.clone()
}
pub fn origin_is_clean(&self) -> bool {
self.origin_clean.get()
}
fn set_origin_unclean(&self) {
self.origin_clean.set(false)
}
}
pub trait LayoutCanvasRenderingContext2DHelpers {
#[allow(unsafe_code)]
unsafe fn get_ipc_renderer(&self) -> IpcSender<CanvasMsg>;
}
impl LayoutCanvasRenderingContext2DHelpers for LayoutDom<CanvasRenderingContext2D> {
#[allow(unsafe_code)]
unsafe fn get_ipc_renderer(&self) -> IpcSender<CanvasMsg> {
(*self.unsafe_get()).ipc_renderer.clone()
}
}
// We add a guard to each of the methods, per the spec:
// http://www.w3.org/html/wg/drafts/2dcontext/html5_canvas_CR/
//
// > Except where otherwise specified, for the 2D context interface,
// > any method call with a numeric argument whose value is infinite or a NaN value must be ignored.
//
// Restricted values are guarded in glue code. Therefore we need not add a guard.
//
// FIXME: this behavior might be generated by some annotations to the IDL.
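// For example, a script call like ctx.lineTo(NaN, 10.0) reaches LineTo below
// and is silently dropped by its is_finite() check.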
impl CanvasRenderingContext2DMethods for CanvasRenderingContext2D {
// https://html.spec.whatwg.org/multipage/#dom-context-2d-canvas
fn Canvas(&self) -> DomRoot<HTMLCanvasElement> {
// This method is not called from a paint worklet rendering context,
// so it's OK to panic if self.canvas is None.
DomRoot::from_ref(self.canvas.as_ref().expect("No canvas."))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-save
fn Save(&self) {
self.saved_states.borrow_mut().push(self.state.borrow().clone());
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::SaveContext)).unwrap();
}
#[allow(unrooted_must_root)]
// https://html.spec.whatwg.org/multipage/#dom-context-2d-restore
fn Restore(&self) {
let mut saved_states = self.saved_states.borrow_mut();
if let Some(state) = saved_states.pop() {
self.state.borrow_mut().clone_from(&state);
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::RestoreContext)).unwrap();
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-scale
fn Scale(&self, x: f64, y: f64) {
if !(x.is_finite() && y.is_finite()) {
return;
}
let transform = self.state.borrow().transform;
self.state.borrow_mut().transform = transform.pre_scale(x as f32, y as f32);
self.update_transform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-rotate
fn Rotate(&self, angle: f64) {
if angle == 0.0 || !angle.is_finite() {
return;
}
let (sin, cos) = (angle.sin(), angle.cos());
let transform = self.state.borrow().transform;
self.state.borrow_mut().transform = transform.pre_mul(
&Transform2D::row_major(cos as f32, sin as f32,
-sin as f32, cos as f32,
0.0, 0.0));
self.update_transform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-translate
fn Translate(&self, x: f64, y: f64) {
if !(x.is_finite() && y.is_finite()) {
return;
}
let transform = self.state.borrow().transform;
self.state.borrow_mut().transform = transform.pre_translate(vec2(x as f32, y as f32));
self.update_transform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-transform
fn Transform(&self, a: f64, b: f64, c: f64, d: f64, e: f64, f: f64) {
if !(a.is_finite() && b.is_finite() && c.is_finite() &&
d.is_finite() && e.is_finite() && f.is_finite()) {
return;
}
let transform = self.state.borrow().transform;
self.state.borrow_mut().transform = transform.pre_mul(
&Transform2D::row_major(a as f32, b as f32, c as f32, d as f32, e as f32, f as f32));
self.update_transform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-settransform
fn SetTransform(&self, a: f64, b: f64, c: f64, d: f64, e: f64, f: f64) {
if !(a.is_finite() && b.is_finite() && c.is_finite() &&
d.is_finite() && e.is_finite() && f.is_finite()) {
return;
}
self.state.borrow_mut().transform =
Transform2D::row_major(a as f32, b as f32, c as f32, d as f32, e as f32, f as f32);
self.update_transform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-resettransform
fn ResetTransform(&self) {
self.state.borrow_mut().transform = Transform2D::identity();
self.update_transform()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalalpha
fn GlobalAlpha(&self) -> f64 {
let state = self.state.borrow();
state.global_alpha
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalalpha
fn SetGlobalAlpha(&self, alpha: f64) {
if !alpha.is_finite() || alpha > 1.0 || alpha < 0.0 {
return;
}
self.state.borrow_mut().global_alpha = alpha;
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetGlobalAlpha(alpha as f32)))
.unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalcompositeoperation
fn GlobalCompositeOperation(&self) -> DOMString {
let state = self.state.borrow();
match state.global_composition {
CompositionOrBlending::Composition(op) => DOMString::from(op.to_str()),
CompositionOrBlending::Blending(op) => DOMString::from(op.to_str()),
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-globalcompositeoperation
fn SetGlobalCompositeOperation(&self, op_str: DOMString) {
if let Ok(op) = CompositionOrBlending::from_str(&op_str) {
self.state.borrow_mut().global_composition = op;
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetGlobalComposition(op)))
.unwrap()
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-fillrect
fn FillRect(&self, x: f64, y: f64, width: f64, height: f64) {
if let Some(rect) = self.create_drawable_rect(x, y, width, height) {
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::FillRect(rect))).unwrap();
self.mark_as_dirty();
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-clearrect
fn ClearRect(&self, x: f64, y: f64, width: f64, height: f64) {
if let Some(rect) = self.create_drawable_rect(x, y, width, height) {
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::ClearRect(rect)))
.unwrap();
self.mark_as_dirty();
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-strokerect
fn StrokeRect(&self, x: f64, y: f64, width: f64, height: f64) {
if let Some(rect) = self.create_drawable_rect(x, y, width, height) {
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::StrokeRect(rect)))
.unwrap();
self.mark_as_dirty();
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-beginpath
fn BeginPath(&self) {
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::BeginPath)).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-closepath
fn ClosePath(&self) {
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::ClosePath)).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-fill
fn Fill(&self, _: CanvasFillRule) {
// TODO: Process fill rule
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::Fill)).unwrap();
self.mark_as_dirty();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-stroke
fn Stroke(&self) {
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::Stroke)).unwrap();
self.mark_as_dirty();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-clip
fn Clip(&self, _: CanvasFillRule) {
// TODO: Process fill rule
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::Clip)).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-ispointinpath
fn IsPointInPath(&self, x: f64, y: f64, fill_rule: CanvasFillRule) -> bool {
let fill_rule = match fill_rule {
CanvasFillRule::Nonzero => FillRule::Nonzero,
CanvasFillRule::Evenodd => FillRule::Evenodd,
};
let (sender, receiver) = ipc::channel::<bool>().unwrap();
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::IsPointInPath(x, y, fill_rule, sender)))
.unwrap();
receiver.recv().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-filltext
fn FillText(&self, text: DOMString, x: f64, y: f64, max_width: Option<f64>) {
let parsed_text: String = text.into();
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::FillText(parsed_text, x, y, max_width))).unwrap();
self.mark_as_dirty();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn DrawImage(&self,
image: CanvasImageSource,
dx: f64,
dy: f64)
-> ErrorResult {
if !(dx.is_finite() && dy.is_finite()) {
return Ok(());
}
self.draw_image(image, 0f64, 0f64, None, None, dx, dy, None, None)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn DrawImage_(&self,
image: CanvasImageSource,
dx: f64,
dy: f64,
dw: f64,
dh: f64)
-> ErrorResult {
if !(dx.is_finite() && dy.is_finite() && dw.is_finite() && dh.is_finite()) {
return Ok(());
}
self.draw_image(image, 0f64, 0f64, None, None, dx, dy, Some(dw), Some(dh))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-drawimage
fn DrawImage__(&self,
image: CanvasImageSource,
sx: f64,
sy: f64,
sw: f64,
sh: f64,
dx: f64,
dy: f64,
dw: f64,
dh: f64)
-> ErrorResult {
if !(sx.is_finite() && sy.is_finite() && sw.is_finite() && sh.is_finite() &&
dx.is_finite() && dy.is_finite() && dw.is_finite() && dh.is_finite()) {
return Ok(());
}
self.draw_image(image,
sx,
sy,
Some(sw),
Some(sh),
dx,
dy,
Some(dw),
Some(dh))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-moveto
fn MoveTo(&self, x: f64, y: f64) {
if !(x.is_finite() && y.is_finite()) {
return;
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::MoveTo(Point2D::new(x as f32, y as f32)));
self.ipc_renderer.send(msg).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-lineto
fn LineTo(&self, x: f64, y: f64) {
if !(x.is_finite() && y.is_finite()) {
return;
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::LineTo(Point2D::new(x as f32, y as f32)));
self.ipc_renderer.send(msg).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-rect
fn Rect(&self, x: f64, y: f64, width: f64, height: f64) {
if [x, y, width, height].iter().all(|val| val.is_finite()) {
let rect = Rect::new(Point2D::new(x as f32, y as f32),
Size2D::new(width as f32, height as f32));
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::Rect(rect));
self.ipc_renderer.send(msg).unwrap();
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-quadraticcurveto
fn QuadraticCurveTo(&self, cpx: f64, cpy: f64, x: f64, y: f64) {
if !(cpx.is_finite() && cpy.is_finite() && x.is_finite() && y.is_finite()) {
return;
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::QuadraticCurveTo(Point2D::new(cpx as f32,
cpy as f32),<|fim▁hole|> y as f32)));
self.ipc_renderer.send(msg).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-beziercurveto
fn BezierCurveTo(&self, cp1x: f64, cp1y: f64, cp2x: f64, cp2y: f64, x: f64, y: f64) {
if !(cp1x.is_finite() && cp1y.is_finite() && cp2x.is_finite() && cp2y.is_finite() &&
x.is_finite() && y.is_finite()) {
return;
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::BezierCurveTo(Point2D::new(cp1x as f32,
cp1y as f32),
Point2D::new(cp2x as f32,
cp2y as f32),
Point2D::new(x as f32, y as f32)));
self.ipc_renderer.send(msg).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-arc
fn Arc(&self, x: f64, y: f64, r: f64, start: f64, end: f64, ccw: bool) -> ErrorResult {
if !([x, y, r, start, end].iter().all(|x| x.is_finite())) {
return Ok(());
}
if r < 0.0 {
return Err(Error::IndexSize);
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::Arc(Point2D::new(x as f32, y as f32),
r as f32,
start as f32,
end as f32,
ccw));
self.ipc_renderer.send(msg).unwrap();
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-arcto
fn ArcTo(&self, cp1x: f64, cp1y: f64, cp2x: f64, cp2y: f64, r: f64) -> ErrorResult {
if !([cp1x, cp1y, cp2x, cp2y, r].iter().all(|x| x.is_finite())) {
return Ok(());
}
if r < 0.0 {
return Err(Error::IndexSize);
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::ArcTo(Point2D::new(cp1x as f32, cp1y as f32),
Point2D::new(cp2x as f32, cp2y as f32),
r as f32));
self.ipc_renderer.send(msg).unwrap();
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-ellipse
fn Ellipse(&self, x: f64, y: f64, rx: f64, ry: f64, rotation: f64, start: f64, end: f64, ccw: bool) -> ErrorResult {
if !([x, y, rx, ry, rotation, start, end].iter().all(|x| x.is_finite())) {
return Ok(());
}
if rx < 0.0 || ry < 0.0 {
return Err(Error::IndexSize);
}
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::Ellipse(Point2D::new(x as f32, y as f32),
rx as f32,
ry as f32,
rotation as f32,
start as f32,
end as f32,
ccw));
self.ipc_renderer.send(msg).unwrap();
Ok(())
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-imagesmoothingenabled
fn ImageSmoothingEnabled(&self) -> bool {
let state = self.state.borrow();
state.image_smoothing_enabled
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-imagesmoothingenabled
fn SetImageSmoothingEnabled(&self, value: bool) {
self.state.borrow_mut().image_smoothing_enabled = value;
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-strokestyle
fn StrokeStyle(&self) -> StringOrCanvasGradientOrCanvasPattern {
match self.state.borrow().stroke_style {
CanvasFillOrStrokeStyle::Color(ref rgba) => {
let mut result = String::new();
serialize(rgba, &mut result).unwrap();
StringOrCanvasGradientOrCanvasPattern::String(DOMString::from(result))
},
CanvasFillOrStrokeStyle::Gradient(ref gradient) => {
StringOrCanvasGradientOrCanvasPattern::CanvasGradient(DomRoot::from_ref(&*gradient))
},
CanvasFillOrStrokeStyle::Pattern(ref pattern) => {
StringOrCanvasGradientOrCanvasPattern::CanvasPattern(DomRoot::from_ref(&*pattern))
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-strokestyle
fn SetStrokeStyle(&self, value: StringOrCanvasGradientOrCanvasPattern) {
match value {
StringOrCanvasGradientOrCanvasPattern::String(string) => {
if let Ok(rgba) = self.parse_color(&string) {
self.state.borrow_mut().stroke_style = CanvasFillOrStrokeStyle::Color(rgba);
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetStrokeStyle(
FillOrStrokeStyle::Color(rgba))))
.unwrap();
}
},
StringOrCanvasGradientOrCanvasPattern::CanvasGradient(gradient) => {
self.state.borrow_mut().stroke_style =
CanvasFillOrStrokeStyle::Gradient(Dom::from_ref(&*gradient));
let msg = CanvasMsg::Canvas2d(
Canvas2dMsg::SetStrokeStyle(gradient.to_fill_or_stroke_style()));
self.ipc_renderer.send(msg).unwrap();
},
StringOrCanvasGradientOrCanvasPattern::CanvasPattern(pattern) => {
self.state.borrow_mut().stroke_style =
CanvasFillOrStrokeStyle::Pattern(Dom::from_ref(&*pattern));
let msg = CanvasMsg::Canvas2d(
Canvas2dMsg::SetStrokeStyle(pattern.to_fill_or_stroke_style()));
self.ipc_renderer.send(msg).unwrap();
if !pattern.origin_is_clean() {
self.set_origin_unclean();
}
}
}
}
    // https://html.spec.whatwg.org/multipage/#dom-context-2d-fillstyle
fn FillStyle(&self) -> StringOrCanvasGradientOrCanvasPattern {
match self.state.borrow().fill_style {
CanvasFillOrStrokeStyle::Color(ref rgba) => {
let mut result = String::new();
serialize(rgba, &mut result).unwrap();
StringOrCanvasGradientOrCanvasPattern::String(DOMString::from(result))
},
CanvasFillOrStrokeStyle::Gradient(ref gradient) => {
StringOrCanvasGradientOrCanvasPattern::CanvasGradient(DomRoot::from_ref(&*gradient))
},
CanvasFillOrStrokeStyle::Pattern(ref pattern) => {
StringOrCanvasGradientOrCanvasPattern::CanvasPattern(DomRoot::from_ref(&*pattern))
}
}
}
    // https://html.spec.whatwg.org/multipage/#dom-context-2d-fillstyle
fn SetFillStyle(&self, value: StringOrCanvasGradientOrCanvasPattern) {
match value {
StringOrCanvasGradientOrCanvasPattern::String(string) => {
if let Ok(rgba) = self.parse_color(&string) {
self.state.borrow_mut().fill_style = CanvasFillOrStrokeStyle::Color(rgba);
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetFillStyle(
FillOrStrokeStyle::Color(rgba))))
.unwrap()
}
}
StringOrCanvasGradientOrCanvasPattern::CanvasGradient(gradient) => {
self.state.borrow_mut().fill_style =
CanvasFillOrStrokeStyle::Gradient(Dom::from_ref(&*gradient));
let msg = CanvasMsg::Canvas2d(
Canvas2dMsg::SetFillStyle(gradient.to_fill_or_stroke_style()));
self.ipc_renderer.send(msg).unwrap();
}
StringOrCanvasGradientOrCanvasPattern::CanvasPattern(pattern) => {
self.state.borrow_mut().fill_style =
CanvasFillOrStrokeStyle::Pattern(Dom::from_ref(&*pattern));
let msg = CanvasMsg::Canvas2d(
Canvas2dMsg::SetFillStyle(pattern.to_fill_or_stroke_style()));
self.ipc_renderer.send(msg).unwrap();
if !pattern.origin_is_clean() {
self.set_origin_unclean();
}
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createimagedata
fn CreateImageData(&self, sw: Finite<f64>, sh: Finite<f64>) -> Fallible<DomRoot<ImageData>> {
if *sw == 0.0 || *sh == 0.0 {
return Err(Error::IndexSize);
}
let sw = cmp::max(1, sw.abs().to_u32().unwrap());
let sh = cmp::max(1, sh.abs().to_u32().unwrap());
ImageData::new(&self.global(), sw, sh, None)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createimagedata
fn CreateImageData_(&self, imagedata: &ImageData) -> Fallible<DomRoot<ImageData>> {
ImageData::new(&self.global(),
imagedata.Width(),
imagedata.Height(),
None)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-getimagedata
fn GetImageData(&self,
sx: Finite<f64>,
sy: Finite<f64>,
sw: Finite<f64>,
sh: Finite<f64>)
-> Fallible<DomRoot<ImageData>> {
if !self.origin_is_clean() {
return Err(Error::Security)
}
let mut sx = *sx;
let mut sy = *sy;
let mut sw = *sw;
let mut sh = *sh;
if sw == 0.0 || sh == 0.0 {
return Err(Error::IndexSize);
}
if sw < 0.0 {
sw = -sw;
sx -= sw;
}
if sh < 0.0 {
sh = -sh;
sy -= sh;
}
let sh = cmp::max(1, sh.to_u32().unwrap());
let sw = cmp::max(1, sw.to_u32().unwrap());
let (sender, receiver) = ipc::channel::<Vec<u8>>().unwrap();
let dest_rect = Rect::new(Point2D::new(sx.to_i32().unwrap(), sy.to_i32().unwrap()),
Size2D::new(sw as i32, sh as i32));
let canvas_size = self.canvas.as_ref().map(|c| c.get_size()).unwrap_or(Size2D::zero());
let canvas_size = Size2D::new(canvas_size.width as f64, canvas_size.height as f64);
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::GetImageData(dest_rect, canvas_size, sender)))
.unwrap();
let mut data = receiver.recv().unwrap();
// Un-premultiply alpha
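        // Illustrative note (added for clarity): UNPREMULTIPLY_TABLE is assumed
        // to hold a precomputed `min(255, round(channel * 255 / alpha))` for every
        // (alpha, channel) pair, so e.g. a premultiplied channel value of 64 with
        // alpha 128 maps back to roughly 128.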
for chunk in data.chunks_mut(4) {
let alpha = chunk[3] as usize;
chunk[0] = UNPREMULTIPLY_TABLE[256 * alpha + chunk[0] as usize];
chunk[1] = UNPREMULTIPLY_TABLE[256 * alpha + chunk[1] as usize];
chunk[2] = UNPREMULTIPLY_TABLE[256 * alpha + chunk[2] as usize];
}
ImageData::new(&self.global(), sw, sh, Some(data))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata
fn PutImageData(&self, imagedata: &ImageData, dx: Finite<f64>, dy: Finite<f64>) {
self.PutImageData_(imagedata,
dx,
dy,
Finite::wrap(0f64),
Finite::wrap(0f64),
Finite::wrap(imagedata.Width() as f64),
Finite::wrap(imagedata.Height() as f64))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-putimagedata
fn PutImageData_(&self,
imagedata: &ImageData,
dx: Finite<f64>,
dy: Finite<f64>,
dirty_x: Finite<f64>,
dirty_y: Finite<f64>,
dirty_width: Finite<f64>,
dirty_height: Finite<f64>) {
let data = imagedata.get_data_array();
let offset = Vector2D::new(*dx, *dy);
let image_data_size = Size2D::new(imagedata.Width() as f64, imagedata.Height() as f64);
let dirty_rect = Rect::new(Point2D::new(*dirty_x, *dirty_y),
Size2D::new(*dirty_width, *dirty_height));
let msg = CanvasMsg::Canvas2d(Canvas2dMsg::PutImageData(data,
offset,
image_data_size,
dirty_rect));
self.ipc_renderer.send(msg).unwrap();
self.mark_as_dirty();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createlineargradient
fn CreateLinearGradient(&self,
x0: Finite<f64>,
y0: Finite<f64>,
x1: Finite<f64>,
y1: Finite<f64>)
-> DomRoot<CanvasGradient> {
CanvasGradient::new(&self.global(),
CanvasGradientStyle::Linear(LinearGradientStyle::new(*x0,
*y0,
*x1,
*y1,
Vec::new())))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createradialgradient
fn CreateRadialGradient(&self,
x0: Finite<f64>,
y0: Finite<f64>,
r0: Finite<f64>,
x1: Finite<f64>,
y1: Finite<f64>,
r1: Finite<f64>)
-> Fallible<DomRoot<CanvasGradient>> {
if *r0 < 0. || *r1 < 0. {
return Err(Error::IndexSize);
}
Ok(CanvasGradient::new(&self.global(),
CanvasGradientStyle::Radial(RadialGradientStyle::new(*x0,
*y0,
*r0,
*x1,
*y1,
*r1,
Vec::new()))))
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-createpattern
fn CreatePattern(&self,
image: CanvasImageSource,
mut repetition: DOMString)
-> Fallible<DomRoot<CanvasPattern>> {
let (image_data, image_size) = match image {
CanvasImageSource::HTMLImageElement(ref image) => {
// https://html.spec.whatwg.org/multipage/#img-error
// If the image argument is an HTMLImageElement object that is in the broken state,
// then throw an InvalidStateError exception
image.get_url()
.and_then(|url| self.fetch_image_data(url))
.ok_or(Error::InvalidState)?
},
CanvasImageSource::HTMLCanvasElement(ref canvas) => {
let _ = canvas.get_or_init_2d_context();
canvas.fetch_all_data().ok_or(Error::InvalidState)?
},
CanvasImageSource::CanvasRenderingContext2D(ref context) => {
let canvas = context.Canvas();
let _ = canvas.get_or_init_2d_context();
canvas.fetch_all_data().ok_or(Error::InvalidState)?
}
CanvasImageSource::CSSStyleValue(ref value) => {
value.get_url(self.base_url.clone())
.and_then(|url| self.fetch_image_data(url))
.ok_or(Error::InvalidState)?
}
};
if repetition.is_empty() {
repetition.push_str("repeat");
}
if let Ok(rep) = RepetitionStyle::from_str(&repetition) {
Ok(CanvasPattern::new(&self.global(),
image_data,
image_size,
rep,
self.is_origin_clean(image)))
} else {
Err(Error::Syntax)
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linewidth
fn LineWidth(&self) -> f64 {
let state = self.state.borrow();
state.line_width
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linewidth
fn SetLineWidth(&self, width: f64) {
if !width.is_finite() || width <= 0.0 {
return;
}
self.state.borrow_mut().line_width = width;
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetLineWidth(width as f32)))
.unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linecap
fn LineCap(&self) -> CanvasLineCap {
match self.state.borrow().line_cap {
LineCapStyle::Butt => CanvasLineCap::Butt,
LineCapStyle::Round => CanvasLineCap::Round,
LineCapStyle::Square => CanvasLineCap::Square,
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linecap
fn SetLineCap(&self, cap: CanvasLineCap) {
let line_cap = match cap {
CanvasLineCap::Butt => LineCapStyle::Butt,
CanvasLineCap::Round => LineCapStyle::Round,
CanvasLineCap::Square => LineCapStyle::Square,
};
self.state.borrow_mut().line_cap = line_cap;
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetLineCap(line_cap))).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linejoin
fn LineJoin(&self) -> CanvasLineJoin {
match self.state.borrow().line_join {
LineJoinStyle::Round => CanvasLineJoin::Round,
LineJoinStyle::Bevel => CanvasLineJoin::Bevel,
LineJoinStyle::Miter => CanvasLineJoin::Miter,
}
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-linejoin
fn SetLineJoin(&self, join: CanvasLineJoin) {
let line_join = match join {
CanvasLineJoin::Round => LineJoinStyle::Round,
CanvasLineJoin::Bevel => LineJoinStyle::Bevel,
CanvasLineJoin::Miter => LineJoinStyle::Miter,
};
self.state.borrow_mut().line_join = line_join;
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetLineJoin(line_join))).unwrap();
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-miterlimit
fn MiterLimit(&self) -> f64 {
let state = self.state.borrow();
state.miter_limit
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-miterlimit
fn SetMiterLimit(&self, limit: f64) {
if !limit.is_finite() || limit <= 0.0 {
return;
}
self.state.borrow_mut().miter_limit = limit;
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetMiterLimit(limit as f32)))
.unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsetx
fn ShadowOffsetX(&self) -> f64 {
self.state.borrow().shadow_offset_x
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsetx
fn SetShadowOffsetX(&self, value: f64) {
if !value.is_finite() || value == self.state.borrow().shadow_offset_x {
return;
}
self.state.borrow_mut().shadow_offset_x = value;
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetShadowOffsetX(value))).unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsety
fn ShadowOffsetY(&self) -> f64 {
self.state.borrow().shadow_offset_y
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowoffsety
fn SetShadowOffsetY(&self, value: f64) {
if !value.is_finite() || value == self.state.borrow().shadow_offset_y {
return;
}
self.state.borrow_mut().shadow_offset_y = value;
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetShadowOffsetY(value))).unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowblur
fn ShadowBlur(&self) -> f64 {
self.state.borrow().shadow_blur
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowblur
fn SetShadowBlur(&self, value: f64) {
if !value.is_finite() || value < 0f64 || value == self.state.borrow().shadow_blur {
return;
}
self.state.borrow_mut().shadow_blur = value;
self.ipc_renderer.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetShadowBlur(value))).unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowcolor
fn ShadowColor(&self) -> DOMString {
let mut result = String::new();
serialize(&self.state.borrow().shadow_color, &mut result).unwrap();
DOMString::from(result)
}
// https://html.spec.whatwg.org/multipage/#dom-context-2d-shadowcolor
fn SetShadowColor(&self, value: DOMString) {
if let Ok(color) = parse_color(&value) {
self.state.borrow_mut().shadow_color = color;
self.ipc_renderer
.send(CanvasMsg::Canvas2d(Canvas2dMsg::SetShadowColor(color)))
.unwrap()
}
}
}
impl Drop for CanvasRenderingContext2D {
fn drop(&mut self) {
if let Err(err) = self.ipc_renderer.send(CanvasMsg::Close) {
warn!("Could not close canvas: {}", err)
}
}
}
pub fn parse_color(string: &str) -> Result<RGBA, ()> {
let mut input = ParserInput::new(string);
let mut parser = Parser::new(&mut input);
match CSSColor::parse(&mut parser) {
Ok(CSSColor::RGBA(rgba)) => {
if parser.is_exhausted() {
Ok(rgba)
} else {
Err(())
}
},
_ => Err(()),
}
}
// Used by drawImage to determine if a source or destination rectangle is valid
// Only the size is validated here: width and height must both be greater than zero
fn is_rect_valid(rect: Rect<f64>) -> bool {
rect.size.width > 0.0 && rect.size.height > 0.0
}
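// Illustrative examples (not in the original source): a rectangle at
// (-5.0, -5.0) with size 10.0 x 10.0 is valid, since only the size is
// checked, while any rectangle with a zero or negative width or height
// is rejected.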
// https://html.spec.whatwg.org/multipage/#serialisation-of-a-colour
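// Worked examples (added for clarity): an opaque colour takes the hex
// branch, so RGBA { red: 255, green: 0, blue: 0, alpha: 255 } serializes
// to "#ff0000" — each `{:x}` below prints a single nibble; any alpha
// below 255 takes the rgba() branch, e.g. alpha 128 yields approximately
// "rgba(255, 0, 0, 0.502)" (128 / 255).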
fn serialize<W>(color: &RGBA, dest: &mut W) -> fmt::Result
where W: fmt::Write
{
let red = color.red;
let green = color.green;
let blue = color.blue;
if color.alpha == 255 {
write!(dest,
"#{:x}{:x}{:x}{:x}{:x}{:x}",
red >> 4,
red & 0xF,
green >> 4,
green & 0xF,
blue >> 4,
blue & 0xF)
} else {
write!(dest, "rgba({}, {}, {}, {})", red, green, blue, color.alpha_f32())
}
}<|fim▁end|> | Point2D::new(x as f32, |
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>// =============================
// Email: [email protected]
// www.ebenmonney.com/templates
// =============================
import { Component, ViewEncapsulation, OnInit, OnDestroy, ViewChildren, AfterViewInit, QueryList, ElementRef } from '@angular/core';
import { Router, NavigationStart } from '@angular/router';
import { ToastaService, ToastaConfig, ToastOptions, ToastData } from 'ngx-toasta';
import { ModalDirective } from 'ngx-bootstrap/modal';
import { AlertService, AlertDialog, DialogType, AlertCommand, AlertMessage, MessageSeverity } from '../services/alert.service';
import { NotificationService } from '../services/notification.service';
import { AppTranslationService } from '../services/app-translation.service';
import { AccountService } from '../services/account.service';
import { LocalStoreManager } from '../services/local-store-manager.service';
import { AppTitleService } from '../services/app-title.service';
import { AuthService } from '../services/auth.service';
import { ConfigurationService } from '../services/configuration.service';
import { Permission } from '../models/permission.model';
import { LoginComponent } from '../components/login/login.component';
const alertify: any = require('../assets/scripts/alertify.js');
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.scss']
})
export class AppComponent implements OnInit, AfterViewInit, OnDestroy {
isAppLoaded: boolean;
isUserLoggedIn: boolean;
shouldShowLoginModal: boolean;
removePrebootScreen: boolean;
newNotificationCount = 0;
appTitle = 'QuickApp';
stickyToasties: number[] = [];
dataLoadingConsecutiveFailures = 0;
notificationsLoadingSubscription: any;
@ViewChildren('loginModal,loginControl')
modalLoginControls: QueryList<any>;
loginModal: ModalDirective;
loginControl: LoginComponent;
gT = (key: string | Array<string>, interpolateParams?: object) => this.translationService.getTranslation(key, interpolateParams);
get notificationsTitle() {
if (this.newNotificationCount) {
return `${this.gT('app.Notifications')} (${this.newNotificationCount} ${this.gT('app.New')})`;
} else {
return this.gT('app.Notifications');
}
}
constructor(
storageManager: LocalStoreManager,
private toastaService: ToastaService,
private toastaConfig: ToastaConfig,
private accountService: AccountService,
private alertService: AlertService,
private notificationService: NotificationService,
private appTitleService: AppTitleService,
private authService: AuthService,
private translationService: AppTranslationService,
public configurations: ConfigurationService,
public router: Router) {
storageManager.initialiseStorageSyncListener();
this.toastaConfig.theme = 'bootstrap';
this.toastaConfig.position = 'top-right';
this.toastaConfig.limit = 100;
this.toastaConfig.showClose = true;
this.toastaConfig.showDuration = false;
this.appTitleService.appName = this.appTitle;
}
ngAfterViewInit() {
this.modalLoginControls.changes.subscribe((controls: QueryList<any>) => {
controls.forEach(control => {
if (control) {
if (control instanceof LoginComponent) {
this.loginControl = control;
this.loginControl.modalClosedCallback = () => this.loginModal.hide();
} else {
this.loginModal = control;
this.loginModal.show();
}
}
});
});
}
onLoginModalShown() {
    this.alertService.showStickyMessage('Session Expired', 'Your session has expired. Please log in again', MessageSeverity.info);
}
onLoginModalHidden() {
this.alertService.resetStickyMessage();
this.loginControl.reset();
this.shouldShowLoginModal = false;
if (this.authService.isSessionExpired) {
      this.alertService.showStickyMessage('Session Expired', 'Your session has expired. Please log in again to renew your session', MessageSeverity.warn);
}
}
onLoginModalHide() {
this.alertService.resetStickyMessage();
}
ngOnInit() {
this.isUserLoggedIn = this.authService.isLoggedIn;
// 0.5 extra sec to display preboot/loader information. Preboot screen is removed 0.5 sec later
setTimeout(() => this.isAppLoaded = true, 500);
setTimeout(() => this.removePrebootScreen = true, 1000);
setTimeout(() => {
if (this.isUserLoggedIn) {
this.alertService.resetStickyMessage();
// if (!this.authService.isSessionExpired)
this.alertService.showMessage('Login', `Welcome back ${this.userName}!`, MessageSeverity.default);
// else
// this.alertService.showStickyMessage("Session Expired", "Your Session has expired. Please log in again", MessageSeverity.warn);
}
}, 2000);
this.alertService.getDialogEvent().subscribe(alert => this.showDialog(alert));
this.alertService.getMessageEvent().subscribe(message => this.showToast(message));
this.authService.reLoginDelegate = () => this.shouldShowLoginModal = true;
this.authService.getLoginStatusEvent().subscribe(isLoggedIn => {
this.isUserLoggedIn = isLoggedIn;
if (this.isUserLoggedIn) {
this.initNotificationsLoading();
} else {
this.unsubscribeNotifications();
}
setTimeout(() => {
if (!this.isUserLoggedIn) {
this.alertService.showMessage('Session Ended!', '', MessageSeverity.default);
}
}, 500);
});
}
ngOnDestroy() {
this.unsubscribeNotifications();
}
private unsubscribeNotifications() {
if (this.notificationsLoadingSubscription) {
this.notificationsLoadingSubscription.unsubscribe();
}
}
initNotificationsLoading() {
this.notificationsLoadingSubscription = this.notificationService.getNewNotificationsPeriodically()
.subscribe(notifications => {
this.dataLoadingConsecutiveFailures = 0;
this.newNotificationCount = notifications.filter(n => !n.isRead).length;
},
error => {
this.alertService.logError(error);
if (this.dataLoadingConsecutiveFailures++ < 20) {
setTimeout(() => this.initNotificationsLoading(), 5000);
} else {
this.alertService.showStickyMessage('Load Error', 'Loading new notifications from the server failed!', MessageSeverity.error);
}
});
}
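    // Retry behaviour of the polling above (descriptive note): each failed
    // poll schedules a fresh attempt after 5 seconds, and only after 20
    // consecutive failures does the sticky "Load Error" message replace
    // further retries.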
markNotificationsAsRead() {
const recentNotifications = this.notificationService.recentNotifications;
if (recentNotifications.length) {
this.notificationService.readUnreadNotification(recentNotifications.map(n => n.id), true)
.subscribe(response => {
for (const n of recentNotifications) {
n.isRead = true;
}
this.newNotificationCount = recentNotifications.filter(n => !n.isRead).length;
},
error => {
this.alertService.logError(error);
          this.alertService.showMessage('Notification Error', 'Marking notifications as read failed', MessageSeverity.error);
});
}
}
showDialog(dialog: AlertDialog) {
alertify.set({
labels: {
ok: dialog.okLabel || 'OK',
cancel: dialog.cancelLabel || 'Cancel'
}
});
switch (dialog.type) {
case DialogType.alert:
alertify.alert(dialog.message);
break;
case DialogType.confirm:
alertify
.confirm(dialog.message, (e) => {
if (e) {
dialog.okCallback();
} else {
if (dialog.cancelCallback) {
dialog.cancelCallback();
}
}
});
break;
case DialogType.prompt:
alertify
.prompt(dialog.message, (e, val) => {
if (e) {
dialog.okCallback(val);
} else {
if (dialog.cancelCallback) {
dialog.cancelCallback();
}
}
}, dialog.defaultValue);
break;
}
}
showToast(alert: AlertCommand) {
if (alert.operation === 'clear') {
for (const id of this.stickyToasties.slice(0)) {
this.toastaService.clear(id);
}
return;
}
const toastOptions: ToastOptions = {
title: alert.message.summary,<|fim▁hole|> msg: alert.message.detail,
};
if (alert.operation === 'add_sticky') {
toastOptions.timeout = 0;
toastOptions.onAdd = (toast: ToastData) => {
this.stickyToasties.push(toast.id);
};
toastOptions.onRemove = (toast: ToastData) => {
const index = this.stickyToasties.indexOf(toast.id, 0);
if (index > -1) {
this.stickyToasties.splice(index, 1);
}
if (alert.onRemove) {
alert.onRemove();
}
toast.onAdd = null;
toast.onRemove = null;
};
} else {
toastOptions.timeout = 4000;
}
switch (alert.message.severity) {
case MessageSeverity.default: this.toastaService.default(toastOptions); break;
case MessageSeverity.info: this.toastaService.info(toastOptions); break;
case MessageSeverity.success: this.toastaService.success(toastOptions); break;
case MessageSeverity.error: this.toastaService.error(toastOptions); break;
case MessageSeverity.warn: this.toastaService.warning(toastOptions); break;
case MessageSeverity.wait: this.toastaService.wait(toastOptions); break;
}
}
logout() {
this.authService.logout();
this.authService.redirectLogoutUser();
}
getYear() {
return new Date().getUTCFullYear();
}
get userName(): string {
return this.authService.currentUser ? this.authService.currentUser.userName : '';
}
get fullName(): string {
return this.authService.currentUser ? this.authService.currentUser.fullName : '';
}
get canViewCustomers() {
return this.accountService.userHasPermission(Permission.viewUsersPermission); // eg. viewCustomersPermission
}
get canViewProducts() {
return this.accountService.userHasPermission(Permission.viewUsersPermission); // eg. viewProductsPermission
}
get canViewOrders() {
return true; // eg. viewOrdersPermission
}
}<|fim▁end|> | |
<|file_name|>thummer.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2011-2018 Matt Austin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, unicode_literals
import re
from django.conf import settings
from django.template import Library, Node, NodeList, TemplateSyntaxError
from django.utils.encoding import smart_str
from thummer.utils import get_thumbnail
register = Library()
kw_pat = re.compile(r'^(?P<key>[\w]+)=(?P<value>.+)$')
class ThummerNodeBase(Node):
"""
A Node that renders safely
"""
nodelist_empty = NodeList()
def render(self, context):
try:
return self._render(context)
except Exception:<|fim▁hole|> if settings.DEBUG:
raise
# TODO: Log error
return self.nodelist_empty.render(context)
def _render(self, context):
        raise NotImplementedError()
@register.tag('thummer')
class ThummerNode(ThummerNodeBase):
child_nodelists = ('nodelist_url', 'nodelist_empty')
error_msg = ('Syntax error. Expected: ``thummer url geometry '
'[key1=val1 key2=val2...] as var``')
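    # Illustrative template usage (assumed, mirroring error_msg above and the
    # parse() calls in __init__ below; the option name and `thumb.url`
    # attribute are assumptions, not confirmed by this file):
    #
    #   {% thummer page.image_url "300x200" crop="center" as thumb %}
    #       <img src="{{ thumb.url }}">
    #   {% empty %}
    #       <p>No preview available.</p>
    #   {% endthummer %}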
def __init__(self, parser, token):
bits = token.split_contents()
if len(bits) < 5 or bits[-2] != 'as':
raise TemplateSyntaxError(self.error_msg)
self.url = parser.compile_filter(bits[1])
self.geometry = parser.compile_filter(bits[2])
self.options = []
for bit in bits[3:-2]:
m = kw_pat.match(bit)
if not m:
raise TemplateSyntaxError(self.error_msg)
key = smart_str(m.group('key'))
expr = parser.compile_filter(m.group('value'))
self.options.append((key, expr))
self.as_var = bits[-1]
self.nodelist_url = parser.parse(('empty', 'endthummer',))
if parser.next_token().contents == 'empty':
self.nodelist_empty = parser.parse(('endthummer',))
parser.delete_first_token()
def _render(self, context):
url = self.url.resolve(context)
geometry = self.geometry.resolve(context)
options = {}
for key, expr in self.options:
noresolve = {'True': True, 'False': False, 'None': None}
value = noresolve.get('{}'.format(expr), expr.resolve(context))
if key == 'options':
options.update(value)
else:
options[key] = value
if url:
thumbnail = get_thumbnail(url, geometry, **options)
else:
return self.nodelist_empty.render(context)
context.push()
context[self.as_var] = thumbnail
output = self.nodelist_url.render(context)
context.pop()
return output
def __iter__(self):
for node in self.nodelist_url:
yield node
for node in self.nodelist_empty:
yield node<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from searchv2.tests.test_models import *
from searchv2.tests.test_utils import *
from searchv2.tests.test_views import *<|fim▁end|> | from searchv2.tests.test_builders import * |
<|file_name|>sha256_bench.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(all(feature = "nightly", test), feature(test))]
#![cfg(all(feature = "nightly", test))]
extern crate test;
extern crate cxema;
#[cfg(test)]
use cxema::sha2::{Sha256};<|fim▁hole|>use cxema::digest::Digest;
use test::Bencher;
#[bench]
pub fn sha256_10(bh: &mut Bencher) {
let mut sh = Sha256::new();
let bytes = [1u8; 10];
bh.iter(|| {
sh.input(&bytes);
});
bh.bytes = bytes.len() as u64;
}
#[bench]
pub fn sha256_1k(bh: &mut Bencher) {
let mut sh = Sha256::new();
let bytes = [1u8; 1024];
bh.iter(|| {
sh.input(&bytes);
});
bh.bytes = bytes.len() as u64;
}
#[bench]
pub fn sha256_64k(bh: &mut Bencher) {
let mut sh = Sha256::new();
let bytes = [1u8; 65536];
bh.iter(|| {
sh.input(&bytes);
});
bh.bytes = bytes.len() as u64;
}<|fim▁end|> | |
<|file_name|>List.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# CTK: Cherokee Toolkit
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2010-2014 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
from Widget import Widget
from Container import Container
from util import props_to_str
ENTRY_HTML = '<%(tag)s id="%(id)s" %(props)s>%(content)s</%(tag)s>'
class ListEntry (Container):
def __init__ (self, _props={}, tag='li'):
Container.__init__ (self)
self.tag = tag
self.props = _props.copy()
def Render (self):
render = Container.Render (self)
if 'id' in self.props:<|fim▁hole|>
props = {'id': self.id,
'tag': self.tag,
'props': props_to_str(self.props),
'content': render.html}
render.html = ENTRY_HTML %(props)
return render
class List (Container):
"""
Widget for lists of elements. The list can grow dynamically, and
    accepts any kind of CTK widget as a list element. Arguments are
optional.
Arguments:
_props: dictionary with properties for the HTML element,
such as {'name': 'foo', 'id': 'bar', 'class': 'baz'}
tag: tag to use for the element, either 'ul' for unordered
lists, or 'ol' for ordered lists. By default, 'ul' is
used.
Examples:
lst = CTK.List()
       lst.Add (CTK.RawHTML('One'))
       lst.Add (CTK.Image({'src': '/foo/bar/baz.png'}))
"""
def __init__ (self, _props={}, tag='ul'):
Container.__init__ (self)
self.tag = tag
self.props = _props.copy()
def Add (self, widget, props={}):
assert isinstance(widget, Widget) or widget is None or type(widget) is list
entry = ListEntry (props.copy())
if widget:
if type(widget) == list:
for w in widget:
entry += w
else:
entry += widget
Container.__iadd__ (self, entry)
def __iadd__ (self, widget):
self.Add (widget)
return self
def Render (self):
render = Container.Render (self)
props = {'id': self.id,
'tag': self.tag,
'props': props_to_str(self.props),
'content': render.html}
render.html = ENTRY_HTML %(props)
return render<|fim▁end|> | self.id = self.props['id'] |
<|file_name|>app.reducers.ts<|end_file_name|><|fim▁begin|>import * as fromShoppingList from '../shopping-list/store/shopping-list.reducers';
import * as fromAuth from '../auth/store/auth.reducers';<|fim▁hole|>export interface AppState {
shoppingList: fromShoppingList.State;
auth: fromAuth.State;
}
export const reducers: ActionReducerMap<AppState> = {
shoppingList: fromShoppingList.shoppingListReducers,
auth: fromAuth.authReducer
};<|fim▁end|> | import {ActionReducerMap} from '@ngrx/store';
|
<|file_name|>info-pane.js<|end_file_name|><|fim▁begin|>(function ($) {
function FewbricksDevHelper() {
var $this = this;
/**
*
*/
this.init = function() {
$this.cssClassFull = 'fewbricks-info-pane--full';
if(!$this.initMainElm()) {
return;
}
$this.initToggler();
$this.$mainElm.show();
}
/**
*
*/
this.initMainElm = function() {
$this.$mainElm = $('#fewbricks-info-pane');
if($this.$mainElm.length === 0) {
return false;
}
if(typeof fewbricksInfoPane !== 'undefined' && typeof fewbricksInfoPane.startHeight !== 'undefined') {
$this.toggleMainElm(fewbricksInfoPane.startHeight);
}
return true;
}
/**
*
*/
this.initToggler = function() {
$('[data-fewbricks-info-pane-toggler]')
.unbind('click')
.on('click', function() {
let height = $(this).attr('data-fewbricks-info-pane-height');
$this.toggleMainElm(height);
document.cookie = 'fewbricks_info_pane_height=' + height;
});
}
/**
*
*/
this.toggleMainElm = function(height) {
if(height === 'minimized') {
$this.$mainElm.attr('style', function(i, style)
{<|fim▁hole|> } else {
$this.$mainElm.height(height + 'vh');
}
}
/**
*
* @returns {*|boolean}
*/
this.mainElmIsFull = function() {
return $this.$mainElm.hasClass($this.cssClassFull);
}
}
$(document).ready(function () {
(new FewbricksDevHelper()).init();
});
})(jQuery);<|fim▁end|> | return style && style.replace(/height[^;]+;?/g, '');
});
|
<|file_name|>004-280565a54124-add_custom_head_tags.py<|end_file_name|><|fim▁begin|># This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
"""add custom head tags
add setting for custom tags (HTML) in <head> section
added: 2012-02-13 (v0.10dev)
previously migrate script v054
Revision ID: 280565a54124
Revises: 4d27ff5680e5
Create Date: 2013-05-14 22:38:02.552230
"""
# revision identifiers, used by Alembic.
revision = '280565a54124'
down_revision = '4d27ff5680e5'
from alembic.op import execute, inline_literal
from sqlalchemy import Integer, Unicode, UnicodeText
from sqlalchemy import Column, MetaData, Table
# -- table definition ---------------------------------------------------------
metadata = MetaData()
settings = Table('settings', metadata,
Column('id', Integer, autoincrement=True, primary_key=True),
Column('key', Unicode(255), nullable=False, unique=True),
Column('value', UnicodeText),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
# -- helpers ------------------------------------------------------------------
def insert_setting(key, value):
execute(
settings.insert().\
values({<|fim▁hole|>
def delete_setting(key):
execute(
settings.delete().\
where(settings.c.key==inline_literal(key))
)
# -----------------------------------------------------------------------------
SETTINGS = [
(u'appearance_custom_head_tags', u''),
]
def upgrade():
for key, value in SETTINGS:
insert_setting(key, value)
def downgrade():
for key, value in SETTINGS:
delete_setting(key)<|fim▁end|> | 'key': inline_literal(key),
'value': inline_literal(value),
})
) |
<|file_name|>access.py<|end_file_name|><|fim▁begin|>import base64
import os
import time
from gluon import portalocker
from gluon.admin import apath
from gluon.fileutils import read_file
# ###########################################################
# ## make sure administrator is on localhost or https
# ###########################################################
http_host = request.env.http_host.split(':')[0]
if request.env.web2py_runtime_gae:
session_db = DAL('gae')
session.connect(request, response, db=session_db)
hosts = (http_host, )
is_gae = True
else:
is_gae = False
if request.env.http_x_forwarded_for or request.is_https:
session.secure()
elif not request.is_local and not DEMO_MODE:
raise HTTP(200, T('Admin is disabled because insecure channel'))
try:
_config = {}
port = int(request.env.server_port or 0)
restricted(
read_file(apath('../parameters_%i.py' % port, request)), _config)
if not 'password' in _config or not _config['password']:
raise HTTP(200, T('admin disabled because no admin password'))
except IOError:
import gluon.fileutils
if is_gae:
if gluon.fileutils.check_credentials(request):
session.authorized = True
session.last_time = time.time()
else:
raise HTTP(200,
T('admin disabled because not supported on google app engine'))
else:
raise HTTP(
200, T('admin disabled because unable to access password file'))
def verify_password(password):
session.pam_user = None
if DEMO_MODE:
return True
elif not _config.get('password'):
return False
elif _config['password'].startswith('pam_user:'):
session.pam_user = _config['password'][9:].strip()
import gluon.contrib.pam
return gluon.contrib.pam.authenticate(session.pam_user, password)
else:
return _config['password'] == CRYPT()(password)[0]
# ###########################################################
# ## handle brute-force login attacks
# ###########################################################
deny_file = os.path.join(request.folder, 'private', 'hosts.deny')
allowed_number_of_attempts = 5
expiration_failed_logins = 3600
def read_hosts_deny():
import datetime
hosts = {}
if os.path.exists(deny_file):
hosts = {}
f = open(deny_file, 'r')
portalocker.lock(f, portalocker.LOCK_SH)
for line in f.readlines():
if not line.strip() or line.startswith('#'):
continue
fields = line.strip().split()
if len(fields) > 2:
hosts[fields[0].strip()] = ( # ip
int(fields[1].strip()), # n attemps
int(fields[2].strip()) # last attempts
)
portalocker.unlock(f)
f.close()
return hosts
def write_hosts_deny(denied_hosts):
f = open(deny_file, 'w')
portalocker.lock(f, portalocker.LOCK_EX)
for key, val in denied_hosts.items():
if time.time() - val[1] < expiration_failed_logins:
line = '%s %s %s\n' % (key, val[0], val[1])
f.write(line)
portalocker.unlock(f)
f.close()
def login_record(success=True):
denied_hosts = read_hosts_deny()
val = (0, 0)
if success and request.client in denied_hosts:
del denied_hosts[request.client]
elif not success and not request.is_local:
val = denied_hosts.get(request.client, (0, 0))
if time.time() - val[1] < expiration_failed_logins \
and val[0] >= allowed_number_of_attempts:
return val[0] # locked out
time.sleep(2 ** val[0])
val = (val[0] + 1, int(time.time()))
denied_hosts[request.client] = val
write_hosts_deny(denied_hosts)
return val[0]
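# Illustrative timeline (derived from the constants above): a client's
# first five failed logins sleep 2**0 .. 2**4 seconds (1, 2, 4, 8, 16);
# once allowed_number_of_attempts is reached, further attempts within the
# expiration_failed_logins window (one hour) return immediately as locked
# out, until the stale entries are dropped by write_hosts_deny().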
# ###########################################################
# ## session expiration
# ###########################################################
t0 = time.time()
if session.authorized:
if session.last_time and session.last_time < t0 - EXPIRATION:
session.flash = T('session expired')
session.authorized = False
else:
session.last_time = t0
if request.vars.is_mobile in ('true', 'false', 'auto'):
session.is_mobile = request.vars.is_mobile or 'auto'
if request.controller == 'default' and request.function == 'index':
if not request.vars.is_mobile:
session.is_mobile = 'auto'
if not session.is_mobile:
session.is_mobile = 'auto'
if session.is_mobile == 'true':
is_mobile = True
elif session.is_mobile == 'false':
is_mobile = False
else:
is_mobile = request.user_agent().is_mobile
if DEMO_MODE:
session.authorized = True
session.forget()
if request.controller == "webservices":
basic = request.env.http_authorization
if not basic or not basic[:6].lower() == 'basic ':
raise HTTP(401, "Wrong credentials")
(username, password) = base64.b64decode(basic[6:]).split(':')
if not verify_password(password) or MULTI_USER_MODE:
time.sleep(10)
raise HTTP(403, "Not authorized")
elif not session.authorized and not \
(request.controller + '/' + request.function in<|fim▁hole|> if request.env.query_string:
query_string = '?' + request.env.query_string
else:
query_string = ''
if request.env.web2py_original_uri:
url = request.env.web2py_original_uri
else:
url = request.env.path_info + query_string
redirect(URL(request.application, 'default', 'index', vars=dict(send=url)))
elif session.authorized and \
request.controller == 'default' and \
request.function == 'index':
redirect(URL(request.application, 'default', 'site'))
if request.controller == 'appadmin' and DEMO_MODE:
session.flash = 'Appadmin disabled in demo mode'
redirect(URL('default', 'sites'))<|fim▁end|> | ('default/index', 'default/user', 'plugin_jqmobile/index', 'plugin_jqmobile/about')):
|
<|file_name|>test_prune.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
import { prune } from 'src/util/prune';
describe('unit/utils/prune', () => {
it('returns string if string is shorter than limit', () => {
const expected = 'this is some text';
const actual = prune(expected, 50);
expect(actual).to.equal(expected);
});
it('returns string if string length matches limit', () => {
const expected = '0123456789';
const actual = prune(expected, 10);
expect(actual).to.equal(expected);
});
  it('returns correct pruned string if string length is exactly three more than limit', () => {
const text____ = 'one two three four five six seven';
const expected = 'one two three four five six...';<|fim▁hole|> });
it('returns correct string if it needs to drop a whole word', () => {
const text____ = 'one two three four five sixty-seven';
const expected = 'one two three four five...';
const actual = prune(text____, 30);
expect(actual).to.equal(expected);
});
it('returns chopped string if there are no spaces', () => {
const text____ = 'onetwothreefourfivesixty-seven';
const expected = 'onetwothreefourfi...';
const actual = prune(text____, 20);
expect(actual).to.equal(expected);
});
it('returns string without trailing punctuation', () => {
const text____ = 'one two three four five, sixty-seven';
const expected = 'one two three four five...';
const actual = prune(text____, 30);
expect(actual).to.equal(expected);
});
it('returns trimmed strings with newlines and multi-spaces removed', () => {
const text____ = ' one two\nthree four\nfive six seven ';
const expected = 'one two three...';
const actual = prune(text____, 20);
expect(actual).to.equal(expected);
});
});<|fim▁end|> |
const actual = prune(text____, 30);
expect(actual).to.equal(expected); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import logging
log = logging.getLogger(__name__)
from .trajectories import Trajectories
<|fim▁hole|>except ImportError: # pragma: no cover
log.warning('''Matplotlib can't be imported,'''
'''drawing module won't be available ''')
__all__ = ['Trajectories']<|fim▁end|> | try: # pragma: no cover
from . import draw
__all__ = ['Trajectories', 'draw'] |
<|file_name|>housing_test.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2018 Phyks
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import itertools
from collections import Counter<|fim▁hole|>from weboob.capabilities.housing import POSTS_TYPES
class HousingTest(object):
"""
Testing class to standardize the housing modules tests.
"""
# Fields to be checked for values across all items in housings list
FIELDS_ALL_HOUSINGS_LIST = [
"id", "type", "advert_type", "house_type", "url", "title", "area",
"cost", "currency", "utilities", "date", "location", "station", "text",
"phone", "rooms", "bedrooms", "DPE", "GES", "details"
]
# Fields to be checked for at least one item in housings list
FIELDS_ANY_HOUSINGS_LIST = [
"photos"
]
# Fields to be checked for values across all items when querying
# individually
FIELDS_ALL_SINGLE_HOUSING = [
"id", "url", "type", "advert_type", "house_type", "title", "area",
"cost", "currency", "utilities", "date", "location", "station", "text",
"phone", "rooms", "bedrooms", "DPE", "GES", "details"
]
# Fields to be checked for values at least once for all items when querying
# individually
FIELDS_ANY_SINGLE_HOUSING = [
"photos"
]
# Some backends cannot distinguish between rent and furnished rent for
# single housing post. Set this to True if this is the case.
DO_NOT_DISTINGUISH_FURNISHED_RENT = False
def assertNotEmpty(self, obj, field):
self.assertFalse(
empty(getattr(obj, field)),
'Field "%s" is empty and should not be.' % field
)
def check_housing_lists(self, query):
results = list(itertools.islice(
self.backend.search_housings(query),
20
))
self.assertGreater(len(results), 0)
for field in self.FIELDS_ANY_HOUSINGS_LIST:
self.assertTrue(
any(not empty(getattr(x, field)) for x in results),
'Missing a "%s" field.' % field
)
for x in results:
if 'type' in self.FIELDS_ALL_HOUSINGS_LIST:
self.assertEqual(x.type, query.type)
if 'advert_type' in self.FIELDS_ALL_HOUSINGS_LIST:
self.assertIn(x.advert_type, query.advert_types)
if 'house_type' in self.FIELDS_ALL_HOUSINGS_LIST:
self.assertIn(x.house_type, query.house_types)
for field in self.FIELDS_ALL_HOUSINGS_LIST:
self.assertNotEmpty(x, field)
if not empty(x.cost):
self.assertNotEmpty(x, 'price_per_meter')
for photo in x.photos:
self.assertRegexpMatches(photo.url, r'^http(s?)://')
return results
def check_single_housing_all(self, housing,
type, house_types, advert_type):
for field in self.FIELDS_ALL_SINGLE_HOUSING:
self.assertNotEmpty(housing, field)
if 'type' in self.FIELDS_ALL_SINGLE_HOUSING:
if (
self.DO_NOT_DISTINGUISH_FURNISHED_RENT and
type in [POSTS_TYPES.RENT, POSTS_TYPES.FURNISHED_RENT]
):
self.assertIn(housing.type,
[POSTS_TYPES.RENT, POSTS_TYPES.FURNISHED_RENT])
else:
self.assertEqual(housing.type, type)
if 'house_type' in self.FIELDS_ALL_SINGLE_HOUSING:
if not empty(house_types):
self.assertEqual(housing.house_type, house_types)
else:
self.assertNotEmpty(housing, 'house_type')
if 'advert_type' in self.FIELDS_ALL_SINGLE_HOUSING:
self.assertEqual(housing.advert_type, advert_type)
def check_single_housing_any(self, housing, counter):
for field in self.FIELDS_ANY_SINGLE_HOUSING:
if not empty(getattr(housing, field)):
counter[field] += 1
for photo in housing.photos:
self.assertRegexpMatches(photo.url, r'^http(s?)://')
return counter
def check_against_query(self, query):
# Check housing listing results
results = self.check_housing_lists(query)
# Check mandatory fields in all housings
housing = self.backend.get_housing(results[0].id)
self.backend.fillobj(housing, 'phone') # Fetch phone
self.check_single_housing_all(
housing,
results[0].type,
results[0].house_type,
results[0].advert_type
)
# Check fields that should appear in at least one housing
counter = Counter()
counter = self.check_single_housing_any(housing, counter)
for result in results[1:]:
if all(counter[field] > 0 for field in
self.FIELDS_ANY_SINGLE_HOUSING):
break
housing = self.backend.get_housing(result.id)
self.backend.fillobj(housing, 'phone') # Fetch phone
counter = self.check_single_housing_any(housing, counter)
for field in self.FIELDS_ANY_SINGLE_HOUSING:
self.assertGreater(
counter[field],
0,
'Optional field "%s" should appear at least once.' % field
)<|fim▁end|> |
from weboob.capabilities.base import empty |
<|file_name|>kitten-static-T.cpp<|end_file_name|><|fim▁begin|>#include <cstdio>
template<typename T>
auto kitten(T x) __attribute__((noinline));
template<class T>
auto kitten(T t)
{
static T x = 0;<|fim▁hole|>int main()
{
printf("%d\n", kitten(1));
printf("%g\n", kitten(3.14));
}<|fim▁end|> | return (x += 1) + t;
}
|
<|file_name|>make-stat.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 24 11:17:25 2014
@author: kshmirko
"""
import pandas as pds
import numpy as np
def seasons(x):
month = x.month
ret = None
if month in [12,1,2]:
ret = "Winter"
elif month in [3,4,5]:
ret = "Spring"
elif month in [6,7,8]:
ret = "Summer"
else:
ret = "Fall"
return ret
Lon0 = 131.9
Lat0 = 43.1
Radius = 4.0
DB = 'DS-%5.1f-%4.1f-%3.1f.h5'%(Lon0, Lat0, Radius)
O3StatFileName_fig = 'O3-%5.1f-%4.1f-%3.1f.eps'%(Lon0, Lat0, Radius)
O3StatFileName_h5 = 'O3-%5.1f-%4.1f-%3.1f.h5'%(Lon0, Lat0, Radius)
O3 = pds.read_hdf(DB,'O3')
O3_Err = pds.read_hdf(DB,'O3Err')
TH = pds.read_hdf(DB,'TH')
#вычисляем статистику по сезонам относительно земли
O3seasons = O3.groupby(seasons).mean().T / 1.0e12
O3seasons_s = O3.groupby(seasons).std().T / 1.0e12
O3seasons_Err = O3_Err.groupby(seasons).mean().T / 100.00
THseasons = TH.groupby(seasons).agg([np.mean, np.std]).T
X = np.linspace(0.5, 70, 140)
StatO3 = pds.DataFrame(index=X)
for iseason in ['Winter','Spring','Summer','Fall']:
StatO3[iseason+'_m'] = O3seasons[iseason]
StatO3[iseason+'_e'] = O3seasons_Err[iseason]
StatO3[iseason+'_s'] = O3seasons_s[iseason]
store = pds.HDFStore(O3StatFileName_h5,'w')
store.put('Statistics',StatO3)
store.close()
import pylab as plt
plt.figure(1)
plt.clf()
ax = plt.subplot(2,2,1)
ax.plot(X, O3seasons['Winter'])<|fim▁hole|>ax.set_ylabel('$[O3]x10^{12}, cm^{-3}$' )
ax.set_title('Winter')
Ht0 = THseasons['Winter'][0]['mean']
Ht0e= THseasons['Winter'][0]['std']
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.annotate('$H_{tropo}=%3.1f\pm%3.1f km$'%(Ht0, Ht0e), xy=(Ht0, 4,),
xycoords='data', xytext=(30, 4.0),
arrowprops=dict(arrowstyle="->", lw=2),
size=16
)
ax.grid()
ax2 = ax.twinx()
ax2.plot(X,O3seasons_Err['Winter'],'r.--')
ax.spines['right'].set_color('red')
ax2.yaxis.label.set_color('red')
ax2.tick_params(axis='y', colors='red')
ax2.set_ylim((0,100))
# 2-nd plot
ax = plt.subplot(2,2,2)
ax.plot(X, O3seasons['Spring'],'g')
ax.set_xlim((0,40))
ax.set_ylim((0,6))
ax.set_title('Spring')
Ht0 = THseasons['Spring'][0]['mean']
Ht0e= THseasons['Spring'][0]['std']
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.annotate('$H_{tropo}=%3.1f\pm%3.1f km$'%(Ht0, Ht0e), xy=(Ht0, 4,),
xycoords='data', xytext=(30, 4.0),
arrowprops=dict(arrowstyle="->", lw=2),
size=16
)
ax.grid()
ax2 = ax.twinx()
ax2.plot(X,O3seasons_Err['Spring'],'r.--')
ax.spines['right'].set_color('red')
ax2.set_ylabel('Error,$\%$')
ax2.tick_params(axis='y', colors='red')
ax2.yaxis.label.set_color('red')
ax2.set_ylim((0,100))
#3-rd plot
ax = plt.subplot(2,2,3)
ax.plot(X, O3seasons['Summer'],'y')
ax.set_xlim((0,40))
ax.set_ylim((0,6))
ax.grid()
ax.set_xlabel('Altitude, km')
ax.set_ylabel('$[O3]x10^{12}, cm^{-3}$' )
ax.set_title('Summer')
Ht0 = THseasons['Summer'][0]['mean']
Ht0e= THseasons['Summer'][0]['std']
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.annotate('$H_{tropo}=%3.1f\pm%3.1f km$'%(Ht0, Ht0e), xy=(Ht0, 4,),
xycoords='data', xytext=(30, 4.0),
arrowprops=dict(arrowstyle="->", lw=2),
size=16
)
ax2 = ax.twinx()
ax2.plot(X,O3seasons_Err['Summer'],'r.--')
ax.spines['right'].set_color('red')
ax2.tick_params(axis='y', colors='red')
ax2.yaxis.label.set_color('red')
ax2.set_ylim((0,100))
#4-th plot
ax = plt.subplot(2,2,4)
ax.plot(X, O3seasons['Fall'],'k')
ax.set_xlim((0,40))
ax.set_ylim((0,6))
Ht0 = THseasons['Fall'][0]['mean']
Ht0e= THseasons['Fall'][0]['std']
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.plot([Ht0,Ht0],[0,6],
'k.--',lw=2)
ax.annotate('$H_{tropo}=%3.1f\pm%3.1f km$'%(Ht0, Ht0e), xy=(Ht0, 4,),
xycoords='data', xytext=(30, 4.0),
arrowprops=dict(arrowstyle="->", lw=2),
size=16
)
ax.grid()
ax.set_xlabel('Altitude, km')
ax.set_title('Fall')
ax2 = ax.twinx()
ax2.plot(X,O3seasons_Err['Fall'],'r.--')
ax.spines['right'].set_color('red')
ax2.yaxis.label.set_color('red')
ax2.set_ylabel('Error,$\%$')
ax2.tick_params(axis='y', colors='red')
ax2.set_ylim((0,100))
plt.savefig(O3StatFileName_fig)<|fim▁end|> | ax.set_xlim((0,40))
ax.set_ylim((0,6)) |
<|file_name|>index.tsx<|end_file_name|><|fim▁begin|>import React from 'react'
import {
MutationFunction as MutationFn,
useMutation,
useQuery,
} from '@apollo/client'
import { Form, Field } from 'react-final-form'
import { ApolloError, PureQueryOptions } from '@apollo/client'
import { unescape } from 'underscore'
import styled from 'styled-components'
import Box from 'v2/components/UI/Box'
import Alert from 'v2/components/UI/Alert'
import ErrorAlert from 'v2/components/UI/ErrorAlert'
import Accordion from 'v2/components/UI/Accordion'
import Text from 'v2/components/UI/Text'
import LoadingIndicator from 'v2/components/UI/LoadingIndicator'
import TitledDialog from 'v2/components/UI/TitledDialog'
import RadioOptions from 'v2/components/UI/RadioOptions'
import { LabelledInput, Label, Input, Textarea } from 'v2/components/UI/Inputs'
import ExportChannel from 'v2/components/ManageChannel/components/ExportChannel'
import DeleteChannel from 'v2/components/ManageChannel/components/DeleteChannel'
import TransferChannel from 'v2/components/ManageChannel/components/TransferChannel'
import AssignAuthor from 'v2/components/ManageChannel/components/AssignAuthor'
import ChannelVisibilityPulldown from 'v2/components/ChannelVisibilityPulldown'
import {
ManageChannelQuery,
ManageChannelQueryVariables,
ManageChannelQuery_channel as Channel,
ManageChannelQuery_me,
} from '__generated__/ManageChannelQuery'
import {
updateChannelMutation,
updateChannelMutationVariables,
} from '__generated__/updateChannelMutation'
import UPDATE_CHANNEL_QUERY from 'v2/components/ManageChannel/mutations/updateChannel'
import MANAGE_CHANNEL_QUERY from 'v2/components/ManageChannel/queries/manageChannel'
import mapErrors from 'v2/util/mapErrors'
import { FORM_ERROR } from 'final-form'
const Container = styled.div`
width: 100%;
margin: 0 auto 2em auto;
`
const TextInput = styled(Input).attrs({
flex: 1,
f: 7,
})``
interface ManageChannelProps {
channel: Channel
me: ManageChannelQuery_me
updateChannel: MutationFn<
updateChannelMutation,
updateChannelMutationVariables
>
loading: boolean
error: ApolloError
refetchQueries?: PureQueryOptions[]
onUpdate?: (href?: string) => void
onDelete?: () => void
}
const ManageChannel: React.FC<ManageChannelProps> = ({
channel,
me,
loading,
error,
updateChannel,
refetchQueries,
onUpdate = () => {
window.location.reload()
},
onDelete = () => {
window.location.href = '/'
},
}) => {
if (loading) return <LoadingIndicator />
if (error) return <ErrorAlert>{error.message}</ErrorAlert>
const showAuthor = me.counts.groups > 0 && me.id === channel.user.id
const owner = `${channel.owner.__typename.toUpperCase()}:${channel.owner.id}`
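  // Descriptive note: owners are encoded as "TYPE:ID" strings (for example
  // "GROUP:123" — the id here is hypothetical); handleSubmit and
  // handleAuthor below split on ":" to recover the two parts.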
const handleSubmit = values => {
let owner
if (values.owner) {
const [type, id] = values.owner.split(':')
owner = { type, id }
}
const variables = {
...values,
id: channel.id,
owner,
}
return updateChannel({
variables,
refetchQueries,
})
.then(
({
data: {
update_channel: {
channel: { href },
},
},
}) => {
onUpdate(href)
return true
}
)
.catch(err => {
const mappedErrors = mapErrors(err)
const errors = {
[FORM_ERROR]: mappedErrors.errorMessage,
...mappedErrors.attributeErrors,
}
return errors
})
}
return (
<Form
onSubmit={handleSubmit}
render={({
handleSubmit,
submitFailed,
submitSucceeded,
submitting,
submitError,
values,
}) => {
const color =
(values && values.visibility && values.visibility.toLowerCase()) ||
channel.visibility.toUpperCase()
return (
<TitledDialog
title="Edit channel"
label={submitting ? 'Saving' : 'Save changes'}
onDone={handleSubmit}
>
{submitFailed && submitError && (
<ErrorAlert isReloadable={false}>{submitError}</ErrorAlert>
)}
{submitSucceeded && (
<Alert bg="state.premium" color="background" mb={6}>
Channel Saved.
</Alert>
)}
<Container>
<Field name="title" initialValue={unescape(channel.title)}>
{props => {
return (
<LabelledInput mt={6} mb={5}>
<Label>Title</Label>
<TextInput
{...props.input}
placeholder="Type channel name"
borderless
autoFocus
required
color={`channel.${color}`}
errorMessage={
props.meta.error || props.meta.submitError
}
/>
</LabelledInput>
)
}}
</Field>
{showAuthor && (
<Field name="owner" initialValue={owner}>
{({ input, ...rest }) => {
const handleAuthor = newOwner => {
const [type, id] = newOwner.split(':')
const { owner } = channel
const newOwerIsCurrentOwner =
type.toLowerCase() === owner.__typename.toLowerCase() &&
id === owner.id
if (newOwerIsCurrentOwner) return
input.onChange(newOwner)
}
return (
<LabelledInput my={6} alignItems="start">
<Label>Author</Label>
<AssignAuthor
{...rest}
onChange={handleAuthor}
selected={input.value}
/>
</LabelledInput>
)
}}
</Field>
)}
<Field name="description" initialValue={channel.description}>
{props => {
return (
<LabelledInput my={6}>
<Label>Description</Label>
<Textarea
{...props.input}
placeholder="Describe your channel here"
rows={4}
errorMessage={
props.meta.error || props.meta.submitError
}
/><|fim▁hole|> </LabelledInput>
)
}}
</Field>
<Field
name="visibility"
initialValue={channel.visibility.toUpperCase()}
>
{props => {
return (
<LabelledInput my={6}>
<Label>Privacy</Label>
<div>
<ChannelVisibilityPulldown
{...props.input}
type={channel.owner.__typename.toUpperCase()}
/>
</div>
</LabelledInput>
)
}}
</Field>
<Accordion label="NSFW?" mode="closed">
<Field
name="content_flag"
initialValue={channel.content_flag.toUpperCase()}
>
{props => {
return (
<Box m={7}>
<Text f={2} mb={7}>
Not Safe For Work (NSFW) channels are hidden from
Explore and are only visible on your profile to people
who have the "Show NSFW Content" setting
turned on.
</Text>
<RadioOptions
value={props.input.value}
onSelect={props.input.onChange}
size="1em"
>
<RadioOptions.Option value="SAFE">
{({ selected }) => (
<Text f={3} mb={3} selected={selected}>
<strong>Safe for work</strong>
</Text>
)}
</RadioOptions.Option>
<RadioOptions.Option value="NSFW">
{({ selected }) => (
<Text f={3} mb={3} selected={selected}>
<strong>Not safe for work</strong>
</Text>
)}
</RadioOptions.Option>
</RadioOptions>
</Box>
)
}}
</Field>
</Accordion>
{channel.can.export && (
<Accordion label="Export" mode="closed">
<Box m={7}>
<ExportChannel id={channel.id} />
</Box>
</Accordion>
)}
{channel.can.transfer && (
<Accordion label="Transfer ownership" mode="closed">
<Box m={7}>
<TransferChannel channel={channel} />
</Box>
</Accordion>
)}
{channel.can.destroy && (
<Accordion label="Delete channel" mode="closed">
<Box m={7}>
<DeleteChannel
id={channel.id}
onDelete={onDelete}
refetchQueries={refetchQueries}
/>
</Box>
</Accordion>
)}
</Container>
</TitledDialog>
)
}}
/>
)
}
interface ManageChannelContainerProps {
id: string | number
refetchQueries?: PureQueryOptions[]
onUpdate?: () => void
onDelete?: () => void
}
const ManageChannelContainer: React.FC<ManageChannelContainerProps> = ({
id,
refetchQueries,
onDelete,
onUpdate,
}) => {
const { data, loading, error } = useQuery<
ManageChannelQuery,
ManageChannelQueryVariables
>(MANAGE_CHANNEL_QUERY, { variables: { id: id.toString() } })
const [updateChannel] = useMutation<
updateChannelMutation,
updateChannelMutationVariables
>(UPDATE_CHANNEL_QUERY)
return (
<ManageChannel
error={error}
loading={loading}
channel={data && data.channel}
me={data && data.me}
updateChannel={updateChannel}
refetchQueries={refetchQueries}
onDelete={onDelete}
onUpdate={onUpdate}
/>
)
}
export default ManageChannelContainer<|fim▁end|> | |
<|file_name|>ScriptLoader.java<|end_file_name|><|fim▁begin|>package logbook.data;
import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.CheckForNull;
import javax.script.Invocable;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
import javax.script.ScriptException;
import logbook.config.AppConfig;
import org.apache.commons.io.FilenameUtils;
/**
 * Loads scripts and obtains their EventListener implementations
*
*/
public final class ScriptLoader implements Closeable {
/** ClassLoader */
private final URLClassLoader classLoader;
/** ScriptEngineManager */
private final ScriptEngineManager manager;
/**
     * Constructor
*/
public ScriptLoader() {
this.classLoader = URLClassLoader.newInstance(this.getLibraries());
this.manager = new ScriptEngineManager(this.classLoader);
}
/**
     * Loads a script and obtains its EventListener implementation<br>
<|fim▁hole|> * @throws IOException
* @throws ScriptException
*/
@CheckForNull
public EventListener getEventListener(Path script) throws IOException, ScriptException {
try (BufferedReader reader = Files.newBufferedReader(script, StandardCharsets.UTF_8)) {
            // Get a ScriptEngine based on the file extension
String ext = FilenameUtils.getExtension(script.toString());
ScriptEngine engine = this.manager.getEngineByExtension(ext);
if (engine != null) {
// eval
engine.eval(reader);
                // Obtain the implementation
EventListener listener = ((Invocable) engine).getInterface(EventListener.class);
if (listener != null) {
return new ScriptEventAdapter(listener, script);
}
}
return null;
}
}
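    // Usage sketch (hypothetical script path; ScriptLoader is Closeable, so
    // try-with-resources works):
    //   try (ScriptLoader loader = new ScriptLoader()) {
    //       EventListener listener = loader.getEventListener(Paths.get("scripts/update.js"));
    //   }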
/**
     * Additional libraries to be used by the ScriptEngineManager
     *
     * @return the libraries
*/
public URL[] getLibraries() {
String[] engines = AppConfig.get().getScriptEngines();
List<URL> libs = new ArrayList<>();
for (String engine : engines) {
Path path = Paths.get(engine);
if (Files.isReadable(path)) {
try {
libs.add(path.toUri().toURL());
} catch (MalformedURLException e) {
                    // This case should never happen
e.printStackTrace();
}
}
}
return libs.toArray(new URL[libs.size()]);
}
@Override
public void close() throws IOException {
this.classLoader.close();
}
}<|fim▁end|> | *
     * @param script the script to load
     * @return the EventListener implemented by the script, or null if no script engine is found or the evaluated script does not implement EventListener
|
<|file_name|>timesheet.py<|end_file_name|><|fim▁begin|>import xml.etree.ElementTree as ET
import datetime
import sys
import openpyxl
import re
import dateutil.parser
def main():
print 'Number of arguments:', len(sys.argv), 'arguments.' #DEBUG
print 'Argument List:', str(sys.argv) #DEBUG
    payRate = raw_input("Enter your pay rate: ") #DEBUG
sNumber = raw_input("Enter 900#: ") #DEBUG
xml = ET.parse("xml.xml") #DEBUG
root = xml.getroot()
root = root[3][0] #Go directly to worksheet/table
sheet = openpyxl.load_workbook(sys.argv[1], data_only=True).active
writeName(root)
writeEmployeeNum(root)
writeStudentNum(sNumber)
writePayRate(payRate)
    #At this point all that is left are the times
    dates = []
    for x in root.findall(".//*"):
        if x.text != None:
            dates.append(x.text)
    for x in char_range('G','Z'):
        writeTimes(x + '17', dates)
def writeTimes(position, dateList):
    match = next(x[0] for x in enumerate(dateList) if x[1] == sheet[position].value)
    jobCode = dateList[match + 4]
    if jobCode == 900:
        raise ValueError("Cannot start day with 900 break")
    else:
        # Completed from context: write the rounded matched timestamp into the cell
        sheet[position] = roundTime(dateList[match])
def roundTime(time):
    date = dateutil.parser.parse(time)
    if date.minute <= 7:
        return date.replace(minute=0)
    elif date.minute >= 8 and date.minute <= 22:
        return date.replace(minute=15)
    elif date.minute >= 23 and date.minute <= 37:
        return date.replace(minute=30)
    elif date.minute >= 38 and date.minute <= 52:
        return date.replace(minute=45)
    elif date.minute >= 53:
        if date.hour == 23:
            raise ValueError("Worked overnight or did not clock out")
        else:
            date += datetime.timedelta(minutes=(60 - date.minute))
            # Rounds time to the next hour by adding minutes until 60
            return date
    else:
        raise ValueError("Something went wrong in roundTime")
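# Example (hypothetical timestamps, assuming a format dateutil can parse):
#   roundTime("2015-03-02 09:10") -> 2015-03-02 09:15 (minutes 8-22 round to :15)
#   roundTime("2015-03-02 09:55") -> 2015-03-02 10:00 (minute >= 53 rounds up to the next hour)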
def writeName(tree):
name = tree[-1][4][0].text<|fim▁hole|> sheet['I8'] = name
def writeEmployeeNum(tree):
    num = tree[2][0][0].text
    sheet['D4'] = re.match('.*?([0-9]+)$', num).group(1)
def writeStudentNum(num):
    sheet['S8'] = num
def writePayRate(num):
    sheet['K6'] = num
def char_range(c1, c2):
"""Generates the characters from `c1` to `c2`, inclusive."""
"""Courtesy http://stackoverflow.com/questions/7001144/range-over-character-in-python"""
for c in xrange(ord(c1), ord(c2)+1):
yield chr(c)
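# e.g. list(char_range('G', 'J')) == ['G', 'H', 'I', 'J']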
main()<|fim▁end|> | |
<|file_name|>tryclient.py<|end_file_name|><|fim▁begin|># This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import json
import os
import random
import re
import shlex
import string
import sys
import time
from twisted.cred import credentials
from twisted.internet import defer
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet import task
from twisted.internet import utils
from twisted.python import log
from twisted.python import runtime
from twisted.python.procutils import which
from twisted.spread import pb
from buildbot.process.results import SUCCESS
from buildbot.process.results import Results
from buildbot.util import bytes2unicode
from buildbot.util import now
from buildbot.util import unicode2bytes
from buildbot.util.eventual import fireEventually
class SourceStamp:
def __init__(self, branch, revision, patch, repository=''):
self.branch = branch
self.revision = revision
self.patch = patch
self.repository = repository
def output(*msg):
    print(' '.join([str(m) for m in msg]))
class SourceStampExtractor:
def __init__(self, treetop, branch, repository):
self.treetop = treetop
self.repository = repository
self.branch = branch
exes = which(self.vcexe)
if not exes:
output("Could not find executable '{}'.".format(self.vcexe))
sys.exit(1)
self.exe = exes[0]
def dovc(self, cmd):
"""This accepts the arguments of a command, without the actual
command itself."""
env = os.environ.copy()
env['LC_ALL'] = "C"
d = utils.getProcessOutputAndValue(self.exe, cmd, env=env,
path=self.treetop)
d.addCallback(self._didvc, cmd)
return d
def _didvc(self, res, cmd):
(stdout, stderr, code) = res
# 'bzr diff' sets rc=1 if there were any differences.
# cvs does something similar, so don't bother requiring rc=0.
return stdout
def get(self):
"""Return a Deferred that fires with a SourceStamp instance."""
d = self.getBaseRevision()
d.addCallback(self.getPatch)
d.addCallback(self.done)
return d
def readPatch(self, diff, patchlevel):
if not diff:
diff = None
self.patch = (patchlevel, bytes2unicode(diff))
def done(self, res):
if not self.repository:
self.repository = self.treetop
# TODO: figure out the branch and project too
ss = SourceStamp(bytes2unicode(self.branch), self.baserev, self.patch,
repository=self.repository)
return ss
class CVSExtractor(SourceStampExtractor):
patchlevel = 0
vcexe = "cvs"
def getBaseRevision(self):
# this depends upon our local clock and the repository's clock being
# reasonably synchronized with each other. We express everything in
# UTC because the '%z' format specifier for strftime doesn't always
# work.
self.baserev = time.strftime("%Y-%m-%d %H:%M:%S +0000",
time.gmtime(now()))
return defer.succeed(None)
def getPatch(self, res):
# the -q tells CVS to not announce each directory as it works
if self.branch is not None:
# 'cvs diff' won't take both -r and -D at the same time (it
# ignores the -r). As best I can tell, there is no way to make
# cvs give you a diff relative to a timestamp on the non-trunk
# branch. A bare 'cvs diff' will tell you about the changes
# relative to your checked-out versions, but I know of no way to
# find out what those checked-out versions are.
output("Sorry, CVS 'try' builds don't work with branches")
sys.exit(1)
args = ['-q', 'diff', '-u', '-D', self.baserev]
d = self.dovc(args)
d.addCallback(self.readPatch, self.patchlevel)
return d
class SVNExtractor(SourceStampExtractor):
patchlevel = 0
vcexe = "svn"
def getBaseRevision(self):
d = self.dovc(["status", "-u"])
d.addCallback(self.parseStatus)
return d
def parseStatus(self, res):
# svn shows the base revision for each file that has been modified or
# which needs an update. You can update each file to a different
# version, so each file is displayed with its individual base
# revision. It also shows the repository-wide latest revision number
# on the last line ("Status against revision: \d+").
# for our purposes, we use the latest revision number as the "base"
# revision, and get a diff against that. This means we will get
# reverse-diffs for local files that need updating, but the resulting
# tree will still be correct. The only weirdness is that the baserev
# that we emit may be different than the version of the tree that we
# first checked out.
# to do this differently would probably involve scanning the revision
# numbers to find the max (or perhaps the min) revision, and then
# using that as a base.
for line in res.split(b"\n"):
m = re.search(br'^Status against revision:\s+(\d+)', line)
if m:
self.baserev = m.group(1)
return
output(
b"Could not find 'Status against revision' in SVN output: " + res)
sys.exit(1)
def getPatch(self, res):
d = self.dovc(["diff", "-r{}".format(self.baserev)])
d.addCallback(self.readPatch, self.patchlevel)
return d
class BzrExtractor(SourceStampExtractor):
patchlevel = 0
vcexe = "bzr"
def getBaseRevision(self):
d = self.dovc(["revision-info", "-rsubmit:"])
d.addCallback(self.get_revision_number)
return d
def get_revision_number(self, out):
revno, revid = out.split()
self.baserev = 'revid:' + revid
return
def getPatch(self, res):
d = self.dovc(["diff", "-r{}..".format(self.baserev)])
d.addCallback(self.readPatch, self.patchlevel)
return d
class MercurialExtractor(SourceStampExtractor):
patchlevel = 1
vcexe = "hg"
def _didvc(self, res, cmd):
(stdout, stderr, code) = res
if code:
cs = ' '.join(['hg'] + cmd)
if stderr:
stderr = '\n' + stderr.rstrip()
raise RuntimeError("{} returned {} {}".format(cs, code, stderr))
return stdout
@defer.inlineCallbacks
def getBaseRevision(self):
upstream = ""
if self.repository:
upstream = "r'{}'".format(self.repository)
output = ''
try:
output = yield self.dovc(["log", "--template", "{node}\\n", "-r",
"max(::. - outgoing({}))".format(upstream)])
except RuntimeError:
# outgoing() will abort if no default-push/default path is
# configured
if upstream:
raise
# fall back to current working directory parent
output = yield self.dovc(["log", "--template", "{node}\\n", "-r", "p1()"])
m = re.search(br'^(\w+)', output)
if not m:
raise RuntimeError(
"Revision {!r} is not in the right format".format(output))
self.baserev = m.group(0)
def getPatch(self, res):
d = self.dovc(["diff", "-r", self.baserev])
d.addCallback(self.readPatch, self.patchlevel)
return d
class PerforceExtractor(SourceStampExtractor):
patchlevel = 0
vcexe = "p4"
def getBaseRevision(self):
d = self.dovc(["changes", "-m1", "..."])
d.addCallback(self.parseStatus)
return d
def parseStatus(self, res):
#
# extract the base change number
#
m = re.search(br'Change (\d+)', res)
if m:
self.baserev = m.group(1)
return
output(b"Could not find change number in output: " + res)
sys.exit(1)
def readPatch(self, res, patchlevel):
#
# extract the actual patch from "res"
#
if not self.branch:
output("you must specify a branch")
sys.exit(1)
mpatch = ""
found = False
for line in res.split("\n"):
m = re.search('==== //depot/' + self.branch
+ r'/([\w/\.\d\-_]+)#(\d+) -', line)
if m:
mpatch += "--- {}#{}\n".format(m.group(1), m.group(2))
mpatch += "+++ {}\n".format(m.group(1))
found = True
else:
mpatch += line
mpatch += "\n"
if not found:
output(b"could not parse patch file")
sys.exit(1)
self.patch = (patchlevel, mpatch)
def getPatch(self, res):
d = self.dovc(["diff"])
d.addCallback(self.readPatch, self.patchlevel)
return d
class DarcsExtractor(SourceStampExtractor):
patchlevel = 1
vcexe = "darcs"
def getBaseRevision(self):
d = self.dovc(["changes", "--context"])
d.addCallback(self.parseStatus)
return d
def parseStatus(self, res):
self.baserev = res # the whole context file
def getPatch(self, res):
d = self.dovc(["diff", "-u"])
d.addCallback(self.readPatch, self.patchlevel)
return d
class GitExtractor(SourceStampExtractor):
patchlevel = 1
vcexe = "git"
config = None
def getBaseRevision(self):
# If a branch is specified, parse out the rev it points to
# and extract the local name.
if self.branch:
d = self.dovc(["rev-parse", self.branch])
d.addCallback(self.override_baserev)
d.addCallback(self.extractLocalBranch)
return d
d = self.dovc(["branch", "--no-color", "-v", "--no-abbrev"])
d.addCallback(self.parseStatus)
return d
# remove remote-prefix from self.branch (assumes format <prefix>/<branch>)
# this uses "git remote" to retrieve all configured remote names
def extractLocalBranch(self, res):
if '/' in self.branch:
d = self.dovc(["remote"])
d.addCallback(self.fixBranch)
return d
return None
# strip remote prefix from self.branch
def fixBranch(self, remotes):
for l in bytes2unicode(remotes).split("\n"):
r = l.strip()
if r and self.branch.startswith(r + "/"):
self.branch = self.branch[len(r) + 1:]
break
def readConfig(self):
if self.config:
return defer.succeed(self.config)
d = self.dovc(["config", "-l"])
d.addCallback(self.parseConfig)
return d
def parseConfig(self, res):
self.config = {}
for l in res.split(b"\n"):
if l.strip():
parts = l.strip().split(b"=", 2)
if len(parts) < 2:
parts.append('true')
self.config[parts[0]] = parts[1]
return self.config
def parseTrackingBranch(self, res):
# If we're tracking a remote, consider that the base.
remote = self.config.get(b"branch." + self.branch + b".remote")
ref = self.config.get(b"branch." + self.branch + b".merge")
if remote and ref:
remote_branch = ref.split(b"/", 2)[-1]
baserev = remote + b"/" + remote_branch
else:
baserev = b"master"
d = self.dovc(["rev-parse", baserev])
d.addCallback(self.override_baserev)
return d
def override_baserev(self, res):
self.baserev = bytes2unicode(res).strip()
def parseStatus(self, res):
# The current branch is marked by '*' at the start of the
# line, followed by the branch name and the SHA1.
#
# Branch names may contain pretty much anything but whitespace.
m = re.search(br'^\* (\S+)\s+([0-9a-f]{40})', res, re.MULTILINE)
if m:
self.baserev = m.group(2)
self.branch = m.group(1)
d = self.readConfig()
d.addCallback(self.parseTrackingBranch)
return d
output(b"Could not find current GIT branch: " + res)
sys.exit(1)
def getPatch(self, res):
d = self.dovc(["diff", "--src-prefix=a/", "--dst-prefix=b/",
"--no-textconv", "--no-ext-diff", self.baserev])
d.addCallback(self.readPatch, self.patchlevel)
return d
class MonotoneExtractor(SourceStampExtractor):
patchlevel = 0
vcexe = "mtn"
def getBaseRevision(self):
d = self.dovc(["automate", "get_base_revision_id"])
d.addCallback(self.parseStatus)
return d
def parseStatus(self, output):
hash = output.strip()
if len(hash) != 40:
self.baserev = None<|fim▁hole|> self.baserev = hash
def getPatch(self, res):
d = self.dovc(["diff"])
d.addCallback(self.readPatch, self.patchlevel)
return d
def getSourceStamp(vctype, treetop, branch=None, repository=None):
if vctype == "cvs":
cls = CVSExtractor
elif vctype == "svn":
cls = SVNExtractor
elif vctype == "bzr":
cls = BzrExtractor
elif vctype == "hg":
cls = MercurialExtractor
elif vctype == "p4":
cls = PerforceExtractor
elif vctype == "darcs":
cls = DarcsExtractor
elif vctype == "git":
cls = GitExtractor
elif vctype == "mtn":
cls = MonotoneExtractor
elif vctype == "none":
return defer.succeed(SourceStamp("", "", (1, ""), ""))
else:
output("unknown vctype '{}'".format(vctype))
sys.exit(1)
return cls(treetop, branch, repository).get()
def ns(s):
return "{}:{},".format(len(s), s)
def createJobfile(jobid, branch, baserev, patch_level, patch_body, repository,
project, who, comment, builderNames, properties):
# Determine job file version from provided arguments
if properties:
version = 5
elif comment:
version = 4
elif who:
version = 3
else:
version = 2
job = ""
job += ns(str(version))
if version < 5:
job += ns(jobid)
job += ns(branch)
job += ns(str(baserev))
job += ns("{}".format(patch_level))
job += ns(patch_body or "")
job += ns(repository)
job += ns(project)
if (version >= 3):
job += ns(who)
if (version >= 4):
job += ns(comment)
for bn in builderNames:
job += ns(bn)
else:
job += ns(
json.dumps({
'jobid': jobid, 'branch': branch, 'baserev': str(baserev),
'patch_level': patch_level, 'patch_body': patch_body,
'repository': repository, 'project': project, 'who': who,
'comment': comment, 'builderNames': builderNames,
'properties': properties,
}))
return job
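# Example (hypothetical values): with empty who/comment and no properties the
# version is 2, so createJobfile("42", "trunk", "100", 1, "<diff>", "repo",
# "proj", "", "", ["b1"], {}) starts with "1:2,2:42,5:trunk,3:100,...".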
def getTopdir(topfile, start=None):
"""walk upwards from the current directory until we find this topfile"""
if not start:
start = os.getcwd()
here = start
toomany = 20
while toomany > 0:
if os.path.exists(os.path.join(here, topfile)):
return here
next = os.path.dirname(here)
if next == here:
break # we've hit the root
here = next
toomany -= 1
output("Unable to find topfile '{}' anywhere "
"from {} upwards".format(topfile, start))
sys.exit(1)
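# e.g. getTopdir("setup.py", start="/src/project/pkg") returns the nearest
# ancestor directory that contains setup.py (hypothetically "/src/project").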
class RemoteTryPP(protocol.ProcessProtocol):
def __init__(self, job):
self.job = job
self.d = defer.Deferred()
def connectionMade(self):
self.transport.write(unicode2bytes(self.job))
self.transport.closeStdin()
def outReceived(self, data):
sys.stdout.write(bytes2unicode(data))
def errReceived(self, data):
sys.stderr.write(bytes2unicode(data))
def processEnded(self, status_object):
sig = status_object.value.signal
rc = status_object.value.exitCode
if sig is not None or rc != 0:
self.d.errback(RuntimeError("remote 'buildbot tryserver' failed"
": sig={}, rc={}".format(sig, rc)))
return
self.d.callback((sig, rc))
class FakeBuildSetStatus:
def callRemote(self, name):
if name == "getBuildRequests":
return defer.succeed([])
raise NotImplementedError()
class Try(pb.Referenceable):
buildsetStatus = None
quiet = False
printloop = False
def __init__(self, config):
self.config = config
self.connect = self.getopt('connect')
if self.connect not in ['ssh', 'pb']:
output("you must specify a connect style: ssh or pb")
sys.exit(1)
self.builderNames = self.getopt('builders')
self.project = self.getopt('project', '')
self.who = self.getopt('who')
self.comment = self.getopt('comment')
def getopt(self, config_name, default=None):
value = self.config.get(config_name)
if value is None or value == []:
value = default
return value
def createJob(self):
# returns a Deferred which fires when the job parameters have been
# created
# generate a random (unique) string. It would make sense to add a
# hostname and process ID here, but a) I suspect that would cause
# windows portability problems, and b) really this is good enough
self.bsid = "{}-{}".format(time.time(), random.randint(0, 1000000))
# common options
branch = self.getopt("branch")
difffile = self.config.get("diff")
if difffile:
baserev = self.config.get("baserev")
if difffile == "-":
diff = sys.stdin.read()
else:
with open(difffile, "r") as f:
diff = f.read()
if not diff:
diff = None
patch = (self.config['patchlevel'], diff)
ss = SourceStamp(
branch, baserev, patch, repository=self.getopt("repository"))
d = defer.succeed(ss)
else:
vc = self.getopt("vc")
if vc in ("cvs", "svn"):
# we need to find the tree-top
topdir = self.getopt("topdir")
if topdir:
treedir = os.path.expanduser(topdir)
else:
topfile = self.getopt("topfile")
if topfile:
treedir = getTopdir(topfile)
else:
output("Must specify topdir or topfile.")
sys.exit(1)
else:
treedir = os.getcwd()
d = getSourceStamp(vc, treedir, branch, self.getopt("repository"))
d.addCallback(self._createJob_1)
return d
def _createJob_1(self, ss):
self.sourcestamp = ss
patchlevel, diff = ss.patch
if diff is None:
raise RuntimeError("There is no patch to try, diff is empty.")
if self.connect == "ssh":
revspec = ss.revision
if revspec is None:
revspec = ""
self.jobfile = createJobfile(
self.bsid, ss.branch or "", revspec, patchlevel, diff,
ss.repository, self.project, self.who, self.comment,
self.builderNames, self.config.get('properties', {}))
def fakeDeliverJob(self):
# Display the job to be delivered, but don't perform delivery.
ss = self.sourcestamp
output("Job:\n\tRepository: {}\n\tProject: {}\n\tBranch: {}\n\t"
"Revision: {}\n\tBuilders: {}\n{}".format(
ss.repository, self.project, ss.branch,
ss.revision,
self.builderNames,
ss.patch[1]))
self.buildsetStatus = FakeBuildSetStatus()
d = defer.Deferred()
d.callback(True)
return d
def deliver_job_ssh(self):
tryhost = self.getopt("host")
tryport = self.getopt("port")
tryuser = self.getopt("username")
trydir = self.getopt("jobdir")
buildbotbin = self.getopt("buildbotbin")
ssh_command = self.getopt("ssh")
if not ssh_command:
ssh_commands = which("ssh")
if not ssh_commands:
raise RuntimeError("couldn't find ssh executable, make sure "
"it is available in the PATH")
argv = [ssh_commands[0]]
else:
# Split the string on whitespace to allow passing options in
# ssh command too, but preserving whitespace inside quotes to
# allow using paths with spaces in them which is common under
# Windows. And because Windows uses backslashes in paths, we
# can't just use shlex.split there as it would interpret them
# specially, so do it by hand.
if runtime.platformType == 'win32':
# Note that regex here matches the arguments, not the
# separators, as it's simpler to do it like this. And then we
# just need to get all of them together using the slice and
# also remove the quotes from those that were quoted.
argv = [string.strip(a, '"') for a in
re.split(r'''([^" ]+|"[^"]+")''', ssh_command)[1::2]]
else:
# Do use standard tokenization logic under POSIX.
argv = shlex.split(ssh_command)
if tryuser:
argv += ["-l", tryuser]
if tryport:
argv += ["-p", tryport]
argv += [tryhost, buildbotbin, "tryserver", "--jobdir", trydir]
pp = RemoteTryPP(self.jobfile)
reactor.spawnProcess(pp, argv[0], argv, os.environ)
d = pp.d
return d
@defer.inlineCallbacks
def deliver_job_pb(self):
user = self.getopt("username")
passwd = self.getopt("passwd")
master = self.getopt("master")
tryhost, tryport = master.split(":")
tryport = int(tryport)
f = pb.PBClientFactory()
d = f.login(credentials.UsernamePassword(unicode2bytes(user), unicode2bytes(passwd)))
reactor.connectTCP(tryhost, tryport, f)
remote = yield d
ss = self.sourcestamp
output("Delivering job; comment=", self.comment)
self.buildsetStatus = \
yield remote.callRemote("try", ss.branch, ss.revision, ss.patch, ss.repository,
self.project, self.builderNames, self.who, self.comment,
self.config.get('properties', {}))
def deliverJob(self):
# returns a Deferred that fires when the job has been delivered
if self.connect == "ssh":
return self.deliver_job_ssh()
if self.connect == "pb":
return self.deliver_job_pb()
raise RuntimeError("unknown connecttype '{}', "
"should be 'ssh' or 'pb'".format(self.connect))
def getStatus(self):
# returns a Deferred that fires when the builds have finished, and
# may emit status messages while we wait
wait = bool(self.getopt("wait"))
if not wait:
output("not waiting for builds to finish")
elif self.connect == "ssh":
output("waiting for builds with ssh is not supported")
else:
self.running = defer.Deferred()
if not self.buildsetStatus:
output("try scheduler on the master does not have the builder configured")
return None
self._getStatus_1() # note that we don't wait for the returned Deferred
if bool(self.config.get("dryrun")):
self.statusDone()
return self.running
return None
@defer.inlineCallbacks
def _getStatus_1(self):
# gather the set of BuildRequests
brs = yield self.buildsetStatus.callRemote("getBuildRequests")
self.builderNames = []
self.buildRequests = {}
# self.builds holds the current BuildStatus object for each one
self.builds = {}
# self.outstanding holds the list of builderNames which haven't
# finished yet
self.outstanding = []
# self.results holds the list of build results. It holds a tuple of
# (result, text)
self.results = {}
# self.currentStep holds the name of the Step that each build is
# currently running
self.currentStep = {}
# self.ETA holds the expected finishing time (absolute time since
# epoch)
self.ETA = {}
for n, br in brs:
self.builderNames.append(n)
self.buildRequests[n] = br
self.builds[n] = None
self.outstanding.append(n)
self.results[n] = [None, None]
self.currentStep[n] = None
self.ETA[n] = None
# get new Builds for this buildrequest. We follow each one until
# it finishes or is interrupted.
br.callRemote("subscribe", self)
# now that those queries are in transit, we can start the
# display-status-every-30-seconds loop
if not self.getopt("quiet"):
self.printloop = task.LoopingCall(self.printStatus)
self.printloop.start(3, now=False)
# these methods are invoked by the status objects we've subscribed to
def remote_newbuild(self, bs, builderName):
if self.builds[builderName]:
self.builds[builderName].callRemote("unsubscribe", self)
self.builds[builderName] = bs
bs.callRemote("subscribe", self, 20)
d = bs.callRemote("waitUntilFinished")
d.addCallback(self._build_finished, builderName)
def remote_stepStarted(self, buildername, build, stepname, step):
self.currentStep[buildername] = stepname
def remote_stepFinished(self, buildername, build, stepname, step, results):
pass
def remote_buildETAUpdate(self, buildername, build, eta):
self.ETA[buildername] = now() + eta
@defer.inlineCallbacks
def _build_finished(self, bs, builderName):
# we need to collect status from the newly-finished build. We don't
# remove the build from self.outstanding until we've collected
# everything we want.
self.builds[builderName] = None
self.ETA[builderName] = None
self.currentStep[builderName] = "finished"
self.results[builderName][0] = yield bs.callRemote("getResults")
self.results[builderName][1] = yield bs.callRemote("getText")
self.outstanding.remove(builderName)
if not self.outstanding:
self.statusDone()
def printStatus(self):
try:
names = sorted(self.buildRequests.keys())
for n in names:
if n not in self.outstanding:
# the build is finished, and we have results
code, text = self.results[n]
t = Results[code]
if text:
t += " ({})".format(" ".join(text))
elif self.builds[n]:
t = self.currentStep[n] or "building"
if self.ETA[n]:
t += " [ETA {}s]".format(self.ETA[n] - now())
else:
t = "no build"
self.announce("{}: {}".format(n, t))
self.announce("")
except Exception:
log.err(None, "printing status")
def statusDone(self):
if self.printloop:
self.printloop.stop()
self.printloop = None
output("All Builds Complete")
# TODO: include a URL for all failing builds
names = sorted(self.buildRequests.keys())
happy = True
for n in names:
code, text = self.results[n]
t = "{}: {}".format(n, Results[code])
if text:
t += " ({})".format(" ".join(text))
output(t)
if code != SUCCESS:
happy = False
if happy:
self.exitcode = 0
else:
self.exitcode = 1
self.running.callback(self.exitcode)
@defer.inlineCallbacks
def getAvailableBuilderNames(self):
# This logs into the master using the PB protocol to
# get the names of the configured builders that can
# be used for the --builder argument
if self.connect == "pb":
user = self.getopt("username")
passwd = self.getopt("passwd")
master = self.getopt("master")
tryhost, tryport = master.split(":")
tryport = int(tryport)
f = pb.PBClientFactory()
d = f.login(credentials.UsernamePassword(unicode2bytes(user), unicode2bytes(passwd)))
reactor.connectTCP(tryhost, tryport, f)
remote = yield d
buildernames = yield remote.callRemote("getAvailableBuilderNames")
output("The following builders are available for the try scheduler: ")
for buildername in buildernames:
output(buildername)
yield remote.broker.transport.loseConnection()
return
if self.connect == "ssh":
output("Cannot get available builders over ssh.")
sys.exit(1)
raise RuntimeError(
"unknown connecttype '{}', should be 'pb'".format(self.connect))
def announce(self, message):
if not self.quiet:
output(message)
@defer.inlineCallbacks
def run_impl(self):
output("using '{}' connect method".format(self.connect))
self.exitcode = 0
# we can't do spawnProcess until we're inside reactor.run(), so force asynchronous execution
yield fireEventually(None)
try:
if bool(self.config.get("get-builder-names")):
yield self.getAvailableBuilderNames()
else:
yield self.createJob()
yield self.announce("job created")
if bool(self.config.get("dryrun")):
yield self.fakeDeliverJob()
else:
yield self.deliverJob()
yield self.announce("job has been delivered")
yield self.getStatus()
if not bool(self.config.get("dryrun")):
yield self.cleanup()
except SystemExit as e:
self.exitcode = e.code
except Exception as e:
log.err(e)
raise
def run(self):
d = self.run_impl()
d.addCallback(lambda res: reactor.stop())
reactor.run()
sys.exit(self.exitcode)
def trapSystemExit(self, why):
why.trap(SystemExit)
self.exitcode = why.value.code
def cleanup(self, res=None):
if self.buildsetStatus:
self.buildsetStatus.broker.transport.loseConnection()<|fim▁end|> | |
<|file_name|>DeleteEventsByEventTypeRequestProtocolMarshaller.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.frauddetector.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.Request;
import com.amazonaws.http.HttpMethodName;
import com.amazonaws.services.frauddetector.model.*;
import com.amazonaws.transform.Marshaller;
import com.amazonaws.protocol.*;
import com.amazonaws.protocol.Protocol;
import com.amazonaws.annotation.SdkInternalApi;
/**
* DeleteEventsByEventTypeRequest Marshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class DeleteEventsByEventTypeRequestProtocolMarshaller implements Marshaller<Request<DeleteEventsByEventTypeRequest>, DeleteEventsByEventTypeRequest> {
private static final OperationInfo SDK_OPERATION_BINDING = OperationInfo.builder().protocol(Protocol.AWS_JSON).requestUri("/")
.httpMethodName(HttpMethodName.POST).hasExplicitPayloadMember(false).hasPayloadMembers(true)
.operationIdentifier("AWSHawksNestServiceFacade.DeleteEventsByEventType").serviceName("AmazonFraudDetector").build();
private final com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory;
public DeleteEventsByEventTypeRequestProtocolMarshaller(com.amazonaws.protocol.json.SdkJsonProtocolFactory protocolFactory) {
this.protocolFactory = protocolFactory;
}
public Request<DeleteEventsByEventTypeRequest> marshall(DeleteEventsByEventTypeRequest deleteEventsByEventTypeRequest) {
if (deleteEventsByEventTypeRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
final ProtocolRequestMarshaller<DeleteEventsByEventTypeRequest> protocolMarshaller = protocolFactory.createProtocolMarshaller(
SDK_OPERATION_BINDING, deleteEventsByEventTypeRequest);
protocolMarshaller.startMarshalling();
DeleteEventsByEventTypeRequestMarshaller.getInstance().marshall(deleteEventsByEventTypeRequest, protocolMarshaller);
return protocolMarshaller.finishMarshalling();
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}<|fim▁end|> | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
* |
<|file_name|>Radio.js<|end_file_name|><|fim▁begin|>'use strict';
var _get = require('babel-runtime/helpers/get')['default'];
var _inherits = require('babel-runtime/helpers/inherits')['default'];
var _createClass = require('babel-runtime/helpers/create-class')['default'];
var _classCallCheck = require('babel-runtime/helpers/class-call-check')['default'];
var _extends = require('babel-runtime/helpers/extends')['default'];
var _interopRequireDefault = require('babel-runtime/helpers/interop-require-default')['default'];
Object.defineProperty(exports, '__esModule', {
value: true
});
var _react = require('react');
var _react2 = _interopRequireDefault(_react);
var _EnhancedSwitch = require('./EnhancedSwitch');
var _EnhancedSwitch2 = _interopRequireDefault(_EnhancedSwitch);
var Radio = (function (_React$Component) {
_inherits(Radio, _React$Component);
function Radio() {
_classCallCheck(this, Radio);
_get(Object.getPrototypeOf(Radio.prototype), 'constructor', this).apply(this, arguments);
}
_createClass(Radio, [{
key: 'getValue',<|fim▁hole|> return this.refs.enhancedSwitch.getValue();
}
}, {
key: 'setChecked',
value: function setChecked(newCheckedValue) {
this.refs.enhancedSwitch.setSwitched(newCheckedValue);
}
}, {
key: 'isChecked',
value: function isChecked() {
return this.refs.enhancedSwitch.isSwitched();
}
}, {
key: 'render',
value: function render() {
var enhancedSwitchProps = {
ref: 'enhancedSwitch',
inputType: 'radio'
};
// labelClassName
return _react2['default'].createElement(_EnhancedSwitch2['default'], _extends({}, this.props, enhancedSwitchProps));
}
}]);
return Radio;
})(_react2['default'].Component);
exports['default'] = Radio;
module.exports = exports['default'];<|fim▁end|> | value: function getValue() { |
<|file_name|>GroovyService.java<|end_file_name|><|fim▁begin|>/**
* Cerberus Copyright (C) 2013 - 2017 cerberustesting
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This file is part of Cerberus.
*
* Cerberus is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Cerberus is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Cerberus. If not, see <http://www.gnu.org/licenses/>.
*/
package org.cerberus.service.groovy.impl;
import groovy.lang.GroovyShell;
import java.util.Collections;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import org.cerberus.service.groovy.IGroovyService;<|fim▁hole|>import org.kohsuke.groovy.sandbox.SandboxTransformer;
import org.springframework.stereotype.Service;
/**
* {@link IGroovyService} default implementation
*
* @author Aurelien Bourdon
*/
@Service
public class GroovyService implements IGroovyService {
/**
* Groovy specific compilation customizer in order to avoid code injection
*/
private static final CompilerConfiguration GROOVY_COMPILER_CONFIGURATION = new CompilerConfiguration().addCompilationCustomizers(new SandboxTransformer());
/**
* Each Groovy execution is ran inside a dedicated {@link Thread},
* especially to register our Groovy interceptor
*/
private ExecutorService executorService;
@PostConstruct
private void init() {
executorService = Executors.newCachedThreadPool();
}
@PreDestroy
private void shutdown() {
if (executorService != null && !executorService.isShutdown()) {
executorService.shutdownNow();
}
}
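    // Usage sketch (assuming the service is injected, e.g. by Spring, and the
    // expression passes the sandbox restrictions):
    //   String two = groovyService.eval("1 + 1"); // returns "2"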
@Override
public String eval(final String script) throws IGroovyServiceException {
try {
Future<String> expression = executorService.submit(() -> {
RestrictiveGroovyInterceptor interceptor = new RestrictiveGroovyInterceptor(
Collections.<Class<?>>emptySet(),
Collections.<Class<?>>emptySet(),
Collections.<RestrictiveGroovyInterceptor.AllowedPrefix>emptyList()
);
try {
interceptor.register();
GroovyShell shell = new GroovyShell(GROOVY_COMPILER_CONFIGURATION);
return shell.evaluate(script).toString();
} finally {
interceptor.unregister();
}
});
String eval = expression.get();
if (eval == null) {
throw new IGroovyServiceException("Groovy evaluation returns null result");
}
return eval;
} catch (Exception e) {
throw new IGroovyServiceException(e);
}
}
}<|fim▁end|> | import org.codehaus.groovy.control.CompilerConfiguration; |
<|file_name|>pet.js<|end_file_name|><|fim▁begin|>// inheritance!!
function Pet() {
this.animal = "";
this.name="";
this.setAnimal = function(newAnimal){
this.animal = newAnimal;
}
this.setName = function(newName){
this.name = newName;
}
}
var myCat = new Pet();
myCat.setAnimal = "cat";
myCat.setName = "Sylvester";
function Dog(){
this.breed ="";
    this.setBreed = function(newBreed){
this.breed = newBreed;<|fim▁hole|>Dog.prototype = new Pet();
// Now I can access the properties and methods of Pet in addition to Dog
var myDog = new Dog();
myDog.setName("Alan");
myDog.setBreed("Greyhound");
alert(myDog.name + "is a " myDog.breed);<|fim▁end|> | }
}
|
<|file_name|>javascriptExamples.js<|end_file_name|><|fim▁begin|>define(["widgetjs/widgetjs", "lodash", "jquery", "prettify", "code", "bootstrap"], function(widgetjs, lodash, jQuery, prettify, code) {
var examples = {};
examples.modals = code({
group: "Modals",
label: "Modals",
links: ["http://getbootstrap.com/javascript/#modals"],
example : function(html) {
// Modal
html.div({"class": "clearfix bs-example-modal"},
html.div({ klass: "modal"},
html.div({ klass: "modal-dialog"},
html.div({ klass: "modal-content"},
html.div({ klass: "modal-header"},
html.button({ type: "button", klass: "close", "data-dismiss": "modal", "aria-hidden": "true"},
"×"
),
html.h4({ klass: "modal-title"},
"Modal title"
)
),
html.div({ klass: "modal-body"},
html.p(
"One fine body…"
)
),
html.div({ klass: "modal-footer"},
html.button({ type: "button", klass: "btn btn-default", "data-dismiss": "modal"},
"Close"
),
html.button({ type: "button", klass: "btn btn-primary"},
"Save changes"
)
)
)
)
)
);
}
});
examples.modalsDemo = code({
group: "Modals",
label: "Live Demo",
links: ["http://getbootstrap.com/javascript/#modals"],
example : function(html) {
// Modal
html.div({ id: "myModal", klass: "modal fade"},
html.div({ klass: "modal-dialog"},
html.div({ klass: "modal-content"},
html.div({ klass: "modal-header"},
html.button({ type: "button", klass: "close", "data-dismiss": "modal", "aria-hidden": "true"},
"×"
),
html.h4({ klass: "modal-title"},
"Modal title"
)
),
html.div({ klass: "modal-body"},
html.p(
"One fine body…"
)
),
html.div({ klass: "modal-footer"},
html.button({ type: "button", klass: "btn btn-default", "data-dismiss": "modal"},
"Close"
),
html.button({ type: "button", klass: "btn btn-primary"},
"Save changes"
)
)
)
)
);
html.button({ klass: "btn btn-primary btn-lg", "data-toggle": "modal", "data-target": "#myModal"}, "Show modal");
}
});
examples.toolTips = code({
group: "Tooltips",
label: "Live Demo",
links: ["http://getbootstrap.com/javascript/#tooltips"],
example : function(html) {
// TOOLTIPS
html.div({ klass: "tooltip-demo"},
html.div({ klass: "bs-example-tooltips"},
html.button({ type: "button", klass: "btn btn-default", "data-toggle": "tooltip", "data-placement": "left", title: "", "data-original-title": "Tooltip on left"},
"Tooltip on left"
),
html.button({ type: "button", klass: "btn btn-default", "data-toggle": "tooltip", "data-placement": "top", title: "", "data-original-title": "Tooltip on top"},
"Tooltip on top"
),
html.button({ type: "button", klass: "btn btn-default", "data-toggle": "tooltip", "data-placement": "bottom", title: "", "data-original-title": "Tooltip on bottom"},
"Tooltip on bottom"
),
html.button({ type: "button", klass: "btn btn-default", "data-toggle": "tooltip", "data-placement": "right", title: "", "data-original-title": "Tooltip on right"},
"Tooltip on right"
)
)
);
jQuery(".bs-example-tooltips").tooltip();
}
});
examples.popovers = code({
group: "Popovers",
label: "Popovers",
links: ["http://getbootstrap.com/javascript/#popovers"],
example : function(html) {
// Popovers
html.div({ klass: "bs-example-popovers"},
html.button({ type: "button", klass: "btn btn-default", "data-container": "body", "data-toggle": "popover", "data-placement": "left", "data-content": "Vivamus sagittis lacus vel augue laoreet rutrum faucibus.", "data-original-title": "", title: ""},
"Left"
),
html.button({ type: "button", klass: "btn btn-default", "data-container": "body", "data-toggle": "popover", "data-placement": "top", "data-content": "Vivamus sagittis lacus vel augue laoreet rutrum faucibus.", "data-original-title": "", title: ""},
"Top"
),
html.button({ type: "button", klass: "btn btn-default", "data-container": "body", "data-toggle": "popover", "data-placement": "bottom", "data-content": "Vivamus sagittis lacus vel augue laoreet rutrum faucibus.", "data-original-title": "", title: ""},
"Bottom"
),
html.button({ type: "button", klass: "btn btn-default", "data-container": "body", "data-toggle": "popover", "data-placement": "right", "data-content": "Vivamus sagittis lacus vel augue laoreet rutrum faucibus.", "data-original-title": "", title: ""},
"Right"
)
);
jQuery(".bs-example-popovers button").popover();
}
});
examples.buttonsCheckbox = code({
group: "Buttons",
label: "Checkbox",
links: ["http://getbootstrap.com/javascript/#buttons"],
example : function(html) {
html.div({ style: "padding-bottom: 24px;"},
html.div({ klass: "btn-group", "data-toggle": "buttons"},
html.label({ klass: "btn btn-primary"},
html.input({ type: "checkbox"}),
"Option 1"
),
html.label({ klass: "btn btn-primary"},
html.input({ type: "checkbox"}),
"Option 2"
),
html.label({ klass: "btn btn-primary"},
html.input({ type: "checkbox"}),
"Option 3"
)
)
);
}
});
examples.buttonsRadio= code({
group: "Buttons",
label: "Radio",
links: ["http://getbootstrap.com/javascript/#buttons"],
example : function(html) {
html.div({ style: "padding-bottom: 24px;"},
html.div({ klass: "btn-group", "data-toggle": "buttons"},
html.label({ klass: "btn btn-primary"},
html.input({ type: "radio"}),
"Option 1"
),
html.label({ klass: "btn btn-primary"},
html.input({ type: "radio"}),
"Option 2"
),
html.label({ klass: "btn btn-primary"},
html.input({ type: "radio"}),
"Option 3"
)
)
);
}
});
examples.collapse = code({<|fim▁hole|> label: "Collapse",
links: ["http://getbootstrap.com/javascript/#collapse"],
example : function(html) {
html.div({ klass: "panel-group", id: "accordion"},
html.div({ klass: "panel panel-default"},
html.div({ klass: "panel-heading"},
html.h4({ klass: "panel-title"},
html.a({ "data-toggle": "collapse", "data-parent": "#accordion", href: "#collapseOne"},
"Heading #1"
)
)
),
html.div({ id: "collapseOne", klass: "panel-collapse collapse in"},
html.div({ klass: "panel-body"},
"Content"
)
)
),
html.div({ klass: "panel panel-default"},
html.div({ klass: "panel-heading"},
html.h4({ klass: "panel-title"},
html.a({ "data-toggle": "collapse", "data-parent": "#accordion", href: "#collapseTwo"},
"Heading #2"
)
)
),
html.div({ id: "collapseTwo", klass: "panel-collapse collapse"},
html.div({ klass: "panel-body"},
"Content"
)
)
),
html.div({ klass: "panel panel-default"},
html.div({ klass: "panel-heading"},
html.h4({ klass: "panel-title"},
html.a({ "data-toggle": "collapse", "data-parent": "#accordion", href: "#collapseThree"},
"Heading #2"
)
)
),
html.div({ id: "collapseThree", klass: "panel-collapse collapse"},
html.div({ klass: "panel-body"},
"Content"
)
)
)
);
}
});
return examples;
});<|fim▁end|> | group: "Collapse", |
<|file_name|>CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82a.cpp<|end_file_name|><|fim▁begin|>/* TEMPLATE GENERATED TESTCASE FILE
Filename: CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82a.cpp
Label Definition File: CWE134_Uncontrolled_Format_String.label.xml
Template File: sources-sinks-82a.tmpl.cpp
*/
/*
* @description
* CWE: 134 Uncontrolled Format String
* BadSource: connect_socket Read data using a connect socket (client side)
* GoodSource: Copy a fixed string into data
* Sinks: fprintf
* GoodSink: fprintf with "%s" as the second argument and data as the third
* BadSink : fprintf with data as the second argument
* Flow Variant: 82 Data flow: data passed in a parameter to an virtual method called via a pointer
*
* */
#include "std_testcase.h"
#include "CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82.h"
#ifdef _WIN32
#include <winsock2.h>
#include <windows.h>
#include <direct.h>
#pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */
#define CLOSE_SOCKET closesocket
#else /* NOT _WIN32 */
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <unistd.h>
#define INVALID_SOCKET -1
#define SOCKET_ERROR -1
#define CLOSE_SOCKET close
#define SOCKET int
#endif
#define TCP_PORT 27015
#define IP_ADDRESS "127.0.0.1"
namespace CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82
{
#ifndef OMITBAD
void bad()
{
char * data;
char dataBuffer[100] = "";
data = dataBuffer;
{
#ifdef _WIN32
WSADATA wsaData;
int wsaDataInit = 0;
#endif
int recvResult;
struct sockaddr_in service;
char *replace;
SOCKET connectSocket = INVALID_SOCKET;
size_t dataLen = strlen(data);
do
{
#ifdef _WIN32
if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
{
break;
}
wsaDataInit = 1;
#endif
/* POTENTIAL FLAW: Read data using a connect socket */
connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (connectSocket == INVALID_SOCKET)
{
break;
}
memset(&service, 0, sizeof(service));
service.sin_family = AF_INET;
service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
service.sin_port = htons(TCP_PORT);
if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
{
break;
}
/* Abort on error or the connection was closed, make sure to recv one
* less char than is in the recv_buf in order to append a terminator */
/* Abort on error or the connection was closed */
recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (100 - dataLen - 1), 0);
if (recvResult == SOCKET_ERROR || recvResult == 0)
{
break;
}
/* Append null terminator */
data[dataLen + recvResult / sizeof(char)] = '\0';
/* Eliminate CRLF */
replace = strchr(data, '\r');
if (replace)
{
*replace = '\0';
}
replace = strchr(data, '\n');
if (replace)
<|fim▁hole|> while (0);
if (connectSocket != INVALID_SOCKET)
{
CLOSE_SOCKET(connectSocket);
}
#ifdef _WIN32
if (wsaDataInit)
{
WSACleanup();
}
#endif
}
CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_base* baseObject = new CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_bad;
baseObject->action(data);
delete baseObject;
}
#endif /* OMITBAD */
#ifndef OMITGOOD
/* goodG2B uses the GoodSource with the BadSink */
static void goodG2B()
{
char * data;
char dataBuffer[100] = "";
data = dataBuffer;
/* FIX: Use a fixed string that does not contain a format specifier */
strcpy(data, "fixedstringtest");
CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_base* baseObject = new CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_goodG2B;
baseObject->action(data);
delete baseObject;
}
/* goodB2G uses the BadSource with the GoodSink */
static void goodB2G()
{
char * data;
char dataBuffer[100] = "";
data = dataBuffer;
{
#ifdef _WIN32
WSADATA wsaData;
int wsaDataInit = 0;
#endif
int recvResult;
struct sockaddr_in service;
char *replace;
SOCKET connectSocket = INVALID_SOCKET;
size_t dataLen = strlen(data);
do
{
#ifdef _WIN32
if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR)
{
break;
}
wsaDataInit = 1;
#endif
/* POTENTIAL FLAW: Read data using a connect socket */
connectSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP);
if (connectSocket == INVALID_SOCKET)
{
break;
}
memset(&service, 0, sizeof(service));
service.sin_family = AF_INET;
service.sin_addr.s_addr = inet_addr(IP_ADDRESS);
service.sin_port = htons(TCP_PORT);
if (connect(connectSocket, (struct sockaddr*)&service, sizeof(service)) == SOCKET_ERROR)
{
break;
}
/* Abort on error or the connection was closed, make sure to recv one
* less char than is in the recv_buf in order to append a terminator */
/* Abort on error or the connection was closed */
recvResult = recv(connectSocket, (char *)(data + dataLen), sizeof(char) * (100 - dataLen - 1), 0);
if (recvResult == SOCKET_ERROR || recvResult == 0)
{
break;
}
/* Append null terminator */
data[dataLen + recvResult / sizeof(char)] = '\0';
/* Eliminate CRLF */
replace = strchr(data, '\r');
if (replace)
{
*replace = '\0';
}
replace = strchr(data, '\n');
if (replace)
{
*replace = '\0';
}
}
while (0);
if (connectSocket != INVALID_SOCKET)
{
CLOSE_SOCKET(connectSocket);
}
#ifdef _WIN32
if (wsaDataInit)
{
WSACleanup();
}
#endif
}
CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_base* baseObject = new CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82_goodB2G;
baseObject->action(data);
delete baseObject;
}
void good()
{
goodG2B();
goodB2G();
}
#endif /* OMITGOOD */
} /* close namespace */
/* Below is the main(). It is only used when building this testcase on
its own for testing or for building a binary to use in testing binary
analysis tools. It is not used when compiling all the testcases as one
application, which is how source code analysis tools are tested. */
#ifdef INCLUDEMAIN
using namespace CWE134_Uncontrolled_Format_String__char_connect_socket_fprintf_82; /* so that we can use good and bad easily */
int main(int argc, char * argv[])
{
/* seed randomness */
srand( (unsigned)time(NULL) );
#ifndef OMITGOOD
printLine("Calling good()...");
good();
printLine("Finished good()");
#endif /* OMITGOOD */
#ifndef OMITBAD
printLine("Calling bad()...");
bad();
printLine("Finished bad()");
#endif /* OMITBAD */
return 0;
}
#endif<|fim▁end|> | {
*replace = '\0';
}
}
|
<|file_name|>contenttype.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2003-2005 Tommi Maekitalo
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* As a special exception, you may use this file as part of a free
* software library without restriction. Specifically, if other files
* instantiate templates or use macros or inline functions from this
* file, or you compile this file and link it with other files to
* produce an executable, this file does not by itself cause the
* resulting executable to be covered by the GNU General Public
* License. This exception does not however invalidate any other
* reasons why the executable file might be covered by the GNU Library
* General Public License.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <tnt/contenttype.h>
#include <tnt/util.h>
#include <iostream>
#include <sstream>
#include <stdexcept>
#include <ctype.h>
#include <algorithm>
#include <cxxtools/log.h>
log_define("tntnet.contenttype")
namespace tnt
{
Contenttype::Contenttype(const std::string& ct)
{
log_debug("Contenttype <= " << ct);
std::istringstream in(ct);
in >> *this;
if (!in)
{
std::ostringstream msg;
msg << "error 1 parsing content-type-header at "
<< in.tellg()
<< ": "
<< ct;
throwRuntimeError(msg.str());
}
if (in.get() != std::ios::traits_type::eof())
{
std::ostringstream msg;
msg << "error 2 parsing content-type-header at "
<< in.tellg()
<< ": "
<< ct;
throwRuntimeError(msg.str());
}
}
Contenttype::return_type Contenttype::onType(
const std::string& t, const std::string& s)
{
log_debug("Contenttype::onType " << t << ", " << s);
if (s.empty())
return FAIL;
type = t;
subtype = s;
std::transform(type.begin(), type.end(), type.begin(),
::tolower);
std::transform(subtype.begin(), subtype.end(), subtype.begin(),
::tolower);
return OK;
}
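  // e.g. parsing "text/html; charset=utf-8" yields type "text", subtype "html",
  // and one parameter pair ("charset", "utf-8") via onParameter below.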
Contenttype::return_type Contenttype::onParameter(
const std::string& attribute, const std::string& value)
{
log_debug("Contenttype::onParameter " << attribute << ", " << value);
std::string att = attribute;
std::transform(att.begin(), att.end(), att.begin(),
::tolower);
parameter.insert(parameter_type::value_type(att, value));<|fim▁hole|> if (attribute == "boundary")
boundary = value;
return OK;
}
}<|fim▁end|> | |
<|file_name|>list_from_json.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A command that reads JSON data and lists it."""
import json
import sys
from googlecloudsdk.calliope import base
class ListFromJson(base.ListCommand):
"""Read JSON data and list it on the standard output.
*{command}* is a test harness for resource output formatting and filtering.
It behaves like any other `gcloud ... list` command except that the resources
are read from a JSON data file.
The input JSON data is either a single resource object or a list of resource
objects of the same type. The resources are printed on the standard output.
The default output format is *json*.
"""
@staticmethod
def Args(parser):
base.URI_FLAG.RemoveFromParser(parser)
parser.add_argument(
'json_file',
metavar='JSON-FILE',
nargs='?',
default=None,
help=('A file containing JSON data for a single resource or a list of'
' resources of the same type. If omitted then the standard input'
' is read.'))
@staticmethod
def GetUriCacheUpdateOp():<|fim▁hole|> """No resource URIs."""
return None
def Run(self, args):
if args.json_file:
with open(args.json_file, 'r') as f:
resources = json.load(f)
else:
resources = json.load(sys.stdin)
return resources
def Format(self, unused_args):
return 'json'<|fim▁end|> | |
<|file_name|>pattern_generator.py<|end_file_name|><|fim▁begin|>#
# This file is part of GreatFET
#
from ..interface import GreatFETInterface
class PatternGenerator(GreatFETInterface):
"""
Class that supports using the GreatFET as a simple pattern generator.
"""
def __init__(self, board, sample_rate=1e6, bus_width=8):
""" Set up a GreatFET pattern generator object. """
# Grab a reference to the board and its pattern-gen API.
self.board = board
self.api = board.apis.pattern_generator
# Grab a reference to the user's bus parameters.
self.sample_rate = int(sample_rate)
self.bus_width = bus_width
# FIXME: These should be read from the board, rather than hardcoded!
self.upload_chunk_size = 2048
self.samples_max = 32 * 1024
def set_sample_rate(self, sample_rate):
""" Updates the generator's sample rates. """
self.sample_rate = int(sample_rate)
def _upload_samples(self, samples):
""" Uploads a collection of samples into the board's sample memory; precedes scan-out of those samples. """
# Iterate over the full set of provided samples, uploading them in chunks.
for offset in range(0, len(samples), self.upload_chunk_size):
chunk = samples[offset:offset + self.upload_chunk_size]
self.api.upload_samples(offset, chunk)
def scan_out_pattern(self, samples, repeat=True):<|fim▁hole|> samples = bytes(samples)
# Upload the samples to be scanned out...
self._upload_samples(samples)
# ... and then trigger the scan-out itself.
self.api.generate_pattern(self.sample_rate, self.bus_width, len(samples), repeat)
def stop(self):
""" Stops the board from scanning out any further samples. """
self.api.stop()
def dump_sgpio_config(self, include_unused=False):
""" Debug function; returns the board's dumped SGPIO configuration. """
self.api.dump_sgpio_configuration(include_unused)
return self.board.read_debug_ring()<|fim▁end|> | """ Sends a collection of fixed samples to the board, and then instructs it to repeatedly """
|
<|file_name|>SPixelPerfectSprite.java<|end_file_name|><|fim▁begin|>package ir.abforce.dinorunner.custom;
import com.makersf.andengine.extension.collisions.entity.sprite.PixelPerfectSprite;
import com.makersf.andengine.extension.collisions.opengl.texture.region.PixelPerfectTextureRegion;
import ir.abforce.dinorunner.managers.RM;<|fim▁hole|> */
public class SPixelPerfectSprite extends PixelPerfectSprite {
public SPixelPerfectSprite(float pX, float pY, PixelPerfectTextureRegion pTextureRegion) {
super(pX, pY, pTextureRegion, RM.VBO);
setScale(RM.S);
}
}<|fim▁end|> |
/**
* Created by Ali Reza on 9/4/15. |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls.defaults import *
urlpatterns = patterns('member.views',
url(r'^$', 'login', name='passport_index'),
url(r'^register/$', 'register', name='passport_register'),
url(r'^login/$', 'login', name='passport_login'),
url(r'^logout/$', 'logout', name='passport_logout'),
url(r'^active/$', 'active', name='passport_active'),
url(r'^forget/$', 'forget', name='passport_forget'),<|fim▁hole|>)<|fim▁end|> | url(r'^profile/$', 'profile', name='passport_profile'), |
<|file_name|>test_handlers.py<|end_file_name|><|fim▁begin|>"""Tests for UnauthenticatedReddit class."""
from __future__ import print_function, unicode_literals
from mock import patch
from praw import handlers
from random import choice
from six.moves import cStringIO
from .helper import PRAWTest, betamax, replace_handler
class HandlerTest(PRAWTest):<|fim▁hole|> def _cache_hit_callback(self, key):
pass
@replace_handler(handlers.RateLimitHandler())
def test_ratelimit_handlers(self):
to_evict = self.r.config[choice(list(self.r.config.API_PATHS.keys()))]
self.assertIs(0, self.r.handler.evict(to_evict))
@betamax()
def test_cache_hit_callback(self):
with patch.object(HandlerTest, '_cache_hit_callback') as mock:
self.r.handler.cache_hit_callback = self._cache_hit_callback
# ensure there won't be a difference in the cache key
self.r.login(self.un, self.un_pswd, disable_warning=True)
before_cache = list(self.r.get_new(limit=5))
after_cache = list(self.r.get_new(limit=5))
self.assertTrue(mock.called)
self.assertEqual(before_cache, after_cache)
self.r.handler.cache_hit_callback = None<|fim▁end|> | def setUp(self):
super(HandlerTest, self).setUp()
self.cache_store = cStringIO()
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># vim: set fileencoding=utf-8 :
"""python-opscripts setup
"""
# Standard library
from __future__ import absolute_import, division, print_function<|fim▁hole|>import os.path
import re
import site
import sys
import glob
# Third-party
from setuptools import find_packages, setup
setup_path = os.path.dirname(os.path.realpath(__file__))
re_info = re.compile(r"""
# Description docstring
^" " "(?P<description>.+)
^" " ".*
# Version variable
__version__\s*=\s*"(?P<version>[^"]+)".*
# Maintainer variable
__maintainer__\s*=\s*"(?P<maintainer>[^"]+)".*
# Maintainer_email variable
__maintainer_email__\s*=\s*"(?P<maintainer_email>[^"]+)".*
# URL variable
__url__\s*=\s*"(?P<url>[^"]+)".*
# License variable
__license__\s*=\s*"(?P<license>[^"]+)".*
""", re.DOTALL | re.MULTILINE | re.VERBOSE)
with open(os.path.join(setup_path, "opscripts/__init__.py"), "rb") as f:
results = re_info.search(f.read().decode("utf-8"))
metadata = results.groupdict()
with open(os.path.join(setup_path, "README.rst"), "rb") as f:
long_description = f.read().decode("utf-8")
install_requires = ["ConfigArgParse"]
classifiers = ["Environment :: Console",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
"Topic :: System :: Systems Administration"]
packages = find_packages()
# Install config file appropriately
docs_path = ""
examples_path = "examples"
if hasattr(sys, "real_prefix"):
docs_path = sys.prefix
elif "--user" in sys.argv:
docs_path = site.USER_BASE
examples_path = os.path.join(docs_path, examples_path)
examples = glob.glob(os.path.join(setup_path, "example*.py"))
docs = [os.path.join(setup_path, "README.rst"),
os.path.join(setup_path, "LICENSE")]
setup(name="OpScripts",
version=metadata["version"],
maintainer=metadata["maintainer"],
maintainer_email=metadata["maintainer_email"],
license=metadata["license"],
description=metadata["description"],
long_description=long_description,
url=metadata["url"],
packages=packages,
data_files=[(docs_path, docs), (examples_path, examples)],
keywords="CLI, DevOps, Ops, sysadmin, Systems administration",
classifiers=classifiers,
download_url="https://github.com/ClockworkNet/OpScripts/releases",
zip_safe=True)<|fim▁end|> | |
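The verbose re_info pattern above packs five named groups into one search over the package __init__; a cut-down, runnable illustration of the same idea on inline sample source:

import re

sample = '"""OpScripts example\n"""\n__version__ = "1.2.3"\n'
pattern = re.compile(r'^"""(?P<description>.+)[\s\S]*?'
                     r'__version__\s*=\s*"(?P<version>[^"]+)"', re.MULTILINE)
print(pattern.search(sample).groupdict())
# {'description': 'OpScripts example', 'version': '1.2.3'}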
<|file_name|>gr-message-scores_test.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright (C) 2016 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import '../../../test/common-test-setup-karma';
import './gr-message-scores';
import {
createChange,
createChangeMessage,
createDetailedLabelInfo,
} from '../../../test/test-data-generators';
import {queryAll, stubFlags} from '../../../test/test-utils';
import {GrMessageScores} from './gr-message-scores';
const basicFixture = fixtureFromElement('gr-message-scores');
suite('gr-message-score tests', () => {
let element: GrMessageScores;
setup(async () => {
element = basicFixture.instantiate();
await element.updateComplete;
});
test('votes', async () => {
element.message = {
...createChangeMessage(),
author: {},
expanded: false,
message: 'Patch Set 1: Verified+1 Code-Review-2 Trybot-Label3+1 Blub+1',
};
element.labelExtremes = {
Verified: {max: 1, min: -1},
'Code-Review': {max: 2, min: -2},
'Trybot-Label3': {max: 3, min: 0},
};
await element.updateComplete;
const scoreChips = queryAll(element, '.score');
assert.equal(scoreChips.length, 3);
assert.isTrue(scoreChips[0].classList.contains('positive'));
assert.isTrue(scoreChips[0].classList.contains('max'));
assert.isTrue(scoreChips[1].classList.contains('negative'));
assert.isTrue(scoreChips[1].classList.contains('min'));
assert.isTrue(scoreChips[2].classList.contains('positive'));
assert.isFalse(scoreChips[2].classList.contains('min'));
});
test('Uploaded patch set X', async () => {
element.message = {
...createChangeMessage(),
author: {},
expanded: false,
message:
'Uploaded patch set 1:' +
'Verified+1 Code-Review-2 Trybot-Label3+1 Blub+1',
};
element.labelExtremes = {
Verified: {max: 1, min: -1},
'Code-Review': {max: 2, min: -2},
'Trybot-Label3': {max: 3, min: 0},
};
await element.updateComplete;
const scoreChips = queryAll(element, '.score');
assert.equal(scoreChips.length, 3);
assert.isTrue(scoreChips[0].classList.contains('positive'));
assert.isTrue(scoreChips[0].classList.contains('max'));
assert.isTrue(scoreChips[1].classList.contains('negative'));
assert.isTrue(scoreChips[1].classList.contains('min'));
assert.isTrue(scoreChips[2].classList.contains('positive'));
assert.isFalse(scoreChips[2].classList.contains('min'));
});
test('Uploaded and rebased', async () => {
element.message = {
...createChangeMessage(),
author: {},
expanded: false,
message: 'Uploaded patch set 4: Commit-Queue+1: Patch Set 3 was rebased.',
};
element.labelExtremes = {
'Commit-Queue': {max: 2, min: -2},
};
await element.updateComplete;
const scoreChips = queryAll(element, '.score');
assert.equal(scoreChips.length, 1);
assert.isTrue(scoreChips[0].classList.contains('positive'));
});
test('removed votes', async () => {
element.message = {
...createChangeMessage(),
author: {},
expanded: false,
message: 'Patch Set 1: Verified+1 -Code-Review -Commit-Queue',
};
element.labelExtremes = {
Verified: {max: 1, min: -1},
'Code-Review': {max: 2, min: -2},
'Commit-Queue': {max: 3, min: 0},
};
await element.updateComplete;
const scoreChips = queryAll(element, '.score');
assert.equal(scoreChips.length, 3);
assert.isTrue(scoreChips[1].classList.contains('removed'));
assert.isTrue(scoreChips[2].classList.contains('removed'));
});
test('false negative vote', async () => {
element.message = {
...createChangeMessage(),
author: {},
expanded: false,
message: 'Patch Set 1: Cherry Picked from branch stable-2.14.',
};
element.labelExtremes = {};
await element.updateComplete;
const scoreChips = element.shadowRoot?.querySelectorAll('.score');
assert.equal(scoreChips?.length, 0);
});
test('reset vote', async () => {
stubFlags('isEnabled').returns(true);
element = basicFixture.instantiate();
element.change = {
...createChange(),
labels: {
'Commit-Queue': createDetailedLabelInfo(),
'Auto-Submit': createDetailedLabelInfo(),
},
};
element.message = {
...createChangeMessage(),
author: {},
expanded: false,
message: 'Patch Set 10: Auto-Submit+1 -Commit-Queue',
};
element.labelExtremes = {
'Commit-Queue': {max: 2, min: 0},
'Auto-Submit': {max: 1, min: 0},
};
await element.updateComplete;
const triggerChips =
element.shadowRoot?.querySelectorAll('gr-trigger-vote');
assert.equal(triggerChips?.length, 1);
const triggerChip = triggerChips?.[0];
expect(triggerChip).shadowDom.equal(`<div class="container">
<span class="label">Auto-Submit</span>
<gr-vote-chip></gr-vote-chip>
</div>`);
const voteChips = triggerChip?.shadowRoot?.querySelectorAll('gr-vote-chip');
assert.equal(voteChips?.length, 1);
expect(voteChips?.[0]).shadowDom.equal('');
const scoreChips = element.shadowRoot?.querySelectorAll('.score');
assert.equal(scoreChips?.length, 1);
expect(scoreChips?.[0]).dom.equal(`<span class="removed score">
Commit-Queue 0 (vote reset)
</span>`);
});<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>SwingDatePropertyEditor.java<|end_file_name|><|fim▁begin|>/*
* Debrief - the Open Source Maritime Analysis Application
* http://debrief.info
*
* (C) 2000-2014, PlanetMayo Ltd
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the Eclipse Public License v1.0
* (http://www.eclipse.org/legal/epl-v10.html)
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
package MWC.GUI.Properties.Swing;
// Copyright MWC 1999, Debrief 3 Project
// $RCSfile: SwingDatePropertyEditor.java,v $
// @author $Author: Ian.Mayo $
// @version $Revision: 1.3 $
// $Log: SwingDatePropertyEditor.java,v $
// Revision 1.3 2004/11/26 11:32:48 Ian.Mayo
// Moving closer, supporting checking for time resolution
//
// Revision 1.2 2004/05/25 15:29:37 Ian.Mayo
// Commit updates from home
//
// Revision 1.1.1.1 2004/03/04 20:31:20 ian
// no message
//
// Revision 1.1.1.1 2003/07/17 10:07:26 Ian.Mayo
// Initial import
//
// Revision 1.2 2002-05-28 09:25:47+01 ian_mayo
// after switch to new system
//
// Revision 1.1 2002-05-28 09:14:33+01 ian_mayo
// Initial revision
//
// Revision 1.1 2002-04-11 14:01:26+01 ian_mayo
// Initial revision
//
// Revision 1.1 2001-08-31 10:36:55+01 administrator
// Tidied up layout, so all data is displayed when editor panel is first opened
//
// Revision 1.0 2001-07-17 08:43:31+01 administrator
// Initial revision
//
// Revision 1.4 2001-07-12 12:06:59+01 novatech
// use tooltips to show the date format
//
// Revision 1.3 2001-01-21 21:38:23+00 novatech
// handle focusGained = select all text
//
// Revision 1.2 2001-01-17 09:41:37+00 novatech
// factor generic processing to parent class, and provide support for NULL values
//
// Revision 1.1 2001-01-03 13:42:39+00 novatech
// Initial revision
//
// Revision 1.1.1.1 2000/12/12 21:45:37 ianmayo
// initial version
//
// Revision 1.5 2000-10-09 13:35:47+01 ian_mayo
// Switched stack traces to go to log file
//
// Revision 1.4 2000-04-03 10:48:57+01 ian_mayo
// squeeze up the controls
//
// Revision 1.3 2000-02-02 14:25:07+00 ian_mayo
// correct package naming
//
// Revision 1.2 1999-11-23 11:05:03+00 ian_mayo
// further introduction of SWING components
//
// Revision 1.1 1999-11-16 16:07:19+00 ian_mayo
// Initial revision
//
// Revision 1.1 1999-11-16 16:02:29+00 ian_mayo
// Initial revision
//
// Revision 1.2 1999-11-11 18:16:09+00 ian_mayo
// new class, now working
//
// Revision 1.1 1999-10-12 15:36:48+01 ian_mayo
// Initial revision
//
// Revision 1.1 1999-08-26 10:05:48+01 administrator
// Initial revision
//
<|fim▁hole|>import java.awt.event.FocusEvent;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JTextField;
import MWC.GUI.Dialogs.DialogFactory;
import MWC.GenericData.HiResDate;
import MWC.Utilities.TextFormatting.DebriefFormatDateTime;
public class SwingDatePropertyEditor extends
MWC.GUI.Properties.DatePropertyEditor implements java.awt.event.FocusListener
{
/////////////////////////////////////////////////////////////
// member variables
////////////////////////////////////////////////////////////
/**
* field to edit the date
*/
JTextField _theDate;
/**
* field to edit the time
*/
JTextField _theTime;
/**
* label to show the microseconds
*/
JLabel _theMicrosTxt;
/**
* panel to hold everything
*/
JPanel _theHolder;
/////////////////////////////////////////////////////////////
// constructor
////////////////////////////////////////////////////////////
/////////////////////////////////////////////////////////////
// member functions
////////////////////////////////////////////////////////////
/**
* build the editor
*/
public java.awt.Component getCustomEditor()
{
_theHolder = new JPanel();
final java.awt.BorderLayout bl1 = new java.awt.BorderLayout();
bl1.setVgap(0);
bl1.setHgap(0);
final java.awt.BorderLayout bl2 = new java.awt.BorderLayout();
bl2.setVgap(0);
bl2.setHgap(0);
final JPanel lPanel = new JPanel();
lPanel.setLayout(bl1);
final JPanel rPanel = new JPanel();
rPanel.setLayout(bl2);
_theHolder.setLayout(new java.awt.GridLayout(0, 2));
_theDate = new JTextField();
_theDate.setToolTipText("Format: " + NULL_DATE);
_theTime = new JTextField();
_theTime.setToolTipText("Format: " + NULL_TIME);
lPanel.add("Center", new JLabel("Date:", JLabel.RIGHT));
lPanel.add("East", _theDate);
rPanel.add("Center", new JLabel("Time:", JLabel.RIGHT));
rPanel.add("East", _theTime);
_theHolder.add(lPanel);
_theHolder.add(rPanel);
// get the fields to select the full text when they're selected
_theDate.addFocusListener(this);
_theTime.addFocusListener(this);
// right, just see if we are in hi-res DTG editing mode
if (HiResDate.inHiResProcessingMode())
{
// ok, add a button to allow the user to enter DTG data
final JButton editMicros = new JButton("Micros");
editMicros.addActionListener(new ActionListener()
{
public void actionPerformed(final ActionEvent e)
{
editMicrosPressed();
}
});
// ok, we'll also need a label to show the current microseconds value
_theMicrosTxt = new JLabel("..");
_theHolder.add(_theMicrosTxt);
_theHolder.add(editMicros);
}
resetData();
return _theHolder;
}
/**
* user wants to edit the microseconds. give him a popup
*/
void editMicrosPressed()
{
final Integer res = DialogFactory.getInteger("Edit microseconds", "Enter microseconds",(int) _theMicros);
// did user enter anything?
if(res != null)
{
// store the data
_theMicros = res.intValue();
// and update the screen
resetData();
}
}
/**
* get the date text as a string
*/
protected String getDateText()
{
return _theDate.getText();
}
/**
* get the time text as a string
*/
protected String getTimeText()
{
return _theTime.getText();
}
/**
* set the date text in string form
*/
protected void setDateText(final String val)
{
if (_theHolder != null)
{
_theDate.setText(val);
}
}
/**
* set the time text in string form
*/
protected void setTimeText(final String val)
{
if (_theHolder != null)
{
_theTime.setText(val);
}
}
/**
* show the user how many microseconds there are
*
* @param val
*/
protected void setMicroText(final long val)
{
// output the number of microseconds
_theMicrosTxt.setText(DebriefFormatDateTime.formatMicros(new HiResDate(0, val)) + " micros");
}
/////////////////////////////
// focus listener support classes
/////////////////////////////
/**
* Invoked when a component gains the keyboard focus.
*/
public void focusGained(final FocusEvent e)
{
final java.awt.Component c = e.getComponent();
if (c instanceof JTextField)
{
final JTextField jt = (JTextField) c;
jt.setSelectionStart(0);
jt.setSelectionEnd(jt.getText().length());
}
}
/**
* Invoked when a component loses the keyboard focus.
*/
public void focusLost(final FocusEvent e)
{
}
}<|fim▁end|> |
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
|
<|file_name|>fanhaorename.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
""" fanhaorename.py
"""
import os
import os.path
import logging
import fileorganizer
from fileorganizer import _helper
from fileorganizer.replacename import _replacename
__author__ = "Jack Chang <[email protected]>"
<|fim▁hole|> """
result = ""
for c in tag:
if c.isalpha():
result += "[{0}{1}]".format(c.lower(), c.upper())
else:
result += c
return result
def fanhaorename(work_dir,
tag,
exclude=None,
mode=0,
wetrun=False,
this_name=os.path.basename(__file__)):
""" Batch Rename Fanhao
\b
Args:
work_dir (str): Working Directory
tag (str): Fanhao tag
find (str, optional): Regex string to find in filename/foldername
replace (str, optional): Regex string to replace in filename/foldername
exclude (str, optional): Regex string to exclude in matches
mode (int, optional): 0=FILE ONLY, 1=FOLDER ONLY, 2=BOTH
wetrun (bool, optional): Test Run or not
"""
_find_dir = r"(.*)({0})(-|_| )*(\d\d\d)(.*)".format(_tagHelper(tag))
_replace_dir = r"{0}-\4".format(tag)
_find_file = _find_dir + r"(\.(.*))"
_replace_file = _replace_dir + r"\6"
_helper.init_loger()
this_run = "WET" if wetrun else "DRY"
loger = logging.getLogger(this_name)
loger.info("[START] === %s [%s RUN] ===", this_name, this_run)
loger.info("[DO] Rename \"%s\" fanhao in \"%s\"; Mode %s", tag, work_dir,
mode)
if mode in (0, 2): # mode 0 and 2
for item in _replacename(_find_file, _replace_file, work_dir, 0,
exclude):
item.commit() if wetrun else loger.info("%s", item)
if mode in (1, 2): # mode 1 and 2
for item in _replacename(_find_dir, _replace_dir, work_dir, 1,
exclude):
item.commit() if wetrun else loger.info("%s", item)
loger.info("[END] === %s [%s RUN] ===", this_name, this_run)
if __name__ == "__main__":
fileorganizer.cli.cli_fanhaorename()<|fim▁end|> | def _tagHelper(tag):
""" TODO |
<|file_name|>svm_classifier.py<|end_file_name|><|fim▁begin|>from oct2py import octave
from cvxopt import matrix, solvers
from cvxpy import *
import numpy as np
def svm_classifier(X, y,mode):
[num_sample,d]=X.shape
mode_control=np.ones((num_sample,1))
for i in range(num_sample):
if(mode==1):
if(y[i]==-1):
mode_control[i]=0;<|fim▁hole|> mode_control[i]=0;
#[G,beta]= octave.PWL_feature(X, M, beta_type);
A=np.array([[]])
for i in range(num_sample):
s=np.zeros((1,num_sample));
s[0,i]=1;
temp_1=y[i]*np.concatenate((np.array([[1]]),[X[i,:]]),axis=1)
temp_a=-np.concatenate((temp_1,s),axis=1)
if(i==0):
A=temp_a
else:
A=np.concatenate((A,temp_a),axis=0)
dum_concat=-np.concatenate((np.zeros((num_sample,d+1)),np.eye(num_sample)),axis=1)
A=np.concatenate((A,dum_concat),axis=0);
beq=np.zeros((1+d+num_sample,1));
Aeq=np.concatenate((np.zeros((d+1,1)),np.ones((num_sample,1))-mode_control),axis=0);
Aeq=np.diag(Aeq[:,0]);
b=np.concatenate((-np.ones((num_sample,1)), np.zeros((num_sample,1))),axis=0);
gamma=1;
x=Variable(d+num_sample+1,1)
constraints=[A*x<=b, Aeq*x==0]
obj=Minimize(( 0.5*(norm(x[0:d+1:1,0])**2) )+100*(sum_entries(x[d+1:d+1+num_sample:1,0])) )
prob = Problem(obj, constraints)
prob.solve()
x_val=x.value
ypredicted = x_val[0,0]+(X*x_val[1:1+d,0]);
ypredicted=np.sign(ypredicted);
error=np.zeros(y.shape[0]);
for i in range(y.shape[0]):
error[i]=(y[i]-ypredicted[i,0])/2
return (error,x.value[0:d+1],ypredicted)<|fim▁end|> | if(mode==2):
if(y[i]==1):
mode_control[i]=0;
if(mode==4): |
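For orientation, the problem assembled above is the primal soft-margin SVM, written here in LaTeX. Two details of the code to note: the bias b is included in the regularized norm, C is hard-coded to 100, and the Aeq rows additionally pin \xi_i = 0 for the points deactivated by 'mode':

\min_{b,\,w,\,\xi}\ \tfrac{1}{2}\left\lVert (b,w) \right\rVert_2^2 + C\sum_{i=1}^{n} \xi_i
\qquad \text{s.t.} \qquad y_i\,(b + w^{\top} x_i) \ge 1 - \xi_i, \qquad \xi_i \ge 0.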
<|file_name|>KColorChooserMode.py<|end_file_name|><|fim▁begin|># encoding: utf-8
# module PyKDE4.kdeui
# from /usr/lib/python3/dist-packages/PyKDE4/kdeui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdecore as __PyKDE4_kdecore
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
import PyQt4.QtSvg as __PyQt4_QtSvg
<|fim▁hole|>from .int import int
class KColorChooserMode(int):
# no doc
def __init__(self, *args, **kwargs): # real signature unknown
pass
__dict__ = None # (!) real value is ''<|fim▁end|> | |
<|file_name|>stepwise_test.go<|end_file_name|><|fim▁begin|>package userpass
import (
"fmt"
"reflect"
"testing"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/sdk/helper/policyutil"
"github.com/hashicorp/vault/sdk/testing/stepwise"
dockerEnvironment "github.com/hashicorp/vault/sdk/testing/stepwise/environments/docker"
"github.com/mitchellh/mapstructure"
)
func TestAccBackend_stepwise_UserCrud(t *testing.T) {
customPluginName := "my-userpass"
envOptions := &stepwise.MountOptions{
RegistryName: customPluginName,
PluginType: stepwise.PluginTypeCredential,
PluginName: "userpass",
MountPathPrefix: customPluginName,
}
stepwise.Run(t, stepwise.Case{
Environment: dockerEnvironment.NewEnvironment(customPluginName, envOptions),
Steps: []stepwise.Step{
testAccStepwiseUser(t, "web", "password", "foo"),
testAccStepwiseReadUser(t, "web", "foo"),
testAccStepwiseDeleteUser(t, "web"),
testAccStepwiseReadUser(t, "web", ""),
},
})
}
func testAccStepwiseUser(
t *testing.T, name string, password string, policies string) stepwise.Step {
return stepwise.Step{
Operation: stepwise.UpdateOperation,
Path: "users/" + name,
Data: map[string]interface{}{
"password": password,
"policies": policies,
},
}
}
func testAccStepwiseDeleteUser(t *testing.T, name string) stepwise.Step {
return stepwise.Step{
Operation: stepwise.DeleteOperation,
Path: "users/" + name,
}
}<|fim▁hole|> Path: "users/" + name,
Assert: func(resp *api.Secret, err error) error {
if resp == nil {
if policies == "" {
return nil
}
return fmt.Errorf("unexpected nil response")
}
var d struct {
Policies []string `mapstructure:"policies"`
}
if err := mapstructure.Decode(resp.Data, &d); err != nil {
return err
}
expectedPolicies := policyutil.ParsePolicies(policies)
if !reflect.DeepEqual(d.Policies, expectedPolicies) {
return fmt.Errorf("Actual policies: %#v\nExpected policies: %#v", d.Policies, expectedPolicies)
}
return nil
},
}
}<|fim▁end|> |
func testAccStepwiseReadUser(t *testing.T, name string, policies string) stepwise.Step {
return stepwise.Step{
Operation: stepwise.ReadOperation, |
<|file_name|>util_stub.cpp<|end_file_name|><|fim▁begin|>/*****************************************************************************
The Dark Mod GPL Source Code
This file is part of the The Dark Mod Source Code, originally based
on the Doom 3 GPL Source Code as published in 2011.
The Dark Mod Source Code is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the License,
or (at your option) any later version. For details, see LICENSE.TXT.<|fim▁hole|> $Revision: 5122 $ (Revision of last commit)
$Date: 2011-12-11 19:47:31 +0000 (Sun, 11 Dec 2011) $ (Date of last commit)
$Author: greebo $ (Author of last commit)
******************************************************************************/
#include "../../idlib/precompiled.h"
#pragma hdrstop
void EditorPrintConsole(const char *msg) {
}<|fim▁end|> |
Project: The Dark Mod (http://www.thedarkmod.com/)
|
<|file_name|>acf-pro-input.css.js<|end_file_name|><|fim▁begin|>/******/ (function() { // webpackBootstrap
/******/ "use strict";
/******/
/******/ <|fim▁hole|><|fim▁end|> | /******/ })()
; |
<|file_name|>test_minerstat.py<|end_file_name|><|fim▁begin|>from twisted.trial import unittest
from minerstat.service import MinerStatService
from minerstat.rig import Rig
from minerstat.remote import MinerStatRemoteProtocol, Command
from minerstat.utils import Config
from minerstat.miners.claymore import EthClaymoreMiner
from twisted.internet import task, defer
from mock import Mock, create_autospec
import treq
import os
class MinerStatServiceTest(unittest.TestCase):
def setUp(self):
self.clock = task.Clock()
self.clock.spawnProcess = Mock()
treq_mock = create_autospec(treq)
response_mock = Mock()
response_mock.text.return_value = defer.succeed("")
treq_mock.request.return_value = defer.succeed(response_mock)
self.config = Config.default()
self.config.path = "./"
try:
os.makedirs("clients/algo")
except FileExistsError:
pass
self.remote = MinerStatRemoteProtocol(self.config, treq_mock)
self.rig = Rig(self.config, remote=self.remote, reactor=self.clock)
self.rig.start = Mock(return_value=defer.succeed(None))
self.rig.stop = Mock(return_value=defer.succeed(None))
self.service = MinerStatService(self.rig)
def test_init(self):
MinerStatService(self.rig)
@defer.inlineCallbacks
def test_start_stop(self):
yield self.service.startService()
self.service.rig.start.assert_called_with()<|fim▁hole|>
class MinerStatRemoteProtocolTest(unittest.TestCase):
def setUp(self):
self.config = Config("a", "b", "w", "p")
self.prot = MinerStatRemoteProtocol(self.config)
def test_algoinfo(self):
pass
def test_dlconf(self):
pass
def test_send_log(self):
pass
def test_algo_check(self):
pass
def test_dispatch_remote_command(self):
pass
def test_poll_remote(self):
pass
def test_make_full_url(self):
print(self.prot.make_full_url("foobar"))
class CommandTest(unittest.TestCase):
def test_init(self):
command = Command("foo", None)
self.assertTrue(command)
coin = EthClaymoreMiner()
command2 = Command("foo", coin)
self.assertTrue(command2)<|fim▁end|> | yield self.service.stopService()
self.service.rig.stop.assert_called_with()
|
<|file_name|>net.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import logging
import re
import salt.client
from netaddr import IPNetwork, IPAddress
log = logging.getLogger(__name__)
def ping(cluster = None, exclude = None, **kwargs):
"""
Ping all addresses from all addresses on all minions. If cluster is passed,
restrict addresses to public and cluster networks.
Note: Some optimizations could be done here in the multi module (such as
skipping the source and destination when they are the same). However, the
unoptimized version is taking ~2.5 seconds on 18 minions with 72 addresses
for success. Failures take between 6 to 12 seconds. Optimizations should
focus there.
TODO: Convert commented out print statements to log.debug
CLI Example: (Before DeepSea with a cluster configuration)
.. code-block:: bash
sudo salt-run net.ping
or you can run it with exclude
.. code-block:: bash
sudo salt-run net.ping exclude="E@host*,host-osd-name*,192.168.1.1"
(After DeepSea with a cluster configuration)
.. code-block:: bash
sudo salt-run net.ping cluster=ceph
sudo salt-run net.ping ceph
"""
exclude_string = exclude_iplist = None
if exclude:
exclude_string, exclude_iplist = _exclude_filter(exclude)
extra_kwargs = _skip_dunder(kwargs)
    if extra_kwargs:
        print "Unsupported parameters: {}".format(", ".join(extra_kwargs.keys()))
text = re.sub(re.compile("^ {12}", re.MULTILINE), "", '''
salt-run net.ping [cluster] [exclude]
Ping all addresses from all addresses on all minions.
If cluster is specified, restrict addresses to cluster and public networks.
If exclude is specified, remove matching addresses. See Salt compound matchers.
    If an individual IP address is given in exclude, that address is removed as a
    ping target (the 'ping to' side); it is not excluded as a ping source.
Examples:
salt-run net.ping
salt-run net.ping ceph
salt-run net.ping ceph [email protected]
salt-run net.ping cluster=ceph [email protected]
salt-run net.ping [email protected]
salt-run net.ping [email protected]/29
salt-run net.ping exclude="E@host*,host-osd-name*,192.168.1.1"
''')
print text
return
local = salt.client.LocalClient()
if cluster:
search = "I@cluster:{}".format(cluster)
if exclude_string:
search += " and not ( " + exclude_string + " )"
log.debug( "ping: search {} ".format(search))
networks = local.cmd(search , 'pillar.item', [ 'cluster_network', 'public_network' ], expr_form="compound")
#print networks
total = local.cmd(search , 'grains.get', [ 'ipv4' ], expr_form="compound")
#print addresses
addresses = []
for host in sorted(total.iterkeys()):
if 'cluster_network' in networks[host]:
addresses.extend(_address(total[host], networks[host]['cluster_network']))
if 'public_network' in networks[host]:
addresses.extend(_address(total[host], networks[host]['public_network']))
else:
search = "*"
if exclude_string:
search += " and not ( " + exclude_string + " )"
log.debug( "ping: search {} ".format(search))
addresses = local.cmd(search , 'grains.get', [ 'ipv4' ], expr_form="compound")
addresses = _flatten(addresses.values())
# Lazy loopback removal - use ipaddress when adding IPv6
try:
if addresses:
addresses.remove('127.0.0.1')
if exclude_iplist:
for ex_ip in exclude_iplist:
log.debug( "ping: removing {} ip ".format(ex_ip))
addresses.remove(ex_ip)
except ValueError:
log.debug( "ping: remove {} ip doesn't exist".format(ex_ip))
pass
#print addresses
results = local.cmd(search, 'multi.ping', addresses, expr_form="compound")
#print results
_summarize(len(addresses), results)
def _address(addresses, network):
"""
Return all addresses in the given network
Note: list comprehension vs. netaddr vs. simple
"""
matched = []
for address in addresses:
if IPAddress(address) in IPNetwork(network):
matched.append(address)
return matched
def _exclude_filter(excluded):
"""
Internal exclude_filter return string in compound format
Compound format = {'G': 'grain', 'P': 'grain_pcre', 'I': 'pillar',
'J': 'pillar_pcre', 'L': 'list', 'N': None,
'S': 'ipcidr', 'E': 'pcre'}
IPV4 address = "255.255.255.255"
hostname = "myhostname"
"""
log.debug( "_exclude_filter: excluding {}".format(excluded))
excluded = excluded.split(",")
log.debug( "_exclude_filter: split ',' {}".format(excluded))
pattern_compound = re.compile("^.*([GPIJLNSE]\@).*$")
pattern_iplist = re.compile( "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$" )
pattern_ipcidr = re.compile( "^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])(\/([0-9]|[1-2][0-9]|3[0-2]))$")
pattern_hostlist = re.compile( "^(([a-zA-Z]|[a-zA-Z][a-zA-Z0-9-]*[a-zA-Z0-9]).)*([A-Za-z]|[A-Za-z][A-Za-z0-9-]*[A-Za-z0-9])$")
compound = []
ipcidr = []
iplist = []
hostlist = []
regex_list = []
for para in excluded:
if pattern_compound.match(para):
log.debug( "_exclude_filter: Compound {}".format(para))
compound.append(para)
elif pattern_iplist.match(para):
log.debug( "_exclude_filter: ip {}".format(para))
iplist.append(para)
elif pattern_ipcidr.match(para):
log.debug( "_exclude_filter: ipcidr {}".format(para))
ipcidr.append("S@"+para)
elif pattern_hostlist.match(para):
hostlist.append("L@"+para)
log.debug( "_exclude_filter: hostname {}".format(para))
else:
regex_list.append("E@"+para)
log.debug( "_exclude_filter: not sure but likely Regex host {}".format(para))
#if ipcidr:
# log.debug("_exclude_filter ip subnet is not working yet ... = {}".format(ipcidr))
new_compound_excluded = " or ".join(compound + hostlist + regex_list + ipcidr)
log.debug("_exclude_filter new formed compound excluded list = {}".format(new_compound_excluded))
if new_compound_excluded and iplist:
return new_compound_excluded, iplist
elif new_compound_excluded:
return new_compound_excluded, None
elif iplist:
return None, iplist
else:
return None, None
def _flatten(l):
"""
    Flatten an array of arrays
"""
log.debug( "_flatten: {}".format(l))
return list(set(item for sublist in l for item in sublist))
def _summarize(total, results):
"""
Summarize the successes, failures and errors across all minions
"""
success = []
failed = []
errored = []
slow = []
log.debug( "_summarize: results {}".format(results))
for host in sorted(results.iterkeys()):
if results[host]['succeeded'] == total:
success.append(host)
if 'failed' in results[host]:
failed.append("{} from {}".format(results[host]['failed'], host))
if 'errored' in results[host]:
errored.append("{} from {}".format(results[host]['errored'], host))
if 'slow' in results[host]:
slow.append("{} from {} average rtt {}".format(results[host]['slow'], host, "{0:.2f}".format(results[host]['avg'])))
if success:<|fim▁hole|> else:
avg = 0
print "Succeeded: {} addresses from {} minions average rtt {} ms".format(total, len(success), "{0:.2f}".format(avg))
if slow:
print "Warning: \n {}".format("\n ".join(slow))
if failed:
print "Failed: \n {}".format("\n ".join(failed))
if errored:
print "Errored: \n {}".format("\n ".join(errored))
def _skip_dunder(settings):
"""
Skip double underscore keys
"""
return {k:v for k,v in settings.iteritems() if not k.startswith('__')}<|fim▁end|> | avg = sum( results[host].get('avg') for host in results) / len(results) |
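_exclude_filter above sorts each exclude token into "prune this IP from the target list" versus "fold it into the compound matcher"; a condensed, self-contained sketch of that classification (hostname handling simplified to the regex case, example values invented):

import re

excluded = "E@host*,192.168.128.1,192.168.100.0/24,web01".split(",")
ip_re = re.compile(r"^(\d{1,3}\.){3}\d{1,3}$")
cidr_re = re.compile(r"^(\d{1,3}\.){3}\d{1,3}/\d{1,2}$")

compound, iplist = [], []
for para in excluded:
    if re.match(r"^[GPIJLNSE]@", para):
        compound.append(para)         # already a compound matcher token
    elif ip_re.match(para):
        iplist.append(para)           # bare IPs: pruned from the ping targets
    elif cidr_re.match(para):
        compound.append("S@" + para)  # subnets become ipcidr matchers
    else:
        compound.append("E@" + para)  # hostnames/regexes match as pcre
print(" or ".join(compound), iplist)
# E@host* or S@192.168.100.0/24 or E@web01 ['192.168.128.1']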
<|file_name|>hr_contract.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class hr_employee(osv.osv):
_name = "hr.employee"
_description = "Employee"
_inherit = "hr.employee"
def _get_latest_contract(self, cr, uid, ids, field_name, args, context=None):
res = {}
obj_contract = self.pool.get('hr.contract')
for emp in self.browse(cr, uid, ids, context=context):
contract_ids = obj_contract.search(cr, uid, [('employee_id','=',emp.id),], order='date_start', context=context)
if contract_ids:
res[emp.id] = contract_ids[-1:][0]
else:
res[emp.id] = False
return res
_columns = {
'manager': fields.boolean('Is a Manager'),
'medic_exam': fields.date('Medical Examination Date'),
'place_of_birth': fields.char('Place of Birth', size=30),
'children': fields.integer('Number of Children'),
'vehicle': fields.char('Company Vehicle', size=64),
'vehicle_distance': fields.integer('Home-Work Dist.', help="In kilometers"),
'contract_ids': fields.one2many('hr.contract', 'employee_id', 'Contracts'),
'contract_id':fields.function(_get_latest_contract, string='Contract', type='many2one', relation="hr.contract", help='Latest contract of the employee'),
}
class hr_contract_type(osv.osv):
_name = 'hr.contract.type'
_description = 'Contract Type'
_columns = {
'name': fields.char('Contract Type', size=32, required=True),
}
class hr_contract(osv.osv):
_name = 'hr.contract'
_description = 'Contract'
_columns = {
'name': fields.char('Contract Reference', size=64, required=True),
'employee_id': fields.many2one('hr.employee', "Employee", required=True),
'department_id': fields.related('employee_id','department_id', type='many2one', relation='hr.department', string="Department", readonly=True),
'type_id': fields.many2one('hr.contract.type', "Contract Type", required=True),
'job_id': fields.many2one('hr.job', 'Job Title'),
'date_start': fields.date('Start Date', required=True),<|fim▁hole|> 'trial_date_end': fields.date('Trial End Date'),
'working_hours': fields.many2one('resource.calendar','Working Schedule'),
'wage': fields.float('Wage', digits=(16,2), required=True, help="Basic Salary of the employee"),
'advantages': fields.text('Advantages'),
'notes': fields.text('Notes'),
'permit_no': fields.char('Work Permit No', size=256, required=False, readonly=False),
'visa_no': fields.char('Visa No', size=64, required=False, readonly=False),
'visa_expire': fields.date('Visa Expire Date'),
}
def _get_type(self, cr, uid, context=None):
type_ids = self.pool.get('hr.contract.type').search(cr, uid, [('name', '=', 'Employee')])
return type_ids and type_ids[0] or False
_defaults = {
'date_start': lambda *a: time.strftime("%Y-%m-%d"),
'type_id': _get_type
}
def onchange_employee_id(self, cr, uid, ids, employee_id, context=None):
if not employee_id:
return {'value': {'job_id': False}}
emp_obj = self.pool.get('hr.employee').browse(cr, uid, employee_id, context=context)
job_id = False
if emp_obj.job_id:
job_id = emp_obj.job_id.id
return {'value': {'job_id': job_id}}
def _check_dates(self, cr, uid, ids, context=None):
for contract in self.read(cr, uid, ids, ['date_start', 'date_end'], context=context):
if contract['date_start'] and contract['date_end'] and contract['date_start'] > contract['date_end']:
return False
return True
_constraints = [
(_check_dates, 'Error! Contract start-date must be less than contract end-date.', ['date_start', 'date_end'])
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<|fim▁end|> | 'date_end': fields.date('End Date'),
'trial_date_start': fields.date('Trial Start Date'), |
<|file_name|>test_models.py<|end_file_name|><|fim▁begin|>import datetime
from decimal import Decimal<|fim▁hole|> Transaction,
)
class TransactionTestCase(TestCase):
def test_unicode(self):
trans = self.factory.make_one(
Transaction, memo=u'Sublime purchase',
date=datetime.date(2013, 2, 5), amount=Decimal('59.95'),
currency=u'EUR')
self.assertEqual(u'2013-02-05 59.95 EUR - Sublime purchase',
unicode(trans))
def test_factory_makes_category(self):
transaction = self.factory.make_one(Transaction)
self.assertIsNotNone(transaction.category)<|fim▁end|> | from django_factory import TestCase
from financial_transactions.models import ( |
<|file_name|>default_settings.py<|end_file_name|><|fim▁begin|>#from django.conf import settings<|fim▁hole|>
NOTICE_FROM_ANONYMOUS = 1<|fim▁end|> | |
<|file_name|>endpoint.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2017 TypeFox and others.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*/
import URI from "./uri";
/**
 * An endpoint provides URLs for http and ws, based on configuration and defaults.
*/
export class Endpoint {
constructor(protected options: Endpoint.Options = {}) {
}
getWebSocketUrl(): URI {
return new URI(`${this.wsScheme}://${this.host}${this.path}`)
}
getRestUrl(): URI {
return new URI(`${this.httpScheme}://${this.host}${this.path}`)
}
protected get host() {
return location.host || "127.0.0.1:3000"
}
protected get wsScheme() {
        return this.httpScheme === 'https' ? 'wss' : 'ws';
}<|fim▁hole|> return this.options.httpScheme
}
if (location.protocol === 'http' || location.protocol === 'https') {
return location.protocol
}
return 'http'
}
protected get path() {
if (this.options.path) {
if (this.options.path.startsWith("/")) {
return this.options.path
} else {
return '/' + this.options.path
}
}
return this.options.path || ""
}
}
export namespace Endpoint {
export class Options {
host?: string
wsScheme?: string
httpScheme?: string
path?: string
}
}<|fim▁end|> |
protected get httpScheme() {
if (this.options.httpScheme) { |
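Spelled out, the scheme resolution that getWebSocketUrl/getRestUrl rely on is: explicit option first, then the page protocol, then a default, with the ws scheme derived from the http one. A small Python sketch of that decision logic (not Theia's actual API):

def http_scheme(option=None, page_protocol=None):
    if option:                                # explicit configuration wins
        return option
    if page_protocol in ('http:', 'https:'):  # location.protocol keeps the colon
        return page_protocol[:-1]
    return 'http'                             # fallback default

def ws_scheme(option=None, page_protocol=None):
    return 'wss' if http_scheme(option, page_protocol) == 'https' else 'ws'

assert ws_scheme(page_protocol='https:') == 'wss'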
<|file_name|>list.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Jeremy Letang.
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use api::context::Context;
use backit::responses;
use db::repositories::repo as repo_repo;
use iron::{Request, Response, IronResult};
use serde_json;
use std::error::Error;
// get /api/v1/tags
pub fn list(ctx: Context, _: &mut Request) -> IronResult<Response> {
let db = &mut *ctx.db.get().expect("cannot get sqlite connection from the context");<|fim▁hole|> Err(e) => responses::internal_error(e.description()),
}
}<|fim▁end|> | match repo_repo::list_for_user_id(db, &*ctx.user.id) {
Ok(l) => responses::ok(serde_json::to_string(&l).unwrap()), |
<|file_name|>KVBase.cpp<|end_file_name|><|fim▁begin|>/***************************************************************************
$Id: KVBase.cpp,v 1.57 2009/04/22 09:38:39 franklan Exp $
kvbase.cpp - description
-------------------
begin : Thu May 16 2002
copyright : (C) 2002 by J.D. Frankland
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
#include <cassert>
#include "Riostream.h"
#include "TMath.h"
#include "TFile.h"
#include "KVBase.h"
#include "TClass.h"
#include "KVString.h"
#include "TSystem.h"
#include "TInterpreter.h"
#include "TEnv.h"
#include "TPluginManager.h"
#include "KVNameValueList.h"
#include "TSystemDirectory.h"
#include "KVVersion.h"
#ifdef WITH_BZR_INFOS
#include "KVBzrInfo.h"
#endif
#ifdef WITH_GIT_INFOS
#include "KVGitInfo.h"
#endif
#include "TROOT.h"
#include "TDatime.h"
#include "THashList.h"
#include "TError.h"
#include "TGMimeTypes.h"
#include "TGClient.h"
#include "TContextMenu.h"
#include <TKey.h>
#include "TTree.h"
#ifdef WITH_GRULIB
#include "GNetClientRoot.h"
#endif
using namespace std;
ClassImp(KVBase)
////////////////////////////////////////////////////////////////////////////////
// BEGIN_HTML <!--
/* -->
<h2>KVBase</h2>
<h4>Base class for KaliVeda framework</h4>
This is the base class for many classes in the KaliVeda framework. Each
KVBase object has<br>
<ul>
<li>a name - Get/SetName()</li>
<li>a type - Get/SetType()</li>
<li>a number - Get/SetNumber()</li>
<li>a label - Get/SetLabel()<br>
</li>
</ul>
When objects are accessed through a TObject/TNamed base pointer, it is possible
to test whether an object is derived from KVBase, using the bit KVBase::kIsKaliVedaObject:
<code>
TObject* ob = (address of some object)
if( ob->TestBit( KVBase::kIsKaliVedaObject ) ){
</code>
This class also provides a number of general utilities, often as static
(stand-alone) methods.<br>
<h3>KaliVeda build/installation information</h3>
The static methods<br>
<pre>KVBase::GetKVSourceDir()<br>KVBase::GetKVBuildDate()<br>KVBase::GetKVBuildUser()<br></pre>
give info on the sources, when and where they were built, and by whom.<br>
The static methods<br>
<pre>KVBase::GetKVVersion()<br>KVBase::GetKVRoot()<br>KVBase::GetKVRootDir()<br>KVBase::GetKVBinDir()<br>KVBase::GetKVFilesDir()<br></pre>
give info on the version of KaliVeda, the environment variable $KVROOT,
and the paths to the installation directories.<br>
<h3>Initialisation</h3>
The entire KaliVeda framework is initialised by the static method<br>
<pre>KVBase::InitEnvironment()<br></pre>
<h3>Finding/opening files</h3>
Static methods for easily locating and/or opening files within the
KaliVeda installation tree (under $KVROOT) are given:<br>
<pre>KVBase::SearchKVFile(...)<br>KVBase::SearchAndOpenKVFile(...)<br></pre>
Note that in the second case, two methods exist: one for reading, the
other for writing the (ascii) files. A global function for searching
files is also defined:<br>
<pre>Bool_t SearchFile(const Char_t* name, TString& fullpath, int ndirs, ...)<br></pre>
This will search for a
file in an arbitrary number of locations, return kTRUE if file is found
and put full path to file in 'fullpath':<br>
<ul>
<li> 'name' is a filename (not an absolute pathname) i.e. "toto.dat"</li>
<li> 'fullpath' will contain the full path to the
file if it is found (if file not found, fullpath="")</li>
<li> 'ndirs' is the number of directories to
search in<br>
</li>
</ul>
The remaining arguments are the names of 'ndirs' paths to search in,
i.e.<br>
<pre>SearchFile("toto.dat", fullpath, 2, gSystem->pwd(), gSystem->HomeDirectory());</pre>
means: search for a file 'toto.dat' in current working directory, then
user's home directory.<br>
<pre>SearchFile("toto.dat", fullpath, 3, KVBase::GetKVFilesDir(), KVBase::GetKVRootDir(), gRootDir);</pre>
means: search for a file 'toto.dat' in $KVROOT/KVFiles, in $KVROOT, and
finally in $ROOTSYS.<br>
<h3>Finding class source files</h3>
Source files for a class can be found using static method<br>
<pre>KVBase::FindClassSourceFiles(...)<br></pre>
It will look for appropriately-named files corresponding to the header
& implementation file of a class, testing several popular suffixes
in each case.<br>
<h3>Finding executables</h3>
To find an executable in the current user's 'PATH' (or elsewhere), use
static method<br>
<pre>KVBase::FindExecutable(...)<br></pre>
<h3>Temporary files</h3>
The static methods<br>
<pre>KVBase::GetTempFileName(...)<br>KVBase::OpenTempFile(...)<br></pre>
can generate and handle uniquely-named temporary (ascii) files.<br>
<h3>Backing-up files</h3>
The static method<br>
<pre>KVBase::BackupFileWithDate(...)<br></pre>
can be used to create a dated backup of an existing file before it is
replaced with a new version.<br>
<h3>Handling plugins</h3>
As plugins are extensively used in the KaliVeda framework, a few
utilities for handling them are defined. They are static methods<br>
<pre>KVBase::LoadPlugin(...)<br>KVBase::GetPluginURI(...)<br></pre>
<!-- */
// --> END_HTML
////////////////////////////////////////////////////////////////////////////////
#define xstr(s) str(s)
#define str(s) #s
UInt_t KVBase::fNbObj = 0;
TString KVBase::fWorkingDirectory = "$(HOME)/.kaliveda";
Bool_t KVBase::fEnvIsInit = kFALSE;
const Char_t* KVBase::GetETCDIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", xstr(ETCDIR), namefile);
return Form("%s", xstr(ETCDIR));
}
const Char_t* KVBase::GetDATADIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", xstr(DATADIR), namefile);
return Form("%s", xstr(DATADIR));
}
const Char_t* KVBase::GetTEMPLATEDIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", xstr(TEMPLATEDIR), namefile);
return Form("%s", xstr(TEMPLATEDIR));
}
const Char_t* KVBase::GetDATABASEFilePath()
{
return Form("%s/db", fWorkingDirectory.Data());
}
const Char_t* KVBase::GetLIBDIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", xstr(LIBDIR), namefile);
return Form("%s", xstr(LIBDIR));
}
const Char_t* KVBase::GetINCDIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", xstr(INCDIR), namefile);
return Form("%s", xstr(INCDIR));
}
const Char_t* KVBase::GetBINDIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", xstr(BINDIR), namefile);
return Form("%s", xstr(BINDIR));
}
const Char_t* KVBase::GetWORKDIRFilePath(const Char_t* namefile)
{
if (strcmp(namefile, "")) return Form("%s/%s", fWorkingDirectory.Data(), namefile);
return fWorkingDirectory;
}
//_______________
void KVBase::init()
{
//Default initialisation
InitEnvironment();
fNumber = 0;
fNbObj++;
fLabel = "";
SetBit(kIsKaliVedaObject);
}
void KVBase::InitEnvironment()
{
// STATIC method to Initialise KaliVeda environment
// Reads config files in $(pkgdatadir)/etc and sets up environment
// (data repositories, datasets, etc. etc.)
// Adds directory where kaliveda shared libs are installed to the dynamic
// path - for finding and loading plugins (even those which are in libkaliveda.so)
// Resets the gRandom random number sequence using a clock-based seed
// (i.e. random sequences do not repeat).
#ifdef WITH_GNU_INSTALL
// Sets location of user's working directory which is by default
// $(HOME)/.kaliveda
// but can be changed with variable
// KaliVeda.WorkingDirectory: [directory]
// in configuration file. [directory] must be an absolute pathname,
// can use shell variables like $(HOME).
//
#endif
// Normally, the first object created which inherits from KVBase will
// perform this initialisation; if you need to set up the environment before
// creating a KVBase object, or if you just want to be absolutely sure that
// the environment has been initialised, you can call this method.
if (!fEnvIsInit) {//test if environment already initialised
// Add path to kaliveda libraries to dynamic loader path
// This is needed to find plugins
// and also to be able to compile with kaliveda in the interpreter
TString libdir = GetLIBDIRFilePath();
gSystem->AddDynamicPath(libdir);
// force re-reading of rootmap files in new dynamic path
gInterpreter->LoadLibraryMap();
// Add path to kaliveda header files
// This is needed to be able to compile with kaliveda in the interpreter
TString incdir = GetINCDIRFilePath();
incdir.Prepend("-I");
gSystem->AddIncludePath(incdir);
//set up environment using kvrootrc file
if (!gEnv->Defined("DataSet.DatabaseFile")) {
ReadConfigFiles();
}
#ifdef WITH_GNU_INSTALL
// set working directory & create if needed
fWorkingDirectory = gEnv->GetValue("KaliVeda.WorkingDirectory", "$(HOME)/.kaliveda");
gSystem->ExpandPathName(fWorkingDirectory);
gSystem->mkdir(fWorkingDirectory, kTRUE);
#else
// set environment variable used in database makefiles
fWorkingDirectory = KV_ROOT;
#endif
// set environment variable used in database makefiles
gSystem->Setenv("KV_WORK_DIR", fWorkingDirectory);
//generate new seed from system clock
gRandom->SetSeed(0);
// initialisation has been performed
fEnvIsInit = kTRUE;
}
}
void KVBase::ReadConfigFiles()
{
// Read all configuration files
// System config files are read first in the order they appear in file
// ${ETCDIR}/config.files
// Then we read any of the following files if they exist:
// ${HOME}/.kvrootrc
// ${PWD}/.kvrootrc
TString tmp = GetETCDIRFilePath("config.files");
ifstream conflist;
conflist.open(tmp.Data());
if (!conflist.good()) {
::Fatal("KVBase::ReadConfigFiles", "Cannot open %s", tmp.Data());
return;
}
KVString file;
file.ReadLine(conflist);
conflist.close();
file.Begin(";");
while (!file.End()) {
tmp = GetETCDIRFilePath(file.Next().Data());
//skip over any missing files - this is needed when installing from
//e.g. ubuntu packages if not all packages are installed
if (!gSystem->AccessPathName(tmp.Data())) gEnv->ReadFile(tmp.Data(), kEnvChange);
}
AssignAndDelete(tmp, gSystem->ConcatFileName(gSystem->Getenv("HOME"), ".kvrootrc"));
gEnv->ReadFile(tmp.Data(), kEnvChange);
tmp = "./.kvrootrc";
gEnv->ReadFile(tmp.Data(), kEnvChange);
// load plugin handlers
gROOT->GetPluginManager()->LoadHandlersFromEnv(gEnv);
// load mime types/icon definitions when not in batch (i.e. GUI-less) mode
if (!gROOT->IsBatch()) ReadGUIMimeTypes();
}
//_______________________________________________________________________________
KVBase::KVBase()
{
//Default constructor.
init();
}
//_______________________________________________________________________________
KVBase::KVBase(const Char_t* name, const Char_t* type): TNamed(name, type)
{
//Ctor for object with given name and type.
init();
}
//______________________
KVBase::KVBase(const KVBase& obj) : TNamed()
{
//copy ctor
init();
#if ROOT_VERSION_CODE >= ROOT_VERSION(3,4,0)
obj.Copy(*this);
#else
((KVBase&) obj).Copy(*this);
#endif
}
//_______________________________________________________________________________
KVBase::~KVBase()
{
fNbObj--;
}
//_______________________________________________________________________________
void KVBase::Clear(Option_t* opt)
{
//Clear object properties : name, type/title, number, label
TNamed::Clear(opt);
fNumber = 0;
fLabel = "";
}
//___________________________________________________________________________________
#if ROOT_VERSION_CODE >= ROOT_VERSION(3,4,0)
void KVBase::Copy(TObject& obj) const
#else
void KVBase::Copy(TObject& obj)
#endif
{
//Copy this to obj
//Redefinition of TObject::Copy
TNamed::Copy(obj);
((KVBase&) obj).SetNumber(fNumber);
((KVBase&) obj).SetLabel(fLabel);
}
//____________________________________________________________________________________
void KVBase::Print(Option_t*) const
{
cout << "KVBase object: Name=" << GetName() << " Type=" << GetType();
if (fLabel != "")
cout << " Label=" << GetLabel();
if (fNumber != 0)
cout << " Number=" << GetNumber();
cout << endl;
}
void KVBase::Streamer(TBuffer& R__b)
{
//Backwards compatible streamer for KVBase objects
//Needed to handle 'fLabel' char array in class version 1
//Objects written with version < 3 did not have kIsKaliVedaObject bit set,
//we set it here when reading object.
if (R__b.IsReading()) {
UInt_t R__s, R__c;
Version_t R__v = R__b.ReadVersion(&R__s, &R__c);
if (R__v > 1) {
if (R__v < 4) {
TNamed::Streamer(R__b);
R__b >> fNumber;
R__b >> fLabel;
if (R__v < 3) SetBit(kIsKaliVedaObject);
R__b.CheckByteCount(R__s, R__c, KVBase::IsA());
} else {
//AUTOMATIC STREAMER EVOLUTION FOR CLASS VERSION > 3
KVBase::Class()->ReadBuffer(R__b, this, R__v, R__s, R__c);
}
return;
}
//OLD STREAMER FOR CLASS VERSION 1
TNamed::Streamer(R__b);
R__b >> fNumber;
UInt_t LabelLength;
R__b >> LabelLength;
if (LabelLength) {
Char_t* Label = new Char_t[LabelLength];
R__b.ReadFastArray(Label, LabelLength);
fLabel = Label;
delete[]Label;
}
SetBit(kIsKaliVedaObject);
R__b.CheckByteCount(R__s, R__c, KVBase::IsA());
} else {
KVBase::Class()->WriteBuffer(R__b, this);
}
}
//________________________________________________________________________________//
Bool_t SearchFile(const Char_t* name, TString& fullpath, int ndirs, ...)
{
   //Search for file in an arbitrary number of locations, return kTRUE if file found and put full path to file in 'fullpath'
//
//'name' is a filename (not an absolute pathname) i.e. "toto.dat"
//'fullpath" will contain the full path to the file if it is found (if file not found, fullpath="")
//'ndirs" is the number of directories to search in
//the remaining arguments are the names of 'ndirs' paths to search in, i.e.
//
// SearchFile("toto.dat", fullpath, 2, gSystem->pwd(), gSystem->HomeDirectory());
//
//means: search for a file 'toto.dat' in current working directory, then user's home directory.
if (ndirs <= 0)
return kFALSE;
va_list args;
va_start(args, ndirs);
for (; ndirs; ndirs--) { //loop over directories
AssignAndDelete(fullpath,
gSystem->ConcatFileName(va_arg(args, const char*),
name));
if (!gSystem->AccessPathName(fullpath.Data())) {
va_end(args);
return kTRUE;
}
}
va_end(args);
fullpath = ""; //clear fullpath string to avoid using it by mistake
return kFALSE;
}
Bool_t KVBase::SearchKVFile(const Char_t* name, TString& fullpath,
const Char_t* kvsubdir)
{
//search for files in the following order:
// if 'name' = absolute path the function returns kTRUE if the file exists
// if name != absolute path:
// 1. a. if 'kvsubdir'="" (default) look for file in $(pkgdatadir) directory
// 1. b. if 'kvsubdir'!="" look for file in $(pkgdatadir)/'kvsubdir'
// 2. look for file with this name in user's home directory
// 3. look for file with this name in working directory
//in all cases the function returns kTRUE if the file was found.
//'fullpath' then contains the absolute path to the file
if (gSystem->IsAbsoluteFileName(name)) {
//absolute path
fullpath = name;
return !gSystem->AccessPathName(name);
}
TString kvfile_dir;
if (strcmp(kvsubdir, "")) {
//subdirectory name given
kvfile_dir = GetDATADIRFilePath(kvsubdir);
} else
kvfile_dir = GetDATADIRFilePath();
return SearchFile(name, fullpath, 3, kvfile_dir.Data(),
gSystem->HomeDirectory(), gSystem->pwd());
}
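// Illustrative call (the filename is just an example):
//    TString where;
//    if (KVBase::SearchKVFile("toto.dat", where)) printf("Found: %s\n", where.Data());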
//________________________________________________________________________________//
Bool_t KVBase::SearchAndOpenKVFile(const Char_t* name, ifstream& file, const Char_t* kvsubdir, KVLockfile* locks)
{
//Search and open for READING a file:
//
//search for ascii file (and open it, if found) in the following order:
// if 'name' = absolute path the function returns kTRUE if the file exists
// if name != absolute path:
// 1. a. if 'kvsubdir'="" (default) look for file in $(pkgdatadir) directory
// 1. b. if 'kvsubdir'!="" look for file in $(pkgdatadir)/'kvsubdir'
// 2. look for file with this name in user's home directory
// 3. look for file with this name in working directory
//if the file is not found, kFALSE is returned.
//if the file is found and can be opened, 'file' is then an ifstream connected to the open (ascii) file
//
//LOCKFILE:
//If a KVLockfile pointer is given, we use it to get a lock on the file before opening it.
//If this lock is not successful, the file is not opened and we return an error message.
TString fullpath;
if (SearchKVFile(name, fullpath, kvsubdir)) {
//put lock on file if required
if (locks && !locks->Lock(fullpath.Data())) return kFALSE;
file.open(fullpath.Data());
if (file.good()) {
//cout << "Opened file : " << fullpath.Data() << endl;
return kTRUE;
}
//unlock file if not opened successfully
if (locks) locks->Release();
}
return kFALSE;
}
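// Minimal usage sketch (the filename is an example):
//    ifstream in;
//    if (KVBase::SearchAndOpenKVFile("toto.dat", in)) { /* read from 'in' */ in.close(); }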
//________________________________________________________________________________//
Bool_t KVBase::SearchAndOpenKVFile(const Char_t* name, ofstream& file, const Char_t* kvsubdir, KVLockfile* locks)
{
//Search and CREATE i.e. open for WRITING a file:
//
//open for writing an ascii file in the location determined in the following way:
// if 'name' = absolute path we use the full path
// if name != absolute path:
// 1. a. if 'kvsubdir'="" (default) file will be in $(pkgdatadir) directory
// 1. b. if 'kvsubdir'!="":
// if 'kvsubdir' is an absolute pathname, file in 'kvsubdir'
// if 'kvsubdir' is not an absolute pathname,
// file will be in '$(pkgdatadir)/kvsubdir'
//if an existing file is found, a warning is printed and the existing file 'toto' is renamed
//"toto.date", where 'date' is the current date & time as given by TDatime::AsSQLString
//'file' is then an ofstream connected to the opened file
//
//LOCKFILE:
//If a KVLockfile pointer is given, we use it to get a lock on the file before opening it.
//If this lock is not successful, the file is not opened and we return an error message.
KVString fullpath;
if (gSystem->IsAbsoluteFileName(name)) {
fullpath = name;
} else if (gSystem->IsAbsoluteFileName(kvsubdir)) {
AssignAndDelete(fullpath,
gSystem->ConcatFileName(kvsubdir, name));
} else if (strcmp(kvsubdir, "")) {
KVString path = GetDATADIRFilePath(kvsubdir);
AssignAndDelete(fullpath,
gSystem->ConcatFileName(path.Data(), name));
} else {
fullpath = GetDATADIRFilePath(name);
}
//Backup file if necessary
BackupFileWithDate(fullpath.Data());
//put lock on file if required
if (locks && !locks->Lock(fullpath.Data())) return kFALSE;
file.open(fullpath.Data());
return kTRUE;
}
//________________________________________________________________________________//
void KVBase::BackupFileWithDate(const Char_t* path)
{
//'path' gives the full path (can include environment variables, special symbols)
//to a file which will be renamed with an extension containing the current date and time
//(in SQL format).
//Example:
// KVBase::BackupFileWithDate("$(HOME)/toto.txt")
//The file toto.txt will be renamed toto.txt.2007-05-02_16:22:37
//does the file exist ?
KVString fullpath = path;
gSystem->ExpandPathName(fullpath);
if (!gSystem->AccessPathName(fullpath.Data())) {
//backup file
TDatime now;
KVString date(now.AsSQLString());
date.ReplaceAll(' ', '_');
TString backup = fullpath + "." + date;
//lock both files
KVLockfile lf1(fullpath.Data()), lf2(backup.Data());
if (lf1.Lock() && lf2.Lock()) {
gSystem->Rename(fullpath.Data(), backup.Data());
printf("Info in <KVBase::BackupFileWithDate(const Char_t *)> : Existing file %s renamed %s\n",
fullpath.Data(), backup.Data());
}
}
}
//________________________________________________________________________________//
TPluginHandler* KVBase::LoadPlugin(const Char_t* base, const Char_t* uri)
{
//Load plugin library in order to extend capabilities of base class "base", depending on
//the given uri (these arguments are used to call TPluginManager::FindHandler).
//Returns pointer to TPluginHandler.
//Returns 0 in case of problems.
//does plugin exist for given name ?
TPluginHandler* ph =
(TPluginHandler*) gROOT->GetPluginManager()->FindHandler(base,
uri);
if (!ph)
return 0;
//check plugin library/macro is available
if (ph->CheckPlugin() != 0)
return 0;
//load plugin module
if (ph->LoadPlugin() != 0)
return 0;
return ph;
}
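// Usage sketch, reusing the PHOS example documented for GetPluginURI below;
// the ExecPlugin() call and the cast are assumptions about the plugin ctor:
//    TPluginHandler* ph = KVBase::LoadPlugin("KVIDTelescope", "PHOS");
//    if (ph) TObject* idt = (TObject*)ph->ExecPlugin(0);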
//__________________________________________________________________________________________________________________
void KVBase::OpenTempFile(TString& base, ofstream& fp)
{
//Opens a uniquely-named file in system temp directory (gSystem->TempDirectory)
//Name of file is "basexxxxxxxxxx" where "xxxxxxxxxx" is the current time as returned
//by gSystem->Now().
//After opening file, 'base' contains full path to file.
GetTempFileName(base);
fp.open(base.Data());
}
//__________________________________________________________________________________________________________________
void KVBase::GetTempFileName(TString& base)
{
//When called with base="toto.dat", the returned value of 'base' is
//"/full/path/to/temp/dir/toto.dat15930693"
//i.e. the full path to a file in the system temp directory (gSystem->TempDirectory)
//appended with the current time as returned by gSystem->Now() in order to make
//its name unique
TString tmp1;
AssignAndDelete(tmp1,
gSystem->ConcatFileName(gSystem->TempDirectory(),
base.Data()));
long lnow = (long) gSystem->Now();
base = tmp1 + lnow;
//make sure no existing file with same name
while (!gSystem->AccessPathName(base)) {
base = tmp1 + (++lnow);
}
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetKVVersion()
{
//Returns KaliVeda version string
static TString tmp(KV_VERSION);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetKVBuildUser()
{
// Returns username of person who performed build
static TString tmp(KV_BUILD_USER);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetKVBuildDate()
{
//Returns KaliVeda build date
static TString tmp(KV_BUILD_DATE);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetKVBuildType()
{
//Returns KaliVeda build type (cmake build: Release, Debug, RelWithDebInfo, ...)
static TString tmp(KV_BUILD_TYPE);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetKVSourceDir()
{
//Returns top-level directory of source tree used for build
static TString tmp(KV_SOURCE_DIR);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetKVBuildDir()
{
//Returns top-level directory used for build
static TString tmp(KV_BUILD_DIR);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
#ifdef WITH_GIT_INFOS
const Char_t* KVBase::gitBranch()
{
// Returns git branch of sources
static TString tmp(KV_GIT_BRANCH);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::gitCommit()
{
// Returns last git commit of sources
static TString tmp(KV_GIT_COMMIT);
return tmp.Data();
}
#endif
//__________________________________________________________________________________________________________________
#ifdef WITH_BZR_INFOS
const Char_t* KVBase::bzrRevisionId()
{
// Returns Bazaar branch revision-id of sources
static TString tmp(BZR_REVISION_ID);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::bzrRevisionDate()
{
// Returns date of Bazaar branch revision of sources
static TString tmp(BZR_REVISION_DATE);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::bzrBranchNick()
{
// Returns nickname of Bazaar branch of sources
static TString tmp(BZR_BRANCH_NICK);
return tmp.Data();
}
//__________________________________________________________________________________________________________________
Int_t KVBase::bzrIsBranchClean()
{
// Returns 1 if Bazaar branch of sources contained uncommitted
// changes at time of building; 0 if all changes had been committed.
// WARNING: this doesn't really work (ignore)
return BZR_BRANCH_IS_CLEAN;
}
//__________________________________________________________________________________________________________________
Int_t KVBase::bzrRevisionNumber()
{
// Returns Bazaar branch revision number of sources
return BZR_REVISION_NUMBER;
}
#endif
//__________________________________________________________________________________________________________________
Bool_t KVBase::FindExecutable(TString& exec, const Char_t* path)
{
//By default, FindExecutable(exec) will look for the executable named by 'exec'
//in the directories contained in the environment variable PATH. You can override
//this by giving your own search 'path' as second argument (remember to write
//environment variables as $(PATH), for cross-platform compatibility).
//
//If 'exec' is not found, and if it does not end with '.exe', we look for 'exec.exe'
//This is for compatibility with Windows/cygwin environments.
//
//If the executable is found, returns kTRUE and 'exec' then holds full path to executable.
//Returns kFALSE if exec not found in path.
//
//If 'exec' is an absolute pathname, we return kTRUE if the file exists
//(we do not use 'path').
TString spath(path), backup(exec), backup2(exec), expandexec(exec);
gSystem->ExpandPathName(expandexec);
if (gSystem->IsAbsoluteFileName(expandexec.Data())) {
//executable given as absolute path
//we check if it exists
if (!gSystem->AccessPathName(expandexec)) {
exec = expandexec;
return kTRUE;
} else {
//try with ".exe" in case of Windows system
if (!expandexec.EndsWith(".exe")) {
expandexec += ".exe";
if (!gSystem->AccessPathName(expandexec)) {
exec = expandexec;
return kTRUE;
}
}
}
exec = backup;
return kFALSE;
}
gSystem->ExpandPathName(spath);
if (KVBase::FindFile(spath.Data(), exec))
return kTRUE;
if (!backup.EndsWith(".exe")) {
backup += ".exe";
if (KVBase::FindFile(spath.Data(), backup)) {
exec = backup;
return kTRUE;
}
}
exec = backup2;
return kFALSE;
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::FindFile(const Char_t* search, TString& wfil)
{
//Backwards compatible fix for TSystem::FindFile which only exists from 5.12/00 onwards
//Use this method as a replacement for gSystem->FindFile (same arguments)
#ifdef __WITHOUT_TSYSTEM_FINDFILE
Char_t* result = gSystem->Which(search, wfil.Data());
if (result) {
wfil = result;
delete[]result;
} else {
wfil = "";
}
return wfil.Data();
#else
return gSystem->FindFile(search, wfil);
#endif
}
//__________________________________________________________________________________________________________________
Bool_t KVBase::FindClassSourceFiles(const Char_t* class_name, KVString& imp_file, KVString& dec_file, const Char_t* dir_name)
{
//Look for the source files corresponding to "class_name"
//i.e. taking class_name as a base, we look for one of
// [class_name.C,class_name.cpp,class_name.cxx]
//and one of
// [class_name.h,class_name.hh,class_name.H]
//By default we look in the current working directory, unless argument 'dir_name' is given
//If found, the names of the two files are written in 'imp_file' and 'dec_file'
KVNameValueList impl_alt;
int i = 0;
impl_alt.SetValue("%s.C", i);
impl_alt.SetValue("%s.cpp", i);
impl_alt.SetValue("%s.cxx", i);
KVNameValueList decl_alt;
decl_alt.SetValue("%s.h", i);
decl_alt.SetValue("%s.hh", i);
decl_alt.SetValue("%s.H", i);
TString _dir_name = dir_name;
gSystem->ExpandPathName(_dir_name);
TSystemDirectory dir("LocDir", _dir_name.Data());
TList* lf = dir.GetListOfFiles();
Bool_t ok_imp, ok_dec;
ok_imp = ok_dec = kFALSE;
//look for implementation file
for (i = 0; i < impl_alt.GetNpar(); i++) {
if (lf->FindObject(Form(impl_alt.GetParameter(i)->GetName(), class_name))) {
imp_file = Form(impl_alt.GetParameter(i)->GetName(), class_name);
ok_imp = kTRUE;
}
}
//look for header file
for (i = 0; i < decl_alt.GetNpar(); i++) {
if (lf->FindObject(Form(decl_alt.GetParameter(i)->GetName(), class_name))) {
dec_file = Form(decl_alt.GetParameter(i)->GetName(), class_name);
ok_dec = kTRUE;
}
}
delete lf;
return (ok_imp && ok_dec);
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetPluginURI(const Char_t* base, const Char_t* derived)
{
//Inverse of gPluginMgr->FindHandler(const Char_t* base, const Char_t* uri).
//Given a base class "base" and a derived class "derived", we search gEnv to find the
//URI corresponding to this plugin.
//
//Example: given a plugin such as
//
//Plugin.KVIDTelescope: ^PHOS$ KVIDPhoswich KVIndra "KVIDPhoswich()"
//
//then calling KVBase::GetPluginURI("KVIDTelescope", "KVIDPhoswich") will return "PHOS".
//
//Most of the code is copied from TPluginManager::LoadHandlersFromEnv
TIter next(gEnv->GetTable());
TEnvRec* er;
static TString tmp;
while ((er = (TEnvRec*) next())) {
const char* s;
if ((s = strstr(er->GetName(), "Plugin."))) {
// use s, i.e. skip possible OS and application prefix to Plugin.
// so that GetValue() takes properly care of returning the value
// for the specified OS and/or application
const char* val = gEnv->GetValue(s, (const char*)0);
if (val) {
Int_t cnt = 0;
s += 7;
//is it the right base class ?
if (strcmp(s, base)) continue; //skip to next env var if not right base
char* v = StrDup(val);
while (1) {
TString regexp = strtok(!cnt ? v : 0, "; ");
if (regexp.IsNull()) break;
TString clss = strtok(0, "; ");
if (clss.IsNull()) break;
TString plugin = strtok(0, "; ");
if (plugin.IsNull()) break;
TString ctor = strtok(0, ";\"");
if (!ctor.Contains("("))
ctor = strtok(0, ";\"");
if (clss == derived) {
//found the required plugin
//we remove the 'regexp' operator '^' from the beginning
//and '$' from the end of the URI, if necessary
if (regexp.MaybeRegexp()) {
regexp.Remove(TString::kBoth, '^');
regexp.Remove(TString::kBoth, '$');
}
tmp = regexp;
delete [] v;
return tmp.Data();
}
cnt++;
}
delete [] v;
}
}
}
tmp = "";
return tmp;
}
//__________________________________________________________________________________________________________________
const Char_t* KVBase::GetListOfPlugins(const Char_t* base)
{
// Return whitespace-separated list of all plugin classes defined for
// the given base class.
//
// Most of the code is copied from TPluginManager::LoadHandlersFromEnv
TIter next(gEnv->GetTable());
TEnvRec* er;
static TString tmp;
tmp = "";
while ((er = (TEnvRec*) next())) {
const char* s;
if ((s = strstr(er->GetName(), "Plugin."))) {
// use s, i.e. skip possible OS and application prefix to Plugin.
// so that GetValue() takes properly care of returning the value
// for the specified OS and/or application
const char* val = gEnv->GetValue(s, (const char*)0);
if (val) {
Int_t cnt = 0;
s += 7;
//is it the right base class ?
if (strcmp(s, base)) continue; //skip to next env var if not right base
char* v = StrDup(val);
while (1) {
TString regexp = strtok(!cnt ? v : 0, "; ");
if (regexp.IsNull()) break;
TString clss = strtok(0, "; ");
if (clss.IsNull()) break;
TString plugin = strtok(0, "; ");
if (plugin.IsNull()) break;
TString ctor = strtok(0, ";\"");
if (!ctor.Contains("("))
ctor = strtok(0, ";\"");
tmp += clss;
tmp += " ";
cnt++;
}
delete [] v;
}
}
}
//remove final trailing whitespace
tmp.Remove(TString::kTrailing, ' ');
return tmp;
}
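// Illustrative result, reusing the plugin example from GetPluginURI above
// (the actual list depends on the plugins defined in .kvrootrc/.rootrc):
//    KVBase::GetListOfPlugins("KVIDTelescope") -> "KVIDPhoswich ..."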
//__________________________________________________________________________________________________________________
void KVBase::ReadGUIMimeTypes()
{
// Add to standard ROOT mime types some new ones defined in .kvrootrc
// for icons associated with graphs, runs, etc. by lines such as:
//
// KaliVeda.GUI.MimeTypes : KVIDMap
// KaliVeda.GUI.MimeTypes.KVIDMap.Icon : rootdb_t.xpm
// +KaliVeda.GUI.MimeTypes : KVIDZAGrid
// KaliVeda.GUI.MimeTypes.KVIDZAGrid.Icon : draw_t.xpm
//
// etc.
KVString mimetypes = gEnv->GetValue("KaliVeda.GUI.MimeTypes", "");
if (mimetypes != "") {
mimetypes.Begin(" ");
while (!mimetypes.End()) {
KVString classname = mimetypes.Next(kTRUE);
KVString icon = gEnv->GetValue(Form("KaliVeda.GUI.MimeTypes.%s.Icon", classname.Data()), "draw_t.xpm");
KVString type = classname;
type.ToLower();
if (gClient) gClient->GetMimeTypeList()->AddType(Form("[kaliveda/%s]", type.Data()),
classname.Data(), icon.Data(), icon.Data(), "");
}
}
}
//__________________________________________________________________________________________________________________
#ifdef WITH_GRULIB
Int_t KVBase::TestPorts(Int_t port)
{
// Test ports for availability. Start from 'port' and go up to port+2000 at most.
// Returns -1 if no ports available.
GNetClientRoot testnet((char*) "localhost");
Int_t ret;
ret = port;
for (int i = 0; i < 2000; i++) {
ret = testnet.TestPortFree(port, (char*) "localhost");
if (ret > 0)
break;
if (ret <= 0)
port++;
}
return ret;
}
#endif
Bool_t KVBase::AreEqual(Double_t A, Double_t B, Long64_t maxdif)
{
// Comparison between two 64-bit floating-point values
// Returns kTRUE if the integer representations of the two values are within
// maxdif of each other.
// By default maxdif=1, which means that we consider that x==y if the
// difference between them is no greater than the precision of Double_t
// variables, i.e. 4.94065645841246544e-324
//
// Based on the function AlmostEqual2sComplement(float, float, int)
// by Bruce Dawson http://www.cygnus-software.com/papers/comparingfloats/comparingfloats.htm
union converter {
Double_t f;
Long64_t i;
} zero, val1, val2;
assert(maxdif > 0);
if (A == B) return true;
/* workaround to obtain the (64-bit) constant value 0x8000000000000000
even on 32-bit machines (there is probably an easier way!) */
zero.i = 1;
zero.f = -zero.f;
zero.i -= 1;
val1.f = A;
val2.f = B;
Long64_t Aint, Bint;
Aint = val1.i;
Bint = val2.i;
// remap negative values so that the integer representations are ordered
if (Aint < 0) Aint = zero.i - Aint;
if (Bint < 0) Bint = zero.i - Bint;
// compare the remapped representations: using the raw val1.i/val2.i here
// would give meaningless distances for values of opposite sign
Long64_t intDiff = abs(Aint - Bint);
if (intDiff <= maxdif) return true;
return false;
}
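// Illustrative check (classic rounding example, not from the original code):
// 0.1 + 0.2 == 0.3 is false for doubles, but KVBase::AreEqual(0.1 + 0.2, 0.3)
// returns kTRUE, the two values differing by exactly one unit in the last place.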
Bool_t KVBase::OpenContextMenu(const char* method, TObject* obj, const char* alt_method_name)
{
// Open context menu for given method of object *obj.
// By default title of menu is 'obj->ClassName()::method'
// You can give an alternative method name in 'alt_method_name'
// Returns kFALSE if the given method is not defined for the class of object in question.
//
// WARNING: even if this method returns kTRUE, this is no guarantee that the method
// has indeed been executed. The user may have pressed the 'Cancel' button...
TMethod* m = obj->IsA()->GetMethodAllAny(method);
if (!m) {
obj->Warning("OpenContextMenu", "%s is not a method of %s", method, obj->ClassName());
return kFALSE;
}
TString Method = alt_method_name;
if (Method == "") Method = method;
TContextMenu* cm = new TContextMenu(Method, Form("%s::%s", obj->ClassName(), Method.Data()));
cm->Action(obj, m);
delete cm;
return kTRUE;
}
void KVBase::CombineFiles(const Char_t* file1, const Char_t* file2, const Char_t* newfilename, Bool_t keep)
{
// STATIC method which allows to combine the contents of two ROOT files
// (file1 and file2) into a new ROOT file (newfilename).
// All objects from the two files will be written in the new file.
//
// if keep=kFALSE, the two files will be deleted after the operation
::Info("KVBase::CombineFiles", "Copying all objects from %s and %s ===> into new file %s", file1, file2, newfilename);
TFile* f1 = TFile::Open(file1);
TList objL1;//list of objects in file 1
TList treeL1;//list of trees in file 1
TIter next(f1->GetListOfKeys());
TKey* key;
while ((key = (TKey*)next())) {
if (!TClass::GetClass(key->GetClassName(), kFALSE, kTRUE)->InheritsFrom("TDirectory")) {//avoid subdirectories!
if (!TClass::GetClass(key->GetClassName(), kFALSE, kTRUE)->InheritsFrom("TTree"))
objL1.Add(f1->Get(key->GetName()));
else
treeL1.Add(f1->Get(key->GetName()));
}
}
TFile* f2 = TFile::Open(file2);
TList objL2;//list of objects in file 2
TList treeL2;//list of trees in file 2
TIter next2(f2->GetListOfKeys());
while ((key = (TKey*)next2())) {
if (!TClass::GetClass(key->GetClassName(), kFALSE, kTRUE)->InheritsFrom("TDirectory")) {//avoid subdirectories!
if (!TClass::GetClass(key->GetClassName(), kFALSE, kTRUE)->InheritsFrom("TTree"))
objL2.Add(f2->Get(key->GetName()));
else
treeL2.Add(f2->Get(key->GetName()));
}
}
TFile* newfile = new TFile(newfilename, "recreate");
objL1.Execute("Write", "");
objL2.Execute("Write", "");
if (treeL1.GetEntries()) {
TIter nxtT(&treeL1);
TTree* t;
while ((t = (TTree*)nxtT())) t->CloneTree(-1, "fast")->Write();
}
if (treeL2.GetEntries()) {
TIter nxtT(&treeL2);
TTree* t;
while ((t = (TTree*)nxtT())) t->CloneTree(-1, "fast")->Write();
}
newfile->Close();
f1->Close();
f2->Close();
if (!keep) {
gSystem->Unlink(file1);
gSystem->Unlink(file2);
}
}
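// Illustrative call (the file names are assumptions):
//    KVBase::CombineFiles("run1.root", "run2.root", "all.root", kTRUE);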
TObject* KVBase::GetObject() const
{
// Dummy method (returns NULL).
// This method may be used in 'container' classes used with KVListView.
// In order to open the context menu of the 'contained' object,
// GetLabel() should return the real class of the object,
// and this method should return its address.
// Then call KVListView::SetUseObjLabelAsRealClass(kTRUE).
return NULL;
}
const Char_t* KVBase::GetExampleFilePath(const Char_t* library, const Char_t* namefile)
{
// Return full path to example file for given library (="KVMultiDet", "BackTrack", etc.)
static TString path;
path = KVBase::GetDATADIRFilePath("examples/");
path += library;
path += "/";
path += namefile;
return path.Data();
}
void KVBase::PrintSplashScreen()
{
// Prints welcome message and infos on version etc.
cout << "***********************************************************" <<
endl;
cout << "* HI COQUINE !!! *" <<
endl;
cout << "* *" <<
endl;
cout << "* W E L C O M E to K A L I V E D A *" <<
endl;
cout << "* *" <<
endl;<|fim▁hole|> printf("* Version: %-10s Built: %-10s *\n", KVBase::GetKVVersion(), KVBase::GetKVBuildDate());
#ifdef WITH_BZR_INFOS
TString bzrinfo;
bzrinfo.Form("%s@%d", bzrBranchNick(), bzrRevisionNumber());
printf("* bzr: %50s *\n", bzrinfo.Data());
#endif
#ifdef WITH_GIT_INFOS
TString gitinfo;
gitinfo.Form("%s@%s", gitBranch(), gitCommit());
printf("* git: %-50s *\n", gitinfo.Data());
#endif
cout << "* *" <<
endl;
cout << "* For help, read the doc on : *" <<
endl;
cout << "* http://indra.in2p3.fr/KaliVedaDoc *" <<
endl;
cout << "* *" <<
endl;
cout << "* ENJOY !!! *" <<
endl;
cout << "***********************************************************" <<
endl << endl;
}<|fim▁end|> | |
<|file_name|>generateTOC.js<|end_file_name|><|fim▁begin|>function nextLevel(nodeList, startIndex, hlevel, prefix, tocString)
{
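// Walks consecutive H<hlevel> headings from startIndex, numbers each section
// "prefix + index", appends a matching entry to tocString.s, recurses into
// deeper heading levels and returns the index of the first node that does
// not belong to this level.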
var hIndex = 1;
var i = startIndex;
while (i < nodeList.length) {
var currentNode = nodeList[i];
if (currentNode.tagName != "H"+hlevel)
break;
if (currentNode.className == "no-toc") {
++i;
continue;
}
var sectionString = prefix+hIndex;
// Update the TOC
var text = currentNode.innerHTML;
// Strip off names specified via <a name="..."></a>
var tocText = text.replace(/<a name=[\'\"][^\'\"]*[\'\"]>([^<]*)<\/a>/g, "$1");
tocString.s += "<li class='toc-h"+hlevel+"'><a href='#"+sectionString+"'><span class='secno'>"+sectionString+"</span>"+tocText+"</a></li>\n";
// Modify the header
currentNode.innerHTML = "<span class=secno>"+sectionString+"</span> "+text;
currentNode.id = sectionString;
// traverse children
i = nextLevel(nodeList, i+1, hlevel+1, sectionString+".", tocString);
hIndex++;
}
return i;
}
function generateTOC(toc)
{
var nodeList = $("h2,h3,h4,h5,h6");
var tocString = { s:"<ul class='toc'>\n" };
nextLevel(nodeList, 0, 2, "", tocString);
toc.innerHTML = tocString.s;
// Now position the document, in case a #xxx directive was given
var id = window.location.hash.substring(1);<|fim▁hole|> var target = document.getElementById(id);
if (target) {
var rect = target.getBoundingClientRect();
setTimeout(function() { window.scrollTo(0, rect.top) }, 0);
}
}
}<|fim▁end|> | if (id.length > 0) { |
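// Illustrative invocation (the element id 'toc' is an assumption):
//   window.addEventListener('load', function () {
//     generateTOC(document.getElementById('toc'));
//   });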
<|file_name|>fileUpload.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';<|fim▁hole|> .service('fileUpload', Service);
Service.$inject = ['$http'];
function Service($http) {
this.uploadFile = uploadFile;
////////////////
function uploadFile(file, url, done) {
var fd = new FormData();
fd.append('file',file);
$http.post(url, fd, {
transformRequest: angular.identity,
headers: { 'Content-Type': undefined }
}).success(function (response) {
done(null, response);
}).error(function (e) {
done(e, null);
});
}
}
}
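// Illustrative use from a controller (URL and callback are assumptions):
//   fileUpload.uploadFile(file, '/api/upload', function (err, res) {
//     if (err) { /* handle error */ } else { /* use response */ }
//   });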
})();<|fim▁end|> |
angular
.module('app.web') |
<|file_name|>helper.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import print_function
# python standard library
from socket import socket
import sys, os, re, stat, math, time, datetime
import importlib
# third party modules
try: # unicode monkeypatch for windoze
import win_unicode_console
win_unicode_console.enable()
except:
msg = "Please install the 'win_unicode_console' module."
if os.name == 'nt': print(msg)
try: # os independent color support
from colorama import init, Fore, Back, Style
init() # required to get colors on windoze
except ImportError:
msg = "Please install the 'colorama' module for color support."
# poor man's colored output (ANSI)
class Back():
BLUE = '\x1b[44m' if os.name == 'posix' else ''
CYAN = '\x1b[46m' if os.name == 'posix' else ''
GREEN = '\x1b[42m' if os.name == 'posix' else ''
MAGENTA = '\x1b[45m' if os.name == 'posix' else ''
RED = '\x1b[41m' if os.name == 'posix' else ''
class Fore():
BLUE = '\x1b[34m' if os.name == 'posix' else ''
CYAN = '\x1b[36m' if os.name == 'posix' else ''
MAGENTA = '\x1b[35m' if os.name == 'posix' else ''
YELLOW = '\x1b[33m' if os.name == 'posix' else ''
class Style():
DIM = '\x1b[2m' if os.name == 'posix' else ''
BRIGHT = '\x1b[1m' if os.name == 'posix' else ''
RESET_ALL = '\x1b[0m' if os.name == 'posix' else ''
NORMAL = '\x1b[22m' if os.name == 'posix' else ''
print(Back.RED + msg + Style.RESET_ALL)
# ----------------------------------------------------------------------
# return first item of list or alternative
def item(mylist, alternative=""):
return next(iter(mylist), alternative)
# split list into chunks of equal size
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i+n]
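# illustrative values (not part of the original module):
#   item([], "n/a") -> "n/a" ; item([1, 2]) -> 1
#   list(chunks([1, 2, 3, 4, 5], 2)) -> [[1, 2], [3, 4], [5]]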
# ----------------------------------------------------------------------
class log():
# open logfile
def open(self, filename):
try:
return open(filename, mode='wb')
except IOError as e:
output().errmsg("Cannot open logfile", e)
return None
# write raw data to logfile
def write(self, logfile, data):
# logfile open and data non-empty
if logfile and data:
try:
logfile.write(data)
except IOError as e:
output().errmsg("Cannot log", e)
# write comment to logfile
def comment(self, logfile, line):
comment = "%" + ("[ " + line + " ]").center(72, '-')
self.write(logfile, os.linesep + comment + os.linesep)
# close logfile
def close(self, logfile):
try:
logfile.close()
except IOError as e:
output().errmsg("Cannot close logfile", e)
# ----------------------------------------------------------------------
class output():
# show send commands (debug mode)
def send(self, str, mode):
if str: print(Back.CYAN + str + Style.RESET_ALL)
if str and mode == 'hex':
print(Fore.CYAN + conv().hex(str, ':') + Style.RESET_ALL)
# show recv commands (debug mode)
def recv(self, str, mode):
if str: print(Back.MAGENTA + str + Style.RESET_ALL)
if str and mode == 'hex':
print(Fore.MAGENTA + conv().hex(str, ':') + Style.RESET_ALL)
# show information
def info(self, msg, eol=None):
if msg: print(Back.BLUE + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show raw data
def raw(self, msg, eol=None):
if msg: print(Fore.YELLOW + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show chit-chat
def chitchat(self, msg, eol=None):
if msg: print(Style.DIM + msg + Style.RESET_ALL, end=eol)
sys.stdout.flush()
# show warning message
def warning(self, msg):
if msg: print(Back.RED + msg + Style.RESET_ALL)
# show green message
def green(self, msg):
if msg: print(Back.GREEN + msg + Style.RESET_ALL)
# show error message
def errmsg(self, msg, info=""):
info = str(info).strip()
if info: # monkeypatch to make python error message less ugly
info = item(re.findall(r'Errno -?\d+\] (.*)', info), '') or info.splitlines()[-1]
info = Style.RESET_ALL + Style.DIM + " (" + info.strip('<>') + ")" + Style.RESET_ALL
if msg: print(Back.RED + msg + info)
# show printer and status
def discover(self, entry): # entry = (ipaddr, (device, uptime, status, prstat))
(ipaddr, (device, uptime, status, prstat)) = entry
ipaddr = output().strfit(ipaddr, 15)
device = output().strfit(device, 27)
uptime = output().strfit(uptime, 8)
status = output().strfit(status, 23)
if device.strip() != 'device': device = Style.BRIGHT + device + Style.NORMAL
if prstat == '1': status = Back.GREEN + status + Back.BLUE # unknown
if prstat == '2': status = Back.GREEN + status + Back.BLUE # running
if prstat == '3': status = Back.YELLOW + status + Back.BLUE # warning
if prstat == '4': status = Back.GREEN + status + Back.BLUE # testing
if prstat == '5': status = Back.RED + status + Back.BLUE # down
line = (ipaddr, device, uptime, status)
output().info('%-15s %-27s %-8s %-23s' % line)
# recursively list files
def psfind(self, name):
vol = Style.DIM + Fore.YELLOW + item(re.findall("^(%.*%)", name)) + Style.RESET_ALL
name = Fore.YELLOW + const.SEP + re.sub("^(%.*%)", '', name) + Style.RESET_ALL
print("%s %s" % (vol, name))
# show directory listing
def psdir(self, isdir, size, mtime, name, otime):
otime = Style.DIM + "(created " + otime + ")" + Style.RESET_ALL
vol = Style.DIM + Fore.YELLOW + item(re.findall("^(%.*%)", name)) + Style.RESET_ALL
name = re.sub("^(%.*%)", '', name) # remove volume information from filename
name = Style.BRIGHT + Fore.BLUE + name + Style.RESET_ALL if isdir else name
if isdir: print("d %8s %s %s %s %s" % (size, mtime, otime, vol, name))
else: print("- %8s %s %s %s %s" % (size, mtime, otime, vol, name))
# show directory listing
def pjldir(self, name, size):
name = name if size else Style.BRIGHT + Fore.BLUE + name + Style.RESET_ALL
if size: print("- %8s %s" % (size, name))
else: print("d %8s %s" % ("-", name))
# show directory listing
def pcldir(self, size, mtime, id, name):
id = Style.DIM + "(macro id: " + id + ")" + Style.RESET_ALL
print("- %8s %s %s %s" % (size, mtime, id, name))
# show output from df
def df(self, args):
self.info("%-16s %-11s %-11s %-9s %-10s %-8s %-9s %-10s %-10s" % args)
# show fuzzing results
def fuzzed(self, path, cmd, opt):
opt1, opt2, opt3 = opt
if isinstance(opt1, bool): opt1 = (Back.GREEN + str(opt1) + Back.BLUE + " ")\
if opt1 else (Back.RED + str(opt1) + Back.BLUE + " ")
if isinstance(opt2, bool): opt2 = (Back.GREEN + str(opt2) + Back.BLUE + " ")\
if opt2 else (Back.RED + str(opt2) + Back.BLUE + " ")
if isinstance(opt3, bool): opt3 = (Back.GREEN + str(opt3) + Back.BLUE + " ")\
if opt3 else (Back.RED + str(opt3) + Back.BLUE + " ")
opt = opt1, opt2, opt3
self.info("%-35s %-12s %-7s %-7s %-7s" % ((path, cmd) + opt))
# show captured jobs
def joblist(self, job): # job = (date, size, user, name, soft)
(date, size, user, name, soft) = job
user = output().strfit(user, 13)
name = output().strfit(name, 22)
soft = output().strfit(soft, 20)
line = (date, size, user, name, soft)
output().info('%-12s %5s %-13s %-22s %-20s' % line)
# show ascii only
def ascii(self, data):
data = re.sub(r"(\x00){10}", "\x00", data) # shorten nullbyte streams
data = re.sub(r"([^ -~])", ".", data) # replace non-printable chars
self.raw(data, "")
# show binary dump
def dump(self, data):
# experimental regex to match sensitive strings like passwords
data = re.sub(r"[\x00-\x06,\x1e]([!-~]{6,}?(?!\\0A))\x00{16}", "START" + r"\1" + "STOP", data)
data = re.sub(r"\00+", "\x00", data) # ignore nullbyte streams
data = re.sub(r"(\x00){10}", "\x00", data) # ignore nullbyte streams
data = re.sub(r"([\x00-\x1f,\x7f-\xff])", ".", data)
data = re.sub(r"START([!-~]{6,}?)STOP", Style.RESET_ALL + Back.BLUE + r"\1" + Style.RESET_ALL + Fore.YELLOW, data)
self.raw(data, "")
# dump ps dictionary
def psdict(self, data, indent=''):
try: importlib.reload(sys); sys.setdefaultencoding('UTF8') # workaround for non-ascii output (python 2 only)
except AttributeError: pass # gone in python 3, where str is unicode by default
# convert list to dictionary with indices as keys
if isinstance(data, list):
data = dict(enumerate(data))
# data now is expected to be a dictionary
if len(list(data.keys())) > 0: last = sorted(data.keys())[-1]
for key, val in sorted(data.items()):
type = val['type'].replace('type', '')
value = val['value']
perms = val['perms']
recursion = False
# current entry is a dictionary
if isinstance(value, dict):
value, recursion = '', True
# current entry is a ps array
if isinstance(value, list):
try: # array contains only atomic values
value = ' '.join(x['value'] for x in value)
except: # array contains further list or dict
# value = sum(val['value'], [])
value, recursion = '', True
# value = value.encode('ascii', errors='ignore')
node = '┬' if recursion else '─'
edge = indent + ('└' if key == last else '├')
# output current node in dictionary
print("%s%s %-3s %-11s %-30s %s" % (edge, node, perms, type, key, value))
if recursion: # ...
self.psdict(val['value'], indent + (' ' if key == last else '│'))
# show some information
def psonly(self):
self.chitchat("Info: This only affects jobs printed by a PostScript driver")
# countdown from sec to zero
def countdown(self, msg, sec, cmd):
try:
sys.stdout.write(msg)
for x in reversed(list(range(1, sec+1))):
sys.stdout.write(" " + str(x))
sys.stdout.flush()
time.sleep(1)
print(" KABOOM!")
return True
except KeyboardInterrupt:
print("")
# show horizontal line
def hline(self, len=72):
self.info("─" * len)
# crop/pad string to fixed length
def strfit(self, str, max):
str = str.strip() or "-"
if str.startswith('(') and str.endswith(')'): str = str[1:-1]
# crop long strings
if len(str) > max:
str = str[0:max-1] + "…"
# pad short strings
return str.ljust(max)
# ----------------------------------------------------------------------
class conv():
# return current time
def now(self):
return int(time.time())
# convert a time span in seconds to a human-readable duration string
def elapsed(self, date, div=1, short=False):
date = str(datetime.timedelta(seconds=int(date)/div))
return date.split(",")[0] if short else date
# return date dependent on current year
def lsdate(self, date):
year1 = datetime.datetime.now().year
year2 = datetime.datetime.fromtimestamp(date).year
pdate = '%b %e ' if os.name == 'posix' else '%b %d '
format = pdate + "%H:%M" if year1 == year2 else pdate + " %Y"
return time.strftime(format, time.localtime(date))
# return date plus/minus given seconds
def timediff(self, seconds):
return self.lsdate(self.now() + self.int(seconds) / 1000)
# convert size to human readable value
def filesize(self, num):
num = self.int(num)
for unit in ['B','K','M']:
if abs(num) < 1024.0:
return (("%4.1f%s" if unit == 'M' else "%4.0f%s") % (num, unit))
num /= 1024.0
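# e.g. filesize(500) -> " 500B", filesize(2048) -> "   2K" (illustrative)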
# remove carriage return from line breaks
def nstrip(self, data):
return re.sub(r'\r\n', '\n', data)
# convert string to hexadecimal
def hex(self, data, sep=''):
return sep.join("{:02x}".format(ord(c)) for c in data)
# convert to ascii character
def chr(self, num):
return chr(self.int(num))
# convert to integer or zero
def int(self, num):
try: n = int(num)
except ValueError: n = 0
return n
# ----------------------------------------------------------------------
class file():
# read from local file
def read(self, path):
try:
with open(path, mode='rb') as f:
data = f.read()
f.close()
return data
except IOError as e:
output().errmsg("Cannot read from file", e)
# write to local file
def write(self, path, data, m='wb'):
try:
with open(path, mode=m) as f:
f.write(data)
f.close()
except IOError as e:
output().errmsg("Cannot write to file", e)
# append to local file
def append(self, path, data):
self.write(path, data, 'ab+')
# ----------------------------------------------------------------------
class conn(object):
# create debug connection object
def __init__(self, mode, debug, quiet):
self.mode = mode
self.debug = debug
self.quiet = quiet
self._file = None
self._sock = socket()
<|fim▁hole|> # target is a character device
if os.path.exists(target) \
and stat.S_ISCHR(os.stat(target).st_mode):
self._file = os.open(target, os.O_RDWR)
# treat target as ipv4 socket
else:
m = re.search('^(.+?):([0-9]+)$', target)
if m:
[target, port] = m.groups()
port = int(port)
self._sock.connect((target, port))
# close connection
def close(self, *arg):
# close file descriptor
if self._file: os.close(self._file)
# close inet socket
else: self._sock.close()
# set timeout
def timeout(self, *arg):
self._sock.settimeout(*arg)
# send data
def send(self, data):
if self.debug: output().send(self.beautify(data), self.debug)
# send data to device
if self._file: return os.write(self._file, data)
# send data to socket
elif self._sock: return self._sock.sendall(data.encode())
# receive data
def recv(self, bytes):
# receive data from device
if self._file: data = os.read(self._file, bytes).decode()
# receive data from socket
else: data = self._sock.recv(bytes).decode()
# output recv data when in debug mode
if self.debug: output().recv(self.beautify(data), self.debug)
return data
# so-many-seconds-passed bool condition
def past(self, seconds, watchdog):
return int(watchdog * 100) % (seconds * 100) == 0
# connection-feels-slow bool condition
def slow(self, limit, watchdog):
return not (self.quiet or self.debug) and watchdog > limit
# receive data until a delimiter is reached
def recv_until(self, delimiter, fb=True, crop=True, binary=False):
data = ""
sleep = 0.01 # pause in recv loop
limit = 3.0 # max watchdog overrun
wd = 0.0 # watchdog timeout counter
r = re.compile(delimiter, re.DOTALL)
s = re.compile("^\x04?\x0d?\x0a?" + delimiter, re.DOTALL)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
while not r.search(data):
data += self.recv(4096) # receive actual data
if self.past(limit, wd): wd_old, bytes = wd, len(data)
wd += sleep # workaround for endless loop w/o socket timeout
time.sleep(sleep) # happens on some devices - python socket error?
# timeout plus it seems we are not receiving data anymore
if wd > self._sock.gettimeout() and wd >= wd_old + limit:
if len(data) == bytes:
output().errmsg("Receiving data failed", "watchdog timeout")
break
# visual feedback on large/slow data transfers
if self.slow(limit, wd) and self.past(0.1, wd) and len(data) > 0:
output().chitchat(str(len(data)) + " bytes received\r", '')
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# clear current line from 'so-many bytes received' chit-chat
if self.slow(limit, wd): output().chitchat(' ' * 24 + "\r", '')
# warn if feedback expected but response empty (= delimiter only)
# this also happens for data received out of order (e.g. brother)
if fb and s.search(data): output().chitchat("No data received.")
# remove delimiter itself from data
if crop: data = r.sub('', data)
# crop uel sequence at the beginning
data = re.sub(r'(^' + const.UEL + ')', '', data)
'''
┌─────────────────────────────────────────────────────────────────────────┐
│ delimiters -- note that carriage return (0d) is optional in ps/pjl │
├─────────────────────────┬─────────────────────────┬─────────────────────┤
│ │ PJL │ PostScript │
├─────────────────────────┼─────────┬───────────────┼────────┬────────────┤
│ │ send │ recv │ send │ recv │
├─────────────────────────┼─────────┼───────────────┼────────┼────────────┤
│ normal commands (ascii) │ 0d? 0a │ 0d+ 0a 0c 04? │ 0d? 0a │ 0d? 0a 04? │
├─────────────────────────┼─────────┼───────────────┼────────┼────────────┤
│ file transfers (binary) │ 0d? 0a │ 0c │ 0d? 0a │ - │
└─────────────────────────┴─────────┴───────────────┴────────┴────────────┘
'''
# crop end-of-transmission chars
if self.mode == 'ps':
data = re.sub(r'^\x04', '', data)
if not binary: data = re.sub(r'\x0d?\x0a\x04?$', '', data)
else: # pjl and pcl mode
if binary: data = re.sub(r'\x0c$', '', data)
else: data = re.sub(r'\x0d+\x0a\x0c\x04?$', '', data)
# crop whitespaces/newline as feedback
if not binary: data = data.strip()
return data
# beautify debug output
def beautify(self, data):
# remove sent/recv uel sequences
data = re.sub(r'' + const.UEL, '', data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if self.mode == 'ps':
# remove sent postscript header
data = re.sub(r'' + re.escape(const.PS_HEADER), '', data)
# remove sent postscript hack
data = re.sub(r'' + re.escape(const.PS_IOHACK), '', data)
# remove sent delimiter token
data = re.sub(r'\(DELIMITER\d+\\n\) print flush\n', '', data)
# remove recv delimiter token
data = re.sub(r'DELIMITER\d+', '', data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
elif self.mode == 'pjl':
# remove sent/recv delimiter token
data = re.sub(r'@PJL ECHO\s+DELIMITER\d+', '', data)
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
elif self.mode == 'pcl':
# remove sent delimiter token
data = re.sub(r'\x1b\*s-\d+X', '', data)
# remove recv delimiter token
data = re.sub(r'PCL\x0d?\x0a?\x0c?ECHO -\d+', '', data)
# replace sent escape sequences
data = re.sub(r'(' + const.ESC + ')', '<Esc>', data)
pass
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
# replace lineseps in between
data = re.sub(r'\x0d?\x0a?\x0c', os.linesep, data)
# remove eot/eof sequences
data = data.strip(const.EOF)
return data
# ----------------------------------------------------------------------
class const(): # define constants
SEP = '/' # use posixoid path separator
EOL = '\r\n' # line feed || carriage return
ESC = '\x1b' # used to start escape sequences
UEL = ESC + '%-12345X' # universal exit language
EOF = EOL + '\x0c\x04' # potential end of file chars
DELIMITER = "DELIMITER" # delimiter marking end of repsonse
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PS_CATCH = r'%%\[ (.*)\]%%'
PS_ERROR = r'%%\[ Error: (.*)\]%%'
PS_FLUSH = r'%%\[ Flushing: (.*)\]%%'
PS_PROMPT = '>' # TBD: could be derived from PS command 'prompt'
PS_HEADER = '@PJL ENTER LANGUAGE = POSTSCRIPT\n%!\n'
PS_GLOBAL = 'true 0 startjob pop\n' # 'serverdict begin 0 exitserver'
PS_SUPER = '\n1183615869 internaldict /superexec get exec'
PS_NOHOOK = '/nohook true def\n'
PS_IOHACK = '/print {(%stdout) (w) file dup 3 2 roll writestring flushfile} def\n'\
'/== {128 string cvs print (\\n) print} def\n'
PCL_HEADER = '@PJL ENTER LANGUAGE = PCL' + EOL + ESC
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
SUPERBLOCK = '31337' # define super macro id to contain pclfs table
BLOCKRANGE = list(range(10000,20000)) # use those macros for file content
FILE_EXISTS = -1 # file size to be returned if file/dir size unknown
NONEXISTENT = -2 # file size to be returned if a file does not exist
#- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
PS_VOL = '' # no default volume in ps (read: any, write: first)
PJL_VOL = '0:' + SEP # default pjl volume name || path seperator<|fim▁end|> | # open connection
def open(self, target, port=9100): |
<|file_name|>_pyflakes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
try:
# The 'demandimport' breaks pyflakes and flake8._pyflakes
from mercurial import demandimport
except ImportError:
pass
else:
demandimport.disable()
import os
import pep8
import pyflakes
import pyflakes.checker
<|fim▁hole|>
def patch_pyflakes():
"""Add error codes to Pyflakes messages."""
codes = dict([line.split()[::-1] for line in (
'F401 UnusedImport',
'F402 ImportShadowedByLoopVar',
'F403 ImportStarUsed',
'F404 LateFutureImport',
'F810 Redefined', # XXX Obsolete?
'F811 RedefinedWhileUnused',
'F812 RedefinedInListComp',
'F821 UndefinedName',
'F822 UndefinedExport',
'F823 UndefinedLocal',
'F831 DuplicateArgument',
'F841 UnusedVariable',
)])
for name, obj in vars(pyflakes.messages).items():
if name[0].isupper() and obj.message:
obj.flake8_msg = '%s %s' % (codes.get(name, 'F999'), obj.message)
patch_pyflakes()
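# illustrative effect of the patch (message text itself comes from pyflakes):
#   pyflakes.messages.UnusedImport.flake8_msg.startswith('F401')  # True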
class FlakesChecker(pyflakes.checker.Checker):
"""Subclass the Pyflakes checker to conform with the flake8 API."""
name = 'pyflakes'
version = pyflakes.__version__
def __init__(self, tree, filename):
filename = pep8.normalize_paths(filename)[0]
withDoctest = self.withDoctest
included_by = [include for include in self.include_in_doctest
if include != '' and filename.startswith(include)]
if included_by:
withDoctest = True
for exclude in self.exclude_from_doctest:
if exclude != '' and filename.startswith(exclude):
withDoctest = False
overlaped_by = [include for include in included_by
if include.startswith(exclude)]
if overlaped_by:
withDoctest = True
super(FlakesChecker, self).__init__(tree, filename,
withDoctest=withDoctest)
@classmethod
def add_options(cls, parser):
parser.add_option('--builtins',
help="define more built-ins, comma separated")
parser.add_option('--doctests', default=False, action='store_true',
help="check syntax of the doctests")
parser.add_option('--include-in-doctest', default='',
dest='include_in_doctest',
help='Run doctests only on these files',
type='string')
parser.add_option('--exclude-from-doctest', default='',
dest='exclude_from_doctest',
help='Skip these files when running doctests',
type='string')
parser.config_options.extend(['builtins', 'doctests',
'include-in-doctest',
'exclude-from-doctest'])
@classmethod
def parse_options(cls, options):
if options.builtins:
cls.builtIns = cls.builtIns.union(options.builtins.split(','))
cls.withDoctest = options.doctests
included_files = []
for included_file in options.include_in_doctest.split(','):
if included_file == '':
continue
if not included_file.startswith((os.sep, './', '~/')):
included_files.append('./' + included_file)
else:
included_files.append(included_file)
cls.include_in_doctest = pep8.normalize_paths(','.join(included_files))
excluded_files = []
for excluded_file in options.exclude_from_doctest.split(','):
if excluded_file == '':
continue
if not excluded_file.startswith((os.sep, './', '~/')):
excluded_files.append('./' + excluded_file)
else:
excluded_files.append(excluded_file)
cls.exclude_from_doctest = pep8.normalize_paths(
','.join(excluded_files))
inc_exc = set(cls.include_in_doctest).intersection(
set(cls.exclude_from_doctest))
if inc_exc:
raise ValueError('"%s" was specified in both the '
'include-in-doctest and exclude-from-doctest '
'options. You are not allowed to specify it in '
'both for doctesting.' % inc_exc)
def run(self):
for m in self.messages:
col = getattr(m, 'col', 0)
yield m.lineno, col, (m.flake8_msg % m.message_args), m.__class__<|fim▁end|> | |
<|file_name|>js_3YI8rlQtCphHC8k7Vs22nkB6_u47OqwXcD7P8Jm9QQg_BHuNkXbS1MEkV6lGkimSfQE6366BcKxzYtd8U65iUpM.js<|end_file_name|><|fim▁begin|>/**
* @file
* Provides Ajax page updating via jQuery $.ajax.
*
* Ajax is a method of making a request via JavaScript while viewing an HTML
* page. The request returns an array of commands encoded in JSON, which is
* then executed to make any changes that are necessary to the page.
*
* Drupal uses this file to enhance form elements with `#ajax['url']` and
* `#ajax['wrapper']` properties. If set, this file will automatically be
* included to provide Ajax capabilities.
*/
(function ($, window, Drupal, drupalSettings) {
'use strict';
/**
* Attaches the Ajax behavior to each Ajax form element.
*
* @type {Drupal~behavior}
*
* @prop {Drupal~behaviorAttach} attach
* Initialize all {@link Drupal.Ajax} objects declared in
* `drupalSettings.ajax` or initialize {@link Drupal.Ajax} objects from
* DOM elements having the `use-ajax-submit` or `use-ajax` css class.
* @prop {Drupal~behaviorDetach} detach
* During `unload` remove all {@link Drupal.Ajax} objects related to
* the removed content.
*/
Drupal.behaviors.AJAX = {
attach: function (context, settings) {
function loadAjaxBehavior(base) {
var element_settings = settings.ajax[base];
if (typeof element_settings.selector === 'undefined') {
element_settings.selector = '#' + base;
}
$(element_settings.selector).once('drupal-ajax').each(function () {
element_settings.element = this;
element_settings.base = base;
Drupal.ajax(element_settings);
});
}
// Load all Ajax behaviors specified in the settings.
for (var base in settings.ajax) {
if (settings.ajax.hasOwnProperty(base)) {
loadAjaxBehavior(base);
}
}
// Bind Ajax behaviors to all items showing the class.
$('.use-ajax').once('ajax').each(function () {
var element_settings = {};
// Clicked links look better with the throbber than the progress bar.
element_settings.progress = {type: 'throbber'};
// For anchor tags, these will go to the target of the anchor rather
// than the usual location.
var href = $(this).attr('href');
if (href) {
element_settings.url = href;
element_settings.event = 'click';
}
element_settings.dialogType = $(this).data('dialog-type');
element_settings.dialog = $(this).data('dialog-options');
element_settings.base = $(this).attr('id');
element_settings.element = this;
Drupal.ajax(element_settings);
});
// This class means to submit the form to the action using Ajax.
$('.use-ajax-submit').once('ajax').each(function () {
var element_settings = {};
// Ajax submits specified in this manner automatically submit to the
// normal form action.
element_settings.url = $(this.form).attr('action');
// Form submit button clicks need to tell the form what was clicked so
// it gets passed in the POST request.
element_settings.setClick = true;
// Form buttons use the 'click' event rather than mousedown.
element_settings.event = 'click';
// Clicked form buttons look better with the throbber than the progress
// bar.
element_settings.progress = {type: 'throbber'};
element_settings.base = $(this).attr('id');
element_settings.element = this;
Drupal.ajax(element_settings);
});
},
detach: function (context, settings, trigger) {
if (trigger === 'unload') {
Drupal.ajax.expired().forEach(function (instance) {
// Set this to null and allow garbage collection to reclaim
// the memory.
Drupal.ajax.instances[instance.instanceIndex] = null;
});
}
}
};
/**
* Extends Error to provide handling for Errors in Ajax.
*
* @constructor
*
* @augments Error
*
* @param {XMLHttpRequest} xmlhttp
* XMLHttpRequest object used for the failed request.
* @param {string} uri
* The URI where the error occurred.
* @param {string} customMessage
* The custom message.
*/
Drupal.AjaxError = function (xmlhttp, uri, customMessage) {
var statusCode;
var statusText;
var pathText;
var responseText;
var readyStateText;
if (xmlhttp.status) {
statusCode = '\n' + Drupal.t('An AJAX HTTP error occurred.') + '\n' + Drupal.t('HTTP Result Code: !status', {'!status': xmlhttp.status});
}
else {
statusCode = '\n' + Drupal.t('An AJAX HTTP request terminated abnormally.');
}
statusCode += '\n' + Drupal.t('Debugging information follows.');
pathText = '\n' + Drupal.t('Path: !uri', {'!uri': uri});
statusText = '';
// In some cases, when statusCode === 0, xmlhttp.statusText may not be
// defined. Unfortunately, testing for it with typeof, etc, doesn't seem to
// catch that and the test causes an exception. So we need to catch the
// exception here.
try {
statusText = '\n' + Drupal.t('StatusText: !statusText', {'!statusText': $.trim(xmlhttp.statusText)});
}
catch (e) {
// Empty.
}
responseText = '';
// Again, we don't have a way to know for sure whether accessing
// xmlhttp.responseText is going to throw an exception. So we'll catch it.
try {
responseText = '\n' + Drupal.t('ResponseText: !responseText', {'!responseText': $.trim(xmlhttp.responseText)});
}
catch (e) {
// Empty.
}
// Make the responseText more readable by stripping HTML tags and newlines.
responseText = responseText.replace(/<("[^"]*"|'[^']*'|[^'">])*>/gi, '');
responseText = responseText.replace(/[\n]+\s+/g, '\n');
// We don't need readyState except for status == 0.
readyStateText = xmlhttp.status === 0 ? ('\n' + Drupal.t('ReadyState: !readyState', {'!readyState': xmlhttp.readyState})) : '';
customMessage = customMessage ? ('\n' + Drupal.t('CustomMessage: !customMessage', {'!customMessage': customMessage})) : '';
/**
* Formatted and translated error message.
*
* @type {string}
*/
this.message = statusCode + pathText + statusText + customMessage + responseText + readyStateText;
/**
* Used by some browsers to display a more accurate stack trace.
*
* @type {string}
*/
this.name = 'AjaxError';
};
Drupal.AjaxError.prototype = new Error();
Drupal.AjaxError.prototype.constructor = Drupal.AjaxError;
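// Illustrative use (variable names are assumptions):
//   try { throw new Drupal.AjaxError(xmlhttprequest, ajax.url); }
//   catch (e) { window.alert(e.message); }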
/**
* Provides Ajax page updating via jQuery $.ajax.
*
* This function is designed to improve developer experience by wrapping the
* initialization of {@link Drupal.Ajax} objects and storing all created
* objects in the {@link Drupal.ajax.instances} array.
*
* @example
* Drupal.behaviors.myCustomAJAXStuff = {
* attach: function (context, settings) {
*
* var ajaxSettings = {
* url: 'my/url/path',
* // If the old version of Drupal.ajax() needs to be used those
* // properties can be added
* base: 'myBase',
* element: $(context).find('.someElement')
* };
*
* var myAjaxObject = Drupal.ajax(ajaxSettings);
*
* // Declare a new Ajax command specifically for this Ajax object.
* myAjaxObject.commands.insert = function (ajax, response, status) {
* $('#my-wrapper').append(response.data);
* alert('New content was appended to #my-wrapper');
* };
*
* // This command will remove this Ajax object from the page.
* myAjaxObject.commands.destroyObject = function (ajax, response, status) {
* Drupal.ajax.instances[this.instanceIndex] = null;
* };
*
* // Programmatically trigger the Ajax request.
* myAjaxObject.execute();
* }
* };
*
* @param {object} settings
* The settings object passed to {@link Drupal.Ajax} constructor.
* @param {string} [settings.base]
* Base is passed to {@link Drupal.Ajax} constructor as the 'base'
* parameter.
* @param {HTMLElement} [settings.element]
* Element parameter of {@link Drupal.Ajax} constructor, element on which
* event listeners will be bound.
*
* @return {Drupal.Ajax}
* The created Ajax object.
*
* @see Drupal.AjaxCommands
*/
Drupal.ajax = function (settings) {
if (arguments.length !== 1) {
throw new Error('Drupal.ajax() function must be called with one configuration object only');
}
// Map those config keys to variables for the old Drupal.ajax function.
var base = settings.base || false;
var element = settings.element || false;
delete settings.base;
delete settings.element;
// By default do not display progress for ajax calls without an element.
if (!settings.progress && !element) {
settings.progress = false;
}
var ajax = new Drupal.Ajax(base, element, settings);
ajax.instanceIndex = Drupal.ajax.instances.length;
Drupal.ajax.instances.push(ajax);
return ajax;
};
/**
* Contains all created Ajax objects.
*
* @type {Array.<Drupal.Ajax|null>}
*/
Drupal.ajax.instances = [];
/**
 * List all objects where the associated element is not in the DOM.
*
* This method ignores {@link Drupal.Ajax} objects not bound to DOM elements
* when created with {@link Drupal.ajax}.
*
* @return {Array.<Drupal.Ajax>}
* The list of expired {@link Drupal.Ajax} objects.
*/
Drupal.ajax.expired = function () {
return Drupal.ajax.instances.filter(function (instance) {
return instance && instance.element !== false && !document.body.contains(instance.element);
});
};
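/*
 * Usage sketch (illustrative, not part of core): expired() pairs with the
 * instances array to drop Ajax objects whose elements have left the DOM.
 * Instances are nulled rather than spliced so that the instanceIndex
 * values of the remaining objects stay valid, as in the example at the top
 * of this file.
 *
 * @example
 * Drupal.ajax.expired().forEach(function (instance) {
 *   Drupal.ajax.instances[instance.instanceIndex] = null;
 * });
 */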
/**
* Settings for an Ajax object.
*
* @typedef {object} Drupal.Ajax~element_settings
*
* @prop {string} url
* Target of the Ajax request.
* @prop {?string} [event]
* Event bound to settings.element which will trigger the Ajax request.
* @prop {bool} [keypress=true]
* Triggers a request on keypress events.
* @prop {?string} selector
* jQuery selector targeting the element to bind events to or used with
* {@link Drupal.AjaxCommands}.
* @prop {string} [effect='none']
* Name of the jQuery method to use for displaying new Ajax content.
* @prop {string|number} [speed='none']
* Speed with which to apply the effect.
* @prop {string} [method]
* Name of the jQuery method used to insert new content in the targeted
* element.
* @prop {object} [progress]
* Settings for the display of a user-friendly loader.
* @prop {string} [progress.type='throbber']
* Type of progress element, core provides `'bar'`, `'throbber'` and
* `'fullscreen'`.
* @prop {string} [progress.message=Drupal.t('Please wait...')]
* Custom message to be used with the bar indicator.
* @prop {object} [submit]
* Extra data to be sent with the Ajax request.
* @prop {bool} [submit.js=true]
* Allows the PHP side to know this comes from an Ajax request.
* @prop {object} [dialog]
* Options for {@link Drupal.dialog}.
* @prop {string} [dialogType]
* One of `'modal'` or `'dialog'`.
* @prop {string} [prevent]
* List of events on which to stop default action and stop propagation.
*/
/**
* Ajax constructor.
*
* The Ajax request returns an array of commands encoded in JSON, which is
* then executed to make any changes that are necessary to the page.
*
* Drupal uses this file to enhance form elements with `#ajax['url']` and
* `#ajax['wrapper']` properties. If set, this file will automatically be
* included to provide Ajax capabilities.
*
* @constructor
*
* @param {string} [base]
* Base parameter of {@link Drupal.Ajax} constructor
* @param {HTMLElement} [element]
* Element parameter of {@link Drupal.Ajax} constructor, element on which
* event listeners will be bound.
* @param {Drupal.Ajax~element_settings} element_settings
* Settings for this Ajax object.
*/
Drupal.Ajax = function (base, element, element_settings) {
var defaults = {
event: element ? 'mousedown' : null,
keypress: true,
selector: base ? '#' + base : null,
effect: 'none',
speed: 'none',
method: 'replaceWith',
progress: {
type: 'throbber',
message: Drupal.t('Please wait...')
},
submit: {
js: true
}
};
$.extend(this, defaults, element_settings);
/**
* @type {Drupal.AjaxCommands}
*/
this.commands = new Drupal.AjaxCommands();
/**
* @type {bool|number}
*/
this.instanceIndex = false;
// @todo Remove this after refactoring the PHP code to:
// - Call this 'selector'.
// - Include the '#' for ID-based selectors.
// - Support non-ID-based selectors.
if (this.wrapper) {
/**
* @type {string}
*/
this.wrapper = '#' + this.wrapper;
}
/**
* @type {HTMLElement}
*/
this.element = element;
/**
* @type {Drupal.Ajax~element_settings}
*/
this.element_settings = element_settings;
// If there isn't a form, jQuery.ajax() will be used instead, allowing us to
// bind Ajax to links as well.
if (this.element && this.element.form) {
/**
* @type {jQuery}
*/
this.$form = $(this.element.form);
}
// If no Ajax callback URL was given, use the link href or form action.
if (!this.url) {
var $element = $(this.element);
if ($element.is('a')) {
this.url = $element.attr('href');
}
else if (this.element && element.form) {
this.url = this.$form.attr('action');
}
}
// Replacing 'nojs' with 'ajax' in the URL allows for an easy method to let
// the server detect when it needs to degrade gracefully.
// There are four scenarios to check for:
// 1. /nojs/
// 2. /nojs$ - The end of a URL string.
// 3. /nojs? - Followed by a query (e.g. path/nojs?destination=foobar).
// 4. /nojs# - Followed by a fragment (e.g.: path/nojs#myfragment).
var originalUrl = this.url;
/**
* Processed Ajax URL.
*
* @type {string}
*/
this.url = this.url.replace(/\/nojs(\/|$|\?|#)/g, '/ajax$1');
// If the 'nojs' version of the URL is trusted, also trust the 'ajax'
// version.
if (drupalSettings.ajaxTrustedUrl[originalUrl]) {
drupalSettings.ajaxTrustedUrl[this.url] = true;
}
// Set the options for the ajaxSubmit function.
// The 'this' variable will not persist inside of the options object.
var ajax = this;
/**
* Options for the jQuery.ajax function.
*
* @name Drupal.Ajax#options
*
* @type {object}
*
* @prop {string} url
* Ajax URL to be called.
* @prop {object} data
* Ajax payload.
* @prop {function} beforeSerialize
* Implement jQuery beforeSerialize function to call
* {@link Drupal.Ajax#beforeSerialize}.
* @prop {function} beforeSubmit
* Implement jQuery beforeSubmit function to call
* {@link Drupal.Ajax#beforeSubmit}.
* @prop {function} beforeSend
* Implement jQuery beforeSend function to call
* {@link Drupal.Ajax#beforeSend}.
* @prop {function} success
* Implement jQuery success function to call
* {@link Drupal.Ajax#success}.
* @prop {function} complete
 *   Implement jQuery complete function to clean up Ajax state and trigger an
* error if needed.
* @prop {string} dataType='json'
* Type of the response expected.
* @prop {string} type='POST'
* HTTP method to use for the Ajax request.
*/
ajax.options = {
url: ajax.url,
data: ajax.submit,
beforeSerialize: function (element_settings, options) {
return ajax.beforeSerialize(element_settings, options);
},
beforeSubmit: function (form_values, element_settings, options) {
ajax.ajaxing = true;
return ajax.beforeSubmit(form_values, element_settings, options);
},
beforeSend: function (xmlhttprequest, options) {
ajax.ajaxing = true;
return ajax.beforeSend(xmlhttprequest, options);
},
success: function (response, status, xmlhttprequest) {
// Sanity check for browser support (object expected).
// When using iFrame uploads, responses must be returned as a string.
if (typeof response === 'string') {
response = $.parseJSON(response);
}
// Prior to invoking the response's commands, verify that they can be
// trusted by checking for a response header. See
// \Drupal\Core\EventSubscriber\AjaxResponseSubscriber for details.
// - Empty responses are harmless so can bypass verification. This
// avoids an alert message for server-generated no-op responses that
// skip Ajax rendering.
// - Ajax objects with trusted URLs (e.g., ones defined server-side via
// #ajax) can bypass header verification. This is especially useful
// for Ajax with multipart forms. Because IFRAME transport is used,
// the response headers cannot be accessed for verification.
if (response !== null && !drupalSettings.ajaxTrustedUrl[ajax.url]) {
if (xmlhttprequest.getResponseHeader('X-Drupal-Ajax-Token') !== '1') {
var customMessage = Drupal.t('The response failed verification so will not be processed.');
return ajax.error(xmlhttprequest, ajax.url, customMessage);
}
}
return ajax.success(response, status);
},
complete: function (xmlhttprequest, status) {
ajax.ajaxing = false;
if (status === 'error' || status === 'parsererror') {
return ajax.error(xmlhttprequest, ajax.url);
}
},
dataType: 'json',
type: 'POST'
};
if (element_settings.dialog) {
ajax.options.data.dialogOptions = element_settings.dialog;
}
// Ensure that we have a valid URL by adding ? when no query parameter is
// yet available, otherwise append using &.
if (ajax.options.url.indexOf('?') === -1) {
ajax.options.url += '?';
}
else {
ajax.options.url += '&';
}
ajax.options.url += Drupal.ajax.WRAPPER_FORMAT + '=drupal_' + (element_settings.dialogType || 'ajax');
// Bind the ajaxSubmit function to the element event.
$(ajax.element).on(element_settings.event, function (event) {
if (!drupalSettings.ajaxTrustedUrl[ajax.url] && !Drupal.url.isLocal(ajax.url)) {
throw new Error(Drupal.t('The callback URL is not local and not trusted: !url', {'!url': ajax.url}));
}
return ajax.eventResponse(this, event);
});
// If necessary, enable keyboard submission so that Ajax behaviors
// can be triggered through keyboard input as well as e.g. a mousedown
// action.
if (element_settings.keypress) {
$(ajax.element).on('keypress', function (event) {
return ajax.keypressResponse(this, event);
});
}
// If necessary, prevent the browser default action of an additional event.
// For example, prevent the browser default action of a click, even if the
// Ajax behavior binds to mousedown.
if (element_settings.prevent) {
$(ajax.element).on(element_settings.prevent, false);
}
};
/**
* URL query attribute to indicate the wrapper used to render a request.
*
* The wrapper format determines how the HTML is wrapped, for example in a
* modal dialog.
*
* @const {string}
*
* @default
*/
Drupal.ajax.WRAPPER_FORMAT = '_wrapper_format';
/**
* Request parameter to indicate that a request is a Drupal Ajax request.
*
* @const {string}
*
* @default
*/
Drupal.Ajax.AJAX_REQUEST_PARAMETER = '_drupal_ajax';
/**
* Execute the ajax request.
*
* Allows developers to execute an Ajax request manually without specifying
* an event to respond to.
*
* @return {object}
* Returns the jQuery.Deferred object underlying the Ajax request. If
* pre-serialization fails, the Deferred will be returned in the rejected
* state.
*/
Drupal.Ajax.prototype.execute = function () {
// Do not perform another ajax command if one is already in progress.
if (this.ajaxing) {
return;
}
try {
this.beforeSerialize(this.element, this.options);
// Return the jqXHR so that external code can hook into the Deferred API.
return $.ajax(this.options);
}
catch (e) {
// Unset the ajax.ajaxing flag here because it won't be unset during
// the complete response.
this.ajaxing = false;
window.alert('An error occurred while attempting to process ' + this.options.url + ': ' + e.message);
// For consistency, return a rejected Deferred (i.e., jqXHR's superclass)
// so that calling code can take appropriate action.
return $.Deferred().reject();
}
};
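/*
 * Usage sketch (illustrative): because execute() returns the jqXHR (a
 * jQuery Deferred), or a rejected Deferred when pre-serialization fails,
 * callers can chain handlers onto it. 'myAjaxObject' is a placeholder for
 * an object created with Drupal.ajax().
 *
 * @example
 * myAjaxObject.execute()
 *   .done(function () {
 *     // The response's Ajax commands have been processed by now.
 *   })
 *   .fail(function () {
 *     // Covers transport errors as well as the rejected Deferred returned
 *     // when beforeSerialize() throws.
 *   });
 */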
/**
* Handle a key press.
*
* The Ajax object will, if instructed, bind to a key press response. This
* will test to see if the key press is valid to trigger this event and
* if it is, trigger it for us and prevent other keypresses from triggering.
* In this case we're handling RETURN and SPACEBAR keypresses (event codes 13
 * and 32). RETURN is often used to submit a form when in a textfield, and
* SPACE is often used to activate an element without submitting.
*
* @param {HTMLElement} element
* Element the event was triggered on.
* @param {jQuery.Event} event
* Triggered event.
*/
Drupal.Ajax.prototype.keypressResponse = function (element, event) {
// Create a synonym for this to reduce code confusion.
var ajax = this;
// Detect enter key and space bar and allow the standard response for them,
// except for form elements of type 'text', 'tel', 'number' and 'textarea',
// where the spacebar activation causes inappropriate activation if
// #ajax['keypress'] is TRUE. On a text-type widget a space should always
// be a space.
if (event.which === 13 || (event.which === 32 && element.type !== 'text' &&
element.type !== 'textarea' && element.type !== 'tel' && element.type !== 'number')) {
event.preventDefault();
event.stopPropagation();
$(ajax.element_settings.element).trigger(ajax.element_settings.event);
}
};
/**
* Handle an event that triggers an Ajax response.
*
* When an event that triggers an Ajax response happens, this method will
* perform the actual Ajax call. It is bound to the event using
* bind() in the constructor, and it uses the options specified on the
 * on() in the constructor, and it uses the options specified on the
*
* @param {HTMLElement} element
* Element the event was triggered on.
* @param {jQuery.Event} event
* Triggered event.
*/
Drupal.Ajax.prototype.eventResponse = function (element, event) {
event.preventDefault();
event.stopPropagation();
// Create a synonym for this to reduce code confusion.
var ajax = this;
// Do not perform another Ajax command if one is already in progress.
if (ajax.ajaxing) {
return;
}
try {
if (ajax.$form) {
// If setClick is set, we must set this to ensure that the button's
// value is passed.
if (ajax.setClick) {
// Mark the clicked button. 'form.clk' is a special variable for
// ajaxSubmit that tells the system which element got clicked to
// trigger the submit. Without it there would be no 'op' or
// equivalent.
element.form.clk = element;
}
ajax.$form.ajaxSubmit(ajax.options);
}
else {
ajax.beforeSerialize(ajax.element, ajax.options);
$.ajax(ajax.options);
}
}
catch (e) {
// Unset the ajax.ajaxing flag here because it won't be unset during
// the complete response.
ajax.ajaxing = false;
window.alert('An error occurred while attempting to process ' + ajax.options.url + ': ' + e.message);
}
};
/**
* Handler for the form serialization.
*
* Runs before the beforeSend() handler (see below), and unlike that one, runs
* before field data is collected.
*
* @param {object} [element]
* Ajax object's `element_settings`.
* @param {object} options
* jQuery.ajax options.
*/
Drupal.Ajax.prototype.beforeSerialize = function (element, options) {
// Allow detaching behaviors to update field values before collecting them.
// This is only needed when field values are added to the POST data, so only
// when there is a form such that this.$form.ajaxSubmit() is used instead of
// $.ajax(). When there is no form and $.ajax() is used, beforeSerialize()
// isn't called, but don't rely on that: explicitly check this.$form.
if (this.$form) {
var settings = this.settings || drupalSettings;
Drupal.detachBehaviors(this.$form.get(0), settings, 'serialize');
}
// Inform Drupal that this is an AJAX request.
options.data[Drupal.Ajax.AJAX_REQUEST_PARAMETER] = 1;
// Allow Drupal to return new JavaScript and CSS files to load without
// returning the ones already loaded.
// @see \Drupal\Core\Theme\AjaxBasePageNegotiator
// @see \Drupal\Core\Asset\LibraryDependencyResolverInterface::getMinimalRepresentativeSubset()
// @see system_js_settings_alter()
var pageState = drupalSettings.ajaxPageState;
options.data['ajax_page_state[theme]'] = pageState.theme;
options.data['ajax_page_state[theme_token]'] = pageState.theme_token;
options.data['ajax_page_state[libraries]'] = pageState.libraries;
};
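/*
 * Payload sketch (illustrative): after beforeSerialize() runs, options.data
 * carries the Ajax marker and page-state keys alongside any form values.
 * The concrete values shown are hypothetical and depend on the current page
 * and theme.
 *
 * @example
 * // options.data now contains entries such as:
 * // _drupal_ajax: 1
 * // ajax_page_state[theme]: 'bartik'
 * // ajax_page_state[theme_token]: '<token>'
 * // ajax_page_state[libraries]: 'core/drupal.ajax,core/jquery'
 */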
/**
* Modify form values prior to form submission.
*
* @param {Array.<object>} form_values
* Processed form values.
* @param {jQuery} element
* The form node as a jQuery object.
* @param {object} options
* jQuery.ajax options.
*/
Drupal.Ajax.prototype.beforeSubmit = function (form_values, element, options) {
// This function is left empty to make it simple to override for modules
// that wish to add functionality here.
};
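/*
 * Override sketch (illustrative): a module could wrap beforeSubmit() to
 * inject extra POST data. 'my_module_flag' is a hypothetical field name;
 * form_values is the array of {name, value} objects built by ajaxSubmit.
 *
 * @example
 * var originalBeforeSubmit = Drupal.Ajax.prototype.beforeSubmit;
 * Drupal.Ajax.prototype.beforeSubmit = function (form_values, element, options) {
 *   // Hypothetical extra field, sent with every Ajax form submission.
 *   form_values.push({name: 'my_module_flag', value: '1'});
 *   return originalBeforeSubmit.apply(this, arguments);
 * };
 */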
/**
* Prepare the Ajax request before it is sent.
*
* @param {XMLHttpRequest} xmlhttprequest
* Native Ajax object.
* @param {object} options
* jQuery.ajax options.
*/
Drupal.Ajax.prototype.beforeSend = function (xmlhttprequest, options) {
// For forms without file inputs, the jQuery Form plugin serializes the
// form values, and then calls jQuery's $.ajax() function, which invokes
// this handler. In this circumstance, options.extraData is never used. For
// forms with file inputs, the jQuery Form plugin uses the browser's normal
// form submission mechanism, but captures the response in a hidden IFRAME.
// In this circumstance, it calls this handler first, and then appends
// hidden fields to the form to submit the values in options.extraData.
// There is no simple way to know which submission mechanism will be used,
// so we add to extraData regardless, and allow it to be ignored in the
// former case.
if (this.$form) {
      options.extraData = options.extraData || {};
      options.extraData.ajax_iframe_upload = '1';
// The triggering element is about to be disabled (see below), but if it
// contains a value (e.g., a checkbox, textfield, select, etc.), ensure
// that value is included in the submission. As per above, submissions
// that use $.ajax() are already serialized prior to the element being
// disabled, so this is only needed for IFRAME submissions.
var v = $.fieldValue(this.element);
if (v !== null) {
options.extraData[this.element.name] = v;
}
}
// Disable the element that received the change to prevent user interface
// interaction while the Ajax request is in progress. ajax.ajaxing prevents
// the element from triggering a new request, but does not prevent the user
// from changing its value.
$(this.element).prop('disabled', true);
if (!this.progress || !this.progress.type) {
return;
}
// Insert progress indicator.
var progressIndicatorMethod = 'setProgressIndicator' + this.progress.type.slice(0, 1).toUpperCase() + this.progress.type.slice(1).toLowerCase();
if (progressIndicatorMethod in this && typeof this[progressIndicatorMethod] === 'function') {
this[progressIndicatorMethod].call(this);
}
};
/**
* Sets the progress bar progress indicator.
*/
Drupal.Ajax.prototype.setProgressIndicatorBar = function () {
var progressBar = new Drupal.ProgressBar('ajax-progress-' + this.element.id, $.noop, this.progress.method, $.noop);
if (this.progress.message) {
progressBar.setProgress(-1, this.progress.message);
}
if (this.progress.url) {
progressBar.startMonitoring(this.progress.url, this.progress.interval || 1500);
}
this.progress.element = $(progressBar.element).addClass('ajax-progress ajax-progress-bar');
this.progress.object = progressBar;
$(this.element).after(this.progress.element);
};
/**
* Sets the throbber progress indicator.
*/
Drupal.Ajax.prototype.setProgressIndicatorThrobber = function () {
this.progress.element = $('<div class="ajax-progress ajax-progress-throbber"><div class="throbber"> </div></div>');
if (this.progress.message) {
this.progress.element.find('.throbber').after('<div class="message">' + this.progress.message + '</div>');
}
$(this.element).after(this.progress.element);
};
/**
* Sets the fullscreen progress indicator.
*/
Drupal.Ajax.prototype.setProgressIndicatorFullscreen = function () {
this.progress.element = $('<div class="ajax-progress ajax-progress-fullscreen"> </div>');
$('body').after(this.progress.element);
};
/**
* Handler for the form redirection completion.
*
* @param {Array.<Drupal.AjaxCommands~commandDefinition>} response
* Drupal Ajax response.
* @param {number} status
* XMLHttpRequest status.
*/
Drupal.Ajax.prototype.success = function (response, status) {
// Remove the progress element.
if (this.progress.element) {
$(this.progress.element).remove();
}
if (this.progress.object) {
this.progress.object.stopMonitoring();
}
$(this.element).prop('disabled', false);
// Save element's ancestors tree so if the element is removed from the dom
    // we can try to refocus one of its parents. Using addBack reverses the
    // result array, meaning that index 0 is the highest parent in the
    // hierarchy; in this situation it is usually a <form> element.
var elementParents = $(this.element).parents('[data-drupal-selector]').addBack().toArray();
// Track if any command is altering the focus so we can avoid changing the
// focus set by the Ajax command.
var focusChanged = false;
for (var i in response) {
if (response.hasOwnProperty(i) && response[i].command && this.commands[response[i].command]) {
this.commands[response[i].command](this, response[i], status);
if (response[i].command === 'invoke' && response[i].method === 'focus') {
focusChanged = true;
}
}
}
    // If the focus hasn't been changed by the ajax commands, try to refocus the
// triggering element or one of its parents if that element does not exist
// anymore.
if (!focusChanged && this.element && !$(this.element).data('disable-refocus')) {
var target = false;
for (var n = elementParents.length - 1; !target && n > 0; n--) {
target = document.querySelector('[data-drupal-selector="' + elementParents[n].getAttribute('data-drupal-selector') + '"]');
}
if (target) {
$(target).trigger('focus');
}
}
// Reattach behaviors, if they were detached in beforeSerialize(). The
// attachBehaviors() called on the new content from processing the response
// commands is not sufficient, because behaviors from the entire form need
// to be reattached.
if (this.$form) {
var settings = this.settings || drupalSettings;
Drupal.attachBehaviors(this.$form.get(0), settings);
}
// Remove any response-specific settings so they don't get used on the next
// call by mistake.
this.settings = null;
};
/**
* Build an effect object to apply an effect when adding new HTML.
*
* @param {object} response
* Drupal Ajax response.
* @param {string} [response.effect]
* Override the default value of {@link Drupal.Ajax#element_settings}.
* @param {string|number} [response.speed]
* Override the default value of {@link Drupal.Ajax#element_settings}.
*
* @return {object}
* Returns an object with `showEffect`, `hideEffect` and `showSpeed`
* properties.
*/
Drupal.Ajax.prototype.getEffect = function (response) {
var type = response.effect || this.effect;
var speed = response.speed || this.speed;
var effect = {};
if (type === 'none') {
effect.showEffect = 'show';
effect.hideEffect = 'hide';
effect.showSpeed = '';
}
else if (type === 'fade') {
effect.showEffect = 'fadeIn';
effect.hideEffect = 'fadeOut';
effect.showSpeed = speed;
}
else {
effect.showEffect = type + 'Toggle';
effect.hideEffect = type + 'Toggle';
effect.showSpeed = speed;
}
return effect;
};
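/*
 * Behavior sketch (illustrative): with the mapping above, a response of
 * {effect: 'fade', speed: 500} produces jQuery fadeIn/fadeOut at 500 ms,
 * and an unlisted type such as 'slide' falls through to 'slideToggle'.
 * 'myAjaxObject' is a placeholder for an object created with Drupal.ajax().
 *
 * @example
 * var effect = myAjaxObject.getEffect({effect: 'fade', speed: 500});
 * // effect => {showEffect: 'fadeIn', hideEffect: 'fadeOut', showSpeed: 500}
 */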
/**
* Handler for the form redirection error.
*
* @param {object} xmlhttprequest
* Native XMLHttpRequest object.
* @param {string} uri
* Ajax Request URI.
* @param {string} [customMessage]
* Extra message to print with the Ajax error.
*/
Drupal.Ajax.prototype.error = function (xmlhttprequest, uri, customMessage) {
// Remove the progress element.
if (this.progress.element) {
$(this.progress.element).remove();
}
if (this.progress.object) {
this.progress.object.stopMonitoring();
}
// Undo hide.
$(this.wrapper).show();
// Re-enable the element.
$(this.element).prop('disabled', false);
// Reattach behaviors, if they were detached in beforeSerialize().
if (this.$form) {
var settings = this.settings || drupalSettings;
Drupal.attachBehaviors(this.$form.get(0), settings);
}
throw new Drupal.AjaxError(xmlhttprequest, uri, customMessage);
};
/**
* @typedef {object} Drupal.AjaxCommands~commandDefinition
*
* @prop {string} command
* @prop {string} [method]
* @prop {string} [selector]
* @prop {string} [data]
* @prop {object} [settings]
 * @prop {string} [asterisk]
* @prop {string} [text]
* @prop {string} [title]
* @prop {string} [url]
* @prop {object} [argument]
* @prop {string} [name]
* @prop {string} [value]
* @prop {string} [old]
* @prop {string} [new]
* @prop {bool} [merge]
* @prop {Array} [args]
*
* @see Drupal.AjaxCommands
*/
/**
* Provide a series of commands that the client will perform.
*
* @constructor
*/
Drupal.AjaxCommands = function () {};
Drupal.AjaxCommands.prototype = {
/**
* Command to insert new content into the DOM.
*
* @param {Drupal.Ajax} ajax
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.data
* The data to use with the jQuery method.
* @param {string} [response.method]
* The jQuery DOM manipulation method to be used.
* @param {string} [response.selector]
   *   An optional jQuery selector string.
* @param {object} [response.settings]
* An optional array of settings that will be used.
* @param {number} [status]
* The XMLHttpRequest status.
*/
insert: function (ajax, response, status) {
// Get information from the response. If it is not there, default to
// our presets.
var $wrapper = response.selector ? $(response.selector) : $(ajax.wrapper);
var method = response.method || ajax.method;
var effect = ajax.getEffect(response);
var settings;
// We don't know what response.data contains: it might be a string of text
// without HTML, so don't rely on jQuery correctly interpreting
// $(response.data) as new HTML rather than a CSS selector. Also, if
// response.data contains top-level text nodes, they get lost with either
// $(response.data) or $('<div></div>').replaceWith(response.data).
var $new_content_wrapped = $('<div></div>').html(response.data);
var $new_content = $new_content_wrapped.contents();
// For legacy reasons, the effects processing code assumes that
// $new_content consists of a single top-level element. Also, it has not
// been sufficiently tested whether attachBehaviors() can be successfully
// called with a context object that includes top-level text nodes.
// However, to give developers full control of the HTML appearing in the
// page, and to enable Ajax content to be inserted in places where <div>
// elements are not allowed (e.g., within <table>, <tr>, and <span>
// parents), we check if the new content satisfies the requirement
// of a single top-level element, and only use the container <div> created
// above when it doesn't. For more information, please see
// https://www.drupal.org/node/736066.
if ($new_content.length !== 1 || $new_content.get(0).nodeType !== 1) {
$new_content = $new_content_wrapped;
}
// If removing content from the wrapper, detach behaviors first.
switch (method) {
case 'html':
case 'replaceWith':
case 'replaceAll':
case 'empty':
case 'remove':
settings = response.settings || ajax.settings || drupalSettings;
Drupal.detachBehaviors($wrapper.get(0), settings);
}
// Add the new content to the page.
$wrapper[method]($new_content);
// Immediately hide the new content if we're using any effects.
if (effect.showEffect !== 'show') {
$new_content.hide();
}
// Determine which effect to use and what content will receive the
// effect, then show the new content.
if ($new_content.find('.ajax-new-content').length > 0) {
$new_content.find('.ajax-new-content').hide();
$new_content.show();
$new_content.find('.ajax-new-content')[effect.showEffect](effect.showSpeed);
}
else if (effect.showEffect !== 'show') {
$new_content[effect.showEffect](effect.showSpeed);
}
      // Attach all JavaScript behaviors to the new content, if it was
      // successfully added to the page; this check allows `#ajax['wrapper']`
      // to be optional.
if ($new_content.parents('html').length > 0) {
// Apply any settings from the returned JSON if available.
settings = response.settings || ajax.settings || drupalSettings;
Drupal.attachBehaviors($new_content.get(0), settings);
}
},
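  /*
   * Wire-format sketch (illustrative): one 'insert' command as it appears
   * in the JSON array returned by the server. The selector and markup are
   * hypothetical examples.
   *
   * @example
   * [{
   *   "command": "insert",
   *   "method": "append",
   *   "selector": "#my-wrapper",
   *   "data": "<p>New content</p>"
   * }]
   */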
/**
* Command to remove a chunk from the page.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.selector
* A jQuery selector string.
* @param {object} [response.settings]
* An optional array of settings that will be used.
* @param {number} [status]
* The XMLHttpRequest status.
*/
remove: function (ajax, response, status) {
var settings = response.settings || ajax.settings || drupalSettings;
$(response.selector).each(function () {
Drupal.detachBehaviors(this, settings);
})
.remove();
},
/**
* Command to mark a chunk changed.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The JSON response object from the Ajax request.
* @param {string} response.selector
* A jQuery selector string.
   * @param {string} [response.asterisk]
* An optional CSS selector. If specified, an asterisk will be
* appended to the HTML inside the provided selector.
* @param {number} [status]
* The request status.
*/
changed: function (ajax, response, status) {
var $element = $(response.selector);
if (!$element.hasClass('ajax-changed')) {
$element.addClass('ajax-changed');
if (response.asterisk) {
$element.find(response.asterisk).append(' <abbr class="ajax-changed" title="' + Drupal.t('Changed') + '">*</abbr> ');
}
}
},
/**
* Command to provide an alert.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The JSON response from the Ajax request.
* @param {string} response.text
* The text that will be displayed in an alert dialog.
* @param {number} [status]
* The XMLHttpRequest status.
*/
alert: function (ajax, response, status) {
window.alert(response.text, response.title);
},
/**
* Command to set the window.location, redirecting the browser.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.url
* The URL to redirect to.
* @param {number} [status]
* The XMLHttpRequest status.
*/
redirect: function (ajax, response, status) {
window.location = response.url;
},
/**
* Command to provide the jQuery css() function.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.selector
* A jQuery selector string.
* @param {object} response.argument
* An array of key/value pairs to set in the CSS for the selector.
* @param {number} [status]
* The XMLHttpRequest status.
*/
css: function (ajax, response, status) {
$(response.selector).css(response.argument);
},
/**
* Command to set the settings used for other commands in this response.
*
* This method will also remove expired `drupalSettings.ajax` settings.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {bool} response.merge
   *   Determines whether the additional settings should be merged into the
* global settings.
* @param {object} response.settings
* Contains additional settings to add to the global settings.
* @param {number} [status]
* The XMLHttpRequest status.
*/
settings: function (ajax, response, status) {
var ajaxSettings = drupalSettings.ajax;
// Clean up drupalSettings.ajax.
if (ajaxSettings) {
Drupal.ajax.expired().forEach(function (instance) {
// If the Ajax object has been created through drupalSettings.ajax
// it will have a selector. When there is no selector the object
// has been initialized with a special class name picked up by the
// Ajax behavior.
if (instance.selector) {
var selector = instance.selector.replace('#', '');
if (selector in ajaxSettings) {
delete ajaxSettings[selector];
}
}
});
}
if (response.merge) {
$.extend(true, drupalSettings, response.settings);
}
else {
ajax.settings = response.settings;
}
},
/**
* Command to attach data using jQuery's data API.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.name
* The name or key (in the key value pair) of the data attached to this
* selector.
* @param {string} response.selector
* A jQuery selector string.
* @param {string|object} response.value
   *   The value to be attached.
* @param {number} [status]
* The XMLHttpRequest status.
*/
data: function (ajax, response, status) {
$(response.selector).data(response.name, response.value);
},
/**
* Command to apply a jQuery method.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {Array} response.args
* An array of arguments to the jQuery method, if any.
* @param {string} response.method
* The jQuery method to invoke.
* @param {string} response.selector
* A jQuery selector string.
* @param {number} [status]
* The XMLHttpRequest status.
*/
invoke: function (ajax, response, status) {
var $element = $(response.selector);
$element[response.method].apply($element, response.args);
},
/**
* Command to restripe a table.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.selector
* A jQuery selector string.
* @param {number} [status]
* The XMLHttpRequest status.
*/
restripe: function (ajax, response, status) {
// :even and :odd are reversed because jQuery counts from 0 and
// we count from 1, so we're out of sync.
// Match immediate children of the parent element to allow nesting.
$(response.selector).find('> tbody > tr:visible, > tr:visible')
.removeClass('odd even')
.filter(':even').addClass('odd').end()
.filter(':odd').addClass('even');
},
/**
* Command to update a form's build ID.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.old
* The old form build ID.
* @param {string} response.new
* The new form build ID.
* @param {number} [status]
* The XMLHttpRequest status.
*/
update_build_id: function (ajax, response, status) {
$('input[name="form_build_id"][value="' + response.old + '"]').val(response.new);
},
/**
* Command to add css.
*
   * Uses the proprietary addImport method if available, as browsers which
* support that method ignore @import statements in dynamically added
* stylesheets.
*
* @param {Drupal.Ajax} [ajax]
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* The response from the Ajax request.
* @param {string} response.data
* A string that contains the styles to be added.
* @param {number} [status]
* The XMLHttpRequest status.
*/
add_css: function (ajax, response, status) {
// Add the styles in the normal way.
$('head').prepend(response.data);
// Add imports in the styles using the addImport method if available.
var match;
var importMatch = /^@import url\("(.*)"\);$/igm;
if (document.styleSheets[0].addImport && importMatch.test(response.data)) {
importMatch.lastIndex = 0;
      // exec() returns null once no matches remain, which ends the loop.
      while ((match = importMatch.exec(response.data)) !== null) {
        document.styleSheets[0].addImport(match[1]);
      }
}
}
};
})(jQuery, window, Drupal, drupalSettings);
;
/**
* @file
* Adds an HTML element and method to trigger audio UAs to read system messages.
*
* Use {@link Drupal.announce} to indicate to screen reader users that an
* element on the page has changed state. For instance, if clicking a link
* loads 10 more items into a list, one might announce the change like this.
*
* @example
* $('#search-list')
* .on('itemInsert', function (event, data) {
* // Insert the new items.
* $(data.container.el).append(data.items.el);
* // Announce the change to the page contents.
* Drupal.announce(Drupal.t('@count items added to @container',
* {'@count': data.items.length, '@container': data.container.title}
* ));
* });
*/
(function (Drupal, debounce) {
'use strict';
var liveElement;
var announcements = [];
/**
 * Builds a div element with the aria-live attribute and adds it to the DOM.
*
* @type {Drupal~behavior}
*
* @prop {Drupal~behaviorAttach} attach
 *   Attaches the behavior for drupalAnnounce.
*/
Drupal.behaviors.drupalAnnounce = {
attach: function (context) {
// Create only one aria-live element.
if (!liveElement) {
liveElement = document.createElement('div');
liveElement.id = 'drupal-live-announce';
liveElement.className = 'visually-hidden';
liveElement.setAttribute('aria-live', 'polite');
liveElement.setAttribute('aria-busy', 'false');
document.body.appendChild(liveElement);
}
}
};
/**
* Concatenates announcements to a single string; appends to the live region.
*/
function announce() {
var text = [];
var priority = 'polite';
var announcement;
// Create an array of announcement strings to be joined and appended to the
// aria live region.
var il = announcements.length;
for (var i = 0; i < il; i++) {
announcement = announcements.pop();
text.unshift(announcement.text);
// If any of the announcements has a priority of assertive then the group
// of joined announcements will have this priority.
if (announcement.priority === 'assertive') {
priority = 'assertive';
}
}
if (text.length) {
// Clear the liveElement so that repeated strings will be read.
liveElement.innerHTML = '';
// Set the busy state to true until the node changes are complete.
liveElement.setAttribute('aria-busy', 'true');
// Set the priority to assertive, or default to polite.
liveElement.setAttribute('aria-live', priority);
// Print the text to the live region. Text should be run through
// Drupal.t() before being passed to Drupal.announce().
liveElement.innerHTML = text.join('\n');
// The live text area is updated. Allow the AT to announce the text.
liveElement.setAttribute('aria-busy', 'false');
}
}
/**
* Triggers audio UAs to read the supplied text.
*
* The aria-live region will only read the text that currently populates its
* text node. Replacing text quickly in rapid calls to announce results in
* only the text from the most recent call to {@link Drupal.announce} being
 * read. By wrapping the call to announce in a debounce function, we allow
 * time for multiple calls to {@link Drupal.announce} to queue up their
 * messages. These messages are then joined and appended to the aria-live
 * region as one text node.
*
* @param {string} text
* A string to be read by the UA.
* @param {string} [priority='polite']
* A string to indicate the priority of the message. Can be either
* 'polite' or 'assertive'.
*
* @return {function}
* The return of the call to debounce.
*
* @see http://www.w3.org/WAI/PF/aria-practices/#liveprops
*/
Drupal.announce = function (text, priority) {
// Save the text and priority into a closure variable. Multiple simultaneous
// announcements will be concatenated and read in sequence.
announcements.push({
text: text,
priority: priority
});
// Immediately invoke the function that debounce returns. 200 ms is right at
// the cusp where humans notice a pause, so we will wait
// at most this much time before the set of queued announcements is read.
return (debounce(announce, 200)());
};
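/*
 * Usage sketch (illustrative): messages queued within the debounce window
 * are joined and read once; 'assertive' on any queued message raises the
 * priority for the whole group. The message strings are hypothetical.
 *
 * @example
 * Drupal.announce(Drupal.t('10 items were added'));
 * Drupal.announce(Drupal.t('Saving failed'), 'assertive');
 */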
}(Drupal, Drupal.debounce));
;
(function(){if(window.matchMedia&&window.matchMedia("all").addListener){return false}var e=window.matchMedia,i=e("only all").matches,n=false,t=0,a=[],r=function(i){clearTimeout(t);t=setTimeout(function(){for(var i=0,n=a.length;i<n;i++){var t=a[i].mql,r=a[i].listeners||[],o=e(t.media).matches;if(o!==t.matches){t.matches=o;for(var s=0,l=r.length;s<l;s++){r[s].call(window,t)}}}},30)};window.matchMedia=function(t){var o=e(t),s=[],l=0;o.addListener=function(e){if(!i){return}if(!n){n=true;window.addEventListener("resize",r,true)}if(l===0){l=a.push({mql:o,listeners:s})}s.push(e)};o.removeListener=function(e){for(var i=0,n=s.length;i<n;i++){if(s[i]===e){s.splice(i,1)}}};return o}})();
;
/**
* @file
* Manages elements that can offset the size of the viewport.
*
* Measures and reports viewport offset dimensions from elements like the
* toolbar that can potentially displace the positioning of other elements.
*/
/**
* @typedef {object} Drupal~displaceOffset
*
* @prop {number} top
* @prop {number} left
* @prop {number} right
* @prop {number} bottom
*/
/**
* Triggers when layout of the page changes.
*
 * This is used to position fixed elements on the page during page resize and
* Toolbar toggling.
*
* @event drupalViewportOffsetChange
*/
(function ($, Drupal, debounce) {
'use strict';
/**
* @name Drupal.displace.offsets
*
* @type {Drupal~displaceOffset}
*/
var offsets = {
top: 0,
right: 0,
bottom: 0,
left: 0
};
/**
* Registers a resize handler on the window.
*
* @type {Drupal~behavior}
*/
Drupal.behaviors.drupalDisplace = {
attach: function () {
// Mark this behavior as processed on the first pass.
if (this.displaceProcessed) {
return;
}
this.displaceProcessed = true;
$(window).on('resize.drupalDisplace', debounce(displace, 200));
}
};
/**
* Informs listeners of the current offset dimensions.
*
* @function Drupal.displace
*
* @prop {Drupal~displaceOffset} offsets
*
* @param {bool} [broadcast]
 *   When true or undefined, causes the recalculated offset values to be
* broadcast to listeners.
*
* @return {Drupal~displaceOffset}
 *   An object whose keys are the four sides of an element -- top, right,
 *   bottom and left. The value of each key is the viewport displacement
 *   distance for that edge.
*
* @fires event:drupalViewportOffsetChange
*/
function displace(broadcast) {
offsets = Drupal.displace.offsets = calculateOffsets();
if (typeof broadcast === 'undefined' || broadcast) {
$(document).trigger('drupalViewportOffsetChange', offsets);
}
return offsets;
}
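/*
 * Usage sketch (illustrative): recalculate the offsets without notifying
 * listeners, then read the cached values.
 *
 * @example
 * var offsets = Drupal.displace(false);
 * // The same object is exposed on Drupal.displace.offsets afterwards.
 * console.log(offsets.top, offsets.right, offsets.bottom, offsets.left);
 */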
/**
* Determines the viewport offsets.
*
* @return {Drupal~displaceOffset}
 *   An object whose keys are the four sides of an element -- top, right,
 *   bottom and left. The value of each key is the viewport displacement
 *   distance for that edge.
*/
function calculateOffsets() {
return {
top: calculateOffset('top'),
right: calculateOffset('right'),
bottom: calculateOffset('bottom'),
left: calculateOffset('left')
};
}
/**
* Gets a specific edge's offset.
*
* Any element with the attribute data-offset-{edge} e.g. data-offset-top will
* be considered in the viewport offset calculations. If the attribute has a
* numeric value, that value will be used. If no value is provided, one will
* be calculated using the element's dimensions and placement.
*
* @function Drupal.displace.calculateOffset
*
* @param {string} edge
* The name of the edge to calculate. Can be 'top', 'right',
* 'bottom' or 'left'.
*
* @return {number}
* The viewport displacement distance for the requested edge.
*/
function calculateOffset(edge) {
var edgeOffset = 0;
var displacingElements = document.querySelectorAll('[data-offset-' + edge + ']');
var n = displacingElements.length;
for (var i = 0; i < n; i++) {
var el = displacingElements[i];
      // If the element is not visible, do not consider its dimensions.
if (el.style.display === 'none') {
continue;
}
// If the offset data attribute contains a displacing value, use it.
var displacement = parseInt(el.getAttribute('data-offset-' + edge), 10);
      // If the element's offset data attribute exists
      // but is not a valid number, then get the displacement
// dimensions directly from the element.
if (isNaN(displacement)) {
displacement = getRawOffset(el, edge);
}
// If the displacement value is larger than the current value for this
// edge, use the displacement value.
edgeOffset = Math.max(edgeOffset, displacement);
}
return edgeOffset;
}
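/*
 * Usage sketch (illustrative): an element opts into the calculation above
 * through a data-offset-{edge} attribute. 'my-fixed-bar' is a hypothetical
 * element id.
 *
 * @example
 * // Declare a fixed 39-pixel top displacement...
 * document.getElementById('my-fixed-bar').setAttribute('data-offset-top', '39');
 * // ...or leave the value empty to have it measured from the element.
 * document.getElementById('my-fixed-bar').setAttribute('data-offset-top', '');
 * Drupal.displace();
 */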
/**
* Calculates displacement for element based on its dimensions and placement.
*
* @param {HTMLElement} el
 *   The element whose dimensions and placement will be measured.
*
* @param {string} edge
* The name of the edge of the viewport that the element is associated
* with.
*
* @return {number}
* The viewport displacement distance for the requested edge.
*/
function getRawOffset(el, edge) {
var $el = $(el);
var documentElement = document.documentElement;
var displacement = 0;
var horizontal = (edge === 'left' || edge === 'right');
// Get the offset of the element itself.
var placement = $el.offset()[horizontal ? 'left' : 'top'];
// Subtract scroll distance from placement to get the distance
// to the edge of the viewport.
placement -= window['scroll' + (horizontal ? 'X' : 'Y')] || document.documentElement['scroll' + (horizontal ? 'Left' : 'Top')] || 0;
// Find the displacement value according to the edge.
switch (edge) {
// Left and top elements displace as a sum of their own offset value
// plus their size.
case 'top':
// Total displacement is the sum of the elements placement and size.
displacement = placement + $el.outerHeight();
break;
case 'left':
// Total displacement is the sum of the elements placement and size.
displacement = placement + $el.outerWidth();
break;
// Right and bottom elements displace according to their left and
// top offset. Their size isn't important.
case 'bottom':
displacement = documentElement.clientHeight - placement;
break;
case 'right':
displacement = documentElement.clientWidth - placement;
break;
default:
displacement = 0;
}
return displacement;
}
/**
* Assign the displace function to a property of the Drupal global object.
*
* @ignore
*/
Drupal.displace = displace;
$.extend(Drupal.displace, {
/**
* Expose offsets to other scripts to avoid having to recalculate offsets.
*
* @ignore
*/
offsets: offsets,
/**
     * Expose method to compute a single edge's offset.
*
* @ignore
*/
calculateOffset: calculateOffset
});
})(jQuery, Drupal, Drupal.debounce);
;
/**
* @file
* Builds a nested accordion widget.
*
* Invoke on an HTML list element with the jQuery plugin pattern.
*
* @example
* $('.toolbar-menu').drupalToolbarMenu();
*/
(function ($, Drupal, drupalSettings) {
'use strict';
/**
* Store the open menu tray.
*/
var activeItem = Drupal.url(drupalSettings.path.currentPath);
$.fn.drupalToolbarMenu = function () {
var ui = {
handleOpen: Drupal.t('Extend'),
handleClose: Drupal.t('Collapse')
};
/**
* Handle clicks from the disclosure button on an item with sub-items.
*
* @param {Object} event
* A jQuery Event object.
*/
function toggleClickHandler(event) {
var $toggle = $(event.target);
var $item = $toggle.closest('li');
// Toggle the list item.
toggleList($item);
// Close open sibling menus.
var $openItems = $item.siblings().filter('.open');
toggleList($openItems, false);
}
/**
* Handle clicks from a menu item link.
*
* @param {Object} event
* A jQuery Event object.
*/
function linkClickHandler(event) {
// If the toolbar is positioned fixed (and therefore hiding content
// underneath), then users expect clicks in the administration menu tray
// to take them to that destination but for the menu tray to be closed
// after clicking: otherwise the toolbar itself is obstructing the view
// of the destination they chose.
if (!Drupal.toolbar.models.toolbarModel.get('isFixed')) {
Drupal.toolbar.models.toolbarModel.set('activeTab', null);
}
// Stopping propagation to make sure that once a toolbar-box is clicked
// (the whitespace part), the page is not redirected anymore.
event.stopPropagation();
}
/**
     * Toggle the open/close state of a list in a menu.
*
* @param {jQuery} $item
* The li item to be toggled.
*
* @param {Boolean} switcher
     *   A flag that forces toggleClass to add or remove a class, rather than
* simply toggling its presence.
*/
function toggleList($item, switcher) {
var $toggle = $item.children('.toolbar-box').children('.toolbar-handle');
switcher = (typeof switcher !== 'undefined') ? switcher : !$item.hasClass('open');
// Toggle the item open state.
$item.toggleClass('open', switcher);
// Twist the toggle.
$toggle.toggleClass('open', switcher);
// Adjust the toggle text.
$toggle
.find('.action')
// Expand Structure, Collapse Structure.
.text((switcher) ? ui.handleClose : ui.handleOpen);
}
/**
* Add markup to the menu elements.
*
* Items with sub-elements have a list toggle attached to them. Menu item
     * links and the corresponding list toggle are wrapped in a div
* classed with .toolbar-box. The .toolbar-box div provides a positioning
* context for the item list toggle.
*
* @param {jQuery} $menu
* The root of the menu to be initialized.
*/
function initItems($menu) {
var options = {
class: 'toolbar-icon toolbar-handle',
action: ui.handleOpen,
text: ''
};
// Initialize items and their links.
$menu.find('li > a').wrap('<div class="toolbar-box">');
// Add a handle to each list item if it has a menu.
$menu.find('li').each(function (index, element) {
var $item = $(element);
if ($item.children('ul.toolbar-menu').length) {
var $box = $item.children('.toolbar-box');
options.text = Drupal.t('@label', {'@label': $box.find('a').text()});
$item.children('.toolbar-box')
.append(Drupal.theme('toolbarMenuItemToggle', options));
}
});
}
/**
* Adds a level class to each list based on its depth in the menu.
*
* This function is called recursively on each sub level of lists elements
* until the depth of the menu is exhausted.
*
* @param {jQuery} $lists
* A jQuery object of ul elements.
*
* @param {number} level
* The current level number to be assigned to the list elements.
*/
function markListLevels($lists, level) {
level = (!level) ? 1 : level;
var $lis = $lists.children('li').addClass('level-' + level);
$lists = $lis.children('ul');
if ($lists.length) {
markListLevels($lists, level + 1);
}
}
/**
* On page load, open the active menu item.
*
* Marks the trail of the active link in the menu back to the root of the
* menu with .menu-item--active-trail.
*
* @param {jQuery} $menu
* The root of the menu.
*/
function openActiveItem($menu) {
var pathItem = $menu.find('a[href="' + location.pathname + '"]');
if (pathItem.length && !activeItem) {
activeItem = location.pathname;
}
if (activeItem) {
var $activeItem = $menu.find('a[href="' + activeItem + '"]').addClass('menu-item--active');
var $activeTrail = $activeItem.parentsUntil('.root', 'li').addClass('menu-item--active-trail');
toggleList($activeTrail, true);
}
}
// Return the jQuery object.
return this.each(function (selector) {
var $menu = $(this).once('toolbar-menu');
if ($menu.length) {
// Bind event handlers.
$menu
.on('click.toolbar', '.toolbar-box', toggleClickHandler)
.on('click.toolbar', '.toolbar-box a', linkClickHandler);
$menu.addClass('root');
initItems($menu);
markListLevels($menu);
// Restore previous and active states.
openActiveItem($menu);
}
});
};
/**
* A toggle is an interactive element often bound to a click handler.
*
* @param {object} options
* Options for the button.
* @param {string} options.class
* Class to set on the button.
* @param {string} options.action
* Action for the button.
* @param {string} options.text
* Used as label for the button.
*
* @return {string}
* A string representing a DOM fragment.
*/
Drupal.theme.toolbarMenuItemToggle = function (options) {
return '<button class="' + options['class'] + '"><span class="action">' + options.action + '</span><span class="label">' + options.text + '</span></button>';
};
}(jQuery, Drupal, drupalSettings));
;
/**
* @file
* Defines the behavior of the Drupal administration toolbar.
*/
(function ($, Drupal, drupalSettings) {
'use strict';
// Merge run-time settings with the defaults.
var options = $.extend(
{
breakpoints: {
'toolbar.narrow': '',
'toolbar.standard': '',
'toolbar.wide': ''
}
},
drupalSettings.toolbar,
// Merge strings on top of drupalSettings so that they are not mutable.
{
strings: {
horizontal: Drupal.t('Horizontal orientation'),
vertical: Drupal.t('Vertical orientation')
}
}
);
/**
* Registers tabs with the toolbar.
*
* The Drupal toolbar allows modules to register top-level tabs. These may
* point directly to a resource or toggle the visibility of a tray.
*
* Modules register tabs with hook_toolbar().
*
* @type {Drupal~behavior}
*
* @prop {Drupal~behaviorAttach} attach
* Attaches the toolbar rendering functionality to the toolbar element.
*/
Drupal.behaviors.toolbar = {
attach: function (context) {
// Verify that the user agent understands media queries. Complex admin
// toolbar layouts require media query support.
if (!window.matchMedia('only screen').matches) {
return;
}
// Process the administrative toolbar.
$(context).find('#toolbar-administration').once('toolbar').each(function () {
// Establish the toolbar models and views.
var model = Drupal.toolbar.models.toolbarModel = new Drupal.toolbar.ToolbarModel({
locked: JSON.parse(localStorage.getItem('Drupal.toolbar.trayVerticalLocked')) || false,
activeTab: document.getElementById(JSON.parse(localStorage.getItem('Drupal.toolbar.activeTabID')))
});
Drupal.toolbar.views.toolbarVisualView = new Drupal.toolbar.ToolbarVisualView({
el: this,
model: model,
strings: options.strings
});
Drupal.toolbar.views.toolbarAuralView = new Drupal.toolbar.ToolbarAuralView({
el: this,
model: model,
strings: options.strings
});
Drupal.toolbar.views.bodyVisualView = new Drupal.toolbar.BodyVisualView({
el: this,
model: model
});
// Render collapsible menus.
var menuModel = Drupal.toolbar.models.menuModel = new Drupal.toolbar.MenuModel();
Drupal.toolbar.views.menuVisualView = new Drupal.toolbar.MenuVisualView({
el: $(this).find('.toolbar-menu-administration').get(0),
model: menuModel,
strings: options.strings
});
// Handle the resolution of Drupal.toolbar.setSubtrees.
// This is handled with a deferred so that the function may be invoked
// asynchronously.
Drupal.toolbar.setSubtrees.done(function (subtrees) {
menuModel.set('subtrees', subtrees);
var theme = drupalSettings.ajaxPageState.theme;
localStorage.setItem('Drupal.toolbar.subtrees.' + theme, JSON.stringify(subtrees));
// Indicate on the toolbarModel that subtrees are now loaded.
model.set('areSubtreesLoaded', true);
});
// Attach a listener to the configured media query breakpoints.
for (var label in options.breakpoints) {
if (options.breakpoints.hasOwnProperty(label)) {
var mq = options.breakpoints[label];
var mql = Drupal.toolbar.mql[label] = window.matchMedia(mq);
// Curry the model and the label of the media query breakpoint to
// the mediaQueryChangeHandler function.
mql.addListener(Drupal.toolbar.mediaQueryChangeHandler.bind(null, model, label));
// Fire the mediaQueryChangeHandler for each configured breakpoint
// so that they process once.
Drupal.toolbar.mediaQueryChangeHandler.call(null, model, label, mql);
}
}
// Trigger an initial attempt to load menu subitems. This first attempt
// is made after the media query handlers have had an opportunity to
// process. The toolbar starts in the vertical orientation by default,
// unless the viewport is wide enough to accommodate a horizontal
// orientation. Thus we give the Toolbar a chance to determine if it
// should be set to horizontal orientation before attempting to load
// menu subtrees.
Drupal.toolbar.views.toolbarVisualView.loadSubtrees();
$(document)
// Update the model when the viewport offset changes.
.on('drupalViewportOffsetChange.toolbar', function (event, offsets) {
model.set('offsets', offsets);
});
// Broadcast model changes to other modules.
model
.on('change:orientation', function (model, orientation) {
$(document).trigger('drupalToolbarOrientationChange', orientation);
})
.on('change:activeTab', function (model, tab) {
$(document).trigger('drupalToolbarTabChange', tab);
})
.on('change:activeTray', function (model, tray) {
$(document).trigger('drupalToolbarTrayChange', tray);
});
// If the toolbar's orientation is horizontal and no active tab is
// defined then show the tray of the first toolbar tab by default (but
// not the first 'Home' toolbar tab).
if (Drupal.toolbar.models.toolbarModel.get('orientation') === 'horizontal' && Drupal.toolbar.models.toolbarModel.get('activeTab') === null) {
Drupal.toolbar.models.toolbarModel.set({
activeTab: $('.toolbar-bar .toolbar-tab:not(.home-toolbar-tab) a').get(0)
});
}
});
}
};
/**
* Toolbar methods of Backbone objects.
*
* @namespace
*/
Drupal.toolbar = {
/**
* A hash of View instances.
*
* @type {object.<string, Backbone.View>}
*/
views: {},
/**
* A hash of Model instances.
*
* @type {object.<string, Backbone.Model>}
*/
models: {},
/**
* A hash of MediaQueryList objects tracked by the toolbar.
*
* @type {object.<string, object>}
*/
mql: {},
/**
* Accepts a list of subtree menu elements.
*
* A deferred object that is resolved by an inlined JavaScript callback.
*
* @type {jQuery.Deferred}
*
* @see toolbar_subtrees_jsonp().
*/
setSubtrees: new $.Deferred(),
/**
* Respond to configured narrow media query changes.
*
* @param {Drupal.toolbar.ToolbarModel} model
* A toolbar model
* @param {string} label
* Media query label.
* @param {object} mql
* A MediaQueryList object.
*/
mediaQueryChangeHandler: function (model, label, mql) {
switch (label) {
case 'toolbar.narrow':
model.set({
isOriented: mql.matches,
isTrayToggleVisible: false
});
// If the toolbar doesn't have an explicit orientation yet, or if the
          // narrow media query doesn't match, then set the orientation to
// vertical.
if (!mql.matches || !model.get('orientation')) {
model.set({orientation: 'vertical'}, {validate: true});
}
break;
case 'toolbar.standard':
model.set({
isFixed: mql.matches
});
break;
case 'toolbar.wide':
model.set({
orientation: ((mql.matches) ? 'horizontal' : 'vertical')
}, {validate: true});
// The tray orientation toggle visibility does not need to be
// validated.
model.set({
isTrayToggleVisible: mql.matches
});
break;
default:
break;
}
}
};
/**
* A toggle is an interactive element often bound to a click handler.
*
* @return {string}
* A string representing a DOM fragment.
*/
Drupal.theme.toolbarOrientationToggle = function () {
return '<div class="toolbar-toggle-orientation"><div class="toolbar-lining">' +
'<button class="toolbar-icon" type="button"></button>' +
'</div></div>';
};
/**
* Ajax command to set the toolbar subtrees.
*
* @param {Drupal.Ajax} ajax
* {@link Drupal.Ajax} object created by {@link Drupal.ajax}.
* @param {object} response
* JSON response from the Ajax request.
* @param {number} [status]
* XMLHttpRequest status.
*/
Drupal.AjaxCommands.prototype.setToolbarSubtrees = function (ajax, response, status) {
Drupal.toolbar.setSubtrees.resolve(response.subtrees);
};
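/*
 * Wire-format sketch (illustrative): the command resolves the setSubtrees
 * deferred with the response's 'subtrees' payload. The key shown is a
 * hypothetical menu link id; the markup is truncated for brevity.
 *
 * @example
 * {
 *   "command": "setToolbarSubtrees",
 *   "subtrees": {"system-admin_structure": "<ul>...</ul>"}
 * }
 */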
}(jQuery, Drupal, drupalSettings));
;
/**
* @file
* A Backbone Model for collapsible menus.
*/
(function (Backbone, Drupal) {
'use strict';
/**
* Backbone Model for collapsible menus.
*
* @constructor
*
* @augments Backbone.Model
*/
Drupal.toolbar.MenuModel = Backbone.Model.extend(/** @lends Drupal.toolbar.MenuModel# */{
/**
* @type {object}
*
* @prop {object} subtrees
*/
defaults: /** @lends Drupal.toolbar.MenuModel# */{
/**
* @type {object}
*/
subtrees: {}
}
});
}(Backbone, Drupal));
;
/**
* @file
* A Backbone Model for the toolbar.
*/
(function (Backbone, Drupal) {
'use strict';
/**
* Backbone model for the toolbar.
*
* @constructor
*
* @augments Backbone.Model
*/
Drupal.toolbar.ToolbarModel = Backbone.Model.extend(/** @lends Drupal.toolbar.ToolbarModel# */{
/**
* @type {object}
*
* @prop activeTab
* @prop activeTray
* @prop isOriented
* @prop isFixed
* @prop areSubtreesLoaded
* @prop isViewportOverflowConstrained
* @prop orientation
* @prop locked
* @prop isTrayToggleVisible
* @prop height
* @prop offsets
*/
defaults: /** @lends Drupal.toolbar.ToolbarModel# */{
/**
* The active toolbar tab. All other tabs should be inactive under
* normal circumstances. It will remain active across page loads. The
* active item is stored as an ID selector e.g. '#toolbar-item--1'.
*
* @type {string}
*/
activeTab: null,
/**
* Represents whether a tray is open or not. Stored as an ID selector e.g.
* '#toolbar-item--1-tray'.
*
* @type {string}
*/
activeTray: null,
/**
* Indicates whether the toolbar is displayed in an oriented fashion,
* either horizontal or vertical.
*
* @type {bool}
*/
isOriented: false,
/**
* Indicates whether the toolbar is positioned absolute (false) or fixed
* (true).
*
* @type {bool}
*/
isFixed: false,
/**
* Menu subtrees are loaded through an AJAX request only when the Toolbar
* is set to a vertical orientation.
*
* @type {bool}
*/
areSubtreesLoaded: false,
/**
* If the viewport overflow becomes constrained, isFixed must be true so
* that elements in the trays aren't lost off-screen and impossible to
* get to.
*
* @type {bool}
*/
isViewportOverflowConstrained: false,
/**
* The orientation of the active tray.
*
* @type {string}
*/
orientation: 'vertical',
/**
* A tray is locked if a user toggled it to vertical. Otherwise a tray
* will switch between vertical and horizontal orientation based on the
* configured breakpoints. The locked state will be maintained across page
* loads.
*
* @type {bool}
*/
locked: false,
/**
* Indicates whether the tray orientation toggle is visible.
*
* @type {bool}
*/
isTrayToggleVisible: false,
/**
* The height of the toolbar.
*
* @type {number}
*/
height: null,
/**
* The current viewport offsets determined by {@link Drupal.displace}. The
* offsets suggest how a module might position its components relative to
* the viewport.
*
* @type {object}
*
* @prop {number} top
* @prop {number} right
* @prop {number} bottom
* @prop {number} left
*/
offsets: {
top: 0,
right: 0,
bottom: 0,
left: 0
}
},
/**
* @inheritdoc
*
* @param {object} attributes
* Attributes for the toolbar.
* @param {object} options
* Options for the toolbar.
*
* @return {string|undefined}
* Returns an error message if validation failed.
*/
validate: function (attributes, options) {
// Prevent the orientation from being set to horizontal when it is locked,
// unless `override` is passed as an option.
if (attributes.orientation === 'horizontal' && this.get('locked') && !options.override) {
return Drupal.t('The toolbar cannot be set to a horizontal orientation when it is locked.');
}
}
});
}(Backbone, Drupal));
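// Illustrative only: because validate() above runs solely when
// {validate: true} is passed, a hypothetical caller that must flip a locked
// tray back to horizontal has to pass `override` as well, e.g.
//
//   model.set({orientation: 'horizontal'}, {validate: true, override: true});
//
// which is exactly what the orientation toggle handler does later on.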
;
/**
* @file
* A Backbone view for the body element.
*/
(function ($, Drupal, Backbone) {
'use strict';
Drupal.toolbar.BodyVisualView = Backbone.View.extend(/** @lends Drupal.toolbar.BodyVisualView# */{
/**
* Adjusts the body element with the toolbar position and dimension changes.
*
* @constructs
*
* @augments Backbone.View
*/
initialize: function () {
this.listenTo(this.model, 'change:orientation change:offsets change:activeTray change:isOriented change:isFixed change:isViewportOverflowConstrained', this.render);
},
/**
* @inheritdoc
*/
render: function () {
var $body = $('body');
var orientation = this.model.get('orientation');
var isOriented = this.model.get('isOriented');
var isViewportOverflowConstrained = this.model.get('isViewportOverflowConstrained');
$body
// We are using JavaScript to control media-query handling for two
// reasons: (1) Using JavaScript lets us leverage the breakpoint
// configurations and (2) the CSS is really complex if we try to hide
// some styling from browsers that don't understand CSS media queries.
// If we drive the CSS from classes added through JavaScript,
// then the CSS becomes simpler and more robust.
.toggleClass('toolbar-vertical', (orientation === 'vertical'))
.toggleClass('toolbar-horizontal', (isOriented && orientation === 'horizontal'))
// When the toolbar is fixed, it will not scroll with page scrolling.
.toggleClass('toolbar-fixed', (isViewportOverflowConstrained || this.model.get('isFixed')))
// Toggle the toolbar-tray-open class on the body element. The class is
// applied when a toolbar tray is active. Padding might be applied to
// the body element to prevent the tray from overlapping content.
.toggleClass('toolbar-tray-open', !!this.model.get('activeTray'))
// Apply padding to the top of the body to offset the placement of the
// toolbar bar element.
.css('padding-top', this.model.get('offsets').top);
}
});
}(jQuery, Drupal, Backbone));
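// Illustrative only: with a fixed horizontal toolbar and an open tray, the
// render pass above leaves the body looking roughly like
//
//   <body class="toolbar-horizontal toolbar-fixed toolbar-tray-open"
//         style="padding-top: 39px">
//
// (the 39px padding is a made-up example value taken from `offsets.top`).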
;
/**
* @file
* A Backbone view for the collapsible menus.
*/
(function ($, Backbone, Drupal) {
'use strict';
Drupal.toolbar.MenuVisualView = Backbone.View.extend(/** @lends Drupal.toolbar.MenuVisualView# */{
/**
* Backbone View for collapsible menus.
*
* @constructs
*
* @augments Backbone.View
*/
initialize: function () {
this.listenTo(this.model, 'change:subtrees', this.render);
},
/**
* @inheritdoc
*/
render: function () {
var subtrees = this.model.get('subtrees');
// Add subtrees.
for (var id in subtrees) {
if (subtrees.hasOwnProperty(id)) {
this.$el
.find('#toolbar-link-' + id)
.once('toolbar-subtrees')
.after(subtrees[id]);
}
}
// Render the main menu as a nested, collapsible accordion.
if ('drupalToolbarMenu' in $.fn) {
this.$el
.children('.toolbar-menu')
.drupalToolbarMenu();
}
}
});
}(jQuery, Backbone, Drupal));
;
/**
* @file
* A Backbone view for the aural feedback of the toolbar.
*/
(function (Backbone, Drupal) {
'use strict';
Drupal.toolbar.ToolbarAuralView = Backbone.View.extend(/** @lends Drupal.toolbar.ToolbarAuralView# */{
/**
* Backbone view for the aural feedback of the toolbar.
*
* @constructs
*
* @augments Backbone.View
*
* @param {object} options
* Options for the view.
* @param {object} options.strings
* Various strings to use in the view.
*/
initialize: function (options) {
this.strings = options.strings;
this.listenTo(this.model, 'change:orientation', this.onOrientationChange);
this.listenTo(this.model, 'change:activeTray', this.onActiveTrayChange);
},
/**
* Announces an orientation change.
*
* @param {Drupal.toolbar.ToolbarModel} model
* The toolbar model in question.
* @param {string} orientation
* The new value of the orientation attribute in the model.
*/
onOrientationChange: function (model, orientation) {
Drupal.announce(Drupal.t('Tray orientation changed to @orientation.', {
'@orientation': orientation
}));
},
/**
* Announces a changed active tray.
*
* @param {Drupal.toolbar.ToolbarModel} model
* The toolbar model in question.
* @param {HTMLElement} tray
* The new value of the tray attribute in the model.
*/
onActiveTrayChange: function (model, tray) {
var relevantTray = (tray === null) ? model.previous('activeTray') : tray;
var action = (tray === null) ? Drupal.t('closed') : Drupal.t('opened');
var trayNameElement = relevantTray.querySelector('.toolbar-tray-name');
var text;
if (trayNameElement !== null) {
text = Drupal.t('Tray "@tray" @action.', {
'@tray': trayNameElement.textContent, '@action': action
});
}
else {
text = Drupal.t('Tray @action.', {'@action': action});
}
Drupal.announce(text);
}
});
}(Backbone, Drupal));
;
/**
* @file
* A Backbone view for the toolbar element. Listens to mouse & touch.
*/
(function ($, Drupal, drupalSettings, Backbone) {
'use strict';
Drupal.toolbar.ToolbarVisualView = Backbone.View.extend(/** @lends Drupal.toolbar.ToolbarVisualView# */{
/**
* Event map for the `ToolbarVisualView`.
*
* @return {object}
* A map of events.
*/
events: function () {
// Prevents delay and simulated mouse events.
var touchEndToClick = function (event) {
event.preventDefault();
event.target.click();
};
return {
'click .toolbar-bar .toolbar-tab .trigger': 'onTabClick',
'click .toolbar-toggle-orientation button': 'onOrientationToggleClick',
'touchend .toolbar-bar .toolbar-tab .trigger': touchEndToClick,
'touchend .toolbar-toggle-orientation button': touchEndToClick
};
},
/**
* Backbone view for the toolbar element. Listens to mouse & touch.
*
* @constructs
*
* @augments Backbone.View
*
* @param {object} options
* Options for the view object.
* @param {object} options.strings
* Various strings to use in the view.
*/
initialize: function (options) {
this.strings = options.strings;
this.listenTo(this.model, 'change:activeTab change:orientation change:isOriented change:isTrayToggleVisible', this.render);
this.listenTo(this.model, 'change:mqMatches', this.onMediaQueryChange);
this.listenTo(this.model, 'change:offsets', this.adjustPlacement);
// Add the tray orientation toggles.
this.$el
.find('.toolbar-tray .toolbar-lining')
.append(Drupal.theme('toolbarOrientationToggle'));
// Trigger an activeTab change so that listening scripts can respond on
// page load. This will call render.
this.model.trigger('change:activeTab');
},
/**
* @inheritdoc
*
* @return {Drupal.toolbar.ToolbarVisualView}
* The `ToolbarVisualView` instance.
*/
render: function () {
this.updateTabs();
this.updateTrayOrientation();
this.updateBarAttributes();
// Load the subtrees if the orientation of the toolbar is changed to
// vertical. This condition responds to the case that the toolbar switches
// from horizontal to vertical orientation. The toolbar starts in a
// vertical orientation by default and then switches to horizontal during
// initialization if the media query conditions are met. Simply checking
// that the orientation is vertical here would result in the subtrees
// always being loaded, even when the toolbar initialization ultimately
// results in a horizontal orientation.
//
// @see Drupal.behaviors.toolbar.attach() where admin menu subtrees
// loading is invoked during initialization after media query conditions
// have been processed.
if (this.model.changed.orientation === 'vertical' || this.model.changed.activeTab) {
this.loadSubtrees();
}
// Trigger a recalculation of viewport displacing elements. Use setTimeout
// to ensure this recalculation happens after changes to visual elements
// have processed.
window.setTimeout(function () {
Drupal.displace(true);
}, 0);
return this;
},
/**
* Responds to a toolbar tab click.
*
* @param {jQuery.Event} event
* The event triggered.
*/
onTabClick: function (event) {
// If this tab has a tray associated with it, it is considered an
// activatable tab.
if (event.target.hasAttribute('data-toolbar-tray')) {
var activeTab = this.model.get('activeTab');
var clickedTab = event.target;
// Set the event target as the active item if it is not already.
this.model.set('activeTab', (!activeTab || clickedTab !== activeTab) ? clickedTab : null);
event.preventDefault();
event.stopPropagation();
}
},
/**
* Toggles the orientation of a toolbar tray.
*
* @param {jQuery.Event} event
* The event triggered.
*/
onOrientationToggleClick: function (event) {
var orientation = this.model.get('orientation');
// Determine the toggle-to orientation.
var antiOrientation = (orientation === 'vertical') ? 'horizontal' : 'vertical';
var locked = antiOrientation === 'vertical';
// Remember the locked state.
if (locked) {
localStorage.setItem('Drupal.toolbar.trayVerticalLocked', 'true');
}
else {
localStorage.removeItem('Drupal.toolbar.trayVerticalLocked');
}
// Update the model.
this.model.set({
locked: locked,
orientation: antiOrientation
}, {
validate: true,
override: true
});
event.preventDefault();
event.stopPropagation();
},
/**
* Updates the display of the tabs: toggles a tab and the associated tray.
*/
updateTabs: function () {
var $tab = $(this.model.get('activeTab'));
// Deactivate the previous tab.
$(this.model.previous('activeTab'))
.removeClass('is-active')
.prop('aria-pressed', false);
// Deactivate the previous tray.
$(this.model.previous('activeTray'))
.removeClass('is-active');
// Activate the selected tab.
if ($tab.length > 0) {
$tab
.addClass('is-active')
// Mark the tab as pressed.
.prop('aria-pressed', true);
var name = $tab.attr('data-toolbar-tray');
// Store the active tab name or remove the setting.
var id = $tab.get(0).id;
if (id) {
localStorage.setItem('Drupal.toolbar.activeTabID', JSON.stringify(id));
}
// Activate the associated tray.
var $tray = this.$el.find('[data-toolbar-tray="' + name + '"].toolbar-tray');
if ($tray.length) {
$tray.addClass('is-active');
this.model.set('activeTray', $tray.get(0));
}
else {
// There is no active tray.
this.model.set('activeTray', null);
}
}
else {
// There is no active tray.
this.model.set('activeTray', null);
localStorage.removeItem('Drupal.toolbar.activeTabID');
}
},
/**
* Update the attributes of the toolbar bar element.
*/
updateBarAttributes: function () {
var isOriented = this.model.get('isOriented');
if (isOriented) {
this.$el.find('.toolbar-bar').attr('data-offset-top', '');
}
else {
this.$el.find('.toolbar-bar').removeAttr('data-offset-top');
}
// Toggle between a basic vertical view and a more sophisticated
// horizontal and vertical display of the toolbar bar and trays.
this.$el.toggleClass('toolbar-oriented', isOriented);
},
/**
* Updates the orientation of the active tray if necessary.
*/
updateTrayOrientation: function () {
var orientation = this.model.get('orientation');
// The antiOrientation is used to render the view of action buttons like
// the tray orientation toggle.
var antiOrientation = (orientation === 'vertical') ? 'horizontal' : 'vertical';
// Update the orientation of the trays.
var $trays = this.$el.find('.toolbar-tray')
.removeClass('toolbar-tray-horizontal toolbar-tray-vertical')
.addClass('toolbar-tray-' + orientation);
// Update the tray orientation toggle button.
var iconClass = 'toolbar-icon-toggle-' + orientation;
var iconAntiClass = 'toolbar-icon-toggle-' + antiOrientation;
var $orientationToggle = this.$el.find('.toolbar-toggle-orientation')
.toggle(this.model.get('isTrayToggleVisible'));
$orientationToggle.find('button')
.val(antiOrientation)
.attr('title', this.strings[antiOrientation])
.text(this.strings[antiOrientation])
.removeClass(iconClass)
.addClass(iconAntiClass);
// Update data offset attributes for the trays.
var dir = document.documentElement.dir;
var edge = (dir === 'rtl') ? 'right' : 'left';
// Remove data-offset attributes from the trays so they can be refreshed.
$trays.removeAttr('data-offset-left data-offset-right data-offset-top');
// If an active vertical tray exists, mark it as an offset element.
$trays.filter('.toolbar-tray-vertical.is-active').attr('data-offset-' + edge, '');
// If an active horizontal tray exists, mark it as an offset element.
$trays.filter('.toolbar-tray-horizontal.is-active').attr('data-offset-top', '');
},
/**
* Sets the tops of the trays so that they align with the bottom of the bar.
*/
adjustPlacement: function () {
var $trays = this.$el.find('.toolbar-tray');
if (!this.model.get('isOriented')) {
$trays.css('margin-top', 0);
$trays.removeClass('toolbar-tray-horizontal').addClass('toolbar-tray-vertical');
}
else {
// The toolbar container is invisible. Its placement is used to
// determine the container for the trays.
$trays.css('margin-top', this.$el.find('.toolbar-bar').outerHeight());
}
},
/**
* Calls the endpoint URI that builds an AJAX command with the rendered
* subtrees.
*
* The rendered admin menu subtrees HTML is cached on the client in
* localStorage until the cache of the admin menu subtrees on the server-
* side is invalidated. The subtreesHash is stored in localStorage as well
* and compared to the subtreesHash in drupalSettings to determine when the
* admin menu subtrees cache has been invalidated.
*/
loadSubtrees: function () {
var $activeTab = $(this.model.get('activeTab'));
var orientation = this.model.get('orientation');
// Only load and render the admin menu subtrees if:
// (1) They have not been loaded yet.
// (2) The active tab is the administration menu tab, indicated by the
// presence of the data-drupal-subtrees attribute.
// (3) The orientation of the tray is vertical.
if (!this.model.get('areSubtreesLoaded') && typeof $activeTab.data('drupal-subtrees') !== 'undefined' && orientation === 'vertical') {
var subtreesHash = drupalSettings.toolbar.subtreesHash;
var theme = drupalSettings.ajaxPageState.theme;
var endpoint = Drupal.url('toolbar/subtrees/' + subtreesHash);
var cachedSubtreesHash = localStorage.getItem('Drupal.toolbar.subtreesHash.' + theme);
var cachedSubtrees = JSON.parse(localStorage.getItem('Drupal.toolbar.subtrees.' + theme));
var isVertical = this.model.get('orientation') === 'vertical';
// If we have the subtrees in localStorage and the subtree hash has not
// changed, then use the cached data.
if (isVertical && subtreesHash === cachedSubtreesHash && cachedSubtrees) {
Drupal.toolbar.setSubtrees.resolve(cachedSubtrees);
}
// Only make the call to get the subtrees if the orientation of the
// toolbar is vertical.
else if (isVertical) {
// Remove the cached menu information.
localStorage.removeItem('Drupal.toolbar.subtreesHash.' + theme);
localStorage.removeItem('Drupal.toolbar.subtrees.' + theme);
// The AJAX response's command will trigger the resolve method of the
// Drupal.toolbar.setSubtrees Promise.
Drupal.ajax({url: endpoint}).execute();
// Cache the hash for the subtrees locally.
localStorage.setItem('Drupal.toolbar.subtreesHash.' + theme, subtreesHash);
}
}
}
});
}(jQuery, Drupal, drupalSettings, Backbone));
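// Illustrative sketch of the cache check described in loadSubtrees() above
// (same localStorage keys, simplified control flow):
//
//   var theme = drupalSettings.ajaxPageState.theme;
//   var cachedHash = localStorage.getItem('Drupal.toolbar.subtreesHash.' + theme);
//   if (drupalSettings.toolbar.subtreesHash === cachedHash) {
//     // Hash unchanged: resolve from localStorage, no AJAX round-trip.
//   } else {
//     // Hash changed: drop the stale entries and fetch via Drupal.ajax().
//   }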
;<|fim▁end|> |
// Let the server know when the IFRAME submission mechanism is used. The
// server can use this information to wrap the JSON response in a
// TEXTAREA, as per http://jquery.malsup.com/form/#file-upload. |
<|file_name|>api.py<|end_file_name|><|fim▁begin|>from django.core.exceptions import MultipleObjectsReturned
from django.shortcuts import redirect
from django.urls import reverse, path
from wagtail.api.v2.router import WagtailAPIRouter
from wagtail.api.v2.views import PagesAPIViewSet, BaseAPIViewSet
from wagtail.images.api.v2.views import ImagesAPIViewSet
from wagtail.documents.api.v2.views import DocumentsAPIViewSet
class OpenstaxPagesAPIEndpoint(PagesAPIViewSet):
"""
OpenStax custom Pages API endpoint that allows finding pages and books by pk or slug
"""
def detail_view(self, request, pk=None, slug=None):
param = pk
if slug is not None:
self.lookup_field = 'slug'
param = slug
try:
return super().detail_view(request, param)
except MultipleObjectsReturned:
# Redirect to the listing view, filtered by the relevant slug
# The router is registered with the `wagtailapi` namespace,
# `pages` is our endpoint namespace and `listing` is the listing view url name.
return redirect(
reverse('wagtailapi:pages:listing') + f'?{self.lookup_field}={param}'
)
@classmethod
def get_urlpatterns(cls):
"""
This returns a list of URL patterns for the endpoint
"""
return [
path('', cls.as_view({'get': 'listing_view'}), name='listing'),
path('<int:pk>/', cls.as_view({'get': 'detail_view'}), name='detail'),
path('<slug:slug>/', cls.as_view({'get': 'detail_view'}), name='detail'),
path('find/', cls.as_view({'get': 'find_view'}), name='find'),
]
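# Illustrative only: with the patterns above, both /api/v2/pages/42/ (pk)
# and /api/v2/pages/some-book-slug/ (slug) hit detail_view, and a slug that
# matches several pages falls back to the filtered listing redirect in
# detail_view. The URL prefix shown here is an assumption about how the
# router is mounted.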
<|fim▁hole|> nested_default_fields = BaseAPIViewSet.nested_default_fields + ['title', 'download_url', 'height', 'width']
# Create the router. “wagtailapi” is the URL namespace
api_router = WagtailAPIRouter('wagtailapi')
# Add the three endpoints using the "register_endpoint" method.
# The first parameter is the name of the endpoint (eg. pages, images). This
# is used in the URL of the endpoint
# The second parameter is the endpoint class that handles the requests
api_router.register_endpoint('pages', OpenstaxPagesAPIEndpoint)
api_router.register_endpoint('images', OpenStaxImagesAPIViewSet)
api_router.register_endpoint('documents', DocumentsAPIViewSet)<|fim▁end|> |
class OpenStaxImagesAPIViewSet(ImagesAPIViewSet):
meta_fields = BaseAPIViewSet.meta_fields + ['tags', 'download_url', 'height', 'width'] |
<|file_name|>ir_ula.py<|end_file_name|><|fim▁begin|>#----------------------------------------------
# ir_ula.py
#
# Intermediate representation for the ula (unconventional language)
# By Mitch Myburgh (MYBMIT001)
# 24 09 2015
#----------------------------------------------
from llvmlite import ir
from ctypes import CFUNCTYPE, c_float
import llvmlite.binding as llvm
# code for the parser
from ply import yacc
from lex_ula import tokens
import os
import sys
start = "Start"
def p_start(p):
"""Start : Program"""
p[0] = p[1]
def p_program_statements(p):
"""Program : Statements"""
p[0] = ["Program", p[1]]
def p_statements(p):
"""Statements : Statements Statement
| Statement"""
if len(p) == 2:
p[0] = [p[1]]
else:
p[0] = p[1] + [p[2]]
def p_statement(p):
"""Statement : ID '=' expression"""
p[0] = ["=", [p[1]], p[3]]
def p_expression_plus(p):
"""expression : expression '@' term"""
p[0] = ["@", p[1], p[3]]
def p_expression_minus(p):
"""expression : expression '$' term"""
p[0] = ["$", p[1], p[3]]
def p_expression_term(p):
"""expression : term"""
p[0] = p[1]
def p_term_multiply(p):
"""term : term '#' factor"""
p[0] = ["#", p[1], p[3]]
<|fim▁hole|>def p_term_divide(p):
"""term : term '&' factor"""
p[0] = ["&", p[1], p[3]]
def p_term_factor(p):
"""term : factor"""
p[0] = p[1]
def p_factor_expression(p):
"""factor : '(' expression ')'"""
p[0] = p[2]
def p_factor_float(p):
"""factor : FLOAT_LITERAL"""
p[0] = [p[1]]
def p_factor_id(p):
"""factor : ID"""
p[0] = [p[1]]
def p_error(p):
pass
def print_tree(tupletree, depth=0):
print("\t"*depth, tupletree[0])
for item in tupletree[1]:
if isinstance(item, tuple):
print_tree(item, depth + 1)
else:
print("\t"*(depth+1), item)
parser = yacc.yacc()
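# Illustrative only: for a one-line ula program such as "x = 1 @ 2" the
# grammar above yields roughly
#   ["Program", [["=", ["x"], ["@", ["1"], ["2"]]]]]
# which code_gen() below walks to emit the corresponding LLVM IR.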
#main function for the parser
def main():
global infilename
if len(sys.argv) == 2:
infilename = sys.argv[1]
if os.path.isfile(infilename):
infile = open(infilename, "r")
syntree = parser.parse(infile.read())
# print_tree(syntree)
return syntree
else:
print("Not a valid file")
else:
print("Specify filename, e.g. parse_ula.ply my_program.ula")
##llvmlite stuff
last_var = "" # keeps track of the last var assigned
var_dict = {} # var names associated with memory location
def code_gen(tree): # traverse tree recursively to generate code
global last_var
if tree[0] == "Program":
for t in tree[1]:
code_gen(t)
elif tree[0] == "=":
last_var = tree[1][0]
var_dict[last_var] = builder.alloca(ir.FloatType())
builder.store(code_gen(tree[2]), var_dict[last_var])
elif tree[0] == "@":
return(builder.fadd(code_gen(tree[1]),code_gen(tree[2])))
elif tree[0] == "$":
return(builder.fsub(code_gen(tree[1]),code_gen(tree[2])))
elif tree[0] == "#":
return(builder.fmul(code_gen(tree[1]),code_gen(tree[2])))
elif tree[0] == "&":
return(builder.fdiv(code_gen(tree[1]),code_gen(tree[2])))
elif tree[0] in var_dict.keys():
return builder.load(var_dict[tree[0]])
elif isinstance(float(tree[0]), float):
return(ir.Constant(ir.FloatType(), float(tree[0])))
#main function for the ir generator
def run():
global builder
tree = main()
flttyp = ir.FloatType() # create float type
fnctyp = ir.FunctionType(flttyp, ()) # create function type to return a float
module = ir.Module(name="ula") # create module named "ula"
func = ir.Function(module, fnctyp, name="main") # create "main" function
block = func.append_basic_block(name="entry") # create block "entry" label
builder = ir.IRBuilder(block) # create irbuilder to generate code
code_gen(tree) # call code_gen() to traverse tree & generate code
builder.ret(builder.load(var_dict[last_var])) # specify return value
return module
if __name__ == "__main__":
module = run()
outfilename = os.path.splitext(infilename)[0]+".ir"
outfile = open(outfilename, "w")
print(str(module).strip())
print(str(module).strip(), file = outfile)
outfile.close()<|fim▁end|> | |
<|file_name|>INaviEdgeListener.java<|end_file_name|><|fim▁begin|>/*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.binnavi.disassembly;<|fim▁hole|>import com.google.security.zynamics.zylib.gui.zygraph.edges.IViewEdgeListener;
/**
* Interface for objects that want to be notified about changes in edges.
*/
public interface INaviEdgeListener extends IViewEdgeListener {
/**
* Invoked after the global comment of an edge changed.
*
* @param naviEdge The edge whose global comment changed.
*/
void changedGlobalComment(CNaviViewEdge naviEdge);
/**
* Invoked after the local comment of an edge changed.
*
* @param naviEdge The edge whose local comment changed.
*/
void changedLocalComment(CNaviViewEdge naviEdge);
}<|fim▁end|> | |
<|file_name|>exports.util.js<|end_file_name|><|fim▁begin|>function util() { <|fim▁hole|> Object.beget = function (o) {
var F = function () {};
F.prototype = o;
return new F();
};
}
};
exports.util = new util();<|fim▁end|> | var self = this;
if (typeof Object.beget !== 'function') { |
<|file_name|>0005_auto_20141127_1436.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>
class Migration(migrations.Migration):
dependencies = [
('computing', '0004_auto_20141127_1425'),
]
operations = [
migrations.CreateModel(
name='Subnet',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=200)),
('from_ip', models.CharField(max_length=15)),
('to_ip', models.CharField(max_length=15)),
],
options={
},
bases=(models.Model,),
),
migrations.DeleteModel(
name='Category',
),
migrations.AddField(
model_name='computer',
name='subnet',
field=models.ForeignKey(blank=True, to='computing.Subnet', null=True),
preserve_default=True,
),
]<|fim▁end|> | from __future__ import unicode_literals
from django.db import models, migrations |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from django.apps import AppConfig
<|fim▁hole|>class InstaappConfig(AppConfig):
name = 'instaapp'<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This library allows exporting tracing data to an OTLP collector.
Usage
-----
The **OTLP Span Exporter** allows exporting `OpenTelemetry`_ traces to the
`OTLP`_ collector.
You can configure the exporter with the following environment variables:
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_TIMEOUT`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_PROTOCOL`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_HEADERS`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_ENDPOINT`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_COMPRESSION`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE`
- :envvar:`OTEL_EXPORTER_OTLP_TIMEOUT`
- :envvar:`OTEL_EXPORTER_OTLP_PROTOCOL`
- :envvar:`OTEL_EXPORTER_OTLP_HEADERS`
- :envvar:`OTEL_EXPORTER_OTLP_ENDPOINT`
- :envvar:`OTEL_EXPORTER_OTLP_COMPRESSION`
- :envvar:`OTEL_EXPORTER_OTLP_CERTIFICATE`
.. _OTLP: https://github.com/open-telemetry/opentelemetry-collector/
.. _OpenTelemetry: https://github.com/open-telemetry/opentelemetry-python/
.. code:: python
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
# Resource can be required for some backends, e.g. Jaeger
# If the resource is not set, traces will not appear in Jaeger
resource = Resource(attributes={
"service.name": "service"
})
trace.set_tracer_provider(TracerProvider(resource=resource))
tracer = trace.get_tracer(__name__)
otlp_exporter = OTLPSpanExporter()
span_processor = BatchSpanProcessor(otlp_exporter)
trace.get_tracer_provider().add_span_processor(span_processor)
with tracer.start_as_current_span("foo"):
print("Hello world!")
<|fim▁hole|>
class Compression(enum.Enum):
NoCompression = "none"
Deflate = "deflate"
Gzip = "gzip"<|fim▁end|> | API
---
"""
import enum |
<|file_name|>uniprot_core.py<|end_file_name|><|fim▁begin|># reads uniprot core file and generates core features
from features_helpers import score_differences
def build_uniprot_to_index_to_core(sable_db_obj):
uniprot_to_index_to_core = {}
for line in sable_db_obj:
tokens = line.split()
try:
# PARSING ID
prot = tokens[0]
index = int(tokens[1])
core = tokens[2]
# PARSING ID
if uniprot_to_index_to_core.has_key(prot):
uniprot_to_index_to_core[prot][index] = core
else:
uniprot_to_index_to_core[prot] = {index: core}
except ValueError:
print "Cannot parse: " + line[0:len(line) - 1]
return uniprot_to_index_to_core
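# Illustrative only: given a SABLE line "P12345 7 C", the loop above yields
# uniprot_to_index_to_core["P12345"][7] == "C" (protein accession ->
# residue index -> core label); the accession shown is a made-up example.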
def get_sable_scores(map_file, f_sable_db_location, uniprot_core_output_location):
map_file_obj = open(map_file, 'r')
sable_db_obj = open(f_sable_db_location, 'r')
write_to = open(uniprot_core_output_location, 'w')
uniprot_to_index_to_core = build_uniprot_to_index_to_core(sable_db_obj)
for line in map_file_obj:
tokens = line.split()
asid = tokens[0].split("_")[0]
prot = tokens[1]
sstart = int(tokens[2])
start = int(tokens[3])
end = int(tokens[4])
eend = int(tokens[5])
rough_a_length = int(int(tokens[0].split("_")[-1].split("=")[1]) / 3)
if asid[0] == "I":
rough_a_length = 0
c1_count = 0
a_count = 0
c2_count = 0<|fim▁hole|> a_count = score_differences(uniprot_to_index_to_core, prot, start, end)
c2_count = score_differences(uniprot_to_index_to_core, prot, end, eend)
prot_len = int(line.split("\t")[7].strip())
canonical_absolute = score_differences(uniprot_to_index_to_core, prot, 1, prot_len)
print >> write_to, tokens[0] + "\t" + prot + "\t" + repr(c1_count) + "\t" + repr(a_count) + "\t" + repr(
c2_count) + "\t" + repr(canonical_absolute)
write_to.close()<|fim▁end|> | canonical_absolute = 0
if prot in uniprot_to_index_to_core:
c1_count = score_differences(uniprot_to_index_to_core, prot, sstart, start) |
<|file_name|>service.go<|end_file_name|><|fim▁begin|>package dataflow
import (
"context"
"strconv"
"time"
"go-common/app/interface/main/app-interface/conf"
"go-common/library/log"
"go-common/library/log/infoc"
)
// Service is the search service
type Service struct {
c *conf.Config
infoc *infoc.Infoc<|fim▁hole|>}
// New is search service initial func
func New(c *conf.Config) (s *Service) {
s = &Service{
c: c,
infoc: infoc.New(c.Infoc),
}
return
}
func (s *Service) Report(c context.Context, eventID, eventType, buvid, fts, messageInfo string, now time.Time) (err error) {
if err = s.infoc.Info(strconv.FormatInt(now.Unix(), 10), eventID, eventType, buvid, fts, messageInfo); err != nil {
log.Error("s.infoc2.Info(%v,%v,%v,%v,%v,%v) error(%v)", strconv.FormatInt(now.Unix(), 10), eventID, eventType, buvid, fts, messageInfo, err)
}
return
}<|fim▁end|> | |
<|file_name|>oph_res_users.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#===============================================================================
# Custom res_users object
# Add an CAFAT ID for use in New Caledonia
#===============================================================================
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
class res_users(orm.Model):<|fim▁hole|> """
Custom res_users object
Add a CAFAT ID for use in New Caledonia
It's for the Odoo user, not the partner
For a partner you'll find the CAFAT ID in the res.partner object
"""
_inherit = "res.users"
_columns = {
'cafat_id':fields.char('CAFAT ID', size = 16, help = 'CAFAT ID of the doctor = convention number. This is not the CAFAT Number as for a patient'),
}<|fim▁end|> | |
<|file_name|>autotools_test.py<|end_file_name|><|fim▁begin|>import os
from conan.tools.files.files import save_toolchain_args
from conan.tools.gnu import Autotools
from conans.test.utils.mocks import ConanFileMock
from conans.test.utils.test_files import temp_folder
def test_source_folder_works():
folder = temp_folder()
os.chdir(folder)<|fim▁hole|> "make_args": ""}
)
conanfile = ConanFileMock()
conanfile.folders.set_base_install(folder)
sources = "/path/to/sources"
conanfile.folders.set_base_source(sources)
autotools = Autotools(conanfile)
autotools.configure(build_script_folder="subfolder")
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/subfolder/configure" -foo bar'
autotools.configure()
assert conanfile.command.replace("\\", "/") == '"/path/to/sources/configure" -foo bar'<|fim▁end|> | save_toolchain_args({
"configure_args": "-foo bar", |
<|file_name|>cache.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Alexander Bredo
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or
# without modification, are permitted provided that the
# following conditions are met:
#
# 1. Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials
# provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
# CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import time
from multiprocessing import Lock
class IndexedTimeCache():
'''
@param ttl: Maximum time to live for an inserted item (the first one will be applied)
'''
lock = Lock()
def __init__(self, ttl=30):
self.cache = dict()
self.ttl = ttl
def insert(self, index, data, ignore_fields=[]):
IndexedTimeCache.lock.acquire()
if index in self.cache: # UPDATE + AGGREGATE
self.cache[index]['data'] = self.__aggregate(self.cache[index]['data'], data, ignore_fields)
else: # NEW
self.cache[index] = {
'timestamp': int(time.time()), # Insert Time
'data': data
}
IndexedTimeCache.lock.release()
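# Illustrative only: two inserts under the same index are merged by
# __aggregate(), so plain numeric fields add up, e.g.
#   cache.insert("k", {"a": 1}); cache.insert("k", {"a": 2})
# leaves the entry for "k" holding {"a": 3} until it falls out of the TTL.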
def size(self):
return len(self.cache)
def getItemsOutOfTTL(self):
IndexedTimeCache.lock.acquire()
cache_outofdate = dict()
cache_new = dict()
for k,v in self.cache.items():
if v['timestamp'] < (time.time() - self.ttl):
cache_outofdate[k] = v
else:
cache_new[k] = v
self.cache = cache_new # Update Cache
IndexedTimeCache.lock.release()
#print(len(cache_outofdate), len(cache_new))
#print(cache_outofdate)
#print(cache_new)
return [item['data'] for item in cache_outofdate.values()]
# cache_outofdate: dict_values([{'data': {'b': 1, 'a': 2, 'c': 4}, 'timestamp': 1403523219}, {...} ])
# Return: [{'c': 2, 'b': 23, 'a': 25}, {'c': 2, 'b': 32, 'a': 29}, ...
def __aggregate(self, old, new, ignore_fields):
aggregated = old
for key, value in new.items():
if isinstance(value, dict):
for sub_key, sub_value in value.items():
if key in aggregated and (key not in ignore_fields or sub_key not in ignore_fields):
if sub_key in aggregated[key]:
aggregated[key][sub_key] += sub_value
else:
print("ERROR: Stats-Aggregation. Fields not found")
#aggregated[key][sub_key] = dict()
#aggregated[key][sub_key] = sub_value
else:
aggregated[key] = dict() #copy?
print("ERROR: Stats-Aggregation. Fields not found")
elif key not in ignore_fields:
aggregated[key] += new[key]
return aggregated
'''
import random
c = IndexedTimeCache(0)
for i in range(0,50):
c.insert((int(time.time() - random.randint(1, 5))), { 'a': random.randint(1, 5), 'b': random.randint(1, 5), 'c': random.randint(1, 5) }, ['c'])
print(c.size())
print("====", c.getItemsOutOfTTL())
print(c.size())
'''
'''
c = IndexedTimeCache(0)
c.insert('123456789Hamburg', {
"@timestamp": 123456789,
"networkLocation": "Hamburg",
"flow_request": {
"packetDeltaCountPerSec": 30,
"octetDeltaCountPerSec": 30,<|fim▁hole|>})
c.insert('123456789Hamburg', {
"@timestamp": 123456789,
"networkLocation": "Hamburg",
"flow_request": {
"packetDeltaCountPerSec": 60,
"octetDeltaCountPerSec": 60,
"flowDurationMilliseconds": 600
}
})
c.insert('123456789Hamburg', {
"@timestamp": 123456789,
"networkLocation": "Hamburg",
"flow_request": {
"packetDeltaCountPerSec": 20,
"octetDeltaCountPerSec": 200,
"flowDurationMilliseconds": 2000
}
})
print(c.getItemsOutOfTTL())
'''<|fim▁end|> | "flowDurationMilliseconds": 300
} |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![allow(dead_code)] // FFI wrappers
#![allow(non_snake_case_functions)]
use llvm;
use llvm::{CallConv, AtomicBinOp, AtomicOrdering, AsmDialect};
use llvm::{Opcode, IntPredicate, RealPredicate};
use llvm::{ValueRef, BasicBlockRef};
use middle::trans::common::*;
use syntax::codemap::Span;
use middle::trans::builder::Builder;
use middle::trans::type_::Type;
use libc::{c_uint, c_ulonglong, c_char};
pub fn terminate(cx: &Block, _: &str) {
debug!("terminate({})", cx.to_str());
cx.terminated.set(true);
}
pub fn check_not_terminated(cx: &Block) {
if cx.terminated.get() {
fail!("already terminated!");
}
}
pub fn B<'a>(cx: &'a Block) -> Builder<'a> {
let b = cx.fcx.ccx.builder();
b.position_at_end(cx.llbb);
b
}
// The difference between a block being unreachable and being terminated is
// somewhat obscure, and has to do with error checking. When a block is
// terminated, we're saying that trying to add any further statements in the
// block is an error. On the other hand, if something is unreachable, that
// means that the block was terminated in some way that we don't want to check
// for (fail/break/return statements, call to diverging functions, etc), and
// further instructions to the block should simply be ignored.
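// Illustrative only: that distinction is why nearly every wrapper below
// follows the same two-line guard pattern, e.g.
//
//   if cx.unreachable.get() { return; } // dead code: silently ignore
//   check_not_terminated(cx);           // terminated block: hard failure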
pub fn RetVoid(cx: &Block) {
if cx.unreachable.get() { return; }
check_not_terminated(cx);
terminate(cx, "RetVoid");
B(cx).ret_void();
}
pub fn Ret(cx: &Block, v: ValueRef) {
if cx.unreachable.get() { return; }
check_not_terminated(cx);
terminate(cx, "Ret");
B(cx).ret(v);
}
pub fn AggregateRet(cx: &Block, ret_vals: &[ValueRef]) {
if cx.unreachable.get() { return; }
check_not_terminated(cx);
terminate(cx, "AggregateRet");
B(cx).aggregate_ret(ret_vals);
}
pub fn Br(cx: &Block, dest: BasicBlockRef) {
if cx.unreachable.get() { return; }
check_not_terminated(cx);
terminate(cx, "Br");
B(cx).br(dest);
}
pub fn CondBr(cx: &Block,
if_: ValueRef,
then: BasicBlockRef,
else_: BasicBlockRef) {
if cx.unreachable.get() { return; }
check_not_terminated(cx);
terminate(cx, "CondBr");
B(cx).cond_br(if_, then, else_);
}
pub fn Switch(cx: &Block, v: ValueRef, else_: BasicBlockRef, num_cases: uint)
-> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
check_not_terminated(cx);
terminate(cx, "Switch");
B(cx).switch(v, else_, num_cases)
}
pub fn AddCase(s: ValueRef, on_val: ValueRef, dest: BasicBlockRef) {
unsafe {
if llvm::LLVMIsUndef(s) == llvm::True { return; }
llvm::LLVMAddCase(s, on_val, dest);
}
}
pub fn IndirectBr(cx: &Block, addr: ValueRef, num_dests: uint) {
if cx.unreachable.get() { return; }
check_not_terminated(cx);
terminate(cx, "IndirectBr");
B(cx).indirect_br(addr, num_dests);
}
pub fn Invoke(cx: &Block,
fn_: ValueRef,
args: &[ValueRef],
then: BasicBlockRef,
catch: BasicBlockRef,
attributes: &[(uint, u64)])
-> ValueRef {
if cx.unreachable.get() {
return C_null(Type::i8(cx.ccx()));
}
check_not_terminated(cx);
terminate(cx, "Invoke");
debug!("Invoke({} with arguments ({}))",
cx.val_to_string(fn_),
args.iter().map(|a| cx.val_to_string(*a)).collect::<Vec<String>>().connect(", "));
B(cx).invoke(fn_, args, then, catch, attributes)
}
pub fn Unreachable(cx: &Block) {
if cx.unreachable.get() {
return
}
cx.unreachable.set(true);
if !cx.terminated.get() {
B(cx).unreachable();
}
}
pub fn _Undef(val: ValueRef) -> ValueRef {
unsafe {
return llvm::LLVMGetUndef(val_ty(val).to_ref());
}
}
/* Arithmetic */
pub fn Add(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).add(lhs, rhs)
}
pub fn NSWAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).nswadd(lhs, rhs)
}
pub fn NUWAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).nuwadd(lhs, rhs)
}
pub fn FAdd(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).fadd(lhs, rhs)
}
pub fn Sub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).sub(lhs, rhs)
}
pub fn NSWSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).nswsub(lhs, rhs)
}
pub fn NUWSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).nuwsub(lhs, rhs)
}
pub fn FSub(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).fsub(lhs, rhs)
}
pub fn Mul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).mul(lhs, rhs)
}
pub fn NSWMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).nswmul(lhs, rhs)
}
pub fn NUWMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).nuwmul(lhs, rhs)
}
pub fn FMul(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).fmul(lhs, rhs)
}
pub fn UDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).udiv(lhs, rhs)
}
pub fn SDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).sdiv(lhs, rhs)
}
pub fn ExactSDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).exactsdiv(lhs, rhs)
}
pub fn FDiv(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).fdiv(lhs, rhs)
}
pub fn URem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).urem(lhs, rhs)
}
pub fn SRem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).srem(lhs, rhs)
}
pub fn FRem(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).frem(lhs, rhs)
}
pub fn Shl(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).shl(lhs, rhs)
}
pub fn LShr(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).lshr(lhs, rhs)
}
pub fn AShr(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).ashr(lhs, rhs)
}
pub fn And(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).and(lhs, rhs)
}
pub fn Or(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).or(lhs, rhs)
}
pub fn Xor(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).xor(lhs, rhs)
}
pub fn BinOp(cx: &Block, op: Opcode, lhs: ValueRef, rhs: ValueRef)
-> ValueRef {
if cx.unreachable.get() { return _Undef(lhs); }
B(cx).binop(op, lhs, rhs)
}
pub fn Neg(cx: &Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
B(cx).neg(v)
}
pub fn NSWNeg(cx: &Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
B(cx).nswneg(v)
}
pub fn NUWNeg(cx: &Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
B(cx).nuwneg(v)
}
pub fn FNeg(cx: &Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
B(cx).fneg(v)
}
pub fn Not(cx: &Block, v: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(v); }
B(cx).not(v)
}
/* Memory */
pub fn Malloc(cx: &Block, ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
}
B(cx).malloc(ty)
}
}
pub fn ArrayMalloc(cx: &Block, ty: Type, val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
}
B(cx).array_malloc(ty, val)
}
}
pub fn Alloca(cx: &Block, ty: Type, name: &str) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
AllocaFcx(cx.fcx, ty, name)
}
}
pub fn AllocaFcx(fcx: &FunctionContext, ty: Type, name: &str) -> ValueRef {
let b = fcx.ccx.builder();
b.position_before(fcx.alloca_insert_pt.get().unwrap());
b.alloca(ty, name)
}
pub fn ArrayAlloca(cx: &Block, ty: Type, val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); }
let b = cx.fcx.ccx.builder();
b.position_before(cx.fcx.alloca_insert_pt.get().unwrap());
b.array_alloca(ty, val)
}
}
pub fn Free(cx: &Block, pointer_val: ValueRef) {
if cx.unreachable.get() { return; }
B(cx).free(pointer_val)
}
pub fn Load(cx: &Block, pointer_val: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable.get() {
let ty = val_ty(pointer_val);
let eltty = if ty.kind() == llvm::Array {
ty.element_type()
} else {
ccx.int_type
};
return llvm::LLVMGetUndef(eltty.to_ref());
}
B(cx).load(pointer_val)
}
}
pub fn VolatileLoad(cx: &Block, pointer_val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).volatile_load(pointer_val)
}
}
pub fn AtomicLoad(cx: &Block, pointer_val: ValueRef, order: AtomicOrdering) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable.get() {
return llvm::LLVMGetUndef(ccx.int_type.to_ref());
}
B(cx).atomic_load(pointer_val, order)
}
}
pub fn LoadRangeAssert(cx: &Block, pointer_val: ValueRef, lo: c_ulonglong,
hi: c_ulonglong, signed: llvm::Bool) -> ValueRef {
if cx.unreachable.get() {
let ccx = cx.fcx.ccx;
let ty = val_ty(pointer_val);
let eltty = if ty.kind() == llvm::Array {
ty.element_type()
} else {
ccx.int_type
};
unsafe {
llvm::LLVMGetUndef(eltty.to_ref())
}
} else {
B(cx).load_range_assert(pointer_val, lo, hi, signed)
}
}
pub fn Store(cx: &Block, val: ValueRef, ptr: ValueRef) {
if cx.unreachable.get() { return; }
B(cx).store(val, ptr)
}
pub fn VolatileStore(cx: &Block, val: ValueRef, ptr: ValueRef) {
if cx.unreachable.get() { return; }
B(cx).volatile_store(val, ptr)
}
pub fn AtomicStore(cx: &Block, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) {
if cx.unreachable.get() { return; }
B(cx).atomic_store(val, ptr, order)
}
pub fn GEP(cx: &Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
}
B(cx).gep(pointer, indices)
}
}
// Simple wrapper around GEP that takes an array of ints and wraps them
// in C_i32()
#[inline]
pub fn GEPi(cx: &Block, base: ValueRef, ixs: &[uint]) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
}
B(cx).gepi(base, ixs)
}
}
pub fn InBoundsGEP(cx: &Block, pointer: ValueRef, indices: &[ValueRef]) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
}
B(cx).inbounds_gep(pointer, indices)
}
}
pub fn StructGEP(cx: &Block, pointer: ValueRef, idx: uint) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).ptr_to().to_ref());
}
B(cx).struct_gep(pointer, idx)
}
}
pub fn GlobalString(cx: &Block, _str: *const c_char) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
}
B(cx).global_string(_str)
}
}
pub fn GlobalStringPtr(cx: &Block, _str: *const c_char) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i8p(cx.ccx()).to_ref());
}
B(cx).global_string_ptr(_str)
}
}
/* Casts */
pub fn Trunc(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).trunc(val, dest_ty)
}
}
pub fn ZExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).zext(val, dest_ty)
}
}
pub fn SExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).sext(val, dest_ty)
}
}
pub fn FPToUI(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).fptoui(val, dest_ty)
}
}
pub fn FPToSI(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).fptosi(val, dest_ty)
}
}
pub fn UIToFP(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).uitofp(val, dest_ty)
}
}
pub fn SIToFP(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).sitofp(val, dest_ty)
}
}
pub fn FPTrunc(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).fptrunc(val, dest_ty)
}
}
pub fn FPExt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).fpext(val, dest_ty)
}
}
pub fn PtrToInt(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).ptrtoint(val, dest_ty)
}
}
pub fn IntToPtr(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).inttoptr(val, dest_ty)
}
}
pub fn BitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).bitcast(val, dest_ty)
}
}
pub fn ZExtOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).zext_or_bitcast(val, dest_ty)
}
}
pub fn SExtOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).sext_or_bitcast(val, dest_ty)
}
}
pub fn TruncOrBitCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).trunc_or_bitcast(val, dest_ty)
}
}
pub fn Cast(cx: &Block, op: Opcode, val: ValueRef, dest_ty: Type,
_: *const u8)
-> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).cast(op, val, dest_ty)
}
}
pub fn PointerCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).pointercast(val, dest_ty)
}
}
pub fn IntCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).intcast(val, dest_ty)
}
}
pub fn FPCast(cx: &Block, val: ValueRef, dest_ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(dest_ty.to_ref()); }
B(cx).fpcast(val, dest_ty)
}
}
/* Comparisons */
pub fn ICmp(cx: &Block, op: IntPredicate, lhs: ValueRef, rhs: ValueRef)
-> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
}
B(cx).icmp(op, lhs, rhs)
}
}
pub fn FCmp(cx: &Block, op: RealPredicate, lhs: ValueRef, rhs: ValueRef)
-> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
}
B(cx).fcmp(op, lhs, rhs)
}
}
/* Miscellaneous instructions */
pub fn EmptyPhi(cx: &Block, ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
B(cx).empty_phi(ty)
}
}
pub fn Phi(cx: &Block, ty: Type, vals: &[ValueRef],
bbs: &[BasicBlockRef]) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
B(cx).phi(ty, vals, bbs)
}
}
pub fn AddIncomingToPhi(phi: ValueRef, val: ValueRef, bb: BasicBlockRef) {
unsafe {
if llvm::LLVMIsUndef(phi) == llvm::True { return; }
llvm::LLVMAddIncoming(phi, &val, &bb, 1 as c_uint);
}
}
pub fn _UndefReturn(cx: &Block, fn_: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
let ty = val_ty(fn_);
let retty = if ty.kind() == llvm::Integer {
ty.return_type()
} else {
ccx.int_type
};
B(cx).count_insn("ret_undef");
llvm::LLVMGetUndef(retty.to_ref())
}
}
pub fn add_span_comment(cx: &Block, sp: Span, text: &str) {
B(cx).add_span_comment(sp, text)
}
pub fn add_comment(cx: &Block, text: &str) {
B(cx).add_comment(text)
}
pub fn InlineAsmCall(cx: &Block, asm: *const c_char, cons: *const c_char,
inputs: &[ValueRef], output: Type,
volatile: bool, alignstack: bool,
dia: AsmDialect) -> ValueRef {
B(cx).inline_asm_call(asm, cons, inputs, output, volatile, alignstack, dia)
}
pub fn Call(cx: &Block, fn_: ValueRef, args: &[ValueRef],
attributes: &[(uint, u64)]) -> ValueRef {
if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
B(cx).call(fn_, args, attributes)
}
pub fn CallWithConv(cx: &Block, fn_: ValueRef, args: &[ValueRef], conv: CallConv,
attributes: &[(uint, u64)]) -> ValueRef {
if cx.unreachable.get() { return _UndefReturn(cx, fn_); }
B(cx).call_with_conv(fn_, args, conv, attributes)
}
pub fn AtomicFence(cx: &Block, order: AtomicOrdering) {
if cx.unreachable.get() { return; }
B(cx).atomic_fence(order)
}
pub fn Select(cx: &Block, if_: ValueRef, then: ValueRef, else_: ValueRef) -> ValueRef {
if cx.unreachable.get() { return _Undef(then); }
B(cx).select(if_, then, else_)
}
pub fn VAArg(cx: &Block, list: ValueRef, ty: Type) -> ValueRef {
unsafe {
if cx.unreachable.get() { return llvm::LLVMGetUndef(ty.to_ref()); }
B(cx).va_arg(list, ty)
}
}
pub fn ExtractElement(cx: &Block, vec_val: ValueRef, index: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).extract_element(vec_val, index)
}
}
pub fn InsertElement(cx: &Block, vec_val: ValueRef, elt_val: ValueRef,
index: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).insert_element(vec_val, elt_val, index)
}
}
pub fn ShuffleVector(cx: &Block, v1: ValueRef, v2: ValueRef,
mask: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).shuffle_vector(v1, v2, mask)
}
}
pub fn VectorSplat(cx: &Block, num_elts: uint, elt_val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).vector_splat(num_elts, elt_val)
}
}
pub fn ExtractValue(cx: &Block, agg_val: ValueRef, index: uint) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).extract_value(agg_val, index)
}
}
pub fn InsertValue(cx: &Block, agg_val: ValueRef, elt_val: ValueRef, index: uint) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::nil(cx.ccx()).to_ref());
}
B(cx).insert_value(agg_val, elt_val, index)
}
}
pub fn IsNull(cx: &Block, val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
}
B(cx).is_null(val)
}
}
pub fn IsNotNull(cx: &Block, val: ValueRef) -> ValueRef {
unsafe {
if cx.unreachable.get() {
return llvm::LLVMGetUndef(Type::i1(cx.ccx()).to_ref());
}
B(cx).is_not_null(val)
}
}
pub fn PtrDiff(cx: &Block, lhs: ValueRef, rhs: ValueRef) -> ValueRef {
unsafe {
let ccx = cx.fcx.ccx;
if cx.unreachable.get() { return llvm::LLVMGetUndef(ccx.int_type.to_ref()); }
B(cx).ptrdiff(lhs, rhs)
}
}
pub fn Trap(cx: &Block) {
if cx.unreachable.get() { return; }
B(cx).trap();
}
pub fn LandingPad(cx: &Block, ty: Type, pers_fn: ValueRef,
num_clauses: uint) -> ValueRef {
check_not_terminated(cx);
assert!(!cx.unreachable.get());
B(cx).landing_pad(ty, pers_fn, num_clauses)
}
pub fn SetCleanup(cx: &Block, landing_pad: ValueRef) {
B(cx).set_cleanup(landing_pad)
}
pub fn Resume(cx: &Block, exn: ValueRef) -> ValueRef {
check_not_terminated(cx);
terminate(cx, "Resume");
B(cx).resume(exn)
}<|fim▁hole|>// Atomic Operations
pub fn AtomicCmpXchg(cx: &Block, dst: ValueRef,
cmp: ValueRef, src: ValueRef,
order: AtomicOrdering,
failure_order: AtomicOrdering) -> ValueRef {
B(cx).atomic_cmpxchg(dst, cmp, src, order, failure_order)
}
pub fn AtomicRMW(cx: &Block, op: AtomicBinOp,
dst: ValueRef, src: ValueRef,
order: AtomicOrdering) -> ValueRef {
B(cx).atomic_rmw(op, dst, src, order)
}<|fim▁end|> | |
<|file_name|>cornetto-client.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2008-2013 by
# Erwin Marsi and Tilburg University
# This file is part of the Pycornetto package.
# Pycornetto is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# Pycornetto is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
A simple client to connect to the Cornetto database server.
Reads queries from standard input and writes results to standard output.
"""
# BUGS:
# - there is no way to interrupt a query that goes bad on the server, as
#   obviously a local Ctrl-C does not work
__author__ = 'Erwin Marsi <[email protected]>'
__version__ = '0.6.1'
# using optparse instead of argparse so client can run stand-alone
from sys import stdin, stdout, stderr, exit
from optparse import OptionParser, IndentedHelpFormatter
import xmlrpclib
from pprint import pformat
from socket import error as SocketError
class MyFormatter(IndentedHelpFormatter):
"""to prevent optparse from messing up the epilog text"""
def format_epilog(self, epilog):
return epilog or ""
def format_description(self, description):
return description.lstrip()
epilog = """
Interactive usage:
$ cornetto-client.py
$ cornetto-client.py -a
File processing:
$ echo 'ask("pijp")' | cornetto-client.py
$ cornetto-client.py <input >output
"""
try:
    parser = OptionParser(description=__doc__, version="%prog version " +
__version__, epilog=epilog, formatter=MyFormatter())
except TypeError:
# optparse in python 2.4 has no epilog keyword
parser = OptionParser(description=__doc__ + epilog,
                          version="%prog version " + __version__)
parser.add_option("-a", "--ask", action='store_true',
                  help="assume all commands are input to the 'ask' function, "
                       "so you can type 'query' instead of 'ask(\"query\")', "
                       "but online help is no longer accessible")
parser.add_option("-H", "--host", default="localhost:5204",
metavar="HOST[:PORT]",
help="name or IP address of host (default is 'localhost') "
"optionally followed by a port number "
"(default is 5204)")
parser.add_option('-n', '--no-pretty-print', dest="pretty_print", action='store_false',
help="turn off pretty printing of output "
"(default when standard input is a file)")
parser.add_option("-p", "--port", type=int, default=5204,
help='port number (default is 5204)')
parser.add_option('-P', '--pretty-print', dest="pretty_print", action='store_true',
help="turn on pretty printing of output "
"(default when standard input is a tty)")
parser.add_option("-e", "--encoding", default="utf8", metavar="utf8,latin1,ascii,...",
help="character encoding of output (default is utf8)")
parser.add_option('-V', '--verbose', action='store_true',
help="verbose output for debugging")
(opts, args) = parser.parse_args()
if opts.host.startswith("http://"):
opts.host = opts.host[7:]
try:
host, port = opts.host.split(":")[:2]
except ValueError:
host, port = opts.host, None
# XML-RPC requires specification of the protocol
host = "http://" + (host or "localhost")
try:
port = int(port or 5204)
except ValueError:
exit("Error: %s is not a valid port number" % repr(port))
server = xmlrpclib.ServerProxy("%s:%s" % (host, port),
encoding="utf-8",
verbose=opts.verbose)
try:
eval('server.echo("test")')
except SocketError, inst:
print >>stderr, "Error: %s\nCornetto server not running on %s:%s ?" % (
inst, host, port), "See cornetto-server.py -h"
exit(1)
help_text = """
Type "?" to see this message.
Type "help()" for help on available methods.
Type "Ctrl-D" to exit.
Restart with "cornetto-client.py -h" to see command line options.
"""
startup_msg = ( "cornetto-client.py (version %s)\n" % __version__ +
"Copyright (c) Erwin Marsi\n" + help_text )
if stdin.isatty():
prompt = "$ "<|fim▁hole|> opts.pretty_print = True
print startup_msg
else:
prompt = ""
if opts.pretty_print is None:
opts.pretty_print = False
# use of eval might allow arbitrary code execution - probably not entirely safe
if opts.ask:
process = lambda c: eval('server.ask("%s")' % c.strip())
else:
process = lambda c: eval("server." + c.strip())
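# A safer alternative (sketch only, not used here) would dispatch through
# getattr instead of eval, so only attributes of the server proxy can run:
#
#   def process(command):
#       name, _, arg = command.strip().partition(" ")
#       method = getattr(server, name)  # AttributeError for unknown methods
#       return method(arg) if arg else method()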
if opts.pretty_print:
formatter = pformat
else:
formatter = repr
# This is a nasty way to enforce encoding of strings embedded in lists or dicts.
# For example [u'plafonnière'] rather than [u"plafonni\xe8re"]
encoder = lambda s: s.decode("unicode_escape").encode(opts.encoding, "backslashreplace")
while True:
try:
command = raw_input(prompt)
if command == "?":
print help_text
else:
result = process(command)
print encoder(formatter(result))
except EOFError:
print "\nSee you later alligator!"
exit(0)
except KeyboardInterrupt:
print >>stderr, "\nInterrupted. Latest command may still run on the server though..."
except SyntaxError:
print >>stderr, "Error: invalid syntax"
except NameError, inst:
print >>stderr, "Error:", inst, "- use quotes?"
except xmlrpclib.Error, inst:
print >>stderr, inst
    except SocketError, inst:
print >>stderr, "Error: %s\nCornetto server not running on %s:%s ?\n" % (
inst, host, port), "See cornetto-server.py -h"<|fim▁end|> | if opts.pretty_print is None: |
<|file_name|>ToolbarControlWebViewActivity.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2014 Soichiro Kashima
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.marshalchen.common.demoofui.observablescrollview;
import android.os.Bundle;
import android.support.v4.view.ViewCompat;
import android.support.v7.app.ActionBarActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import com.github.ksoichiro.android.observablescrollview.ObservableScrollView;
import com.github.ksoichiro.android.observablescrollview.ObservableScrollViewCallbacks;
import com.github.ksoichiro.android.observablescrollview.ObservableWebView;
import com.github.ksoichiro.android.observablescrollview.ScrollState;
import com.marshalchen.common.demoofui.R;
import com.nineoldandroids.view.ViewHelper;
import com.nineoldandroids.view.ViewPropertyAnimator;
public class ToolbarControlWebViewActivity extends ActionBarActivity {
private View mHeaderView;
private View mToolbarView;
private ObservableScrollView mScrollView;
private boolean mFirstScroll;
private boolean mDragging;
private int mBaseTranslationY;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.observable_scroll_view_activity_toolbarcontrolwebview);
setSupportActionBar((Toolbar) findViewById(R.id.toolbar));
mHeaderView = findViewById(R.id.header);
ViewCompat.setElevation(mHeaderView, getResources().getDimension(R.dimen.toolbar_elevation));
mToolbarView = findViewById(R.id.toolbar);
mScrollView = (ObservableScrollView) findViewById(R.id.scroll);
mScrollView.setScrollViewCallbacks(mScrollViewScrollCallbacks);
ObservableWebView mWebView = (ObservableWebView) findViewById(R.id.web);
mWebView.setScrollViewCallbacks(mWebViewScrollCallbacks);
mWebView.loadUrl("file:///android_asset/lipsum.html");
}
private ObservableScrollViewCallbacks mScrollViewScrollCallbacks = new ObservableScrollViewCallbacks() {
@Override
public void onScrollChanged(int scrollY, boolean firstScroll, boolean dragging) {
if (mDragging) {
int toolbarHeight = mToolbarView.getHeight();
if (mFirstScroll) {
mFirstScroll = false;
float currentHeaderTranslationY = ViewHelper.getTranslationY(mHeaderView);
if (-toolbarHeight < currentHeaderTranslationY && toolbarHeight < scrollY) {
mBaseTranslationY = scrollY;
}
}
int headerTranslationY = Math.min(0, Math.max(-toolbarHeight, -(scrollY - mBaseTranslationY)));
ViewPropertyAnimator.animate(mHeaderView).cancel();
ViewHelper.setTranslationY(mHeaderView, headerTranslationY);
}
}
@Override
public void onDownMotionEvent() {
}
@Override
public void onUpOrCancelMotionEvent(ScrollState scrollState) {
mDragging = false;
mBaseTranslationY = 0;
float headerTranslationY = ViewHelper.getTranslationY(mHeaderView);
int toolbarHeight = mToolbarView.getHeight();
if (scrollState == ScrollState.UP) {
if (toolbarHeight < mScrollView.getCurrentScrollY()) {
if (headerTranslationY != -toolbarHeight) {
ViewPropertyAnimator.animate(mHeaderView).cancel();
ViewPropertyAnimator.animate(mHeaderView).translationY(-toolbarHeight).setDuration(200).start();
}
}
} else if (scrollState == ScrollState.DOWN) {
if (toolbarHeight < mScrollView.getCurrentScrollY()) {
if (headerTranslationY != 0) {
ViewPropertyAnimator.animate(mHeaderView).cancel();
ViewPropertyAnimator.animate(mHeaderView).translationY(0).setDuration(200).start();
}
}
}
}
};
private ObservableScrollViewCallbacks mWebViewScrollCallbacks = new ObservableScrollViewCallbacks() {<|fim▁hole|>
@Override
public void onDownMotionEvent() {
// Workaround: WebView inside a ScrollView absorbs down motion events, so observing
// down motion event from the WebView is required.
mFirstScroll = mDragging = true;
}
@Override
public void onUpOrCancelMotionEvent(ScrollState scrollState) {
}
};
}<|fim▁end|> | @Override
public void onScrollChanged(int scrollY, boolean firstScroll, boolean dragging) {
} |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
__author__ = ", ".join(["Shyue Ping Ong", "Anubhav Jain", "Geoffroy Hautier",
"William Davidson Richard", "Stephen Dacek",
"Sai Jayaraman", "Michael Kocher", "Dan Gunter",
"Shreyas Cholia", "Vincent L Chevrier",
"Rickard Armiento"])
__date__ = "Oct 29 2014"
__version__ = "3.0.7"<|fim▁hole|>
from .core import *
from .serializers.json_coders import pmg_dump, pmg_load
from .electronic_structure.core import Spin, Orbital
from .io.smartio import read_structure, write_structure, read_mol, write_mol
from .matproj.rest import MPRester
from monty.json import MontyEncoder, MontyDecoder, MSONable<|fim▁end|> |
#Useful aliases for commonly used objects and modules. |
<|file_name|>file_test.go<|end_file_name|><|fim▁begin|>package client
import (
"bytes"<|fim▁hole|> "sort"
"strings"
"testing"
"time"
"github.com/grafana/loki/pkg/loghttp"
"github.com/grafana/loki/pkg/logproto"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestFileClient_QueryRangeLogQueries(t *testing.T) {
input := []string{
`level=info event="loki started" caller=main.go ts=1625995076`,
`level=info event="runtime loader started" caller=main.go ts=1625995077`,
`level=error event="unable to read rules directory" file="/tmp/rules" caller=rules.go ts=1625995090`,
`level=error event="failed to apply wal" error="/tmp/wal/ corrupted" caller=wal.go ts=1625996090`,
`level=info event="loki ready" caller=main.go ts=1625996095`,
}
reversed := make([]string, len(input))
copy(reversed, input)
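	// Reorder the copy newest-first (an index-comparator trick that reverses
	// the slice) so BACKWARD queries can be asserted directly.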
sort.Slice(reversed, func(i, j int) bool {
return i > j
})
now := time.Now()
cases := []struct {
name string
limit int
start, end time.Time
direction logproto.Direction
step, interval time.Duration
expectedStatus loghttp.QueryStatus
expected []string
}{
{
name: "return-all-logs-backward",
limit: 10, // more than input
start: now.Add(-1 * time.Hour),
end: now,
direction: logproto.BACKWARD,
step: 0, // let client decide based on start and end
interval: 0,
expectedStatus: loghttp.QueryStatusSuccess,
expected: reversed,
},
{
name: "return-all-logs-forward",
limit: 10, // more than input
start: now.Add(-1 * time.Hour),
end: now,
direction: logproto.FORWARD,
step: 0, // let the client decide based on start and end
interval: 0,
expectedStatus: loghttp.QueryStatusSuccess,
expected: input,
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
client := NewFileClient(io.NopCloser(strings.NewReader(strings.Join(input, "\n"))))
resp, err := client.QueryRange(
`{foo="bar"}`, // label matcher doesn't matter.
c.limit,
c.start,
c.end,
c.direction,
c.step,
c.interval,
true,
)
require.NoError(t, err)
require.Equal(t, loghttp.QueryStatusSuccess, resp.Status)
assert.Equal(t, string(resp.Data.ResultType), loghttp.ResultTypeStream)
assertStreams(t, resp.Data.Result, c.expected)
})
}
}
func TestFileClient_Query(t *testing.T) {
input := []string{
`level=info event="loki started" caller=main.go ts=1625995076`,
`level=info event="runtime loader started" caller=main.go ts=1625995077`,
`level=error event="unable to read rules directory" file="/tmp/rules" caller=rules.go ts=1625995090`,
`level=error event="failed to apply wal" error="/tmp/wal/ corrupted" caller=wal.go ts=1625996090`,
`level=info event="loki ready" caller=main.go ts=1625996095`,
}
reversed := make([]string, len(input))
copy(reversed, input)
sort.Slice(reversed, func(i, j int) bool {
return i > j
})
now := time.Now()
cases := []struct {
name string
limit int
ts time.Time
direction logproto.Direction
expectedStatus loghttp.QueryStatus
expected []string
}{
{
name: "return-all-logs-backward",
limit: 10, // more than input
ts: now.Add(-1 * time.Hour),
direction: logproto.BACKWARD,
expectedStatus: loghttp.QueryStatusSuccess,
expected: reversed,
},
{
name: "return-all-logs-forward",
limit: 10, // more than input
ts: now.Add(-1 * time.Hour),
direction: logproto.FORWARD,
expectedStatus: loghttp.QueryStatusSuccess,
expected: input,
},
}
for _, c := range cases {
t.Run(c.name, func(t *testing.T) {
client := NewFileClient(io.NopCloser(strings.NewReader(strings.Join(input, "\n"))))
resp, err := client.Query(
`{foo="bar"}`, // label matcher doesn't matter.
c.limit,
c.ts,
c.direction,
true,
)
require.NoError(t, err)
require.Equal(t, loghttp.QueryStatusSuccess, resp.Status)
assert.Equal(t, string(resp.Data.ResultType), loghttp.ResultTypeStream)
assertStreams(t, resp.Data.Result, c.expected)
})
}
}
func TestFileClient_ListLabelNames(t *testing.T) {
c := newEmptyClient(t)
values, err := c.ListLabelNames(true, time.Now(), time.Now())
require.NoError(t, err)
assert.Equal(t, &loghttp.LabelResponse{
Data: []string{defaultLabelKey},
Status: loghttp.QueryStatusSuccess,
}, values)
}
func TestFileClient_ListLabelValues(t *testing.T) {
c := newEmptyClient(t)
values, err := c.ListLabelValues(defaultLabelKey, true, time.Now(), time.Now())
require.NoError(t, err)
assert.Equal(t, &loghttp.LabelResponse{
Data: []string{defaultLabelValue},
Status: loghttp.QueryStatusSuccess,
}, values)
}
func TestFileClient_Series(t *testing.T) {
c := newEmptyClient(t)
got, err := c.Series(nil, time.Now(), time.Now(), true)
require.NoError(t, err)
exp := &loghttp.SeriesResponse{
Data: []loghttp.LabelSet{
{defaultLabelKey: defaultLabelValue},
},
Status: loghttp.QueryStatusSuccess,
}
assert.Equal(t, exp, got)
}
func TestFileClient_LiveTail(t *testing.T) {
c := newEmptyClient(t)
x, err := c.LiveTailQueryConn("", time.Second, 0, time.Now(), true)
require.Error(t, err)
require.Nil(t, x)
assert.True(t, errors.Is(err, ErrNotSupported))
}
func TestFileClient_GetOrgID(t *testing.T) {
c := newEmptyClient(t)
assert.Equal(t, defaultOrgID, c.GetOrgID())
}
func newEmptyClient(t *testing.T) *FileClient {
t.Helper()
return NewFileClient(io.NopCloser(&bytes.Buffer{}))
}
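// assertStreams expects exactly one stream, sorts its entries by timestamp,
// and compares the log lines one-to-one against the expected slice.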
func assertStreams(t *testing.T, result loghttp.ResultValue, logLines []string) {
t.Helper()
streams, ok := result.(loghttp.Streams)
require.True(t, ok, "response type should be `loghttp.Streams`")
require.Len(t, streams, 1, "there should be only one stream for FileClient")
got := streams[0]
sort.Slice(got.Entries, func(i, j int) bool {
return got.Entries[i].Timestamp.UnixNano() < got.Entries[j].Timestamp.UnixNano()
})
require.Equal(t, len(got.Entries), len(logLines))
for i, entry := range got.Entries {
assert.Equal(t, entry.Line, logLines[i])
}
}<|fim▁end|> | "errors"
"io" |
<|file_name|>test_export_tasks.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import json
import logging
import os
import pickle
import sys
import uuid
from unittest.mock import Mock, PropertyMock, patch, MagicMock, ANY
import celery
import yaml
from billiard.einfo import ExceptionInfo
from django.conf import settings
from django.contrib.auth.models import Group, User
from django.contrib.gis.geos import GEOSGeometry, Polygon
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import timezone
from eventkit_cloud.celery import TaskPriority, app
from eventkit_cloud.jobs.models import DatamodelPreset, DataProvider, Job, DataProviderType
from eventkit_cloud.tasks.enumerations import TaskState
from eventkit_cloud.tasks.export_tasks import (
ExportTask,
export_task_error_handler,
finalize_run_task,
kml_export_task,
mapproxy_export_task,
geopackage_export_task,
shp_export_task,
arcgis_feature_service_export_task,
pick_up_run_task,
cancel_export_provider_task,
kill_task,
geotiff_export_task,
nitf_export_task,
bounds_export_task,
parse_result,
finalize_export_provider_task,
FormatTask,
wait_for_providers_task,
create_zip_task,
pbf_export_task,
sqlite_export_task,
gpx_export_task,
mbtiles_export_task,
wfs_export_task,
vector_file_export_task,
raster_file_export_task,
osm_data_collection_pipeline,
reprojection_task,
ogcapi_process_export_task,
get_ogcapi_data,
)
from eventkit_cloud.tasks.export_tasks import zip_files
from eventkit_cloud.tasks.helpers import default_format_time
from eventkit_cloud.tasks.models import (
DataProviderTaskRecord,
ExportRun,
ExportTaskRecord,
FileProducingTaskResult,
RunZipFile,
)
from eventkit_cloud.tasks.task_base import LockingTask
logger = logging.getLogger(__name__)
test_cert_info = """
cert_info:
cert_path: '/path/to/fake/cert'
cert_pass_var: 'fakepass'
"""
expected_cert_info = {"cert_path": "/path/to/fake/cert", "cert_pass_var": "fakepass"}
class TestLockingTask(TestCase):
def test_locking_task(self):
task_id = "0123"
retries = False
task_name = "lock_test_task"
expected_lock_key = f"TaskLock_{task_name}_{task_id}_{retries}"
expected_result = "result"
# Create a test task...
@app.task(base=LockingTask)
def lock_test_task():
return expected_result
# ...mock the cache...
mock_cache = MagicMock()
mock_cache.add.side_effect = ["A Lock", None, None, None, None]
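        # The first add() acquires the lock; later calls return None, simulating
        # a second worker finding the lock already held.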
# ...create two separate test tasks...
lock_task = lock_task2 = lock_test_task
lock_task.cache = lock_task2.cache = mock_cache
# ..create a mock request...
mock_request = Mock(task_name=task_name, id=task_id, retries=False)
mock_request_stack = Mock()
mock_request_stack.top = mock_request
mock_push_request = Mock()
# ...with duplicate requests...
lock_task.request_stack = lock_task2.request_stack = mock_request_stack
lock_task.push_request = lock_task2.push_request = mock_push_request
# ...call first task ensure it returns...
result = lock_task.__call__()
self.assertEqual(result, expected_result)
mock_cache.add.assert_called_with(expected_lock_key, task_id, lock_task.lock_expiration)
# ...call a second task with duplicate id, ensure nothing returns.
result = lock_task2.__call__()
self.assertIsNone(result)
mock_cache.add.assert_called_with(expected_lock_key, task_id, lock_task.lock_expiration)
class ExportTaskBase(TestCase):
fixtures = ("osm_provider.json", "datamodel_presets.json")
def setUp(self):
self.maxDiff = None
self.path = os.path.dirname(os.path.realpath(__file__))
self.group, created = Group.objects.get_or_create(name="TestDefault")
with patch("eventkit_cloud.jobs.signals.Group") as mock_group:
mock_group.objects.get.return_value = self.group
self.user = User.objects.create(username="demo", email="[email protected]", password="demo")
bbox = Polygon.from_bbox((-10.85, 6.25, -10.62, 6.40))
tags = DatamodelPreset.objects.get(name="hdm").json_tags
self.assertEqual(259, len(tags))
the_geom = GEOSGeometry(bbox, srid=4326)
self.job = Job.objects.create(
name="TestJob", description="Test description", user=self.user, the_geom=the_geom, json_tags=tags
)
self.job.feature_save = True
self.job.feature_pub = True
self.job.save()
self.run = ExportRun.objects.create(job=self.job, user=self.user)
self.provider = DataProvider.objects.first()
class TestExportTasks(ExportTaskBase):
stage_dir = "/stage"
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_run_shp_export_task(self, mock_request, mock_convert, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_convert.return_value = expected_output_path
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=shp_export_task.name
)
shp_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = shp_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
)
mock_convert.assert_called_once_with(
driver="ESRI Shapefile",
input_file=expected_output_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
boundary=None,
projection=4326,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.generate_qgs_style")
@patch("eventkit_cloud.tasks.export_tasks.convert_qgis_gpkg_to_kml")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_run_kml_export_task(
self, mock_request, mock_convert, mock_qgis_convert, mock_generate_qgs_style, mock_get_export_filepath
):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_generate_qgs_style.return_value = qgs_file = "/style.qgs"
mock_convert.return_value = mock_qgis_convert.return_value = expected_output_path
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=kml_export_task.name
)
kml_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = kml_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
)
try:
import qgis # noqa
mock_qgis_convert.assert_called_once_with(qgs_file, expected_output_path, stage_dir=self.stage_dir)
except ImportError:
mock_convert.assert_called_once_with(
driver="libkml",
input_file=expected_output_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=None,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_run_sqlite_export_task(self, mock_request, mock_convert, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_convert.return_value = expected_output_path
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=sqlite_export_task.name
)
sqlite_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = sqlite_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
)
mock_convert.assert_called_once_with(
driver="SQLite",
input_file=expected_output_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=None,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.download_concurrently")
@patch("eventkit_cloud.tasks.helpers.download_data")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("eventkit_cloud.tasks.export_tasks.geopackage")
@patch("celery.app.task.Task.request")
def test_run_wfs_export_task(
self,
mock_request,
mock_gpkg,
mock_convert,
mock_download_data,
mock_download_concurrently,
mock_get_export_filepath,
):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
expected_provider_slug = "wfs-service"
self.provider.export_provider_type = DataProviderType.objects.get(type_name="wfs")
self.provider.slug = expected_provider_slug
self.provider.config = None
self.provider.save()
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
layer = "foo"
service_url = "https://abc.gov/WFSserver/"
expected_input_path = [
os.path.join(self.stage_dir, "chunk0.json"),
os.path.join(self.stage_dir, "chunk1.json"),
os.path.join(self.stage_dir, "chunk2.json"),
os.path.join(self.stage_dir, "chunk3.json"),
]
mock_convert.return_value = expected_output_path
mock_download_data.return_value = expected_input_path
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=wfs_export_task.name
)
wfs_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
mock_gpkg.check_content_exists.return_value = True
result = wfs_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=layer,
bbox=[1, 2, 3, 4],
)
mock_convert.assert_called_once_with(
driver="gpkg",
input_file=expected_input_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=projection,
boundary=[1, 2, 3, 4],
layer_name=expected_provider_slug,
access_mode="append",
distinct_field=None,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
mock_gpkg.check_content_exists.assert_called_once_with(expected_output_path)
result_b = wfs_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=f"{service_url}/",
bbox=[1, 2, 3, 4],
)
self.assertEqual(expected_output_path, result_b["result"])
self.assertEqual(expected_output_path, result_b["source"])
url_1 = "https://abc.gov/wfs/services/x"
url_2 = "https://abc.gov/wfs/services/y"
layer_1 = "spam"
layer_2 = "ham"
config = f"""
vector_layers:
- name: '{layer_1}'
url: '{url_1}'
- name: '{layer_2}'
url: '{url_2}'
"""
expected_path_1 = f"{layer_1}.gpkg"
expected_path_2 = f"{layer_2}.gpkg"
expected_url_1 = (
f"{url_1}?SERVICE=WFS&VERSION=1.0.0&REQUEST=GetFeature&TYPENAME={layer_1}"
f"&SRSNAME=EPSG:{projection}&BBOX=BBOX_PLACEHOLDER"
)
expected_url_2 = (
f"{url_2}?SERVICE=WFS&VERSION=1.0.0&REQUEST=GetFeature&TYPENAME={layer_2}"
f"&SRSNAME=EPSG:{projection}&BBOX=BBOX_PLACEHOLDER"
)
expected_layers = {
layer_1: {
"task_uid": str(saved_export_task.uid),
"url": expected_url_1,
"path": expected_path_1,
"base_path": f"{self.stage_dir.rstrip('/')}/{layer_1}-{projection}",
"bbox": [1, 2, 3, 4],
"cert_info": None,
"layer_name": layer_1,
"projection": projection,
},
layer_2: {
"task_uid": str(saved_export_task.uid),
"url": expected_url_2,
"path": expected_path_2,
"base_path": f"{self.stage_dir.rstrip('/')}/{layer_2}-{projection}",
"bbox": [1, 2, 3, 4],
"cert_info": None,
"layer_name": layer_2,
"projection": projection,
},
}
mock_download_concurrently.return_value = expected_layers
mock_convert.reset_mock()
mock_get_export_filepath.side_effect = [expected_output_path, expected_path_1, expected_path_2]
# test with multiple layers
result_c = wfs_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=layer,
config=config,
bbox=[1, 2, 3, 4],
)
_, args, _ = mock_download_concurrently.mock_calls[0]
self.assertEqual(list(args[0]), list(expected_layers.values()))
self.assertEqual(mock_convert.call_count, 2)
mock_convert.assert_any_call(
driver="gpkg",
input_file=expected_path_1,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=[1, 2, 3, 4],
access_mode="append",
layer_name=layer_1,
)
mock_convert.assert_any_call(
driver="gpkg",
input_file=expected_path_2,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=[1, 2, 3, 4],
access_mode="append",
layer_name=layer_2,
)
self.assertEqual(expected_output_path, result_c["result"])
self.assertEqual(expected_output_path, result_c["source"])
# test downloads with certs
mock_download_data.reset_mock()
mock_get_export_filepath.side_effect = [expected_output_path, expected_path_1, expected_path_2]
wfs_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=layer,
bbox=[1, 2, 3, 4],
)
mock_download_data.assert_called_with(
str(saved_export_task.uid), ANY, expected_input_path[3], cert_info=None, task_points=400
)
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.utils.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_mbtiles_export_task(self, mock_request, mock_convert, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
input_projection = 4326
output_projection = 3857
driver = "MBTiles"
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_convert.return_value = expected_output_path
sample_input = "example.gpkg"
previous_task_result = {"source": sample_input}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=mbtiles_export_task.name
)
mbtiles_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = mbtiles_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=output_projection,
)
mock_convert.assert_called_once_with(
driver=driver,
input_file=sample_input,
output_file=expected_output_path,
src_srs=input_projection,
task_uid=str(saved_export_task.uid),
projection=output_projection,
boundary=None,
use_translate=True,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(sample_input, result["source"])
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.os.rename")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_run_gpkg_export_task(self, mock_request, mock_convert, mock_rename, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.gpkg"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_rename.return_value = expected_output_path
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=geopackage_export_task.name
)
result = geopackage_export_task(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
)
mock_rename.assert_called_once_with(expected_output_path, expected_output_path)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
example_input_file = "test.tif"
previous_task_result = {"source": example_input_file}
mock_convert.return_value = expected_output_path
result = geopackage_export_task(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
)
mock_convert.assert_called_once_with(
driver="gpkg",
input_file=example_input_file,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=None,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(example_input_file, result["source"])
@patch("eventkit_cloud.tasks.export_tasks.sqlite3.connect")
@patch("eventkit_cloud.tasks.export_tasks.cancel_export_provider_task.run")
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_export_task_record")
@patch("eventkit_cloud.tasks.export_tasks.os")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils")
@patch("eventkit_cloud.tasks.export_tasks.update_progress")
@patch("eventkit_cloud.tasks.export_tasks.geopackage")
@patch("eventkit_cloud.tasks.export_tasks.FeatureSelection")
@patch("eventkit_cloud.tasks.export_tasks.pbf")
@patch("eventkit_cloud.tasks.export_tasks.overpass")
def test_osm_data_collection_pipeline(
self,
mock_overpass,
mock_pbf,
mock_feature_selection,
mock_geopackage,
mock_update_progress,
mock_gdalutils,
mock_os,
mock_get_export_task_record,
mock_get_export_filepath,
mock_cancel_provider_task,
mock_connect,
):
example_export_task_record_uid = "1234"
example_bbox = [-1, -1, 1, 1]
mock_get_export_filepath.return_value = example_gpkg = "/path/to/file.gpkg"
mock_geopackage.Geopackage.return_value = Mock(results=[Mock(parts=[example_gpkg])])
# Test with using overpass
example_overpass_query = "some_query; out;"
example_config = {"overpass_query": example_overpass_query}
osm_data_collection_pipeline(
example_export_task_record_uid, self.stage_dir, bbox=example_bbox, config=yaml.dump(example_config)
)
mock_connect.assert_called_once()
mock_overpass.Overpass.assert_called_once()
mock_pbf.OSMToPBF.assert_called_once()
mock_feature_selection.example.assert_called_once()
mock_cancel_provider_task.assert_not_called()
# Test canceling the provider task on an empty geopackage.
mock_geopackage.Geopackage().run.return_value = None
osm_data_collection_pipeline(
example_export_task_record_uid, self.stage_dir, bbox=example_bbox, config=yaml.dump(example_config)
)
mock_cancel_provider_task.assert_called_once()
mock_overpass.reset_mock()
mock_pbf.reset_mock()
mock_feature_selection.reset_mock()
mock_geopackage.reset_mock()
# Test with using pbf_file
example_pbf_file = "test.pbf"
example_config = {"pbf_file": example_pbf_file}
osm_data_collection_pipeline(
example_export_task_record_uid, self.stage_dir, bbox=example_bbox, config=yaml.dump(example_config)
)
mock_overpass.Overpass.assert_not_called()
mock_pbf.OSMToPBF.assert_not_called()
mock_feature_selection.assert_not_called()
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_creation_options")
@patch("eventkit_cloud.tasks.export_tasks.get_export_task_record")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils")
def test_geotiff_export_task(
self, mock_gdalutils, mock_get_export_task_record, mock_get_creation_options, mock_get_export_filepath
):
# TODO: This can be setup as a way to test the other ExportTasks without all the boilerplate.
ExportTask.__call__ = lambda *args, **kwargs: celery.Task.__call__(*args, **kwargs)
example_geotiff = "example.tif"
example_result = {"source": example_geotiff}
task_uid = "1234"
warp_params = {"warp": "params"}
translate_params = {"translate": "params"}
mock_get_creation_options.return_value = warp_params, translate_params
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
        mock_gdalutils.convert.return_value = expected_outfile
        geotiff_export_task(result=example_result, task_uid=task_uid, stage_dir=self.stage_dir, job_name="job")
mock_gdalutils.convert.assert_called_once_with(
boundary=None,
driver="gtiff",
input_file=f"GTIFF_RAW:{example_geotiff}",
output_file=expected_outfile,
task_uid=task_uid,
warp_params=warp_params,
translate_params=translate_params,
)
mock_gdalutils.reset_mock()
example_result = {"source": example_geotiff, "selection": "selection"}
mock_gdalutils.convert.return_value = expected_outfile
geotiff_export_task(result=example_result, task_uid=task_uid, stage_dir=self.stage_dir, job_name="job")
mock_gdalutils.convert.assert_called_once_with(
boundary="selection",
driver="gtiff",
input_file=f"GTIFF_RAW:{example_geotiff}",
output_file=expected_outfile,
task_uid=task_uid,
warp_params=warp_params,
translate_params=translate_params,
)
mock_gdalutils.reset_mock()
example_result = {"gtiff": expected_outfile}
geotiff_export_task(result=example_result, task_uid=task_uid, stage_dir=self.stage_dir, job_name="job")
mock_gdalutils.assert_not_called()
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_export_task_record")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils")
def test_nitf_export_task(self, mock_gdalutils, mock_get_export_task_record, mock_get_export_filepath):
ExportTask.__call__ = lambda *args, **kwargs: celery.Task.__call__(*args, **kwargs)
example_nitf = "example.nitf"
example_result = {"source": example_nitf}
task_uid = "1234"
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
        mock_gdalutils.convert.return_value = expected_outfile
        nitf_export_task(result=example_result, task_uid=task_uid, stage_dir=self.stage_dir, job_name="job")
mock_gdalutils.convert.assert_called_once_with(
creation_options=["ICORDS=G"],
driver="nitf",
input_file=example_nitf,
output_file=expected_outfile,
task_uid=task_uid,
)
mock_gdalutils.reset_mock()
nitf_export_task(result=example_result, task_uid=task_uid, stage_dir=self.stage_dir, job_name="job")
mock_gdalutils.convert.assert_called_once_with(
creation_options=["ICORDS=G"],
driver="nitf",
input_file=example_nitf,
output_file=expected_outfile,
task_uid=task_uid,
)
def test_pbf_export_task(self):
# TODO: This can be setup as a way to test the other ExportTasks without all the boilerplate.
ExportTask.__call__ = lambda *args, **kwargs: celery.Task.__call__(*args, **kwargs)
example_pbf = "example.pbf"
example_result = {"pbf": example_pbf}
expected_result = {"file_extension": "pbf", "driver": "OSM", "pbf": example_pbf, "result": example_pbf}
returned_result = pbf_export_task(example_result)
        self.assertEqual(expected_result, returned_result)
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_export_task_record")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_sqlite_export_task(
self, mock_request, mock_convert, mock_get_export_task_record, mock_get_export_filepath
):
ExportTask.__call__ = lambda *args, **kwargs: celery.Task.__call__(*args, **kwargs)
expected_provider_slug = "osm-generic"
expected_event = "event"
expected_label = "label"
mock_get_export_task_record.return_value = Mock(
export_provider_task=Mock(
run=Mock(job=Mock(event=expected_event)),
provider=Mock(slug=expected_provider_slug, data_type="vector", label=expected_label),
)
)
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_convert.return_value = expected_output_path
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=sqlite_export_task.name
)
sqlite_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = sqlite_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
)
mock_convert.assert_called_once_with(
driver="SQLite",
input_file=expected_output_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=None,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_export_task_record")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils")
def test_gpx_export_task(self, mock_gdalutils, mock_get_export_task_record, mock_get_export_filepath):
# TODO: This can be setup as a way to test the other ExportTasks without all the boilerplate.
ExportTask.__call__ = lambda *args, **kwargs: celery.Task.__call__(*args, **kwargs)
expected_provider_slug = "osm-generic"
expected_event = "event"
expected_label = "label"
mock_get_export_task_record.return_value = Mock(
export_provider_task=Mock(
run=Mock(job=Mock(event=expected_event)),
provider=Mock(slug=expected_provider_slug, data_type="vector", label=expected_label),
)
)
example_source = "example.pbf"
example_geojson = "example.geojson"
task_uid = "1234"
example_result = {"pbf": example_source, "selection": example_geojson}
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
mock_gdalutils.convert.return_value = expected_output_path
expected_result = {
"pbf": example_source,
"file_extension": "gpx",
"driver": "GPX",
"result": expected_output_path,
"gpx": expected_output_path,
"selection": example_geojson,
}
returned_result = gpx_export_task(
result=example_result, task_uid=task_uid, stage_dir=self.stage_dir, job_name="job"
)
mock_gdalutils.convert.assert_called_once_with(
input_file=example_source,
output_file=expected_output_path,
driver="GPX",
dataset_creation_options=["GPX_USE_EXTENSIONS=YES"],
creation_options=["-explodecollections"],
boundary=example_geojson,
)
self.assertEqual(returned_result, expected_result)
@patch("eventkit_cloud.tasks.export_tasks.make_dirs")
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.geopackage")
@patch("eventkit_cloud.tasks.export_tasks.download_concurrently")
@patch("eventkit_cloud.tasks.helpers.download_feature_data")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_run_arcgis_feature_service_export_task(
self,
mock_request,
mock_convert,
mock_download_feature_data,
mock_download_concurrently,
mock_geopackage,
mock_get_export_filepath,
mock_makedirs,
):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
expected_provider_slug = "arcgis-feature-service"
self.provider.export_provider_type = DataProviderType.objects.get(type_name="arcgis-feature")
self.provider.slug = expected_provider_slug
self.provider.config = None
self.provider.save()
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
expected_esrijson = [
os.path.join(self.stage_dir, "chunk0.json"),
os.path.join(self.stage_dir, "chunk1.json"),
os.path.join(self.stage_dir, "chunk2.json"),
os.path.join(self.stage_dir, "chunk3.json"),
]
service_url = "https://abc.gov/arcgis/services/x"
bbox = [1, 2, 3, 4]
query_string = "query?where=objectid=objectid&outfields=*&f=json&geometry=BBOX_PLACEHOLDER"
expected_input_url = (
"https://abc.gov/arcgis/services/x/query?where=objectid=objectid&"
"outfields=*&f=json&geometry=2.0%2C%202.0%2C%203.0%2C%203.0"
)
mock_convert.return_value = expected_output_path
mock_download_feature_data.side_effect = expected_esrijson
previous_task_result = {"source": expected_input_url}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
status=TaskState.PENDING.value,
name=arcgis_feature_service_export_task.name,
)
mock_geopackage.check_content_exists.return_value = True
# test without trailing slash
result_a = arcgis_feature_service_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
bbox=bbox,
)
mock_download_feature_data.assert_called_with(
str(saved_export_task.uid), expected_input_url, ANY, cert_info=None, task_points=400
)
mock_convert.assert_called_once_with(
driver="gpkg",
input_file=expected_esrijson,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
layer_name=expected_provider_slug,
boundary=bbox,
access_mode="append",
distinct_field=None,
)
self.assertEqual(expected_output_path, result_a["result"])
self.assertEqual(expected_output_path, result_a["source"])
mock_download_feature_data.reset_mock(return_value=True, side_effect=True)
# test with trailing slash
result_b = arcgis_feature_service_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=f"{service_url}/",
bbox=bbox,
)
self.assertEqual(expected_output_path, result_b["result"])
self.assertEqual(expected_output_path, result_b["source"])
url_1 = "https://abc.gov/arcgis/services/x"
url_2 = "https://abc.gov/arcgis/services/y"
layer_name_1 = "foo"
layer_name_2 = "bar"
expected_field = "baz"
config = f"""
vector_layers:
- name: '{layer_name_1}'
url: '{url_1}'
- name: '{layer_name_2}'
url: '{url_2}'
distinct_field: '{expected_field}'
"""
expected_path_1 = f"{layer_name_1}.gpkg"
expected_path_2 = f"{layer_name_2}.gpkg"
expected_url_1 = f"{url_1}/{query_string}"
expected_url_2 = f"{url_2}/{query_string}"
expected_layers = {
layer_name_1: {
"task_uid": str(saved_export_task.uid),
"url": expected_url_1,
"path": expected_path_1,
"base_path": f"{self.stage_dir.rstrip('/')}/{layer_name_1}-{projection}",
"bbox": [1, 2, 3, 4],
"cert_info": None,
"projection": projection,
"layer_name": layer_name_1,
"distinct_field": None,
},
layer_name_2: {
"task_uid": str(saved_export_task.uid),
"url": expected_url_2,
"path": expected_path_2,
"base_path": f"{self.stage_dir.rstrip('/')}/{layer_name_2}-{projection}",
"bbox": [1, 2, 3, 4],
"cert_info": None,
"projection": projection,
"layer_name": layer_name_2,
"distinct_field": expected_field,
},
}
mock_download_concurrently.return_value = expected_layers
mock_convert.reset_mock()
mock_download_feature_data.reset_mock()
mock_get_export_filepath.side_effect = [expected_output_path, expected_path_1, expected_path_2]
# test with multiple layers
result_c = arcgis_feature_service_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=f"{service_url}/",
bbox=bbox,
config=config,
)
_, args, _ = mock_download_concurrently.mock_calls[0]
self.assertEqual(list(args[0]), list(expected_layers.values()))
self.assertEqual(mock_convert.call_count, 2)
mock_convert.assert_any_call(
driver="gpkg",
input_file=expected_path_1,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=bbox,
access_mode="append",
layer_name=layer_name_1,
)
mock_convert.assert_any_call(
driver="gpkg",
input_file=expected_path_2,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=4326,
boundary=bbox,
access_mode="append",
layer_name=layer_name_2,
)
self.assertEqual(expected_output_path, result_c["result"])
self.assertEqual(expected_output_path, result_c["source"])
# test downloads with certs
mock_download_feature_data.reset_mock()
mock_get_export_filepath.side_effect = [expected_output_path, expected_path_1, expected_path_2]
arcgis_feature_service_export_task.run(
run_uid=123,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir="dir",
job_name="job",
projection=projection,
service_url=url_1,
bbox=bbox,
)
mock_download_feature_data.assert_called_with(
str(saved_export_task.uid), expected_input_url, "dir/chunk3.json", cert_info=None, task_points=400
)
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("celery.app.task.Task.request")
@patch("eventkit_cloud.utils.mapproxy.MapproxyGeopackage")
def test_run_external_raster_service_export_task(self, mock_service, mock_request, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
service_to_gpkg = mock_service.return_value
job_name = self.job.name.lower()
service_to_gpkg.convert.return_value = expected_output_path = os.path.join(self.stage_dir, f"{job_name}.gpkg")
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=mapproxy_export_task.name
)
mapproxy_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = mapproxy_export_task.run(
run_uid=self.run.uid, task_uid=str(saved_export_task.uid), stage_dir=self.stage_dir, job_name=job_name
)
service_to_gpkg.convert.assert_called_once()
self.assertEqual(expected_output_path, result["result"])
# test the tasks update_task_state method
run_task = ExportTaskRecord.objects.get(celery_uid=celery_uid)
self.assertIsNotNone(run_task)
self.assertEqual(TaskState.RUNNING.value, run_task.status)
service_to_gpkg.convert.side_effect = Exception("Task Failed")
with self.assertRaises(Exception):
mapproxy_export_task.run(
run_uid=self.run.uid, task_uid=str(saved_export_task.uid), stage_dir=self.stage_dir, job_name=job_name
)
def test_task_on_failure(self):
celery_uid = str(uuid.uuid4())
# assume task is running
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, name="Shapefile Export", provider=self.provider
)
test_export_task_record = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
celery_uid=celery_uid,
status=TaskState.RUNNING.value,
name=shp_export_task.name,
)
try:
raise ValueError("some unexpected error")
except ValueError as e:
exc = e
exc_info = sys.exc_info()
einfo = ExceptionInfo(exc_info=exc_info)
shp_export_task.task_failure(
exc, task_id=test_export_task_record.uid, einfo=einfo, args={}, kwargs={"run_uid": str(self.run.uid)}
)
task = ExportTaskRecord.objects.get(celery_uid=celery_uid)
self.assertIsNotNone(task)
exception = task.exceptions.all()[0]
exc_info = pickle.loads(exception.exception.encode()).exc_info
error_type, msg = exc_info[0], exc_info[1]
self.assertEqual(error_type, ValueError)
self.assertEqual("some unexpected error", str(msg))
@patch("eventkit_cloud.tasks.export_tasks.get_data_package_manifest")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.retry")
@patch("shutil.copy")
@patch("os.remove")
@patch("eventkit_cloud.tasks.export_tasks.ZipFile")
@patch("os.walk")
@patch("os.path.getsize")
def test_zipfile_task(
self, os_path_getsize, mock_os_walk, mock_zipfile, remove, copy, mock_retry, mock_get_data_package_manifest
):
os_path_getsize.return_value = 20
class MockZipFile:
def __init__(self):
self.files = {}
def __iter__(self):
return iter(self.files)
def write(self, filename, **kw):
arcname = kw.get("arcname", filename)
self.files[arcname] = filename
def __exit__(self, *args, **kw):
pass
def __enter__(self, *args, **kw):
return self
def testzip(self):
return None
expected_archived_files = {
"MANIFEST/manifest.xml": "MANIFEST/manifest.xml",
"data/osm/file1.txt": "osm/file1.txt",
"data/osm/file2.txt": "osm/file2.txt",
}
run_uid = str(self.run.uid)
self.run.job.include_zipfile = True
self.run.job.event = "test"
self.run.job.save()
run_zip_file = RunZipFile.objects.create(run=self.run)
zipfile = MockZipFile()
mock_zipfile.return_value = zipfile
provider_slug = "osm"
zipfile_path = os.path.join(self.stage_dir, "{0}".format(run_uid), provider_slug, "test.gpkg")
expected_manifest_file = os.path.join("MANIFEST", "manifest.xml")
mock_get_data_package_manifest.return_value = expected_manifest_file
files = {
"{0}/file1.txt".format(provider_slug): "data/{0}/file1.txt".format(provider_slug),
"{0}/file2.txt".format(provider_slug): "data/{0}/file2.txt".format(provider_slug),
}
mock_os_walk.return_value = [
(
os.path.join(self.stage_dir, run_uid, provider_slug),
None,
["test.gpkg", "test.om5", "test.osm"], # om5 and osm should get filtered out
)
]
result = zip_files(files=files, run_zip_file_uid=run_zip_file.uid, file_path=zipfile_path)
self.assertEqual(zipfile.files, expected_archived_files)
self.assertEqual(result, zipfile_path)
mock_get_data_package_manifest.assert_called_once()
        zipfile.testzip = Mock(side_effect=Exception("Bad Zip"))
with self.assertRaises(Exception):
zip_files(files=files, file_path=zipfile_path)
@patch("celery.app.task.Task.request")
@patch("eventkit_cloud.tasks.export_tasks.geopackage")
def test_run_bounds_export_task(self, mock_geopackage, mock_request):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
provider_slug = "provider_slug"
mock_geopackage.add_geojson_to_geopackage.return_value = os.path.join(
self.stage_dir, "{}_bounds.gpkg".format(provider_slug)
)
expected_output_path = os.path.join(self.stage_dir, "{}_bounds.gpkg".format(provider_slug))
export_provider_task = DataProviderTaskRecord.objects.create(run=self.run, provider=self.provider)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=bounds_export_task.name
)
bounds_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid))
result = bounds_export_task.run(
run_uid=self.run.uid, task_uid=str(saved_export_task.uid), stage_dir=self.stage_dir, provider_slug=job_name
)
self.assertEqual(expected_output_path, result["result"])
        # test the task's update_task_state method
run_task = ExportTaskRecord.objects.get(celery_uid=celery_uid)
self.assertIsNotNone(run_task)
self.assertEqual(TaskState.RUNNING.value, run_task.status)
@override_settings(CELERY_GROUP_NAME="test")
@patch("eventkit_cloud.tasks.task_factory.TaskFactory")
@patch("eventkit_cloud.tasks.export_tasks.ExportRun")
@patch("eventkit_cloud.tasks.export_tasks.socket")
def test_pickup_run_task(self, socket, mock_export_run, task_factory):
mock_run = MagicMock()
mock_run.uid = self.run.uid
mock_run.status = TaskState.SUBMITTED.value
# This would normally return providers.
mock_run.data_provider_task_records.exclude.return_value = True
mock_export_run.objects.get.return_value = mock_run
socket.gethostname.return_value = "test"
self.assertEqual("Pickup Run", pick_up_run_task.name)
pick_up_run_task.run(run_uid=mock_run.uid, user_details={"username": "test_pickup_run_task"})
task_factory.assert_called_once()
expected_user_details = {"username": "test_pickup_run_task"}
task_factory.return_value.parse_tasks.assert_called_once_with(
run_uid=mock_run.uid,
user_details=expected_user_details,
worker="test",
run_zip_file_slug_sets=None,
session_token=None,
queue_group="test",
)
mock_run.download_data.assert_called_once()
@patch("eventkit_cloud.tasks.export_tasks.logger")
@patch("shutil.rmtree")
@patch("os.path.isdir")
def test_finalize_run_task_after_return(self, isdir, rmtree, logger):
celery_uid = str(uuid.uuid4())
run_uid = self.run.uid
isdir.return_value = True
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, name="Shapefile Export", provider=self.provider
)
ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
celery_uid=celery_uid,
status="SUCCESS",
name="Default Shapefile Export",
)
finalize_run_task.after_return("status", {"stage_dir": self.stage_dir}, run_uid, (), {}, "Exception Info")
isdir.assert_called_with(self.stage_dir)
rmtree.assert_called_with(self.stage_dir)
rmtree.side_effect = IOError()
finalize_run_task.after_return("status", {"stage_dir": self.stage_dir}, run_uid, (), {}, "Exception Info")
rmtree.assert_called_with(self.stage_dir)
self.assertRaises(IOError, rmtree)
logger.error.assert_called_once()
@patch("eventkit_cloud.tasks.export_tasks.EmailMultiAlternatives")
def test_finalize_run_task(self, email):
celery_uid = str(uuid.uuid4())
run_uid = self.run.uid
export_provider_task = DataProviderTaskRecord.objects.create(
status=TaskState.SUCCESS.value, run=self.run, name="Shapefile Export", provider=self.provider
)
ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
celery_uid=celery_uid,
status=TaskState.SUCCESS.value,
name="Default Shapefile Export",
)
self.assertEqual("Finalize Run Task", finalize_run_task.name)
finalize_run_task.run(run_uid=run_uid, stage_dir=self.stage_dir)
email().send.assert_called_once()
@patch("eventkit_cloud.tasks.export_tasks.RocketChat")
@patch("eventkit_cloud.tasks.export_tasks.EmailMultiAlternatives")
@patch("shutil.rmtree")
@patch("os.path.isdir")
def test_export_task_error_handler(self, isdir, rmtree, email, rocket_chat):
celery_uid = str(uuid.uuid4())
task_id = str(uuid.uuid4())
run_uid = self.run.uid
site_url = settings.SITE_URL
url = "{0}/status/{1}".format(site_url.rstrip("/"), self.run.job.uid)
os.environ["ROCKETCHAT_NOTIFICATIONS"] = json.dumps(
{"auth_token": "auth_token", "user_id": "user_id", "channels": ["channel"], "url": "http://api.example.dev"}
)
with self.settings(
ROCKETCHAT_NOTIFICATIONS={
"auth_token": "auth_token",
"user_id": "user_id",
"channels": ["channel"],
"url": "http://api.example.dev",
}
):
rocketchat_notifications = settings.ROCKETCHAT_NOTIFICATIONS
channel = rocketchat_notifications["channels"][0]
message = f"@here: A DataPack has failed during processing. {url}"
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, name="Shapefile Export", provider=self.provider
)
ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
uid=task_id,
celery_uid=celery_uid,
status=TaskState.FAILED.value,
name="Default Shapefile Export",
)
self.assertEqual("Export Task Error Handler", export_task_error_handler.name)
export_task_error_handler.run(run_uid=run_uid, task_id=task_id, stage_dir=self.stage_dir)
isdir.assert_any_call(self.stage_dir)
rmtree.assert_called_once_with(self.stage_dir)
email().send.assert_called_once()
rocket_chat.assert_called_once_with(**rocketchat_notifications)
rocket_chat().post_message.assert_called_once_with(channel, message)
@patch("eventkit_cloud.tasks.export_tasks.kill_task")
def test_cancel_task(self, mock_kill_task):
worker_name = "test_worker"
task_pid = 55
celery_uid = uuid.uuid4()
with patch("eventkit_cloud.jobs.signals.Group") as mock_group:
mock_group.objects.get.return_value = self.group
user = User.objects.create(username="test_user", password="test_password", email="[email protected]")
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, name="test_provider_task", provider=self.provider, status=TaskState.PENDING.value
)
export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
status=TaskState.PENDING.value,
name="test_task",
celery_uid=celery_uid,
pid=task_pid,
worker=worker_name,
)
self.assertEqual("Cancel Export Provider Task", cancel_export_provider_task.name)
cancel_export_provider_task.run(
data_provider_task_uid=export_provider_task.uid, canceling_username=user.username
)
mock_kill_task.apply_async.assert_called_once_with(
kwargs={"task_pid": task_pid, "celery_uid": celery_uid},
queue="{0}.priority".format(worker_name),
priority=TaskPriority.CANCEL.value,
routing_key="{0}.priority".format(worker_name),
)
export_task = ExportTaskRecord.objects.get(uid=export_task.uid)
export_provider_task = DataProviderTaskRecord.objects.get(uid=export_provider_task.uid)
self.assertEqual(export_task.status, TaskState.CANCELED.value)
self.assertEqual(export_provider_task.status, TaskState.CANCELED.value)
def test_parse_result(self):
result = parse_result(None, None)
self.assertIsNone(result)
task_result = [{"test": True}]
expected_result = True
returned_result = parse_result(task_result, "test")
self.assertEqual(expected_result, returned_result)
task_result = {"test": True}
expected_result = True
returned_result = parse_result(task_result, "test")
self.assertEqual(expected_result, returned_result)
def test_finalize_export_provider_task(self):
worker_name = "test_worker"
task_pid = 55
filename = "test.gpkg"
celery_uid = uuid.uuid4()
run_uid = self.run.uid
self.job.include_zipfile = True
self.job.save()
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, name="test_provider_task", status=TaskState.COMPLETED.value, provider=self.provider
)
result = FileProducingTaskResult.objects.create(filename=filename, size=10)
ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
status=TaskState.COMPLETED.value,
name="test_task",
celery_uid=celery_uid,
pid=task_pid,
worker=worker_name,
result=result,
)
        download_root = settings.EXPORT_DOWNLOAD_ROOT.rstrip("/\\")
run_dir = os.path.join(download_root, str(run_uid))
finalize_export_provider_task.run(
result={"status": TaskState.SUCCESS.value},
run_uid=self.run.uid,
data_provider_task_uid=export_provider_task.uid,
run_dir=run_dir,
status=TaskState.COMPLETED.value,
)
export_provider_task.refresh_from_db()
self.assertEqual(export_provider_task.status, TaskState.COMPLETED.value)
@patch("eventkit_cloud.tasks.export_tasks.progressive_kill")
@patch("eventkit_cloud.tasks.export_tasks.AsyncResult")
def test_kill_task(self, async_result, mock_progressive_kill):
# Ensure that kill isn't called with default.
task_pid = -1
celery_uid = uuid.uuid4()
self.assertEqual("Kill Task", kill_task.name)
kill_task.run(task_pid=task_pid, celery_uid=celery_uid)
mock_progressive_kill.assert_not_called()
# Ensure that kill is not called with an invalid state
task_pid = 55
async_result.return_value = Mock(state=celery.states.FAILURE)
self.assertEqual("Kill Task", kill_task.name)
kill_task.run(task_pid=task_pid, celery_uid=celery_uid)
mock_progressive_kill.assert_not_called()
# Ensure that kill is called with a valid pid
task_pid = 55
async_result.return_value = Mock(state=celery.states.STARTED)
self.assertEqual("Kill Task", kill_task.name)
kill_task.run(task_pid=task_pid, celery_uid=celery_uid)
mock_progressive_kill.assert_called_once_with(task_pid)
@patch("eventkit_cloud.tasks.export_tasks.ExportRun")
def test_wait_for_providers_task(self, mock_export_run):
mock_run_uid = str(uuid.uuid4())
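        # With every provider task reporting SUCCESS, the callback should be applied.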
mock_provider_task = Mock(status=TaskState.SUCCESS.value)
mock_export_run.objects.filter().first.return_value = Mock()
mock_export_run.objects.filter().first().data_provider_task_records.filter.return_value = [mock_provider_task]
callback_task = MagicMock()
apply_args = {"arg1": "example_value"}
wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
callback_task.apply_async.assert_called_once_with(**apply_args)
callback_task.reset_mock()
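        # A provider task still RUNNING must not trigger the callback.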
mock_provider_task = Mock(status=TaskState.RUNNING.value)
mock_export_run.objects.filter().first.return_value = Mock()
mock_export_run.objects.filter().first().data_provider_task_records.filter.return_value = [mock_provider_task]
wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
callback_task.apply_async.assert_not_called()
with self.assertRaises(Exception):
mock_export_run.reset_mock()
            mock_export_run.objects.filter().first().__bool__.return_value = False
wait_for_providers_task(run_uid=mock_run_uid, callback_task=callback_task, apply_args=apply_args)
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_arcgis_templates")
@patch("eventkit_cloud.tasks.export_tasks.get_metadata")
@patch("eventkit_cloud.tasks.export_tasks.zip_files")
@patch("eventkit_cloud.tasks.export_tasks.get_human_readable_metadata_document")
@patch("eventkit_cloud.tasks.export_tasks.get_style_files")
@patch("eventkit_cloud.tasks.export_tasks.json")
@patch("eventkit_cloud.tasks.export_tasks.generate_qgs_style")
@patch("os.path.join", side_effect=lambda *args: args[-1])
@patch("eventkit_cloud.tasks.export_tasks.get_export_task_record")
@patch("eventkit_cloud.tasks.export_tasks.DataProviderTaskRecord")
def test_create_zip_task(
self,
mock_DataProviderTaskRecord,
mock_get_export_task_record,
join,
mock_generate_qgs_style,
mock_json,
mock_get_style_files,
mock_get_human_readable_metadata_document,
mock_zip_files,
mock_get_metadata,
mock_get_arcgis_templates,
mock_get_export_filepath,
):
meta_files = {}
mock_get_style_files.return_value = style_files = {"/styles.png": "icons/styles.png"}
meta_files.update(style_files)
mock_get_arcgis_templates.return_value = arcgis_files = {"/arcgis/create_aprx.py": "arcgis/create_aprx.pyt"}
meta_files.update(arcgis_files)
mock_get_human_readable_metadata_document.return_value = human_metadata_doc = {
"/human_metadata.txt": "/human_metadata.txt"
}
meta_files.update(human_metadata_doc)
mock_generate_qgs_style.return_value = qgis_file = {"/style.qgs": "/style.qgs"}
meta_files.update(qgis_file)
include_files = {
"/var/lib/eventkit/exports_stage/7fadf34e-58f9-4bb8-ab57-adc1015c4269/osm/test.gpkg": "osm/test.gpkg",
"/var/lib/eventkit/exports_stage/7fadf34e-58f9-4bb8-ab57-adc1015c4269/osm/osm_selection.geojson": "osm/osm_selection.geojson", # NOQA
}
include_files.update(meta_files)
metadata = {
"aoi": "AOI",
"bbox": [-1, -1, 1, 1],
"data_sources": {
"osm": {
"copyright": None,
"description": "OpenStreetMap vector data provided in a custom thematic schema. \r\n\t\r\n\t"
"Data is grouped into separate tables (e.g. water, roads...).",
"file_path": "data/osm/test-osm-20181101.gpkg",
"file_type": ".gpkg",
"full_file_path": "/var/lib/eventkit/exports_stage/7fadf34e-58f9-4bb8-ab57-adc1015c4269/osm/"
"test.gpkg",
"last_update": "2018-10-29T04:35:02Z\n",
"metadata": "https://overpass-server.com/overpass/interpreter",
"name": "OpenStreetMap Data (Themes)",
"slug": "osm",
"type": "osm",
"uid": "0d08ddf6-35c1-464f-b271-75f6911c3f78",
}
},
"date": "20181101",
"description": "Test",
"has_elevation": False,
"has_raster": True,
"include_files": include_files,
"name": "test",
"project": "Test",
"run_uid": "7fadf34e-58f9-4bb8-ab57-adc1015c4269",
"url": "http://cloud.eventkit.test/status/2010025c-6d61-4a0b-8d5d-ff9c657259eb",
}
data_provider_task_record_uids = ["0d08ddf6-35c1-464f-b271-75f6911c3f78"]
mock_get_metadata.return_value = metadata
run_zip_file = RunZipFile.objects.create(run=self.run)
expected_zip = f"{metadata['name']}.zip"
mock_get_export_filepath.return_value = expected_zip
mock_zip_files.return_value = expected_zip
returned_zip = create_zip_task.run(
task_uid="UID",
data_provider_task_record_uids=data_provider_task_record_uids,
run_zip_file_uid=run_zip_file.uid,
)
mock_generate_qgs_style.assert_called_once_with(metadata)
mock_zip_files.assert_called_once_with(
files=metadata["include_files"],
run_zip_file_uid=run_zip_file.uid,
meta_files=meta_files,
file_path=expected_zip,
metadata=metadata,
)
mock_get_export_task_record.assert_called_once()
self.assertEqual(returned_zip, {"result": expected_zip})
def test_zip_file_task_invalid_params(self):
with self.assertRaises(Exception):
include_files = []
file_path = "/test/path.zip"
res = zip_files(include_files, file_path=file_path)
self.assertIsNone(res)
with self.assertRaises(Exception):
include_files = ["test1", "test2"]
file_path = ""
res = zip_files(include_files, file_path=file_path)
self.assertIsNone(res)
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.download_data")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_vector_file_export_task(self, mock_request, mock_convert, mock_download_data, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
expected_provider_slug = "vector-file"
self.provider.export_provider_type = DataProviderType.objects.get(type_name="vector-file")
self.provider.slug = expected_provider_slug
self.provider.config = None
self.provider.save()
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
layer = "foo"
config = test_cert_info
service_url = "https://abc.gov/file.geojson"
mock_convert.return_value = expected_output_path
mock_download_data.return_value = service_url
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=vector_file_export_task.name
)
vector_file_export_task.update_task_state(
task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid)
)
result = vector_file_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=layer,
config=config,
)
mock_convert.assert_called_once_with(
driver="gpkg",
input_file=expected_output_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=projection,
boundary=None,
layer_name=expected_provider_slug,
is_raster=False,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
self.assertEqual(expected_output_path, result["gpkg"])
mock_download_data.assert_called_once_with(
str(saved_export_task.uid),
service_url,
expected_output_path,
)
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.download_data")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_raster_file_export_task(self, mock_request, mock_convert, mock_download_data, mock_get_export_filepath):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
expected_provider_slug = "raster-file"
self.provider.export_provider_type = DataProviderType.objects.get(type_name="raster-file")
self.provider.slug = expected_provider_slug
self.provider.config = None
self.provider.save()
mock_get_export_filepath.return_value = expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
layer = "foo"
config = test_cert_info
service_url = "https://abc.gov/file.geojson"
mock_convert.return_value = expected_output_path
mock_download_data.return_value = service_url
previous_task_result = {"source": expected_output_path}
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=raster_file_export_task.name
)
raster_file_export_task.update_task_state(
task_status=TaskState.RUNNING.value, task_uid=str(saved_export_task.uid)
)
result = raster_file_export_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=str(saved_export_task.uid),
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=layer,
config=config,
)
mock_convert.assert_called_once_with(
driver="gpkg",
input_file=expected_output_path,
output_file=expected_output_path,
task_uid=str(saved_export_task.uid),
projection=projection,
boundary=None,
is_raster=True,
)
self.assertEqual(expected_output_path, result["result"])
self.assertEqual(expected_output_path, result["source"])
self.assertEqual(expected_output_path, result["gpkg"])
mock_download_data.assert_called_once_with(
str(saved_export_task.uid),
service_url,
expected_output_path,
)
@patch("eventkit_cloud.tasks.export_tasks.parse_result")
@patch("eventkit_cloud.tasks.export_tasks.os")
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.get_metadata")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("eventkit_cloud.tasks.export_tasks.mapproxy.MapproxyGeopackage")
def test_reprojection_task(
self, mock_mapproxy, mock_gdal_convert, mock_get_metadata, mock_get_export_filepath, mock_os, mock_parse_result
):
job_name = self.job.name.lower()
in_projection = "4326"
out_projection = "3857"
expected_provider_slug = "some_provider"
self.provider.slug = expected_provider_slug
self.provider.config = None
self.provider.save()
date = default_format_time(timezone.now())
driver = "tif"
mock_get_export_filepath.return_value = expected_infile = expected_outfile = "/path/to/file.ext"
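        # With projection=None the first run should skip reprojection entirely and
        # simply rename the input file to the expected output path.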
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
expected_input_path = os.path.join(self.stage_dir, expected_infile)
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task, status=TaskState.PENDING.value, name=reprojection_task.name
)
task_uid = str(saved_export_task.uid)
config = """
cert_info:
cert_path: '/path/to/cert'
cert_pass_var: 'fake_pass'
"""
selection = "selection.geojson"
metadata = {"data_sources": {expected_provider_slug: {"type": "something"}}}
mock_get_metadata.return_value = metadata
mock_gdal_convert.return_value = expected_output_path
mock_parse_result.side_effect = [driver, selection, None, expected_infile]
mock_get_export_filepath.return_value = expected_output_path
mock_os.path.splitext.return_value = ["path", driver]
previous_task_result = {"source": expected_output_path}
reprojection_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=task_uid,
stage_dir=self.stage_dir,
job_name=job_name,
projection=None,
config=None,
user_details=None,
)
# test reprojection is skipped
mock_os.rename.assert_called_once_with(expected_infile, expected_output_path)
mock_parse_result.side_effect = [driver, selection, None, expected_input_path]
reprojection_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=task_uid,
stage_dir=self.stage_dir,
job_name=job_name,
projection=out_projection,
config=config,
user_details=None,
)
# test reprojecting
mock_gdal_convert.assert_called_once_with(
driver=driver,
input_file=f"GTIFF_RAW:{expected_input_path}",
output_file=expected_output_path,
task_uid=task_uid,<|fim▁hole|> translate_params=ANY,
)
# test reprojecting raster geopackages
driver = "gpkg"
level_from = 0
level_to = 12
metadata = {
"data_sources": {expected_provider_slug: {"type": "raster", "level_from": level_from, "level_to": level_to}}
}
mock_get_metadata.return_value = metadata
expected_infile = f"{job_name}-{in_projection}-{expected_provider_slug}-{date}.{driver}"
expected_input_path = os.path.join(self.stage_dir, expected_infile)
mock_os.path.splitext.return_value = ["path", driver]
mock_parse_result.side_effect = [driver, selection, None, expected_input_path]
reprojection_task.run(
run_uid=self.run.uid,
result=previous_task_result,
task_uid=task_uid,
stage_dir=self.stage_dir,
job_name=job_name,
projection=out_projection,
config=config,
user_details=None,
)
mock_mapproxy.assert_called_once_with(
gpkgfile=expected_output_path,
service_url=expected_output_path,
name=job_name,
config=config,
bbox=ANY,
level_from=level_from,
level_to=level_to,
task_uid=task_uid,
selection=selection,
projection=out_projection,
input_gpkg=expected_input_path,
)
mock_mapproxy().convert.assert_called_once()
@patch("eventkit_cloud.tasks.export_tasks.get_export_filepath")
@patch("eventkit_cloud.tasks.export_tasks.find_in_zip")
@patch("eventkit_cloud.tasks.export_tasks.get_geometry")
@patch("eventkit_cloud.tasks.export_tasks.os.getenv")
@patch("eventkit_cloud.tasks.export_tasks.get_ogcapi_data")
@patch("eventkit_cloud.tasks.export_tasks.gdalutils.convert")
@patch("celery.app.task.Task.request")
def test_ogcapi_process_export_task(
self,
mock_request,
mock_convert,
mock_get_ogcapi_data,
mock_getenv,
mock_get_geometry,
mock_find_in_zip,
mock_get_export_filepath,
):
celery_uid = str(uuid.uuid4())
type(mock_request).id = PropertyMock(return_value=celery_uid)
job_name = self.job.name.lower()
projection = 4326
bbox = [1, 2, 3, 4]
example_geojson = "/path/to/geo.json"
example_result = {"selection": example_geojson}
expected_provider_slug = "ogc_api_proc"
example_format_slug = "fmt"
self.provider.export_provider_type = DataProviderType.objects.get(type_name="ogcapi-process")
self.provider.slug = expected_provider_slug
self.provider.config = None
self.provider.save()
expected_outfile = "/path/to/file.ext"
expected_output_path = os.path.join(self.stage_dir, expected_outfile)
expected_outzip = "/path/to/file.zip"
expected_outzip_path = os.path.join(self.stage_dir, expected_outzip)
source_file = "foo.gpkg"
export_provider_task = DataProviderTaskRecord.objects.create(
run=self.run, status=TaskState.PENDING.value, provider=self.provider
)
saved_export_task = ExportTaskRecord.objects.create(
export_provider_task=export_provider_task,
status=TaskState.PENDING.value,
name=ogcapi_process_export_task.name,
)
username = "user"
password = "password"
mock_getenv.return_value = f"{username}:{password}"
task_uid = str(saved_export_task.uid)
ogcapi_process_export_task.update_task_state(task_status=TaskState.RUNNING.value, task_uid=task_uid)
mock_geometry = Mock()
mock_get_geometry.return_value = mock_geometry
cred_var = "USER_PASS_ENV_VAR"
config = f"""
ogcapi_process:
id: 'eventkit'
inputs:
input:
value: 'random'
format:
value: 'gpkg'
outputs:
format:
mediaType: 'application/zip'
output_file_ext: '.gpkg'
download_credentials:
cred_var: '{cred_var}'
cred_var: '{cred_var}'
"""
service_url = "http://example.test/v1/"
session_token = "_some_token_"
mock_get_ogcapi_data.return_value = expected_outzip_path
mock_convert.return_value = expected_output_path
mock_find_in_zip.return_value = source_file
mock_get_export_filepath.side_effect = [expected_output_path, expected_outzip_path]
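        # With an explicit export_format_slug the downloaded zip is returned as-is
        # and no GDAL conversion should take place.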
result = ogcapi_process_export_task.run(
result=example_result,
run_uid=self.run.uid,
task_uid=task_uid,
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=None,
config=config,
bbox=bbox,
session_token=session_token,
export_format_slug=example_format_slug,
)
mock_get_ogcapi_data.assert_called_with(
config=config,
task_uid=task_uid,
stage_dir=self.stage_dir,
bbox=bbox,
service_url=service_url,
session_token=session_token,
export_format_slug=example_format_slug,
selection=example_geojson,
download_path=expected_outzip_path,
)
mock_convert.assert_not_called()
expected_result = {"selection": example_geojson, "result": expected_outzip_path}
self.assertEqual(result, expected_result)
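        # Without an explicit export format, the task should convert the source data
        # found inside the zip into a GeoPackage.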
example_source_data = "source_path"
mock_find_in_zip.return_value = example_source_data
mock_convert.return_value = expected_output_path
mock_get_export_filepath.side_effect = [expected_output_path, expected_outzip_path]
result = ogcapi_process_export_task.run(
result=example_result,
run_uid=self.run.uid,
task_uid=task_uid,
stage_dir=self.stage_dir,
job_name=job_name,
projection=projection,
service_url=service_url,
layer=None,
config=config,
bbox=bbox,
session_token=session_token,
)
expected_result = {
"driver": "gpkg",
"file_extension": ".gpkg",
"ogcapi_process": expected_output_path,
"source": expected_output_path,
"gpkg": expected_output_path,
"selection": example_geojson,
"result": expected_outzip_path,
}
self.assertEqual(result, expected_result)
mock_convert.assert_called_once_with(
driver="gpkg",
input_file=example_source_data,
output_file=expected_output_path,
task_uid=task_uid,
projection=projection,
boundary=bbox,
)
@patch("eventkit_cloud.tasks.export_tasks.extract_metadata_files")
@patch("eventkit_cloud.tasks.export_tasks.update_progress")
@patch("eventkit_cloud.tasks.export_tasks.download_data")
@patch("eventkit_cloud.tasks.export_tasks.OgcApiProcess")
@patch("eventkit_cloud.tasks.export_tasks.get_geometry")
def test_get_ogcapi_data(
self,
mock_get_geometry,
mock_ogc_api_process,
mock_download_data,
mock_update_progress,
mock_extract_metadata_files,
):
bbox = [1, 2, 3, 4]
example_geojson = "/path/to/geo.json"
example_format_slug = "fmt"
task_uid = "1234"
mock_geometry = Mock()
mock_get_geometry.return_value = mock_geometry
config = """
ogcapi_process:
id: 'eventkit'
inputs:
input:
value: 'random'
format:
value: 'gpkg'
outputs:
format:
mediaType: 'application/zip'
output_file_ext: '.gpkg'
download_credentials:
cert_info:
cert_path: "something"
cert_pass: "something"
cert_info:
cert_path: "something"
cert_pass: "something"
"""
        configuration = yaml.safe_load(config)["ogcapi_process"]
service_url = "http://example.test/v1/"
session_token = "_some_token_"
example_download_url = "https://example.test/path.zip"
example_download_path = "/example/file.gpkg"
mock_ogc_api_process().get_job_results.return_value = example_download_url
mock_download_data.return_value = example_download_path
result = get_ogcapi_data(
config=config,
task_uid=task_uid,
stage_dir=self.stage_dir,
bbox=bbox,
service_url=service_url,
session_token=session_token,
export_format_slug=example_format_slug,
selection=example_geojson,
download_path=example_download_path,
)
self.assertEqual(result, example_download_path)
        mock_ogc_api_process.assert_any_call(
url=service_url,
config=config,
session_token=session_token,
task_id=task_uid,
cred_var=configuration.get("cred_var"),
cert_info=configuration.get("cert_info"),
)
        mock_ogc_api_process().create_job.assert_called_once_with(mock_geometry, file_format=example_format_slug)
mock_download_data.assert_called_once_with(
task_uid, example_download_url, example_download_path, session=None, headers=None, token=None
)
mock_extract_metadata_files.assert_called_once_with(example_download_path, self.stage_dir)
class TestFormatTasks(ExportTaskBase):
def test_ensure_display(self):
self.assertTrue(FormatTask.display)<|fim▁end|> | projection=out_projection,
boundary=selection,
warp_params=ANY, |
<|file_name|>stdout.rs<|end_file_name|><|fim▁begin|>use opentelemetry::{
sdk::export::trace::stdout,
sdk::trace::{self, Sampler},
trace::Tracer,
};
fn main() {
// Install stdout exporter pipeline to be able to retrieve collected spans.
// For the demonstration, use `Sampler::AlwaysOn` sampler to sample all traces. In a production
// application, use `Sampler::ParentBased` or `Sampler::TraceIdRatioBased` with a desired ratio.
let tracer = stdout::new_pipeline()
.with_trace_config(trace::config().with_sampler(Sampler::AlwaysOn))
.install_simple();
<|fim▁hole|>}<|fim▁end|> | tracer.in_span("operation", |_cx| {}); |
<|file_name|>swell_foop_.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
"""
Sandbox for "swell-foop" command
"""
import glob
import os
import signal
import sys
import network_mod
import subtask_mod
class Main:
"""
Main class
"""
def __init__(self) -> None:
try:
self.config()
sys.exit(self.run())
except (EOFError, KeyboardInterrupt):
sys.exit(114)
except SystemExit as exception:
sys.exit(exception)
@staticmethod
def config() -> None:
"""
Configure program
"""
if hasattr(signal, 'SIGPIPE'):
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
if os.name == 'nt':
argv = []
for arg in sys.argv:
files = glob.glob(arg) # Fixes Windows globbing bug
if files:
argv.extend(files)
else:
argv.append(arg)
sys.argv = argv
@staticmethod
def run() -> int:
"""
Start program
"""
command = network_mod.Sandbox(
'/usr/games/swell-foop',
args=sys.argv[1:],
errors='stop'
)
# Start slow for very large history (.local/share/swell-foop/)<|fim▁hole|> if not os.path.isfile(command.get_file() + '.py'):
configs = [
'/dev/dri',
f'/run/user/{os.getuid()}/dconf',
os.path.join(os.getenv('HOME', '/'), '.config/dconf/user'),
]
command.sandbox(configs)
subtask_mod.Background(command.get_cmdline()).run()
return 0
if __name__ == '__main__':
if '--pydoc' in sys.argv:
help(__name__)
else:
Main()<|fim▁end|> | |
<|file_name|>token.rs<|end_file_name|><|fim▁begin|>#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct Token(pub usize);
use slab;
impl Token {<|fim▁hole|> #[inline]
pub fn as_usize(self) -> usize {
let Token(inner) = self;
inner
}
}
impl slab::Index for Token {
fn from_usize(i: usize) -> Token {
Token(i)
}
fn as_usize(&self) -> usize {
Token::as_usize(*self)
}
}<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
COMP_ID = 'layer'
_ = trstring_factory(COMP_ID)<|fim▁end|> | from ..i18n import trstring_factory |